repo_id stringclasses 875
values | size int64 974 38.9k | file_path stringlengths 10 308 | content stringlengths 974 38.9k |
|---|---|---|---|
googleapis/google-cloud-java | 35,512 | java-bigquerydatapolicy/proto-google-cloud-bigquerydatapolicy-v2beta1/src/main/java/com/google/cloud/bigquery/datapolicies/v2beta1/AddGranteesRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/bigquery/datapolicies/v2beta1/datapolicy.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.bigquery.datapolicies.v2beta1;
/**
*
*
* <pre>
* Request message for the AddGrantees method.
* </pre>
*
* Protobuf type {@code google.cloud.bigquery.datapolicies.v2beta1.AddGranteesRequest}
*/
public final class AddGranteesRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.bigquery.datapolicies.v2beta1.AddGranteesRequest)
AddGranteesRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use AddGranteesRequest.newBuilder() to construct.
private AddGranteesRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private AddGranteesRequest() {
dataPolicy_ = "";
grantees_ = com.google.protobuf.LazyStringArrayList.emptyList();
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new AddGranteesRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.bigquery.datapolicies.v2beta1.DataPolicyProto
.internal_static_google_cloud_bigquery_datapolicies_v2beta1_AddGranteesRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.bigquery.datapolicies.v2beta1.DataPolicyProto
.internal_static_google_cloud_bigquery_datapolicies_v2beta1_AddGranteesRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.bigquery.datapolicies.v2beta1.AddGranteesRequest.class,
com.google.cloud.bigquery.datapolicies.v2beta1.AddGranteesRequest.Builder.class);
}
public static final int DATA_POLICY_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object dataPolicy_ = "";
/**
*
*
* <pre>
* Required. Resource name of this data policy, in the format of
* `projects/{project_number}/locations/{location_id}/dataPolicies/{data_policy_id}`.
* </pre>
*
* <code>
* string data_policy = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The dataPolicy.
*/
@java.lang.Override
public java.lang.String getDataPolicy() {
java.lang.Object ref = dataPolicy_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
dataPolicy_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. Resource name of this data policy, in the format of
* `projects/{project_number}/locations/{location_id}/dataPolicies/{data_policy_id}`.
* </pre>
*
* <code>
* string data_policy = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for dataPolicy.
*/
@java.lang.Override
public com.google.protobuf.ByteString getDataPolicyBytes() {
java.lang.Object ref = dataPolicy_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
dataPolicy_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int GRANTEES_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private com.google.protobuf.LazyStringArrayList grantees_ =
com.google.protobuf.LazyStringArrayList.emptyList();
/**
*
*
* <pre>
* Required. IAM principal that should be granted Fine Grained Access to the
* underlying data goverened by the data policy. The target data policy is
* determined by the `data_policy` field.
*
* Uses the [IAM V2 principal
* syntax](https://cloud.google.com/iam/docs/principal-identifiers#v2).
* Supported principal types:
*
* * User
* * Group
* * Service account
* </pre>
*
* <code>repeated string grantees = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return A list containing the grantees.
*/
public com.google.protobuf.ProtocolStringList getGranteesList() {
return grantees_;
}
/**
*
*
* <pre>
* Required. IAM principal that should be granted Fine Grained Access to the
* underlying data goverened by the data policy. The target data policy is
* determined by the `data_policy` field.
*
* Uses the [IAM V2 principal
* syntax](https://cloud.google.com/iam/docs/principal-identifiers#v2).
* Supported principal types:
*
* * User
* * Group
* * Service account
* </pre>
*
* <code>repeated string grantees = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The count of grantees.
*/
public int getGranteesCount() {
return grantees_.size();
}
/**
*
*
* <pre>
* Required. IAM principal that should be granted Fine Grained Access to the
* underlying data goverened by the data policy. The target data policy is
* determined by the `data_policy` field.
*
* Uses the [IAM V2 principal
* syntax](https://cloud.google.com/iam/docs/principal-identifiers#v2).
* Supported principal types:
*
* * User
* * Group
* * Service account
* </pre>
*
* <code>repeated string grantees = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param index The index of the element to return.
* @return The grantees at the given index.
*/
public java.lang.String getGrantees(int index) {
return grantees_.get(index);
}
/**
*
*
* <pre>
* Required. IAM principal that should be granted Fine Grained Access to the
* underlying data goverened by the data policy. The target data policy is
* determined by the `data_policy` field.
*
* Uses the [IAM V2 principal
* syntax](https://cloud.google.com/iam/docs/principal-identifiers#v2).
* Supported principal types:
*
* * User
* * Group
* * Service account
* </pre>
*
* <code>repeated string grantees = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param index The index of the value to return.
* @return The bytes of the grantees at the given index.
*/
public com.google.protobuf.ByteString getGranteesBytes(int index) {
return grantees_.getByteString(index);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(dataPolicy_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, dataPolicy_);
}
for (int i = 0; i < grantees_.size(); i++) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, grantees_.getRaw(i));
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(dataPolicy_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, dataPolicy_);
}
{
int dataSize = 0;
for (int i = 0; i < grantees_.size(); i++) {
dataSize += computeStringSizeNoTag(grantees_.getRaw(i));
}
size += dataSize;
size += 1 * getGranteesList().size();
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.bigquery.datapolicies.v2beta1.AddGranteesRequest)) {
return super.equals(obj);
}
com.google.cloud.bigquery.datapolicies.v2beta1.AddGranteesRequest other =
(com.google.cloud.bigquery.datapolicies.v2beta1.AddGranteesRequest) obj;
if (!getDataPolicy().equals(other.getDataPolicy())) return false;
if (!getGranteesList().equals(other.getGranteesList())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + DATA_POLICY_FIELD_NUMBER;
hash = (53 * hash) + getDataPolicy().hashCode();
if (getGranteesCount() > 0) {
hash = (37 * hash) + GRANTEES_FIELD_NUMBER;
hash = (53 * hash) + getGranteesList().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.bigquery.datapolicies.v2beta1.AddGranteesRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.bigquery.datapolicies.v2beta1.AddGranteesRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.bigquery.datapolicies.v2beta1.AddGranteesRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.bigquery.datapolicies.v2beta1.AddGranteesRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.bigquery.datapolicies.v2beta1.AddGranteesRequest parseFrom(
byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.bigquery.datapolicies.v2beta1.AddGranteesRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.bigquery.datapolicies.v2beta1.AddGranteesRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.bigquery.datapolicies.v2beta1.AddGranteesRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.bigquery.datapolicies.v2beta1.AddGranteesRequest
parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.bigquery.datapolicies.v2beta1.AddGranteesRequest
parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.bigquery.datapolicies.v2beta1.AddGranteesRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.bigquery.datapolicies.v2beta1.AddGranteesRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.bigquery.datapolicies.v2beta1.AddGranteesRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request message for the AddGrantees method.
* </pre>
*
* Protobuf type {@code google.cloud.bigquery.datapolicies.v2beta1.AddGranteesRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.bigquery.datapolicies.v2beta1.AddGranteesRequest)
com.google.cloud.bigquery.datapolicies.v2beta1.AddGranteesRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.bigquery.datapolicies.v2beta1.DataPolicyProto
.internal_static_google_cloud_bigquery_datapolicies_v2beta1_AddGranteesRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.bigquery.datapolicies.v2beta1.DataPolicyProto
.internal_static_google_cloud_bigquery_datapolicies_v2beta1_AddGranteesRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.bigquery.datapolicies.v2beta1.AddGranteesRequest.class,
com.google.cloud.bigquery.datapolicies.v2beta1.AddGranteesRequest.Builder.class);
}
// Construct using
// com.google.cloud.bigquery.datapolicies.v2beta1.AddGranteesRequest.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
dataPolicy_ = "";
grantees_ = com.google.protobuf.LazyStringArrayList.emptyList();
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.bigquery.datapolicies.v2beta1.DataPolicyProto
.internal_static_google_cloud_bigquery_datapolicies_v2beta1_AddGranteesRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.bigquery.datapolicies.v2beta1.AddGranteesRequest
getDefaultInstanceForType() {
return com.google.cloud.bigquery.datapolicies.v2beta1.AddGranteesRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.bigquery.datapolicies.v2beta1.AddGranteesRequest build() {
com.google.cloud.bigquery.datapolicies.v2beta1.AddGranteesRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.bigquery.datapolicies.v2beta1.AddGranteesRequest buildPartial() {
com.google.cloud.bigquery.datapolicies.v2beta1.AddGranteesRequest result =
new com.google.cloud.bigquery.datapolicies.v2beta1.AddGranteesRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(
com.google.cloud.bigquery.datapolicies.v2beta1.AddGranteesRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.dataPolicy_ = dataPolicy_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
grantees_.makeImmutable();
result.grantees_ = grantees_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.bigquery.datapolicies.v2beta1.AddGranteesRequest) {
return mergeFrom((com.google.cloud.bigquery.datapolicies.v2beta1.AddGranteesRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(
com.google.cloud.bigquery.datapolicies.v2beta1.AddGranteesRequest other) {
if (other
== com.google.cloud.bigquery.datapolicies.v2beta1.AddGranteesRequest.getDefaultInstance())
return this;
if (!other.getDataPolicy().isEmpty()) {
dataPolicy_ = other.dataPolicy_;
bitField0_ |= 0x00000001;
onChanged();
}
if (!other.grantees_.isEmpty()) {
if (grantees_.isEmpty()) {
grantees_ = other.grantees_;
bitField0_ |= 0x00000002;
} else {
ensureGranteesIsMutable();
grantees_.addAll(other.grantees_);
}
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
dataPolicy_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
java.lang.String s = input.readStringRequireUtf8();
ensureGranteesIsMutable();
grantees_.add(s);
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object dataPolicy_ = "";
/**
*
*
* <pre>
* Required. Resource name of this data policy, in the format of
* `projects/{project_number}/locations/{location_id}/dataPolicies/{data_policy_id}`.
* </pre>
*
* <code>
* string data_policy = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The dataPolicy.
*/
public java.lang.String getDataPolicy() {
java.lang.Object ref = dataPolicy_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
dataPolicy_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. Resource name of this data policy, in the format of
* `projects/{project_number}/locations/{location_id}/dataPolicies/{data_policy_id}`.
* </pre>
*
* <code>
* string data_policy = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for dataPolicy.
*/
public com.google.protobuf.ByteString getDataPolicyBytes() {
java.lang.Object ref = dataPolicy_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
dataPolicy_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. Resource name of this data policy, in the format of
* `projects/{project_number}/locations/{location_id}/dataPolicies/{data_policy_id}`.
* </pre>
*
* <code>
* string data_policy = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The dataPolicy to set.
* @return This builder for chaining.
*/
public Builder setDataPolicy(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
dataPolicy_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Resource name of this data policy, in the format of
* `projects/{project_number}/locations/{location_id}/dataPolicies/{data_policy_id}`.
* </pre>
*
* <code>
* string data_policy = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearDataPolicy() {
dataPolicy_ = getDefaultInstance().getDataPolicy();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Resource name of this data policy, in the format of
* `projects/{project_number}/locations/{location_id}/dataPolicies/{data_policy_id}`.
* </pre>
*
* <code>
* string data_policy = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for dataPolicy to set.
* @return This builder for chaining.
*/
public Builder setDataPolicyBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
dataPolicy_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private com.google.protobuf.LazyStringArrayList grantees_ =
com.google.protobuf.LazyStringArrayList.emptyList();
private void ensureGranteesIsMutable() {
if (!grantees_.isModifiable()) {
grantees_ = new com.google.protobuf.LazyStringArrayList(grantees_);
}
bitField0_ |= 0x00000002;
}
/**
*
*
* <pre>
* Required. IAM principal that should be granted Fine Grained Access to the
* underlying data goverened by the data policy. The target data policy is
* determined by the `data_policy` field.
*
* Uses the [IAM V2 principal
* syntax](https://cloud.google.com/iam/docs/principal-identifiers#v2).
* Supported principal types:
*
* * User
* * Group
* * Service account
* </pre>
*
* <code>repeated string grantees = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return A list containing the grantees.
*/
public com.google.protobuf.ProtocolStringList getGranteesList() {
grantees_.makeImmutable();
return grantees_;
}
/**
*
*
* <pre>
* Required. IAM principal that should be granted Fine Grained Access to the
* underlying data goverened by the data policy. The target data policy is
* determined by the `data_policy` field.
*
* Uses the [IAM V2 principal
* syntax](https://cloud.google.com/iam/docs/principal-identifiers#v2).
* Supported principal types:
*
* * User
* * Group
* * Service account
* </pre>
*
* <code>repeated string grantees = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The count of grantees.
*/
public int getGranteesCount() {
return grantees_.size();
}
/**
*
*
* <pre>
* Required. IAM principal that should be granted Fine Grained Access to the
* underlying data goverened by the data policy. The target data policy is
* determined by the `data_policy` field.
*
* Uses the [IAM V2 principal
* syntax](https://cloud.google.com/iam/docs/principal-identifiers#v2).
* Supported principal types:
*
* * User
* * Group
* * Service account
* </pre>
*
* <code>repeated string grantees = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param index The index of the element to return.
* @return The grantees at the given index.
*/
public java.lang.String getGrantees(int index) {
return grantees_.get(index);
}
/**
*
*
* <pre>
* Required. IAM principal that should be granted Fine Grained Access to the
* underlying data goverened by the data policy. The target data policy is
* determined by the `data_policy` field.
*
* Uses the [IAM V2 principal
* syntax](https://cloud.google.com/iam/docs/principal-identifiers#v2).
* Supported principal types:
*
* * User
* * Group
* * Service account
* </pre>
*
* <code>repeated string grantees = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param index The index of the value to return.
* @return The bytes of the grantees at the given index.
*/
public com.google.protobuf.ByteString getGranteesBytes(int index) {
return grantees_.getByteString(index);
}
/**
*
*
* <pre>
* Required. IAM principal that should be granted Fine Grained Access to the
* underlying data goverened by the data policy. The target data policy is
* determined by the `data_policy` field.
*
* Uses the [IAM V2 principal
* syntax](https://cloud.google.com/iam/docs/principal-identifiers#v2).
* Supported principal types:
*
* * User
* * Group
* * Service account
* </pre>
*
* <code>repeated string grantees = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param index The index to set the value at.
* @param value The grantees to set.
* @return This builder for chaining.
*/
public Builder setGrantees(int index, java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
ensureGranteesIsMutable();
grantees_.set(index, value);
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. IAM principal that should be granted Fine Grained Access to the
* underlying data goverened by the data policy. The target data policy is
* determined by the `data_policy` field.
*
* Uses the [IAM V2 principal
* syntax](https://cloud.google.com/iam/docs/principal-identifiers#v2).
* Supported principal types:
*
* * User
* * Group
* * Service account
* </pre>
*
* <code>repeated string grantees = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The grantees to add.
* @return This builder for chaining.
*/
public Builder addGrantees(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
ensureGranteesIsMutable();
grantees_.add(value);
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. IAM principal that should be granted Fine Grained Access to the
* underlying data goverened by the data policy. The target data policy is
* determined by the `data_policy` field.
*
* Uses the [IAM V2 principal
* syntax](https://cloud.google.com/iam/docs/principal-identifiers#v2).
* Supported principal types:
*
* * User
* * Group
* * Service account
* </pre>
*
* <code>repeated string grantees = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param values The grantees to add.
* @return This builder for chaining.
*/
public Builder addAllGrantees(java.lang.Iterable<java.lang.String> values) {
ensureGranteesIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, grantees_);
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. IAM principal that should be granted Fine Grained Access to the
* underlying data goverened by the data policy. The target data policy is
* determined by the `data_policy` field.
*
* Uses the [IAM V2 principal
* syntax](https://cloud.google.com/iam/docs/principal-identifiers#v2).
* Supported principal types:
*
* * User
* * Group
* * Service account
* </pre>
*
* <code>repeated string grantees = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return This builder for chaining.
*/
public Builder clearGrantees() {
grantees_ = com.google.protobuf.LazyStringArrayList.emptyList();
bitField0_ = (bitField0_ & ~0x00000002);
;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. IAM principal that should be granted Fine Grained Access to the
* underlying data goverened by the data policy. The target data policy is
* determined by the `data_policy` field.
*
* Uses the [IAM V2 principal
* syntax](https://cloud.google.com/iam/docs/principal-identifiers#v2).
* Supported principal types:
*
* * User
* * Group
* * Service account
* </pre>
*
* <code>repeated string grantees = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The bytes of the grantees to add.
* @return This builder for chaining.
*/
public Builder addGranteesBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
ensureGranteesIsMutable();
grantees_.add(value);
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.bigquery.datapolicies.v2beta1.AddGranteesRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.bigquery.datapolicies.v2beta1.AddGranteesRequest)
private static final com.google.cloud.bigquery.datapolicies.v2beta1.AddGranteesRequest
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.bigquery.datapolicies.v2beta1.AddGranteesRequest();
}
public static com.google.cloud.bigquery.datapolicies.v2beta1.AddGranteesRequest
getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<AddGranteesRequest> PARSER =
new com.google.protobuf.AbstractParser<AddGranteesRequest>() {
@java.lang.Override
public AddGranteesRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<AddGranteesRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<AddGranteesRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.bigquery.datapolicies.v2beta1.AddGranteesRequest
getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
apache/flink | 35,677 | flink-core/src/main/java/org/apache/flink/configuration/SecurityOptions.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.configuration;
import org.apache.flink.annotation.Experimental;
import org.apache.flink.annotation.PublicEvolving;
import org.apache.flink.annotation.docs.Documentation;
import org.apache.flink.configuration.description.Description;
import org.apache.flink.core.security.token.DelegationTokenProvider;
import java.security.KeyStore;
import java.time.Duration;
import java.util.List;
import static org.apache.flink.configuration.ConfigOptions.key;
import static org.apache.flink.configuration.description.LineBreakElement.linebreak;
import static org.apache.flink.configuration.description.LinkElement.link;
import static org.apache.flink.configuration.description.TextElement.code;
import static org.apache.flink.configuration.description.TextElement.text;
import static org.apache.flink.util.Preconditions.checkNotNull;
/** The set of configuration options relating to security. */
@PublicEvolving
public class SecurityOptions {
public static final String DELEGATION_TOKEN_PROVIDER_PREFIX =
        DelegationTokenProvider.CONFIG_PREFIX + ".<serviceName>";
private static final String DEFAULT_KEYSTORE_DOC = "JVM default keystore type";
// ------------------------------------------------------------------------
// Custom Security Service Loader
// ------------------------------------------------------------------------
public static final ConfigOption<List<String>> SECURITY_CONTEXT_FACTORY_CLASSES =
        key("security.context.factory.classes")
                .stringType()
                .asList()
                .defaultValues(
                        "org.apache.flink.runtime.security.contexts.HadoopSecurityContextFactory",
                        "org.apache.flink.runtime.security.contexts.NoOpSecurityContextFactory")
                .withDescription(
                        "List of factories that should be used to instantiate a security context. "
                                + "If multiple are configured, Flink will use the first compatible "
                                + "factory. You should have a NoOpSecurityContextFactory in this list "
                                + "as a fallback.");
public static final ConfigOption<List<String>> SECURITY_MODULE_FACTORY_CLASSES =
        key("security.module.factory.classes")
                .stringType()
                .asList()
                .defaultValues(
                        "org.apache.flink.runtime.security.modules.HadoopModuleFactory",
                        "org.apache.flink.runtime.security.modules.JaasModuleFactory",
                        "org.apache.flink.runtime.security.modules.ZookeeperModuleFactory")
                .withDescription(
                        "List of factories that should be used to instantiate security "
                                + "modules. All listed modules will be installed. Keep in mind that the "
                                + "configured security context might rely on some modules being present.");
// ------------------------------------------------------------------------
// Kerberos Options
// ------------------------------------------------------------------------
@Documentation.Section(Documentation.Sections.SECURITY_AUTH_KERBEROS)
public static final ConfigOption<String> KERBEROS_LOGIN_PRINCIPAL =
        key("security.kerberos.login.principal")
                .stringType()
                .noDefaultValue()
                .withDeprecatedKeys("security.principal")
                .withDescription("Kerberos principal name associated with the keytab.");
@Documentation.Section(Documentation.Sections.SECURITY_AUTH_KERBEROS)
public static final ConfigOption<String> KERBEROS_LOGIN_KEYTAB =
        key("security.kerberos.login.keytab")
                .stringType()
                .noDefaultValue()
                .withDeprecatedKeys("security.keytab")
                .withDescription(
                        "Absolute path to a Kerberos keytab file that contains the user credentials.");
public static final ConfigOption<String> KERBEROS_KRB5_PATH =
        key("security.kerberos.krb5-conf.path")
                .stringType()
                .noDefaultValue()
                .withDescription(
                        "Specify the local location of the krb5.conf file. If defined, this conf would be mounted on the JobManager and "
                                + "TaskManager containers/pods for Kubernetes and Yarn. Note: The KDC defined needs to be visible from inside the containers.");
@Documentation.Section(Documentation.Sections.SECURITY_AUTH_KERBEROS)
public static final ConfigOption<Boolean> KERBEROS_LOGIN_USETICKETCACHE =
        key("security.kerberos.login.use-ticket-cache")
                .booleanType()
                .defaultValue(true)
                .withDescription("Indicates whether to read from your Kerberos ticket cache.");
@Documentation.Section(Documentation.Sections.SECURITY_AUTH_KERBEROS)
public static final ConfigOption<String> KERBEROS_LOGIN_CONTEXTS =
        key("security.kerberos.login.contexts")
                .stringType()
                .noDefaultValue()
                .withDescription(
                        "A comma-separated list of login contexts to provide the Kerberos credentials to"
                                + " (for example, `Client,KafkaClient` to use the credentials for ZooKeeper authentication and for"
                                + " Kafka authentication)");
@Documentation.Section(Documentation.Sections.SECURITY_AUTH_KERBEROS)
public static final ConfigOption<Duration> KERBEROS_RELOGIN_PERIOD =
        key("security.kerberos.relogin.period")
                .durationType()
                .defaultValue(Duration.ofMinutes(1))
                .withDescription(
                        "The time period when keytab login happens automatically in order to always have a valid TGT.");
@Documentation.Section(Documentation.Sections.SECURITY_AUTH_KERBEROS)
public static final ConfigOption<List<String>> KERBEROS_HADOOP_FILESYSTEMS_TO_ACCESS =
        key("security.kerberos.access.hadoopFileSystems")
                .stringType()
                .asList()
                .noDefaultValue()
                .withDeprecatedKeys("yarn.security.kerberos.additionalFileSystems")
                .withDescription(
                        "A semicolon-separated list of Kerberos-secured Hadoop filesystems Flink is going to access. For example, "
                                + "security.kerberos.access.hadoopFileSystems=hdfs://namenode2:9002;hdfs://namenode3:9003. "
                                + "The JobManager needs to have access to these filesystems to retrieve the security tokens.");
// ------------------------------------------------------------------------
// Delegation Token Options
// ------------------------------------------------------------------------
@Documentation.Section(value = Documentation.Sections.SECURITY_DELEGATION_TOKEN, position = 1)
public static final ConfigOption<Boolean> DELEGATION_TOKENS_ENABLED =
        key("security.delegation.tokens.enabled")
                .booleanType()
                .defaultValue(true)
                .withDeprecatedKeys("security.kerberos.fetch.delegation-token")
                .withDescription(
                        "Indicates whether to start delegation tokens system for external services.");
@Documentation.Section(value = Documentation.Sections.SECURITY_DELEGATION_TOKEN, position = 2)
public static final ConfigOption<Duration> DELEGATION_TOKENS_RENEWAL_RETRY_BACKOFF =
        key("security.delegation.tokens.renewal.retry.backoff")
                .durationType()
                .defaultValue(Duration.ofHours(1))
                .withDeprecatedKeys("security.kerberos.tokens.renewal.retry.backoff")
                .withDescription(
                        "The time period how long to wait before retrying to obtain new delegation tokens after a failure.");
@Documentation.Section(value = Documentation.Sections.SECURITY_DELEGATION_TOKEN, position = 3)
public static final ConfigOption<Double> DELEGATION_TOKENS_RENEWAL_TIME_RATIO =
        key("security.delegation.tokens.renewal.time-ratio")
                .doubleType()
                .defaultValue(0.75)
                .withDeprecatedKeys("security.kerberos.tokens.renewal.time-ratio")
                .withDescription(
                        // Fixed typo: "tokens's" -> "token's".
                        "Ratio of the token's expiration time when new credentials should be re-obtained.");
@Documentation.SuffixOption(DELEGATION_TOKEN_PROVIDER_PREFIX)
@Documentation.Section(value = Documentation.Sections.SECURITY_DELEGATION_TOKEN, position = 4)
public static final ConfigOption<Boolean> DELEGATION_TOKEN_PROVIDER_ENABLED =
        key("enabled")
                .booleanType()
                .defaultValue(true)
                .withDescription(
                        "Controls whether to obtain credentials for services when security is "
                                + "enabled. By default, credentials for all supported services "
                                + "are retrieved when those services are configured, but it's "
                                + "possible to disable that behavior if it somehow conflicts "
                                + "with the application being run.");
/**
 * Returns a view over the given configuration via which options can be set/retrieved for the
 * given provider.
 *
 * <pre>
 * Configuration config = ...
 * SecurityOptions.forProvider(config, "my_provider")
 *     .set(SecurityOptions.DELEGATION_TOKEN_PROVIDER_ENABLED, false)
 *     ...
 * </pre>
 *
 * @param configuration backing configuration
 * @param providerName provider name
 * @return view over configuration
 */
@Experimental
public static Configuration forProvider(Configuration configuration, String providerName) {
    return new DelegatingConfiguration(
            configuration, DelegationTokenProvider.CONFIG_PREFIX + "." + providerName + ".");
}
// ------------------------------------------------------------------------
// ZooKeeper Security Options
// ------------------------------------------------------------------------
@Documentation.Section(Documentation.Sections.SECURITY_AUTH_ZOOKEEPER)
public static final ConfigOption<Boolean> ZOOKEEPER_SASL_DISABLE =
        key("zookeeper.sasl.disable").booleanType().defaultValue(false);
@Documentation.Section(Documentation.Sections.SECURITY_AUTH_ZOOKEEPER)
public static final ConfigOption<String> ZOOKEEPER_SASL_SERVICE_NAME =
        key("zookeeper.sasl.service-name").stringType().defaultValue("zookeeper");
@Documentation.Section(Documentation.Sections.SECURITY_AUTH_ZOOKEEPER)
public static final ConfigOption<String> ZOOKEEPER_SASL_LOGIN_CONTEXT_NAME =
        key("zookeeper.sasl.login-context-name").stringType().defaultValue("Client");
// ------------------------------------------------------------------------
// SSL Security Options
// ------------------------------------------------------------------------
/** Enable SSL for internal communication (pekko rpc, netty data transport, blob server). */
@Documentation.Section(Documentation.Sections.SECURITY_SSL)
public static final ConfigOption<Boolean> SSL_INTERNAL_ENABLED =
        key("security.ssl.internal.enabled")
                .booleanType()
                .defaultValue(false)
                .withDescription(
                        "Turns on SSL for internal network communication. "
                                + "Optionally, specific components may override this through their own settings "
                                + "(rpc, data transport, REST, etc).");
/** Enable SSL for external REST endpoints. */
@Documentation.Section(Documentation.Sections.SECURITY_SSL)
public static final ConfigOption<Boolean> SSL_REST_ENABLED =
        key("security.ssl.rest.enabled")
                .booleanType()
                .defaultValue(false)
                .withDescription(
                        "Turns on SSL for external communication via the REST endpoints.");
/** Enable mutual SSL authentication for external REST endpoints. */
@Documentation.Section(Documentation.Sections.SECURITY_SSL)
public static final ConfigOption<Boolean> SSL_REST_AUTHENTICATION_ENABLED =
        key("security.ssl.rest.authentication-enabled")
                .booleanType()
                .defaultValue(false)
                .withDescription(
                        "Turns on mutual SSL authentication for external communication via the REST endpoints.");
// ----------------- certificates (internal + external) -------------------
/** The Java keystore file containing the flink endpoint key and certificate. */
@Documentation.ExcludeFromDocumentation(
        "The SSL Setup encourages separate configs for internal and REST security.")
public static final ConfigOption<String> SSL_KEYSTORE =
        key("security.ssl.keystore")
                .stringType()
                .noDefaultValue()
                .withDescription(
                        "The Java keystore file to be used by the flink endpoint for its SSL Key and Certificate.");
/** Secret to decrypt the keystore file. */
@Documentation.ExcludeFromDocumentation(
        "The SSL Setup encourages separate configs for internal and REST security.")
public static final ConfigOption<String> SSL_KEYSTORE_PASSWORD =
        key("security.ssl.keystore-password")
                .stringType()
                .noDefaultValue()
                .withDescription("The secret to decrypt the keystore file.");
/** Secret to decrypt the server key. */
@Documentation.ExcludeFromDocumentation(
        "The SSL Setup encourages separate configs for internal and REST security.")
public static final ConfigOption<String> SSL_KEY_PASSWORD =
        key("security.ssl.key-password")
                .stringType()
                .noDefaultValue()
                .withDescription("The secret to decrypt the server key in the keystore.");
/** The truststore file containing the public CA certificates to verify the ssl peers. */
@Documentation.ExcludeFromDocumentation(
        "The SSL Setup encourages separate configs for internal and REST security.")
public static final ConfigOption<String> SSL_TRUSTSTORE =
        key("security.ssl.truststore")
                .stringType()
                .noDefaultValue()
                .withDescription(
                        "The truststore file containing the public CA certificates to be used by flink endpoints"
                                + " to verify the peer’s certificate.");
/** Secret to decrypt the truststore. */
@Documentation.ExcludeFromDocumentation(
        "The SSL Setup encourages separate configs for internal and REST security.")
public static final ConfigOption<String> SSL_TRUSTSTORE_PASSWORD =
        key("security.ssl.truststore-password")
                .stringType()
                .noDefaultValue()
                .withDescription("The secret to decrypt the truststore.");
// ----------------------- certificates (internal) ------------------------
/** For internal SSL, the Java keystore file containing the private key and certificate. */
@Documentation.Section(Documentation.Sections.SECURITY_SSL)
public static final ConfigOption<String> SSL_INTERNAL_KEYSTORE =
        key("security.ssl.internal.keystore")
                .stringType()
                .noDefaultValue()
                .withDescription(
                        // Fixed grammar: "to be used Flink's" -> "to be used by Flink's".
                        "The Java keystore file with SSL Key and Certificate, "
                                + "to be used by Flink's internal endpoints (rpc, data transport, blob server).");
/** For internal SSL, the password to decrypt the keystore file containing the certificate. */
@Documentation.Section(Documentation.Sections.SECURITY_SSL)
public static final ConfigOption<String> SSL_INTERNAL_KEYSTORE_PASSWORD =
        key("security.ssl.internal.keystore-password")
                .stringType()
                .noDefaultValue()
                .withDescription(
                        // Fixed duplicated "for Flink's for Flink's".
                        "The secret to decrypt the keystore file "
                                + "for Flink's internal endpoints (rpc, data transport, blob server).");
/** For internal SSL, the password to decrypt the private key. */
@Documentation.Section(Documentation.Sections.SECURITY_SSL)
public static final ConfigOption<String> SSL_INTERNAL_KEY_PASSWORD =
        key("security.ssl.internal.key-password")
                .stringType()
                .noDefaultValue()
                .withDescription(
                        "The secret to decrypt the key in the keystore "
                                + "for Flink's internal endpoints (rpc, data transport, blob server).");
/** For internal SSL, the type of the keystore. */
@Documentation.Section(Documentation.Sections.SECURITY_SSL)
@Documentation.OverrideDefault(DEFAULT_KEYSTORE_DOC)
public static final ConfigOption<String> SSL_INTERNAL_KEYSTORE_TYPE =
        key("security.ssl.internal.keystore-type")
                .stringType()
                .defaultValue(KeyStore.getDefaultType())
                .withDescription(
                        "The type of keystore "
                                + "for Flink's internal endpoints (rpc, data transport, blob server).");
/**
 * For internal SSL, the truststore file containing the public CA certificates to verify the ssl
 * peers.
 */
@Documentation.Section(Documentation.Sections.SECURITY_SSL)
public static final ConfigOption<String> SSL_INTERNAL_TRUSTSTORE =
        key("security.ssl.internal.truststore")
                .stringType()
                .noDefaultValue()
                .withDescription(
                        "The truststore file containing the public CA certificates to verify the peer "
                                + "for Flink's internal endpoints (rpc, data transport, blob server).");
/** For internal SSL, the secret to decrypt the truststore. */
@Documentation.Section(Documentation.Sections.SECURITY_SSL)
public static final ConfigOption<String> SSL_INTERNAL_TRUSTSTORE_PASSWORD =
        key("security.ssl.internal.truststore-password")
                .stringType()
                .noDefaultValue()
                .withDescription(
                        "The password to decrypt the truststore "
                                + "for Flink's internal endpoints (rpc, data transport, blob server).");
/** For internal SSL, the type of the truststore. */
@Documentation.Section(Documentation.Sections.SECURITY_SSL)
@Documentation.OverrideDefault(DEFAULT_KEYSTORE_DOC)
public static final ConfigOption<String> SSL_INTERNAL_TRUSTSTORE_TYPE =
        key("security.ssl.internal.truststore-type")
                .stringType()
                .defaultValue(KeyStore.getDefaultType())
                .withDescription(
                        "The type of truststore "
                                + "for Flink's internal endpoints (rpc, data transport, blob server).");
/** For internal SSL, the sha1 fingerprint of the internal certificate to verify the client. */
@Documentation.Section(Documentation.Sections.SECURITY_SSL)
public static final ConfigOption<String> SSL_INTERNAL_CERT_FINGERPRINT =
        key("security.ssl.internal.cert.fingerprint")
                .stringType()
                .noDefaultValue()
                .withDescription(
                        // Fixed missing sentence space and garbled grammar in the original text.
                        "The sha1 fingerprint of the internal certificate. "
                                + "This further protects the internal communication to present the exact certificate used by Flink. "
                                + "This is necessary where one cannot use a private CA (self signed) or an internal firm-wide CA is required.");
// ----------------------- certificates (external) ------------------------
/**
 * For external (REST) SSL, the Java keystore file containing the private key and certificate.
 */
@Documentation.Section(Documentation.Sections.SECURITY_SSL)
public static final ConfigOption<String> SSL_REST_KEYSTORE =
        key("security.ssl.rest.keystore")
                .stringType()
                .noDefaultValue()
                .withDescription(
                        // Fixed grammar: "to be used Flink's" -> "to be used by Flink's".
                        "The Java keystore file with SSL Key and Certificate, "
                                + "to be used by Flink's external REST endpoints.");
/**
 * For external (REST) SSL, the password to decrypt the keystore file containing the
 * certificate.
 */
@Documentation.Section(Documentation.Sections.SECURITY_SSL)
public static final ConfigOption<String> SSL_REST_KEYSTORE_PASSWORD =
        key("security.ssl.rest.keystore-password")
                .stringType()
                .noDefaultValue()
                .withDescription(
                        // Fixed duplicated "for Flink's for Flink's".
                        "The secret to decrypt the keystore file "
                                + "for Flink's external REST endpoints.");
/** For external (REST) SSL, the password to decrypt the private key. */
@Documentation.Section(Documentation.Sections.SECURITY_SSL)
public static final ConfigOption<String> SSL_REST_KEY_PASSWORD =
        key("security.ssl.rest.key-password")
                .stringType()
                .noDefaultValue()
                .withDescription(
                        "The secret to decrypt the key in the keystore "
                                + "for Flink's external REST endpoints.");
/** For external (REST) SSL, the type of the keystore. */
@Documentation.Section(Documentation.Sections.SECURITY_SSL)
@Documentation.OverrideDefault(DEFAULT_KEYSTORE_DOC)
public static final ConfigOption<String> SSL_REST_KEYSTORE_TYPE =
        key("security.ssl.rest.keystore-type")
                .stringType()
                .defaultValue(KeyStore.getDefaultType())
                .withDescription(
                        "The type of the keystore for Flink's external REST endpoints.");
/**
 * For external (REST) SSL, the truststore file containing the public CA certificates to verify
 * the ssl peers.
 */
@Documentation.Section(Documentation.Sections.SECURITY_SSL)
public static final ConfigOption<String> SSL_REST_TRUSTSTORE =
        key("security.ssl.rest.truststore")
                .stringType()
                .noDefaultValue()
                .withDescription(
                        "The truststore file containing the public CA certificates to verify the peer "
                                + "for Flink's external REST endpoints.");
/** For external (REST) SSL, the secret to decrypt the truststore. */
@Documentation.Section(Documentation.Sections.SECURITY_SSL)
public static final ConfigOption<String> SSL_REST_TRUSTSTORE_PASSWORD =
        key("security.ssl.rest.truststore-password")
                .stringType()
                .noDefaultValue()
                .withDescription(
                        "The password to decrypt the truststore "
                                + "for Flink's external REST endpoints.");
/** For external (REST) SSL, the type of the truststore. */
@Documentation.Section(Documentation.Sections.SECURITY_SSL)
@Documentation.OverrideDefault(DEFAULT_KEYSTORE_DOC)
public static final ConfigOption<String> SSL_REST_TRUSTSTORE_TYPE =
        key("security.ssl.rest.truststore-type")
                .stringType()
                .defaultValue(KeyStore.getDefaultType())
                .withDescription(
                        "The type of the truststore for Flink's external REST endpoints.");
/** For external (REST) SSL, the sha1 fingerprint of the rest client certificate to verify. */
@Documentation.Section(Documentation.Sections.SECURITY_SSL)
public static final ConfigOption<String> SSL_REST_CERT_FINGERPRINT =
        key("security.ssl.rest.cert.fingerprint")
                .stringType()
                .noDefaultValue()
                .withDescription(
                        // Fixed "rest REST endpoints", missing sentence space, and "once uses" typo.
                        "The sha1 fingerprint of the rest certificate. "
                                + "This further protects the REST endpoints to present a certificate which is only used by the proxy server. "
                                + "This is necessary where one uses a public CA or an internal firm-wide CA.");
// ------------------------ ssl parameters --------------------------------
/** SSL protocol version to be supported. */
@Documentation.Section(Documentation.Sections.SECURITY_SSL)
public static final ConfigOption<String> SSL_PROTOCOL =
        key("security.ssl.protocol")
                .stringType()
                .defaultValue("TLSv1.2")
                .withDescription(
                        "The SSL protocol version to be supported for the ssl transport. Note that it doesn’t"
                                + " support comma separated list.");
/**
 * The standard SSL algorithms to be supported.
 *
 * <p>More options here -
 * http://docs.oracle.com/javase/8/docs/technotes/guides/security/StandardNames.html#ciphersuites
 */
@Documentation.Section(Documentation.Sections.SECURITY_SSL)
public static final ConfigOption<String> SSL_ALGORITHMS =
        key("security.ssl.algorithms")
                .stringType()
                .defaultValue("TLS_RSA_WITH_AES_128_CBC_SHA")
                .withDescription(
                        Description.builder()
                                .text(
                                        "The comma separated list of standard SSL algorithms to be supported. Read more %s",
                                        link(
                                                "http://docs.oracle.com/javase/8/docs/technotes/guides/security/StandardNames.html#ciphersuites",
                                                "here"))
                                .build());
/** Flag to enable/disable hostname verification for the ssl connections. */
@Documentation.Section(Documentation.Sections.SECURITY_SSL)
public static final ConfigOption<Boolean> SSL_VERIFY_HOSTNAME =
        key("security.ssl.verify-hostname")
                .booleanType()
                .defaultValue(true)
                .withDescription(
                        "Flag to enable peer’s hostname verification during ssl handshake.");
/** SSL engine provider. */
@Documentation.Section(Documentation.Sections.EXPERT_SECURITY_SSL)
public static final ConfigOption<String> SSL_PROVIDER =
        key("security.ssl.provider")
                .stringType()
                .defaultValue("JDK")
                .withDescription(
                        Description.builder()
                                .text("The SSL engine provider to use for the ssl transport:")
                                .list(
                                        text("%s: default Java-based SSL engine", code("JDK")),
                                        text(
                                                "%s: openSSL-based SSL engine using system libraries",
                                                code("OPENSSL")))
                                .text(
                                        "%s is based on %s and comes in two flavours:",
                                        code("OPENSSL"),
                                        link(
                                                "http://netty.io/wiki/forked-tomcat-native.html#wiki-h2-4",
                                                "netty-tcnative"))
                                .list(
                                        text(
                                                "dynamically linked: This will use your system's openSSL libraries "
                                                        + "(if compatible) and requires %s to be copied to %s",
                                                code(
                                                        "opt/flink-shaded-netty-tcnative-dynamic-*.jar"),
                                                code("lib/")),
                                        text(
                                                "statically linked: Due to potential licensing issues with "
                                                        + "openSSL (see %s), we cannot ship pre-built libraries. However, "
                                                        + "you can build the required library yourself and put it into %s:%s%s",
                                                link(
                                                        "https://issues.apache.org/jira/browse/LEGAL-393",
                                                        "LEGAL-393"),
                                                code("lib/"),
                                                linebreak(),
                                                code(
                                                        "git clone https://github.com/apache/flink-shaded.git && "
                                                                + "cd flink-shaded && "
                                                                + "mvn clean package -Pinclude-netty-tcnative-static -pl flink-shaded-netty-tcnative-static")))
                                .build());
// ------------------------ ssl parameters --------------------------------
/** SSL session cache size. */
@Documentation.Section(Documentation.Sections.EXPERT_SECURITY_SSL)
public static final ConfigOption<Integer> SSL_INTERNAL_SESSION_CACHE_SIZE =
        key("security.ssl.internal.session-cache-size")
                .intType()
                .defaultValue(-1)
                .withDescription(
                        Description.builder()
                                .text(
                                        "The size of the cache used for storing SSL session objects. "
                                                + "According to %s, you should always set "
                                                + "this to an appropriate number to not run into a bug with stalling IO threads "
                                                + "during garbage collection. (-1 = use system default).",
                                        link(
                                                "https://github.com/netty/netty/issues/832",
                                                "here"))
                                .build())
                .withDeprecatedKeys("security.ssl.session-cache-size");
/** SSL session timeout. */
@Documentation.Section(Documentation.Sections.EXPERT_SECURITY_SSL)
public static final ConfigOption<Integer> SSL_INTERNAL_SESSION_TIMEOUT =
        key("security.ssl.internal.session-timeout")
                .intType()
                .defaultValue(-1)
                .withDescription(
                        "The timeout (in ms) for the cached SSL session objects. (-1 = use system default)")
                .withDeprecatedKeys("security.ssl.session-timeout");
/** SSL session timeout during handshakes. */
@Documentation.Section(Documentation.Sections.EXPERT_SECURITY_SSL)
public static final ConfigOption<Integer> SSL_INTERNAL_HANDSHAKE_TIMEOUT =
        key("security.ssl.internal.handshake-timeout")
                .intType()
                .defaultValue(-1)
                .withDescription(
                        "The timeout (in ms) during SSL handshake. (-1 = use system default)")
                .withDeprecatedKeys("security.ssl.handshake-timeout");
/** SSL session timeout after flushing the <tt>close_notify</tt> message. */
@Documentation.Section(Documentation.Sections.EXPERT_SECURITY_SSL)
public static final ConfigOption<Integer> SSL_INTERNAL_CLOSE_NOTIFY_FLUSH_TIMEOUT =
        key("security.ssl.internal.close-notify-flush-timeout")
                .intType()
                .defaultValue(-1)
                .withDescription(
                        "The timeout (in ms) for flushing the `close_notify` that was triggered by closing a "
                                + "channel. If the `close_notify` was not flushed in the given timeout the channel will be closed "
                                + "forcibly. (-1 = use system default)")
                .withDeprecatedKeys("security.ssl.close-notify-flush-timeout");
/**
 * Checks whether SSL for internal communication (rpc, data transport, blob server) is enabled.
 *
 * @param sslConfig configuration to read {@link #SSL_INTERNAL_ENABLED} from; must not be null
 * @return true iff internal SSL is enabled
 */
public static boolean isInternalSSLEnabled(Configuration sslConfig) {
    // Explicit null check for consistency with isRestSSLAuthenticationEnabled.
    checkNotNull(sslConfig, "sslConfig");
    return sslConfig.get(SSL_INTERNAL_ENABLED);
}
/**
 * Checks whether SSL for the external REST endpoint is enabled.
 *
 * @param sslConfig configuration to read {@link #SSL_REST_ENABLED} from; must not be null
 * @return true iff REST SSL is enabled
 */
public static boolean isRestSSLEnabled(Configuration sslConfig) {
    // Explicit null check for consistency with isRestSSLAuthenticationEnabled.
    checkNotNull(sslConfig, "sslConfig");
    return sslConfig.get(SSL_REST_ENABLED);
}
/**
 * Checks whether mutual SSL authentication for the external REST endpoint is enabled.
 *
 * <p>Mutual authentication only takes effect when REST SSL itself is enabled.
 *
 * @param sslConfig configuration to read the REST SSL options from; must not be null
 * @return true iff REST SSL and mutual REST SSL authentication are both enabled
 */
public static boolean isRestSSLAuthenticationEnabled(Configuration sslConfig) {
    checkNotNull(sslConfig, "sslConfig");
    return isRestSSLEnabled(sslConfig) && sslConfig.get(SSL_REST_AUTHENTICATION_ENABLED);
}
}
|
apache/incubator-nemo | 35,833 | runtime/executor/src/main/java/org/apache/nemo/runtime/executor/task/TaskExecutor.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.nemo.runtime.executor.task;
import com.google.common.collect.Lists;
import org.apache.commons.lang3.SerializationUtils;
import org.apache.commons.lang3.exception.ExceptionUtils;
import org.apache.nemo.common.Pair;
import org.apache.nemo.common.dag.DAG;
import org.apache.nemo.common.dag.Edge;
import org.apache.nemo.common.ir.OutputCollector;
import org.apache.nemo.common.ir.Readable;
import org.apache.nemo.common.ir.edge.executionproperty.AdditionalOutputTagProperty;
import org.apache.nemo.common.ir.vertex.IRVertex;
import org.apache.nemo.common.ir.vertex.OperatorVertex;
import org.apache.nemo.common.ir.vertex.SourceVertex;
import org.apache.nemo.common.ir.vertex.transform.MessageAggregatorTransform;
import org.apache.nemo.common.ir.vertex.transform.SignalTransform;
import org.apache.nemo.common.ir.vertex.transform.Transform;
import org.apache.nemo.common.punctuation.Finishmark;
import org.apache.nemo.common.punctuation.LatencyMark;
import org.apache.nemo.common.punctuation.Watermark;
import org.apache.nemo.runtime.common.RuntimeIdManager;
import org.apache.nemo.runtime.common.comm.ControlMessage;
import org.apache.nemo.runtime.common.message.MessageEnvironment;
import org.apache.nemo.runtime.common.message.PersistentConnectionToMasterMap;
import org.apache.nemo.runtime.common.metric.LatencyMetric;
import org.apache.nemo.runtime.common.metric.StreamMetric;
import org.apache.nemo.runtime.common.plan.RuntimeEdge;
import org.apache.nemo.runtime.common.plan.StageEdge;
import org.apache.nemo.runtime.common.plan.Task;
import org.apache.nemo.runtime.common.state.TaskState;
import org.apache.nemo.runtime.executor.MetricMessageSender;
import org.apache.nemo.runtime.executor.TaskStateManager;
import org.apache.nemo.runtime.executor.TransformContextImpl;
import org.apache.nemo.runtime.executor.data.BroadcastManagerWorker;
import org.apache.nemo.runtime.executor.datatransfer.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.annotation.concurrent.NotThreadSafe;
import java.io.IOException;
import java.io.Serializable;
import java.util.*;
import java.util.concurrent.atomic.AtomicLong;
import java.util.stream.Collectors;
/**
* Executes a task.
* Should be accessed by a single thread.
*/
@NotThreadSafe
public final class TaskExecutor {
private static final Logger LOG = LoggerFactory.getLogger(TaskExecutor.class.getName());
private static final String TASK_METRIC_ID = "TaskMetric";
// Essential information
private boolean isExecuted;
private final String taskId;
private final TaskStateManager taskStateManager;
private final List<DataFetcher> dataFetchers;
private final BroadcastManagerWorker broadcastManagerWorker;
private final List<VertexHarness> sortedHarnesses;
// Metrics information
private long boundedSourceReadTime = 0;
private long serializedReadBytes = 0;
private long encodedReadBytes = 0;
private long timeSinceLastExecution;
private long timeSinceLastRecordStreamMetric;
private final Map<String, AtomicLong> numOfReadTupleMap;
private final Map<String, Long> lastSerializedReadByteMap;
private final MetricMessageSender metricMessageSender;
private long latencyMarkSendPeriod = -1;
private final Map<String, Long> latestSentLatencymarkTimestamp;
// Dynamic optimization
private String idOfVertexPutOnHold;
private final PersistentConnectionToMasterMap persistentConnectionToMasterMap;
/**
* Constructor.
*
* @param task Task with information needed during execution.
* @param irVertexDag A DAG of vertices.
* @param taskStateManager State manager for this Task.
* @param intermediateDataIOFactory For reading from/writing to data to other tasks.
* @param broadcastManagerWorker For broadcasts.
* @param metricMessageSender For sending metric with execution stats to the master.
* @param persistentConnectionToMasterMap For sending messages to the master.
*/
public TaskExecutor(final Task task,
                    final DAG<IRVertex, RuntimeEdge<IRVertex>> irVertexDag,
                    final TaskStateManager taskStateManager,
                    final IntermediateDataIOFactory intermediateDataIOFactory,
                    final BroadcastManagerWorker broadcastManagerWorker,
                    final MetricMessageSender metricMessageSender,
                    final PersistentConnectionToMasterMap persistentConnectionToMasterMap,
                    final int latencyMarkPeriod) {
  // Essential information: identity and collaborators for this task's lifetime.
  this.isExecuted = false;
  this.taskId = task.getTaskId();
  this.taskStateManager = taskStateManager;
  this.broadcastManagerWorker = broadcastManagerWorker;
  // latencyMarkPeriod <= 0 presumably disables latency-mark emission
  // (field default is -1) — TODO confirm against the emitting code path.
  this.latencyMarkSendPeriod = latencyMarkPeriod;
  this.latestSentLatencymarkTimestamp = new HashMap<>();
  // Metric sender
  this.metricMessageSender = metricMessageSender;
  // Dynamic optimization
  // Assigning null is very bad, but we are keeping this for now;
  // null means "no vertex is currently put on hold".
  this.idOfVertexPutOnHold = null;
  this.persistentConnectionToMasterMap = persistentConnectionToMasterMap;
  // Prepare data structures: builds the data fetchers and the topologically
  // sorted vertex harnesses from the task's IR DAG.
  final Pair<List<DataFetcher>, List<VertexHarness>> pair = prepare(task, irVertexDag, intermediateDataIOFactory);
  this.dataFetchers = pair.left();
  this.sortedHarnesses = pair.right();
  // Initialize per-source metric accumulators (tuple counters and the last
  // observed serialized-byte totals, used to compute per-interval deltas).
  this.numOfReadTupleMap = new HashMap<>();
  this.lastSerializedReadByteMap = new HashMap<>();
  for (DataFetcher dataFetcher : dataFetchers) {
    this.numOfReadTupleMap.put(dataFetcher.getDataSource().getId(), new AtomicLong());
    this.lastSerializedReadByteMap.put(dataFetcher.getDataSource().getId(), 0L);
  }
  // Record the start of the first stream-metric interval.
  this.timeSinceLastRecordStreamMetric = System.currentTimeMillis();
  this.timeSinceLastExecution = System.currentTimeMillis();
}
/**
 * Collects per-source stream metrics for the current interval and reports them to the master.
 *
 * <p>For each data fetcher this computes the number of tuples read and the delta of
 * serialized bytes since the previous call, then resets the interval. Intended to be
 * invoked from a thread other than the task-executing thread to keep the hot path cheap.
 */
public void sendStreamMetric() {
  final long now = System.currentTimeMillis();
  final Map<String, StreamMetric> metricsBySource = new HashMap<>();
  for (final DataFetcher fetcher : dataFetchers) {
    final String vertexId = fetcher.getDataSource().getId();
    // Pair of (isReadNotSerializedData, serialized bytes); -1 bytes marks "invalid".
    Pair<Boolean, Long> bytesInfo;
    if (fetcher instanceof SourceVertexDataFetcher) {
      bytesInfo = Pair.of(true, 0L);
    } else if (fetcher instanceof ParentTaskDataFetcher) {
      bytesInfo = ((ParentTaskDataFetcher) fetcher).getCurrSerBytes();
    } else if (fetcher instanceof MultiThreadParentTaskDataFetcher) {
      bytesInfo = ((MultiThreadParentTaskDataFetcher) fetcher).getCurrSerBytes();
    } else {
      bytesInfo = Pair.of(false, -1L);
    }
    // A right value of -1 means the byte count is invalid; otherwise convert the
    // cumulative total into a delta for this interval and remember the new total.
    if (bytesInfo.right() != -1) {
      final long previousTotal = lastSerializedReadByteMap.get(vertexId);
      lastSerializedReadByteMap.put(vertexId, bytesInfo.right());
      bytesInfo = Pair.of(bytesInfo.left(), bytesInfo.right() - previousTotal);
    }
    final long tupleCount = this.numOfReadTupleMap.get(vertexId).get();
    metricsBySource.put(
      vertexId,
      new StreamMetric(this.timeSinceLastRecordStreamMetric, now, tupleCount,
        bytesInfo.right(), bytesInfo.left()));
    // Subtract what we reported rather than resetting to zero, so tuples counted
    // concurrently during this call are not lost.
    numOfReadTupleMap.get(vertexId).addAndGet(-tupleCount);
  }
  metricMessageSender.send(TASK_METRIC_ID, taskId, "streamMetric",
    SerializationUtils.serialize((Serializable) metricsBySource));
  this.timeSinceLastRecordStreamMetric = now;
}
/**
 * Collects every edge feeding {@code childVertex}: the intra-task edges from the IR DAG
 * plus the inter-task stage edges whose destination is this vertex.
 *
 * @param task the task (provides the inter-task incoming edges).
 * @param irVertexDag the DAG of vertices inside this task.
 * @param childVertex the vertex whose incoming edges are collected.
 * @return all incoming edges of the vertex, intra-task first.
 */
private List<Edge> getAllIncomingEdges(
  final Task task,
  final DAG<IRVertex, RuntimeEdge<IRVertex>> irVertexDag,
  final IRVertex childVertex) {
  final List<Edge> incomingEdges = new ArrayList<>(irVertexDag.getIncomingEdgesOf(childVertex));
  task.getTaskIncomingEdges().stream()
    .filter(stageEdge -> stageEdge.getDstIRVertex().getId().equals(childVertex.getId()))
    .forEach(incomingEdges::add);
  return incomingEdges;
}
/**
* Converts the DAG of vertices into pointer-based DAG of vertex harnesses.
* This conversion is necessary for constructing concrete data channels for each vertex's inputs and outputs.
* <p>
* - Source vertex read: Explicitly handled (SourceVertexDataFetcher)
* - Sink vertex write: Implicitly handled within the vertex
* <p>
* - Parent-task read: Explicitly handled (ParentTaskDataFetcher)
* - Children-task write: Explicitly handled (VertexHarness)
* <p>
* - Intra-task read: Implicitly handled when performing Intra-task writes
* - Intra-task write: Explicitly handled (VertexHarness)
* <p>
* For element-wise data processing, we traverse vertex harnesses from the roots to the leaves for each element.
* This means that overheads associated with jumping from one harness to the other should be minimal.
* For example, we should never perform an expensive hash operation to traverse the harnesses.
*
* @param task task.
* @param irVertexDag dag.
* @param intermediateDataIOFactory intermediate IO.
* @return fetchers and harnesses.
*/
private Pair<List<DataFetcher>, List<VertexHarness>> prepare(
final Task task,
final DAG<IRVertex, RuntimeEdge<IRVertex>> irVertexDag,
final IntermediateDataIOFactory intermediateDataIOFactory) {
// NOTE(review): taskIndex appears unused in this method — confirm before removing.
final int taskIndex = RuntimeIdManager.getIndexFromTaskId(task.getTaskId());
// Traverse in a reverse-topological order to ensure that each visited vertex's children vertices exist.
final List<IRVertex> reverseTopologicallySorted = Lists.reverse(irVertexDag.getTopologicalSort());
// Build a map for edge as a key and edge index as a value
// This variable is used for creating NextIntraTaskOperatorInfo
// in {@link this#getInternalMainOutputs and this#internalMainOutputs}
final Map<Edge, Integer> edgeIndexMap = new HashMap<>();
reverseTopologicallySorted.forEach(childVertex -> {
final List<Edge> edges = getAllIncomingEdges(task, irVertexDag, childVertex);
for (int edgeIndex = 0; edgeIndex < edges.size(); edgeIndex++) {
final Edge edge = edges.get(edgeIndex);
edgeIndexMap.putIfAbsent(edge, edgeIndex);
}
});
// Build a map for InputWatermarkManager for each operator vertex
// This variable is used for creating NextIntraTaskOperatorInfo
// in {@link this#getInternalMainOutputs and this#internalMainOutputs}
final Map<IRVertex, InputWatermarkManager> operatorWatermarkManagerMap = new HashMap<>();
reverseTopologicallySorted.forEach(childVertex -> {
if (childVertex instanceof OperatorVertex) {
final List<Edge> edges = getAllIncomingEdges(task, irVertexDag, childVertex);
// Single-input vertices get a cheaper manager; multi-input ones need per-edge tracking.
if (edges.size() == 1) {
operatorWatermarkManagerMap.putIfAbsent(childVertex,
new SingleInputWatermarkManager(
new OperatorWatermarkCollector((OperatorVertex) childVertex)));
} else {
operatorWatermarkManagerMap.putIfAbsent(childVertex,
new MultiInputWatermarkManager(edges.size(),
new OperatorWatermarkCollector((OperatorVertex) childVertex)));
}
}
});
// Create a harness for each vertex
final List<DataFetcher> dataFetcherList = new ArrayList<>();
final Map<String, VertexHarness> vertexIdToHarness = new HashMap<>();
reverseTopologicallySorted.forEach(irVertex -> {
final Optional<Readable> sourceReader = getSourceVertexReader(irVertex, task.getIrVertexIdToReadable());
// Invariant: a vertex has a Readable if and only if it is a source vertex.
if (sourceReader.isPresent() != irVertex instanceof SourceVertex) {
throw new IllegalStateException(irVertex.toString());
}
// Additional outputs
final Map<String, List<NextIntraTaskOperatorInfo>> internalAdditionalOutputMap =
getInternalOutputMap(irVertex, irVertexDag, edgeIndexMap, operatorWatermarkManagerMap);
final Map<String, List<OutputWriter>> externalAdditionalOutputMap =
getExternalAdditionalOutputMap(irVertex, task.getTaskOutgoingEdges(), intermediateDataIOFactory);
// Main outputs: pull the main-tag entry out of the additional-output map, if present.
final List<NextIntraTaskOperatorInfo> internalMainOutputs;
if (internalAdditionalOutputMap.containsKey(AdditionalOutputTagProperty.getMainOutputTag())) {
internalMainOutputs = internalAdditionalOutputMap.remove(AdditionalOutputTagProperty.getMainOutputTag());
} else {
internalMainOutputs = new ArrayList<>();
}
final List<OutputWriter> externalMainOutputs =
getExternalMainOutputs(irVertex, task.getTaskOutgoingEdges(), intermediateDataIOFactory);
// Message-producing transforms route their output to the master instead of downstream vertices.
final OutputCollector outputCollector;
if (irVertex instanceof OperatorVertex
&& ((OperatorVertex) irVertex).getTransform() instanceof MessageAggregatorTransform) {
outputCollector = new RunTimeMessageOutputCollector<Map<Object, Long>>(
taskId, irVertex, persistentConnectionToMasterMap, this, true);
} else if (irVertex instanceof OperatorVertex
&& ((OperatorVertex) irVertex).getTransform() instanceof SignalTransform) {
outputCollector = new RunTimeMessageOutputCollector<Map<String, Long>>(
taskId, irVertex, persistentConnectionToMasterMap, this, false);
} else {
outputCollector = new OperatorVertexOutputCollector(
irVertex, internalMainOutputs, internalAdditionalOutputMap,
externalMainOutputs, externalAdditionalOutputMap);
}
// Create VERTEX HARNESS
final VertexHarness vertexHarness = new VertexHarness(
irVertex, outputCollector, new TransformContextImpl(broadcastManagerWorker),
externalMainOutputs, externalAdditionalOutputMap);
prepareTransform(vertexHarness);
vertexIdToHarness.put(irVertex.getId(), vertexHarness);
// Prepare data READ
// Source read
if (irVertex instanceof SourceVertex) {
// Source vertex read
dataFetcherList.add(new SourceVertexDataFetcher(
(SourceVertex) irVertex,
sourceReader.get(),
outputCollector,
latencyMarkSendPeriod,
taskId));
}
// Parent-task read
// TODO #285: Cache broadcasted data
task.getTaskIncomingEdges()
.stream()
.filter(inEdge -> inEdge.getDstIRVertex().getId().equals(irVertex.getId())) // edge to this vertex
.map(incomingEdge ->
Pair.of(incomingEdge, intermediateDataIOFactory
.createReader(task.getTaskId(), incomingEdge.getSrcIRVertex(), incomingEdge)))
.forEach(pair -> {
if (irVertex instanceof OperatorVertex) {
final StageEdge edge = pair.left();
final int edgeIndex = edgeIndexMap.get(edge);
final InputWatermarkManager watermarkManager = operatorWatermarkManagerMap.get(irVertex);
final InputReader parentTaskReader = pair.right();
final OutputCollector dataFetcherOutputCollector =
new DataFetcherOutputCollector((OperatorVertex) irVertex, edgeIndex, watermarkManager);
// Pipe-based readers are consumed by the multi-threaded fetcher; others by the single-threaded one.
if (parentTaskReader instanceof PipeInputReader) {
dataFetcherList.add(
new MultiThreadParentTaskDataFetcher(
parentTaskReader.getSrcIrVertex(),
parentTaskReader,
dataFetcherOutputCollector));
} else {
dataFetcherList.add(
new ParentTaskDataFetcher(
parentTaskReader.getSrcIrVertex(),
parentTaskReader,
dataFetcherOutputCollector));
}
}
});
});
// Return harnesses in (forward) topological order for the finalize phase.
final List<VertexHarness> sortedHarnessList = irVertexDag.getTopologicalSort()
.stream()
.map(vertex -> vertexIdToHarness.get(vertex.getId()))
.collect(Collectors.toList());
return Pair.of(dataFetcherList, sortedHarnessList);
}
/**
 * Process a data element down the DAG dependency.
 *
 * @param outputCollector the collector that forwards the element to downstream consumers.
 * @param dataElement the data element to emit.
 */
private void processElement(final OutputCollector outputCollector, final Object dataElement) {
outputCollector.emit(dataElement);
}
/**
 * Propagate a watermark down the DAG dependency.
 *
 * @param outputCollector the collector that forwards the watermark downstream.
 * @param watermark the watermark to emit.
 */
private void processWatermark(final OutputCollector outputCollector,
final Watermark watermark) {
outputCollector.emitWatermark(watermark);
}
/**
 * Propagate a latency mark down the DAG dependency.
 *
 * @param outputCollector the collector that forwards the latency mark downstream.
 * @param latencymark the latency mark to emit.
 */
private void processLatencymark(final OutputCollector outputCollector,
final LatencyMark latencymark) {
outputCollector.emitLatencymark(latencymark);
}
/**
 * Execute a task, while handling unrecoverable errors and exceptions.
 * Any uncaught throwable first marks the task FAILED on the master, then is logged
 * with its full stack trace.
 */
public void execute() {
  try {
    doExecute();
  } catch (final Throwable throwable) {
    // ANY uncaught throwable is reported to the master before logging, so the failure
    // is visible to the master even if logging itself misbehaves.
    taskStateManager.onTaskStateChanged(TaskState.State.FAILED, Optional.empty(), Optional.empty());
    // Pass the throwable as the final argument so SLF4J records the full stack trace,
    // instead of pre-rendering it into the message string.
    LOG.error("{} failed with an uncaught throwable", taskId, throwable);
  }
}
/**
 * The task is executed in the following two phases.
 * - Phase 1: Consume task-external input data
 * - Phase 2: Finalize task-internal states and data elements
 */
private void doExecute() {
// Housekeeping stuff
// NOTE(review): isExecuted is never set to true within this method — confirm the
// re-execution guard is effective (it may be set elsewhere in the class).
if (isExecuted) {
throw new RuntimeException("Task {" + taskId + "} execution called again");
}
LOG.info("{} started", taskId);
taskStateManager.onTaskStateChanged(TaskState.State.EXECUTING, Optional.empty(), Optional.empty());
final long executionStartTime = System.currentTimeMillis();
// Scheduling overhead = time between the previous execution's end and this start.
metricMessageSender.send(TASK_METRIC_ID, taskId, "schedulingOverhead",
SerializationUtils.serialize(executionStartTime - timeSinceLastExecution));
// Phase 1: Consume task-external input data. A false return means a recoverable
// read failure was already reported; abort without finalizing.
if (!handleDataFetchers(dataFetchers)) {
return;
}
sendMetrics();
// Phase 2: Finalize task-internal states and elements
for (final VertexHarness vertexHarness : sortedHarnesses) {
finalizeVertex(vertexHarness);
}
this.timeSinceLastExecution = System.currentTimeMillis();
metricMessageSender.send(TASK_METRIC_ID, taskId, "taskDuration",
SerializationUtils.serialize(timeSinceLastExecution - executionStartTime));
// A non-null idOfVertexPutOnHold (set via setIRVertexPutOnHold) switches the terminal
// state from COMPLETE to ON_HOLD.
if (idOfVertexPutOnHold == null) {
taskStateManager.onTaskStateChanged(TaskState.State.COMPLETE, Optional.empty(), Optional.empty());
LOG.info("{} completed", taskId);
} else {
taskStateManager.onTaskStateChanged(TaskState.State.ON_HOLD,
Optional.of(idOfVertexPutOnHold),
Optional.empty());
LOG.info("{} on hold", taskId);
}
}
/**
 * Send data-processing metrics.
 * Reports the accumulated bounded-source read time and the serialized/encoded
 * read-byte counters to the master via the metric sender.
 */
public void sendMetrics() {
metricMessageSender.send(TASK_METRIC_ID, taskId, "boundedSourceReadTime",
SerializationUtils.serialize(boundedSourceReadTime));
metricMessageSender.send(TASK_METRIC_ID, taskId, "serializedReadBytes",
SerializationUtils.serialize(serializedReadBytes));
metricMessageSender.send(TASK_METRIC_ID, taskId, "encodedReadBytes",
SerializationUtils.serialize(encodedReadBytes));
}
/**
 * Finalize the vertex.
 * Closes the vertex's transform first, then finalizes its output writers
 * (presumably so any elements the transform emits on close() are still written — confirm).
 *
 * @param vertexHarness the vertex harness.
 */
private void finalizeVertex(final VertexHarness vertexHarness) {
closeTransform(vertexHarness);
finalizeOutputWriters(vertexHarness);
}
/**
 * Process an event generated from the dataFetcher.
 * A Finishmark means the fetcher is exhausted (its I/O statistics are accumulated);
 * latency marks and watermarks are forwarded; everything else is a data element.
 *
 * @param event event
 * @param dataFetcher current data fetcher
 */
private void onEventFromDataFetcher(final Object event,
                                    final DataFetcher dataFetcher) {
  if (event instanceof Finishmark) {
    // We've consumed all the data from this data fetcher: fold its stats into the task totals.
    if (dataFetcher instanceof SourceVertexDataFetcher) {
      boundedSourceReadTime += ((SourceVertexDataFetcher) dataFetcher).getBoundedSourceReadTime();
    } else if (dataFetcher instanceof ParentTaskDataFetcher) {
      final ParentTaskDataFetcher parentFetcher = (ParentTaskDataFetcher) dataFetcher;
      serializedReadBytes += parentFetcher.getSerializedBytes();
      encodedReadBytes += parentFetcher.getEncodedBytes();
    } else if (dataFetcher instanceof MultiThreadParentTaskDataFetcher) {
      final MultiThreadParentTaskDataFetcher multiThreadFetcher = (MultiThreadParentTaskDataFetcher) dataFetcher;
      serializedReadBytes += multiThreadFetcher.getSerializedBytes();
      encodedReadBytes += multiThreadFetcher.getEncodedBytes();
    }
  } else if (event instanceof LatencyMark) {
    final LatencyMark latencymark = (LatencyMark) event;
    final long now = System.currentTimeMillis();
    // Report the measured latency to the RuntimeMaster (only positive latencies).
    final LatencyMetric metric = new LatencyMetric(latencymark, now);
    if (metric.getLatency() > 0) {
      metricMessageSender.send(TASK_METRIC_ID, taskId, "latencymark", SerializationUtils.serialize(metric));
    }
    // Forward each distinct latency mark (by creation timestamp) downstream at most once.
    final long latestSent = latestSentLatencymarkTimestamp.getOrDefault(latencymark.getCreatedTaskId(), -1L);
    if (latestSent < latencymark.getCreatedTimestamp()) {
      latestSentLatencymarkTimestamp.put(latencymark.getCreatedTaskId(), latencymark.getCreatedTimestamp());
      // Stamp this task as the previous hop before passing the mark on.
      latencymark.setPreviousTaskId(taskId);
      latencymark.setPreviousSentTimestamp(now);
      processLatencymark(dataFetcher.getOutputCollector(), latencymark);
    }
  } else if (event instanceof Watermark) {
    processWatermark(dataFetcher.getOutputCollector(), (Watermark) event);
  } else {
    // Plain data element: emit it and count it for the stream metrics.
    processElement(dataFetcher.getOutputCollector(), event);
    numOfReadTupleMap.get(dataFetcher.getDataSource().getId()).incrementAndGet();
  }
}
/**
 * Check whether the polling period has elapsed since the previous poll.
 *
 * @param pollingPeriod polling period in milliseconds
 * @param currentTime current time
 * @param prevTime time of the previous poll
 * @return true if at least {@code pollingPeriod} ms have passed since {@code prevTime}
 */
private boolean isPollingTime(final long pollingPeriod, final long currentTime, final long prevTime) {
  final long elapsed = currentTime - prevTime;
  return elapsed >= pollingPeriod;
}
/**
 * This retrieves data from data fetchers and processes them.
 * It maintains two lists:
 * -- availableFetchers: data fetchers that currently have data elements to retrieve
 * -- pendingFetchers: data fetchers that currently do not have available elements.
 * These can become available in the future, so the pending fetchers are re-checked
 * every pollingInterval.
 * <p>
 * If a data fetcher finishes, it is removed from both lists.
 * If a data fetcher has no available element, it moves to pendingFetchers.
 * If a pending data fetcher produces an element, it moves back to availableFetchers.
 * If there are only pending fetchers, sleep for pollingInterval and retry.
 *
 * @param fetchers to handle.
 * @return false if an IOException occurred (the task was reported for retry).
 */
private boolean handleDataFetchers(final List<DataFetcher> fetchers) {
  final List<DataFetcher> availableFetchers = new LinkedList<>(fetchers);
  final List<DataFetcher> pendingFetchers = new LinkedList<>();
  // Polling interval.
  final long pollingInterval = 100; // ms
  // Previous polling time
  long prevPollingTime = System.currentTimeMillis();
  // Both lists empty means we've consumed all task-external input data.
  while (!availableFetchers.isEmpty() || !pendingFetchers.isEmpty()) {
    // We first fetch data from available data fetchers.
    final Iterator<DataFetcher> availableIterator = availableFetchers.iterator();
    while (availableIterator.hasNext()) {
      final DataFetcher dataFetcher = availableIterator.next();
      try {
        final Object element = dataFetcher.fetchDataElement();
        onEventFromDataFetcher(element, dataFetcher);
        if (element instanceof Finishmark) {
          availableIterator.remove();
        }
      } catch (final NoSuchElementException e) {
        // No element in the current data fetcher right now; move it to pending
        // and try the next fetcher.
        availableIterator.remove();
        pendingFetchers.add(dataFetcher);
      } catch (final IOException e) {
        // An input read failure is recoverable: ask the master to retry this task.
        taskStateManager.onTaskStateChanged(TaskState.State.SHOULD_RETRY,
          Optional.empty(), Optional.of(TaskState.RecoverableTaskFailureCause.INPUT_READ_FAILURE));
        // Pass the exception as the last argument (no placeholder) so SLF4J logs
        // the full stack trace rather than just its toString().
        LOG.error("{} Execution Failed (Recoverable: input read failure)!", taskId, e);
        return false;
      }
    }
    final Iterator<DataFetcher> pendingIterator = pendingFetchers.iterator();
    final long currentTime = System.currentTimeMillis();
    if (isPollingTime(pollingInterval, currentTime, prevPollingTime)) {
      // We check pending data every polling interval.
      prevPollingTime = currentTime;
      while (pendingIterator.hasNext()) {
        final DataFetcher dataFetcher = pendingIterator.next();
        try {
          final Object element = dataFetcher.fetchDataElement();
          onEventFromDataFetcher(element, dataFetcher);
          // We processed data, so this fetcher is available again (unless it finished).
          pendingIterator.remove();
          if (!(element instanceof Finishmark)) {
            availableFetchers.add(dataFetcher);
          }
        } catch (final NoSuchElementException e) {
          // The current data fetcher is still pending; try the next one.
        } catch (final IOException e) {
          // An input read failure is recoverable: ask the master to retry this task.
          taskStateManager.onTaskStateChanged(TaskState.State.SHOULD_RETRY,
            Optional.empty(), Optional.of(TaskState.RecoverableTaskFailureCause.INPUT_READ_FAILURE));
          LOG.error("{} Execution Failed (Recoverable: input read failure)!", taskId, e);
          return false;
        }
      }
    }
    // If there are no available fetchers, sleep and retry the pending fetchers
    // after the polling interval.
    if (availableFetchers.isEmpty() && !pendingFetchers.isEmpty()) {
      try {
        Thread.sleep(pollingInterval);
      } catch (final InterruptedException e) {
        // Restore the interrupt flag and propagate with the cause preserved
        // (printStackTrace removed: the rethrown exception already carries it).
        Thread.currentThread().interrupt();
        throw new RuntimeException(e);
      }
    }
  }
  // Close all data fetchers.
  fetchers.forEach(fetcher -> {
    try {
      fetcher.close();
    } catch (final Exception e) {
      // Propagate with the cause preserved (printStackTrace removed).
      throw new RuntimeException(e);
    }
  });
  return true;
}
////////////////////////////////////////////// Helper methods for setting up initial data structures
/**
 * Builds the inter-task additional-output writers of a vertex, keyed by output tag.
 *
 * @param irVertex source irVertex
 * @param outEdgesToChildrenTasks outgoing edges to child tasks
 * @param intermediateDataIOFactory factory used to create the writers
 * @return map from additional-output tag to the writers for that tag
 */
private Map<String, List<OutputWriter>> getExternalAdditionalOutputMap(
  final IRVertex irVertex,
  final List<StageEdge> outEdgesToChildrenTasks,
  final IntermediateDataIOFactory intermediateDataIOFactory) {
  final Map<String, List<OutputWriter>> taggedWriters = new HashMap<>();
  outEdgesToChildrenTasks.stream()
    .filter(edge -> edge.getSrcIRVertex().getId().equals(irVertex.getId()))
    .filter(edge -> edge.getPropertyValue(AdditionalOutputTagProperty.class).isPresent())
    .forEach(edge -> {
      final String tag = edge.getPropertyValue(AdditionalOutputTagProperty.class).get();
      final OutputWriter writer = intermediateDataIOFactory.createWriter(taskId, edge);
      taggedWriters.computeIfAbsent(tag, unused -> new ArrayList<>()).add(writer);
    });
  return taggedWriters;
}
/**
 * Return a map of Internal Outputs associated with their output tag.
 * If an edge has no output tag, its info is filed under the main output tag.
 *
 * @param irVertex source irVertex
 * @param irVertexDag DAG of IRVertex and RuntimeEdge
 * @param edgeIndexMap Map of edge and index
 * @param operatorWatermarkManagerMap Map of irVertex and InputWatermarkManager
 * @return The map of output tag to the list of next intra-task operator information.
 */
private Map<String, List<NextIntraTaskOperatorInfo>> getInternalOutputMap(
  final IRVertex irVertex,
  final DAG<IRVertex, RuntimeEdge<IRVertex>> irVertexDag,
  final Map<Edge, Integer> edgeIndexMap,
  final Map<IRVertex, InputWatermarkManager> operatorWatermarkManagerMap) {
  final Map<String, List<NextIntraTaskOperatorInfo>> taggedOutputs = new HashMap<>();
  irVertexDag.getOutgoingEdgesOf(irVertex.getId()).forEach(edge -> {
    // Untagged edges belong to the main output.
    final String outputTag = edge.getPropertyValue(AdditionalOutputTagProperty.class)
      .orElse(AdditionalOutputTagProperty.getMainOutputTag());
    final int edgeIndex = edgeIndexMap.get(edge);
    final OperatorVertex nextOperator = (OperatorVertex) edge.getDst();
    final InputWatermarkManager watermarkManager = operatorWatermarkManagerMap.get(nextOperator);
    taggedOutputs.computeIfAbsent(outputTag, unused -> new ArrayList<>())
      .add(new NextIntraTaskOperatorInfo(edgeIndex, nextOperator, watermarkManager));
  });
  return taggedOutputs;
}
/**
 * Return inter-task OutputWriters, for single output or output associated with main tag.
 *
 * @param irVertex source irVertex
 * @param outEdgesToChildrenTasks outgoing edges to child tasks
 * @param intermediateDataIOFactory intermediateDataIOFactory
 * @return OutputWriters for main children tasks
 */
private List<OutputWriter> getExternalMainOutputs(final IRVertex irVertex,
                                                  final List<StageEdge> outEdgesToChildrenTasks,
                                                  final IntermediateDataIOFactory intermediateDataIOFactory) {
  final List<OutputWriter> mainWriters = new ArrayList<>();
  for (final StageEdge edge : outEdgesToChildrenTasks) {
    final boolean fromThisVertex = edge.getSrcIRVertex().getId().equals(irVertex.getId());
    // An edge without an additional-output tag is a main output.
    final boolean isMainOutput = !edge.getPropertyValue(AdditionalOutputTagProperty.class).isPresent();
    if (fromThisVertex && isMainOutput) {
      mainWriters.add(intermediateDataIOFactory.createWriter(taskId, edge));
    }
  }
  return mainWriters;
}
/**
 * Returns the Readable registered for a source vertex; empty for non-source vertices.
 *
 * @param irVertex the vertex to look up.
 * @param irVertexIdToReadable vertex-id to Readable mapping from the task.
 * @return the Readable, or empty if the vertex is not a source.
 * @throws IllegalStateException if a source vertex has no registered Readable.
 */
private Optional<Readable> getSourceVertexReader(final IRVertex irVertex,
                                                 final Map<String, Readable> irVertexIdToReadable) {
  if (!(irVertex instanceof SourceVertex)) {
    return Optional.empty();
  }
  final Readable readable = irVertexIdToReadable.get(irVertex.getId());
  if (readable == null) {
    throw new IllegalStateException(irVertex.toString());
  }
  return Optional.of(readable);
}
/**
 * Creates an InputReader for each incoming inter-task edge from parent tasks.
 *
 * @param dstTaskId id of the destination (this) task.
 * @param inEdgesFromParentTasks incoming stage edges from parent tasks.
 * @param intermediateDataIOFactory factory used to create the readers.
 * @return one reader per incoming edge, in edge order.
 */
private List<InputReader> getParentTaskReaders(final String dstTaskId,
                                               final List<StageEdge> inEdgesFromParentTasks,
                                               final IntermediateDataIOFactory intermediateDataIOFactory) {
  final List<InputReader> readers = new ArrayList<>(inEdgesFromParentTasks.size());
  for (final StageEdge inEdge : inEdgesFromParentTasks) {
    readers.add(intermediateDataIOFactory.createReader(dstTaskId, inEdge.getSrcIRVertex(), inEdge));
  }
  return readers;
}
////////////////////////////////////////////// Transform-specific helper methods
/**
 * Calls prepare() on the vertex's transform, if the vertex is an operator vertex.
 *
 * @param vertexHarness the harness whose transform is prepared.
 */
private void prepareTransform(final VertexHarness vertexHarness) {
  final IRVertex vertex = vertexHarness.getIRVertex();
  if (vertex instanceof OperatorVertex) {
    final Transform transform = ((OperatorVertex) vertex).getTransform();
    transform.prepare(vertexHarness.getContext(), vertexHarness.getOutputCollector());
  }
}
/**
 * Closes the vertex's transform (if it is an operator vertex), then forwards any
 * data the transform collected in its context to the runtime master.
 *
 * @param vertexHarness the harness whose transform is closed.
 */
private void closeTransform(final VertexHarness vertexHarness) {
  final IRVertex vertex = vertexHarness.getIRVertex();
  if (vertex instanceof OperatorVertex) {
    ((OperatorVertex) vertex).getTransform().close();
  }
  // NOTE(review): presumably this delivers run-time optimization data collected by the
  // transform — confirm against the master-side ExecutorDataCollected handler.
  vertexHarness.getContext().getSerializedData().ifPresent(data ->
    persistentConnectionToMasterMap.getMessageSender(MessageEnvironment.RUNTIME_MASTER_MESSAGE_LISTENER_ID).send(
      ControlMessage.Message.newBuilder()
        .setId(RuntimeIdManager.generateMessageId())
        .setListenerId(MessageEnvironment.RUNTIME_MASTER_MESSAGE_LISTENER_ID)
        .setType(ControlMessage.MessageType.ExecutorDataCollected)
        .setDataCollected(ControlMessage.DataCollectMessage.newBuilder().setData(data).build())
        .build()));
}
////////////////////////////////////////////// Misc
/**
 * Marks the given vertex as put on hold; doExecute() then reports ON_HOLD
 * instead of COMPLETE when the task finishes.
 *
 * @param irVertex the vertex to put on hold.
 */
public void setIRVertexPutOnHold(final IRVertex irVertex) {
idOfVertexPutOnHold = irVertex.getId();
}
/**
 * Finalize the output write of this vertex.
 * As element-wise output write is done and the block is in memory,
 * flush the block into the designated data store and commit it.
 *
 * @param vertexHarness harness.
 */
private void finalizeOutputWriters(final VertexHarness vertexHarness) {
  final List<Long> writtenBytesList = new ArrayList<>();
  // Close the main-output writers and record their written-byte counts when reported.
  for (final OutputWriter writer : vertexHarness.getWritersToMainChildrenTasks()) {
    writer.close();
    writer.getWrittenBytes().ifPresent(writtenBytesList::add);
  }
  // Close the writers of every additional (tagged) output.
  vertexHarness.getWritersToAdditionalChildrenTasks().values().forEach(writers ->
    writers.forEach(writer -> {
      writer.close();
      writer.getWrittenBytes().ifPresent(writtenBytesList::add);
    }));
  long totalWrittenBytes = 0;
  for (final long bytes : writtenBytesList) {
    totalWrittenBytes += bytes;
  }
  // TODO #236: Decouple metric collection and sending logic
  metricMessageSender.send(TASK_METRIC_ID, taskId, "taskOutputBytes",
    SerializationUtils.serialize(totalWrittenBytes));
}
}
|
googleapis/google-cloud-java | 35,552 | java-datacatalog/proto-google-cloud-datacatalog-v1/src/main/java/com/google/cloud/datacatalog/v1/UpdateEntryGroupRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/datacatalog/v1/datacatalog.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.datacatalog.v1;
/**
*
*
* <pre>
* Request message for
* [UpdateEntryGroup][google.cloud.datacatalog.v1.DataCatalog.UpdateEntryGroup].
* </pre>
*
* Protobuf type {@code google.cloud.datacatalog.v1.UpdateEntryGroupRequest}
*/
public final class UpdateEntryGroupRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.datacatalog.v1.UpdateEntryGroupRequest)
UpdateEntryGroupRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use UpdateEntryGroupRequest.newBuilder() to construct.
private UpdateEntryGroupRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private UpdateEntryGroupRequest() {}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new UpdateEntryGroupRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.datacatalog.v1.Datacatalog
.internal_static_google_cloud_datacatalog_v1_UpdateEntryGroupRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.datacatalog.v1.Datacatalog
.internal_static_google_cloud_datacatalog_v1_UpdateEntryGroupRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.datacatalog.v1.UpdateEntryGroupRequest.class,
com.google.cloud.datacatalog.v1.UpdateEntryGroupRequest.Builder.class);
}
private int bitField0_;
public static final int ENTRY_GROUP_FIELD_NUMBER = 1;
private com.google.cloud.datacatalog.v1.EntryGroup entryGroup_;
/**
*
*
* <pre>
* Required. Updates for the entry group. The `name` field must be set.
* </pre>
*
* <code>
* .google.cloud.datacatalog.v1.EntryGroup entry_group = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the entryGroup field is set.
*/
@java.lang.Override
public boolean hasEntryGroup() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. Updates for the entry group. The `name` field must be set.
* </pre>
*
* <code>
* .google.cloud.datacatalog.v1.EntryGroup entry_group = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The entryGroup.
*/
@java.lang.Override
public com.google.cloud.datacatalog.v1.EntryGroup getEntryGroup() {
return entryGroup_ == null
? com.google.cloud.datacatalog.v1.EntryGroup.getDefaultInstance()
: entryGroup_;
}
/**
*
*
* <pre>
* Required. Updates for the entry group. The `name` field must be set.
* </pre>
*
* <code>
* .google.cloud.datacatalog.v1.EntryGroup entry_group = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.datacatalog.v1.EntryGroupOrBuilder getEntryGroupOrBuilder() {
return entryGroup_ == null
? com.google.cloud.datacatalog.v1.EntryGroup.getDefaultInstance()
: entryGroup_;
}
public static final int UPDATE_MASK_FIELD_NUMBER = 2;
private com.google.protobuf.FieldMask updateMask_;
/**
*
*
* <pre>
* Names of fields whose values to overwrite on an entry group.
*
* If this parameter is absent or empty, all modifiable fields
* are overwritten. If such fields are non-required and omitted in the
* request body, their values are emptied.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*
* @return Whether the updateMask field is set.
*/
@java.lang.Override
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Names of fields whose values to overwrite on an entry group.
*
* If this parameter is absent or empty, all modifiable fields
* are overwritten. If such fields are non-required and omitted in the
* request body, their values are emptied.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*
* @return The updateMask.
*/
@java.lang.Override
public com.google.protobuf.FieldMask getUpdateMask() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
/**
*
*
* <pre>
* Names of fields whose values to overwrite on an entry group.
*
* If this parameter is absent or empty, all modifiable fields
* are overwritten. If such fields are non-required and omitted in the
* request body, their values are emptied.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
@java.lang.Override
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getEntryGroup());
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeMessage(2, getUpdateMask());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getEntryGroup());
}
if (((bitField0_ & 0x00000002) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.datacatalog.v1.UpdateEntryGroupRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.datacatalog.v1.UpdateEntryGroupRequest other =
        (com.google.cloud.datacatalog.v1.UpdateEntryGroupRequest) obj;
    // Presence must match first; values are compared only when both are set.
    if (hasEntryGroup() != other.hasEntryGroup()) return false;
    if (hasEntryGroup()) {
      if (!getEntryGroup().equals(other.getEntryGroup())) return false;
    }
    if (hasUpdateMask() != other.hasUpdateMask()) return false;
    if (hasUpdateMask()) {
      if (!getUpdateMask().equals(other.getUpdateMask())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // 0 doubles as the "not yet computed" sentinel for the memoized hash.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    // Only set fields contribute, keeping hashCode consistent with equals().
    if (hasEntryGroup()) {
      hash = (37 * hash) + ENTRY_GROUP_FIELD_NUMBER;
      hash = (53 * hash) + getEntryGroup().hashCode();
    }
    if (hasUpdateMask()) {
      hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
      hash = (53 * hash) + getUpdateMask().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard protobuf static parsing entry points; all delegate to PARSER,
  // optionally with an extension registry for resolving extensions.
  public static com.google.cloud.datacatalog.v1.UpdateEntryGroupRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.datacatalog.v1.UpdateEntryGroupRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.datacatalog.v1.UpdateEntryGroupRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.datacatalog.v1.UpdateEntryGroupRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.datacatalog.v1.UpdateEntryGroupRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.datacatalog.v1.UpdateEntryGroupRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  // Stream variants wrap IOExceptions via the GeneratedMessageV3 helpers.
  public static com.google.cloud.datacatalog.v1.UpdateEntryGroupRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.datacatalog.v1.UpdateEntryGroupRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Delimited variants read a leading varint length prefix before the message.
  public static com.google.cloud.datacatalog.v1.UpdateEntryGroupRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.datacatalog.v1.UpdateEntryGroupRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.datacatalog.v1.UpdateEntryGroupRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.datacatalog.v1.UpdateEntryGroupRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  /** Creates an empty builder for this message type. */
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  /** Creates a builder pre-populated by merging from {@code prototype}. */
  public static Builder newBuilder(
      com.google.cloud.datacatalog.v1.UpdateEntryGroupRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // Skip the mergeFrom for the default instance: nothing to copy.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Request message for
* [UpdateEntryGroup][google.cloud.datacatalog.v1.DataCatalog.UpdateEntryGroup].
* </pre>
*
* Protobuf type {@code google.cloud.datacatalog.v1.UpdateEntryGroupRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.datacatalog.v1.UpdateEntryGroupRequest)
com.google.cloud.datacatalog.v1.UpdateEntryGroupRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.datacatalog.v1.Datacatalog
          .internal_static_google_cloud_datacatalog_v1_UpdateEntryGroupRequest_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.datacatalog.v1.Datacatalog
          .internal_static_google_cloud_datacatalog_v1_UpdateEntryGroupRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.datacatalog.v1.UpdateEntryGroupRequest.class,
              com.google.cloud.datacatalog.v1.UpdateEntryGroupRequest.Builder.class);
    }
    // Construct using com.google.cloud.datacatalog.v1.UpdateEntryGroupRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    // Eagerly creates nested field builders when the runtime requests it
    // (alwaysUseFieldBuilders), so change notifications propagate correctly.
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getEntryGroupFieldBuilder();
        getUpdateMaskFieldBuilder();
      }
    }
    @java.lang.Override
    public Builder clear() {
      // Reset both fields and dispose nested builders so they are re-created lazily.
      super.clear();
      bitField0_ = 0;
      entryGroup_ = null;
      if (entryGroupBuilder_ != null) {
        entryGroupBuilder_.dispose();
        entryGroupBuilder_ = null;
      }
      updateMask_ = null;
      if (updateMaskBuilder_ != null) {
        updateMaskBuilder_.dispose();
        updateMaskBuilder_ = null;
      }
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.datacatalog.v1.Datacatalog
          .internal_static_google_cloud_datacatalog_v1_UpdateEntryGroupRequest_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.datacatalog.v1.UpdateEntryGroupRequest getDefaultInstanceForType() {
      return com.google.cloud.datacatalog.v1.UpdateEntryGroupRequest.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.datacatalog.v1.UpdateEntryGroupRequest build() {
      // Unlike buildPartial(), throws if the result is not fully initialized.
      com.google.cloud.datacatalog.v1.UpdateEntryGroupRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.datacatalog.v1.UpdateEntryGroupRequest buildPartial() {
      com.google.cloud.datacatalog.v1.UpdateEntryGroupRequest result =
          new com.google.cloud.datacatalog.v1.UpdateEntryGroupRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Copies each set field (per the builder's bitField0_) into the new message
    // and transfers the corresponding has-bits.
    private void buildPartial0(com.google.cloud.datacatalog.v1.UpdateEntryGroupRequest result) {
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.entryGroup_ = entryGroupBuilder_ == null ? entryGroup_ : entryGroupBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
        to_bitField0_ |= 0x00000002;
      }
      result.bitField0_ |= to_bitField0_;
    }
    // Reflective mutation overrides simply delegate to the generated superclass.
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      // Dispatch to the typed overload when possible; otherwise fall back to
      // the reflective field-by-field merge.
      if (other instanceof com.google.cloud.datacatalog.v1.UpdateEntryGroupRequest) {
        return mergeFrom((com.google.cloud.datacatalog.v1.UpdateEntryGroupRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Typed merge: only fields set on `other` are merged in.
    public Builder mergeFrom(com.google.cloud.datacatalog.v1.UpdateEntryGroupRequest other) {
      if (other == com.google.cloud.datacatalog.v1.UpdateEntryGroupRequest.getDefaultInstance())
        return this;
      if (other.hasEntryGroup()) {
        mergeEntryGroup(other.getEntryGroup());
      }
      if (other.hasUpdateMask()) {
        mergeUpdateMask(other.getUpdateMask());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      // No required fields, so a builder is always buildable.
      return true;
    }
    // Wire-format parse loop: dispatches on each tag (field number << 3 | wire
    // type); tag 0 marks end of input, unrecognized tags are preserved as
    // unknown fields.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                input.readMessage(getEntryGroupFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10: entry_group (field 1, length-delimited)
            case 18:
              {
                input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18: update_mask (field 2, length-delimited)
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        // Notify parents even on failure, since fields may have been partially set.
        onChanged();
      } // finally
      return this;
    }
    // Has-bits: bit 0 = entry_group, bit 1 = update_mask.
    private int bitField0_;
    private com.google.cloud.datacatalog.v1.EntryGroup entryGroup_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.datacatalog.v1.EntryGroup,
            com.google.cloud.datacatalog.v1.EntryGroup.Builder,
            com.google.cloud.datacatalog.v1.EntryGroupOrBuilder>
        entryGroupBuilder_;
    /**
     * Required. Updates for the entry group. The `name` field must be set.
     *
     * <code>.google.cloud.datacatalog.v1.EntryGroup entry_group = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return Whether the entryGroup field is set.
     */
    public boolean hasEntryGroup() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * Returns the current entry_group value: from the nested builder when one
     * is active, otherwise the plain field (default instance when unset).
     *
     * @return The entryGroup.
     */
    public com.google.cloud.datacatalog.v1.EntryGroup getEntryGroup() {
      if (entryGroupBuilder_ == null) {
        return entryGroup_ == null
            ? com.google.cloud.datacatalog.v1.EntryGroup.getDefaultInstance()
            : entryGroup_;
      } else {
        return entryGroupBuilder_.getMessage();
      }
    }
    /** Sets entry_group to the given message; rejects null. */
    public Builder setEntryGroup(com.google.cloud.datacatalog.v1.EntryGroup value) {
      if (entryGroupBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        entryGroup_ = value;
      } else {
        entryGroupBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /** Sets entry_group from a builder (the builder is built immediately). */
    public Builder setEntryGroup(
        com.google.cloud.datacatalog.v1.EntryGroup.Builder builderForValue) {
      if (entryGroupBuilder_ == null) {
        entryGroup_ = builderForValue.build();
      } else {
        entryGroupBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     * Field-merges the given message into entry_group (protobuf merge
     * semantics) when a non-default value is already set; otherwise installs
     * it directly.
     */
    public Builder mergeEntryGroup(com.google.cloud.datacatalog.v1.EntryGroup value) {
      if (entryGroupBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)
            && entryGroup_ != null
            && entryGroup_ != com.google.cloud.datacatalog.v1.EntryGroup.getDefaultInstance()) {
          getEntryGroupBuilder().mergeFrom(value);
        } else {
          entryGroup_ = value;
        }
      } else {
        entryGroupBuilder_.mergeFrom(value);
      }
      if (entryGroup_ != null) {
        bitField0_ |= 0x00000001;
        onChanged();
      }
      return this;
    }
    /** Clears entry_group and disposes any nested builder. */
    public Builder clearEntryGroup() {
      bitField0_ = (bitField0_ & ~0x00000001);
      entryGroup_ = null;
      if (entryGroupBuilder_ != null) {
        entryGroupBuilder_.dispose();
        entryGroupBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /** Returns a mutable nested builder for entry_group, marking the field set. */
    public com.google.cloud.datacatalog.v1.EntryGroup.Builder getEntryGroupBuilder() {
      bitField0_ |= 0x00000001;
      onChanged();
      return getEntryGroupFieldBuilder().getBuilder();
    }
    /** Read-only view of entry_group without forcing creation of a nested builder. */
    public com.google.cloud.datacatalog.v1.EntryGroupOrBuilder getEntryGroupOrBuilder() {
      if (entryGroupBuilder_ != null) {
        return entryGroupBuilder_.getMessageOrBuilder();
      } else {
        return entryGroup_ == null
            ? com.google.cloud.datacatalog.v1.EntryGroup.getDefaultInstance()
            : entryGroup_;
      }
    }
    // Lazily creates the single-field builder for entry_group; once created,
    // the builder owns the value and the plain field is nulled out.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.datacatalog.v1.EntryGroup,
            com.google.cloud.datacatalog.v1.EntryGroup.Builder,
            com.google.cloud.datacatalog.v1.EntryGroupOrBuilder>
        getEntryGroupFieldBuilder() {
      if (entryGroupBuilder_ == null) {
        entryGroupBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.datacatalog.v1.EntryGroup,
                com.google.cloud.datacatalog.v1.EntryGroup.Builder,
                com.google.cloud.datacatalog.v1.EntryGroupOrBuilder>(
                getEntryGroup(), getParentForChildren(), isClean());
        entryGroup_ = null;
      }
      return entryGroupBuilder_;
    }
    private com.google.protobuf.FieldMask updateMask_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>
        updateMaskBuilder_;
    /**
     * Names of fields whose values to overwrite on an entry group. If this
     * parameter is absent or empty, all modifiable fields are overwritten
     * (non-required fields omitted in the request body are emptied).
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     *
     * @return Whether the updateMask field is set.
     */
    public boolean hasUpdateMask() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * Returns the current update_mask value: from the nested builder when one
     * is active, otherwise the plain field (default instance when unset).
     *
     * @return The updateMask.
     */
    public com.google.protobuf.FieldMask getUpdateMask() {
      if (updateMaskBuilder_ == null) {
        return updateMask_ == null
            ? com.google.protobuf.FieldMask.getDefaultInstance()
            : updateMask_;
      } else {
        return updateMaskBuilder_.getMessage();
      }
    }
    /** Sets update_mask to the given message; rejects null. */
    public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
      if (updateMaskBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        updateMask_ = value;
      } else {
        updateMaskBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /** Sets update_mask from a builder (the builder is built immediately). */
    public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
      if (updateMaskBuilder_ == null) {
        updateMask_ = builderForValue.build();
      } else {
        updateMaskBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     * Field-merges the given message into update_mask (protobuf merge
     * semantics) when a non-default value is already set; otherwise installs
     * it directly.
     */
    public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
      if (updateMaskBuilder_ == null) {
        if (((bitField0_ & 0x00000002) != 0)
            && updateMask_ != null
            && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
          getUpdateMaskBuilder().mergeFrom(value);
        } else {
          updateMask_ = value;
        }
      } else {
        updateMaskBuilder_.mergeFrom(value);
      }
      if (updateMask_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }
    /** Clears update_mask and disposes any nested builder. */
    public Builder clearUpdateMask() {
      bitField0_ = (bitField0_ & ~0x00000002);
      updateMask_ = null;
      if (updateMaskBuilder_ != null) {
        updateMaskBuilder_.dispose();
        updateMaskBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /** Returns a mutable nested builder for update_mask, marking the field set. */
    public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
      bitField0_ |= 0x00000002;
      onChanged();
      return getUpdateMaskFieldBuilder().getBuilder();
    }
    /** Read-only view of update_mask without forcing creation of a nested builder. */
    public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
      if (updateMaskBuilder_ != null) {
        return updateMaskBuilder_.getMessageOrBuilder();
      } else {
        return updateMask_ == null
            ? com.google.protobuf.FieldMask.getDefaultInstance()
            : updateMask_;
      }
    }
    // Lazily creates the single-field builder for update_mask; once created,
    // the builder owns the value and the plain field is nulled out.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>
        getUpdateMaskFieldBuilder() {
      if (updateMaskBuilder_ == null) {
        updateMaskBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.protobuf.FieldMask,
                com.google.protobuf.FieldMask.Builder,
                com.google.protobuf.FieldMaskOrBuilder>(
                getUpdateMask(), getParentForChildren(), isClean());
        updateMask_ = null;
      }
      return updateMaskBuilder_;
    }
    // Unknown-field handling delegates to the generated superclass.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.datacatalog.v1.UpdateEntryGroupRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.datacatalog.v1.UpdateEntryGroupRequest)
  // Singleton default (empty) instance shared by all callers.
  private static final com.google.cloud.datacatalog.v1.UpdateEntryGroupRequest DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.datacatalog.v1.UpdateEntryGroupRequest();
  }
  public static com.google.cloud.datacatalog.v1.UpdateEntryGroupRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser used by all parseFrom entry points. Parse errors carry the
  // partially built message via setUnfinishedMessage.
  private static final com.google.protobuf.Parser<UpdateEntryGroupRequest> PARSER =
      new com.google.protobuf.AbstractParser<UpdateEntryGroupRequest>() {
        @java.lang.Override
        public UpdateEntryGroupRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<UpdateEntryGroupRequest> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<UpdateEntryGroupRequest> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.datacatalog.v1.UpdateEntryGroupRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/jena | 35,798 | jena-core/src/main/java/org/apache/jena/reasoner/rulesys/impl/LPInterpreter.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jena.reasoner.rulesys.impl;
import java.util.*;
import org.apache.jena.graph.* ;
import org.apache.jena.reasoner.* ;
import org.apache.jena.reasoner.rulesys.* ;
import org.apache.jena.reasoner.rulesys.impl.RuleClauseCode.CompileState.RuleClauseCodeList ;
import org.apache.jena.util.PrintUtil ;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Bytecode interpreter engine for the LP version of the backward
* chaining rule system. An instance of this is forked off for each
* parallel query.
*/
public class LPInterpreter {
    // =======================================================================
    // Variables
    /** The engine which is using this interpreter */
    protected LPBRuleEngine engine;
    /** The execution context that should be notified of suspended branches */
    protected LPInterpreterContext iContext;
    /** True if the engine has terminated */
    protected boolean isComplete = false;
    /** The set of temporary variables (Ti) in use by this interpreter */
    protected Node[] tVars = new Node[RuleClauseCode.MAX_TEMPORARY_VARS];
    /** The set of argument variables (Ai) in use by this interpreter */
    protected Node[] argVars = new Node[RuleClauseCode.MAX_ARGUMENT_VARS];
    /** The set of "permanent" variables (Yi) in use by this interpreter */
    protected Node[] pVars = null;
    /** The current environment frame */
    protected EnvironmentFrame envFrame;
    /** The current choice point frame (head of the backtrack chain) */
    protected FrameObject cpFrame;
    /** The trail of variable bindings that have to be unwound on backtrack */
    protected ArrayList<Node> trail = new ArrayList<>();
    /** The execution context description to be passed to builtins */
    protected RuleContext context;
    /** Trick to allow the very top level triple lookup to return results with reduced store turnover */
    protected TopLevelTripleMatchFrame topTMFrame;
    /** Original set up goal, only used for debugging */
    protected TriplePattern goal;
    /** Logger used for ENTER/RETURN trace output when engine tracing is on */
    static Logger logger = LoggerFactory.getLogger(LPInterpreter.class);
// =======================================================================
// Constructors
    /**
     * Constructor used to construct top level calls.
     * @param engine the engine which is calling this interpreter
     * @param goal the query to be satisfied
     */
    public LPInterpreter(LPBRuleEngine engine, TriplePattern goal) {
        // Delegates: compile the clause set for this goal and flag it as a top-level call.
        this(engine, goal, engine.getRuleStore().codeFor(goal), true);
    }
    /**
     * Constructor.
     * @param engine the engine which is calling this interpreter
     * @param goal the query to be satisfied
     * @param isTop true if this is a top level call from the outside iterator, false means it is an
     * internal generator call which means we don't need to insert a tabled call
     */
    public LPInterpreter(LPBRuleEngine engine, TriplePattern goal, boolean isTop) {
        // Delegates after compiling the clause set for this goal.
        this(engine, goal, engine.getRuleStore().codeFor(goal), isTop);
    }
    /**
     * Constructor.
     * @param engine the engine which is calling this interpreter
     * @param goal the query to be satisfied
     * @param clauses the set of code blocks needed to implement this goal
     * @param isTop true if this is a top level call from the outside iterator, false means it is an
     * internal generator call which means we don't need to insert a tabled call
     */
    public LPInterpreter(LPBRuleEngine engine, TriplePattern goal, List<RuleClauseCode> clauses, boolean isTop) {
        this.engine = engine;
        this.goal = goal;       // Used for debug only
        // Construct dummy top environment which is a call into the clauses for this goal.
        // The derivation-logging variant records extra provenance per frame.
        if (engine.getDerivationLogging()) {
            envFrame = new EnvironmentFrameWithDerivation(RuleClauseCode.returnCodeBlock);
        } else {
            envFrame = new EnvironmentFrame(RuleClauseCode.returnCodeBlock);
        }
        envFrame.allocate(RuleClauseCode.MAX_PERMANENT_VARS);
        // Standardize the goal terms apart, loading them into both the
        // permanent variables of the top frame and the argument registers.
        HashMap<Node, Node> mappedVars = new HashMap<>();
        envFrame.pVars[0] = argVars[0] = standardize(goal.getSubject(), mappedVars);
        envFrame.pVars[1] = argVars[1] = standardize(goal.getPredicate(), mappedVars);
        envFrame.pVars[2] = argVars[2] = standardize(goal.getObject(), mappedVars);
        if (engine.getDerivationLogging()) {
            ((EnvironmentFrameWithDerivation)envFrame).initDerivationRecord(argVars);
        }
        // Set up the initial call: top-level tabled goals go through the
        // tabling machinery, everything else calls the clauses directly.
        if (clauses != null && clauses.size() > 0) {
            if (isTop && engine.getRuleStore().isTabled(goal)) {
                setupTabledCall(0, 0);
//                setupClauseCall(0, 0, clauses);
            } else {
                setupClauseCall(0, 0, clauses, goal.isGround());
            }
        }
//        TripleMatchFrame tmFrame = new TripleMatchFrame(this);
        // Seed the choice point chain with a top-level raw triple match so the
        // outer iterator can return store results with reduced turnover.
        topTMFrame = new TopLevelTripleMatchFrame(this, goal);
        topTMFrame.linkTo(cpFrame);
        topTMFrame.setContinuation(0, 0);
        cpFrame = topTMFrame;
    }
/**
* Called by top level interpreter to set to execution context for this interpreter to be
* top level instead of an internal generator.
*/
public void setTopInterpreter(LPInterpreterContext context) {
iContext = context;
FrameObject topChoice = topTMFrame.getLink();
if (topChoice instanceof ConsumerChoicePointFrame) {
((ConsumerChoicePointFrame)topChoice).context = context;
}
}
    // =======================================================================
    // Control methods
    /**
     * Stop the current work. This is called if the top level results iterator has
     * either finished or the calling application has had enough.
     */
    public void close() {
        isComplete = true;
        // Close the choice point chain before detaching from the engine so no
        // further branches can be resumed.
        if (cpFrame != null) cpFrame.close();
        engine.detach(this);
        if (topTMFrame != null) topTMFrame.close();
    }
/**
* Start the interpreter running with the given context.
*/
public void setState(LPInterpreterState state) {
if (state instanceof ConsumerChoicePointFrame) {
restoreState((ConsumerChoicePointFrame) state);
} else {
iContext = (LPInterpreterContext) state;
}
}
    /**
     * Return the next result from this engine, no further initialization.
     * Should be called from within an appropriately synchronized block.
     *
     * @return a StateFlag (FAIL or SUSPEND) when no answer is currently
     * available; the top frame's last match when the run ended SATISFIED;
     * otherwise a Triple built by dereferencing the bound result registers
     */
    public Object next() {
        boolean traceOn = engine.isTraceOn();
//        System.out.println("next() on interpreter for goal " + goal);
        StateFlag answer = run();
//        System.out.println("end next() on interpreter for goal " + goal);
        if (answer == StateFlag.FAIL || answer == StateFlag.SUSPEND) {
            // Propagate the flag itself so the caller can distinguish the two cases.
            return answer;
        } else if (answer == StateFlag.SATISFIED) {
            if (traceOn) logger.info("RETURN: " + topTMFrame.lastMatch);
            return topTMFrame.lastMatch;
        } else {
            // Active success: materialize the answer from the permanent variables,
            // dereferencing through any variable bindings (object may be a functor).
            Triple t = Triple.create(deref(pVars[0]), deref(pVars[1]), derefPossFunctor(pVars[2]));
            if (traceOn) logger.info("RETURN: " + t);
            return t;
        }
    }
    /*
     * NOTE: truncated javadoc fragment ("Preserve the current interpreter state
     * in the given ... @return") left over from a removed or never-written
     * method. Kept as a plain block comment so documentation tooling does not
     * attach it to getEngine() below.
     */
    /**
     * Return the engine which owns this interpreter.
     *
     * @return the owning LPBRuleEngine
     */
    public LPBRuleEngine getEngine() {
        return engine;
    }
    /**
     * Return the current choice point frame that can be used to restart the
     * interpreter at this point.
     *
     * @return the head of the current choice point chain
     */
    public FrameObject getChoiceFrame() {
        return cpFrame;
    }
    /**
     * Return the context in which this interpreter is running, that is
     * either the Generator for a tabled goal or a top level iterator.
     *
     * @return the current execution context
     */
    public LPInterpreterContext getContext() {
        return iContext;
    }
// =======================================================================
// Engine implementation

/**
 * Restore the current choice point and restart execution of the LP code
 * until either find a successful branch (in which case exit with StateFlag.ACTIVE
 * and variables bound to the correct results) or exhaust all choice points (in
 * which case exit with StateFlag.FAIL and no bound results). In future tabled
 * version could also exit with StateFlag.SUSPEND in cases where the interpreter
 * needs to suspend to await tabled results from a parallel proof tree.
 * <p>
 * Structure: the outer {@code main} loop pops/restores one choice point per
 * iteration; the inner {@code interpreter} loop then executes the clause's
 * bytecode until it either fails (back to {@code main}), suspends, sets up a
 * new call (back to {@code main} with a freshly pushed frame), or completes.
 */
protected StateFlag run() {
    int pc = 0;                         // Program code counter
    int ac = 0;                         // Program arg code counter
    RuleClauseCode clause = null;       // The clause being executed
    ChoicePointFrame choice = null;
    byte[] code;
    Object[] args;
    boolean traceOn = engine.isTraceOn();
    boolean recordDerivations = engine.getDerivationLogging();

    main: while (cpFrame != null) {
        // restore choice point
        if (cpFrame instanceof ChoicePointFrame) {
            choice = (ChoicePointFrame)cpFrame;
            if (!choice.hasNext()) {
                // No more choices left in this choice point
                cpFrame = choice.getLink();
                if (traceOn) logger.info("FAIL in clause " + choice.envFrame.clause + " choices exhausted");
                continue main;
            }

            clause = choice.nextClause();
            // Create an execution environment for the new choice of clause
            if (recordDerivations) {
                envFrame = new EnvironmentFrameWithDerivation(clause);
            } else {
                envFrame = new EnvironmentFrame(clause);
            }
            envFrame.linkTo(choice.envFrame);
            envFrame.cpc = choice.cpc;
            envFrame.cac = choice.cac;

            // Restore the choice point state: argument registers and trail
            System.arraycopy(choice.argVars, 0, argVars, 0, RuleClauseCode.MAX_ARGUMENT_VARS);
            int trailMark = choice.trailIndex;
            if (trailMark < trail.size()) {
                unwindTrail(trailMark);
            }
            pc = ac = 0;
            if (recordDerivations) {
                ((EnvironmentFrameWithDerivation)envFrame).initDerivationRecord(argVars);
            }

            if (traceOn) logger.info("ENTER " + clause + " : " + getArgTrace());

            // then fall through into the recreated execution context for the new call

        } else if (cpFrame instanceof TripleMatchFrame) {
            TripleMatchFrame tmFrame = (TripleMatchFrame)cpFrame;

            // Restore the calling context
            envFrame = tmFrame.envFrame;
            clause = envFrame.clause;
            int trailMark = tmFrame.trailIndex;
            if (trailMark < trail.size()) {
                unwindTrail(trailMark);
            }

            // Find the next choice result directly
            if (!tmFrame.nextMatch(this)) {
                // No more matches
                cpFrame = cpFrame.getLink();
                if (traceOn) logger.info("TRIPLE match (" + tmFrame.goal +") -> FAIL");
                continue main;
            }
            if (traceOn) {
                logger.info("TRIPLE match (" + tmFrame.goal +") -> " + getArgTrace());
                logger.info("RENTER " + clause);
            }

            pc = tmFrame.cpc;
            ac = tmFrame.cac;

            if (recordDerivations) {
                if (envFrame instanceof EnvironmentFrameWithDerivation) {
                    ((EnvironmentFrameWithDerivation)envFrame).noteMatch(tmFrame.goal, pc);
                }
            }

            // then fall through to the execution context in which the match was called

        } else if (cpFrame instanceof TopLevelTripleMatchFrame) {
            TopLevelTripleMatchFrame tmFrame = (TopLevelTripleMatchFrame)cpFrame;

            // Find the next choice result directly
            if (!tmFrame.nextMatch(this)) {
                // No more matches
                cpFrame = cpFrame.getLink();
                if (traceOn) logger.info("TRIPLE match (" + tmFrame.goal +") -> FAIL");
                continue main;
            } else {
                // Match but this is the top level so return the triple directly
                if (traceOn) logger.info("TRIPLE match (" + tmFrame.goal +") ->");
                return StateFlag.SATISFIED;
            }

        } else if (cpFrame instanceof ConsumerChoicePointFrame) {
            ConsumerChoicePointFrame ccp = (ConsumerChoicePointFrame)cpFrame;

            // Restore the calling context
            envFrame = ccp.envFrame;
            clause = envFrame.clause;
            if (traceOn) logger.info("RESTORE " + clause + ", due to tabled goal " + ccp.generator.goal);
            int trailMark = ccp.trailIndex;
            if (trailMark < trail.size()) {
                unwindTrail(trailMark);
            }

            // Find the next choice result directly
            StateFlag state = ccp.nextMatch(this);
            if (state == StateFlag.FAIL) {
                // No more matches
                cpFrame = cpFrame.getLink();
                if (traceOn) logger.info("FAIL " + clause);
                continue main;
            } else if (state == StateFlag.SUSPEND) {
                // Require other generators to cycle before resuming this one
                preserveState(ccp);
                iContext.notifyBlockedOn(ccp);
                cpFrame = cpFrame.getLink();
                if (traceOn) logger.info("SUSPEND " + clause);
                continue main;
            }

            pc = ccp.cpc;
            ac = ccp.cac;

            if (recordDerivations) {
                if (envFrame instanceof EnvironmentFrameWithDerivation) {
                    ((EnvironmentFrameWithDerivation)envFrame).noteMatch(ccp.goal, pc);
                }
            }

            // then fall through to the execution context in which the match was called

        } else {
            throw new ReasonerException("Internal error in backward rule system, unrecognized choice point");
        }

        engine.incrementProfile(clause);

        interpreter: while (envFrame != null) {

            // Start of bytecode interpreter loop
            // Init the state variables
            pVars = envFrame.pVars;
            int yi, ai, ti;
            Node arg, constant;
            code = clause.getCode();
            args = clause.getArgs();

            while (true) {
                switch (code[pc++]) {
                    case RuleClauseCode.TEST_BOUND:
                        ai = code[pc++];
                        if (deref(argVars[ai]).isVariable()) {
                            if (traceOn) logger.info("FAIL " + clause);
                            continue main;
                        }
                        break;

                    case RuleClauseCode.TEST_UNBOUND:
                        ai = code[pc++];
                        if (! deref(argVars[ai]).isVariable()) {
                            if (traceOn) logger.info("FAIL " + clause);
                            continue main;
                        }
                        break;

                    case RuleClauseCode.ALLOCATE:
                        int envSize = code[pc++];
                        envFrame.allocate(envSize);
                        pVars = envFrame.pVars;
                        break;

                    case RuleClauseCode.GET_VARIABLE :
                        yi = code[pc++];
                        ai = code[pc++];
                        pVars[yi] = argVars[ai];
                        break;

                    case RuleClauseCode.GET_TEMP :
                        ti = code[pc++];
                        ai = code[pc++];
                        tVars[ti] = argVars[ai];
                        break;

                    case RuleClauseCode.GET_CONSTANT :
                        ai = code[pc++];
                        arg = argVars[ai];
                        if (arg instanceof Node_RuleVariable) arg = ((Node_RuleVariable)arg).deref();
                        constant = (Node) args[ac++];
                        if (arg instanceof Node_RuleVariable) {
                            // Still unbound after deref: bind it to the constant
                            bind(arg, constant);
                        } else {
                            if (!arg.sameValueAs(constant)) {
                                if (traceOn) logger.info("FAIL " + clause);
                                continue main;
                            }
                        }
                        break;

                    case RuleClauseCode.GET_FUNCTOR:
                        Functor func = (Functor)args[ac++];
                        boolean match = false;
                        Node o = argVars[2];
                        if (o instanceof Node_RuleVariable) o = ((Node_RuleVariable)o).deref();
                        if (Functor.isFunctor(o)) {
                            Functor funcArg = (Functor)o.getLiteralValue();
                            if (funcArg.getName().equals(func.getName())) {
                                if (funcArg.getArgLength() == func.getArgLength()) {
                                    Node[] fargs = funcArg.getArgs();
                                    for (int i = 0; i < fargs.length; i++) {
                                        argVars[i+3] = fargs[i];
                                    }
                                    match = true;
                                }
                            }
                        } else if (o.isVariable()) {
                            // Construct a new functor in place
                            Node[] fargs = new Node[func.getArgLength()];
                            Node[] templateArgs = func.getArgs();
                            for (int i = 0; i < fargs.length; i++) {
                                Node template = templateArgs[i];
                                if (template.isVariable()) template = new Node_RuleVariable(null, i+3);
                                fargs[i] = template;
                                argVars[i+3] = template;
                            }
                            Node newFunc = Functor.makeFunctorNode(func.getName(), fargs);
                            bind(((Node_RuleVariable)o).deref(), newFunc);
                            match = true;
                        }
                        if (!match) {
                            if (traceOn) logger.info("FAIL " + clause);
                            continue main;       // fail to unify functor shape
                        }
                        break;

                    case RuleClauseCode.UNIFY_VARIABLE :
                        yi = code[pc++];
                        ai = code[pc++];
                        if (!unify(argVars[ai], pVars[yi])) {
                            if (traceOn) logger.info("FAIL " + clause);
                            continue main;
                        }
                        break;

                    case RuleClauseCode.UNIFY_TEMP :
                        ti = code[pc++];
                        ai = code[pc++];
                        if (!unify(argVars[ai], tVars[ti])) {
                            if (traceOn) logger.info("FAIL " + clause);
                            continue main;
                        }
                        break;

                    case RuleClauseCode.PUT_NEW_VARIABLE:
                        yi = code[pc++];
                        ai = code[pc++];
                        argVars[ai] = pVars[yi] = new Node_RuleVariable(null, yi);
                        break;

                    case RuleClauseCode.PUT_VARIABLE:
                        yi = code[pc++];
                        ai = code[pc++];
                        argVars[ai] = pVars[yi];
                        break;

                    case RuleClauseCode.PUT_DEREF_VARIABLE:
                        yi = code[pc++];
                        ai = code[pc++];
                        argVars[ai] = deref(pVars[yi]);
                        break;

                    case RuleClauseCode.PUT_TEMP:
                        ti = code[pc++];
                        ai = code[pc++];
                        argVars[ai] = tVars[ti];
                        break;

                    case RuleClauseCode.PUT_CONSTANT:
                        ai = code[pc++];
                        argVars[ai] = (Node)args[ac++];
                        break;

                    case RuleClauseCode.CLEAR_ARG:
                        ai = code[pc++];
                        argVars[ai] = new Node_RuleVariable(null, ai);
                        break;

                    case RuleClauseCode.MAKE_FUNCTOR:
                        Functor f = (Functor)args[ac++];
                        Node[] fargs = new Node[f.getArgLength()];
                        System.arraycopy(argVars, 3, fargs, 0, fargs.length);
                        argVars[2] = Functor.makeFunctorNode(f.getName(), fargs);
                        break;

                    case RuleClauseCode.LAST_CALL_PREDICATE:
                        // TODO: improved implementation of last call case
                        // Deliberate fall-through to CALL_PREDICATE for now
                    case RuleClauseCode.CALL_PREDICATE:
                        List<RuleClauseCode> clauses = ((RuleClauseCodeList) args[ac++]).getList();
                        // Check if this call is now grounded
                        boolean groundCall = isGrounded(argVars[0]) && isGrounded(argVars[1]) && isGrounded(argVars[2]);
                        setupClauseCall(pc, ac, clauses, groundCall);
                        setupTripleMatchCall(pc, ac);
                        continue main;

                    case RuleClauseCode.CALL_PREDICATE_INDEX:
                        // This code path is experimental, don't yet know if it has enough
                        // performance benefit to justify the cost of maintaining it.
                        clauses = ((RuleClauseCodeList) args[ac++]).getList();
                        // Check if we can further index the clauses
                        if (!argVars[2].isVariable()) {
                            clauses = engine.getRuleStore().codeFor(
                                new TriplePattern(argVars[0], argVars[1], argVars[2]));
                        }
                        setupClauseCall(pc, ac, clauses, false);
                        setupTripleMatchCall(pc, ac);
                        continue main;

                    case RuleClauseCode.CALL_TRIPLE_MATCH:
                        setupTripleMatchCall(pc, ac);
                        continue main;

                    case RuleClauseCode.CALL_TABLED:
                        setupTabledCall(pc, ac);
                        continue main;

                    case RuleClauseCode.CALL_WILD_TABLED:
                        Node predicate = deref(argVars[1]);
                        if (engine.getRuleStore().isTabled(predicate)) {
                            setupTabledCall(pc, ac);
                        } else {
                            // normal call set up
                            clauses = engine.getRuleStore().codeFor(
                                new TriplePattern(argVars[0], predicate, argVars[2]));
                            if (clauses != null) setupClauseCall(pc, ac, clauses, false);
                            setupTripleMatchCall(pc, ac);
                        }
                        continue main;

                    case RuleClauseCode.PROCEED:
                        // Clause succeeded: return to the caller's continuation point
                        pc = envFrame.cpc;
                        ac = envFrame.cac;
                        if (traceOn) logger.info("EXIT " + clause);
                        if (choice != null) choice.noteSuccess();
                        if (recordDerivations && envFrame.getRule() != null) {
                            if (envFrame instanceof EnvironmentFrameWithDerivation) {
                                EnvironmentFrameWithDerivation efd = (EnvironmentFrameWithDerivation) envFrame;
                                Triple result = efd.getResult();
                                List<Triple> matches = efd.getMatchList();
                                BackwardRuleInfGraphI infGraph = engine.getInfGraph();
                                RuleDerivation d = new RuleDerivation(envFrame.getRule(), result, matches, infGraph);
                                infGraph.logDerivation(result, d);

                                // Also want to record this result in the calling frame
                                if (envFrame.link instanceof EnvironmentFrameWithDerivation) {
                                    EnvironmentFrameWithDerivation pefd = (EnvironmentFrameWithDerivation)envFrame.link;
                                    pefd.noteMatch(new TriplePattern(result), pc);
                                }
                            }
                        }
                        envFrame = (EnvironmentFrame) envFrame.link;
                        if (envFrame != null) {
                            clause = envFrame.clause;
                        }
                        continue interpreter;

                    case RuleClauseCode.CALL_BUILTIN:
                        Builtin builtin = (Builtin)args[ac++];
                        if (context == null) {
                            // Lazily create the rule-context wrapper on first builtin call
                            BBRuleContext bbcontext = new BBRuleContext(engine.getInfGraph());
                            bbcontext.setEnv(new LPBindingEnvironment(this));
                            context = bbcontext;
                        }
                        context.setRule(clause.getRule());
                        if (!builtin.bodyCall(argVars, code[pc++], context)) {
                            if (traceOn) logger.info("FAIL " + clause + ", due to " + builtin.getName());
                            continue main;
                        }
                        break;

                    default :
                        throw new ReasonerException("Internal error in backward rule system\nIllegal op code");
                }
            }
            // End of inner code loop
        }

        // End of bytecode interpreter loop, gets to here if we complete an AND chain
        return StateFlag.ACTIVE;
    }

    // Gets to here if we have run out of choice point frames
    return StateFlag.FAIL;
}
/**
 * Tracing support - return a formatted rendering of the three argument
 * registers (subject, predicate, object) after dereferencing.
 */
private String getArgTrace() {
    return PrintUtil.print(deref(argVars[0]))
            + " " + PrintUtil.print(deref(argVars[1]))
            + " " + PrintUtil.print(deref(argVars[2]));
}
/**
 * Set up a triple match choice point as part of a CALL and push it onto
 * the choice point tree.
 */
private void setupTripleMatchCall(int pc, int ac) {
    TripleMatchFrame frame = new TripleMatchFrame(this);
    frame.linkTo(cpFrame);
    frame.setContinuation(pc, ac);
    cpFrame = frame;
}
/**
 * Set up a clause choice point as part of a CALL and push it onto the
 * choice point tree.
 */
private void setupClauseCall(int pc, int ac, List<RuleClauseCode> clauses, boolean isSingleton) {
    ChoicePointFrame frame = new ChoicePointFrame(this, clauses, isSingleton);
    frame.linkTo(cpFrame);
    frame.setContinuation(pc, ac);
    cpFrame = frame;
}
/**
 * Set up a tabled choice point as part of a CALL and push it onto the
 * choice point tree.
 */
private void setupTabledCall(int pc, int ac) {
    ConsumerChoicePointFrame frame = new ConsumerChoicePointFrame(this);
    frame.linkTo(cpFrame);
    frame.setContinuation(pc, ac);
    cpFrame = frame;
}
/**
 * Preserve the current interpreter state in the consumer choice point at the top
 * of the choice point tree.
 *
 * @param ccp the consumer choice point in which to record the current trail
 */
public void preserveState(ConsumerChoicePointFrame ccp) {
    ccp.preserveState(trail);
}
/**
 * Restore the interpreter state according to the given consumer choice point.
 *
 * @param ccp the choice point whose saved state becomes the current state;
 *            it also becomes the new top of the choice point tree and
 *            supplies the new interpreter context
 */
public void restoreState(ConsumerChoicePointFrame ccp) {
    cpFrame = ccp;
    ccp.restoreState(this);
    iContext = ccp.context;
}
/**
 * Unify two nodes. Current implementation does not support functors.
 * If either side dereferences to an unbound rule variable it is bound
 * (and trailed) to the other side; otherwise the nodes must compare
 * equal by value.
 *
 * @return true if the unification succeeds
 */
public boolean unify(Node n1, Node n2) {
    Node d1 = (n1 instanceof Node_RuleVariable) ? ((Node_RuleVariable)n1).deref() : n1;
    Node d2 = (n2 instanceof Node_RuleVariable) ? ((Node_RuleVariable)n2).deref() : n2;
    if (d1 instanceof Node_RuleVariable) {
        bind(d1, d2);
        return true;
    }
    if (d2 instanceof Node_RuleVariable) {
        bind(d2, d1);
        return true;
    }
    return d1.sameValueAs(d2);
}
/**
 * Bind a value to a variable, recording the binding in the trail so it
 * can be undone later by {@code unwindTrail}.
 *
 * @param var the dereferenced variable to be bound
 * @param val the value to bind to it
 */
public void bind(Node var, Node val) {
    ((Node_RuleVariable)var).simpleBind(val);
    trail.add(var);
}
/**
 * Unwind the trail to the given low water mark: unbind and discard every
 * trailed variable above the mark, most recent first.
 */
public void unwindTrail(int mark) {
    int top = trail.size();
    while (top > mark) {
        top--;
        Node_RuleVariable var = (Node_RuleVariable) trail.remove(top);
        var.unbind();
    }
}
/**
 * Dereference a node, following any binding trail. Non-variable nodes are
 * returned unchanged.
 */
public static Node deref(Node node) {
    return (node instanceof Node_RuleVariable)
            ? ((Node_RuleVariable)node).deref()
            : node;
}
/**
 * Check if a node value is now grounded, i.e. no longer dereferences to an
 * unbound rule variable.
 */
public static boolean isGrounded(Node node) {
    Node dereffed = deref(node);
    return !(dereffed instanceof Node_RuleVariable);
}
/**
 * Return a dereferenced copy of a triple pattern, or null for a null input.
 */
public static Triple deref(TriplePattern t) {
    return (t == null)
            ? null
            : Triple.create(deref(t.getSubject()), deref(t.getPredicate()), deref(t.getObject()));
}
/**
 * Dereference a node which may be a functor node. For a functor literal
 * whose arguments still contain variables, a copy with fully dereferenced
 * arguments is returned; otherwise the dereferenced node itself.
 *
 * @throws ReasonerException if the node dereferences to an unbound variable,
 *         which should never happen in a triple result
 */
public static Node derefPossFunctor(Node node) {
    if (!(node instanceof Node_RuleVariable)) {
        return node;
    }
    Node dnode = ((Node_RuleVariable)node).deref();
    if (dnode.isVariable()) {
        // Problem with variable in return result "should never happen"
        throw new ReasonerException("Internal error in LP reasoner: variable in triple result");
    }
    if (!Functor.isFunctor(dnode)) {
        return dnode;
    }
    Functor f = (Functor) dnode.getLiteralValue();
    Node[] fargs = f.getArgs();
    // Only copy when at least one functor argument is itself a variable
    boolean needCopy = false;
    for (int i = 0; i < fargs.length && !needCopy; i++) {
        needCopy = fargs[i].isVariable();
    }
    if (!needCopy) {
        return dnode;
    }
    Node[] newArgs = new Node[fargs.length];
    for (int i = 0; i < fargs.length; i++) {
        newArgs[i] = deref(fargs[i]);
    }
    return Functor.makeFunctorNode(f.getName(), newArgs);
}
/**
 * Standardize a node by replacing instances of wildcard ANY by new distinct variables.
 * This is used in constructing the arguments to a top level call from a goal pattern.
 *
 * @param node the node to be standardized
 * @param mappedVars known mappings from input variables to local variables
 */
private Node standardize(Node node, Map<Node, Node> mappedVars) {
    Node dnode = deref(node);
    // Wildcards always get a fresh, unshared variable
    if (node == Node.ANY || node == Node_RuleVariable.WILD) {
        return new Node_RuleVariable(null, 0);
    }
    if (!dnode.isVariable()) {
        return dnode;
    }
    // Named variables are mapped consistently: same input var, same local var
    Node local = mappedVars.get(dnode);
    if (local == null) {
        local = new Node_RuleVariable(null, 0);
        mappedVars.put(dnode, local);
    }
    return local;
}
}
|
googleapis/google-cloud-java | 35,498 | java-meet/proto-google-cloud-meet-v2/src/main/java/com/google/apps/meet/v2/ListRecordingsResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/apps/meet/v2/service.proto
// Protobuf Java Version: 3.25.8
package com.google.apps.meet.v2;
/**
*
*
* <pre>
* Response for ListRecordings method.
* </pre>
*
* Protobuf type {@code google.apps.meet.v2.ListRecordingsResponse}
*/
public final class ListRecordingsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.apps.meet.v2.ListRecordingsResponse)
ListRecordingsResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListRecordingsResponse.newBuilder() to construct.
private ListRecordingsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListRecordingsResponse() {
recordings_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListRecordingsResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.apps.meet.v2.ServiceProto
.internal_static_google_apps_meet_v2_ListRecordingsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.apps.meet.v2.ServiceProto
.internal_static_google_apps_meet_v2_ListRecordingsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.apps.meet.v2.ListRecordingsResponse.class,
com.google.apps.meet.v2.ListRecordingsResponse.Builder.class);
}
public static final int RECORDINGS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.apps.meet.v2.Recording> recordings_;
/**
*
*
* <pre>
* List of recordings in one page.
* </pre>
*
* <code>repeated .google.apps.meet.v2.Recording recordings = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.apps.meet.v2.Recording> getRecordingsList() {
return recordings_;
}
/**
*
*
* <pre>
* List of recordings in one page.
* </pre>
*
* <code>repeated .google.apps.meet.v2.Recording recordings = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.apps.meet.v2.RecordingOrBuilder>
getRecordingsOrBuilderList() {
return recordings_;
}
/**
*
*
* <pre>
* List of recordings in one page.
* </pre>
*
* <code>repeated .google.apps.meet.v2.Recording recordings = 1;</code>
*/
@java.lang.Override
public int getRecordingsCount() {
return recordings_.size();
}
/**
*
*
* <pre>
* List of recordings in one page.
* </pre>
*
* <code>repeated .google.apps.meet.v2.Recording recordings = 1;</code>
*/
@java.lang.Override
public com.google.apps.meet.v2.Recording getRecordings(int index) {
return recordings_.get(index);
}
/**
*
*
* <pre>
* List of recordings in one page.
* </pre>
*
* <code>repeated .google.apps.meet.v2.Recording recordings = 1;</code>
*/
@java.lang.Override
public com.google.apps.meet.v2.RecordingOrBuilder getRecordingsOrBuilder(int index) {
return recordings_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* Token to be circulated back for further List call if current List doesn't
* include all the recordings. Unset if all recordings are returned.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* Token to be circulated back for further List call if current List doesn't
* include all the recordings. Unset if all recordings are returned.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < recordings_.size(); i++) {
output.writeMessage(1, recordings_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < recordings_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, recordings_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.apps.meet.v2.ListRecordingsResponse)) {
return super.equals(obj);
}
com.google.apps.meet.v2.ListRecordingsResponse other =
(com.google.apps.meet.v2.ListRecordingsResponse) obj;
if (!getRecordingsList().equals(other.getRecordingsList())) return false;
if (!getNextPageToken().equals(other.getNextPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getRecordingsCount() > 0) {
hash = (37 * hash) + RECORDINGS_FIELD_NUMBER;
hash = (53 * hash) + getRecordingsList().hashCode();
}
hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getNextPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.apps.meet.v2.ListRecordingsResponse parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.apps.meet.v2.ListRecordingsResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.apps.meet.v2.ListRecordingsResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.apps.meet.v2.ListRecordingsResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.apps.meet.v2.ListRecordingsResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.apps.meet.v2.ListRecordingsResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.apps.meet.v2.ListRecordingsResponse parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.apps.meet.v2.ListRecordingsResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.apps.meet.v2.ListRecordingsResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.apps.meet.v2.ListRecordingsResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.apps.meet.v2.ListRecordingsResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.apps.meet.v2.ListRecordingsResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.apps.meet.v2.ListRecordingsResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Response for ListRecordings method.
* </pre>
*
* Protobuf type {@code google.apps.meet.v2.ListRecordingsResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.apps.meet.v2.ListRecordingsResponse)
com.google.apps.meet.v2.ListRecordingsResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.apps.meet.v2.ServiceProto
.internal_static_google_apps_meet_v2_ListRecordingsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.apps.meet.v2.ServiceProto
.internal_static_google_apps_meet_v2_ListRecordingsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.apps.meet.v2.ListRecordingsResponse.class,
com.google.apps.meet.v2.ListRecordingsResponse.Builder.class);
}
// Construct using com.google.apps.meet.v2.ListRecordingsResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (recordingsBuilder_ == null) {
recordings_ = java.util.Collections.emptyList();
} else {
recordings_ = null;
recordingsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.apps.meet.v2.ServiceProto
.internal_static_google_apps_meet_v2_ListRecordingsResponse_descriptor;
}
@java.lang.Override
public com.google.apps.meet.v2.ListRecordingsResponse getDefaultInstanceForType() {
return com.google.apps.meet.v2.ListRecordingsResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.apps.meet.v2.ListRecordingsResponse build() {
com.google.apps.meet.v2.ListRecordingsResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.apps.meet.v2.ListRecordingsResponse buildPartial() {
com.google.apps.meet.v2.ListRecordingsResponse result =
new com.google.apps.meet.v2.ListRecordingsResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(com.google.apps.meet.v2.ListRecordingsResponse result) {
if (recordingsBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
recordings_ = java.util.Collections.unmodifiableList(recordings_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.recordings_ = recordings_;
} else {
result.recordings_ = recordingsBuilder_.build();
}
}
private void buildPartial0(com.google.apps.meet.v2.ListRecordingsResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.apps.meet.v2.ListRecordingsResponse) {
return mergeFrom((com.google.apps.meet.v2.ListRecordingsResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.apps.meet.v2.ListRecordingsResponse other) {
if (other == com.google.apps.meet.v2.ListRecordingsResponse.getDefaultInstance()) return this;
if (recordingsBuilder_ == null) {
if (!other.recordings_.isEmpty()) {
if (recordings_.isEmpty()) {
recordings_ = other.recordings_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureRecordingsIsMutable();
recordings_.addAll(other.recordings_);
}
onChanged();
}
} else {
if (!other.recordings_.isEmpty()) {
if (recordingsBuilder_.isEmpty()) {
recordingsBuilder_.dispose();
recordingsBuilder_ = null;
recordings_ = other.recordings_;
bitField0_ = (bitField0_ & ~0x00000001);
recordingsBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getRecordingsFieldBuilder()
: null;
} else {
recordingsBuilder_.addAllMessages(other.recordings_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.apps.meet.v2.Recording m =
input.readMessage(
com.google.apps.meet.v2.Recording.parser(), extensionRegistry);
if (recordingsBuilder_ == null) {
ensureRecordingsIsMutable();
recordings_.add(m);
} else {
recordingsBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
    // Bit 0x1 = recordings_ list is privately owned/mutable;
    // bit 0x2 = next_page_token has been explicitly set.
    private int bitField0_;
    private java.util.List<com.google.apps.meet.v2.Recording> recordings_ =
        java.util.Collections.emptyList();
    // Copies the (possibly shared or immutable) backing list into a fresh
    // ArrayList the first time this builder mutates the repeated field, and
    // records private ownership in bitField0_.
    private void ensureRecordingsIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        recordings_ = new java.util.ArrayList<com.google.apps.meet.v2.Recording>(recordings_);
        bitField0_ |= 0x00000001;
      }
    }
    // When non-null, this field builder owns the repeated field's contents and
    // recordings_ is ignored; the two representations are swapped lazily (see
    // getRecordingsFieldBuilder()).
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.apps.meet.v2.Recording,
            com.google.apps.meet.v2.Recording.Builder,
            com.google.apps.meet.v2.RecordingOrBuilder>
        recordingsBuilder_;
    // ---- Accessors for `repeated .google.apps.meet.v2.Recording recordings = 1`.
    // Every accessor below dispatches on recordingsBuilder_: when it is null the
    // plain recordings_ list is authoritative, otherwise the field builder is.
    /**
     *
     *
     * <pre>
     * List of recordings in one page.
     * </pre>
     *
     * <code>repeated .google.apps.meet.v2.Recording recordings = 1;</code>
     */
    public java.util.List<com.google.apps.meet.v2.Recording> getRecordingsList() {
      if (recordingsBuilder_ == null) {
        return java.util.Collections.unmodifiableList(recordings_);
      } else {
        return recordingsBuilder_.getMessageList();
      }
    }
    /**
     *
     *
     * <pre>
     * List of recordings in one page.
     * </pre>
     *
     * <code>repeated .google.apps.meet.v2.Recording recordings = 1;</code>
     */
    public int getRecordingsCount() {
      if (recordingsBuilder_ == null) {
        return recordings_.size();
      } else {
        return recordingsBuilder_.getCount();
      }
    }
    /**
     *
     *
     * <pre>
     * List of recordings in one page.
     * </pre>
     *
     * <code>repeated .google.apps.meet.v2.Recording recordings = 1;</code>
     */
    public com.google.apps.meet.v2.Recording getRecordings(int index) {
      if (recordingsBuilder_ == null) {
        return recordings_.get(index);
      } else {
        return recordingsBuilder_.getMessage(index);
      }
    }
    /**
     *
     *
     * <pre>
     * List of recordings in one page.
     * </pre>
     *
     * <code>repeated .google.apps.meet.v2.Recording recordings = 1;</code>
     */
    public Builder setRecordings(int index, com.google.apps.meet.v2.Recording value) {
      if (recordingsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureRecordingsIsMutable();
        recordings_.set(index, value);
        onChanged();
      } else {
        recordingsBuilder_.setMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * List of recordings in one page.
     * </pre>
     *
     * <code>repeated .google.apps.meet.v2.Recording recordings = 1;</code>
     */
    public Builder setRecordings(
        int index, com.google.apps.meet.v2.Recording.Builder builderForValue) {
      if (recordingsBuilder_ == null) {
        ensureRecordingsIsMutable();
        recordings_.set(index, builderForValue.build());
        onChanged();
      } else {
        recordingsBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * List of recordings in one page.
     * </pre>
     *
     * <code>repeated .google.apps.meet.v2.Recording recordings = 1;</code>
     */
    public Builder addRecordings(com.google.apps.meet.v2.Recording value) {
      if (recordingsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureRecordingsIsMutable();
        recordings_.add(value);
        onChanged();
      } else {
        recordingsBuilder_.addMessage(value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * List of recordings in one page.
     * </pre>
     *
     * <code>repeated .google.apps.meet.v2.Recording recordings = 1;</code>
     */
    public Builder addRecordings(int index, com.google.apps.meet.v2.Recording value) {
      if (recordingsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureRecordingsIsMutable();
        recordings_.add(index, value);
        onChanged();
      } else {
        recordingsBuilder_.addMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * List of recordings in one page.
     * </pre>
     *
     * <code>repeated .google.apps.meet.v2.Recording recordings = 1;</code>
     */
    public Builder addRecordings(com.google.apps.meet.v2.Recording.Builder builderForValue) {
      if (recordingsBuilder_ == null) {
        ensureRecordingsIsMutable();
        recordings_.add(builderForValue.build());
        onChanged();
      } else {
        recordingsBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * List of recordings in one page.
     * </pre>
     *
     * <code>repeated .google.apps.meet.v2.Recording recordings = 1;</code>
     */
    public Builder addRecordings(
        int index, com.google.apps.meet.v2.Recording.Builder builderForValue) {
      if (recordingsBuilder_ == null) {
        ensureRecordingsIsMutable();
        recordings_.add(index, builderForValue.build());
        onChanged();
      } else {
        recordingsBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * List of recordings in one page.
     * </pre>
     *
     * <code>repeated .google.apps.meet.v2.Recording recordings = 1;</code>
     */
    public Builder addAllRecordings(
        java.lang.Iterable<? extends com.google.apps.meet.v2.Recording> values) {
      if (recordingsBuilder_ == null) {
        ensureRecordingsIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, recordings_);
        onChanged();
      } else {
        recordingsBuilder_.addAllMessages(values);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * List of recordings in one page.
     * </pre>
     *
     * <code>repeated .google.apps.meet.v2.Recording recordings = 1;</code>
     */
    public Builder clearRecordings() {
      if (recordingsBuilder_ == null) {
        recordings_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        recordingsBuilder_.clear();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * List of recordings in one page.
     * </pre>
     *
     * <code>repeated .google.apps.meet.v2.Recording recordings = 1;</code>
     */
    public Builder removeRecordings(int index) {
      if (recordingsBuilder_ == null) {
        ensureRecordingsIsMutable();
        recordings_.remove(index);
        onChanged();
      } else {
        recordingsBuilder_.remove(index);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * List of recordings in one page.
     * </pre>
     *
     * <code>repeated .google.apps.meet.v2.Recording recordings = 1;</code>
     */
    public com.google.apps.meet.v2.Recording.Builder getRecordingsBuilder(int index) {
      return getRecordingsFieldBuilder().getBuilder(index);
    }
    /**
     *
     *
     * <pre>
     * List of recordings in one page.
     * </pre>
     *
     * <code>repeated .google.apps.meet.v2.Recording recordings = 1;</code>
     */
    public com.google.apps.meet.v2.RecordingOrBuilder getRecordingsOrBuilder(int index) {
      if (recordingsBuilder_ == null) {
        return recordings_.get(index);
      } else {
        return recordingsBuilder_.getMessageOrBuilder(index);
      }
    }
    /**
     *
     *
     * <pre>
     * List of recordings in one page.
     * </pre>
     *
     * <code>repeated .google.apps.meet.v2.Recording recordings = 1;</code>
     */
    public java.util.List<? extends com.google.apps.meet.v2.RecordingOrBuilder>
        getRecordingsOrBuilderList() {
      if (recordingsBuilder_ != null) {
        return recordingsBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(recordings_);
      }
    }
    /**
     *
     *
     * <pre>
     * List of recordings in one page.
     * </pre>
     *
     * <code>repeated .google.apps.meet.v2.Recording recordings = 1;</code>
     */
    public com.google.apps.meet.v2.Recording.Builder addRecordingsBuilder() {
      return getRecordingsFieldBuilder()
          .addBuilder(com.google.apps.meet.v2.Recording.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * List of recordings in one page.
     * </pre>
     *
     * <code>repeated .google.apps.meet.v2.Recording recordings = 1;</code>
     */
    public com.google.apps.meet.v2.Recording.Builder addRecordingsBuilder(int index) {
      return getRecordingsFieldBuilder()
          .addBuilder(index, com.google.apps.meet.v2.Recording.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * List of recordings in one page.
     * </pre>
     *
     * <code>repeated .google.apps.meet.v2.Recording recordings = 1;</code>
     */
    public java.util.List<com.google.apps.meet.v2.Recording.Builder> getRecordingsBuilderList() {
      return getRecordingsFieldBuilder().getBuilderList();
    }
    // Lazily switches the repeated field from list-backed to builder-backed
    // representation: the current recordings_ contents seed the field builder
    // and the list reference is dropped.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.apps.meet.v2.Recording,
            com.google.apps.meet.v2.Recording.Builder,
            com.google.apps.meet.v2.RecordingOrBuilder>
        getRecordingsFieldBuilder() {
      if (recordingsBuilder_ == null) {
        recordingsBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.apps.meet.v2.Recording,
                com.google.apps.meet.v2.Recording.Builder,
                com.google.apps.meet.v2.RecordingOrBuilder>(
                recordings_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
        recordings_ = null;
      }
      return recordingsBuilder_;
    }
    // ---- Accessors for `string next_page_token = 2`.
    // nextPageToken_ holds either a String or a ByteString; getters lazily
    // convert and cache the requested representation (standard protobuf-gen
    // string field idiom).
    private java.lang.Object nextPageToken_ = "";
    /**
     *
     *
     * <pre>
     * Token to be circulated back for further List call if current List doesn't
     * include all the recordings. Unset if all recordings are returned.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The nextPageToken.
     */
    public java.lang.String getNextPageToken() {
      java.lang.Object ref = nextPageToken_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        nextPageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Token to be circulated back for further List call if current List doesn't
     * include all the recordings. Unset if all recordings are returned.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The bytes for nextPageToken.
     */
    public com.google.protobuf.ByteString getNextPageTokenBytes() {
      java.lang.Object ref = nextPageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        nextPageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Token to be circulated back for further List call if current List doesn't
     * include all the recordings. Unset if all recordings are returned.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Token to be circulated back for further List call if current List doesn't
     * include all the recordings. Unset if all recordings are returned.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearNextPageToken() {
      nextPageToken_ = getDefaultInstance().getNextPageToken();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Token to be circulated back for further List call if current List doesn't
     * include all the recordings. Unset if all recordings are returned.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The bytes for nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      // Rejects invalid UTF-8 eagerly rather than at serialization time.
      checkByteStringIsUtf8(value);
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    // Final overrides delegating unknown-field handling to the base builder;
    // generated to pin down the return type as this Builder.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.apps.meet.v2.ListRecordingsResponse)
}
// @@protoc_insertion_point(class_scope:google.apps.meet.v2.ListRecordingsResponse)
  // Singleton default instance shared by all empty ListRecordingsResponse
  // messages; created once at class-load time.
  private static final com.google.apps.meet.v2.ListRecordingsResponse DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.apps.meet.v2.ListRecordingsResponse();
  }
  public static com.google.apps.meet.v2.ListRecordingsResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Stream parser for ListRecordingsResponse: parses via a fresh builder and
  // attaches the partially-built message to any parse exception so callers can
  // recover the fields read before the failure.
  private static final com.google.protobuf.Parser<ListRecordingsResponse> PARSER =
      new com.google.protobuf.AbstractParser<ListRecordingsResponse>() {
        @java.lang.Override
        public ListRecordingsResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<ListRecordingsResponse> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<ListRecordingsResponse> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.apps.meet.v2.ListRecordingsResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/royale-compiler | 35,590 | compiler/src/main/java/org/apache/royale/abc/models/TreeModelEncoder.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.royale.abc.models;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import org.apache.royale.abc.semantics.ExceptionInfo;
import org.apache.royale.abc.semantics.Instruction;
import org.apache.royale.abc.semantics.MethodBodyInfo;
import org.apache.royale.abc.visitors.IDiagnosticsVisitor;
import org.apache.royale.abc.visitors.NilDiagnosticsVisitor;
import org.apache.royale.abc.graph.IFlowgraph;
import org.apache.royale.abc.graph.IBasicBlock;
import org.apache.royale.abc.graph.algorithms.DominatorTree.Multimap;
/**
* The TreeModelEncoder translates the stack-oriented semantics
* of ABC bytecode into a tree-oriented model.
*/
public class TreeModelEncoder<T>
{
    /**
     * Construct a new TreeModelEncoder and immediately run the full analysis,
     * driving the given visitor over the method body.
     * @param mbi - the method body of interest.
     * @param visitor - the TreeModelVisitor that's interested.
     * @param diagnosticsVisitor - a handler for any diagnostics generated.
     */
    public TreeModelEncoder( MethodBodyInfo mbi, TreeModelVisitor<T> visitor, IDiagnosticsVisitor diagnosticsVisitor)
    {
        this.mbi = mbi;
        this.visitor = visitor;
        this.diagnosticsVisitor = diagnosticsVisitor;
        this.visitor.visit(this);
        // These three passes must run in exactly this order: frame setup
        // discovers the frame elements and which blocks assign them, phi
        // placement inserts dataflow merge points at dominance frontiers,
        // and visitFrames() finally walks the instructions with the visitor.
        setUpFrames();
        placePhiNodes();
        visitFrames();
        this.visitor.visitEnd();
    }
    /**
     * The Method of interest.
     */
    private final MethodBodyInfo mbi;
    /**
     * The model visitor we're driving.
     */
    private final TreeModelVisitor<T> visitor;
    /**
     * Receiver of any diagnostic output.
     */
    private final IDiagnosticsVisitor diagnosticsVisitor;
    /**
     * The Block currently under examination.
     */
    IBasicBlock currentBlock;
    /**
     * The FrameModelEncoder that's driving this TreeModelEncoder.
     * Non-null only while visitFrames() is traversing the graph.
     */
    private FrameModelEncoder encoder;
    /**
     * The symbolic representations of this method's locals.
     */
    ArrayList<Object> localSymbolicReferences = new ArrayList<Object>();
    /**
     * The symbolic representations of this method's value stack slots.
     * Note: There is only one symbol for each element of the stack itself,
     * not a symbol for each value the stack will hold over its lifetime.
     */
    ArrayList<Object> valueSymbolicReferences = new ArrayList<Object>();
    /**
     * The symbolic representations of this method's scope stack slots.
     * Note: There is only one symbol for each element of the stack itself,
     * not a symbol for each scope the stack will hold over its lifetime.
     */
    ArrayList<Object> scopeSymbolicReferences = new ArrayList<Object>();
    /**
     * Blocks modifying frame elements, keyed by
     * the symbolic representation of the frame element.
     * This is the per-variable assignment map A(v) consumed by the
     * phi-placement worklist in placePhiNodes(); it is populated by
     * modifyFrameElement() during the frame-setup pass.
     */
    private Map<Object,Set<IBasicBlock>> a = new HashMap<Object,Set<IBasicBlock>>();
    /**
     * Get the MethodBodyInfo of the method being analyzed.
     * @return the MethodBodyInfo being analyzed.
     */
    public MethodBodyInfo getMethodBodyInfo()
    {
        return this.mbi;
    }
    /**
     * Get the method's flowgraph.
     * @return the IFlowGraph of the method being analyzed.
     */
    public IFlowgraph getCfg()
    {
        return this.mbi.getCfg();
    }
    /**
     * Get the block currently being analyzed.
     * @return the block currently being analyzed.
     */
    public IBasicBlock getCurrentBlock()
    {
        return this.currentBlock;
    }
    /**
     * Get the index of the instruction currently being analyzed.
     * Note: only valid while visitFrames() is running; this.encoder is null
     * outside that window, so calling this at any other time throws NPE.
     * @return the intra-Block index of the instruction currently being analyzed.
     */
    public int getInstructionIndex()
    {
        return this.encoder.getInstructionIndex();
    }
    /**
     * Do a preliminary pass over the method to set up
     * the frames' extents and live-out sets.
     * Diagnostics are deliberately discarded in this pass (NilDiagnosticsVisitor);
     * the real diagnosticsVisitor is only used in the final pass.
     */
    private void setUpFrames()
    {
        mbi.getCfg().traverseGraph(
            new FrameModelEncoder(
                this.mbi,
                new FrameSetupVisitor(),
                new NilDiagnosticsVisitor()
            )
        );
    }
    /**
     * Place phi-nodes at blocks' dominance frontiers
     * to model dataflow merges in frame state.
     * Processes every frame-element symbol (locals, then scope slots, then
     * value-stack slots) through the per-element worklist overload.
     */
    private void placePhiNodes()
    {
        int iterCount = 0;
        Multimap<IBasicBlock> df = getCfg().getDominatorTree().getDominanceFrontiers();
        // hasAlready and work are shared across all elements; the monotonically
        // increasing iteration counter keeps one element's marks from being
        // mistaken for another's, so the maps never need clearing.
        Map<IBasicBlock, Integer> hasAlready = new HashMap<IBasicBlock,Integer>();
        Map<IBasicBlock, Integer> work = new HashMap<IBasicBlock, Integer>();
        for ( Object local: localSymbolicReferences )
            iterCount = placePhiNodes(iterCount, local, df, a, hasAlready, work);
        for ( Object scope: scopeSymbolicReferences )
            iterCount = placePhiNodes(iterCount, scope, df, a, hasAlready, work);
        for ( Object value: valueSymbolicReferences )
            iterCount = placePhiNodes(iterCount, value, df, a, hasAlready, work);
    }
    /**
     * Place phi-nodes for one particular element of the frame.
     * This is the classic dominance-frontier worklist: starting from the
     * blocks that assign the element, a phi node is placed at each block in
     * the iterated dominance frontier of those assignments.
     * @param initialIteration - the initial value of the iteration counter.
     * @param v - the frame element being analyzed.
     * @param df - the method's dominance frontiers.
     * @param a - the map of Blocks that assign to various frame elements.
     * @param hasAlready - a map of Block-to-iteration-count values, used
     * to note a Block that already has a phi node for the element.
     * @param work - a map of Block-to-iteration-count values, used
     * to find Blocks that need further analysis.
     * @return the updated iteration counter, to be threaded into the next call.
     */
    private int placePhiNodes(
        final int initialIteration,
        final Object v,
        Multimap<IBasicBlock> df,
        Map<Object,Set<IBasicBlock>> a,
        Map<IBasicBlock,Integer> hasAlready,
        Map<IBasicBlock,Integer> work
        )
    {
        // An element never assigned needs no phi nodes (and no new iteration).
        if ( ! a.containsKey(v) )
            return initialIteration;
        int iterCount = initialIteration + 1;
        // Seed the worklist with every block that assigns v.
        HashSet<IBasicBlock> w = new HashSet<IBasicBlock>();
        for ( IBasicBlock x: a.get(v) )
        {
            work.put(x, iterCount);
            w.add(x);
        }
        while ( ! w.isEmpty() )
        {
            Iterator<IBasicBlock> it = w.iterator();
            IBasicBlock x = it.next();
            it.remove();
            // Every block in x's dominance frontier may see a merge of v.
            for ( IBasicBlock y: df.get(x) )
            {
                // A stale (smaller) counter means the mark belongs to a
                // previous element, so y has not been handled for v yet.
                if ( !hasAlready.containsKey(y) || hasAlready.get(y).intValue() < iterCount )
                {
                    placePhiNode(y,v);
                    hasAlready.put(y, iterCount);
                    if ( !work.containsKey(y) || work.get(y).intValue() < iterCount )
                    {
                        // A phi node is itself an assignment; propagate further.
                        work.put(y, iterCount);
                        w.add(y);
                    }
                }
            }
        }
        return iterCount;
    }
/**
* Place a phi-node for a particular (Block,frame element) tuple.
* @param target - the Block.
* @param frameKey - the variable.
*/
void placePhiNode(IBasicBlock target, Object frameKey)
{
Frame targetFrame = getFrame(target);
// Figure out which value this is.
int idx = this.localSymbolicReferences.indexOf(frameKey);
if ( idx != -1 )
{
if ( needsInitializer(targetFrame.locals, idx) )
setFrameElement(targetFrame.locals, idx, visitor.addMergePoint(currentBlock));
}
else
{
idx = this.valueSymbolicReferences.indexOf(frameKey);
if ( idx != -1 )
{
if ( needsInitializer(targetFrame.values, idx) )
setFrameElement(targetFrame.values, idx, visitor.addMergePoint(currentBlock));
}
else
{
idx = scopeSymbolicReferences.indexOf(frameKey);
assert idx != -1;
if ( needsInitializer(targetFrame.scopes, idx) )
setFrameElement(targetFrame.scopes, idx, visitor.addMergePoint(currentBlock));
}
}
}
/**
* Determine if a particular frame element needs an initializer.
* @param elements - the frame elements (locals, scope stack, or value stack).
* @param idx - the index of the frame element of interest.
* @return true if the given index has no initializer object.
*/
private boolean needsInitializer(ArrayList<? extends Object> elements, int idx)
{
return ( elements.size() <= idx || elements.get(idx) == null );
}
/**
* Initialize a frame element with an anonymous marker object if necessary.
* @param elements - the frame elements (locals, scope stack, or value stack).
* @param idx - the index of the frame element of interest.
*/
private void touchFrameElement(ArrayList<Object> elements, int idx)
{
if ( needsInitializer(elements, idx) )
setFrameElement(elements, idx, new Object());
}
/**
* Set a frame element.
* @param elements - the frame elements (locals, scope stack, or value stack).
* @param idx - the index of the frame element of interest.
* @param value - the value to set.
*/
private <E> void setFrameElement(ArrayList<E> elements, int idx, final E value)
{
while ( elements.size() <= idx )
elements.add(null);
elements.set(idx, value);
}
/**
* Modify a frame element; touch it and add the modifying Block to its
* set of assigning blocks.
* @param elements - the frame elements (locals, scope stack, or value stack).
* @param idx - the index of the frame element of interest.
* @param b - the Block that modifies the element.
*/
private void modifyFrameElement(ArrayList<Object> elements, int idx, IBasicBlock b)
{
touchFrameElement(elements,idx);
if ( ! a.containsKey(elements.get(idx)) )
a.put(elements.get(idx), new HashSet<IBasicBlock>());
a.get(elements.get(idx)).add(b);
}
    /**
     * Visit each block, its associated Frame, and the
     * instructions in each block, showing each in turn
     * to the TreeModelVisitor and recording its results
     * in the Frame.
     */
    private void visitFrames()
    {
        ArrayList<T> parameters = new ArrayList<T>();
        // Load the initial frame's locals with parameter information.
        for ( int i = 0; i < this.mbi.getMethodInfo().getParamCount(); i++ )
            parameters.add(visitor.translateParameter(i));
        Frame startFrame = getFrame(this.mbi.getCfg().getStartBlock());
        startFrame.locals.addAll(parameters);
        // Initialize exception-handling targets with the exception variable,
        // and set up a dataflow merge node for each local that may be read
        // in the block.
        // TODO: This encoder makes pessimistic assumptions about dataflow
        // into the locals, i.e., it assumes every exception handler is
        // globally reachable.
        for ( ExceptionInfo ex: this.mbi.getExceptions() )
        {
            IBasicBlock catchTarget = this.mbi.getCfg().getBlock(ex.getTarget());
            Frame catchFrame = getFrame(catchTarget);
            // A catch block enters with exactly the thrown value on the stack.
            setFrameElement(catchFrame.values, 0, visitor.translateExceptionVariable(ex.getCatchVar(), ex.getExceptionType()));
            // Parameter locals get merge nodes pre-seeded with the parameter
            // values; propagateLocalToCatchBlocks() adds later assignments.
            for ( int i = 0; i < parameters.size(); i++ )
            {
                catchFrame.locals.add(visitor.addMergePoint(catchTarget));
                @SuppressWarnings("unchecked")
                TreeModelVisitor.IMergePoint<T> mergeNode = (TreeModelVisitor.IMergePoint<T>)catchFrame.locals.get(i);
                mergeNode.addValue(parameters.get(i));
            }
            // Initialize the other locals' merge nodes, which are initially empty.
            for ( int i = parameters.size(); i < localSymbolicReferences.size(); i++ )
            {
                catchFrame.locals.add(visitor.addMergePoint(catchTarget));
            }
        }
        // Final pass: this.encoder stays set for the duration of the traversal
        // so getInstructionIndex() works; ModelDrivingVisitor.visitEnd() clears it.
        this.encoder = new FrameModelEncoder( this.mbi, new ModelDrivingVisitor(this.visitor), this.diagnosticsVisitor );
        this.mbi.getCfg().traverseGraph( this.encoder );
    }
    /**
     * A representation of an AVM "frame," the local variables, scope stack slots,
     * and value stack slots used in a particular Block of the method's flowgraph.
     * Elements hold the visitor's translation type T. Stack tops live at the
     * high-index end of the backing ArrayLists.
     */
    public class Frame
    {
        /**
         * The local variables used in the Block.
         */
        public final ArrayList<T> locals = new ArrayList<T>();
        /**
         * The scope stack slots used in the Block.
         */
        public final ArrayList<T> scopes = new ArrayList<T>();
        /**
         * The value stack slots used in the Block.
         */
        public final ArrayList<T> values = new ArrayList<T>();
        /**
         * @return the value on top of the value stack.
         */
        public T tos()
        {
            // valueStackDepth() is values.size() - 1, the top slot's index.
            return this.values.get(valueStackDepth());
        }
        /**
         * Remove a value from the value stack.
         * Visitors must modify the Frame, but should
         * maintain their own modifiable view of it.
         */
        private T popValue()
        {
            // popElement is an outer-class helper defined elsewhere in this file.
            return popElement(this.values);
        }
        /**
         * Push a value onto the value stack.
         * Visitors must modify the Frame, but should
         * maintain their own modifiable view of it.
         * @return the pushed value, for convenience.
         */
        private T pushValue(T value)
        {
            this.values.add(value);
            return value;
        }
        /**
         * Push a value onto the scope stack.
         * Visitors must modify the Frame, but should
         * maintain their own modifiable view of it.
         * @return the pushed scope, for convenience.
         */
        private T pushScope(T scope)
        {
            this.scopes.add(scope);
            return scope;
        }
        /**
         * Pop a value off the scope stack.
         * Visitors must modify the Frame, but should
         * maintain their own modifiable view of it.
         */
        private T popScope()
        {
            return popElement(this.scopes);
        }
        /**
         * Get a local variable.
         * Visitors must modify the Frame, but should
         * maintain their own modifiable view of it.
         */
        private T getlocal(int idx)
        {
            // adjustSize is an outer-class helper; grows the list so the
            // requested slot exists before reading it.
            adjustSize(this.locals, idx+1);
            return this.locals.get(idx);
        }
        /**
         * Set a local variable.
         * Visitors must modify the Frame, but should
         * maintain their own modifiable view of it.
         */
        private T setlocal(int idx, T value)
        {
            adjustSize(this.locals, idx+1);
            this.locals.set(idx, value);
            // Keep the (pessimistic) catch-block merge nodes up to date.
            propagateLocalToCatchBlocks(idx,value);
            return value;
        }
        /**
         * Get a scope object.
         * Visitors must modify the Frame, but should
         * maintain their own modifiable view of it.
         */
        private T getscopeobject(int idx)
        {
            adjustSize(this.scopes, idx+1);
            return this.scopes.get(idx);
        }
        /**
         * Verify that the value stack contains
         * at least the required number of live values.
         * @param required - the number of values required.
         * @return true if the value stack has the required number of values.
         */
        private boolean verifyStackDepth(int required)
        {
            return this.values.size() >= required;
        }
        /**
         * @return the index of the top of the value stack
         * (values.size() - 1; -1 when the stack is empty).
         */
        private int valueStackDepth()
        {
            return this.values.size() - 1;
        }
        /**
         * Verify that the scope stack contains
         * at least the required number of live values.
         * @param required - the number of values required.
         * @return true if the scope stack has the required number of values.
         */
        private boolean verifyScopeDepth(int required)
        {
            return this.scopes.size() >= required;
        }
        /**
         * @return the index of the top of the scope stack
         * (scopes.size() - 1; -1 when the stack is empty).
         */
        @SuppressWarnings("unused")
        private int scopeStackDepth()
        {
            return this.scopes.size() - 1;
        }
    }
    /**
     * Propagate a local variable's value to all catch blocks.
     * TODO: this should only propagate to catch blocks reachable
     * from the current block; this code errs on the side of pessimism.
     * NOTE(review): assumes visitFrames() has already populated every catch
     * frame's locals list up to localSymbolicReferences.size(); an idx beyond
     * that would throw IndexOutOfBoundsException — confirm against callers.
     * @param idx the index of the local.
     * @param value the value to propagate.
     */
    void propagateLocalToCatchBlocks(int idx, T value)
    {
        for ( ExceptionInfo ex: mbi.getExceptions() )
        {
            IBasicBlock catchTarget = mbi.getCfg().getBlock(ex.getTarget());
            Frame catchFrame = getFrame(catchTarget);
            if ( catchFrame.locals.get(idx) instanceof TreeModelVisitor.IMergePoint )
            {
                @SuppressWarnings("unchecked")
                TreeModelVisitor.IMergePoint<T> mergeNode = (TreeModelVisitor.IMergePoint<T>)catchFrame.locals.get(idx);
                mergeNode.addValue(value);
            }
            // else it's an exception variable.
        }
    }
/**
* Active Frames, keyed by their generating Block.
*/
private Map<IBasicBlock,Frame> framesByBlock = new HashMap<IBasicBlock,Frame>();
/**
* Get the Frame that corresponds to a Block.
* @param b - the Block of interest.
* @return the Frame mapped to the Block.
*/
public Frame getFrame(IBasicBlock b)
{
if ( ! this.framesByBlock.containsKey(b) )
this.framesByBlock.put(b, new Frame());
return this.framesByBlock.get(b);
}
    /**
     * The FrameSetupVisitor creates Frame objects for the Blocks,
     * and drives the modifyFrameElement calls that initialize
     * the map of Blocks that assign values to specific frame elements.
     * It tracks only stack depths per block, never actual values; the
     * second pass (ModelDrivingVisitor) does the real translation.
     */
    private class FrameSetupVisitor implements FrameModelVisitor<T>
    {
        // Block being visited; non-null only between visitBlock/visitEndBlock.
        IBasicBlock currentBlock = null;
        // Stack-depth tracker for currentBlock.
        BlockState blockState = null;
        public void visit(FrameModelEncoder encoder)
        {
        }
        public void visitEnd()
        {
        }
        public T noFrameEffect(Instruction i)
        {
            return null;
        }
        public T consumeValue(Instruction i, int count)
        {
            // The model driving visitor detects stack underflow.
            blockState.stackDepth = Math.max(blockState.stackDepth - count, 0);
            return null;
        }
        /**
         * Handle an instruction that pushes a value onto the stack.
         * Records this block as an assigner of the touched stack slot.
         * @param i - the Instruction.
         */
        public T produceValue(Instruction i)
        {
            modifyFrameElement(valueSymbolicReferences, blockState.stackDepth, currentBlock);
            blockState.stackDepth++;
            return null;
        }
        public T consumeAndProduceValue(Instruction i, int consumeCount)
        {
            consumeValue(null, consumeCount);
            produceValue(null);
            return null;
        }
        public T branch(Instruction i, IBasicBlock target)
        {
            return null;
        }
        public T multiwayBranch(Instruction i, Collection<IBasicBlock> targets)
        {
            return null;
        }
        public T getlocal(Instruction i, int idx)
        {
            // A read only needs the slot to exist, not an assignment record.
            touchFrameElement(localSymbolicReferences, idx);
            return null;
        }
        public T setlocal(Instruction i, int idx)
        {
            modifyFrameElement(localSymbolicReferences, idx, currentBlock);
            return null;
        }
        public void modifyLocal(Instruction i, int idx)
        {
            modifyFrameElement(localSymbolicReferences, idx, currentBlock);
        }
        public T moveValueToScopeStack(Instruction i)
        {
            consumeValue(null, 1);
            modifyFrameElement(scopeSymbolicReferences, blockState.scopeDepth, currentBlock);
            blockState.scopeDepth++;
            return null;
        }
        public T popscope(Instruction i)
        {
            // Clamp at zero; underflow is diagnosed in the second pass.
            blockState.scopeDepth = Math.max(blockState.scopeDepth - 1, 0);
            return null;
        }
        public T getScopeobject(Instruction i, int idx)
        {
            touchFrameElement(scopeSymbolicReferences, idx);
            return null;
        }
        public T hasnext2(Instruction i)
        {
            // hasnext2 writes both of its local-register operands.
            modifyFrameElement(localSymbolicReferences, (Integer)i.getOperand(0), currentBlock);
            modifyFrameElement(localSymbolicReferences, (Integer)i.getOperand(1), currentBlock);
            return null;
        }
        public T dup(Instruction i)
        {
            produceValue(null);
            return null;
        }
        public T swap(Instruction i)
        {
            // No effect on the frame setup.
            return null;
        }
        // Begin a block visit; returns false to skip blocks already seen.
        public boolean visitBlock(IBasicBlock b)
        {
            if ( visited.add(b) )
            {
                assert this.currentBlock == null;
                this.currentBlock = b;
                this.blockState = getBlockState(b);
                return true;
            }
            else
            {
                return false;
            }
        }
        /**
         * Blocks visited so far.
         */
        private Set<IBasicBlock> visited = new HashSet<IBasicBlock>();
        /**
         * End the visit to a block; ensure that all its
         * frame elements are in place.
         */
        public void visitEndBlock(IBasicBlock b)
        {
            for ( int i = 0; i < this.blockState.stackDepth; i++ )
                touchFrameElement(valueSymbolicReferences, i);
            for ( int i = 0; i < this.blockState.scopeDepth; i++ )
                touchFrameElement(scopeSymbolicReferences, i);
            this.currentBlock = null;
            this.blockState = null;
        }
        /**
         * Propagate value/scope stack depth information
         * from one Block to its target block.
         */
        public void visitEdge(IBasicBlock from, IBasicBlock target)
        {
            assert(from == this.currentBlock);
            BlockState targetState = getBlockState(target);
            targetState.stackDepth = blockState.stackDepth;
            targetState.scopeDepth = blockState.scopeDepth;
        }
        /**
         * Get the scope/value stack depth tracker for a Block.
         * @param b - the Block of interest.
         * @return the BlockState tracker mapped to it.
         */
        private BlockState getBlockState(IBasicBlock b)
        {
            if ( ! this.statesByBlock.containsKey(b) )
                this.statesByBlock.put(b, new BlockState());
            return this.statesByBlock.get(b);
        }
        private Map<IBasicBlock, BlockState> statesByBlock = new HashMap<IBasicBlock,BlockState>();
    }
    /**
     * Per-block record of the value/scope stack depths tracked
     * by the frame-setup pass.
     */
    private static class BlockState
    {
        // Current value-stack depth within the block.
        int stackDepth = 0;
        // Current scope-stack depth within the block.
        int scopeDepth = 0;
    }
/**
* The ModelDrivingVisitor makes a second pass over the method's
* control flow graph, after the Frames have been initialized and
* dataflow merge points placed, and drives the visitor's traversal
* of the method.
*/
private class ModelDrivingVisitor implements FrameModelVisitor<T>
{
        /**
         * Construct the second-pass visitor.
         * @param visitor - the TreeModelVisitor receiving translated trees.
         */
        ModelDrivingVisitor(TreeModelVisitor<T> visitor)
        {
            this.visitor = visitor;
        }
        final TreeModelVisitor<T> visitor;
        public void visit(FrameModelEncoder encoder)
        {
            // Must be driven by the encoder visitFrames() installed.
            assert(encoder == TreeModelEncoder.this.encoder);
        }
        public void visitEnd()
        {
            // Traversal finished; getInstructionIndex() is invalid from here on.
            TreeModelEncoder.this.encoder = null;
        }
        /**
         * The Frame that corresponds to the block being visited.
         */
        Frame currentFrame = null;
        @Override
        public T noFrameEffect(Instruction i)
        {
            // noOperands() is an outer-class helper defined elsewhere in this file.
            return visitor.translate(i, noOperands());
        }
/**
 * Handle an instruction that consumes value stack elements.
 * @param i - the Instruction.
 * @param count - the number of value stack elements consumed.
 */
public T consumeValue(Instruction i, int count)
{
    //  Guard clause: report underflow when the stack is too shallow.
    if ( !this.currentFrame.verifyStackDepth(count) )
        return visitor.valueStackUnderflow(i, count);
    ArrayList<T> popped = new ArrayList<T>(count);
    for ( int n = 0; n < count; n++ )
        popped.add(this.currentFrame.popValue());
    return visitor.translate(i, popped);
}
/**
 * Handle an instruction that pushes a value onto the stack.
 * @param i - the Instruction.
 */
public T produceValue(Instruction i)
{
    //  Translate with no operands and push the result as the new TOS.
    return this.currentFrame.pushValue(visitor.translate(i, noOperands()));
}
/**
 * Handle an instruction that consumes value stack elements,
 * and then pushes a new value onto the stack.
 * @param i - the Instruction.
 * @param consumeCount - the number of value stack elements consumed.
 */
public T consumeAndProduceValue(Instruction i, int consumeCount)
{
    //  Guard clause: report underflow when the stack is too shallow.
    if ( !this.currentFrame.verifyStackDepth(consumeCount) )
        return visitor.valueStackUnderflow(i, consumeCount);
    ArrayList<T> popped = new ArrayList<T>(consumeCount);
    for ( int n = 0; n < consumeCount; n++ )
        popped.add(this.currentFrame.popValue());
    //  The translated result becomes the new top of stack.
    return this.currentFrame.pushValue(visitor.translate(i, popped));
}
/**
 * Handle a branch instruction.
 * @param i - the Instruction.
 * @param target - the Instruction's target. Instructions with
 * fall-through semantics also implicitly target the next Block.
 */
public T branch(Instruction i, IBasicBlock target)
{
    return visitor.translateBranch(i, singleOperand(target));
}
/**
 * Handle a multibranch instruction (e.g. a jump table).
 * @param i - the Instruction.
 * @param targets - the Instruction's targets.
 */
public T multiwayBranch(Instruction i, Collection<IBasicBlock> targets)
{
    return visitor.translateBranch(i, targets);
}
/**
 * Get a local variable, leaving its value on the stack.
 * @param i - the Instruction.
 * @param idx - the variable's index.
 */
public T getlocal(Instruction i, int idx)
{
    //  Grow the locals list on demand so slot idx exists.
    adjustSize(currentFrame.locals, idx + 1);
    T result = visitor.translate(i, singleOperand(this.currentFrame.getlocal(idx)));
    this.currentFrame.pushValue(result);
    return result;
}
/**
 * Set a local variable, consuming a value from the stack.
 * @param i - the Instruction.
 * @param idx - the variable's index.
 */
public T setlocal(Instruction i, int idx)
{
    adjustSize(currentFrame.locals, idx + 1);
    if ( this.currentFrame.verifyStackDepth(1) )
    {
        T result = visitor.translate(i, singleOperand(this.currentFrame.popValue()));
        return this.currentFrame.setlocal(idx, result);
    }
    else
    {
        //  Underflow: store the translated underflow marker in the slot.
        return this.currentFrame.setlocal(idx, visitor.valueStackUnderflow(i, 1));
    }
}
/**
 * Modify a local variable.
 * @param i - the Instruction.
 * @param idx - the variable's index (unused here; the modification
 * is opaque to the model and translates with no operands).
 */
public void modifyLocal(Instruction i, int idx)
{
    visitor.translate(i, noOperands());
}
/**
 * Pop a value off the value stack and push it on the scope stack.
 * @param i - the Instruction (an OP_pushscope).
 */
public T moveValueToScopeStack(Instruction i)
{
    if ( this.currentFrame.verifyStackDepth(1) )
        return this.currentFrame.pushScope(visitor.translate(i, singleOperand(this.currentFrame.popValue())));
    else
        return visitor.valueStackUnderflow(i, 1);
}
/**
 * Pop a value off the scope stack.
 * @param i - the Instruction (an OP_popscope).
 */
public T popscope(Instruction i)
{
    if ( this.currentFrame.verifyScopeDepth(1) )
        return visitor.translate(i, singleOperand(this.currentFrame.popScope()));
    else
        return visitor.scopeStackUnderflow(i, 1);
}
/**
 * Get a particular scope stack element, pushing its translation
 * onto the value stack.
 * @param i - the Instruction.
 * @param idx - the index of the scope element.
 */
public T getScopeobject(Instruction i, int idx)
{
    if ( this.currentFrame.verifyScopeDepth(idx+1) )
        return this.currentFrame.pushValue(visitor.translate(i, singleOperand(this.currentFrame.scopes.get(idx))));
    else
        //  Report the depth actually required (idx+1), matching the
        //  verifyScopeDepth check above; previously this reported 1,
        //  unlike every other underflow path in this class.
        return visitor.scopeStackUnderflow(i, idx+1);
}
/**
 * Handle the special-case hasnext2 instruction.
 * @param i - the Instruction.
 * NOTE(review): unimplemented — always returns null. Confirm that
 * downstream visitors tolerate a null translation for hasnext2.
 */
public T hasnext2(Instruction i)
{
    return null;
}
/**
 * Handle the stack-maintenance dup instruction.
 * @param i - the Instruction.
 */
public T dup(Instruction i)
{
    //  Guard clause: dup needs at least one value on the stack.
    if ( !this.currentFrame.verifyStackDepth(1) )
        return visitor.valueStackUnderflow(i, 1);
    //  Translate against the current TOS and push the duplicate.
    return this.currentFrame.pushValue(visitor.translate(i, singleOperand(this.currentFrame.tos())));
}
/**
 * Handle the stack-maintenance swap instruction.
 * @param i - the Instruction.
 */
public T swap(Instruction i)
{
    if ( this.currentFrame.verifyStackDepth(2) )
    {
        int stackDepth = this.currentFrame.valueStackDepth();
        T temp = visitor.translate(i, singleOperand(this.currentFrame.tos()));
        //  NOTE(review): if valueStackDepth() returns the element count,
        //  set(stackDepth, ...) indexes one slot past the last element and
        //  throws IndexOutOfBoundsException; the top two slots would be
        //  stackDepth-1 and stackDepth-2. Confirm valueStackDepth()'s
        //  semantics before relying on this path.
        this.currentFrame.values.set( stackDepth, this.currentFrame.values.get(stackDepth-1) );
        this.currentFrame.values.set( stackDepth - 1, temp);
        return this.currentFrame.tos();
    }
    else
    {
        return visitor.valueStackUnderflow(i, 2);
    }
}
@Override
public boolean visitBlock(IBasicBlock b)
{
    //  Install the block's frame before consulting the client visitor.
    this.currentFrame = getFrame(b);
    currentBlock = b;
    if ( !visitor.visitBlock(b) )
    {
        //  Client declined the block; tear the cursors back down.
        this.currentFrame = null;
        currentBlock = null;
        return false;
    }
    return true;
}
@Override
public void visitEndBlock(IBasicBlock b)
{
    //  Notify the client, then clear the per-block cursors.
    visitor.visitEnd(b);
    this.currentFrame = null;
    currentBlock = null;
}
@Override
public void visitEdge(IBasicBlock from, IBasicBlock target)
{
    assert getFrame(from) == this.currentFrame;
    //  Feed every local, value-stack and scope-stack element of the
    //  source frame into the corresponding slot of the target frame
    //  (or into its merge point, if one is installed there).
    Frame targetFrame = getFrame(target);
    for ( int i = 0; i < this.currentFrame.locals.size(); i++ )
        addInitializer(i, targetFrame.locals, this.currentFrame.getlocal(i));
    for ( int i = 0; i < this.currentFrame.values.size(); i++ )
        addInitializer(i, targetFrame.values, this.currentFrame.values.get(i));
    for ( int i = 0; i < this.currentFrame.scopes.size(); i++ )
        addInitializer(i, targetFrame.scopes, this.currentFrame.getscopeobject(i));
}
/**
 * Record 'value' as an initializer of slot i in the given frame
 * element list, growing the list, merging into an existing merge
 * point, or filling an empty slot as appropriate.
 * @param i - the slot index.
 * @param target - the target frame's element list.
 * @param value - the incoming value from the source frame.
 */
private void addInitializer(final int i, ArrayList<T> target, T value)
{
    if ( target.size() <= i )
    {
        //  Pad to length i, then append so 'value' lands at index i.
        adjustSize(target, i);
        target.add(value);
    }
    else if ( target.get(i) instanceof TreeModelVisitor.IMergePoint<?> )
    {
        //  A dataflow merge point occupies the slot; add this edge's value.
        @SuppressWarnings("unchecked")
        TreeModelVisitor.IMergePoint<T> phi = (TreeModelVisitor.IMergePoint<T>) target.get(i);
        phi.addValue(value);
    }
    else if ( target.get(i) == null )
    {
        target.set(i, value);
    }
    else
    {
        // TODO: Verify that the existing value and the current value are the same.
    }
}
/**
 * Build an operands Collection from a single operand.
 * @param operand - the operand.
 * @return the operand, wrapped in a Collection.
 */
private <X> Collection<X> singleOperand(X operand)
{
    ArrayList<X> result = new ArrayList<X>(1);
    result.add(operand);
    return result;
}
/**
 * @return an empty list, appropriately cast.
 */
private Collection<T> noOperands()
{
    return Collections.emptyList();
}
}
/**
 * Adjust the size of a collection of frame elements by padding
 * it with nulls until the requested minimum size is reached.
 * @param frameElements - the frame elements of interest.
 * @param idx - the minimum size of the frame element.
 */
private static <X> void adjustSize(ArrayList<X> frameElements, int idx)
{
    while(frameElements.size() < idx )
        frameElements.add(null);
}
/**
 * Pop a value off a collection of frame elements.
 * @param frameElements - the frame elements of interest.
 * @return the last element in the collection, which
 * has been removed from the collection.
 */
static <X> X popElement(ArrayList<X> frameElements)
{
    //  Remove and return the final element (the "top of stack").
    return frameElements.remove(frameElements.size() - 1);
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kylin.metadata.recommendation.ref;
import java.util.Arrays;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;
import org.apache.commons.lang3.StringUtils;
import org.apache.kylin.common.KylinConfig;
import org.apache.kylin.common.util.JsonUtil;
import org.apache.kylin.guava30.shaded.common.base.Preconditions;
import org.apache.kylin.guava30.shaded.common.collect.BiMap;
import org.apache.kylin.guava30.shaded.common.collect.HashBiMap;
import org.apache.kylin.guava30.shaded.common.collect.Lists;
import org.apache.kylin.guava30.shaded.common.collect.Maps;
import org.apache.kylin.guava30.shaded.common.collect.Sets;
import org.apache.kylin.metadata.cube.model.LayoutEntity;
import org.apache.kylin.metadata.cube.model.NIndexPlanManager;
import org.apache.kylin.metadata.favorite.FavoriteRule;
import org.apache.kylin.metadata.favorite.FavoriteRuleManager;
import org.apache.kylin.metadata.model.AntiFlatChecker;
import org.apache.kylin.metadata.model.ColExcludedChecker;
import org.apache.kylin.metadata.model.ComputedColumnDesc;
import org.apache.kylin.metadata.model.MeasureDesc;
import org.apache.kylin.metadata.model.NDataModel;
import org.apache.kylin.metadata.model.NDataModelManager;
import org.apache.kylin.metadata.model.ParameterDesc;
import org.apache.kylin.metadata.model.TblColRef;
import org.apache.kylin.metadata.model.util.ComputedColumnUtil;
import org.apache.kylin.metadata.recommendation.candidate.RawRecItem;
import org.apache.kylin.metadata.recommendation.candidate.RawRecManager;
import org.apache.kylin.metadata.recommendation.candidate.RawRecSelection;
import org.apache.kylin.metadata.recommendation.entity.CCRecItemV2;
import org.apache.kylin.metadata.recommendation.entity.DimensionRecItemV2;
import org.apache.kylin.metadata.recommendation.entity.MeasureRecItemV2;
import org.apache.kylin.metadata.recommendation.util.RawRecUtil;
import lombok.Getter;
import lombok.extern.slf4j.Slf4j;
@Slf4j
@Getter
public class OptRecV2 {
    // Sentinel depend-id meaning "constant operand" (no backing column/measure).
    private static final int CONSTANT = Integer.MAX_VALUE;
    private static final String MEASURE_NAME_PREFIX = "MEASURE_AUTO_";
    private static final String CC_AS_DIMENSION_PREFIX = "DIMENSION_AUTO_";

    private final String uuid;
    private final KylinConfig config;
    private final String project;

    // Non-layout raw recommendations of this model, indexed three ways.
    private final Map<String, RawRecItem> uniqueFlagToRecItemMap;
    private final Map<String, RawRecItem> uuidToRecItemMap = new HashMap<>();
    private final BiMap<String, Integer> uniqueFlagToId = HashBiMap.create();
    private final List<Integer> rawIds = Lists.newArrayList();

    // Ref map. If key >= 0, ref in model else ref in raw item.
    private final Map<Integer, RecommendationRef> columnRefs = Maps.newHashMap();
    private final Map<Integer, RecommendationRef> ccRefs = Maps.newHashMap();
    private final Map<Integer, RecommendationRef> dimensionRefs = Maps.newHashMap();
    private final Map<Integer, RecommendationRef> measureRefs = Maps.newHashMap();
    private final Map<Integer, LayoutRef> additionalLayoutRefs = Maps.newHashMap();
    private final Map<Integer, LayoutRef> removalLayoutRefs = Maps.newHashMap();
    private final Map<Integer, RawRecItem> rawRecItemMap = Maps.newHashMap();
    // Raw item ids whose translated refs turned out broken.
    private final Set<Integer> brokenRefIds = Sets.newHashSet();

    @Getter(lazy = true)
    private final List<LayoutEntity> layouts = getAllLayouts();
    @Getter(lazy = true)
    private final NDataModel model = initModel();
    @Getter(lazy = true)
    private final Map<String, ComputedColumnDesc> projectCCMap = initAllCCMap();

    private final AntiFlatChecker antiFlatChecker;
    private final ColExcludedChecker excludedChecker;
    private final boolean needLog;
    public OptRecV2(String project, String uuid, boolean needLog) {
        this.needLog = needLog;
        this.config = KylinConfig.getInstanceFromEnv();
        this.uuid = uuid;
        this.project = project;
        // Load every non-layout raw recommendation of this model and index
        // it by unique flag, raw item id, and recommendation-entity uuid.
        uniqueFlagToRecItemMap = RawRecManager.getInstance(project).queryNonLayoutRecItems(Sets.newHashSet(uuid));
        uniqueFlagToRecItemMap.forEach((k, recItem) -> {
            uniqueFlagToId.put(k, recItem.getId());
            uuidToRecItemMap.put(recItem.getRecEntity().getUuid(), recItem);
        });
        antiFlatChecker = new AntiFlatChecker(getModel().getJoinTables(), getModel());
        excludedChecker = new ColExcludedChecker(config, project, getModel());
        // Model-backed column/measure refs can only be built for a healthy model.
        if (!getModel().isBroken()) {
            initModelColumnRefs(getModel());
            initModelMeasureRefs(getModel());
        }
    }
public void initRecommendation() {
log.debug("Start to initialize recommendation({}/{}}", project, getUuid());
NDataModel dataModel = getModel();
if (dataModel.isBroken()) {
log.warn("Discard all related recommendations for model({}/{}) is broken.", project, uuid);
RawRecManager.getInstance(project).discardRecItemsOfBrokenModel(dataModel.getUuid());
return;
}
initLayoutRefs(queryBestLayoutRecItems());
initLayoutRefs(queryIndexPlannerRecItems());
initLayoutRefs(queryImportedRawRecItems());
initRemovalLayoutRefs(queryBestRemovalLayoutRecItems());
autoNameForMeasure();
brokenRefIds.addAll(collectBrokenRefs());
log.info("Initialize recommendation({}/{}) successfully.", project, uuid);
}
public List<RawRecItem> filterExcludedRecPatterns(List<RawRecItem> rawRecItems) {
log.debug("Start to initialize recommendation patterns({}/{}}", project, getUuid());
NDataModel dataModel = getModel();
if (dataModel.isBroken()) {
log.warn("Discard all related recommendations for model({}/{}) is broken.", project, uuid);
RawRecManager.getInstance(project).discardRecItemsOfBrokenModel(dataModel.getUuid());
return Lists.newArrayList();
}
List<RawRecItem> reserved = Lists.newArrayList();
initLayoutRefs(rawRecItems);
brokenRefIds.addAll(collectBrokenRefs());
log.info("Initialize recommendation patterns({}/{}) successfully.", project, uuid);
return reserved;
}
    /**
     * Rename every effective recommended measure to MEASURE_AUTO_&lt;n&gt;,
     * numbering from just past the largest auto index already in the model.
     */
    private void autoNameForMeasure() {
        AtomicInteger maxMeasureIndex = new AtomicInteger(getBiggestAutoMeasureIndex(getModel()));
        List<RecommendationRef> allMeasureRefs = getEffectiveRefs(measureRefs);
        for (RecommendationRef entry : allMeasureRefs) {
            MeasureRef measureRef = (MeasureRef) entry;
            String measureName = OptRecV2.MEASURE_NAME_PREFIX + maxMeasureIndex.incrementAndGet();
            measureRef.getMeasure().setName(measureName);
            measureRef.setName(measureName);
            // Keep the serialized content in sync with the renamed measure.
            measureRef.setContent(JsonUtil.writeValueAsStringQuietly(measureRef.getMeasure()));
        }
    }
public int getBiggestAutoMeasureIndex(NDataModel dataModel) {
int biggest = 0;
List<String> allAutoMeasureNames = dataModel.getAllMeasures() //
.stream().map(MeasureDesc::getName) //
.filter(name -> name.startsWith(MEASURE_NAME_PREFIX)) //
.collect(Collectors.toList());
for (String name : allAutoMeasureNames) {
int idx;
try {
String idxStr = name.substring(MEASURE_NAME_PREFIX.length());
idx = StringUtils.isEmpty(idxStr) ? -1 : Integer.parseInt(idxStr);
} catch (NumberFormatException e) {
idx = -1;
}
if (idx > biggest) {
biggest = idx;
}
}
return biggest;
}
    /**
     * Init ModelColumnRefs and DimensionRefs from model
     */
    private void initModelColumnRefs(NDataModel model) {
        // Map each CC's full name to its expression so a CC-backed column's
        // ref content carries the expression rather than the column name.
        List<ComputedColumnDesc> ccList = model.getComputedColumnDescs();
        Map<String, String> ccNameToExpressionMap = Maps.newHashMap();
        ccList.forEach(cc -> ccNameToExpressionMap.put(cc.getFullName(), cc.getExpression()));

        for (NDataModel.NamedColumn column : model.getAllNamedColumns()) {
            if (!column.isExist()) {
                continue;
            }
            int id = column.getId();
            String columnName = column.getAliasDotColumn();
            String content = ccNameToExpressionMap.getOrDefault(columnName, columnName);
            TblColRef tblColRef = model.getEffectiveCols().get(column.getId());
            RecommendationRef columnRef = new ModelColumnRef(column, tblColRef.getDatatype(), content);
            // Columns on anti-lookup tables or explicitly excluded columns are flagged.
            if (antiFlatChecker.isColOfAntiLookup(tblColRef) || excludedChecker.isExcludedCol(tblColRef)) {
                columnRef.setExcluded(true);
            }
            columnRefs.put(id, columnRef);

            if (column.isDimension()) {
                dimensionRefs.put(id, new DimensionRef(columnRef, id, tblColRef.getDatatype(), true));
            }
        }
    }
    /**
     * Init MeasureRefs from model
     */
    private void initModelMeasureRefs(NDataModel model) {
        for (NDataModel.Measure measure : model.getAllMeasures()) {
            if (measure.isTomb()) {
                continue;
            }

            MeasureRef measureRef = new MeasureRef(measure, measure.getId(), true);
            // Wire each column-typed parameter to its model column ref; the
            // measure inherits the excluded flag from any excluded parameter.
            measure.getFunction().getParameters().stream().filter(ParameterDesc::isColumnType).forEach(p -> {
                int id = model.getColumnIdByColumnName(p.getValue());
                if (antiFlatChecker.isColOfAntiLookup(p.getColRef()) || excludedChecker.isExcludedCol(p.getColRef())) {
                    measureRef.setExcluded(true);
                }
                measureRef.getDependencies().add(columnRefs.get(id));
            });
            measureRefs.put(measure.getId(), measureRef);
        }
    }
/**
* Init LayoutRefs and they derived dependencies(DimensionRef, MeasureRef, CCRef)
*/
private void initLayoutRefs(List<RawRecItem> bestRecItems) {
bestRecItems.forEach(rawRecItem -> rawIds.add(rawRecItem.getId()));
bestRecItems.forEach(rawRecItem -> rawRecItemMap.put(rawRecItem.getId(), rawRecItem));
bestRecItems.forEach(this::initLayoutRef);
}
    /** Translate removal-layout raw items; refs are keyed by negative raw id. */
    private void initRemovalLayoutRefs(List<RawRecItem> removalLayoutRecItems) {
        removalLayoutRecItems.forEach(rawRecItem -> {
            rawIds.add(rawRecItem.getId());
            rawRecItemMap.put(rawRecItem.getId(), rawRecItem);
            logTranslateInfo(rawRecItem);
            LayoutRef ref = convertToLayoutRef(rawRecItem);
            removalLayoutRefs.put(-rawRecItem.getId(), ref);
        });
    }

    /** Query the topN best additional-layout items per the rec-select rule. */
    private List<RawRecItem> queryBestLayoutRecItems() {
        FavoriteRule favoriteRule = FavoriteRuleManager.getInstance(project)
                .getOrDefaultByName(FavoriteRule.REC_SELECT_RULE_NAME);
        int topN = Integer.parseInt(((FavoriteRule.Condition) favoriteRule.getConds().get(0)).getRightThreshold());
        return RawRecSelection.getInstance().selectBestLayout(topN, uuid, project);
    }

    private List<RawRecItem> queryIndexPlannerRecItems() {
        RawRecManager rawRecManager = RawRecManager.getInstance(project);
        return rawRecManager.queryIndexPlannerRecItems(project, uuid);
    }

    private List<RawRecItem> queryImportedRawRecItems() {
        return RawRecManager.getInstance(project).queryImportedRawRecItems(project, uuid);
    }

    /** Removal-layout raw items still in INITIAL state (not yet applied). */
    private List<RawRecItem> queryBestRemovalLayoutRecItems() {
        Map<String, RawRecItem> recItemMap = RawRecManager.getInstance(project).queryNonAppliedLayoutRawRecItems(uuid,
                false);
        List<RawRecItem> initialRemovalLayoutRecItems = Lists.newArrayList();
        recItemMap.forEach((key, value) -> {
            if (value.getState() == RawRecItem.RawRecState.INITIAL) {
                initialRemovalLayoutRecItems.add(value);
            }
        });
        return initialRemovalLayoutRecItems;
    }
    /** Build one additional LayoutRef (keyed by negative raw id) and dedupe it. */
    private void initLayoutRef(RawRecItem rawRecItem) {
        logTranslateInfo(rawRecItem);
        LayoutRef ref = convertToLayoutRef(rawRecItem);
        additionalLayoutRefs.put(-rawRecItem.getId(), ref);
        if (ref.isBroken()) {
            return;
        }
        // Flag the ref as existed if an equal layout or ref already exists.
        checkLayoutExists(rawRecItem);
    }
    /**
     * Mark the LayoutRef of this raw item as existed when an equal layout
     * already lives in the index plan, or when another effective LayoutRef
     * carries the same content.
     */
    private void checkLayoutExists(RawRecItem recItem) {
        int negRecItemId = -recItem.getId();
        LayoutRef layoutRef = additionalLayoutRefs.get(negRecItemId);
        // Work on a copy so translation does not disturb the stored ref.
        LayoutEntity layout = JsonUtil.deepCopyQuietly(layoutRef.getLayout(), LayoutEntity.class);
        List<Integer> colOrder = Lists.newArrayList();
        List<Integer> sortColumns = Lists.newArrayList();
        List<Integer> partitionColumns = Lists.newArrayList();
        List<Integer> shardColumns = Lists.newArrayList();
        boolean containNotExistsColumn = translate(colOrder, layout.getColOrder());
        if (!containNotExistsColumn) {
            translate(sortColumns, layout.getSortByColumns());
            translate(shardColumns, layout.getShardByColumns());
            translate(partitionColumns, layout.getPartitionByColumns());
            layout.setColOrder(colOrder);
            layout.setShardByColumns(shardColumns);
            layout.setPartitionByColumns(partitionColumns);
            // NOTE(review): sortColumns is translated but never applied to
            // 'layout' (no setSortByColumns call) — confirm whether sort-by
            // columns were meant to participate in the equality check below.
            long layoutId = getLayouts().stream() //
                    .filter(layoutEntity -> layoutEntity.equals(layout)) //
                    .map(LayoutEntity::getId) //
                    .findFirst().orElse(-1L);
            if (layoutId > 0) {
                logConflictWithRealEntity(recItem, layoutId);
                layoutRef.setExisted(true);
                return;
            }
        }

        // avoid the same LayoutRef
        for (RecommendationRef entry : getEffectiveRefs(additionalLayoutRefs)) {
            if (entry.getId() == negRecItemId) {
                continue;
            }
            if (Objects.equals(entry, layoutRef)) {
                logDuplicateRawRecItem(recItem, -entry.getId());
                layoutRef.setExisted(true);
                return;
            }
        }
    }
    // Translate existing column from RawRecItem to column in model.
    // Return true if there is a not exist column/measure in cols,
    // so we can skip check with layout in index.
    private boolean translate(List<Integer> toColIds, List<Integer> fromColIds) {
        for (Integer id : fromColIds) {
            // A depend id resolves to either a dimension ref or a measure ref.
            RecommendationRef ref = dimensionRefs.containsKey(id) ? dimensionRefs.get(id) : measureRefs.get(id);
            if (ref == null || !ref.isExisted()) {
                return true;
            }
            toColIds.add(ref.getId());
        }
        return false;
    }
    /**
     * Translate one raw item into a LayoutRef, resolving every depend id to
     * a dimension/measure ref. Returns a broken proxy when the item is out
     * of date, removal targets a vanished layout, or a dependency is lost.
     */
    private LayoutRef convertToLayoutRef(RawRecItem rawRecItem) {
        int negRecItemId = -rawRecItem.getId();
        NDataModel dataModel = getModel();
        if (rawRecItem.isOutOfDate(dataModel.getSemanticVersion())) {
            logSemanticNotMatch(rawRecItem, dataModel);
            return BrokenRefProxy.getProxy(LayoutRef.class, negRecItemId);
        }

        LayoutEntity layout = RawRecUtil.getLayout(rawRecItem);
        if (RawRecItem.RawRecType.REMOVAL_LAYOUT == rawRecItem.getType()) {
            // A removal recommendation is only meaningful while its target
            // layout still exists in the index plan.
            NIndexPlanManager indexMgr = NIndexPlanManager.getInstance(KylinConfig.getInstanceFromEnv(), project);
            Map<Long, LayoutEntity> allLayoutsMap = indexMgr.getIndexPlan(uuid).getAllLayoutsMap();
            if (!allLayoutsMap.containsKey(layout.getId())) {
                return BrokenRefProxy.getProxy(LayoutRef.class, negRecItemId);
            }
        }

        LayoutRef layoutRef = new LayoutRef(layout, negRecItemId, rawRecItem.isAgg());
        for (int dependId : rawRecItem.getDependIDs()) {
            initDependencyRef(dependId, dataModel);

            // normal case: all dependId can be found in dimensionRefs or measureRefs
            if (dimensionRefs.containsKey(dependId) || measureRefs.containsKey(dependId)) {
                RecommendationRef ref = dimensionRefs.containsKey(dependId) //
                        ? dimensionRefs.get(dependId)
                        : measureRefs.get(dependId);
                if (ref.isBroken()) {
                    logDependencyLost(rawRecItem, dependId);
                    return BrokenRefProxy.getProxy(LayoutRef.class, layoutRef.getId());
                }
                if (ref.isExcluded()) {
                    layoutRef.setExcluded(true);
                }
                layoutRef.getDependencies().add(ref);
                continue;
            }

            // abnormal case: maybe this column has been deleted in model, mark this ref to deleted.
            if (dependId > 0) {
                logDependencyLost(rawRecItem, dependId);
                return BrokenRefProxy.getProxy(LayoutRef.class, layoutRef.getId());
            }
        }
        return layoutRef;
    }
    /**
     * Ensure the ref for dependId is initialized. Non-negative ids refer to
     * the model's own columns/measures and need no work; negative ids refer
     * to raw recommendation items, translated here by item type.
     */
    private void initDependencyRef(int dependId, NDataModel dataModel) {
        if (dependId >= 0) {
            log.info("DependId({}) is derived from model({}/{})", //
                    dependId, getProject(), dataModel.getUuid());
            return;
        }

        int rawRecItemId = -dependId;
        if (rawRecItemMap.containsKey(rawRecItemId)) {
            // Already translated on a previous pass.
            logRawRecItemHasBeenInitialized(dataModel, rawRecItemId);
            return;
        }

        String uniqueFlag = uniqueFlagToId.inverse().get(rawRecItemId);
        RawRecItem rawRecItem = uniqueFlag == null ? null : uniqueFlagToRecItemMap.get(uniqueFlag);
        if (rawRecItem == null) {
            // The referenced raw item vanished: register broken proxies of
            // every possible kind so later lookups see a broken dependency.
            logRawRecItemNotFoundError(rawRecItemId);
            ccRefs.put(dependId, BrokenRefProxy.getProxy(CCRef.class, dependId));
            dimensionRefs.put(dependId, BrokenRefProxy.getProxy(DimensionRef.class, dependId));
            measureRefs.put(dependId, BrokenRefProxy.getProxy(MeasureRef.class, dependId));
            rawRecItemMap.put(dependId, null);
            return;
        }

        switch (rawRecItem.getType()) {
        case COMPUTED_COLUMN:
            initCCRef(rawRecItem, dataModel);
            break;
        case DIMENSION:
            initDimensionRef(rawRecItem, dataModel);
            break;
        case MEASURE:
            initMeasureRef(rawRecItem, dataModel);
            break;
        default:
            throw new IllegalStateException("id: " + rawRecItemId + " type is illegal");
        }
        rawRecItemMap.put(rawRecItemId, rawRecItem);
    }
private void initCCRef(RawRecItem rawRecItem, NDataModel dataModel) {
logTranslateInfo(rawRecItem);
int negRecItemId = -rawRecItem.getId();
if (rawRecItem.isOutOfDate(dataModel.getSemanticVersion())) {
logSemanticNotMatch(rawRecItem, dataModel);
ccRefs.put(negRecItemId, BrokenRefProxy.getProxy(CCRef.class, negRecItemId));
return;
}
Map<String, ComputedColumnDesc> ccMapOnModel = Maps.newHashMap();
dataModel.getComputedColumnDescs().forEach(cc -> ccMapOnModel.put(cc.getInnerExpression(), cc));
ComputedColumnDesc cc = RawRecUtil.getCC(rawRecItem);
CCRef ccRef = new CCRef(cc, negRecItemId);
if (ccMapOnModel.containsKey(cc.getInnerExpression())) {
ComputedColumnDesc existCC = ccMapOnModel.get(cc.getInnerExpression());
ccRef = new CCRef(existCC, negRecItemId);
ccRef.setExisted(true);
ccRef.setCrossModel(false);
dataModel.getEffectiveCols().forEach((key, tblColRef) -> {
if (tblColRef.getIdentity().equalsIgnoreCase(existCC.getFullName())) {
ccRefs.put(negRecItemId, columnRefs.get(key));
}
});
return;
} else if (getProjectCCMap().containsKey(cc.getInnerExpression())) {
ComputedColumnDesc existCC = getProjectCCMap().get(cc.getInnerExpression());
if (existCC.getTableIdentity().equalsIgnoreCase(cc.getTableIdentity())) {
ccRef = new CCRef(existCC, negRecItemId);
ccRef.setExisted(false);
ccRef.setCrossModel(true);
} else {
ccRef = new CCRef(cc, negRecItemId);
ccRef.setExisted(false);
ccRef.setCrossModel(false);
}
}
int[] dependIds = rawRecItem.getDependIDs();
for (int dependId : dependIds) {
TranslatedState state = initDependencyWithState(dependId, ccRef);
if (state == TranslatedState.BROKEN) {
logDependencyLost(rawRecItem, dependId);
ccRefs.put(negRecItemId, BrokenRefProxy.getProxy(CCRef.class, negRecItemId));
return;
}
}
CCRecItemV2 recEntity = (CCRecItemV2) rawRecItem.getRecEntity();
int[] newDependIds = recEntity.genDependIds(dataModel);
if (!Arrays.equals(newDependIds, rawRecItem.getDependIDs())) {
logIllegalRawRecItem(rawRecItem, rawRecItem.getDependIDs(), newDependIds);
measureRefs.put(negRecItemId, BrokenRefProxy.getProxy(MeasureRef.class, negRecItemId));
return;
}
ccRefs.put(negRecItemId, ccRef);
checkCCExist(rawRecItem);
}
    /**
     * Forward this raw item's CCRef to an identical CCRef from another raw
     * item, marking it as existed, so duplicates collapse to one ref.
     */
    private void checkCCExist(RawRecItem recItem) {
        int negRecItemId = -recItem.getId();
        RecommendationRef ref = ccRefs.get(negRecItemId);
        if (ref.isExisted() || !(ref instanceof CCRef)) {
            return;
        }

        // check in other raw items.
        CCRef ccRef = (CCRef) ref;
        for (RecommendationRef entry : getEffectiveRefs(ccRefs)) {
            if (entry.getId() == negRecItemId) {
                // pass itself
                continue;
            }

            CCRef anotherCCRef = (CCRef) entry;
            if (ccRef.isIdentical(anotherCCRef)) {
                logDuplicateRawRecItem(recItem, -entry.getId());
                ccRef.setExisted(true);
                ccRefs.put(negRecItemId, ccRefs.get(entry.getId()));
                return;
            }
        }
    }
private void initDimensionRef(RawRecItem rawRecItem, NDataModel dataModel) {
logTranslateInfo(rawRecItem);
// check semanticVersion
int negRecItemId = -rawRecItem.getId();
if (rawRecItem.isOutOfDate(dataModel.getSemanticVersion())) {
logSemanticNotMatch(rawRecItem, dataModel);
dimensionRefs.put(negRecItemId, BrokenRefProxy.getProxy(DimensionRef.class, negRecItemId));
return;
}
DimensionRef dimensionRef = new DimensionRef(negRecItemId);
final int[] dependIDs = rawRecItem.getDependIDs();
Preconditions.checkArgument(dependIDs.length == 1);
int dependID = dependIDs[0];
TranslatedState state = initDependencyWithState(dependID, dimensionRef);
if (state == TranslatedState.BROKEN) {
logDependencyLost(rawRecItem, dependID);
dimensionRefs.put(negRecItemId, BrokenRefProxy.getProxy(DimensionRef.class, negRecItemId));
return;
}
DimensionRecItemV2 recEntity = (DimensionRecItemV2) rawRecItem.getRecEntity();
if (recEntity.getUniqueContent() == null) {
logIncompatibleRawRecItem(rawRecItem);
measureRefs.put(negRecItemId, BrokenRefProxy.getProxy(MeasureRef.class, negRecItemId));
return;
}
int[] newDependIds = recEntity.genDependIds(uuidToRecItemMap, recEntity.getUniqueContent(), dataModel);
if (!Arrays.equals(newDependIds, rawRecItem.getDependIDs())) {
logIllegalRawRecItem(rawRecItem, rawRecItem.getDependIDs(), newDependIds);
measureRefs.put(negRecItemId, BrokenRefProxy.getProxy(MeasureRef.class, negRecItemId));
return;
}
dimensionRef.init();
if (dependID < 0) {
String dimRefName = dimensionRef.getName();
dimensionRef.setName(dimRefName.replace(ComputedColumnUtil.CC_NAME_PREFIX, CC_AS_DIMENSION_PREFIX));
}
dimensionRefs.put(negRecItemId, reuseIfAvailable(dimensionRef));
checkDimensionExist(rawRecItem);
}
    /**
     * If the dimension is backed by a model column that is already a
     * dimension, return the existing model DimensionRef instead.
     */
    private DimensionRef reuseIfAvailable(DimensionRef dimensionRef) {
        RecommendationRef recommendationRef = dimensionRef.getDependencies().get(0);
        if (recommendationRef instanceof ModelColumnRef) {
            NDataModel.NamedColumn column = ((ModelColumnRef) recommendationRef).getColumn();
            if (column.isDimension()) {
                dimensionRef = (DimensionRef) dimensionRefs.get(column.getId());
            }
        }
        return dimensionRef;
    }
    /**
     * Collapse this raw item's DimensionRef onto an equal effective ref,
     * marking it as existed, so duplicate recommendations share one ref.
     */
    private void checkDimensionExist(RawRecItem recItem) {
        int negRecItemId = -recItem.getId();
        RecommendationRef dimensionRef = dimensionRefs.get(negRecItemId);
        // check two raw recommendations share same content
        for (RecommendationRef entry : getEffectiveRefs(dimensionRefs)) {
            if (entry.getId() == negRecItemId) {
                // pass itself
                continue;
            }

            // if reference of this raw recommendation has been approved, forward to the approved one
            if (Objects.equals(entry, dimensionRef)) {
                logDuplicateRawRecItem(recItem, -entry.getId());
                dimensionRef.setExisted(true);
                dimensionRefs.put(negRecItemId, dimensionRefs.get(entry.getId()));
                return;
            }
        }
    }
    /**
     * Translate a MEASURE raw item into a MeasureRef. Every failure path
     * registers a broken MeasureRef proxy under the negative id.
     */
    private void initMeasureRef(RawRecItem rawRecItem, NDataModel dataModel) {
        logTranslateInfo(rawRecItem);

        int negRecItemId = -rawRecItem.getId();
        if (rawRecItem.isOutOfDate(dataModel.getSemanticVersion())) {
            logSemanticNotMatch(rawRecItem, dataModel);
            measureRefs.put(negRecItemId, BrokenRefProxy.getProxy(MeasureRef.class, negRecItemId));
            return;
        }

        RecommendationRef ref = new MeasureRef(RawRecUtil.getMeasure(rawRecItem), negRecItemId, false);
        for (int value : rawRecItem.getDependIDs()) {
            TranslatedState state = initDependencyWithState(value, ref);
            if (state == TranslatedState.BROKEN) {
                logDependencyLost(rawRecItem, value);
                measureRefs.put(negRecItemId, BrokenRefProxy.getProxy(MeasureRef.class, negRecItemId));
                return;
            }
        }

        MeasureRecItemV2 recEntity = (MeasureRecItemV2) rawRecItem.getRecEntity();
        if (recEntity.getUniqueContent() == null) {
            logIncompatibleRawRecItem(rawRecItem);
            measureRefs.put(negRecItemId, BrokenRefProxy.getProxy(MeasureRef.class, negRecItemId));
            return;
        }
        // Re-derive the depend ids and reject the item if they drifted.
        int[] newDependIds = recEntity.genDependIds(uuidToRecItemMap, recEntity.getUniqueContent(), dataModel);
        if (!Arrays.equals(newDependIds, rawRecItem.getDependIDs())) {
            logIllegalRawRecItem(rawRecItem, rawRecItem.getDependIDs(), newDependIds);
            measureRefs.put(negRecItemId, BrokenRefProxy.getProxy(MeasureRef.class, negRecItemId));
            return;
        }
        measureRefs.put(negRecItemId, ref);
        checkMeasureExist(rawRecItem);
    }
    /**
     * Collapse this raw item's MeasureRef onto an identical legal ref,
     * marking it as existed, so duplicate measures share one ref.
     */
    private void checkMeasureExist(RawRecItem recItem) {
        int negRecItemId = -recItem.getId();
        MeasureRef measureRef = (MeasureRef) measureRefs.get(negRecItemId);
        for (RecommendationRef entry : getLegalRefs(measureRefs)) {
            if (entry.getId() == negRecItemId) {
                // pass itself
                continue;
            }

            /* Parameters of measure can only ordinary columns or computed columns,
             * so if the function name and dependencies of two measureRefs are the same,
             * they are identical, then the second measureRef should forward to the first one.
             */
            if (measureRef.isIdentical(entry)) {
                logDuplicateRawRecItem(recItem, -entry.getId());
                measureRef.setExisted(true);
                measureRefs.put(negRecItemId, measureRefs.get(entry.getId()));
                return;
            }
        }
    }
private TranslatedState initDependencyWithState(int dependId, RecommendationRef ref) {
if (dependId == OptRecV2.CONSTANT) {
return TranslatedState.CONSTANT;
}
NDataModel dataModel = getModel();
initDependencyRef(dependId, dataModel);
if (columnRefs.containsKey(dependId)) {
RecommendationRef e = columnRefs.get(dependId);
if (e.isBroken()) {
return TranslatedState.BROKEN;
} else if (e.isExcluded()) {
ref.setExcluded(true);
}
ref.getDependencies().add(e);
} else if (ccRefs.containsKey(dependId)) {
RecommendationRef e = ccRefs.get(dependId);
if (e.isBroken()) {
return TranslatedState.BROKEN;
} else if (e.isExcluded()) {
ref.setExcluded(true);
}
ref.getDependencies().add(e);
} else {
return TranslatedState.BROKEN;
}
return TranslatedState.NORMAL;
}
private List<RecommendationRef> getEffectiveRefs(Map<Integer, ? extends RecommendationRef> refMap) {
List<RecommendationRef> effectiveRefs = Lists.newArrayList();
refMap.forEach((key, ref) -> {
if (ref.isEffective()) {
effectiveRefs.add(ref);
}
});
effectiveRefs.sort(Comparator.comparingInt(RecommendationRef::getId));
return effectiveRefs;
}
private List<RecommendationRef> getLegalRefs(Map<Integer, ? extends RecommendationRef> refMap) {
Set<RecommendationRef> effectiveRefs = Sets.newHashSet();
refMap.forEach((key, ref) -> {
if (ref.isLegal()) {
effectiveRefs.add(ref);
}
});
List<RecommendationRef> effectiveRefList = Lists.newArrayList(effectiveRefs);
effectiveRefList.sort(Comparator.comparingInt(RecommendationRef::getId));
return effectiveRefList;
}
    /**
     * Collects the raw-rec-item ids of every broken ref across all ref maps.
     * Only negatively-keyed entries are considered; the key is negated before being
     * recorded, so the returned set holds positive raw-rec-item ids.
     */
    private Set<Integer> collectBrokenRefs() {
        Set<Integer> brokenIds = Sets.newHashSet();
        additionalLayoutRefs.forEach((id, ref) -> {
            if (ref.isBroken() && id < 0) {
                brokenIds.add(-id);
            }
        });
        removalLayoutRefs.forEach((id, ref) -> {
            if (ref.isBroken() && id < 0) {
                brokenIds.add(-id);
            }
        });
        fillBrokenRef(brokenIds, ccRefs);
        fillBrokenRef(brokenIds, dimensionRefs);
        fillBrokenRef(brokenIds, measureRefs);
        return brokenIds;
    }
private void fillBrokenRef(Set<Integer> brokenIds, Map<Integer, RecommendationRef> refs) {
refs.forEach((id, ref) -> {
if (ref.isBroken() && id < 0) {
brokenIds.add(-id);
}
});
}
private Map<String, ComputedColumnDesc> initAllCCMap() {
Map<String, ComputedColumnDesc> ccMap = Maps.newHashMap();
NDataModelManager modelManager = NDataModelManager.getInstance(KylinConfig.readSystemKylinConfig(), project);
List<NDataModel> allModels = modelManager.listAllModels();
allModels.stream().filter(m -> !m.isBroken()).forEach(m -> {
List<ComputedColumnDesc> ccList = m.getComputedColumnDescs();
for (ComputedColumnDesc cc : ccList) {
ccMap.putIfAbsent(cc.getInnerExpression(), cc);
}
});
return ccMap;
}
private NDataModel initModel() {
NDataModelManager modelManager = NDataModelManager.getInstance(Objects.requireNonNull(config), project);
NDataModel dataModel = modelManager.getDataModelDesc(getUuid());
return dataModel.isBroken() ? dataModel : modelManager.copyForWrite(dataModel);
}
    /** Fetches every layout of this model's index plan. */
    private List<LayoutEntity> getAllLayouts() {
        NIndexPlanManager indexPlanManager = NIndexPlanManager.getInstance(Objects.requireNonNull(config), project);
        return indexPlanManager.getIndexPlan(getUuid()).getAllLayouts();
    }
    /** Logs that the given raw rec item was already translated for this recommendation. */
    private void logRawRecItemHasBeenInitialized(NDataModel dataModel, int rawRecItemId) {
        log.info("RawRecItem({}) already initialized for Recommendation({}/{})", //
                rawRecItemId, getProject(), dataModel.getUuid());
    }
    /** Logs an error for a raw rec item id that has no backing record. */
    private void logRawRecItemNotFoundError(int rawRecItemId) {
        log.error("RawRecItem({}) is not found in recommendation({}/{})", rawRecItemId, project, getUuid());
    }
private void logTranslateInfo(RawRecItem recItem) {
String type;
switch (recItem.getType()) {
case MEASURE:
type = "MeasureRef";
break;
case COMPUTED_COLUMN:
type = "CCRef";
break;
case ADDITIONAL_LAYOUT:
case REMOVAL_LAYOUT:
type = "LayoutRef";
break;
case DIMENSION:
type = "DimensionRef";
break;
default:
throw new IllegalArgumentException();
}
log.info("RawRecItem({}) will be translated to {} in Recommendation({}/{})", //
recItem.getId(), type, project, getUuid());
}
    /** Logs that a dependency of the raw rec item could not be resolved. */
    private void logDependencyLost(RawRecItem rawRecItem, int dependId) {
        log.info("RawRecItem({}) lost dependency of {} in recommendation({}/{})", //
                rawRecItem.getId(), dependId, getProject(), getUuid());
    }
    /** Logs that the raw rec item's semantic version lags behind the model's. */
    private void logSemanticNotMatch(RawRecItem rawRecItem, NDataModel dataModel) {
        log.info("RawRecItem({}) has an outdated semanticVersion({}) less than {} in recommendation({}/{})",
                rawRecItem.getId(), rawRecItem.getSemanticVersion(), //
                dataModel.getSemanticVersion(), getProject(), getUuid());
    }
    /** Logs that the raw rec item clashes with an entity already present on the model. */
    private void logConflictWithRealEntity(RawRecItem recItem, long existingId) {
        log.info("RawRecItem({}) encounters an existing {}({}) in recommendation({}/{})", //
                recItem.getId(), recItem.getType().name(), existingId, getProject(), getUuid());
    }
    /** Logs that two raw rec items translate to identical refs. */
    private void logDuplicateRawRecItem(RawRecItem recItem, int anotherRecItemId) {
        log.info("RawRecItem({}) duplicates with another RawRecItem({}) in recommendation({}/{})", //
                recItem.getId(), anotherRecItemId, getProject(), getUuid());
    }
    /** Logs an error when a raw rec item's regenerated dependency ids differ from the stored ones. */
    private void logIllegalRawRecItem(RawRecItem recItem, int[] oldDependIds, int[] newDependIds) {
        log.error("RawRecItem({}) illegal now for dependIds changed, old dependIds({}), new dependIds({})",
                recItem.getId(), Arrays.toString(oldDependIds), Arrays.toString(newDependIds));
    }
    /** Logs that a raw rec item can no longer be translated because its uniqueContent is missing. */
    private void logIncompatibleRawRecItem(RawRecItem recItem) {
        log.info("RawRecItem({}) incompatible now for uniqueContent missing", recItem.getId());
    }
    /** Outcome of translating a single dependency (see initDependencyWithState). */
    private enum TranslatedState {
        CONSTANT, BROKEN, NORMAL, UNDEFINED
    }
}
|
google/sagetv | 34,857 | java/sage/SFIRTuner.java | /*
* Copyright 2015 The SageTV Authors. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package sage;
/*
* This class is a wrapper for the IRTuner dll interface from SourceForge.
*/
public class SFIRTuner implements Runnable
{
static
{
//sage.Native.loadLibrary("Sage");
}
  // Sage property keys controlling IR tuning behavior (directories, repeat counts,
  // pre-send delays, and whether tunes run on the async worker thread).
  public static final String REMOTE_DIR = "remote_dir";
  public static final String IRTUNE_REPEAT_FACTOR = "irtune_repeat_factor";
  public static final String IRTUNE_PREFIX_EXTRA_DELAY = "irtune_prefix_extra_delay";
  public static final String USB_IRTUNE_REPEAT_FACTOR = "usb_irtune_repeat_factor";
  public static final String IRTUNE_GLOBAL_PREROLL = "actisys_irtune_global_preroll";
  public static final String USBIRTUNE_GLOBAL_PREROLL = "usbuirt_irtune_global_preroll";
  public static final String ASYNC_TUNING = "async_tuning";
  /**
   * One node of a singly-linked list describing a learned IR signal pattern.
   * Fields are populated by the native layer (see recordCommand).
   */
  public static class Pattern
  {
    public int bit_length; // NOTE(review): presumably the significant bit count of bytes — confirm against native code
    public int length;
    public char r_flag;
    public byte[] bytes;
    public Pattern next; // next pattern node, or null at the end of the list
    public String toString()
    {
      return "Pattern[bit_length=" + bit_length + ", length=" + length + ", r_flag=" + r_flag +
          ", next=" + next + ']';
    }
  }
  /**
   * A named IR command (e.g. a digit key) and its signal pattern; node of a
   * singly-linked list owned by a {@link Remote}.
   */
  public static class Command
  {
    public String name; // key name, e.g. "1" or a confirm key
    public Pattern pattern; // learned signal pattern for this key
    public Command next; // next command in the remote's list, or null
    public String toString()
    {
      return "Command[name=" + name + ", pattern=" + pattern + ", next=" + next + ']';
    }
  }
  /**
   * A learned remote control: carrier/bit timing plus its linked command list.
   * Node of a singly-linked list of remotes (see loadRemotes).
   */
  public static class Remote
  {
    public String name;
    public long carrier_freq; // IR carrier frequency measured during learning
    public long bit_time; // bit timing measured during learning
    public Command command; // head of the command list
    public Remote next; // next remote, or null
    // SageTV added fields
    public int channelDigits; // digits per channel number; 0 disables zero-padding
    public String confirmCmd; // optional key sent after the digits (e.g. ENTER)
    public int buttonDelay; // ms to wait between key presses
    public int sequenceDelay; // ms to wait between full channel sequences
    public String prefixCmd; // optional key sent before the digits
    public String toString()
    {
      return "Remote[name=" + name + ", carrier=" + carrier_freq + ", bit_time=" + bit_time +
          ", command=" + command + ", next=" + next + ']';
    }
  }
  /** Returns the subset of {@code tryFiles} that are loadable IR tuner plugin libraries (native). */
  public static native String[] getValidDeviceFiles(String[] tryFiles);
  /** Returns a human-readable device name for each valid plugin file (native). */
  public static native String[] getPrettyDeviceNames(String[] validFiles);
public static String getSFIRTunerPluginDir()
{
String theDir = null;
if(Sage.WINDOWS_OS)
theDir = Sage.readStringValue(Sage.HKEY_LOCAL_MACHINE, "SOFTWARE\\Frey Technologies\\Common", "IRTunerPluginsDir");
else if(Sage.LINUX_OS)
theDir = Sage.get("irtuner_plugins_dir", "irtunerplugins");
// this guarantees we return a valid path (user.dir or Plug-Ins on Mac OS X)
if(theDir == null) theDir = Sage.getPath("plugins");
// System.out.println("getSFIRTunerPluginDir: dir = " + theDir);
return theDir;
}
public static String getPrettyNameForFile(String inFilename)
{
java.io.File testFile = new java.io.File(inFilename);
if (!testFile.isFile())
{
// Look in the global directory
String globalIRDir = getSFIRTunerPluginDir();
if (globalIRDir != null)
testFile = new java.io.File(globalIRDir, inFilename);
}
inFilename = testFile.getAbsolutePath();
String[] rv = getPrettyDeviceNames(new String[] { inFilename });
return (rv != null && rv.length > 0) ? rv[0] : inFilename;
}
  // Cache of pretty device name -> plugin file path (see getFileForPrettyDeviceName).
  private static final java.util.Map prettyNameMap = java.util.Collections.synchronizedMap(new java.util.HashMap());
public static String getFileForPrettyDeviceName(String prettyName)
{
if (prettyNameMap.get(prettyName) != null)
return prettyNameMap.get(prettyName).toString();
String irPluginDir = getSFIRTunerPluginDir();
java.io.File[] suspectDLLFiles = new java.io.File(irPluginDir).
listFiles(new java.io.FilenameFilter(){
public boolean accept(java.io.File dir,String filename){return filename.toLowerCase().endsWith(Sage.WINDOWS_OS ? ".dll" :
(Sage.LINUX_OS ? ".so" : ".dylib"));}});
String[] suspectDLLs = (suspectDLLFiles == null) ? Pooler.EMPTY_STRING_ARRAY : new String[suspectDLLFiles.length];
for (int i = 0; i < suspectDLLs.length; i++)
suspectDLLs[i] = suspectDLLFiles[i].getAbsolutePath();
String[] irDevFiles = getValidDeviceFiles(suspectDLLs);
String[] allPretty = getPrettyDeviceNames(irDevFiles);
for (int i = 0; i < allPretty.length; i++)
if (allPretty[i].equals(prettyName))
{
prettyNameMap.put(prettyName, irDevFiles[i]);
return irDevFiles[i];
}
prettyNameMap.put(prettyName, prettyName);
return prettyName;
}
/*
* Cmd line params
* -r name : create remote with name, calculates bitrate & carrier
* -c rname cname : record command 'cname' to the remote 'rname'
* -l name : load the remotes from this filename
* -s name : save the remotes to this filename
* -p rname cname repeat : play the command 'cname' from remote 'rname' repeat times
* -w time : wait for time seconds
* -i : run initDevice
* -x comport : open comport #
*/
/*public static void main(String[] args)
{
if (args.length == 0)
{
System.out.println("Usage:");
System.out.println("-r name : create remote with name, calculates bitrate & carrier");
System.out.println("-c rname cname : record command 'cname' to the remote 'rname'");
System.out.println("-l name : load the remotes from this filename");
System.out.println("-s name : save the remotes to this filename");
System.out.println("-p rname cname repeat : play the command 'cname' from remote 'rname' repeat times");
System.out.println("-w time : wait for time seconds");
System.out.println("-i : run initdevice");
System.out.println("-x comport : open comport #");
return;
}
String[] dllFiles = new java.io.File(System.getProperty("user.dir")).list(new java.io.FilenameFilter()
{
public boolean accept(java.io.File dir,String filename){return filename.endsWith(".dll");}
});
System.out.println("dllFiles=" + java.util.Arrays.asList(dllFiles));
String[] validFiles = getValidDeviceFiles(dllFiles);
System.out.println("validFiles=" + java.util.Arrays.asList(validFiles));
SFIRTuner tuney = new SFIRTuner(validFiles[0]);
for (int i = 0; i < args.length; i++)
{
if (args[i].equals("-r"))
{
String rname = args[++i];
System.out.println("Create remote named " + rname);
long carrier=0, bitrate=0;
if (tuney.needCarrierFrequency())
{
while (carrier == 0)
{
System.out.println("Hold a remote button down for a while. Scanning for frequency...");
carrier = tuney.findCarrierFrequency();
System.out.println("Carrier frequency=" + carrier);
if (carrier > 100000)
{
System.out.println("BAD CARRIER, do it again!");
carrier = 0;
}
}
}
if (tuney.needBitrate())
{
System.out.println("Hold a remote button down for a while. Calculating bitrate...");
bitrate = tuney.findBitRate();
System.out.println("Bitrate=" + bitrate);
}
Remote newRem = tuney.createRemote(rname, carrier, bitrate, null);
tuney.addRemote(newRem);
System.out.println("Created & added remote " + newRem);
}
else if (args[i].equals("-c"))
{
String rname = args[++i];
String cname = args[++i];
Remote rem = tuney.findRemote(rname);
if (rem == null)
{
System.out.println("Can't find remote named:" + rname);
continue;
}
System.out.println("Hit the " + cname + " key for remote " + rname);
Command cmd = tuney.recordCommand(cname);
System.out.println("Recorded command:" + cmd);
tuney.addCommand(rem, cmd);
}
else if (args[i].equals("-l"))
{
String fname = args[++i];
System.out.println("Loading remotes from filename:" + fname);
tuney.loadRemotes(fname);
System.out.println("Remotes=" + tuney.baseRemote);
}
else if (args[i].equals("-s"))
{
String fname = args[++i];
System.out.println("Saving remotes to filename:" + fname);
tuney.saveRemotes(fname);
System.out.println("Remotes=" + tuney.baseRemote);
}
else if (args[i].equals("-p"))
{
String rname = args[++i];
String cname = args[++i];
int rep = Integer.parseInt(args[++i]);
System.out.println("Starting to play command " + cname + " for remote " + rname + " " + rep + " times");
tuney.playCommand(tuney.findRemote(rname), cname, rep);
System.out.println("Done playing command");
}
else if (args[i].equals("-w"))
{
try{Thread.sleep(1000*Integer.parseInt(args[++i]));}catch(Exception e){}
}
else if (args[i].equals("-i"))
tuney.initDevice();
else if (args[i].equals("-x"))
{
int comport = Integer.parseInt(args[++i]);
boolean openD = tuney.openDevice(comport);
if (!openD)
{
System.out.println("Failed opening COM port. Trying again!");
tuney.closeDevice();
openD = tuney.openDevice(comport);
if (!openD)
{
System.out.println("Failed opening COM port. Darn!");
return;
}
}
System.out.println("Opened com port " + openD);
}
}
tuney.closeDevice();
System.out.println("Closed COM port");
}*/
  // Every live SFIRTuner instance; maintained by initialize()/goodbye() and scanned
  // by checkForTuneyConflicts().
  private static java.util.Vector loadedTuneys = new java.util.Vector();
/*
* The carrier & bit timing for the hardware gets set in initDevice. The values
* it uses are from the last remote it loaded via a call to loadRemotes
*/
  /**
   * Creates a tuner for the given plugin library, resolving the file against the
   * global plugin directory, setting up the RemoteCodes directories, and starting
   * the async tuning thread via initialize().
   *
   * @param inFilename plugin library path, absolute or relative to the plugin dir
   */
  public SFIRTuner(String inFilename)
  {
    java.io.File testFile = new java.io.File(inFilename);
    String globalIRDir = getSFIRTunerPluginDir();
    if (!testFile.isFile())
    {
      // Look in the global directory
      if (globalIRDir != null)
        testFile = new java.io.File(globalIRDir, inFilename);
    }
    inFilename = testFile.getAbsolutePath();
    if (Sage.WINDOWS_OS && getValidDeviceFiles(new String[] { inFilename} ).length == 0)
    {
      System.err.println("Invalid device filename for IRTuner: " + inFilename);
    }
    devFilename = inFilename;
    if (globalIRDir != null)
      remoteDir = new java.io.File(globalIRDir, "RemoteCodes");
    else
      remoteDir = new java.io.File(Sage.getPath("plugins")/*System.getProperty("user.dir")*/, "RemoteCodes");
    if (Sage.WINDOWS_OS || Sage.MAC_OS_X)
    {
      // Per-device subdirectory named after the pretty device name.
      remoteDir.mkdirs();
      String[] prettyNames = getPrettyDeviceNames(new String[] { devFilename });
      if (prettyNames != null && prettyNames.length > 0)
      {
        remoteDir2 = new java.io.File(remoteDir, prettyNames[0]);
        remoteDir2.mkdirs();
      }
    }
    else
      remoteDir2 = new java.io.File(remoteDir, devFilename);
    asyncTuning = Sage.getBoolean(ASYNC_TUNING, true);
    tuneVec = new java.util.Vector();
    globalPreroll = 0;
    initialize();
  }
  /**
   * Initializes the native layer, optionally starts the async tuning thread, reads
   * the configured pre-send delay, and registers this instance in loadedTuneys.
   */
  private void initialize()
  {
    checkForTuneyConflicts();
    init0();
    alive = true;
    if (asyncTuning)
    {
      asyncThread = new Thread(this, "AsyncTuner");
      asyncThread.setDaemon(true);
      asyncThread.setPriority(Thread.MAX_PRIORITY - 3);
      asyncThread.start();
      if (Sage.WINDOWS_OS)
      {
        // USB-UIRT devices use a much shorter preroll than Actisys serial tuners.
        globalPreroll = (devFilename.toLowerCase().indexOf("uu_irsage") == -1) ? Sage.getLong(IRTUNE_GLOBAL_PREROLL, 2000L) :
          Sage.getLong(USBIRTUNE_GLOBAL_PREROLL, 150);
      }
      else
        globalPreroll = Sage.getLong(USBIRTUNE_GLOBAL_PREROLL, 0);
    }
    loadedTuneys.add(this);
  }
private void checkForTuneyConflicts()
{
// We can't have more than one Actisys plugin open at once, so shut down any others if this is one
if (devFilename.startsWith("as_ir200l"))
{
for (int i = 0; i < loadedTuneys.size(); i++)
{
SFIRTuner tuney = (SFIRTuner) loadedTuneys.get(i);
if (tuney.devFilename.equals(devFilename))
{
System.out.println("SFIRTuner shutting down tuning plugin due to conflict");
tuney.goodbye();
}
}
}
}
  /** A device is configurable unless it supports direct macro tuning. */
  public boolean isConfigurable()
  {
    return !canMacroTune();
  }
  /**
   * Async tuning worker loop: drains tuneVec, collapsing queued requests so only
   * the most recent channel change per remote is sent, then plays it. Exits when
   * goodbye() clears {@code alive}.
   */
  public void run()
  {
    Object[] tuneData = null;
    while (alive)
    {
      String nextTune = null;
      String nextRemote = null;
      synchronized (tuneVec)
      {
        // Remove the request completed on the previous iteration.
        if (tuneData != null)
        {
          tuneVec.remove(tuneData);
          tuneData = null;
        }
        if (tuneVec.isEmpty())
        {
          // Wake anyone blocked in waitForCompletion(), then sleep for new work.
          tuneVec.notifyAll();
          try{tuneVec.wait(0);}catch(InterruptedException e){}
          continue;
        }
        tuneData = (Object[]) tuneVec.lastElement();
        nextRemote = (String) tuneData[0];
        nextTune = (String) tuneData[1];
        // Only send the last channel change command for a given remote since any prior
        // ones will be overidden by it.
        for (int i = tuneVec.size() - 2; i >= 0; i--)
        {
          Object[] tempTuneData = (Object[]) tuneVec.get(i);
          if (tempTuneData[0].equals(nextRemote))
            tuneVec.removeElementAt(i);
        }
      }
      // Configured delay before the actual IR send (not needed for macro tuners).
      if (globalPreroll != 0 && !canMacroTune())
        try{ Thread.sleep(globalPreroll); } catch(Exception e){}
      playTuneString(nextRemote, nextTune);
    }
  }
  /**
   * Lists the names of all available remotes. On Windows/Mac this scans both
   * RemoteCodes directories for non-empty .ir files; elsewhere it asks the native
   * layer for the full remote list (names only) and sorts them.
   */
  public String[] getRemoteNames()
  {
    if (Sage.WINDOWS_OS || Sage.MAC_OS_X)
    {
      java.util.ArrayList rv = new java.util.ArrayList();
      String[] irFiles = remoteDir.list(new java.io.FilenameFilter()
      {
        public boolean accept(java.io.File dir,String filename){return filename.endsWith(".ir") &&
            hasRemoteFileData(filename.substring(0, filename.length() - 3));}
      });
      if (irFiles != null)
      {
        // Strip the ".ir" extension to get the remote name.
        for (int i = 0; i < irFiles.length; i++)
          rv.add(irFiles[i].substring(0, irFiles[i].length() - 3));
      }
      irFiles = remoteDir2.list(new java.io.FilenameFilter()
      {
        public boolean accept(java.io.File dir,String filename){return filename.endsWith(".ir") &&
            hasRemoteFileData(filename.substring(0, filename.length() - 3));}
      });
      if (irFiles != null)
      {
        for (int i = 0; i < irFiles.length; i++)
          rv.add(irFiles[i].substring(0, irFiles[i].length() - 3));
      }
      return (String[]) rv.toArray(Pooler.EMPTY_STRING_ARRAY);
    }
    else
    {
      // This means load the whole remote list
      synchronized (this)
      {
        loadRemotes(null);
        java.util.ArrayList rv = new java.util.ArrayList();
        Remote tempRemote = baseRemote;
        while (tempRemote != null)
        {
          rv.add(tempRemote.name);
          tempRemote = tempRemote.next;
        }
        baseRemote = null; // since they don't load fully this way
        String[] rvArray = (String[]) rv.toArray(Pooler.EMPTY_STRING_ARRAY);
        java.util.Arrays.sort(rvArray);
        return rvArray;
      }
    }
  }
  /**
   * Shuts this tuner down: stops the async loop, closes the native device if it was
   * active, and deregisters from loadedTuneys. Safe to call more than once.
   */
  public synchronized void goodbye()
  {
    boolean needToKill = alive; // Don't close the hardware if it's not active or it may crash!
    alive = false;
    synchronized (tuneVec)
    {
      // Wake the async thread so it can observe alive == false and exit.
      tuneVec.notifyAll();
    }
    if (needToKill)
    {
      closeDevice();
      goodbye0();
    }
    loadedTuneys.remove(this);
  }
private void addCommand(Remote daRemote, Command addMe)
{
Command cmdList = daRemote.command;
if (cmdList == null)
{
daRemote.command = addMe;
return;
}
while (cmdList.next != null)
cmdList = cmdList.next;
cmdList.next = addMe;
}
/* private void addRemote(Remote addMe)
{
if (baseRemote == null)
baseRemote = addMe;
else
{
Remote currRemote = baseRemote;
while (currRemote.next != null)
currRemote = currRemote.next;
currRemote.next = addMe;
}
}
/* private Remote findRemote(String name)
{
Remote rem = baseRemote;
while (rem != null)
{
if (name.equals(rem.name))
return rem;
rem = rem.next;
}
return null;
}
*/
  /** Closes the currently open tuner port (native); counterpart of openDevice. */
  private native void closeDevice();
private Remote createRemote(String remoteName, long carrier, long bitrate, Command commands)
{
Remote rv = new Remote();
rv.name = remoteName;
rv.carrier_freq = carrier;
rv.bit_time = bitrate;
rv.command = commands;
rv.buttonDelay = Sage.WINDOWS_OS ? 600 : 800;
rv.sequenceDelay = 800;
rv.channelDigits = 3;
return rv;
}
  /**
   * Sends a single named command from the given remote, honoring the remote's
   * inter-key delays. For the Linux PVR150 the send is serialized against the IR
   * receiver plugin and always followed by a button-delay sleep.
   *
   * @param remoteName remote to load/use
   * @param cmdName    key to send
   * @param repeats    native repeat count
   * @param sleepAfter whether to sleep buttonDelay ms after sending (non-PVR150 path)
   */
  public synchronized void playCommand(String remoteName, String cmdName, int repeats, boolean sleepAfter)
  {
    if (!ensureRemoteLoaded(remoteName)) return;
    // Respect the delay since the previous IR send before transmitting again.
    long waitNow = baseRemote.buttonDelay + baseRemote.sequenceDelay - (Sage.eventTime() - lastIRTime);
    if (waitNow > 0)
    {
      try
      {
        Thread.sleep(waitNow);
      } catch(Exception e){}
    }
    if (!Sage.WINDOWS_OS && devFilename.endsWith("PVR150Tuner.so") && UIManager.getLocalUI() != null)
    {
      // Sync the PVR150 xmt & recv
      if (UIManager.getLocalUI().getRouter() == null)
      {
        playCommand(baseRemote, cmdName, repeats);
        // if (sleepAfter) // We get I2C failures if we don't wait at least 350 msec after a send
        {
          try
          {
            Thread.sleep(baseRemote.buttonDelay);
          } catch(Exception e){}
        }
      }
      else
      {
        synchronized (UIManager.getLocalUI().getRouter().getDefaultInputPlugin())
        {
          //System.out.println("PVR150 SyncBlock Enter");
          playCommand(baseRemote, cmdName, repeats);
          // if (sleepAfter) // We get I2C failures if we don't wait at least 350 msec after a send
          {
            try
            {
              Thread.sleep(baseRemote.buttonDelay);
            } catch(Exception e){}
          }
          //System.out.println("PVR150 SyncBlock Exit");
        }
      }
    }
    else
    {
      playCommand(baseRemote, cmdName, repeats);
      if (sleepAfter)
      {
        try
        {
          Thread.sleep(baseRemote.buttonDelay);
        } catch(Exception e){}
      }
    }
  }
private int getRepeatFactor()
{
if (devFilename.toLowerCase().indexOf("uu_irsage") != -1)
return Sage.getInt(USB_IRTUNE_REPEAT_FACTOR, 2);
else
return Sage.getInt(IRTUNE_REPEAT_FACTOR, Sage.LINUX_OS ? 1 : 2);
}
  /** Plays (or queues, when async tuning is on) the tune string for the remote. */
  public void playTuneString(String remoteName, String cmdString)
  {
    playTuneString(remoteName, cmdString, false);
  }
public void playTuneString(String remoteName, String cmdString, boolean forceSynchronous)
{
if (cmdString == null || cmdString.length() == 0) return;
if (!forceSynchronous && asyncTuning && Thread.currentThread() != asyncThread)
{
synchronized (tuneVec)
{
tuneVec.addElement(new Object[] { remoteName, cmdString });
tuneVec.notifyAll();
}
return;
}
synchronized (this)
{
if (Sage.DBG) System.out.println("Playing IR tune command of " + cmdString);
if (!ensureRemoteLoaded(remoteName)) return;
if (canMacroTune())
{
try {
int cmdNum = Integer.parseInt(cmdString);
} catch (Exception e) {
String cmdStringNumeric = cmdString.replaceAll("\\D", ""); // remove all non-digits
cmdString = cmdStringNumeric;
if (Sage.DBG) System.out.println("IR tune command was not all digits; converted to: " + cmdString);
}
try {
macroTune(Integer.parseInt(cmdString));
}
catch (Exception e){
if (Sage.DBG) System.out.println("Exception in playTuneString");
}
}
else
{
// To deal with reinitializing the IR XMT for the 150 on Linux after the receive fails
if (!Sage.WINDOWS_OS && devFilename.endsWith("PVR150Tuner.so"))
{
closeDevice();
openDevice(currPortNum);
// Wait for the init to complete
try{Thread.sleep(Sage.getInt("linux/pvr150_ir_reset_wait", 750));}catch (Exception e){}
}
try
{
// channelDigits corresponds to 'Digits per Channel' in the UI
if (baseRemote.channelDigits > 0)
{
// cmdString may include non-numeric chars, eg. '-' or '.'
int digitCnt = 0;
for (int i = 0; i < cmdString.length(); i++)
{
if (Character.isDigit(cmdString.charAt(i)))
digitCnt++;
}
while (digitCnt < baseRemote.channelDigits)
{
cmdString = "0" + cmdString;
digitCnt++;
}
}
}catch (Exception e){}
if (baseRemote.prefixCmd != null && baseRemote.prefixCmd.length() > 0)
{
playCommand(remoteName, baseRemote.prefixCmd, getRepeatFactor(), true);
long extraPrefixDelay = Sage.getLong(IRTUNE_PREFIX_EXTRA_DELAY, 0);
if (extraPrefixDelay > 0)
{
try{Thread.sleep(extraPrefixDelay);}catch(Exception e){}
}
}
boolean needsConfirm = baseRemote.confirmCmd != null && baseRemote.confirmCmd.length() > 0;
for (int i = 0; i < cmdString.length(); i++)
playCommand(remoteName, "" + cmdString.charAt(i), getRepeatFactor(),
needsConfirm ? true : (i < cmdString.length() - 1));
if (needsConfirm)
playCommand(remoteName, baseRemote.confirmCmd, getRepeatFactor(), false);
lastIRTime = Sage.eventTime();
}
}
}
  /**
   * Blocks until the async tuning queue is empty; the worker thread calls
   * notifyAll on tuneVec whenever it drains the queue.
   */
  public void waitForCompletion()
  {
    synchronized (tuneVec)
    {
      while (!tuneVec.isEmpty())
      {
        try
        {
          tuneVec.wait(5000);
        }
        catch (InterruptedException e){}
      }
    }
  }
  /**
   * Interactively learns a new remote: measures carrier frequency and bit rate as
   * the device requires, then creates and saves the remote definition.
   *
   * @param name desired remote name; non-alphanumeric characters are stripped
   * @return the sanitized name actually used, or null if a remote file with that
   *         name already exists (Windows/Mac)
   */
  public synchronized String addNewRemote(String name)
  {
    name = createValidRemoteName(name);
    if (Sage.WINDOWS_OS || Sage.MAC_OS_X)
    {
      if (new java.io.File(remoteDir2, name + ".ir").isFile()) return null;
    }
    if (Sage.DBG) System.out.println("Creating remote named " + name);
    long carrier=0, bitrate=0;
    if (needCarrierFrequency())
    {
      // Retry until a plausible carrier is measured.
      while (carrier == 0)
      {
        if (Sage.DBG) System.out.println("Hold a remote button down for a while. Scanning for frequency...");
        carrier = findCarrierFrequency();
        if (Sage.DBG) System.out.println("Carrier frequency=" + carrier);
        if (carrier > 100000)
        {
          if (Sage.DBG) System.out.println("BAD CARRIER, do it again!");
          carrier = 0;
        }
      }
    }
    if (needBitrate())
    {
      if (Sage.DBG) System.out.println("Hold a remote button down for a while. Calculating bitrate...");
      bitrate = findBitRate();
      if (Sage.DBG) System.out.println("Bitrate=" + bitrate);
    }
    Remote newRem = createRemote(name, carrier, bitrate, null);
    baseRemote = newRem;
    saveRemotes(new java.io.File(remoteDir2, baseRemote.name + ".ir").toString());
    return name;
  }
private static String createValidRemoteName(String tryMe)
{
int len = tryMe.length();
StringBuffer sb = new StringBuffer(len);
for (int i = 0; i < len; i++)
{
char c = tryMe.charAt(i);
if (Character.isLetterOrDigit(c))
sb.append(c);
}
return sb.toString();
}
  /**
   * Learns a command for the given remote, replacing any existing command with the
   * same name.
   *
   * @return true if the native layer captured a pattern
   */
  public synchronized boolean recordNewCommand(String remoteName, String cmdName)
  {
    if (!ensureRemoteLoaded(remoteName)) return false;
    // If it's already there, remove it so we can reprogram it
    removeCommand(remoteName, cmdName);
    Command cmd = recordCommand(cmdName);
    if (cmd != null)
      addCommand(baseRemote, cmd);
    return (cmd != null);
  }
  /**
   * Makes {@code baseRemote} hold the named remote, loading it if needed. On
   * Windows/Mac the .ir file's first line carries SageTV's extra fields
   * (channelDigits buttonDelay sequenceDelay [confirmCmd|"|"] [prefixCmd]) appended
   * after the native header tokens; elsewhere those fields come from Sage
   * properties. The native device is (re)initialized after a successful load.
   *
   * @return true if baseRemote is now non-null
   */
  private boolean ensureRemoteLoaded(String remoteName)
  {
    if (baseRemote == null || !baseRemote.name.equals(remoteName))
    {
      if (Sage.WINDOWS_OS || Sage.MAC_OS_X)
      {
        java.io.File remFile = new java.io.File(remoteDir, remoteName + ".ir");
        if (!remFile.isFile())
          remFile = new java.io.File(remoteDir2, remoteName + ".ir");
        loadRemotes(remFile.toString());
        java.io.BufferedReader inStream = null;
        try
        {
          // Re-read the first line to parse the SageTV-added fields (the native
          // loader only consumes its own three header tokens).
          inStream = new java.io.BufferedReader(new java.io.FileReader(remFile));
          String str = inStream.readLine();
          if (str != null)
          {
            java.util.StringTokenizer toker = new java.util.StringTokenizer(str, " \t");
            if (toker.countTokens() > 3)
            {
              // Skip the three native header tokens.
              toker.nextToken();
              toker.nextToken();
              toker.nextToken();
              if (toker.hasMoreTokens())
                baseRemote.channelDigits = Integer.parseInt(toker.nextToken());
              if (toker.hasMoreTokens())
                baseRemote.buttonDelay = Integer.parseInt(toker.nextToken());
              if (toker.hasMoreTokens())
                baseRemote.sequenceDelay = Integer.parseInt(toker.nextToken());
              if (toker.hasMoreTokens())
              {
                // "|" is a placeholder meaning "no confirm key" (see saveChanges()).
                baseRemote.confirmCmd = toker.nextToken();
                if ("|".equals(baseRemote.confirmCmd))
                  baseRemote.confirmCmd = null;
              }
              if (toker.hasMoreTokens())
                baseRemote.prefixCmd = toker.nextToken();
            }
          }
        }
        catch (Exception e)
        {
          System.err.println("I/O Error loading remote control data of:" + e);
        }
        finally
        {
          if (inStream != null)
            try{inStream.close();}catch(Exception e){}
        }
        if (baseRemote != null)
        {
          // Enforce sane minimum delays for files lacking these fields.
          if (baseRemote.buttonDelay <= 0)
            baseRemote.buttonDelay = Sage.WINDOWS_OS ? 600 : 800;
          if (baseRemote.sequenceDelay <= 0)
            baseRemote.sequenceDelay = 800;
          initDevice();
        }
      }
      else
      {
        loadRemotes(remoteName);
        if (baseRemote != null)
        {
          baseRemote.channelDigits = Sage.getInt("lirc/remotes/" + remoteName + "/channel_digits", 3);
          baseRemote.buttonDelay = Sage.getInt("lirc/remotes/" + remoteName + "/button_delay", 800);
          baseRemote.sequenceDelay = Sage.getInt("lirc/remotes/" + remoteName + "/sequence_delay", 800);
          baseRemote.confirmCmd = Sage.get("lirc/remotes/" + remoteName + "/confirm_cmd", "");
          baseRemote.prefixCmd = Sage.get("lirc/remotes/" + remoteName + "/prefix_cmd", "");
          initDevice();
        }
      }
    }
    return baseRemote != null;
  }
  // DO NOT MODIFY THE RETURNED DATA STRUCTURE!!
  /** Loads (if needed) and returns the remote's full definition; shared instance. */
  public synchronized Remote getRemoteInfo(String remoteName)
  {
    ensureRemoteLoaded(remoteName);
    return baseRemote;
  }
public synchronized void renameCommand(String remoteName, String oldCmdName, String newCmdName)
{
if (!ensureRemoteLoaded(remoteName)) return;
Command currCmd = baseRemote.command;
while (currCmd != null)
{
if (currCmd.name.equals(oldCmdName))
{
currCmd.name = newCmdName;
break;
}
currCmd = currCmd.next;
}
}
public synchronized void removeCommand(String remoteName, String cmdName)
{
if (!ensureRemoteLoaded(remoteName)) return;
Command currCmd = baseRemote.command;
Command lastCmd = null;
while (currCmd != null)
{
if (currCmd.name.equals(cmdName))
{
if (lastCmd == null)
baseRemote.command = currCmd.next;
else
lastCmd.next = currCmd.next;
break;
}
lastCmd = currCmd;
currCmd = currCmd.next;
}
}
private boolean hasRemoteFileData(String remoteName)
{
java.io.File remFile = new java.io.File(remoteDir, remoteName + ".ir");
if (remFile.isFile() && remFile.length() > 0)
return true;
remFile = new java.io.File(remoteDir2, remoteName + ".ir");
return (remFile.isFile() && remFile.length() > 0);
}
  /**
   * Persists the currently loaded remote. On Windows/Mac the native layer writes
   * the .ir file, then the first line is rewritten to append SageTV's extra fields
   * (channelDigits buttonDelay sequenceDelay [confirmCmd|"|"] [prefixCmd]);
   * elsewhere the extra fields go to Sage properties.
   */
  public synchronized void saveChanges()
  {
    if (baseRemote != null)
    {
      if (Sage.WINDOWS_OS || Sage.MAC_OS_X)
      {
        java.io.File remFile = new java.io.File(remoteDir, baseRemote.name + ".ir");
        if (!remFile.isFile())
          remFile = new java.io.File(remoteDir2, baseRemote.name + ".ir");
        saveRemotes(remFile.toString());
        // Load back the file data, and then rewrite the first line in our format
        java.io.BufferedReader inStream = null;
        java.io.PrintWriter outStream = null;
        try
        {
          inStream = new java.io.BufferedReader(new java.io.FileReader(remFile));
          StringBuffer sb = new StringBuffer();
          sb.append(inStream.readLine());
          sb.append(' ');
          sb.append(baseRemote.channelDigits);
          sb.append(' ');
          sb.append(baseRemote.buttonDelay);
          sb.append(' ');
          sb.append(baseRemote.sequenceDelay);
          if (baseRemote.confirmCmd != null && baseRemote.confirmCmd.length() > 0)
            sb.append(" " + baseRemote.confirmCmd);
          else if (baseRemote.prefixCmd != null && baseRemote.prefixCmd.length() > 0)
            sb.append(" |"); // delimiter to separate prefixCmd
          if (baseRemote.prefixCmd != null && baseRemote.prefixCmd.length() > 0)
            sb.append(" " + baseRemote.prefixCmd);
          sb.append("\r\n");
          // Copy the remainder of the file unchanged after the rewritten header.
          char[] buf = new char[1024];
          int numRead = inStream.read(buf);
          while (numRead != -1)
          {
            sb.append(buf, 0, numRead);
            numRead = inStream.read(buf);
          }
          inStream.close();
          inStream = null;
          outStream = new java.io.PrintWriter(new java.io.BufferedWriter(new java.io.FileWriter(remFile)));
          outStream.print(sb.toString());
        }
        catch (java.io.IOException e)
        {
          System.err.println("I/O Error resaving remote control data of:" + e);
        }
        finally
        {
          if (inStream != null)
            try{inStream.close();}catch(Exception e){}
          if (outStream != null)
            try{outStream.close();}catch(Exception e){}
        }
      }
      else
      {
        Sage.putInt("lirc/remotes/" + baseRemote.name + "/channel_digits", baseRemote.channelDigits);
        Sage.putInt("lirc/remotes/" + baseRemote.name + "/button_delay", baseRemote.buttonDelay);
        Sage.putInt("lirc/remotes/" + baseRemote.name + "/sequence_delay", baseRemote.sequenceDelay);
        Sage.put("lirc/remotes/" + baseRemote.name + "/confirm_cmd", baseRemote.confirmCmd);
        Sage.put("lirc/remotes/" + baseRemote.name + "/prefix_cmd", baseRemote.prefixCmd);
      }
    }
  }
  /** Discards the in-memory remote; the next access reloads it from disk/properties. */
  public synchronized void cancelChanges()
  {
    baseRemote = null;
  }
  /** Deletes the remote's .ir file (Windows/Mac only); read-only files are left alone. */
  public synchronized void removeRemote(String remoteName)
  {
    if (Sage.WINDOWS_OS || Sage.MAC_OS_X)
    {
      // This just erases the file
      if (baseRemote != null && baseRemote.name.equals(remoteName))
        baseRemote = null;
      java.io.File remFile = new java.io.File(remoteDir, remoteName + ".ir");
      if (!remFile.isFile())
        remFile = new java.io.File(remoteDir2, remoteName + ".ir");
      if (remFile.canWrite()) // read-only files are devices that can't be removed
        remFile.delete();
    }
  }
  /** Sets the digits-per-channel on the loaded remote (no-op if none loaded). */
  public synchronized void setChannelDigits(int x)
  {
    if (baseRemote != null)
      baseRemote.channelDigits = x;
  }
  /** Sets the inter-key delay (ms) on the loaded remote (no-op if none loaded). */
  public synchronized void setButtonDelay(int millis)
  {
    if (baseRemote != null)
      baseRemote.buttonDelay = millis;
  }
  /** Sets the inter-sequence delay (ms) on the loaded remote (no-op if none loaded). */
  public synchronized void setSequenceDelay(int millis)
  {
    if (baseRemote != null)
      baseRemote.sequenceDelay = millis;
  }
  /** Sets the confirm key sent after the channel digits (no-op if none loaded). */
  public synchronized void setConfirmKey(String x)
  {
    if (baseRemote != null)
      baseRemote.confirmCmd = x;
  }
  /** Sets the prefix key sent before the channel digits (no-op if none loaded). */
  public synchronized void setPrefixKey(String x)
  {
    if (baseRemote != null)
      baseRemote.prefixCmd = x;
  }
  /** True until goodbye() has been called. */
  public boolean isAlive() { return alive; }
  /** The currently loaded remote, or null if none. */
  public Remote getDefaultRemoteInfo() { return baseRemote; }
  /** Lowest tunable channel number. */
  public int getMinChannel()
  {
    return 1;
  }
public int getMaxChannel()
{
if (baseRemote != null)
if (baseRemote.channelDigits == 0)
return 999;
else return (int)Math.round(Math.pow(10, baseRemote.channelDigits)) - 1;
return 999;
}
  /** Reported name of the native device. */
  public native String deviceName();
  private native long findBitRate(); // learn mode: measure the remote's bit timing
  private native long findCarrierFrequency(); // learn mode: measure the carrier frequency
  private native void initDevice(); // init before playback
  private native void loadRemotes(String filename); // populates baseRemote; null loads the whole remote list (names only)
  private native boolean needBitrate(); // whether this device requires a measured bit rate
  private native boolean needCarrierFrequency(); // whether this device requires a measured carrier
public synchronized boolean openPort(int portNum)
{
currPortNum = portNum;
if (!alive)
initialize();
if (!Sage.WINDOWS_OS && devFilename.endsWith("PVR150Tuner.so") && UIManager.getLocalUI() != null)
{
if (UIManager.getLocalUI().getRouter() == null)
{
boolean openD = openDevice(portNum);
if (!openD)
{
if (Sage.DBG) System.out.println("Failed opening IR port " + portNum + ". Darn!");
return false;
}
}
else
{
// Sync the PVR150 xmt & recv
synchronized (UIManager.getLocalUI().getRouter().getDefaultInputPlugin())
{
boolean openD = openDevice(portNum);
if (!openD)
{
if (Sage.DBG) System.out.println("Failed opening IR port " + portNum + ". Darn!");
return false;
}
}
}
if (Sage.DBG) System.out.println("SUCCESSFULLY opened IRTuner on port " + portNum);
return true;
}
boolean openD = openDevice(portNum);
if (!openD)
{
if (Sage.DBG) System.out.println("Failed opening COM port " + portNum + ". Trying again!");
closeDevice();
openD = openDevice(portNum);
if (!openD)
{
if (Sage.DBG) System.out.println("Failed opening COM port " + portNum + ". Darn!");
return false;
}
}
if (Sage.DBG) System.out.println("SUCCESSFULLY opened IRTuner on port " + portNum);
return true;
}
  // --- Native bridge (continued) ---
  private native boolean openDevice(int portNum);  // opens the transmitter on the given port; false on failure
  private native void playCommand(Remote theRemote, String cmdName, int repeat);  // transmits a named command
  private native Command recordCommand(String commandName);  // presumably learns a command from the receiver; verify
  private native void saveRemotes(String filename);  // persists remote definitions to the given file
  private native void init0();      // native-layer startup -- presumably paired with goodbye0(); verify
  private native void goodbye0();   // native-layer shutdown
  private native boolean canMacroTune();      // whether the device supports direct multi-digit tuning
  private native void macroTune(int number);  // tunes directly to the given channel number
  // --- Instance state ---
  private String devFilename;        // path of the native tuner plugin library (e.g. ...PVR150Tuner.so)
  private Remote baseRemote;         // working copy of the remote being viewed/edited; null when none is loaded
  private long nativePort;           // native-side handle for the opened port -- TODO confirm exact meaning
  private long nativeDllHandle;      // native-side handle for the loaded plugin library -- TODO confirm
  private java.io.File remoteDir;    // primary directory holding saved ".ir" remote definition files
  private java.io.File remoteDir2;   // fallback directory searched when a remote file is absent from remoteDir
  private java.util.Vector tuneVec;  // presumably a queue of pending tune requests consumed by asyncThread; verify
  private boolean asyncTuning;       // whether tuning is performed on the background thread
  private Thread asyncThread;        // background thread used when asyncTuning is enabled
  private int currPortNum;           // port number passed to the most recent openPort() call
  private long lastIRTime;           // timestamp of the last IR transmission -- TODO confirm units (millis?)
  private long globalPreroll;        // NOTE(review): looks like a delay applied before sending commands -- verify
  private boolean alive;             // native-layer readiness flag; openPort() runs initialize() when false
}
|
googleapis/google-api-java-client-services | 35,693 | clients/google-api-services-cloudtrace/v1/1.26.0/com/google/api/services/cloudtrace/v1/CloudTrace.java | /*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.cloudtrace.v1;
/**
* Service definition for CloudTrace (v1).
*
* <p>
* Sends application trace data to Stackdriver Trace for viewing. Trace data is collected for all App Engine applications by default. Trace data from other applications can be provided using this API. This library is used to interact with the Trace API directly. If you are looking to instrument your application for Stackdriver Trace, we recommend using OpenCensus.
* </p>
*
* <p>
* For more information about this service, see the
* <a href="https://cloud.google.com/trace" target="_blank">API Documentation</a>
* </p>
*
* <p>
* This service uses {@link CloudTraceRequestInitializer} to initialize global parameters via its
* {@link Builder}.
* </p>
*
* @since 1.3
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public class CloudTrace extends com.google.api.client.googleapis.services.json.AbstractGoogleJsonClient {
  // Note: Leave this static initializer at the top of the file.
  // Fails fast at class-load time if the runtime google-api-client is older
  // than the minimum version this generated library was built against.
  static {
    com.google.api.client.util.Preconditions.checkState(
        com.google.api.client.googleapis.GoogleUtils.MAJOR_VERSION == 1 &&
        com.google.api.client.googleapis.GoogleUtils.MINOR_VERSION >= 15,
        "You are currently running with version %s of google-api-client. " +
        "You need at least version 1.15 of google-api-client to run version " +
        "1.26.0 of the Stackdriver Trace API library.", com.google.api.client.googleapis.GoogleUtils.VERSION);
  }
  /**
   * The default encoded root URL of the service. This is determined when the library is generated
   * and normally should not be changed.
   *
   * @since 1.7
   */
  public static final String DEFAULT_ROOT_URL = "https://cloudtrace.googleapis.com/";
  /**
   * The default encoded service path of the service. This is determined when the library is
   * generated and normally should not be changed.
   *
   * @since 1.7
   */
  public static final String DEFAULT_SERVICE_PATH = "";
  /**
   * The default encoded batch path of the service. This is determined when the library is
   * generated and normally should not be changed.
   *
   * @since 1.23
   */
  public static final String DEFAULT_BATCH_PATH = "batch";
  /**
   * The default encoded base URL of the service. This is determined when the library is generated
   * and normally should not be changed. (Computed as {@code DEFAULT_ROOT_URL + DEFAULT_SERVICE_PATH}.)
   */
  public static final String DEFAULT_BASE_URL = DEFAULT_ROOT_URL + DEFAULT_SERVICE_PATH;
  /**
   * Constructor.
   *
   * <p>
   * Use {@link Builder} if you need to specify any of the optional parameters.
   * </p>
   *
   * @param transport HTTP transport, which should normally be:
   *        <ul>
   *          <li>Google App Engine:
   *          {@code com.google.api.client.extensions.appengine.http.UrlFetchTransport}</li>
   *          <li>Android: {@code newCompatibleTransport} from
   *          {@code com.google.api.client.extensions.android.http.AndroidHttp}</li>
   *          <li>Java: {@link com.google.api.client.googleapis.javanet.GoogleNetHttpTransport#newTrustedTransport()}
   *          </li>
   *        </ul>
   * @param jsonFactory JSON factory, which may be:
   *        <ul>
   *          <li>Jackson: {@code com.google.api.client.json.jackson2.JacksonFactory}</li>
   *          <li>Google GSON: {@code com.google.api.client.json.gson.GsonFactory}</li>
   *          <li>Android Honeycomb or higher:
   *          {@code com.google.api.client.extensions.android.json.AndroidJsonFactory}</li>
   *        </ul>
   * @param httpRequestInitializer HTTP request initializer or {@code null} for none
   * @since 1.7
   */
  public CloudTrace(com.google.api.client.http.HttpTransport transport, com.google.api.client.json.JsonFactory jsonFactory,
      com.google.api.client.http.HttpRequestInitializer httpRequestInitializer) {
    // Delegates to the Builder-based constructor using default service settings.
    this(new Builder(transport, jsonFactory, httpRequestInitializer));
  }
  /**
   * @param builder builder carrying the transport, JSON factory, and URL settings for this client
   */
  CloudTrace(Builder builder) {
    super(builder);
  }
  @Override
  protected void initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest<?> httpClientRequest) throws java.io.IOException {
    // Hook invoked for every request this client creates; applies client-wide initialization.
    super.initialize(httpClientRequest);
  }
  /**
   * An accessor for creating requests from the Projects collection.
   *
   * <p>The typical use is:</p>
   * <pre>
   *   {@code CloudTrace cloudtrace = new CloudTrace(...);}
   *   {@code CloudTrace.Projects.List request = cloudtrace.projects().list(parameters ...)}
   * </pre>
   *
   * @return the resource collection
   */
  public Projects projects() {
    // A fresh accessor instance is cheap; requests themselves carry the state.
    return new Projects();
  }
/**
* The "projects" collection of methods.
*/
public class Projects {
    /**
     * Sends new traces to Stackdriver Trace or updates existing traces. If the ID of a trace that you
     * send matches that of an existing trace, any fields in the existing trace and its spans are
     * overwritten by the provided values, and any new fields provided are merged with the existing
     * trace data. If the ID does not match, a new trace is created.
     *
     * Create a request for the method "projects.patchTraces".
     *
     * This request holds the parameters needed by the cloudtrace server. After setting any optional
     * parameters, call the {@link PatchTraces#execute()} method to invoke the remote operation.
     *
     * @param projectId ID of the Cloud project where the trace data is stored.
     * @param content the {@link com.google.api.services.cloudtrace.v1.model.Traces}
     * @return the request
     */
    public PatchTraces patchTraces(java.lang.String projectId, com.google.api.services.cloudtrace.v1.model.Traces content) throws java.io.IOException {
      PatchTraces result = new PatchTraces(projectId, content);
      // Apply client-wide request initialization before handing the request to the caller.
      initialize(result);
      return result;
    }
    // NOTE(review): generated code (see file header) -- prefer regenerating over hand-editing.
    public class PatchTraces extends CloudTraceRequest<com.google.api.services.cloudtrace.v1.model.Empty> {
      private static final String REST_PATH = "v1/projects/{projectId}/traces";
      /**
       * Sends new traces to Stackdriver Trace or updates existing traces. If the ID of a trace that you
       * send matches that of an existing trace, any fields in the existing trace and its spans are
       * overwritten by the provided values, and any new fields provided are merged with the existing
       * trace data. If the ID does not match, a new trace is created.
       *
       * Create a request for the method "projects.patchTraces".
       *
       * This request holds the parameters needed by the cloudtrace server. After setting any
       * optional parameters, call the {@link PatchTraces#execute()} method to invoke the remote
       * operation. <p> {@link
       * PatchTraces#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)}
       * must be called to initialize this instance immediately after invoking the constructor. </p>
       *
       * @param projectId ID of the Cloud project where the trace data is stored.
       * @param content the {@link com.google.api.services.cloudtrace.v1.model.Traces}
       * @since 1.13
       */
      protected PatchTraces(java.lang.String projectId, com.google.api.services.cloudtrace.v1.model.Traces content) {
        super(CloudTrace.this, "PATCH", REST_PATH, content, com.google.api.services.cloudtrace.v1.model.Empty.class);
        this.projectId = com.google.api.client.util.Preconditions.checkNotNull(projectId, "Required parameter projectId must be specified.");
      }
      // The overrides below only narrow the return type to PatchTraces for fluent chaining.
      @Override
      public PatchTraces set$Xgafv(java.lang.String $Xgafv) {
        return (PatchTraces) super.set$Xgafv($Xgafv);
      }
      @Override
      public PatchTraces setAccessToken(java.lang.String accessToken) {
        return (PatchTraces) super.setAccessToken(accessToken);
      }
      @Override
      public PatchTraces setAlt(java.lang.String alt) {
        return (PatchTraces) super.setAlt(alt);
      }
      @Override
      public PatchTraces setCallback(java.lang.String callback) {
        return (PatchTraces) super.setCallback(callback);
      }
      @Override
      public PatchTraces setFields(java.lang.String fields) {
        return (PatchTraces) super.setFields(fields);
      }
      @Override
      public PatchTraces setKey(java.lang.String key) {
        return (PatchTraces) super.setKey(key);
      }
      @Override
      public PatchTraces setOauthToken(java.lang.String oauthToken) {
        return (PatchTraces) super.setOauthToken(oauthToken);
      }
      @Override
      public PatchTraces setPrettyPrint(java.lang.Boolean prettyPrint) {
        return (PatchTraces) super.setPrettyPrint(prettyPrint);
      }
      @Override
      public PatchTraces setQuotaUser(java.lang.String quotaUser) {
        return (PatchTraces) super.setQuotaUser(quotaUser);
      }
      @Override
      public PatchTraces setUploadType(java.lang.String uploadType) {
        return (PatchTraces) super.setUploadType(uploadType);
      }
      @Override
      public PatchTraces setUploadProtocol(java.lang.String uploadProtocol) {
        return (PatchTraces) super.setUploadProtocol(uploadProtocol);
      }
      /** ID of the Cloud project where the trace data is stored. */
      @com.google.api.client.util.Key
      private java.lang.String projectId;
      /** ID of the Cloud project where the trace data is stored.
       */
      public java.lang.String getProjectId() {
        return projectId;
      }
      /** ID of the Cloud project where the trace data is stored. */
      public PatchTraces setProjectId(java.lang.String projectId) {
        this.projectId = projectId;
        return this;
      }
      @Override
      public PatchTraces set(String parameterName, Object value) {
        return (PatchTraces) super.set(parameterName, value);
      }
    }
    /**
     * An accessor for creating requests from the Traces collection.
     *
     * <p>The typical use is:</p>
     * <pre>
     *   {@code CloudTrace cloudtrace = new CloudTrace(...);}
     *   {@code CloudTrace.Traces.List request = cloudtrace.traces().list(parameters ...)}
     * </pre>
     *
     * @return the resource collection
     */
    public Traces traces() {
      // A fresh accessor instance is cheap; requests themselves carry the state.
      return new Traces();
    }
/**
* The "traces" collection of methods.
*/
public class Traces {
      /**
       * Gets a single trace by its ID.
       *
       * Create a request for the method "traces.get".
       *
       * This request holds the parameters needed by the cloudtrace server. After setting any optional
       * parameters, call the {@link Get#execute()} method to invoke the remote operation.
       *
       * @param projectId ID of the Cloud project where the trace data is stored.
       * @param traceId ID of the trace to return.
       * @return the request
       */
      public Get get(java.lang.String projectId, java.lang.String traceId) throws java.io.IOException {
        Get result = new Get(projectId, traceId);
        // Apply client-wide request initialization before handing the request to the caller.
        initialize(result);
        return result;
      }
      // NOTE(review): generated code (see file header) -- prefer regenerating over hand-editing.
      public class Get extends CloudTraceRequest<com.google.api.services.cloudtrace.v1.model.Trace> {
        private static final String REST_PATH = "v1/projects/{projectId}/traces/{traceId}";
        /**
         * Gets a single trace by its ID.
         *
         * Create a request for the method "traces.get".
         *
         * This request holds the parameters needed by the cloudtrace server. After setting any
         * optional parameters, call the {@link Get#execute()} method to invoke the remote operation. <p>
         * {@link Get#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)}
         * must be called to initialize this instance immediately after invoking the constructor. </p>
         *
         * @param projectId ID of the Cloud project where the trace data is stored.
         * @param traceId ID of the trace to return.
         * @since 1.13
         */
        protected Get(java.lang.String projectId, java.lang.String traceId) {
          super(CloudTrace.this, "GET", REST_PATH, null, com.google.api.services.cloudtrace.v1.model.Trace.class);
          this.projectId = com.google.api.client.util.Preconditions.checkNotNull(projectId, "Required parameter projectId must be specified.");
          this.traceId = com.google.api.client.util.Preconditions.checkNotNull(traceId, "Required parameter traceId must be specified.");
        }
        @Override
        public com.google.api.client.http.HttpResponse executeUsingHead() throws java.io.IOException {
          return super.executeUsingHead();
        }
        @Override
        public com.google.api.client.http.HttpRequest buildHttpRequestUsingHead() throws java.io.IOException {
          return super.buildHttpRequestUsingHead();
        }
        // The overrides below only narrow the return type to Get for fluent chaining.
        @Override
        public Get set$Xgafv(java.lang.String $Xgafv) {
          return (Get) super.set$Xgafv($Xgafv);
        }
        @Override
        public Get setAccessToken(java.lang.String accessToken) {
          return (Get) super.setAccessToken(accessToken);
        }
        @Override
        public Get setAlt(java.lang.String alt) {
          return (Get) super.setAlt(alt);
        }
        @Override
        public Get setCallback(java.lang.String callback) {
          return (Get) super.setCallback(callback);
        }
        @Override
        public Get setFields(java.lang.String fields) {
          return (Get) super.setFields(fields);
        }
        @Override
        public Get setKey(java.lang.String key) {
          return (Get) super.setKey(key);
        }
        @Override
        public Get setOauthToken(java.lang.String oauthToken) {
          return (Get) super.setOauthToken(oauthToken);
        }
        @Override
        public Get setPrettyPrint(java.lang.Boolean prettyPrint) {
          return (Get) super.setPrettyPrint(prettyPrint);
        }
        @Override
        public Get setQuotaUser(java.lang.String quotaUser) {
          return (Get) super.setQuotaUser(quotaUser);
        }
        @Override
        public Get setUploadType(java.lang.String uploadType) {
          return (Get) super.setUploadType(uploadType);
        }
        @Override
        public Get setUploadProtocol(java.lang.String uploadProtocol) {
          return (Get) super.setUploadProtocol(uploadProtocol);
        }
        /** ID of the Cloud project where the trace data is stored. */
        @com.google.api.client.util.Key
        private java.lang.String projectId;
        /** ID of the Cloud project where the trace data is stored.
         */
        public java.lang.String getProjectId() {
          return projectId;
        }
        /** ID of the Cloud project where the trace data is stored. */
        public Get setProjectId(java.lang.String projectId) {
          this.projectId = projectId;
          return this;
        }
        /** ID of the trace to return. */
        @com.google.api.client.util.Key
        private java.lang.String traceId;
        /** ID of the trace to return.
         */
        public java.lang.String getTraceId() {
          return traceId;
        }
        /** ID of the trace to return. */
        public Get setTraceId(java.lang.String traceId) {
          this.traceId = traceId;
          return this;
        }
        @Override
        public Get set(String parameterName, Object value) {
          return (Get) super.set(parameterName, value);
        }
      }
      /**
       * Returns a list of traces that match the specified filter conditions.
       *
       * Create a request for the method "traces.list".
       *
       * This request holds the parameters needed by the cloudtrace server. After setting any optional
       * parameters, call the {@link List#execute()} method to invoke the remote operation.
       *
       * @param projectId ID of the Cloud project where the trace data is stored.
       * @return the request
       */
      public List list(java.lang.String projectId) throws java.io.IOException {
        List result = new List(projectId);
        // Apply client-wide request initialization before handing the request to the caller.
        initialize(result);
        return result;
      }
      // NOTE(review): generated code (see file header) -- prefer regenerating over hand-editing.
      public class List extends CloudTraceRequest<com.google.api.services.cloudtrace.v1.model.ListTracesResponse> {
        private static final String REST_PATH = "v1/projects/{projectId}/traces";
        /**
         * Returns a list of traces that match the specified filter conditions.
         *
         * Create a request for the method "traces.list".
         *
         * This request holds the parameters needed by the cloudtrace server. After setting any
         * optional parameters, call the {@link List#execute()} method to invoke the remote operation. <p>
         * {@link List#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)}
         * must be called to initialize this instance immediately after invoking the constructor. </p>
         *
         * @param projectId ID of the Cloud project where the trace data is stored.
         * @since 1.13
         */
        protected List(java.lang.String projectId) {
          super(CloudTrace.this, "GET", REST_PATH, null, com.google.api.services.cloudtrace.v1.model.ListTracesResponse.class);
          this.projectId = com.google.api.client.util.Preconditions.checkNotNull(projectId, "Required parameter projectId must be specified.");
        }
        @Override
        public com.google.api.client.http.HttpResponse executeUsingHead() throws java.io.IOException {
          return super.executeUsingHead();
        }
        @Override
        public com.google.api.client.http.HttpRequest buildHttpRequestUsingHead() throws java.io.IOException {
          return super.buildHttpRequestUsingHead();
        }
        // The overrides below only narrow the return type to List for fluent chaining.
        @Override
        public List set$Xgafv(java.lang.String $Xgafv) {
          return (List) super.set$Xgafv($Xgafv);
        }
        @Override
        public List setAccessToken(java.lang.String accessToken) {
          return (List) super.setAccessToken(accessToken);
        }
        @Override
        public List setAlt(java.lang.String alt) {
          return (List) super.setAlt(alt);
        }
        @Override
        public List setCallback(java.lang.String callback) {
          return (List) super.setCallback(callback);
        }
        @Override
        public List setFields(java.lang.String fields) {
          return (List) super.setFields(fields);
        }
        @Override
        public List setKey(java.lang.String key) {
          return (List) super.setKey(key);
        }
        @Override
        public List setOauthToken(java.lang.String oauthToken) {
          return (List) super.setOauthToken(oauthToken);
        }
        @Override
        public List setPrettyPrint(java.lang.Boolean prettyPrint) {
          return (List) super.setPrettyPrint(prettyPrint);
        }
        @Override
        public List setQuotaUser(java.lang.String quotaUser) {
          return (List) super.setQuotaUser(quotaUser);
        }
        @Override
        public List setUploadType(java.lang.String uploadType) {
          return (List) super.setUploadType(uploadType);
        }
        @Override
        public List setUploadProtocol(java.lang.String uploadProtocol) {
          return (List) super.setUploadProtocol(uploadProtocol);
        }
        /** ID of the Cloud project where the trace data is stored. */
        @com.google.api.client.util.Key
        private java.lang.String projectId;
        /** ID of the Cloud project where the trace data is stored.
         */
        public java.lang.String getProjectId() {
          return projectId;
        }
        /** ID of the Cloud project where the trace data is stored. */
        public List setProjectId(java.lang.String projectId) {
          this.projectId = projectId;
          return this;
        }
        /**
         * End of the time interval (inclusive) during which the trace data was collected from the
         * application.
         */
        @com.google.api.client.util.Key
        private String endTime;
        /** End of the time interval (inclusive) during which the trace data was collected from the
         application.
         */
        public String getEndTime() {
          return endTime;
        }
        /**
         * End of the time interval (inclusive) during which the trace data was collected from the
         * application.
         */
        public List setEndTime(String endTime) {
          this.endTime = endTime;
          return this;
        }
        /**
         * An optional filter against labels for the request.
         *
         * By default, searches use prefix matching. To specify exact match, prepend a plus symbol
         * (`+`) to the search term. Multiple terms are ANDed. Syntax:
         *
         * *   `root:NAME_PREFIX` or `NAME_PREFIX`: Return traces where any root span starts with
         * `NAME_PREFIX`. *   `+root:NAME` or `+NAME`: Return traces where any root span's name is
         * exactly `NAME`. *   `span:NAME_PREFIX`: Return traces where any span starts with
         * `NAME_PREFIX`. *   `+span:NAME`: Return traces where any span's name is exactly `NAME`. *
         * `latency:DURATION`: Return traces whose overall latency is greater than or equal to
         * `DURATION`. Accepted units are nanoseconds (`ns`), milliseconds (`ms`), and seconds
         * (`s`). Default is `ms`. For example, `latency:24ms` returns traces whose overall latency
         * is greater than or equal to 24 milliseconds. *   `label:LABEL_KEY`: Return all traces
         * containing the specified label key (exact match, case-sensitive) regardless of the
         * key:value pair's value (including empty values). *   `LABEL_KEY:VALUE_PREFIX`: Return all
         * traces containing the specified label key (exact match, case-sensitive) whose value
         * starts with `VALUE_PREFIX`. Both a key and a value must be specified. *
         * `+LABEL_KEY:VALUE`: Return all traces containing a key:value pair exactly matching the
         * specified text. Both a key and a value must be specified. *   `method:VALUE`: Equivalent
         * to `/http/method:VALUE`. *   `url:VALUE`: Equivalent to `/http/url:VALUE`.
         */
        @com.google.api.client.util.Key
        private java.lang.String filter;
        /** An optional filter against labels for the request.
         By default, searches use prefix matching. To specify exact match, prepend a plus symbol (`+`) to
         the search term. Multiple terms are ANDed. Syntax:
         *   `root:NAME_PREFIX` or `NAME_PREFIX`: Return traces where any root span starts with
         `NAME_PREFIX`. *   `+root:NAME` or `+NAME`: Return traces where any root span's name is exactly
         `NAME`. *   `span:NAME_PREFIX`: Return traces where any span starts with `NAME_PREFIX`. *
         `+span:NAME`: Return traces where any span's name is exactly `NAME`. *   `latency:DURATION`: Return
         traces whose overall latency is greater than or equal to `DURATION`. Accepted units are nanoseconds
         (`ns`), milliseconds (`ms`), and seconds (`s`). Default is `ms`. For example, `latency:24ms`
         returns traces whose overall latency is greater than or equal to 24 milliseconds. *
         `label:LABEL_KEY`: Return all traces containing the specified label key (exact match, case-
         sensitive) regardless of the key:value pair's value (including empty values). *
         `LABEL_KEY:VALUE_PREFIX`: Return all traces containing the specified label key (exact match, case-
         sensitive) whose value starts with `VALUE_PREFIX`. Both a key and a value must be specified. *
         `+LABEL_KEY:VALUE`: Return all traces containing a key:value pair exactly matching the specified
         text. Both a key and a value must be specified. *   `method:VALUE`: Equivalent to
         `/http/method:VALUE`. *   `url:VALUE`: Equivalent to `/http/url:VALUE`.
         */
        public java.lang.String getFilter() {
          return filter;
        }
        /**
         * An optional filter against labels for the request.
         *
         * By default, searches use prefix matching. To specify exact match, prepend a plus symbol
         * (`+`) to the search term. Multiple terms are ANDed. Syntax:
         *
         * *   `root:NAME_PREFIX` or `NAME_PREFIX`: Return traces where any root span starts with
         * `NAME_PREFIX`. *   `+root:NAME` or `+NAME`: Return traces where any root span's name is
         * exactly `NAME`. *   `span:NAME_PREFIX`: Return traces where any span starts with
         * `NAME_PREFIX`. *   `+span:NAME`: Return traces where any span's name is exactly `NAME`. *
         * `latency:DURATION`: Return traces whose overall latency is greater than or equal to
         * `DURATION`. Accepted units are nanoseconds (`ns`), milliseconds (`ms`), and seconds
         * (`s`). Default is `ms`. For example, `latency:24ms` returns traces whose overall latency
         * is greater than or equal to 24 milliseconds. *   `label:LABEL_KEY`: Return all traces
         * containing the specified label key (exact match, case-sensitive) regardless of the
         * key:value pair's value (including empty values). *   `LABEL_KEY:VALUE_PREFIX`: Return all
         * traces containing the specified label key (exact match, case-sensitive) whose value
         * starts with `VALUE_PREFIX`. Both a key and a value must be specified. *
         * `+LABEL_KEY:VALUE`: Return all traces containing a key:value pair exactly matching the
         * specified text. Both a key and a value must be specified. *   `method:VALUE`: Equivalent
         * to `/http/method:VALUE`. *   `url:VALUE`: Equivalent to `/http/url:VALUE`.
         */
        public List setFilter(java.lang.String filter) {
          this.filter = filter;
          return this;
        }
        /**
         * Field used to sort the returned traces. Optional. Can be one of the following:
         *
         * *   `trace_id` *   `name` (`name` field of root span in the trace) *   `duration`
         * (difference between `end_time` and `start_time` fields of the root span) *   `start`
         * (`start_time` field of the root span)
         *
         * Descending order can be specified by appending `desc` to the sort field (for example,
         * `name desc`).
         *
         * Only one sort field is permitted.
         */
        @com.google.api.client.util.Key
        private java.lang.String orderBy;
        /** Field used to sort the returned traces. Optional. Can be one of the following:
         *   `trace_id` *   `name` (`name` field of root span in the trace) *   `duration` (difference
         between `end_time` and `start_time` fields of the root span) *   `start` (`start_time` field of the
         root span)
         Descending order can be specified by appending `desc` to the sort field (for example, `name desc`).
         Only one sort field is permitted.
         */
        public java.lang.String getOrderBy() {
          return orderBy;
        }
        /**
         * Field used to sort the returned traces. Optional. Can be one of the following:
         *
         * *   `trace_id` *   `name` (`name` field of root span in the trace) *   `duration`
         * (difference between `end_time` and `start_time` fields of the root span) *   `start`
         * (`start_time` field of the root span)
         *
         * Descending order can be specified by appending `desc` to the sort field (for example,
         * `name desc`).
         *
         * Only one sort field is permitted.
         */
        public List setOrderBy(java.lang.String orderBy) {
          this.orderBy = orderBy;
          return this;
        }
        /**
         * Maximum number of traces to return. If not specified or <= 0, the implementation selects
         * a reasonable value. The implementation may return fewer traces than the requested page
         * size. Optional.
         */
        @com.google.api.client.util.Key
        private java.lang.Integer pageSize;
        /** Maximum number of traces to return. If not specified or <= 0, the implementation selects a
         reasonable value. The implementation may return fewer traces than the requested page size.
         Optional.
         */
        public java.lang.Integer getPageSize() {
          return pageSize;
        }
        /**
         * Maximum number of traces to return. If not specified or <= 0, the implementation selects
         * a reasonable value. The implementation may return fewer traces than the requested page
         * size. Optional.
         */
        public List setPageSize(java.lang.Integer pageSize) {
          this.pageSize = pageSize;
          return this;
        }
        /**
         * Token identifying the page of results to return. If provided, use the value of the
         * `next_page_token` field from a previous request. Optional.
         */
        @com.google.api.client.util.Key
        private java.lang.String pageToken;
        /** Token identifying the page of results to return. If provided, use the value of the
         `next_page_token` field from a previous request. Optional.
         */
        public java.lang.String getPageToken() {
          return pageToken;
        }
        /**
         * Token identifying the page of results to return. If provided, use the value of the
         * `next_page_token` field from a previous request. Optional.
         */
        public List setPageToken(java.lang.String pageToken) {
          this.pageToken = pageToken;
          return this;
        }
        /**
         * Start of the time interval (inclusive) during which the trace data was collected from the
         * application.
         */
        @com.google.api.client.util.Key
        private String startTime;
        /** Start of the time interval (inclusive) during which the trace data was collected from the
         application.
         */
        public String getStartTime() {
          return startTime;
        }
        /**
         * Start of the time interval (inclusive) during which the trace data was collected from the
         * application.
         */
        public List setStartTime(String startTime) {
          this.startTime = startTime;
          return this;
        }
        /**
         * Type of data returned for traces in the list. Optional. Default is `MINIMAL`.
         */
        @com.google.api.client.util.Key
        private java.lang.String view;
        /** Type of data returned for traces in the list. Optional. Default is `MINIMAL`.
         */
        public java.lang.String getView() {
          return view;
        }
        /**
         * Type of data returned for traces in the list. Optional. Default is `MINIMAL`.
         */
        public List setView(java.lang.String view) {
          this.view = view;
          return this;
        }
        @Override
        public List set(String parameterName, Object value) {
          return (List) super.set(parameterName, value);
        }
      }
}
}
  /**
   * Builder for {@link CloudTrace}.
   *
   * <p>
   * Implementation is not thread-safe.
   * </p>
   *
   * @since 1.3.0
   */
  public static final class Builder extends com.google.api.client.googleapis.services.json.AbstractGoogleJsonClient.Builder {
    /**
     * Returns an instance of a new builder.
     *
     * @param transport HTTP transport, which should normally be:
     *        <ul>
     *          <li>Google App Engine:
     *          {@code com.google.api.client.extensions.appengine.http.UrlFetchTransport}</li>
     *          <li>Android: {@code newCompatibleTransport} from
     *          {@code com.google.api.client.extensions.android.http.AndroidHttp}</li>
     *          <li>Java: {@link com.google.api.client.googleapis.javanet.GoogleNetHttpTransport#newTrustedTransport()}
     *          </li>
     *        </ul>
     * @param jsonFactory JSON factory, which may be:
     *        <ul>
     *          <li>Jackson: {@code com.google.api.client.json.jackson2.JacksonFactory}</li>
     *          <li>Google GSON: {@code com.google.api.client.json.gson.GsonFactory}</li>
     *          <li>Android Honeycomb or higher:
     *          {@code com.google.api.client.extensions.android.json.AndroidJsonFactory}</li>
     *        </ul>
     * @param httpRequestInitializer HTTP request initializer or {@code null} for none
     * @since 1.7
     */
    public Builder(com.google.api.client.http.HttpTransport transport, com.google.api.client.json.JsonFactory jsonFactory,
        com.google.api.client.http.HttpRequestInitializer httpRequestInitializer) {
      super(
          transport,
          jsonFactory,
          DEFAULT_ROOT_URL,
          DEFAULT_SERVICE_PATH,
          httpRequestInitializer,
          false);
      setBatchPath(DEFAULT_BATCH_PATH);
    }
    /** Builds a new instance of {@link CloudTrace}. */
    @Override
    public CloudTrace build() {
      return new CloudTrace(this);
    }
    // The overrides below only narrow the return type to Builder for fluent chaining.
    @Override
    public Builder setRootUrl(String rootUrl) {
      return (Builder) super.setRootUrl(rootUrl);
    }
    @Override
    public Builder setServicePath(String servicePath) {
      return (Builder) super.setServicePath(servicePath);
    }
    @Override
    public Builder setBatchPath(String batchPath) {
      return (Builder) super.setBatchPath(batchPath);
    }
    @Override
    public Builder setHttpRequestInitializer(com.google.api.client.http.HttpRequestInitializer httpRequestInitializer) {
      return (Builder) super.setHttpRequestInitializer(httpRequestInitializer);
    }
    @Override
    public Builder setApplicationName(String applicationName) {
      return (Builder) super.setApplicationName(applicationName);
    }
    @Override
    public Builder setSuppressPatternChecks(boolean suppressPatternChecks) {
      return (Builder) super.setSuppressPatternChecks(suppressPatternChecks);
    }
    @Override
    public Builder setSuppressRequiredParameterChecks(boolean suppressRequiredParameterChecks) {
      return (Builder) super.setSuppressRequiredParameterChecks(suppressRequiredParameterChecks);
    }
    @Override
    public Builder setSuppressAllChecks(boolean suppressAllChecks) {
      return (Builder) super.setSuppressAllChecks(suppressAllChecks);
    }
    /**
     * Set the {@link CloudTraceRequestInitializer}.
     *
     * @since 1.12
     */
    public Builder setCloudTraceRequestInitializer(
        CloudTraceRequestInitializer cloudtraceRequestInitializer) {
      return (Builder) super.setGoogleClientRequestInitializer(cloudtraceRequestInitializer);
    }
    @Override
    public Builder setGoogleClientRequestInitializer(
        com.google.api.client.googleapis.services.GoogleClientRequestInitializer googleClientRequestInitializer) {
      return (Builder) super.setGoogleClientRequestInitializer(googleClientRequestInitializer);
    }
  }
}
|
googleapis/google-api-java-client-services | 35,693 | clients/google-api-services-cloudtrace/v1/1.27.0/com/google/api/services/cloudtrace/v1/CloudTrace.java | /*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.cloudtrace.v1;
/**
* Service definition for CloudTrace (v1).
*
* <p>
* Sends application trace data to Stackdriver Trace for viewing. Trace data is collected for all App Engine applications by default. Trace data from other applications can be provided using this API. This library is used to interact with the Trace API directly. If you are looking to instrument your application for Stackdriver Trace, we recommend using OpenCensus.
* </p>
*
* <p>
* For more information about this service, see the
* <a href="https://cloud.google.com/trace" target="_blank">API Documentation</a>
* </p>
*
* <p>
* This service uses {@link CloudTraceRequestInitializer} to initialize global parameters via its
* {@link Builder}.
* </p>
*
* @since 1.3
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public class CloudTrace extends com.google.api.client.googleapis.services.json.AbstractGoogleJsonClient {
  // Note: Leave this static initializer at the top of the file.
  static {
    // Fail fast at class-load time if the runtime google-api-client is older than the
    // minimum version (1.15) this generated library was built against.
    com.google.api.client.util.Preconditions.checkState(
        com.google.api.client.googleapis.GoogleUtils.MAJOR_VERSION == 1 &&
        com.google.api.client.googleapis.GoogleUtils.MINOR_VERSION >= 15,
        "You are currently running with version %s of google-api-client. " +
        "You need at least version 1.15 of google-api-client to run version " +
        "1.27.0 of the Stackdriver Trace API library.", com.google.api.client.googleapis.GoogleUtils.VERSION);
  }
  /**
   * The default encoded root URL of the service. This is determined when the library is generated
   * and normally should not be changed.
   *
   * @since 1.7
   */
  public static final String DEFAULT_ROOT_URL = "https://cloudtrace.googleapis.com/";
  /**
   * The default encoded service path of the service. This is determined when the library is
   * generated and normally should not be changed.
   *
   * @since 1.7
   */
  public static final String DEFAULT_SERVICE_PATH = "";
  /**
   * The default encoded batch path of the service. This is determined when the library is
   * generated and normally should not be changed.
   *
   * @since 1.23
   */
  public static final String DEFAULT_BATCH_PATH = "batch";
  /**
   * The default encoded base URL of the service. This is determined when the library is generated
   * and normally should not be changed. Root URL + service path; the service path is empty for
   * this API, so this equals {@link #DEFAULT_ROOT_URL}.
   */
  public static final String DEFAULT_BASE_URL = DEFAULT_ROOT_URL + DEFAULT_SERVICE_PATH;
  /**
   * Constructor.
   *
   * <p>
   * Use {@link Builder} if you need to specify any of the optional parameters.
   * </p>
   *
   * @param transport HTTP transport, which should normally be:
   *        <ul>
   *          <li>Google App Engine:
   *          {@code com.google.api.client.extensions.appengine.http.UrlFetchTransport}</li>
   *          <li>Android: {@code newCompatibleTransport} from
   *          {@code com.google.api.client.extensions.android.http.AndroidHttp}</li>
   *          <li>Java: {@link com.google.api.client.googleapis.javanet.GoogleNetHttpTransport#newTrustedTransport()}
   *          </li>
   *        </ul>
   * @param jsonFactory JSON factory, which may be:
   *        <ul>
   *          <li>Jackson: {@code com.google.api.client.json.jackson2.JacksonFactory}</li>
   *          <li>Google GSON: {@code com.google.api.client.json.gson.GsonFactory}</li>
   *          <li>Android Honeycomb or higher:
   *          {@code com.google.api.client.extensions.android.json.AndroidJsonFactory}</li>
   *        </ul>
   * @param httpRequestInitializer HTTP request initializer or {@code null} for none
   * @since 1.7
   */
  public CloudTrace(com.google.api.client.http.HttpTransport transport, com.google.api.client.json.JsonFactory jsonFactory,
      com.google.api.client.http.HttpRequestInitializer httpRequestInitializer) {
    this(new Builder(transport, jsonFactory, httpRequestInitializer)); // delegate to Builder so service defaults are applied
  }
  /**
   * Package-private constructor used by {@link Builder#build()}.
   *
   * @param builder builder holding all client configuration
   */
  CloudTrace(Builder builder) {
    super(builder);
  }
  @Override
  protected void initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest<?> httpClientRequest) throws java.io.IOException {
    // Hook applied to every request created by the collection accessors below;
    // currently just the superclass behavior (client-level request initialization).
    super.initialize(httpClientRequest);
  }
  /**
   * An accessor for creating requests from the Projects collection.
   *
   * <p>The typical use is:</p>
   * <pre>
   *   {@code CloudTrace cloudtrace = new CloudTrace(...);}
   *   {@code CloudTrace.Projects.List request = cloudtrace.projects().list(parameters ...)}
   * </pre>
   *
   * @return the resource collection
   */
  public Projects projects() {
    return new Projects(); // lightweight accessor; a fresh instance per call is intentional
  }
/**
* The "projects" collection of methods.
*/
public class Projects {
    /**
     * Sends new traces to Stackdriver Trace or updates existing traces. If the ID of a trace that you
     * send matches that of an existing trace, any fields in the existing trace and its spans are
     * overwritten by the provided values, and any new fields provided are merged with the existing
     * trace data. If the ID does not match, a new trace is created.
     *
     * Create a request for the method "projects.patchTraces".
     *
     * This request holds the parameters needed by the cloudtrace server. After setting any optional
     * parameters, call the {@link PatchTraces#execute()} method to invoke the remote operation.
     *
     * @param projectId ID of the Cloud project where the trace data is stored.
     * @param content the {@link com.google.api.services.cloudtrace.v1.model.Traces}
     * @return the request
     */
    public PatchTraces patchTraces(java.lang.String projectId, com.google.api.services.cloudtrace.v1.model.Traces content) throws java.io.IOException {
      PatchTraces result = new PatchTraces(projectId, content);
      initialize(result); // apply client-level request initialization before returning
      return result;
    }
    public class PatchTraces extends CloudTraceRequest<com.google.api.services.cloudtrace.v1.model.Empty> {
      // REST template; {projectId} is substituted from the required path parameter below.
      private static final String REST_PATH = "v1/projects/{projectId}/traces";
      /**
       * Sends new traces to Stackdriver Trace or updates existing traces. If the ID of a trace that you
       * send matches that of an existing trace, any fields in the existing trace and its spans are
       * overwritten by the provided values, and any new fields provided are merged with the existing
       * trace data. If the ID does not match, a new trace is created.
       *
       * Create a request for the method "projects.patchTraces".
       *
       * This request holds the parameters needed by the cloudtrace server. After setting any
       * optional parameters, call the {@link PatchTraces#execute()} method to invoke the remote
       * operation. <p> {@link
       * PatchTraces#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)}
       * must be called to initialize this instance immediately after invoking the constructor. </p>
       *
       * @param projectId ID of the Cloud project where the trace data is stored.
       * @param content the {@link com.google.api.services.cloudtrace.v1.model.Traces}
       * @since 1.13
       */
      protected PatchTraces(java.lang.String projectId, com.google.api.services.cloudtrace.v1.model.Traces content) {
        super(CloudTrace.this, "PATCH", REST_PATH, content, com.google.api.services.cloudtrace.v1.model.Empty.class);
        this.projectId = com.google.api.client.util.Preconditions.checkNotNull(projectId, "Required parameter projectId must be specified.");
      }
      // The fluent overrides below only narrow the return type to PatchTraces for chaining.
      @Override
      public PatchTraces set$Xgafv(java.lang.String $Xgafv) {
        return (PatchTraces) super.set$Xgafv($Xgafv);
      }
      @Override
      public PatchTraces setAccessToken(java.lang.String accessToken) {
        return (PatchTraces) super.setAccessToken(accessToken);
      }
      @Override
      public PatchTraces setAlt(java.lang.String alt) {
        return (PatchTraces) super.setAlt(alt);
      }
      @Override
      public PatchTraces setCallback(java.lang.String callback) {
        return (PatchTraces) super.setCallback(callback);
      }
      @Override
      public PatchTraces setFields(java.lang.String fields) {
        return (PatchTraces) super.setFields(fields);
      }
      @Override
      public PatchTraces setKey(java.lang.String key) {
        return (PatchTraces) super.setKey(key);
      }
      @Override
      public PatchTraces setOauthToken(java.lang.String oauthToken) {
        return (PatchTraces) super.setOauthToken(oauthToken);
      }
      @Override
      public PatchTraces setPrettyPrint(java.lang.Boolean prettyPrint) {
        return (PatchTraces) super.setPrettyPrint(prettyPrint);
      }
      @Override
      public PatchTraces setQuotaUser(java.lang.String quotaUser) {
        return (PatchTraces) super.setQuotaUser(quotaUser);
      }
      @Override
      public PatchTraces setUploadType(java.lang.String uploadType) {
        return (PatchTraces) super.setUploadType(uploadType);
      }
      @Override
      public PatchTraces setUploadProtocol(java.lang.String uploadProtocol) {
        return (PatchTraces) super.setUploadProtocol(uploadProtocol);
      }
      /** ID of the Cloud project where the trace data is stored. */
      @com.google.api.client.util.Key
      private java.lang.String projectId;
      /** ID of the Cloud project where the trace data is stored.
       */
      public java.lang.String getProjectId() {
        return projectId;
      }
      /** ID of the Cloud project where the trace data is stored. */
      public PatchTraces setProjectId(java.lang.String projectId) {
        this.projectId = projectId;
        return this;
      }
      @Override
      public PatchTraces set(String parameterName, Object value) {
        return (PatchTraces) super.set(parameterName, value);
      }
    }
    /**
     * An accessor for creating requests from the Traces collection.
     *
     * <p>The typical use is:</p>
     * <pre>
     *   {@code CloudTrace cloudtrace = new CloudTrace(...);}
     *   {@code CloudTrace.Traces.List request = cloudtrace.traces().list(parameters ...)}
     * </pre>
     *
     * @return the resource collection
     */
    public Traces traces() {
      return new Traces(); // lightweight accessor; a fresh instance per call is intentional
    }
/**
* The "traces" collection of methods.
*/
public class Traces {
      /**
       * Gets a single trace by its ID.
       *
       * Create a request for the method "traces.get".
       *
       * This request holds the parameters needed by the cloudtrace server. After setting any optional
       * parameters, call the {@link Get#execute()} method to invoke the remote operation.
       *
       * @param projectId ID of the Cloud project where the trace data is stored.
       * @param traceId ID of the trace to return.
       * @return the request
       */
      public Get get(java.lang.String projectId, java.lang.String traceId) throws java.io.IOException {
        Get result = new Get(projectId, traceId);
        initialize(result); // apply client-level request initialization before returning
        return result;
      }
      public class Get extends CloudTraceRequest<com.google.api.services.cloudtrace.v1.model.Trace> {
        // REST template; {projectId} and {traceId} are substituted from the path parameters below.
        private static final String REST_PATH = "v1/projects/{projectId}/traces/{traceId}";
        /**
         * Gets a single trace by its ID.
         *
         * Create a request for the method "traces.get".
         *
         * This request holds the parameters needed by the cloudtrace server. After setting any
         * optional parameters, call the {@link Get#execute()} method to invoke the remote operation. <p>
         * {@link Get#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)}
         * must be called to initialize this instance immediately after invoking the constructor. </p>
         *
         * @param projectId ID of the Cloud project where the trace data is stored.
         * @param traceId ID of the trace to return.
         * @since 1.13
         */
        protected Get(java.lang.String projectId, java.lang.String traceId) {
          super(CloudTrace.this, "GET", REST_PATH, null, com.google.api.services.cloudtrace.v1.model.Trace.class);
          this.projectId = com.google.api.client.util.Preconditions.checkNotNull(projectId, "Required parameter projectId must be specified.");
          this.traceId = com.google.api.client.util.Preconditions.checkNotNull(traceId, "Required parameter traceId must be specified.");
        }
        @Override
        public com.google.api.client.http.HttpResponse executeUsingHead() throws java.io.IOException {
          return super.executeUsingHead();
        }
        @Override
        public com.google.api.client.http.HttpRequest buildHttpRequestUsingHead() throws java.io.IOException {
          return super.buildHttpRequestUsingHead();
        }
        // The fluent overrides below only narrow the return type to Get for chaining.
        @Override
        public Get set$Xgafv(java.lang.String $Xgafv) {
          return (Get) super.set$Xgafv($Xgafv);
        }
        @Override
        public Get setAccessToken(java.lang.String accessToken) {
          return (Get) super.setAccessToken(accessToken);
        }
        @Override
        public Get setAlt(java.lang.String alt) {
          return (Get) super.setAlt(alt);
        }
        @Override
        public Get setCallback(java.lang.String callback) {
          return (Get) super.setCallback(callback);
        }
        @Override
        public Get setFields(java.lang.String fields) {
          return (Get) super.setFields(fields);
        }
        @Override
        public Get setKey(java.lang.String key) {
          return (Get) super.setKey(key);
        }
        @Override
        public Get setOauthToken(java.lang.String oauthToken) {
          return (Get) super.setOauthToken(oauthToken);
        }
        @Override
        public Get setPrettyPrint(java.lang.Boolean prettyPrint) {
          return (Get) super.setPrettyPrint(prettyPrint);
        }
        @Override
        public Get setQuotaUser(java.lang.String quotaUser) {
          return (Get) super.setQuotaUser(quotaUser);
        }
        @Override
        public Get setUploadType(java.lang.String uploadType) {
          return (Get) super.setUploadType(uploadType);
        }
        @Override
        public Get setUploadProtocol(java.lang.String uploadProtocol) {
          return (Get) super.setUploadProtocol(uploadProtocol);
        }
        /** ID of the Cloud project where the trace data is stored. */
        @com.google.api.client.util.Key
        private java.lang.String projectId;
        /** ID of the Cloud project where the trace data is stored.
         */
        public java.lang.String getProjectId() {
          return projectId;
        }
        /** ID of the Cloud project where the trace data is stored. */
        public Get setProjectId(java.lang.String projectId) {
          this.projectId = projectId;
          return this;
        }
        /** ID of the trace to return. */
        @com.google.api.client.util.Key
        private java.lang.String traceId;
        /** ID of the trace to return.
         */
        public java.lang.String getTraceId() {
          return traceId;
        }
        /** ID of the trace to return. */
        public Get setTraceId(java.lang.String traceId) {
          this.traceId = traceId;
          return this;
        }
        @Override
        public Get set(String parameterName, Object value) {
          return (Get) super.set(parameterName, value);
        }
      }
      /**
       * Returns a list of traces that match the specified filter conditions.
       *
       * Create a request for the method "traces.list".
       *
       * This request holds the parameters needed by the cloudtrace server. After setting any optional
       * parameters, call the {@link List#execute()} method to invoke the remote operation.
       *
       * @param projectId ID of the Cloud project where the trace data is stored.
       * @return the request
       */
      public List list(java.lang.String projectId) throws java.io.IOException {
        List result = new List(projectId);
        initialize(result); // apply client-level request initialization before returning
        return result;
      }
      public class List extends CloudTraceRequest<com.google.api.services.cloudtrace.v1.model.ListTracesResponse> {
        // REST template; {projectId} is substituted from the required path parameter below.
        private static final String REST_PATH = "v1/projects/{projectId}/traces";
        /**
         * Returns a list of traces that match the specified filter conditions.
         *
         * Create a request for the method "traces.list".
         *
         * This request holds the parameters needed by the cloudtrace server. After setting any
         * optional parameters, call the {@link List#execute()} method to invoke the remote operation. <p>
         * {@link List#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)}
         * must be called to initialize this instance immediately after invoking the constructor. </p>
         *
         * @param projectId ID of the Cloud project where the trace data is stored.
         * @since 1.13
         */
        protected List(java.lang.String projectId) {
          super(CloudTrace.this, "GET", REST_PATH, null, com.google.api.services.cloudtrace.v1.model.ListTracesResponse.class);
          this.projectId = com.google.api.client.util.Preconditions.checkNotNull(projectId, "Required parameter projectId must be specified.");
        }
        @Override
        public com.google.api.client.http.HttpResponse executeUsingHead() throws java.io.IOException {
          return super.executeUsingHead();
        }
        @Override
        public com.google.api.client.http.HttpRequest buildHttpRequestUsingHead() throws java.io.IOException {
          return super.buildHttpRequestUsingHead();
        }
        // The fluent overrides below only narrow the return type to List for chaining.
        @Override
        public List set$Xgafv(java.lang.String $Xgafv) {
          return (List) super.set$Xgafv($Xgafv);
        }
        @Override
        public List setAccessToken(java.lang.String accessToken) {
          return (List) super.setAccessToken(accessToken);
        }
        @Override
        public List setAlt(java.lang.String alt) {
          return (List) super.setAlt(alt);
        }
        @Override
        public List setCallback(java.lang.String callback) {
          return (List) super.setCallback(callback);
        }
        @Override
        public List setFields(java.lang.String fields) {
          return (List) super.setFields(fields);
        }
        @Override
        public List setKey(java.lang.String key) {
          return (List) super.setKey(key);
        }
        @Override
        public List setOauthToken(java.lang.String oauthToken) {
          return (List) super.setOauthToken(oauthToken);
        }
        @Override
        public List setPrettyPrint(java.lang.Boolean prettyPrint) {
          return (List) super.setPrettyPrint(prettyPrint);
        }
        @Override
        public List setQuotaUser(java.lang.String quotaUser) {
          return (List) super.setQuotaUser(quotaUser);
        }
        @Override
        public List setUploadType(java.lang.String uploadType) {
          return (List) super.setUploadType(uploadType);
        }
        @Override
        public List setUploadProtocol(java.lang.String uploadProtocol) {
          return (List) super.setUploadProtocol(uploadProtocol);
        }
        /** ID of the Cloud project where the trace data is stored. */
        @com.google.api.client.util.Key
        private java.lang.String projectId;
        /** ID of the Cloud project where the trace data is stored.
         */
        public java.lang.String getProjectId() {
          return projectId;
        }
        /** ID of the Cloud project where the trace data is stored. */
        public List setProjectId(java.lang.String projectId) {
          this.projectId = projectId;
          return this;
        }
        /**
         * End of the time interval (inclusive) during which the trace data was collected from the
         * application.
         */
        @com.google.api.client.util.Key
        private String endTime;
        /** End of the time interval (inclusive) during which the trace data was collected from the
         * application.
         */
        public String getEndTime() {
          return endTime;
        }
        /**
         * End of the time interval (inclusive) during which the trace data was collected from the
         * application.
         */
        public List setEndTime(String endTime) {
          this.endTime = endTime;
          return this;
        }
        /**
         * An optional filter against labels for the request.
         *
         * By default, searches use prefix matching. To specify exact match, prepend a plus symbol
         * (`+`) to the search term. Multiple terms are ANDed. Syntax:
         *
         * *   `root:NAME_PREFIX` or `NAME_PREFIX`: Return traces where any root span starts with
         * `NAME_PREFIX`. *   `+root:NAME` or `+NAME`: Return traces where any root span's name is
         * exactly `NAME`. *   `span:NAME_PREFIX`: Return traces where any span starts with
         * `NAME_PREFIX`. *   `+span:NAME`: Return traces where any span's name is exactly `NAME`. *
         * `latency:DURATION`: Return traces whose overall latency is greater than or equal to
         * `DURATION`. Accepted units are nanoseconds (`ns`), milliseconds (`ms`), and seconds
         * (`s`). Default is `ms`. For example, `latency:24ms` returns traces whose overall latency
         * is greater than or equal to 24 milliseconds. *   `label:LABEL_KEY`: Return all traces
         * containing the specified label key (exact match, case-sensitive) regardless of the
         * key:value pair's value (including empty values). *   `LABEL_KEY:VALUE_PREFIX`: Return all
         * traces containing the specified label key (exact match, case-sensitive) whose value
         * starts with `VALUE_PREFIX`. Both a key and a value must be specified. *
         * `+LABEL_KEY:VALUE`: Return all traces containing a key:value pair exactly matching the
         * specified text. Both a key and a value must be specified. *   `method:VALUE`: Equivalent
         * to `/http/method:VALUE`. *   `url:VALUE`: Equivalent to `/http/url:VALUE`.
         */
        @com.google.api.client.util.Key
        private java.lang.String filter;
        /** An optional filter against labels for the request. See {@link #setFilter(java.lang.String)}
         * for the full filter syntax.
         */
        public java.lang.String getFilter() {
          return filter;
        }
        /**
         * An optional filter against labels for the request.
         *
         * By default, searches use prefix matching. To specify exact match, prepend a plus symbol
         * (`+`) to the search term. Multiple terms are ANDed. Syntax:
         *
         * *   `root:NAME_PREFIX` or `NAME_PREFIX`: Return traces where any root span starts with
         * `NAME_PREFIX`. *   `+root:NAME` or `+NAME`: Return traces where any root span's name is
         * exactly `NAME`. *   `span:NAME_PREFIX`: Return traces where any span starts with
         * `NAME_PREFIX`. *   `+span:NAME`: Return traces where any span's name is exactly `NAME`. *
         * `latency:DURATION`: Return traces whose overall latency is greater than or equal to
         * `DURATION`. Accepted units are nanoseconds (`ns`), milliseconds (`ms`), and seconds
         * (`s`). Default is `ms`. For example, `latency:24ms` returns traces whose overall latency
         * is greater than or equal to 24 milliseconds. *   `label:LABEL_KEY`: Return all traces
         * containing the specified label key (exact match, case-sensitive) regardless of the
         * key:value pair's value (including empty values). *   `LABEL_KEY:VALUE_PREFIX`: Return all
         * traces containing the specified label key (exact match, case-sensitive) whose value
         * starts with `VALUE_PREFIX`. Both a key and a value must be specified. *
         * `+LABEL_KEY:VALUE`: Return all traces containing a key:value pair exactly matching the
         * specified text. Both a key and a value must be specified. *   `method:VALUE`: Equivalent
         * to `/http/method:VALUE`. *   `url:VALUE`: Equivalent to `/http/url:VALUE`.
         */
        public List setFilter(java.lang.String filter) {
          this.filter = filter;
          return this;
        }
        /**
         * Field used to sort the returned traces. Optional. Can be one of the following:
         *
         * *   `trace_id` *   `name` (`name` field of root span in the trace) *   `duration`
         * (difference between `end_time` and `start_time` fields of the root span) *   `start`
         * (`start_time` field of the root span)
         *
         * Descending order can be specified by appending `desc` to the sort field (for example,
         * `name desc`).
         *
         * Only one sort field is permitted.
         */
        @com.google.api.client.util.Key
        private java.lang.String orderBy;
        /** Field used to sort the returned traces. Optional. See {@link #setOrderBy(java.lang.String)}
         * for the accepted fields and the `desc` modifier. Only one sort field is permitted.
         */
        public java.lang.String getOrderBy() {
          return orderBy;
        }
        /**
         * Field used to sort the returned traces. Optional. Can be one of the following:
         *
         * *   `trace_id` *   `name` (`name` field of root span in the trace) *   `duration`
         * (difference between `end_time` and `start_time` fields of the root span) *   `start`
         * (`start_time` field of the root span)
         *
         * Descending order can be specified by appending `desc` to the sort field (for example,
         * `name desc`).
         *
         * Only one sort field is permitted.
         */
        public List setOrderBy(java.lang.String orderBy) {
          this.orderBy = orderBy;
          return this;
        }
        /**
         * Maximum number of traces to return. If not specified or <= 0, the implementation selects
         * a reasonable value. The implementation may return fewer traces than the requested page
         * size. Optional.
         */
        @com.google.api.client.util.Key
        private java.lang.Integer pageSize;
        /** Maximum number of traces to return. If not specified or <= 0, the implementation selects a
         * reasonable value. The implementation may return fewer traces than the requested page size.
         * Optional.
         */
        public java.lang.Integer getPageSize() {
          return pageSize;
        }
        /**
         * Maximum number of traces to return. If not specified or <= 0, the implementation selects
         * a reasonable value. The implementation may return fewer traces than the requested page
         * size. Optional.
         */
        public List setPageSize(java.lang.Integer pageSize) {
          this.pageSize = pageSize;
          return this;
        }
        /**
         * Token identifying the page of results to return. If provided, use the value of the
         * `next_page_token` field from a previous request. Optional.
         */
        @com.google.api.client.util.Key
        private java.lang.String pageToken;
        /** Token identifying the page of results to return. If provided, use the value of the
         * `next_page_token` field from a previous request. Optional.
         */
        public java.lang.String getPageToken() {
          return pageToken;
        }
        /**
         * Token identifying the page of results to return. If provided, use the value of the
         * `next_page_token` field from a previous request. Optional.
         */
        public List setPageToken(java.lang.String pageToken) {
          this.pageToken = pageToken;
          return this;
        }
        /**
         * Start of the time interval (inclusive) during which the trace data was collected from the
         * application.
         */
        @com.google.api.client.util.Key
        private String startTime;
        /** Start of the time interval (inclusive) during which the trace data was collected from the
         * application.
         */
        public String getStartTime() {
          return startTime;
        }
        /**
         * Start of the time interval (inclusive) during which the trace data was collected from the
         * application.
         */
        public List setStartTime(String startTime) {
          this.startTime = startTime;
          return this;
        }
        /**
         * Type of data returned for traces in the list. Optional. Default is `MINIMAL`.
         */
        @com.google.api.client.util.Key
        private java.lang.String view;
        /** Type of data returned for traces in the list. Optional. Default is `MINIMAL`.
         */
        public java.lang.String getView() {
          return view;
        }
        /**
         * Type of data returned for traces in the list. Optional. Default is `MINIMAL`.
         */
        public List setView(java.lang.String view) {
          this.view = view;
          return this;
        }
        @Override
        public List set(String parameterName, Object value) {
          return (List) super.set(parameterName, value);
        }
      }
}
}
  /**
   * Builder for {@link CloudTrace}.
   *
   * <p>
   * Implementation is not thread-safe.
   * </p>
   *
   * @since 1.3.0
   */
  public static final class Builder extends com.google.api.client.googleapis.services.json.AbstractGoogleJsonClient.Builder {
    /**
     * Returns an instance of a new builder.
     *
     * @param transport HTTP transport, which should normally be:
     *        <ul>
     *          <li>Google App Engine:
     *          {@code com.google.api.client.extensions.appengine.http.UrlFetchTransport}</li>
     *          <li>Android: {@code newCompatibleTransport} from
     *          {@code com.google.api.client.extensions.android.http.AndroidHttp}</li>
     *          <li>Java: {@link com.google.api.client.googleapis.javanet.GoogleNetHttpTransport#newTrustedTransport()}
     *          </li>
     *        </ul>
     * @param jsonFactory JSON factory, which may be:
     *        <ul>
     *          <li>Jackson: {@code com.google.api.client.json.jackson2.JacksonFactory}</li>
     *          <li>Google GSON: {@code com.google.api.client.json.gson.GsonFactory}</li>
     *          <li>Android Honeycomb or higher:
     *          {@code com.google.api.client.extensions.android.json.AndroidJsonFactory}</li>
     *        </ul>
     * @param httpRequestInitializer HTTP request initializer or {@code null} for none
     * @since 1.7
     */
    public Builder(com.google.api.client.http.HttpTransport transport, com.google.api.client.json.JsonFactory jsonFactory,
        com.google.api.client.http.HttpRequestInitializer httpRequestInitializer) {
      super(
          transport,
          jsonFactory,
          DEFAULT_ROOT_URL,
          DEFAULT_SERVICE_PATH,
          httpRequestInitializer,
          false); // trailing 'false' = legacy data wrapper disabled — generated default; confirm against AbstractGoogleJsonClient.Builder
      setBatchPath(DEFAULT_BATCH_PATH); // batch endpoint defaults to "batch"
    }
    /** Builds a new instance of {@link CloudTrace}. */
    @Override
    public CloudTrace build() {
      return new CloudTrace(this); // package-private constructor copies all builder settings
    }
    // The fluent overrides below only narrow the return type to Builder for chaining.
    @Override
    public Builder setRootUrl(String rootUrl) {
      return (Builder) super.setRootUrl(rootUrl);
    }
    @Override
    public Builder setServicePath(String servicePath) {
      return (Builder) super.setServicePath(servicePath);
    }
    @Override
    public Builder setBatchPath(String batchPath) {
      return (Builder) super.setBatchPath(batchPath);
    }
    @Override
    public Builder setHttpRequestInitializer(com.google.api.client.http.HttpRequestInitializer httpRequestInitializer) {
      return (Builder) super.setHttpRequestInitializer(httpRequestInitializer);
    }
    @Override
    public Builder setApplicationName(String applicationName) {
      return (Builder) super.setApplicationName(applicationName);
    }
    @Override
    public Builder setSuppressPatternChecks(boolean suppressPatternChecks) {
      return (Builder) super.setSuppressPatternChecks(suppressPatternChecks);
    }
    @Override
    public Builder setSuppressRequiredParameterChecks(boolean suppressRequiredParameterChecks) {
      return (Builder) super.setSuppressRequiredParameterChecks(suppressRequiredParameterChecks);
    }
    @Override
    public Builder setSuppressAllChecks(boolean suppressAllChecks) {
      return (Builder) super.setSuppressAllChecks(suppressAllChecks);
    }
    /**
     * Sets the {@link CloudTraceRequestInitializer}, applied to every request created by the built
     * client (stored as the underlying Google client request initializer).
     *
     * @since 1.12
     */
    public Builder setCloudTraceRequestInitializer(
        CloudTraceRequestInitializer cloudtraceRequestInitializer) {
      return (Builder) super.setGoogleClientRequestInitializer(cloudtraceRequestInitializer);
    }
    @Override
    public Builder setGoogleClientRequestInitializer(
        com.google.api.client.googleapis.services.GoogleClientRequestInitializer googleClientRequestInitializer) {
      return (Builder) super.setGoogleClientRequestInitializer(googleClientRequestInitializer);
    }
  }
}
|
openjdk/jdk8 | 35,552 | corba/src/share/classes/com/sun/corba/se/impl/encoding/IDLJavaSerializationInputStream.java | /*
* Copyright (c) 2004, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package com.sun.corba.se.impl.encoding;
import java.io.Serializable;
import java.io.ObjectInputStream;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.math.BigDecimal;
import java.util.LinkedList;
import com.sun.corba.se.spi.orb.ORB;
import com.sun.corba.se.spi.ior.IOR;
import com.sun.corba.se.spi.ior.IORFactories;
import com.sun.corba.se.spi.ior.iiop.GIOPVersion;
import com.sun.corba.se.spi.logging.CORBALogDomains;
import com.sun.corba.se.spi.presentation.rmi.StubAdapter;
import com.sun.corba.se.spi.presentation.rmi.PresentationManager;
import com.sun.corba.se.spi.presentation.rmi.PresentationDefaults;
import com.sun.corba.se.impl.util.Utility;
import com.sun.corba.se.impl.orbutil.ORBUtility;
import com.sun.corba.se.impl.corba.TypeCodeImpl;
import com.sun.corba.se.impl.util.RepositoryId;
import com.sun.corba.se.impl.orbutil.ORBConstants;
import com.sun.corba.se.impl.logging.ORBUtilSystemException;
import com.sun.corba.se.impl.protocol.giopmsgheaders.Message;
import org.omg.CORBA.Any;
import org.omg.CORBA.TypeCode;
import org.omg.CORBA.Principal;
import org.omg.CORBA.portable.IDLEntity;
/**
* Implementation class that uses Java serialization for input streams.
* This assumes a GIOP version 1.2 message format.
*
* This class uses a ByteArrayInputStream as the underlying buffer. The
* first 16 bytes are directly read out of the underlying buffer. This allows
* [GIOPHeader (12 bytes) + requestID (4 bytes)] to be read as bytes.
* Subsequent write operations on this output stream object uses
* ObjectInputStream class to read into the buffer. This allows unmarshaling
* complex types and graphs using the ObjectInputStream implementation.
*
* Note, this class assumes a GIOP 1.2 style header. Further, the first
* 12 bytes, that is, the GIOPHeader is read directly from the received
* message, before this stream object is called. So, this class effectively
* reads only the requestID (4 bytes) directly, and uses the
* ObjectInputStream for further unmarshaling.
*
* @author Ram Jeyaraman
*/
public class IDLJavaSerializationInputStream extends CDRInputStreamBase {
private ORB orb;
private int bufSize;
private ByteBuffer buffer;
private byte encodingVersion;
private ObjectInputStream is;
private _ByteArrayInputStream bis;
private BufferManagerRead bufferManager;
// [GIOPHeader(12) + requestID(4)] bytes
private final int directReadLength = Message.GIOPMessageHeaderLength + 4;
// Used for mark / reset operations.
private boolean markOn;
private int peekIndex, peekCount;
private LinkedList markedItemQ = new LinkedList();
protected ORBUtilSystemException wrapper;
    // ByteArrayInputStream subclass that exposes the protected read
    // position, so the stream index can be queried (getIndex) and
    // repositioned (setIndex) and so the 16-byte direct-read phase can
    // detect when to switch to the ObjectInputStream.
    class _ByteArrayInputStream extends ByteArrayInputStream {
        _ByteArrayInputStream(byte[] buf) {
            super(buf);
        }
        // Current read offset into the underlying byte array.
        int getPosition() {
            return this.pos;
        }
        // Repositions the stream; valid range is [0, count].
        void setPosition(int value) {
            if (value < 0 || value > count) {
                throw new IndexOutOfBoundsException();
            }
            this.pos = value;
        }
    }
    // ObjectInputStream subclass that connects unmarshaled RMI-IIOP stubs
    // to this stream's ORB via resolveObject().
    class MarshalObjectInputStream extends ObjectInputStream {
        ORB orb;
        MarshalObjectInputStream(java.io.InputStream out, ORB orb)
                throws IOException {
            super(out);
            this.orb = orb;
            // Run privileged so unprivileged callers can still unmarshal;
            // enableResolveObject requires a SerializablePermission.
            java.security.AccessController.doPrivileged(
                new java.security.PrivilegedAction() {
                    public Object run() {
                        // needs SerializablePermission("enableSubstitution")
                        enableResolveObject(true);
                        return null;
                    }
                }
            );
        }
        /**
         * Connect the Stub to the ORB.
         */
        protected final Object resolveObject(Object obj) throws IOException {
            try {
                if (StubAdapter.isStub(obj)) {
                    StubAdapter.connect(obj, orb);
                }
            } catch (java.rmi.RemoteException re) {
                // Surface connection failures as IOException, preserving
                // the RemoteException as the cause.
                IOException ie = new IOException("resolveObject failed");
                ie.initCause(re);
                throw ie;
            }
            return obj;
        }
    }
    /**
     * Creates an uninitialized stream; init() must be called before use.
     *
     * @param encodingVersion the Java-serialization encoding version
     *        negotiated for this message.
     */
    public IDLJavaSerializationInputStream(byte encodingVersion) {
        super();
        this.encodingVersion = encodingVersion;
    }
    /**
     * Initializes this stream over the received message buffer.
     *
     * NOTE(review): littleEndian is ignored here — presumably because the
     * Java-serialization encoding is byte-order independent and the few
     * directly-read bytes use fixed network byte order; confirm.
     */
    public void init(org.omg.CORBA.ORB orb,
                     ByteBuffer byteBuffer,
                     int bufSize,
                     boolean littleEndian,
                     BufferManagerRead bufferManager) {
        this.orb = (ORB) orb;
        this.bufSize = bufSize;
        this.bufferManager = bufferManager;
        buffer = byteBuffer;
        wrapper =
            ORBUtilSystemException.get((ORB)orb, CORBALogDomains.RPC_ENCODING);
        byte[] buf;
        if (buffer.hasArray()) {
            // Backed by an accessible array: read it in place, no copy.
            buf = buffer.array();
        } else {
            // Direct (or array-less) buffer: copy contents into a heap array.
            buf = new byte[bufSize];
            buffer.get(buf);
        }
        // Note: at this point, the buffer position is zero. The setIndex()
        // method call can be used to set a desired read index.
        bis = new _ByteArrayInputStream(buf);
    }
// Called from read_octet or read_long or read_ulong method.
    // Lazily creates the ObjectInputStream once the first 16 bytes
    // [GIOPHeader(12) + requestID(4)] have been read directly from bis.
    // May only be called once per stream.
    private void initObjectInputStream() {
        //System.out.print(" is ");
        if (is != null) {
            throw wrapper.javaStreamInitFailed();
        }
        try {
            is = new MarshalObjectInputStream(bis, orb);
        } catch (Exception e) {
            throw wrapper.javaStreamInitFailed(e);
        }
    }
// org.omg.CORBA.portable.InputStream
// Primitive types.
    /**
     * Reads a boolean from the stream.
     *
     * mark/reset protocol (shared by all the scalar readers): while a mark
     * is active (markOn), every value read from the ObjectInputStream is
     * also recorded in markedItemQ; values already queued are replayed via
     * peekIndex without being removed.  After reset(), queued values are
     * dequeued before any new data is read from the stream.
     */
    public boolean read_boolean() {
        if (!markOn && !(markedItemQ.isEmpty())) { // dequeue
            return ((Boolean)markedItemQ.removeFirst()).booleanValue();
        }
        if (markOn && !(markedItemQ.isEmpty()) &&
            (peekIndex < peekCount)) { // peek
            return ((Boolean)markedItemQ.get(peekIndex++)).booleanValue();
        }
        try {
            boolean value = is.readBoolean();
            if (markOn) { // enqueue
                markedItemQ.addLast(Boolean.valueOf(value));
            }
            return value;
        } catch (Exception e) {
            throw wrapper.javaSerializationException(e, "read_boolean");
        }
    }
public char read_char() {
if (!markOn && !(markedItemQ.isEmpty())) { // dequeue
return ((Character)markedItemQ.removeFirst()).charValue();
}
if (markOn && !(markedItemQ.isEmpty()) &&
(peekIndex < peekCount)) { // peek
return ((Character)markedItemQ.get(peekIndex++)).charValue();
}
try {
char value = is.readChar();
if (markOn) { // enqueue
markedItemQ.addLast(new Character(value));
}
return value;
} catch (Exception e) {
throw wrapper.javaSerializationException(e, "read_char");
}
}
    // wchar is marshaled identically to char under Java serialization.
    public char read_wchar() {
        return this.read_char();
    }
public byte read_octet() {
// check if size < [ GIOPHeader(12) + requestID(4)] bytes
if (bis.getPosition() < directReadLength) {
byte b = (byte) bis.read();
if (bis.getPosition() == directReadLength) {
initObjectInputStream();
}
return b;
}
if (!markOn && !(markedItemQ.isEmpty())) { // dequeue
return ((Byte)markedItemQ.removeFirst()).byteValue();
}
if (markOn && !(markedItemQ.isEmpty()) &&
(peekIndex < peekCount)) { // peek
return ((Byte)markedItemQ.get(peekIndex++)).byteValue();
}
try {
byte value = is.readByte();
if (markOn) { // enqueue
//markedItemQ.addLast(Byte.valueOf(value)); // only in JDK 1.5
markedItemQ.addLast(new Byte(value));
}
return value;
} catch (Exception e) {
throw wrapper.javaSerializationException(e, "read_octet");
}
}
public short read_short() {
if (!markOn && !(markedItemQ.isEmpty())) { // dequeue
return ((Short)markedItemQ.removeFirst()).shortValue();
}
if (markOn && !(markedItemQ.isEmpty()) &&
(peekIndex < peekCount)) { // peek
return ((Short)markedItemQ.get(peekIndex++)).shortValue();
}
try {
short value = is.readShort();
if (markOn) { // enqueue
markedItemQ.addLast(new Short(value));
}
return value;
} catch (Exception e) {
throw wrapper.javaSerializationException(e, "read_short");
}
}
    // ushort shares the signed-short wire representation.
    public short read_ushort() {
        return this.read_short();
    }
public int read_long() {
// check if size < [ GIOPHeader(12) + requestID(4)] bytes
if (bis.getPosition() < directReadLength) {
// Use big endian (network byte order). This is fixed.
// Both the writer and reader use the same byte order.
int b1 = (bis.read() << 24) & 0xFF000000;
int b2 = (bis.read() << 16) & 0x00FF0000;
int b3 = (bis.read() << 8) & 0x0000FF00;
int b4 = (bis.read() << 0) & 0x000000FF;
if (bis.getPosition() == directReadLength) {
initObjectInputStream();
} else if (bis.getPosition() > directReadLength) {
// Cannot happen. All direct reads are contained
// within the first 16 bytes.
wrapper.javaSerializationException("read_long");
}
return (b1 | b2 | b3 | b4);
}
if (!markOn && !(markedItemQ.isEmpty())) { // dequeue
return ((Integer)markedItemQ.removeFirst()).intValue();
}
if (markOn && !(markedItemQ.isEmpty()) &&
(peekIndex < peekCount)) { // peek
return ((Integer)markedItemQ.get(peekIndex++)).intValue();
}
try {
int value = is.readInt();
if (markOn) { // enqueue
markedItemQ.addLast(new Integer(value));
}
return value;
} catch (Exception e) {
throw wrapper.javaSerializationException(e, "read_long");
}
}
    // ulong shares the signed-int wire representation.
    public int read_ulong() {
        return this.read_long();
    }
public long read_longlong() {
if (!markOn && !(markedItemQ.isEmpty())) { // dequeue
return ((Long)markedItemQ.removeFirst()).longValue();
}
if (markOn && !(markedItemQ.isEmpty()) &&
(peekIndex < peekCount)) { // peek
return ((Long)markedItemQ.get(peekIndex++)).longValue();
}
try {
long value = is.readLong();
if (markOn) { // enqueue
markedItemQ.addLast(new Long(value));
}
return value;
} catch (Exception e) {
throw wrapper.javaSerializationException(e, "read_longlong");
}
}
    // ulonglong shares the signed-long wire representation.
    public long read_ulonglong() {
        return read_longlong();
    }
public float read_float() {
if (!markOn && !(markedItemQ.isEmpty())) { // dequeue
return ((Float)markedItemQ.removeFirst()).floatValue();
}
if (markOn && !(markedItemQ.isEmpty()) &&
(peekIndex < peekCount)) { // peek
return ((Float)markedItemQ.get(peekIndex++)).floatValue();
}
try {
float value = is.readFloat();
if (markOn) { // enqueue
markedItemQ.addLast(new Float(value));
}
return value;
} catch (Exception e) {
throw wrapper.javaSerializationException(e, "read_float");
}
}
public double read_double() {
if (!markOn && !(markedItemQ.isEmpty())) { // dequeue
return ((Double)markedItemQ.removeFirst()).doubleValue();
}
if (markOn && !(markedItemQ.isEmpty()) &&
(peekIndex < peekCount)) { // peek
return ((Double)markedItemQ.get(peekIndex++)).doubleValue();
}
try {
double value = is.readDouble();
if (markOn) { // enqueue
markedItemQ.addLast(new Double(value));
}
return value;
} catch (Exception e) {
throw wrapper.javaSerializationException(e, "read_double");
}
}
// String types.
    /**
     * Reads a string, honoring the mark/reset replay queue.
     * Uses readUTF (modified UTF-8; limited to 65535 encoded bytes).
     */
    public String read_string() {
        if (!markOn && !(markedItemQ.isEmpty())) { // dequeue
            return (String) markedItemQ.removeFirst();
        }
        if (markOn && !(markedItemQ.isEmpty()) &&
            (peekIndex < peekCount)) { // peek
            return (String) markedItemQ.get(peekIndex++);
        }
        try {
            String value = is.readUTF();
            if (markOn) { // enqueue
                markedItemQ.addLast(value);
            }
            return value;
        } catch (Exception e) {
            throw wrapper.javaSerializationException(e, "read_string");
        }
    }
    /**
     * Reads a wstring, honoring the mark/reset replay queue.
     * Unlike read_string, the value is read as a serialized object,
     * so it has no 64K length restriction.
     */
    public String read_wstring() {
        if (!markOn && !(markedItemQ.isEmpty())) { // dequeue
            return (String) markedItemQ.removeFirst();
        }
        if (markOn && !(markedItemQ.isEmpty()) &&
            (peekIndex < peekCount)) { // peek
            return (String) markedItemQ.get(peekIndex++);
        }
        try {
            String value = (String) is.readObject();
            if (markOn) { // enqueue
                markedItemQ.addLast(value);
            }
            return value;
        } catch (Exception e) {
            throw wrapper.javaSerializationException(e, "read_wstring");
        }
    }
// Array types.
public void read_boolean_array(boolean[] value, int offset, int length){
for(int i = 0; i < length; i++) {
value[i+offset] = read_boolean();
}
}
public void read_char_array(char[] value, int offset, int length) {
for(int i=0; i < length; i++) {
value[i+offset] = read_char();
}
}
public void read_wchar_array(char[] value, int offset, int length) {
read_char_array(value, offset, length);
}
public void read_octet_array(byte[] value, int offset, int length) {
for(int i=0; i < length; i++) {
value[i+offset] = read_octet();
}
/* // Cannot use this efficient read due to mark/reset support.
try {
while (length > 0) {
int n = is.read(value, offset, length);
offset += n;
length -= n;
}
} catch (Exception e) {
throw wrapper.javaSerializationException(e, "read_octet_array");
}
*/
}
public void read_short_array(short[] value, int offset, int length) {
for(int i=0; i < length; i++) {
value[i+offset] = read_short();
}
}
public void read_ushort_array(short[] value, int offset, int length) {
read_short_array(value, offset, length);
}
public void read_long_array(int[] value, int offset, int length) {
for(int i=0; i < length; i++) {
value[i+offset] = read_long();
}
}
public void read_ulong_array(int[] value, int offset, int length) {
read_long_array(value, offset, length);
}
public void read_longlong_array(long[] value, int offset, int length) {
for(int i=0; i < length; i++) {
value[i+offset] = read_longlong();
}
}
public void read_ulonglong_array(long[] value, int offset, int length) {
read_longlong_array(value, offset, length);
}
public void read_float_array(float[] value, int offset, int length) {
for(int i=0; i < length; i++) {
value[i+offset] = read_float();
}
}
public void read_double_array(double[] value, int offset, int length) {
for(int i=0; i < length; i++) {
value[i+offset] = read_double();
}
}
// Complex types.
    // Reads an object reference with no expected stub class.
    public org.omg.CORBA.Object read_Object() {
        return read_Object(null);
    }
    // Unmarshals a TypeCode; delegates the wire work to TypeCodeImpl,
    // reading through the parent stream wrapper.
    public TypeCode read_TypeCode() {
        TypeCodeImpl tc = new TypeCodeImpl(orb);
        tc.read_value(parent);
        return tc;
    }
    /**
     * Unmarshals an Any: first the TypeCode, then the value.
     * A MARSHAL failure while reading a tk_value typecode is tolerated
     * (see comments below); any other TypeCode failure is rethrown.
     */
    public Any read_any() {
        Any any = orb.create_any();
        TypeCodeImpl tc = new TypeCodeImpl(orb);
        // read off the typecode
        // REVISIT We could avoid this try-catch if we could peek the typecode
        // kind off this stream and see if it is a tk_value.
        // Looking at the code we know that for tk_value the Any.read_value()
        // below ignores the tc argument anyway (except for the kind field).
        // But still we would need to make sure that the whole typecode,
        // including encapsulations, is read off.
        try {
            tc.read_value(parent);
        } catch (org.omg.CORBA.MARSHAL ex) {
            if (tc.kind().value() != org.omg.CORBA.TCKind._tk_value) {
                throw ex;
            }
            // We can be sure that the whole typecode encapsulation has been
            // read off.
            // NOTE(review): printStackTrace on a tolerated failure looks
            // like debugging residue -- confirm before removing.
            ex.printStackTrace();
        }
        // read off the value of the any.
        any.read_value(parent, tc);
        return any;
    }
    /**
     * Reads a Principal as a length-prefixed octet sequence.
     * Kept only for interface completeness; Principal does not appear
     * on the wire in GIOP 1.2+, per the comment below.
     */
    public Principal read_Principal() {
        // We don't need an implementation for this method, since principal
        // is absent in GIOP version 1.2 or above.
        int len = read_long();
        byte[] pvalue = new byte[len];
        read_octet_array(pvalue,0,len);
        Principal p = new com.sun.corba.se.impl.corba.PrincipalImpl();
        p.name(pvalue);
        return p;
    }
    // Reads a CORBA fixed as a BigDecimal built from its digit string.
    public BigDecimal read_fixed() {
        return new BigDecimal(read_fixed_buffer().toString());
    }
// Each octet contains (up to) two decimal digits. If the fixed type has
// an odd number of decimal digits, then the representation
// begins with the first (most significant) digit.
// Otherwise, this first half-octet is all zero, and the first digit
// is in the second half-octet.
// The sign configuration, in the last half-octet of the representation,
// is 0xD for negative numbers and 0xC for positive and zero values.
    // Reads packed-BCD octets (two digits per octet, per the comment
    // above) until the sign half-octet (0xC positive/zero, 0xD negative)
    // is seen, returning the accumulated digit string.
    // NOTE(review): a zero value short-circuits to the literal "0.0",
    // which includes a decimal point unlike the other return paths --
    // confirm this interacts correctly with read_fixed(digits, scale),
    // which compares the buffer length against 'digits'.
    private StringBuffer read_fixed_buffer() {
        StringBuffer buffer = new StringBuffer(64);
        byte doubleDigit;
        int firstDigit;
        int secondDigit;
        boolean wroteFirstDigit = false;
        boolean more = true;
        while (more) {
            doubleDigit = read_octet();
            firstDigit = (int)((doubleDigit & 0xf0) >> 4);
            secondDigit = (int)(doubleDigit & 0x0f);
            if (wroteFirstDigit || firstDigit != 0) {
                buffer.append(Character.forDigit(firstDigit, 10));
                wroteFirstDigit = true;
            }
            if (secondDigit == 12) {
                // positive number or zero
                if ( ! wroteFirstDigit) {
                    // zero
                    return new StringBuffer("0.0");
                } else {
                    // positive number
                    // done
                }
                more = false;
            } else if (secondDigit == 13) {
                // negative number
                buffer.insert(0, '-');
                more = false;
            } else {
                buffer.append(Character.forDigit(secondDigit, 10));
                wroteFirstDigit = true;
            }
        }
        return buffer;
    }
    /**
     * Reads an object reference (IOR) and converts it to a stub.
     *
     * @param clz expected stub or interface class; null means derive the
     *        stub class from the IOR's repository id.
     * @return null for a nil IOR, otherwise a connected stub.
     */
    public org.omg.CORBA.Object read_Object(java.lang.Class clz) {
        // In any case, we must first read the IOR.
        IOR ior = IORFactories.makeIOR(parent) ;
        if (ior.isNil()) {
            return null;
        }
        PresentationManager.StubFactoryFactory sff =
            ORB.getStubFactoryFactory();
        String codeBase = ior.getProfile().getCodebase();
        PresentationManager.StubFactory stubFactory = null;
        if (clz == null) {
            // Derive the stub from the repository id carried in the IOR.
            RepositoryId rid = RepositoryId.cache.getId(ior.getTypeId() );
            String className = rid.getClassName();
            boolean isIDLInterface = rid.isIDLType();
            if (className == null || className.equals( "" )) {
                stubFactory = null;
            } else {
                try {
                    stubFactory = sff.createStubFactory(className,
                        isIDLInterface, codeBase, (Class) null,
                        (ClassLoader) null);
                } catch (Exception exc) {
                    // Could not create stubFactory, so use null.
                    // XXX stubFactory handling is still too complex:
                    // Can we resolve the stubFactory question once in
                    // a single place?
                    stubFactory = null ;
                }
            }
        } else if (StubAdapter.isStubClass(clz)) {
            // Caller handed us a concrete stub class: use it directly.
            stubFactory = PresentationDefaults.makeStaticStubFactory(clz);
        } else {
            // clz is an interface class
            boolean isIDL = IDLEntity.class.isAssignableFrom(clz);
            stubFactory = sff.createStubFactory(
                clz.getName(), isIDL, codeBase, clz, clz.getClassLoader());
        }
        return CDRInputStream_1_0.internalIORToObject(ior, stubFactory, orb);
    }
    // Returns the ORB this stream was initialized with.
    public org.omg.CORBA.ORB orb() {
        return this.orb;
    }
// org.omg.CORBA_2_3.portable.InputStream
    /**
     * Reads a value type as a serialized object, honoring the
     * mark/reset replay queue (see read_boolean for the protocol).
     */
    public java.io.Serializable read_value() {
        if (!markOn && !(markedItemQ.isEmpty())) { // dequeue
            return (Serializable) markedItemQ.removeFirst();
        }
        if (markOn && !(markedItemQ.isEmpty()) &&
            (peekIndex < peekCount)) { // peek
            return (Serializable) markedItemQ.get(peekIndex++);
        }
        try {
            Serializable value = (java.io.Serializable) is.readObject();
            if (markOn) { // enqueue
                markedItemQ.addLast(value);
            }
            return value;
        } catch (Exception e) {
            throw wrapper.javaSerializationException(e, "read_value");
        }
    }
    // The following overloads ignore their hints (class, helper,
    // repository id, existing value): Java serialization carries the
    // concrete type on the wire, so all delegate to read_value().
    public java.io.Serializable read_value(java.lang.Class clz) {
        return read_value();
    }
    public java.io.Serializable read_value(
            org.omg.CORBA.portable.BoxedValueHelper factory) {
        return read_value();
    }
    public java.io.Serializable read_value(java.lang.String rep_id) {
        return read_value();
    }
    public java.io.Serializable read_value(java.io.Serializable value) {
        return read_value();
    }
    public java.lang.Object read_abstract_interface() {
        return read_abstract_interface(null);
    }
    // An abstract interface is marshaled as a boolean discriminator:
    // true means an object reference follows, false means a value type.
    public java.lang.Object read_abstract_interface(java.lang.Class clz) {
        boolean isObject = read_boolean();
        if (isObject) {
            return read_Object(clz);
        } else {
            return read_value();
        }
    }
// com.sun.corba.se.impl.encoding.MarshalInputStream
    // Endianness flags are a CDR concept; not applicable to the Java
    // serialization encoding.
    public void consumeEndian() {
        throw wrapper.giopVersionError();
    }
    // Current read offset in the underlying byte array.
    public int getPosition() {
        try {
            return bis.getPosition();
        } catch (Exception e) {
            throw wrapper.javaSerializationException(e, "getPosition");
        }
    }
// org.omg.CORBA.DataInputStream
    // DataInputStream aliases for the portable stream operations.
    public java.lang.Object read_Abstract() {
        return read_abstract_interface();
    }
    public java.io.Serializable read_Value() {
        return read_value();
    }
    // Holder-based array readers (org.omg.CORBA.DataInputStream):
    // each unwraps the holder's value array and delegates to the
    // corresponding primitive array reader.
    public void read_any_array (org.omg.CORBA.AnySeqHolder seq,
                                int offset, int length) {
        read_any_array(seq.value, offset, length);
    }
    private final void read_any_array(org.omg.CORBA.Any[] value,
                                      int offset, int length) {
        for(int i=0; i < length; i++) {
            value[i+offset] = read_any();
        }
    }
    public void read_boolean_array (org.omg.CORBA.BooleanSeqHolder seq,
                                    int offset, int length){
        read_boolean_array(seq.value, offset, length);
    }
    public void read_char_array (org.omg.CORBA.CharSeqHolder seq,
                                 int offset, int length){
        read_char_array(seq.value, offset, length);
    }
    public void read_wchar_array (org.omg.CORBA.WCharSeqHolder seq,
                                  int offset, int length){
        read_wchar_array(seq.value, offset, length);
    }
    public void read_octet_array (org.omg.CORBA.OctetSeqHolder seq,
                                  int offset, int length){
        read_octet_array(seq.value, offset, length);
    }
    public void read_short_array (org.omg.CORBA.ShortSeqHolder seq,
                                  int offset, int length){
        read_short_array(seq.value, offset, length);
    }
    public void read_ushort_array (org.omg.CORBA.UShortSeqHolder seq,
                                   int offset, int length){
        read_ushort_array(seq.value, offset, length);
    }
    public void read_long_array (org.omg.CORBA.LongSeqHolder seq,
                                 int offset, int length){
        read_long_array(seq.value, offset, length);
    }
    public void read_ulong_array (org.omg.CORBA.ULongSeqHolder seq,
                                  int offset, int length){
        read_ulong_array(seq.value, offset, length);
    }
    public void read_ulonglong_array (org.omg.CORBA.ULongLongSeqHolder seq,
                                      int offset, int length){
        read_ulonglong_array(seq.value, offset, length);
    }
    public void read_longlong_array (org.omg.CORBA.LongLongSeqHolder seq,
                                     int offset, int length){
        read_longlong_array(seq.value, offset, length);
    }
    public void read_float_array (org.omg.CORBA.FloatSeqHolder seq,
                                  int offset, int length){
        read_float_array(seq.value, offset, length);
    }
    public void read_double_array (org.omg.CORBA.DoubleSeqHolder seq,
                                   int offset, int length){
        read_double_array(seq.value, offset, length);
    }
// org.omg.CORBA.portable.ValueBase
    // Truncatable value-type ids are a CDR concept; unsupported here.
    public String[] _truncatable_ids() {
        throw wrapper.giopVersionError();
    }
// java.io.InputStream
// REVISIT - should we make these throw UnsupportedOperationExceptions?
// Right now, they'll go up to the java.io versions!
// public int read(byte b[]) throws IOException;
// public int read(byte b[], int off, int len) throws IOException
// public long skip(long n) throws IOException;
// public int available() throws IOException;
// public void close() throws IOException;
    /**
     * Starts recording read values so reset() can replay them.
     * Nested marks are rejected, and marking is unsupported until the
     * first 16 directly-read bytes have created the ObjectInputStream.
     * readLimit is ignored -- the replay queue is unbounded.
     */
    public void mark(int readLimit) {
        // Nested mark disallowed.
        // Further, mark is not supported until first 16 bytes are read.
        if (markOn || is == null) {
            throw wrapper.javaSerializationException("mark");
        }
        markOn = true;
        if (!(markedItemQ.isEmpty())) {
            // Items left from a previous mark/reset cycle are replayed
            // first; start peeking from the head of the queue.
            peekIndex = 0;
            peekCount = markedItemQ.size();
        }
        /*
        // Note: only ByteArrayInputStream supports mark/reset.
        if (is == null || is.markSupported() == false) {
            throw wrapper.javaSerializationException("mark");
        }
        is.mark(readLimit);
        */
    }
    /**
     * Ends the active mark; values recorded since mark() remain queued
     * in markedItemQ and will be dequeued by subsequent reads.
     */
    public void reset() {
        markOn = false;
        peekIndex = 0;
        peekCount = 0;
        /*
        // Note: only ByteArrayInputStream supports mark/reset.
        if (is == null || is.markSupported() == false) {
            throw wrapper.javaSerializationException("mark");
        }
        try {
            is.reset();
        } catch (Exception e) {
            throw wrapper.javaSerializationException(e, "reset");
        }
        */
    }
// This should return false so that outside users (people using the JDK)
// don't have any guarantees that mark/reset will work in their
// custom marshaling code. This is necessary since they could do things
// like expect obj1a == obj1b in the following code:
//
// is.mark(10000);
// Object obj1a = is.readObject();
// is.reset();
// Object obj1b = is.readObject();
//
    // NOTE(review): the comment above argues this should return false for
    // outside users, yet the method returns true -- mark()/reset() here
    // only replay primitive reads, not object identity. Confirm intent
    // before changing.
    public boolean markSupported() {
        return true;
    }
// Needed by AnyImpl and ServiceContexts
    /**
     * Creates a duplicate stream positioned at the same index, copying
     * the mark/replay state.
     *
     * NOTE(review): the duplicate's ObjectInputStream ('is') is only
     * created lazily after 16 direct byte reads, yet skipBytes() below
     * dereferences result.is immediately -- looks like it would NPE
     * (wrapped into javaSerializationException) on a fresh dup; confirm
     * against callers before relying on this method.
     */
    public CDRInputStreamBase dup() {
        CDRInputStreamBase result = null ;
        try {
            result = (CDRInputStreamBase) this.getClass().newInstance();
        } catch (Exception e) {
            throw wrapper.couldNotDuplicateCdrInputStream(e);
        }
        // littleEndian is ignored by init; no BufferManagerRead is shared.
        result.init(this.orb, this.buffer, this.bufSize, false, null);
        // Set the buffer position.
        ((IDLJavaSerializationInputStream)result).skipBytes(getPosition());
        // Set mark related data.
        ((IDLJavaSerializationInputStream)result).
            setMarkData(markOn, peekIndex, peekCount,
                        (LinkedList) markedItemQ.clone());
        return result;
    }
// Used exclusively by the dup() method.
    // Used exclusively by the dup() method.
    // Advances the ObjectInputStream by len bytes.
    void skipBytes(int len) {
        try {
            is.skipBytes(len);
        } catch (Exception e) {
            throw wrapper.javaSerializationException(e, "skipBytes");
        }
    }
// Used exclusively by the dup() method.
    // Used exclusively by the dup() method.
    // Installs a copy of the originating stream's mark/replay state.
    void setMarkData(boolean markOn, int peekIndex, int peekCount,
                     LinkedList markedItemQ) {
        this.markOn = markOn;
        this.peekIndex = peekIndex;
        this.peekCount = peekCount;
        this.markedItemQ = markedItemQ;
    }
// Needed by TCUtility
public java.math.BigDecimal read_fixed(short digits, short scale) {
// digits isn't really needed here
StringBuffer buffer = read_fixed_buffer();
if (digits != buffer.length())
throw wrapper.badFixed( new Integer(digits),
new Integer(buffer.length()) ) ;
buffer.insert(digits - scale, '.');
return new BigDecimal(buffer.toString());
}
// Needed by TypeCodeImpl
    // CDR buffer-level operations that have no meaning under the Java
    // serialization encoding throw giopVersionError.
    public boolean isLittleEndian() {
        throw wrapper.giopVersionError();
    }
    // Needed by request and reply messages for GIOP versions >= 1.2 only.
    void setHeaderPadding(boolean headerPadding) {
        // no-op. We don't care about body alignment while using
        // Java serialization. What the GIOP spec states does not apply here.
    }
    // Needed by IIOPInputStream and other subclasses
    public ByteBuffer getByteBuffer() {
        throw wrapper.giopVersionError();
    }
    public void setByteBuffer(ByteBuffer byteBuffer) {
        throw wrapper.giopVersionError();
    }
    public void setByteBufferWithInfo(ByteBufferWithInfo bbwi) {
        throw wrapper.giopVersionError();
    }
    public int getBufferLength() {
        return bufSize;
    }
    public void setBufferLength(int value) {
        // this is redundant, since buffer size was already specified
        // as part of the init call. So, ignore.
    }
    // Index accessors operate on the underlying byte array position.
    public int getIndex() {
        return bis.getPosition();
    }
    public void setIndex(int value) {
        try {
            bis.setPosition(value);
        } catch (IndexOutOfBoundsException e) {
            throw wrapper.javaSerializationException(e, "setIndex");
        }
    }
    // Rebinds this stream to a (possibly different) ORB.
    public void orb(org.omg.CORBA.ORB orb) {
        this.orb = (ORB) orb;
    }
    public BufferManagerRead getBufferManager() {
        return bufferManager;
    }
    // This encoding is only used with GIOP 1.2 messages (see class doc).
    public GIOPVersion getGIOPVersion() {
        return GIOPVersion.V1_2;
    }
    com.sun.org.omg.SendingContext.CodeBase getCodeBase() {
        return parent.getCodeBase();
    }
    // Debug helper: hex/ASCII dump of the whole receive buffer to stdout.
    // NOTE(review): buffer.array() throws for direct buffers; init() falls
    // back to a copied array for those, but this method reads this.buffer
    // directly -- debug-only, so presumably never hit in that mode.
    void printBuffer() {
        byte[] buf = this.buffer.array();
        System.out.println("+++++++ Input Buffer ++++++++");
        System.out.println();
        System.out.println("Current position: " + getPosition());
        System.out.println("Total length : " + this.bufSize);
        System.out.println();
        char[] charBuf = new char[16];
        try {
            for (int i = 0; i < buf.length; i += 16) {
                int j = 0;
                // For every 16 bytes, there is one line
                // of output. First, the hex output of
                // the 16 bytes with each byte separated
                // by a space.
                while (j < 16 && j + i < buf.length) {
                    int k = buf[i + j];
                    if (k < 0)
                        k = 256 + k;
                    String hex = Integer.toHexString(k);
                    if (hex.length() == 1)
                        hex = "0" + hex;
                    System.out.print(hex + " ");
                    j++;
                }
                // Add any extra spaces to align the
                // text column in case we didn't end
                // at 16
                while (j < 16) {
                    System.out.print("   ");
                    j++;
                }
                // Now output the ASCII equivalents. Non-ASCII
                // characters are shown as periods.
                int x = 0;
                while (x < 16 && x + i < buf.length) {
                    if (ORBUtility.isPrintable((char)buf[i + x])) {
                        charBuf[x] = (char) buf[i + x];
                    } else {
                        charBuf[x] = '.';
                    }
                    x++;
                }
                System.out.println(new String(charBuf, 0, x));
            }
        } catch (Throwable t) {
            t.printStackTrace();
        }
        System.out.println("++++++++++++++++++++++++++++++");
    }
    // Alignment is a CDR concept; not applicable to Java serialization.
    void alignOnBoundary(int octetBoundary) {
        throw wrapper.giopVersionError();
    }
    void performORBVersionSpecificInit() {
        // No-op.
    }
    public void resetCodeSetConverters() {
        // No-op. Code set conversion does not apply to this encoding.
    }
    // ValueInputStream -------------------------
    // Custom value chunking is unsupported under Java serialization.
    public void start_value() {
        throw wrapper.giopVersionError();
    }
    public void end_value() {
        throw wrapper.giopVersionError();
    }
}
|
apache/derby | 32,239 | java/org.apache.derby.tests/org/apache/derbyTesting/functionTests/tests/lang/SystemCatalogTest.java | /*
Derby - Class org.apache.derbyTesting.functionTests.tests.lang.SystemCatalogTest
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.apache.derbyTesting.functionTests.tests.lang;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import junit.framework.Test;
import org.apache.derbyTesting.junit.BaseJDBCTestCase;
import org.apache.derbyTesting.junit.JDBC;
import org.apache.derbyTesting.junit.TestConfiguration;
/**
* Tests concerning the system catalogs.
*
* Retaining comment from previous .sql test:
* RESOLVE - add selects from sysdepends when simplified
*
*/
public class SystemCatalogTest extends BaseJDBCTestCase {
	/** Creates a named test case. */
	public SystemCatalogTest(String name) {
		super(name);
	}
	/**
	 * Returns the default (embedded + client) suite, wrapped in a
	 * single-use database decorator -- presumably so objects created
	 * here (e.g. test functions) do not leak into other tests.
	 */
	public static Test suite() {
		Test suite = TestConfiguration.defaultSuite(SystemCatalogTest.class);
		return TestConfiguration.singleUseDatabaseDecorator(suite);
	}
/**
* Test that the user cannot execute any DDL statements on the system tables.
* @throws SQLException
*/
	/**
	 * Test that the user cannot execute any DDL statements on the system tables.
	 * Expected states: X0Y56 for the table drop, 42X62 for the rest --
	 * presumably "operation not allowed on system object"; see the Derby
	 * SQLState reference.
	 * @throws SQLException
	 */
	public void testNoUserDDLOnSystemTables() throws SQLException {
		Statement s = createStatement();
		assertStatementError("X0Y56", s, "drop table sys.systables");
		assertStatementError("42X62", s, "drop index sys.sysaliases_index2");
		assertStatementError("42X62", s, "create index trash on sys.systables(tableid)");
		assertStatementError("42X62", s, "create table sys.usertable(c1 int)");
		assertStatementError("42X62", s, "create view sys.userview as select * from sys.systables");
		assertStatementError("42X62", s, "alter table sys.systables drop column tablename");
		assertStatementError("42X62", s, "alter table sys.systables add column foo int");
		assertStatementError("42X62", s, "alter table sys.systables alter column tablename null");
		assertStatementError("42X62", s, "alter table sys.systables drop primary key");
		s.close();
	}
/**
* Test that the system tables cannot be changed by various DML statements.
*
* @throws SQLException
*/
public void testSystemCatalogsNotUpdatable() throws SQLException{
Connection c = getConnection();
Statement s = c.createStatement();
c.setAutoCommit(false);
try{
s.executeUpdate("delete from sys.systables");
} catch (SQLException e)
{
assertSQLState("42Y25", e);
}
try{
s.executeUpdate("update sys.systables set tablename = tablename || 'trash'");
} catch (SQLException e)
{
assertSQLState("42Y25", e);
}
try{
s.executeUpdate("insert into sys.systables select * from sys.systables");
} catch (SQLException e)
{
assertSQLState("42Y25", e);
}
try{
ResultSet rs = s.executeQuery("select tablename from sys.systables for update of tablename");
} catch (SQLException e)
{
assertSQLState("42Y90", e);
}
c.rollback();
c.setAutoCommit(true);
}
/**
* Test various default store properties for the system tables.
*
* @throws SQLException
*/
	/**
	 * Test various default store properties for the system tables.
	 * Registers two helper SQL functions backed by TestPropertyInfo and
	 * checks the property strings for every system heap and index
	 * conglomerate.
	 *
	 * @throws SQLException
	 */
	public void testSystemCatalogStoreProperties() throws SQLException{
		Statement s = createStatement();
		s.execute("create function gatp(SCH VARCHAR(128), TBL VARCHAR(128)) RETURNS VARCHAR(1000) EXTERNAL NAME 'org.apache.derbyTesting.functionTests.util.TestPropertyInfo.getAllTableProperties' LANGUAGE JAVA PARAMETER STYLE JAVA");
		s.execute("create function gaip(SCH VARCHAR(128), TBL VARCHAR(128)) RETURNS VARCHAR(1000) EXTERNAL NAME 'org.apache.derbyTesting.functionTests.util.TestPropertyInfo.getAllIndexProperties' LANGUAGE JAVA PARAMETER STYLE JAVA");
		// get the properties for the heaps
		ResultSet rs = s.executeQuery("select tablename,gatp('SYS', tablename) from sys.systables order by tablename");
		boolean nonEmptyResultSet = false;
		String tablename = null;
		String sysdummy = "SYSDUMMY1";
		String heapResult = "{ derby.storage.initialPages=1, derby.storage.minimumRecordSize=12, derby.storage.pageReservedSpace=0, derby.storage.pageSize=4096, derby.storage.reusableRecordId=false }";
		while(rs.next()) {
			nonEmptyResultSet = true;
			tablename = rs.getString(1);
			// SYSDUMMY1 is special-cased: it reports no store properties.
			if (tablename.equals(sysdummy)) {
				assertTrue(rs.getString(2).startsWith("{ }"));
			} else {
				assertTrue(rs.getString(2).startsWith(heapResult));
			}
		}
		// Guard against a silently-empty catalog query.
		assertTrue(nonEmptyResultSet);
		rs.close();
		// get the properties for the indexes
		rs = s.executeQuery("select conglomeratename, gaip('SYS', conglomeratename) from sys.sysconglomerates where isindex order by conglomeratename");
		nonEmptyResultSet = false;
		String indexResult = "{ derby.storage.initialPages=1, derby.storage.minimumRecordSize=1, derby.storage.pageReservedSpace=0, derby.storage.pageSize=4096, derby.storage.reusableRecordId=true }";
		while(rs.next()) {
			nonEmptyResultSet = true;
			assertTrue(rs.getString(2).startsWith(indexResult));
		}
		assertTrue(nonEmptyResultSet);
		rs.close();
		s.close();
	}
/**
* Test that each system table has a table type of "S".
*
* @throws SQLException
*/
public void testSystemCatalogTableTypes() throws SQLException {
Statement s = createStatement();
ResultSet rs = s.executeQuery("select TABLENAME, TABLETYPE from sys.systables order by tablename");
boolean nonEmptyResultSet = false;
while(rs.next()) {
nonEmptyResultSet = true;
assertEquals("S", rs.getString(2));
}
assertTrue(nonEmptyResultSet);
rs.close();
s.close();
}
    /**
     * Check that each system table has exactly the expected columns, with
     * the expected column numbers and column data types.
     * <p>
     * The expected rows are ordered by (TABLENAME, COLUMNNAME), matching the
     * ORDER BY of the query below; the third element is the 1-based column
     * position, the fourth the COLUMNDATATYPE rendering.
     *
     * @throws SQLException
     */
    public void testSystemCatalogColumns() throws SQLException {
        // {TABLENAME, COLUMNNAME, COLUMNNUMBER, COLUMNDATATYPE}
        String [][] expected = {
                {"SYSALIASES", "ALIAS", "2", "VARCHAR(128) NOT NULL"},
                {"SYSALIASES", "ALIASID", "1", "CHAR(36) NOT NULL"},
                {"SYSALIASES", "ALIASINFO", "8", "org.apache.derby.catalog.AliasInfo"},
                {"SYSALIASES", "ALIASTYPE", "5", "CHAR(1) NOT NULL"},
                {"SYSALIASES", "JAVACLASSNAME", "4", "LONG VARCHAR NOT NULL"},
                {"SYSALIASES", "NAMESPACE", "6", "CHAR(1) NOT NULL"},
                {"SYSALIASES", "SCHEMAID", "3", "CHAR(36)"},
                {"SYSALIASES", "SPECIFICNAME", "9", "VARCHAR(128) NOT NULL"},
                {"SYSALIASES", "SYSTEMALIAS", "7", "BOOLEAN NOT NULL"},
                {"SYSCHECKS", "CHECKDEFINITION", "2", "LONG VARCHAR NOT NULL"},
                {"SYSCHECKS", "CONSTRAINTID", "1", "CHAR(36) NOT NULL"},
                {"SYSCHECKS", "REFERENCEDCOLUMNS", "3", "org.apache.derby.catalog.ReferencedColumns NOT NULL"},
                {"SYSCOLPERMS", "COLPERMSID", "1", "CHAR(36) NOT NULL"},
                {"SYSCOLPERMS", "COLUMNS", "6", "org.apache.derby.iapi.services.io.FormatableBitSet NOT NULL"},
                {"SYSCOLPERMS", "GRANTEE", "2", "VARCHAR(128) NOT NULL"},
                {"SYSCOLPERMS", "GRANTOR", "3", "VARCHAR(128) NOT NULL"},
                {"SYSCOLPERMS", "TABLEID", "4", "CHAR(36) NOT NULL"},
                {"SYSCOLPERMS", "TYPE", "5", "CHAR(1) NOT NULL"},
                {"SYSCOLUMNS", "AUTOINCREMENTCYCLE", "10", "BOOLEAN"},
                {"SYSCOLUMNS", "AUTOINCREMENTINC", "9", "BIGINT"},
                {"SYSCOLUMNS", "AUTOINCREMENTSTART", "8", "BIGINT"},
                {"SYSCOLUMNS", "AUTOINCREMENTVALUE", "7", "BIGINT"},
                {"SYSCOLUMNS", "COLUMNDATATYPE", "4", "org.apache.derby.catalog.TypeDescriptor NOT NULL"},
                {"SYSCOLUMNS", "COLUMNDEFAULT", "5", "java.io.Serializable"},
                {"SYSCOLUMNS", "COLUMNDEFAULTID", "6", "CHAR(36)"},
                {"SYSCOLUMNS", "COLUMNNAME", "2", "VARCHAR(128) NOT NULL"},
                {"SYSCOLUMNS", "COLUMNNUMBER", "3", "INTEGER NOT NULL"},
                {"SYSCOLUMNS", "REFERENCEID", "1", "CHAR(36) NOT NULL"},
                {"SYSCONGLOMERATES", "CONGLOMERATEID", "8", "CHAR(36) NOT NULL"},
                {"SYSCONGLOMERATES", "CONGLOMERATENAME", "4", "VARCHAR(128)"},
                {"SYSCONGLOMERATES", "CONGLOMERATENUMBER", "3", "BIGINT NOT NULL"},
                {"SYSCONGLOMERATES", "DESCRIPTOR", "6", "org.apache.derby.catalog.IndexDescriptor"},
                {"SYSCONGLOMERATES", "ISCONSTRAINT", "7", "BOOLEAN"},
                {"SYSCONGLOMERATES", "ISINDEX", "5", "BOOLEAN NOT NULL"},
                {"SYSCONGLOMERATES", "SCHEMAID", "1", "CHAR(36) NOT NULL"},
                {"SYSCONGLOMERATES", "TABLEID", "2", "CHAR(36) NOT NULL"},
                {"SYSCONSTRAINTS", "CONSTRAINTID", "1", "CHAR(36) NOT NULL"},
                {"SYSCONSTRAINTS", "CONSTRAINTNAME", "3", "VARCHAR(128) NOT NULL"},
                {"SYSCONSTRAINTS", "REFERENCECOUNT", "7", "INTEGER NOT NULL"},
                {"SYSCONSTRAINTS", "SCHEMAID", "5", "CHAR(36) NOT NULL"},
                {"SYSCONSTRAINTS", "STATE", "6", "CHAR(1) NOT NULL"},
                {"SYSCONSTRAINTS", "TABLEID", "2", "CHAR(36) NOT NULL"},
                {"SYSCONSTRAINTS", "TYPE", "4", "CHAR(1) NOT NULL"},
                {"SYSDEPENDS", "DEPENDENTFINDER", "2", "org.apache.derby.catalog.DependableFinder NOT NULL"},
                {"SYSDEPENDS", "DEPENDENTID", "1", "CHAR(36) NOT NULL"},
                {"SYSDEPENDS", "PROVIDERFINDER", "4", "org.apache.derby.catalog.DependableFinder NOT NULL"},
                {"SYSDEPENDS", "PROVIDERID", "3", "CHAR(36) NOT NULL"},
                {"SYSDUMMY1", "IBMREQD", "1", "CHAR(1)"},
                {"SYSFILES", "FILEID", "1", "CHAR(36) NOT NULL"},
                {"SYSFILES", "FILENAME", "3", "VARCHAR(128) NOT NULL"},
                {"SYSFILES", "GENERATIONID", "4", "BIGINT NOT NULL"},
                {"SYSFILES", "SCHEMAID", "2", "CHAR(36) NOT NULL"},
                {"SYSFOREIGNKEYS", "CONGLOMERATEID", "2", "CHAR(36) NOT NULL"},
                {"SYSFOREIGNKEYS", "CONSTRAINTID", "1", "CHAR(36) NOT NULL"},
                {"SYSFOREIGNKEYS", "DELETERULE", "4", "CHAR(1) NOT NULL"},
                {"SYSFOREIGNKEYS", "KEYCONSTRAINTID", "3", "CHAR(36) NOT NULL"},
                {"SYSFOREIGNKEYS", "UPDATERULE", "5", "CHAR(1) NOT NULL"},
                {"SYSKEYS", "CONGLOMERATEID", "2", "CHAR(36) NOT NULL"},
                {"SYSKEYS", "CONSTRAINTID", "1", "CHAR(36) NOT NULL"},
                {"SYSPERMS", "GRANTEE", "6", "VARCHAR(128) NOT NULL"},
                {"SYSPERMS", "GRANTOR", "5", "VARCHAR(128) NOT NULL"},
                {"SYSPERMS", "ISGRANTABLE", "7", "CHAR(1) NOT NULL"},
                {"SYSPERMS", "OBJECTID", "3", "CHAR(36) NOT NULL"},
                {"SYSPERMS", "OBJECTTYPE", "2", "VARCHAR(36) NOT NULL"},
                {"SYSPERMS", "PERMISSION", "4", "CHAR(36) NOT NULL"},
                {"SYSPERMS", "UUID", "1", "CHAR(36) NOT NULL"},
                {"SYSROLES", "GRANTEE", "3", "VARCHAR(128) NOT NULL"},
                {"SYSROLES", "GRANTOR", "4", "VARCHAR(128) NOT NULL"},
                {"SYSROLES", "ISDEF", "6", "CHAR(1) NOT NULL"},
                {"SYSROLES", "ROLEID", "2", "VARCHAR(128) NOT NULL"},
                {"SYSROLES", "UUID", "1", "CHAR(36) NOT NULL"},
                {"SYSROLES", "WITHADMINOPTION", "5", "CHAR(1) NOT NULL"},
                {"SYSROUTINEPERMS", "ALIASID", "4", "CHAR(36) NOT NULL"},
                {"SYSROUTINEPERMS", "GRANTEE", "2", "VARCHAR(128) NOT NULL"},
                {"SYSROUTINEPERMS", "GRANTOPTION", "5", "CHAR(1) NOT NULL"},
                {"SYSROUTINEPERMS", "GRANTOR", "3", "VARCHAR(128) NOT NULL"},
                {"SYSROUTINEPERMS", "ROUTINEPERMSID", "1", "CHAR(36) NOT NULL"},
                {"SYSSCHEMAS", "AUTHORIZATIONID", "3", "VARCHAR(128) NOT NULL"},
                {"SYSSCHEMAS", "SCHEMAID", "1", "CHAR(36) NOT NULL"},
                {"SYSSCHEMAS", "SCHEMANAME", "2", "VARCHAR(128) NOT NULL"},
                {"SYSSEQUENCES", "CURRENTVALUE", "5", "BIGINT"},
                {"SYSSEQUENCES", "CYCLEOPTION", "10", "CHAR(1) NOT NULL"},
                {"SYSSEQUENCES", "INCREMENT", "9", "BIGINT NOT NULL"},
                {"SYSSEQUENCES", "MAXIMUMVALUE", "8", "BIGINT NOT NULL"},
                {"SYSSEQUENCES", "MINIMUMVALUE", "7", "BIGINT NOT NULL"},
                {"SYSSEQUENCES", "SCHEMAID", "3", "CHAR(36) NOT NULL"},
                {"SYSSEQUENCES", "SEQUENCEDATATYPE", "4", "org.apache.derby.catalog.TypeDescriptor NOT NULL"},
                {"SYSSEQUENCES", "SEQUENCEID", "1", "CHAR(36) NOT NULL"},
                {"SYSSEQUENCES", "SEQUENCENAME", "2", "VARCHAR(128) NOT NULL"},
                {"SYSSEQUENCES", "STARTVALUE", "6", "BIGINT NOT NULL"},
                {"SYSSTATEMENTS", "COMPILATIONSCHEMAID", "8", "CHAR(36)"},
                {"SYSSTATEMENTS", "LASTCOMPILED", "7", "TIMESTAMP"},
                {"SYSSTATEMENTS", "SCHEMAID", "3", "CHAR(36) NOT NULL"},
                {"SYSSTATEMENTS", "STMTID", "1", "CHAR(36) NOT NULL"},
                {"SYSSTATEMENTS", "STMTNAME", "2", "VARCHAR(128) NOT NULL"},
                {"SYSSTATEMENTS", "TEXT", "6", "LONG VARCHAR NOT NULL"},
                {"SYSSTATEMENTS", "TYPE", "4", "CHAR(1) NOT NULL"},
                {"SYSSTATEMENTS", "USINGTEXT", "9", "LONG VARCHAR"},
                {"SYSSTATEMENTS", "VALID", "5", "BOOLEAN NOT NULL"},
                {"SYSSTATISTICS", "COLCOUNT", "7", "INTEGER NOT NULL"},
                {"SYSSTATISTICS", "CREATIONTIMESTAMP", "4", "TIMESTAMP NOT NULL"},
                {"SYSSTATISTICS", "REFERENCEID", "2", "CHAR(36) NOT NULL"},
                {"SYSSTATISTICS", "STATID", "1", "CHAR(36) NOT NULL"},
                {"SYSSTATISTICS", "STATISTICS", "8", "org.apache.derby.catalog.Statistics NOT NULL"},
                {"SYSSTATISTICS", "TABLEID", "3", "CHAR(36) NOT NULL"},
                {"SYSSTATISTICS", "TYPE", "5", "CHAR(1) NOT NULL"},
                {"SYSSTATISTICS", "VALID", "6", "BOOLEAN NOT NULL"},
                {"SYSTABLEPERMS", "DELETEPRIV", "6", "CHAR(1) NOT NULL"},
                {"SYSTABLEPERMS", "GRANTEE", "2", "VARCHAR(128) NOT NULL"},
                {"SYSTABLEPERMS", "GRANTOR", "3", "VARCHAR(128) NOT NULL"},
                {"SYSTABLEPERMS", "INSERTPRIV", "7", "CHAR(1) NOT NULL"},
                {"SYSTABLEPERMS", "REFERENCESPRIV", "9", "CHAR(1) NOT NULL"},
                {"SYSTABLEPERMS", "SELECTPRIV", "5", "CHAR(1) NOT NULL"},
                {"SYSTABLEPERMS", "TABLEID", "4", "CHAR(36) NOT NULL"},
                {"SYSTABLEPERMS", "TABLEPERMSID", "1", "CHAR(36) NOT NULL"},
                {"SYSTABLEPERMS", "TRIGGERPRIV", "10", "CHAR(1) NOT NULL"},
                {"SYSTABLEPERMS", "UPDATEPRIV", "8", "CHAR(1) NOT NULL"},
                {"SYSTABLES", "LOCKGRANULARITY", "5", "CHAR(1) NOT NULL"},
                {"SYSTABLES", "SCHEMAID", "4", "CHAR(36) NOT NULL"},
                {"SYSTABLES", "TABLEID", "1", "CHAR(36) NOT NULL"},
                {"SYSTABLES", "TABLENAME", "2", "VARCHAR(128) NOT NULL"},
                {"SYSTABLES", "TABLETYPE", "3", "CHAR(1) NOT NULL"},
                {"SYSTRIGGERS", "ACTIONSTMTID", "11", "CHAR(36)"},
                {"SYSTRIGGERS", "CREATIONTIMESTAMP", "4", "TIMESTAMP NOT NULL"},
                {"SYSTRIGGERS", "EVENT", "5", "CHAR(1) NOT NULL"},
                {"SYSTRIGGERS", "FIRINGTIME", "6", "CHAR(1) NOT NULL"},
                {"SYSTRIGGERS", "NEWREFERENCINGNAME", "17", "VARCHAR(128)"},
                {"SYSTRIGGERS", "OLDREFERENCINGNAME", "16", "VARCHAR(128)"},
                {"SYSTRIGGERS", "REFERENCEDCOLUMNS", "12", "org.apache.derby.catalog.ReferencedColumns"},
                {"SYSTRIGGERS", "REFERENCINGNEW", "15", "BOOLEAN"},
                {"SYSTRIGGERS", "REFERENCINGOLD", "14", "BOOLEAN"},
                {"SYSTRIGGERS", "SCHEMAID", "3", "CHAR(36) NOT NULL"},
                {"SYSTRIGGERS", "STATE", "8", "CHAR(1) NOT NULL"},
                {"SYSTRIGGERS", "TABLEID", "9", "CHAR(36) NOT NULL"},
                {"SYSTRIGGERS", "TRIGGERDEFINITION", "13", "LONG VARCHAR"},
                {"SYSTRIGGERS", "TRIGGERID", "1", "CHAR(36) NOT NULL"},
                {"SYSTRIGGERS", "TRIGGERNAME", "2", "VARCHAR(128) NOT NULL"},
                {"SYSTRIGGERS", "TYPE", "7", "CHAR(1) NOT NULL"},
                {"SYSTRIGGERS", "WHENCLAUSETEXT", "18", "LONG VARCHAR"},
                {"SYSTRIGGERS", "WHENSTMTID", "10", "CHAR(36)"},
                {"SYSUSERS", "HASHINGSCHEME", "2", "VARCHAR(32672) NOT NULL"},
                {"SYSUSERS", "LASTMODIFIED", "4", "TIMESTAMP NOT NULL"},
                {"SYSUSERS", "PASSWORD", "3", "VARCHAR(32672) NOT NULL"},
                {"SYSUSERS", "USERNAME", "1", "VARCHAR(128) NOT NULL"},
                {"SYSVIEWS", "CHECKOPTION", "3", "CHAR(1) NOT NULL"},
                {"SYSVIEWS", "COMPILATIONSCHEMAID", "4", "CHAR(36)"},
                {"SYSVIEWS", "TABLEID", "1", "CHAR(36) NOT NULL"},
                {"SYSVIEWS", "VIEWDEFINITION", "2", "LONG VARCHAR NOT NULL"}
        };
        Statement s = createStatement();
        // Join SYSTABLES to SYSCOLUMNS over TABLEID/REFERENCEID so that any
        // missing, extra or retyped column in any catalog fails the comparison.
        ResultSet rs = s.executeQuery("select TABLENAME, COLUMNNAME, COLUMNNUMBER, COLUMNDATATYPE from sys.systables t, sys.syscolumns c" +
                " where t.TABLEID=c.REFERENCEID order by TABLENAME, COLUMNNAME");
        JDBC.assertFullResultSet(rs, expected);
        rs.close();
        s.close();
    }
    /**
     * Check that each system table has exactly the expected heap and index
     * conglomerates.
     * <p>
     * NOTE(review): rows are ordered by (TABLENAME, ISINDEX) only, yet the
     * expected data lists INDEX3 before INDEX2 before INDEX1 — the relative
     * order of indexes within one table relies on the engine's row order;
     * confirm this is stable before reordering.
     *
     * @throws SQLException
     */
    public void testSystemCatalogIndexes() throws SQLException{
        // {TABLENAME, CONGLOMERATENAME, ISINDEX}
        String [][] expected =
        {
                {"SYSALIASES", "SYSALIASES_HEAP", "false"},
                {"SYSALIASES", "SYSALIASES_INDEX3", "true"},
                {"SYSALIASES", "SYSALIASES_INDEX2", "true"},
                {"SYSALIASES", "SYSALIASES_INDEX1", "true"},
                {"SYSCHECKS", "SYSCHECKS_HEAP", "false"},
                {"SYSCHECKS", "SYSCHECKS_INDEX1", "true"},
                {"SYSCOLPERMS", "SYSCOLPERMS_HEAP", "false"},
                {"SYSCOLPERMS", "SYSCOLPERMS_INDEX3", "true"},
                {"SYSCOLPERMS", "SYSCOLPERMS_INDEX2", "true"},
                {"SYSCOLPERMS", "SYSCOLPERMS_INDEX1", "true"},
                {"SYSCOLUMNS", "SYSCOLUMNS_HEAP", "false"},
                {"SYSCOLUMNS", "SYSCOLUMNS_INDEX2", "true"},
                {"SYSCOLUMNS", "SYSCOLUMNS_INDEX1", "true"},
                {"SYSCONGLOMERATES", "SYSCONGLOMERATES_HEAP", "false"},
                {"SYSCONGLOMERATES", "SYSCONGLOMERATES_INDEX3", "true"},
                {"SYSCONGLOMERATES", "SYSCONGLOMERATES_INDEX2", "true"},
                {"SYSCONGLOMERATES", "SYSCONGLOMERATES_INDEX1", "true"},
                {"SYSCONSTRAINTS", "SYSCONSTRAINTS_HEAP", "false"},
                {"SYSCONSTRAINTS", "SYSCONSTRAINTS_INDEX3", "true"},
                {"SYSCONSTRAINTS", "SYSCONSTRAINTS_INDEX2", "true"},
                {"SYSCONSTRAINTS", "SYSCONSTRAINTS_INDEX1", "true"},
                {"SYSDEPENDS", "SYSDEPENDS_HEAP", "false"},
                {"SYSDEPENDS", "SYSDEPENDS_INDEX2", "true"},
                {"SYSDEPENDS", "SYSDEPENDS_INDEX1", "true"},
                {"SYSDUMMY1", "SYSDUMMY1_HEAP", "false"},
                {"SYSFILES", "SYSFILES_HEAP", "false"},
                {"SYSFILES", "SYSFILES_INDEX2", "true"},
                {"SYSFILES", "SYSFILES_INDEX1", "true"},
                {"SYSFOREIGNKEYS", "SYSFOREIGNKEYS_HEAP", "false"},
                {"SYSFOREIGNKEYS", "SYSFOREIGNKEYS_INDEX2", "true"},
                {"SYSFOREIGNKEYS", "SYSFOREIGNKEYS_INDEX1", "true"},
                {"SYSKEYS", "SYSKEYS_HEAP", "false"},
                {"SYSKEYS", "SYSKEYS_INDEX1", "true"},
                {"SYSPERMS", "SYSPERMS_HEAP", "false"},
                {"SYSPERMS", "SYSPERMS_INDEX3", "true"},
                {"SYSPERMS", "SYSPERMS_INDEX2", "true"},
                {"SYSPERMS", "SYSPERMS_INDEX1", "true"},
                {"SYSROLES", "SYSROLES_HEAP", "false"},
                {"SYSROLES", "SYSROLES_INDEX3", "true"},
                {"SYSROLES", "SYSROLES_INDEX2", "true"},
                {"SYSROLES", "SYSROLES_INDEX1", "true"},
                {"SYSROUTINEPERMS", "SYSROUTINEPERMS_HEAP", "false"},
                {"SYSROUTINEPERMS", "SYSROUTINEPERMS_INDEX3", "true"},
                {"SYSROUTINEPERMS", "SYSROUTINEPERMS_INDEX2", "true"},
                {"SYSROUTINEPERMS", "SYSROUTINEPERMS_INDEX1", "true"},
                {"SYSSCHEMAS", "SYSSCHEMAS_HEAP", "false"},
                {"SYSSCHEMAS", "SYSSCHEMAS_INDEX2", "true"},
                {"SYSSCHEMAS", "SYSSCHEMAS_INDEX1", "true"},
                {"SYSSEQUENCES", "SYSSEQUENCES_HEAP", "false"},
                {"SYSSEQUENCES", "SYSSEQUENCES_INDEX2", "true"},
                {"SYSSEQUENCES", "SYSSEQUENCES_INDEX1", "true"},
                {"SYSSTATEMENTS", "SYSSTATEMENTS_HEAP", "false"},
                {"SYSSTATEMENTS", "SYSSTATEMENTS_INDEX2", "true"},
                {"SYSSTATEMENTS", "SYSSTATEMENTS_INDEX1", "true"},
                {"SYSSTATISTICS", "SYSSTATISTICS_HEAP", "false"},
                {"SYSSTATISTICS", "SYSSTATISTICS_INDEX1", "true"},
                {"SYSTABLEPERMS", "SYSTABLEPERMS_HEAP", "false"},
                {"SYSTABLEPERMS", "SYSTABLEPERMS_INDEX3", "true"},
                {"SYSTABLEPERMS", "SYSTABLEPERMS_INDEX2", "true"},
                {"SYSTABLEPERMS", "SYSTABLEPERMS_INDEX1", "true"},
                {"SYSTABLES", "SYSTABLES_HEAP", "false"},
                {"SYSTABLES", "SYSTABLES_INDEX2", "true"},
                {"SYSTABLES", "SYSTABLES_INDEX1", "true"},
                {"SYSTRIGGERS", "SYSTRIGGERS_HEAP", "false"},
                {"SYSTRIGGERS", "SYSTRIGGERS_INDEX3", "true"},
                {"SYSTRIGGERS", "SYSTRIGGERS_INDEX2", "true"},
                {"SYSTRIGGERS", "SYSTRIGGERS_INDEX1", "true"},
                {"SYSUSERS", "SYSUSERS_HEAP", "false"},
                {"SYSUSERS", "SYSUSERS_INDEX1", "true"},
                {"SYSVIEWS", "SYSVIEWS_HEAP", "false"},
                {"SYSVIEWS", "SYSVIEWS_INDEX1", "true"},
        };
        Statement s = createStatement();
        ResultSet rs = s.executeQuery("select TABLENAME, CONGLOMERATENAME, ISINDEX from sys.systables t, sys.sysconglomerates c"
                + " where t.TABLEID=c.TABLEID order by TABLENAME, ISINDEX");
        JDBC.assertFullResultSet(rs, expected);
        rs.close();
        s.close();
    }
/**
* Check that a newly created table and its columns appear in SYSTABLES and SYSCOLUMNS
* @throws SQLException
*/
public void testNewTableInSystemCatalogs() throws SQLException {
Statement s = createStatement();
s.execute("create table t (i int, s smallint)");
ResultSet rs = s.executeQuery("select TABLETYPE from sys.systables where tablename = 'T'");
JDBC.assertSingleValueResultSet(rs, "T");
rs.close();
rs = s.executeQuery("select TABLENAME, COLUMNNAME, COLUMNNUMBER, columndatatype from sys.systables t, sys.syscolumns c" +
" where t.TABLEID=c.REFERENCEID and t.tablename = 'T' order by TABLENAME, COLUMNNAME");
String[][] expected = {{"T", "I", "1", "INTEGER"}, {"T", "S", "2", "SMALLINT"}};
JDBC.assertFullResultSet(rs,expected);
rs.close();
rs = s.executeQuery("select TABLENAME, ISINDEX from sys.systables t, sys.sysconglomerates c where t.TABLEID=c.TABLEID and t.TABLENAME = 'T' order by TABLENAME, ISINDEX");
expected = new String[][] {{"T", "false"},};
JDBC.assertFullResultSet(rs,expected);
rs.close();
s.execute("drop table t");
s.close();
}
/**
* Test that table and column names over thirty characters are recorded
* properly in the system tables.
*
* @throws SQLException
*/
public void testOverThirtyCharsInTableName() throws SQLException {
Statement s = createStatement();
s.execute("create table t234567890123456789012345678901234567890 (c234567890123456789012345678901234567890 int)");
ResultSet rs = s.executeQuery("select TABLENAME from sys.systables where length(TABLENAME) > 30 order by tablename");
JDBC.assertSingleValueResultSet(rs, "T234567890123456789012345678901234567890");
rs.close();
rs = s.executeQuery("select COLUMNNAME from sys.syscolumns where {fn length(COLUMNNAME)} > 30 order by columnname");
JDBC.assertSingleValueResultSet(rs, "C234567890123456789012345678901234567890");
rs.close();
s.execute("drop table t234567890123456789012345678901234567890");
s.close();
}
    /**
     * Test that named constraints and unnamed constraints are recorded in the system tables properly.
     * <p>
     * Tables whose names start with UNNAMED hold system-generated constraint
     * names, so they are excluded from the named-constraint query and checked
     * separately (only the constraint TYPE is asserted for them).
     *
     * @throws SQLException
     */
    public void testPrimaryAndUniqueKeysInSystemCatalogs() throws SQLException {
        Statement s = createStatement();
        // Returns (CONSTRAINTNAME, TYPE) for every explicitly-named constraint.
        String getNamedConstraintsQuery = "select c.constraintname, c.type from sys.sysconstraints c, sys.systables t "
                + "where c.tableid = t.tableid and not t.tablename like 'UNNAMED%' order by c.constraintname";
        // Named primary key: TYPE 'P'.
        s.execute("create table primkey1 (c1 int not null constraint prim1 primary key)");
        String [][] expected = new String[][] {{"PRIM1", "P"}};
        ResultSet rs = s.executeQuery(getNamedConstraintsQuery);
        JDBC.assertFullResultSet(rs, expected, true);
        rs.close();
        // Unnamed primary key: generated name, still TYPE 'P'.
        s.execute("create table unnamed_primkey2 (c1 int not null primary key)");
        rs = s.executeQuery("select c.constraintname, c.type from sys.sysconstraints c, sys.systables t where c.tableid = t.tableid and t.tablename = 'UNNAMED_PRIMKEY2' order by c.constraintname");
        assertTrue(rs.next());
        assertEquals("P", rs.getString(2));
        assertFalse(rs.next());
        rs.close();
        // The unnamed constraint must not leak into the named-constraint list.
        rs = s.executeQuery(getNamedConstraintsQuery);
        JDBC.assertFullResultSet(rs, expected);
        rs.close();
        // Multi-column named primary key.
        s.execute("create table primkey3 (c1 int not null, c2 int not null, constraint prim3 primary key(c2, c1))");
        expected = new String[][] {{"PRIM1", "P"}, {"PRIM3", "P"}};
        rs = s.executeQuery(getNamedConstraintsQuery);
        JDBC.assertFullResultSet(rs, expected);
        rs.close();
        // Named unique constraint: TYPE 'U'.
        s.execute("create table uniquekey1 (c1 int not null constraint uniq1 unique)");
        expected = new String[][] {{"PRIM1", "P"}, {"PRIM3", "P"}, {"UNIQ1", "U"}};
        rs = s.executeQuery(getNamedConstraintsQuery);
        JDBC.assertFullResultSet(rs, expected);
        rs.close();
        // Unnamed unique constraint.
        s.execute("create table unnamed_uniquekey2 (c1 int not null unique)");
        rs = s.executeQuery("select c.constraintname, c.type from sys.sysconstraints c, sys.systables t where c.tableid = t.tableid and t.tablename = 'UNNAMED_UNIQUEKEY2' order by c.constraintname");
        assertTrue(rs.next());
        assertEquals("U", rs.getString(2));
        assertFalse(rs.next());
        rs.close();
        rs = s.executeQuery(getNamedConstraintsQuery);
        JDBC.assertFullResultSet(rs, expected);
        rs.close();
        // Multi-column named unique constraint.
        s.execute("create table uniquekey3 (c1 int not null, c2 int not null, constraint uniq3 unique(c2, c1))");
        expected = new String[][] {{"PRIM1", "P"}, {"PRIM3", "P"}, {"UNIQ1", "U"}, {"UNIQ3", "U"}};
        rs = s.executeQuery(getNamedConstraintsQuery);
        JDBC.assertFullResultSet(rs, expected);
        rs.close();
        // Clean up all fixture tables.
        s.execute("drop table primkey1");
        s.execute("drop table unnamed_primkey2");
        s.execute("drop table primkey3");
        s.execute("drop table uniquekey1");
        s.execute("drop table unnamed_uniquekey2");
        s.execute("drop table uniquekey3");
        s.close();
    }
/**
* Test that view creation is recorded in the system tables.
*
* @throws SQLException
*/
public void testViewsOfSystemCatalogs() throws SQLException {
Statement s = createStatement();
s.execute("create table t (i int, s smallint)");
s.execute("create table uniquekey3 (c1 int not null, c2 int not null, constraint uniq3 unique(c2, c1))");
s.execute("create view dummyview as select * from t, uniquekey3");
ResultSet rs = s.executeQuery("select tablename from sys.systables t, sys.sysviews v where t.tableid = v.tableid order by tablename");
JDBC.assertSingleValueResultSet(rs, "DUMMYVIEW");
rs.close();
s.execute("drop view dummyview");
s.execute("drop table t");
s.execute("drop table uniquekey3");
s.close();
}
/**
* This test creates a table with all supported datatypes aqnd ensures
* that bound embedded and network server return the identical datatypes
* for those datatypes. DERBY-5407
* @throws SQLException
*/
public void testColumnDatatypesOfAllDataTypesInSystemCatalogs() throws SQLException {
int totalNumOfColumnDatatypes = 21;
Statement s = createStatement();
s.execute("create table allTypesTable (" +
" a01 bigint," +
" a02 blob,\n" +
" a03 char( 1 ),\n" +
" a04 char( 1 ) for bit data ,\n" +
" a05 clob,\n" +
" a06 date,\n" +
" a07 decimal,\n" +
" a08 double,\n" +
" a09 float,\n" +
" a10 int,\n" +
" a11 long varchar,\n" +
" a12 long varchar for bit data,\n" +
" a13 numeric,\n" +
" a14 real,\n" +
" a15 smallint,\n" +
" a16 time,\n" +
" a17 timestamp,\n" +
" a18 varchar(10),\n" +
" a19 varchar(10) for bit data,\n" +
" a20 xml,\n" +
" a21 boolean\n" +
")");
ResultSet rs = s.executeQuery("select columndatatype "+
"from sys.systables, sys.syscolumns "+
"where tablename='ALLTYPESTABLE' "+
"and tableid=referenceid "+
"order by columnname");
for (int i=1; i<=totalNumOfColumnDatatypes; i++)
{
rs.next();
switch(i)
{
case 1 :
assertTrue(rs.getString(1).startsWith("BIGINT"));
break;
case 2 :
assertTrue(rs.getString(1).startsWith("BLOB(2147483647)"));
break;
case 3 :
assertTrue(rs.getString(1).startsWith("CHAR(1)"));
break;
case 4 :
assertTrue(rs.getString(1).startsWith("CHAR (1) FOR BIT DATA"));
break;
case 5 :
assertTrue(rs.getString(1).startsWith("CLOB(2147483647)"));
break;
case 6 :
assertTrue(rs.getString(1).startsWith("DATE"));
break;
case 7 :
assertTrue(rs.getString(1).startsWith("DECIMAL(5,0)"));
break;
case 8 :
assertTrue(rs.getString(1).startsWith("DOUBLE"));
break;
case 9 :
assertTrue(rs.getString(1).startsWith("DOUBLE"));
break;
case 10 :
assertTrue(rs.getString(1).startsWith("INTEGER"));
break;
case 11 :
assertTrue(rs.getString(1).startsWith("LONG VARCHAR"));
break;
case 12 :
assertTrue(rs.getString(1).startsWith("LONG VARCHAR FOR BIT DATA"));
break;
case 13 :
assertTrue(rs.getString(1).startsWith("NUMERIC(5,0)"));
break;
case 14 :
assertTrue(rs.getString(1).startsWith("REAL"));
break;
case 15 :
assertTrue(rs.getString(1).startsWith("SMALLINT"));
break;
case 16 :
assertTrue(rs.getString(1).startsWith("TIME"));
break;
case 17 :
assertTrue(rs.getString(1).startsWith("TIMESTAMP"));
break;
case 18 :
assertTrue(rs.getString(1).startsWith("VARCHAR(10)"));
break;
case 19 :
assertTrue(rs.getString(1).startsWith("VARCHAR (10) FOR BIT DATA"));
break;
case 20 :
assertTrue(rs.getString(1).startsWith("XML"));
break;
case 21 :
assertTrue(rs.getString(1).startsWith("BOOLEAN"));
break;
}
}
rs.close();
s.execute("drop table ALLTYPESTABLE");
}
    /**
     * Check that column datatypes are reported correctly, both in
     * embedded and client/server modes.
     * <p>
     * Also spot-checks SYSCONGLOMERATES.DESCRIPTOR for an index and
     * SYSTRIGGERS.REFERENCEDCOLUMNS for a trigger without a column list.
     *
     * @throws SQLException
     */
    public void testColumnDatatypesInSystemCatalogs() throws SQLException {
        Statement s = createStatement();
        s.execute("create table decimal_tab (dcol decimal(5,2), ncol numeric(5,2) default 1.0)");
        ResultSet rs = s.executeQuery("select columnname, columndatatype from sys.syscolumns where columnname IN ('DCOL', 'NCOL') order by columnname");
        //DCOL
        assertTrue(rs.next());
        assertTrue(rs.getString(2).startsWith("DECIMAL(5,2)"));
        //NCOL
        assertTrue(rs.next());
        assertTrue(rs.getString(2).startsWith("NUMERIC(5,2)"));
        assertFalse(rs.next());
        rs.close();
        // A single-column index is described as "BTREE (1)" in DESCRIPTOR.
        s.execute("create index decimal_tab_idx on decimal_tab(dcol)");
        rs = s.executeQuery("select conglomeratename, descriptor from sys.sysconglomerates where conglomeratename = 'DECIMAL_TAB_IDX' order by conglomeratename");
        assertTrue(rs.next());
        assertTrue(rs.getString(2).startsWith("BTREE (1)"));
        assertFalse(rs.next());
        rs.close();
        // A row trigger with no UPDATE OF column list stores NULL in
        // SYSTRIGGERS.REFERENCEDCOLUMNS.
        s.execute("create trigger t1 after update on decimal_tab for each row values 1");
        rs = s.executeQuery("select triggername, referencedcolumns from sys.systriggers order by triggername");
        assertTrue(rs.next());
        assertNull(rs.getString(2));
        assertFalse(rs.next());
        rs.close();
        s.execute("drop trigger t1");
        s.execute("drop table decimal_tab");
        s.close();
    }
/**
* Test for fix of Derby-318, confirm that it is possible to select
* COLUMNDEFAULT from SYSCOLUMNS after a column that is generated by
* default has been added.
*
* @throws SQLException
*/
public void testAutoincrementColumnUpdated() throws SQLException{
Statement s = createStatement();
s.executeUpdate("create table defaultAutoinc(autoinccol int generated by default as identity)");
ResultSet rs = s.executeQuery("select COLUMNDEFAULT from SYS.SYSCOLUMNS where COLUMNNAME = 'AUTOINCCOL'");
assertTrue(rs.next());
// Before Derby-318, this next call would have failed with an NPE
Object o = rs.getObject(1);
if (! (o instanceof java.io.Serializable)) {
fail("SystemCatalogTest: invalid Object type for SYSCOLUMNS.COLUMNDEFAULT");
}
assertFalse(rs.next());
rs.close();
s.executeUpdate("drop table defaultAutoinc");
s.close();
}
/**
* Run SYSCS_UTIL.SYSCS_CHECK_TABLE on each system table.
*
* @throws SQLException
*/
public void testCheckConsistencyOfSystemCatalogs() throws SQLException {
Statement s = createStatement();
ResultSet rs = s.executeQuery("select tablename, SYSCS_UTIL.SYSCS_CHECK_TABLE('SYS', tablename)from sys.systables where tabletype = 'S' and tablename != 'SYSDUMMY1' order by tablename");
boolean nonEmptyResultSet = false;
while(rs.next()) {
nonEmptyResultSet = true;
assertEquals(rs.getInt(2), 1);
}
assertTrue(nonEmptyResultSet);
rs.close();
s.close();
}
}
|
apache/tez | 35,755 | tez-plugins/tez-history-parser/src/test/java/org/apache/tez/history/TestHistoryParser.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.tez.history;
import static org.junit.Assert.*;
import java.io.BufferedWriter;
import java.io.File;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.util.Arrays;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.apache.commons.cli.ParseException;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang.RandomStringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hdfs.server.namenode.EditLogFileOutputStream;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.tez.client.CallerContext;
import org.apache.tez.client.TezClient;
import org.apache.tez.common.counters.DAGCounter;
import org.apache.tez.common.counters.TaskCounter;
import org.apache.tez.common.counters.TezCounter;
import org.apache.tez.dag.api.DAG;
import org.apache.tez.dag.api.DataSinkDescriptor;
import org.apache.tez.dag.api.DataSourceDescriptor;
import org.apache.tez.dag.api.Edge;
import org.apache.tez.dag.api.EdgeProperty;
import org.apache.tez.dag.api.ProcessorDescriptor;
import org.apache.tez.dag.api.TezConfiguration;
import org.apache.tez.dag.api.TezException;
import org.apache.tez.dag.api.Vertex;
import org.apache.tez.dag.api.client.DAGClient;
import org.apache.tez.dag.api.client.StatusGetOpts;
import org.apache.tez.dag.api.event.VertexState;
import org.apache.tez.dag.api.oldrecords.TaskAttemptState;
import org.apache.tez.dag.api.oldrecords.TaskState;
import org.apache.tez.dag.app.dag.DAGState;
import org.apache.tez.dag.history.logging.ats.ATSHistoryLoggingService;
import org.apache.tez.dag.history.logging.impl.SimpleHistoryLoggingService;
import org.apache.tez.dag.records.TezDAGID;
import org.apache.tez.examples.WordCount;
import org.apache.tez.history.parser.ATSFileParser;
import org.apache.tez.history.parser.SimpleHistoryParser;
import org.apache.tez.history.parser.datamodel.BaseInfo;
import org.apache.tez.history.parser.datamodel.DagInfo;
import org.apache.tez.history.parser.datamodel.EdgeInfo;
import org.apache.tez.history.parser.datamodel.TaskAttemptInfo;
import org.apache.tez.history.parser.datamodel.TaskAttemptInfo.DataDependencyEvent;
import org.apache.tez.history.parser.datamodel.TaskInfo;
import org.apache.tez.history.parser.datamodel.VersionInfo;
import org.apache.tez.history.parser.datamodel.VertexInfo;
import org.apache.tez.mapreduce.input.MRInput;
import org.apache.tez.mapreduce.output.MROutput;
import org.apache.tez.mapreduce.processor.SimpleMRProcessor;
import org.apache.tez.runtime.api.ProcessorContext;
import org.apache.tez.runtime.library.api.TezRuntimeConfiguration;
import org.apache.tez.runtime.library.conf.OrderedPartitionedKVEdgeConfig;
import org.apache.tez.runtime.library.input.OrderedGroupedKVInput;
import org.apache.tez.runtime.library.output.OrderedPartitionedKVOutput;
import org.apache.tez.runtime.library.partitioner.HashPartitioner;
import org.apache.tez.tests.MiniTezClusterWithTimeline;
import com.google.common.collect.Sets;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
public class TestHistoryParser {
  // Mini clusters shared by all tests; started in @BeforeClass, torn down in @AfterClass.
  private static MiniDFSCluster miniDFSCluster;
  private static MiniTezClusterWithTimeline miniTezCluster;
  //location within miniHDFS cluster's hdfs
  private static Path inputLoc = new Path("/tmp/sample.txt");
  // Vertex/IO names used by the word-count DAG under test.
  private final static String INPUT = "Input";
  private final static String OUTPUT = "Output";
  private final static String TOKENIZER = "Tokenizer";
  private final static String SUMMATION = "Summation";
  // Where SimpleHistoryLoggingService writes its history file inside the mini HDFS.
  private final static String SIMPLE_HISTORY_DIR = "/tmp/simplehistory/";
  private final static String HISTORY_TXT = "history.txt";
  private static Configuration conf = new Configuration();
  private static FileSystem fs;
  // Local scratch directories under target/ for the DFS data, the Tez
  // cluster and downloaded ATS/history data.
  private static String TEST_ROOT_DIR =
      "target" + Path.SEPARATOR + TestHistoryParser.class.getName() + "-tmpDir";
  private static String TEZ_BASE_DIR =
      "target" + Path.SEPARATOR + TestHistoryParser.class.getName() + "-tez";
  private static String DOWNLOAD_DIR = TEST_ROOT_DIR + Path.SEPARATOR + "download";
  // Timeline webapp address of the mini cluster, captured in setupTezCluster().
  private static String yarnTimelineAddress;
  /**
   * Starts a one-datanode mini HDFS cluster, points fs.defaultFS at it, and
   * then boots the mini Tez cluster (see {@link #setupTezCluster()}).
   */
  @BeforeClass
  public static void setupCluster() throws Exception {
    conf = new Configuration();
    // Skip edit-log fsync to speed up the test NameNode.
    conf.setBoolean(DFSConfigKeys.DFS_NAMENODE_EDITS_NOEDITLOGCHANNELFLUSH, false);
    EditLogFileOutputStream.setShouldSkipFsyncForTesting(true);
    conf.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, TEST_ROOT_DIR);
    miniDFSCluster =
        new MiniDFSCluster.Builder(conf).numDataNodes(1).format(true).build();
    fs = miniDFSCluster.getFileSystem();
    conf.set("fs.defaultFS", fs.getUri().toString());
    setupTezCluster();
  }
@AfterClass
public static void shutdownCluster() {
try {
if (miniDFSCluster != null) {
miniDFSCluster.shutdown();
}
if (miniTezCluster != null) {
miniTezCluster.stop();
}
} finally {
try {
FileUtils.deleteDirectory(new File(TEST_ROOT_DIR));
FileUtils.deleteDirectory(new File(TEZ_BASE_DIR));
} catch (IOException e) {
//safe to ignore
}
}
}
  // Not annotated @Before: invoked exactly once from setupCluster().
  /**
   * Boots a one-node mini Tez cluster with the timeline service enabled and
   * ATS history logging configured, then creates the word-count input file.
   */
  public static void setupTezCluster() throws Exception {
    // Tight shuffle timeouts/limits so failures surface quickly in tests.
    conf.setInt(TezRuntimeConfiguration.TEZ_RUNTIME_SHUFFLE_CONNECT_TIMEOUT, 3 * 1000);
    conf.setInt(TezRuntimeConfiguration.TEZ_RUNTIME_SHUFFLE_READ_TIMEOUT, 3 * 1000);
    conf.setInt(TezRuntimeConfiguration.TEZ_RUNTIME_SHUFFLE_FETCH_FAILURES_LIMIT, 2);
    //Enable per edge counters
    conf.setBoolean(TezConfiguration.TEZ_TASK_GENERATE_COUNTERS_PER_IO, true);
    conf.setBoolean(TezConfiguration.TEZ_AM_ALLOW_DISABLED_TIMELINE_DOMAINS, true);
    conf.setBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED, true);
    conf.set(TezConfiguration.TEZ_HISTORY_LOGGING_SERVICE_CLASS, ATSHistoryLoggingService
        .class.getName());
    conf.set(TezConfiguration.TEZ_SIMPLE_HISTORY_LOGGING_DIR, SIMPLE_HISTORY_DIR);
    miniTezCluster =
        new MiniTezClusterWithTimeline(TEZ_BASE_DIR, 1, 1, 1, true);
    miniTezCluster.init(conf);
    miniTezCluster.start();
    createSampleFile(inputLoc);
    // Mirror the relevant timeline/history settings into a TezConfiguration
    // derived from the running mini cluster's config.
    TezConfiguration tezConf = new TezConfiguration(miniTezCluster.getConfig());
    tezConf.setBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED, true);
    tezConf.set(YarnConfiguration.TIMELINE_SERVICE_WEBAPP_ADDRESS,
        miniTezCluster.getConfig().get(YarnConfiguration.TIMELINE_SERVICE_WEBAPP_ADDRESS));
    tezConf.setBoolean(TezConfiguration.TEZ_AM_ALLOW_DISABLED_TIMELINE_DOMAINS, true);
    tezConf.set(TezConfiguration.TEZ_HISTORY_LOGGING_SERVICE_CLASS,
        ATSHistoryLoggingService.class.getName());
    // Remember the timeline webapp address for the ATS import tool.
    yarnTimelineAddress = miniTezCluster.getConfig().get(YarnConfiguration.TIMELINE_SERVICE_WEBAPP_ADDRESS);
  }
  /**
   * Run a word count example in mini cluster and check if it is possible to download
   * data from ATS and parse it. Also, run with SimpleHistoryLogging option and verify
   * if it matches with ATS data.
   *
   * @throws Exception
   */
  @Test
  public void testParserWithSuccessfulJob() throws Exception {
    //Run basic word count example.
    String dagId = runWordCount(WordCount.TokenProcessor.class.getName(),
        WordCount.SumProcessor.class.getName(), "WordCount", true);
    //Export the data from ATS
    String[] args = { "--dagId=" + dagId, "--downloadDir=" + DOWNLOAD_DIR, "--yarnTimelineAddress=" + yarnTimelineAddress };
    // Exit code 0 means the download succeeded.
    int result = ATSImportTool.process(args);
    assertTrue(result == 0);
    //Parse ATS data and verify results
    DagInfo dagInfoFromATS = getDagInfo(dagId);
    verifyDagInfo(dagInfoFromATS, true);
    verifyJobSpecificInfo(dagInfoFromATS);
    checkConfig(dagInfoFromATS);
    //Now run with SimpleHistoryLogging
    dagId = runWordCount(WordCount.TokenProcessor.class.getName(),
        WordCount.SumProcessor.class.getName(), "WordCount", false);
    // NOTE(review): fixed sleep to let the history file be flushed before it
    // is copied; flaky-by-design — confirm there is no event to wait on.
    Thread.sleep(10000); //For all flushes to happen and to avoid half-cooked download.
    DagInfo shDagInfo = getDagInfoFromSimpleHistory(dagId);
    verifyDagInfo(shDagInfo, false);
    verifyJobSpecificInfo(shDagInfo);
    //Compare dagInfo by parsing ATS data with DagInfo obtained by parsing SimpleHistoryLog
    isDAGEqual(dagInfoFromATS, shDagInfo);
  }
/**
 * Downloads the SimpleHistory log written by the AM for the given DAG and
 * parses it into a {@link DagInfo}.
 *
 * @param dagId DAG to look up
 * @return parsed DAG data whose id matches {@code dagId}
 */
private DagInfo getDagInfoFromSimpleHistory(String dagId) throws TezException, IOException {
  TezDAGID tezDagId = TezDAGID.fromString(dagId);
  ApplicationAttemptId attemptId =
      ApplicationAttemptId.newInstance(tezDagId.getApplicationId(), 1);
  // The history file name is suffixed with the application attempt id.
  Path remotePath = new Path(conf.get("fs.defaultFS")
      + SIMPLE_HISTORY_DIR + HISTORY_TXT + "." + attemptId);
  FileSystem fileSystem = remotePath.getFileSystem(conf);
  fileSystem.copyToLocalFile(remotePath, new Path(DOWNLOAD_DIR, HISTORY_TXT));
  File historyFile = new File(DOWNLOAD_DIR, HISTORY_TXT);
  //Now parse via SimpleHistory
  SimpleHistoryParser historyParser = new SimpleHistoryParser(Arrays.asList(historyFile));
  DagInfo parsedDag = historyParser.getDAGData(dagId);
  assertTrue(parsedDag.getDagId().equals(dagId));
  return parsedDag;
}
/**
 * Sanity-checks that the application configuration was captured in the
 * parsed DAG data (only applicable to the ATS data set).
 */
private void checkConfig(DagInfo dagInfo) {
  assertTrue("DagInfo is " + dagInfo, dagInfo != null);
  // The parsed DAG should carry a non-empty application config.
  assertTrue("DagInfo config size=" + dagInfo.getAppConfig().size(),
      !dagInfo.getAppConfig().isEmpty());
  // Spot-check one well-known key that must be a positive integer.
  assertTrue("DagInfo config=" + dagInfo.getAppConfig(),
      Integer.parseInt(dagInfo.getAppConfig().get("dfs.replication")) > 0);
}
/**
 * WordCount-specific assertions: exactly two vertices (Tokenizer -> Summation)
 * joined by a single scatter-gather edge, one attempt per task, no failures,
 * and the last Tokenizer attempt to finish is the source of the last data
 * event seen by every Summation attempt.
 */
private void verifyJobSpecificInfo(DagInfo dagInfo) {
  //Job specific
  assertTrue(dagInfo.getNumVertices() == 2);
  assertTrue(dagInfo.getName().equals("WordCount"));
  // Processor class names must match those passed to runWordCount.
  assertTrue(dagInfo.getVertex(TOKENIZER).getProcessorClassName().equals(
      WordCount.TokenProcessor.class.getName()));
  assertTrue(dagInfo.getVertex(SUMMATION).getProcessorClassName()
      .equals(WordCount.SumProcessor.class.getName()));
  assertTrue(dagInfo.getFinishTime() > dagInfo.getStartTime());
  assertTrue(dagInfo.getEdges().size() == 1);
  // The single edge: Tokenizer --(scatter-gather, ordered partitioned KV)--> Summation.
  EdgeInfo edgeInfo = dagInfo.getEdges().iterator().next();
  assertTrue(edgeInfo.getDataMovementType().
      equals(EdgeProperty.DataMovementType.SCATTER_GATHER.toString()));
  assertTrue(edgeInfo.getSourceVertex().getVertexName().equals(TOKENIZER));
  assertTrue(edgeInfo.getDestinationVertex().getVertexName().equals(SUMMATION));
  assertTrue(edgeInfo.getInputVertexName().equals(TOKENIZER));
  assertTrue(edgeInfo.getOutputVertexName().equals(SUMMATION));
  assertTrue(edgeInfo.getEdgeSourceClass().equals(OrderedPartitionedKVOutput.class.getName()));
  assertTrue(edgeInfo.getEdgeDestinationClass().equals(OrderedGroupedKVInput.class.getName()));
  assertTrue(dagInfo.getVertices().size() == 2);
  // Track the successful attempt of the last Tokenizer task to finish, and the
  // source attempt of the last data event seen by Summation attempts.
  String lastSourceTA = null;
  String lastDataEventSourceTA = null;
  for (VertexInfo vertexInfo : dagInfo.getVertices()) {
    assertTrue(vertexInfo.getKilledTasksCount() == 0);
    assertTrue(vertexInfo.getInitRequestedTime() > 0);
    assertTrue(vertexInfo.getInitTime() > 0);
    assertTrue(vertexInfo.getStartRequestedTime() > 0);
    assertTrue(vertexInfo.getStartTime() > 0);
    assertTrue(vertexInfo.getFinishTime() > 0);
    assertTrue(vertexInfo.getFinishTime() > vertexInfo.getStartTime());
    long finishTime = 0;
    for (TaskInfo taskInfo : vertexInfo.getTasks()) {
      // Successful run: exactly one attempt per task, no failed/killed attempts.
      assertTrue(taskInfo.getNumberOfTaskAttempts() == 1);
      assertTrue(taskInfo.getMaxTaskAttemptDuration() >= 0);
      assertTrue(taskInfo.getMinTaskAttemptDuration() >= 0);
      assertTrue(taskInfo.getAvgTaskAttemptDuration() >= 0);
      assertTrue(taskInfo.getLastTaskAttemptToFinish() != null);
      assertTrue(taskInfo.getContainersMapping().size() > 0);
      assertTrue(taskInfo.getSuccessfulTaskAttempts().size() > 0);
      assertTrue(taskInfo.getFailedTaskAttempts().size() == 0);
      assertTrue(taskInfo.getKilledTaskAttempts().size() == 0);
      assertTrue(taskInfo.getFinishTime() > taskInfo.getStartTime());
      List<TaskAttemptInfo> attempts = taskInfo.getTaskAttempts();
      if (vertexInfo.getVertexName().equals(TOKENIZER)) {
        // get the last task to finish and track its successful attempt
        if (finishTime < taskInfo.getFinishTime()) {
          finishTime = taskInfo.getFinishTime();
          lastSourceTA = taskInfo.getSuccessfulAttemptId();
        }
      } else {
        for (TaskAttemptInfo attempt : attempts) {
          DataDependencyEvent item = attempt.getLastDataEvents().get(0);
          assertTrue(item.getTimestamp() > 0);
          if (lastDataEventSourceTA == null) {
            lastDataEventSourceTA = item.getTaskAttemptId();
          } else {
            // all attempts should have the same last data event source TA
            assertTrue(lastDataEventSourceTA.equals(item.getTaskAttemptId()));
          }
        }
      }
      for (TaskAttemptInfo attemptInfo : taskInfo.getTaskAttempts()) {
        assertTrue(attemptInfo.getCreationTime() > 0);
        assertTrue(attemptInfo.getAllocationTime() > 0);
        assertTrue(attemptInfo.getStartTime() > 0);
        assertTrue(attemptInfo.getFinishTime() > attemptInfo.getStartTime());
      }
    }
    assertTrue(vertexInfo.getLastTaskToFinish() != null);
    // Edge topology: Tokenizer has only outputs, Summation has only inputs.
    if (vertexInfo.getVertexName().equals(TOKENIZER)) {
      assertTrue(vertexInfo.getInputEdges().size() == 0);
      assertTrue(vertexInfo.getOutputEdges().size() == 1);
      assertTrue(vertexInfo.getOutputVertices().size() == 1);
      assertTrue(vertexInfo.getInputVertices().size() == 0);
    } else {
      assertTrue(vertexInfo.getInputEdges().size() == 1);
      assertTrue(vertexInfo.getOutputEdges().size() == 0);
      assertTrue(vertexInfo.getOutputVertices().size() == 0);
      assertTrue(vertexInfo.getInputVertices().size() == 1);
    }
  }
  // The last Tokenizer attempt to finish must be the last data event source.
  assertTrue(lastSourceTA.equals(lastDataEventSourceTA));
}
/**
 * Run a word count example in mini cluster, then point the ATS import tool at
 * an invalid ATS address. The tool is expected to fail with a ParseException
 * rather than returning a result.
 *
 * @throws Exception
 */
@Test
public void testParserWithSuccessfulJob_InvalidATS() throws Exception {
  //Run basic word count example.
  String dagId = runWordCount(WordCount.TokenProcessor.class.getName(),
      WordCount.SumProcessor.class.getName(), "WordCount-With-WrongATS-URL", true);
  //Export the data from ATS using a bogus endpoint.
  String atsAddress = "--atsAddress=http://atsHost:8188";
  String[] args = { "--dagId=" + dagId,
      "--downloadDir=" + DOWNLOAD_DIR,
      atsAddress
  };
  try {
    // Return value intentionally ignored: process() must throw before
    // returning (the previous unused 'result' local is removed).
    ATSImportTool.process(args);
    fail("Should have failed with processException");
  } catch (ParseException e) {
    //expected: the invalid ATS address surfaces as a ParseException
  }
}
/**
 * Run a failed job and parse the data from ATS
 */
@Test
public void testParserWithFailedJob() throws Exception {
  //Run a job which would fail (FailProcessor always throws).
  String dagId = runWordCount(WordCount.TokenProcessor.class.getName(), FailProcessor.class
      .getName(), "WordCount-With-Exception", true);
  //Export the data from ATS
  String[] args = { "--dagId=" + dagId, "--downloadDir=" + DOWNLOAD_DIR, "--yarnTimelineAddress=" + yarnTimelineAddress };
  int result = ATSImportTool.process(args);
  assertTrue(result == 0);  // download must succeed even though the DAG failed
  //Parse ATS data
  DagInfo dagInfo = getDagInfo(dagId);
  //Applicable for ATS dataset
  checkConfig(dagInfo);
  //Verify DAGInfo. Verifies vertex, task, taskAttempts in recursive manner
  verifyDagInfo(dagInfo, true);
  //Dag specific
  VertexInfo summationVertex = dagInfo.getVertex(SUMMATION);
  assertTrue(summationVertex.getFailedTasks().size() == 1); //1 task, 4 attempts failed
  assertTrue(summationVertex.getFailedTasks().get(0).getFailedTaskAttempts().size() == 4);
  assertTrue(summationVertex.getStatus().equals(VertexState.FAILED.toString()));
  // Tokenizer succeeds, Summation fails, hence the DAG fails.
  assertTrue(dagInfo.getFailedVertices().size() == 1);
  assertTrue(dagInfo.getFailedVertices().get(0).getVertexName().equals(SUMMATION));
  assertTrue(dagInfo.getSuccessfullVertices().size() == 1);
  assertTrue(dagInfo.getSuccessfullVertices().get(0).getVertexName().equals(TOKENIZER));
  assertTrue(dagInfo.getStatus().equals(DAGState.FAILED.toString()));
  // DAG-level counters: 4 failed Summation attempts + 1 successful Tokenizer task.
  verifyCounter(dagInfo.getCounter(DAGCounter.NUM_FAILED_TASKS.toString()), null, 4);
  verifyCounter(dagInfo.getCounter(DAGCounter.NUM_SUCCEEDED_TASKS.toString()), null, 1);
  verifyCounter(dagInfo.getCounter(DAGCounter.TOTAL_LAUNCHED_TASKS.toString()), null, 5);
  verifyCounter(dagInfo.getCounter(TaskCounter.INPUT_RECORDS_PROCESSED.toString()),
      "TaskCounter_Tokenizer_INPUT_Input", 10);
  verifyCounter(dagInfo.getCounter(TaskCounter.ADDITIONAL_SPILLS_BYTES_READ.toString()),
      "TaskCounter_Tokenizer_OUTPUT_Summation", 0);
  verifyCounter(dagInfo.getCounter(TaskCounter.OUTPUT_RECORDS.toString()),
      "TaskCounter_Tokenizer_OUTPUT_Summation",
      20); //Every line has 2 words. 10 lines x 2 words = 20
  verifyCounter(dagInfo.getCounter(TaskCounter.SPILLED_RECORDS.toString()),
      "TaskCounter_Tokenizer_OUTPUT_Summation", 20); //Same as above
  // Each failed attempt must be recorded as the causal TA of the retry that follows it.
  for (TaskInfo taskInfo : summationVertex.getTasks()) {
    TaskAttemptInfo lastAttempt = null;
    for (TaskAttemptInfo attemptInfo : taskInfo.getTaskAttempts()) {
      if (lastAttempt != null) {
        // failed attempt should be causal TA of next attempt
        assertTrue(lastAttempt.getTaskAttemptId().equals(attemptInfo.getCreationCausalTA()));
        assertTrue(lastAttempt.getTerminationCause() != null);
      }
      lastAttempt = attemptInfo;
    }
  }
  //TODO: Need to check for SUMMATION vertex counters. Since all attempts are failed, counters are not getting populated.
  //TaskCounter.REDUCE_INPUT_RECORDS
  //Verify if the processor exception is given in diagnostics
  assertTrue(dagInfo.getDiagnostics().contains("Failing this processor for some reason"));
}
/**
 * Adding explicit equals here instead of in DAG/Vertex/Edge where hashCode also needs to
 * change. Also, some custom comparisons are done here for unit testing.
 * Compares status, then recurses into edges and vertices.
 */
private void isDAGEqual(DagInfo dagInfo1, DagInfo dagInfo2) {
  assertNotNull(dagInfo1);
  assertNotNull(dagInfo2);
  assertEquals(dagInfo1.getStatus(), dagInfo2.getStatus());
  isEdgeEqual(dagInfo1.getEdges(), dagInfo2.getEdges());
  isVertexEqual(dagInfo1.getVertices(), dagInfo2.getVertices());
}
/**
 * Compares two vertices field-by-field: name, processor class, task counts,
 * status, edges, connected vertex counts, and finally each task.
 */
private void isVertexEqual(VertexInfo vertexInfo1, VertexInfo vertexInfo2) {
  assertTrue(vertexInfo1 != null);
  assertTrue(vertexInfo2 != null);
  assertTrue(vertexInfo1.getVertexName().equals(vertexInfo2.getVertexName()));
  assertTrue(vertexInfo1.getProcessorClassName().equals(vertexInfo2.getProcessorClassName()));
  // getNumTasks was previously asserted twice; the duplicate check is removed.
  assertTrue(vertexInfo1.getNumTasks() == vertexInfo2.getNumTasks());
  assertTrue(vertexInfo1.getCompletedTasksCount() == vertexInfo2.getCompletedTasksCount());
  assertTrue(vertexInfo1.getStatus().equals(vertexInfo2.getStatus()));
  isEdgeEqual(vertexInfo1.getInputEdges(), vertexInfo2.getInputEdges());
  isEdgeEqual(vertexInfo1.getOutputEdges(), vertexInfo2.getOutputEdges());
  assertTrue(vertexInfo1.getInputVertices().size() == vertexInfo2.getInputVertices().size());
  assertTrue(vertexInfo1.getOutputVertices().size() == vertexInfo2.getOutputVertices().size());
  isTaskEqual(vertexInfo1.getTasks(), vertexInfo2.getTasks());
}
/**
 * Compares two vertex lists pairwise, in order.
 */
private void isVertexEqual(List<VertexInfo> vertexList1, List<VertexInfo> vertexList2) {
  assertTrue("Vertices sizes should be the same", vertexList1.size() == vertexList2.size());
  // Lists are compared positionally.
  for (int i = 0; i < vertexList1.size(); i++) {
    isVertexEqual(vertexList1.get(i), vertexList2.get(i));
  }
}
/**
 * Compares two edges via their string representation.
 */
private void isEdgeEqual(EdgeInfo edgeInfo1, EdgeInfo edgeInfo2) {
  assertTrue(edgeInfo1 != null);
  assertTrue(edgeInfo2 != null);
  String info1 = edgeInfo1.toString();
  // BUG FIX: previously read edgeInfo1.toString() again, so the edge was
  // compared with itself and the assertion could never fail.
  String info2 = edgeInfo2.toString();
  assertTrue(info1.equals(info2));
}
/**
 * Compares two edge collections pairwise, in iteration order.
 */
private void isEdgeEqual(Collection<EdgeInfo> info1, Collection<EdgeInfo> info2) {
  // BUG FIX: was info1.size() == info1.size(), which is trivially true.
  assertTrue("sizes should be the same", info1.size() == info2.size());
  Iterator<EdgeInfo> it1 = info1.iterator();
  Iterator<EdgeInfo> it2 = info2.iterator();
  while (it1.hasNext()) {
    assertTrue(it2.hasNext());
    isEdgeEqual(it1.next(), it2.next());
  }
}
/**
 * Compares two task collections pairwise, in iteration order.
 */
private void isTaskEqual(Collection<TaskInfo> info1, Collection<TaskInfo> info2) {
  // BUG FIX: was info1.size() == info1.size(), which is trivially true.
  assertTrue("sizes should be the same", info1.size() == info2.size());
  Iterator<TaskInfo> it1 = info1.iterator();
  Iterator<TaskInfo> it2 = info2.iterator();
  while (it1.hasNext()) {
    assertTrue(it2.hasNext());
    isTaskEqual(it1.next(), it2.next());
  }
}
/**
 * Compares two tasks: status, owning vertex name, attempts and counters.
 */
private void isTaskEqual(TaskInfo task1, TaskInfo task2) {
  assertTrue(task1 != null);
  assertTrue(task2 != null);
  VertexInfo owner1 = task1.getVertexInfo();
  VertexInfo owner2 = task2.getVertexInfo();
  assertTrue(owner1 != null);
  assertTrue(owner2 != null);
  assertTrue(task1.getStatus().equals(task2.getStatus()));
  assertTrue(owner1.getVertexName().equals(owner2.getVertexName()));
  isTaskAttemptEqual(task1.getTaskAttempts(), task2.getTaskAttempts());
  //Verify counters
  isCountersSame(task1, task2);
}
/**
 * Compares the counters expected to match between the ATS and SimpleHistory
 * data sets. The previous copy-paste chain checked OUTPUT_RECORDS twice; the
 * duplicate is removed and the list is expressed as a single loop.
 */
private void isCountersSame(BaseInfo info1, BaseInfo info2) {
  TaskCounter[] countersToCompare = {
      TaskCounter.ADDITIONAL_SPILL_COUNT,
      TaskCounter.SPILLED_RECORDS,
      TaskCounter.OUTPUT_RECORDS,
      TaskCounter.OUTPUT_BYTES,
      TaskCounter.REDUCE_INPUT_GROUPS,
      TaskCounter.REDUCE_INPUT_RECORDS
  };
  for (TaskCounter counter : countersToCompare) {
    isCounterSame(info1.getCounter(counter.name()), info2.getCounter(counter.name()));
  }
}
/**
 * Asserts that every (group -> counter) entry of counter1 exists in counter2
 * with an identical value.
 */
private void isCounterSame(Map<String, TezCounter> counter1, Map<String, TezCounter> counter2) {
  for (Map.Entry<String, TezCounter> entry : counter1.entrySet()) {
    String group = entry.getKey();
    long expected = entry.getValue().getValue();
    // The other data set must contain the same group with the same value.
    assertTrue(counter2.containsKey(group));
    assertTrue(counter2.get(group).getValue() == expected);
  }
}
/**
 * Compares two task-attempt collections pairwise, in iteration order.
 */
private void isTaskAttemptEqual(Collection<TaskAttemptInfo> info1,
    Collection<TaskAttemptInfo> info2) {
  // BUG FIX: was info1.size() == info1.size(), which is trivially true.
  assertTrue("sizes should be the same", info1.size() == info2.size());
  Iterator<TaskAttemptInfo> it1 = info1.iterator();
  Iterator<TaskAttemptInfo> it2 = info2.iterator();
  while (it1.hasNext()) {
    assertTrue(it2.hasNext());
    isTaskAttemptEqual(it1.next(), it2.next());
  }
}
/**
 * Compares two task attempts: status, owning vertex name, and counters.
 */
private void isTaskAttemptEqual(TaskAttemptInfo attempt1, TaskAttemptInfo attempt2) {
  assertTrue(attempt1 != null);
  assertTrue(attempt2 != null);
  TaskInfo owner1 = attempt1.getTaskInfo();
  TaskInfo owner2 = attempt2.getTaskInfo();
  assertTrue(owner1 != null);
  assertTrue(owner2 != null);
  assertTrue(attempt1.getStatus().equals(attempt2.getStatus()));
  assertTrue(owner1.getVertexInfo().getVertexName()
      .equals(owner2.getVertexInfo().getVertexName()));
  //Verify counters
  isCountersSame(attempt1, attempt2);
}
/**
 * Create sample file for wordcount program: 10 lines of two words each
 * ("Sample" plus a random token).
 *
 * @param inputLoc HDFS location at which the sample input is created
 * @throws IOException
 */
private static void createSampleFile(Path inputLoc) throws IOException {
  fs.deleteOnExit(inputLoc);
  // try-with-resources guarantees the writer (and underlying stream) is closed
  // even if a write fails; the previous version leaked it on exception.
  try (FSDataOutputStream out = fs.create(inputLoc);
      BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(out))) {
    for (int i = 0; i < 10; i++) {
      writer.write("Sample " + RandomStringUtils.randomAlphanumeric(5));
      writer.newLine();
    }
  }
}
/**
 * Parses the ATS zip previously downloaded for the given DAG.
 *
 * @param dagId DAG to look up
 * @return parsed DAG data whose id matches {@code dagId}
 */
private DagInfo getDagInfo(String dagId) throws TezException {
  //Parse downloaded contents
  File zipFile = new File(DOWNLOAD_DIR + Path.SEPARATOR + dagId + ".zip");
  DagInfo parsedDag = new ATSFileParser(Arrays.asList(zipFile)).getDAGData(dagId);
  assertTrue(parsedDag.getDagId().equals(dagId));
  return parsedDag;
}
/**
 * Verifies counter values: when counterGroupName is null, every group must
 * hold expectedVal; otherwise only the named group is checked.
 */
private void verifyCounter(Map<String, TezCounter> counterMap,
    String counterGroupName, long expectedVal) {
  //Iterate through group-->tezCounter
  for (Map.Entry<String, TezCounter> entry : counterMap.entrySet()) {
    boolean groupMatches =
        counterGroupName == null || entry.getKey().equals(counterGroupName);
    if (groupMatches) {
      assertTrue(entry.getValue().getValue() == expectedVal);
    }
  }
}
/**
 * Creates and starts a TezClient wired to either ATS or SimpleHistory logging.
 *
 * @param withTimeline true to log history to ATS, false for SimpleHistoryLoggingService
 * @return a started, ready TezClient
 */
TezClient getTezClient(boolean withTimeline) throws Exception {
  TezConfiguration clientConf = new TezConfiguration(miniTezCluster.getConfig());
  String loggingService = withTimeline
      ? ATSHistoryLoggingService.class.getName()
      : SimpleHistoryLoggingService.class.getName();
  if (withTimeline) {
    // Timeline service is only enabled for the ATS-backed runs.
    clientConf.setBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED, true);
  }
  clientConf.set(TezConfiguration.TEZ_HISTORY_LOGGING_SERVICE_CLASS, loggingService);
  clientConf.setBoolean(TezConfiguration.TEZ_AM_ALLOW_DISABLED_TIMELINE_DOMAINS, true);
  TezClient client = TezClient.create("WordCount", clientConf, false);
  client.start();
  client.waitTillReady();
  return client;
}
/**
 * Builds and runs a two-vertex word-count DAG (tokenizer -> summation) and
 * waits for completion.
 *
 * @param tokenizerProcessor processor class name for the tokenizer vertex
 * @param summationProcessor processor class name for the summation vertex
 * @param dagName name given to the DAG
 * @param withTimeline whether history is logged to ATS or SimpleHistory
 * @return the DAG id of the submitted DAG
 */
private String runWordCount(String tokenizerProcessor, String summationProcessor,
    String dagName, boolean withTimeline)
    throws Exception {
  //HDFS path for job output
  Path outputLoc = new Path("/tmp/outPath_" + System.currentTimeMillis());
  DataSourceDescriptor dataSource = MRInput.createConfigBuilder(conf,
      TextInputFormat.class, inputLoc.toString()).build();
  DataSinkDescriptor dataSink =
      MROutput.createConfigBuilder(conf, TextOutputFormat.class, outputLoc.toString()).build();
  Vertex tokenizerVertex = Vertex.create(TOKENIZER, ProcessorDescriptor.create(
      tokenizerProcessor)).addDataSource(INPUT, dataSource);
  OrderedPartitionedKVEdgeConfig edgeConf = OrderedPartitionedKVEdgeConfig
      .newBuilder(Text.class.getName(), IntWritable.class.getName(),
          HashPartitioner.class.getName()).build();
  Vertex summationVertex = Vertex.create(SUMMATION,
      ProcessorDescriptor.create(summationProcessor), 1).addDataSink(OUTPUT, dataSink);
  // Create DAG and add the vertices. Connect the producer and consumer vertices via the edge
  DAG dag = DAG.create(dagName);
  dag.addVertex(tokenizerVertex).addVertex(summationVertex).addEdge(
      Edge.create(tokenizerVertex, summationVertex, edgeConf.createDefaultEdgeProperty()));
  TezClient tezClient = getTezClient(withTimeline);
  try {
    // Update Caller Context
    CallerContext callerContext =
        CallerContext.create("TezExamples", "Tez WordCount Example Job");
    ApplicationId appId = tezClient.getAppMasterApplicationId();
    if (appId == null) {
      appId = ApplicationId.newInstance(1001L, 1); // was 1001l; lowercase 'l' reads as '1'
    }
    callerContext.setCallerIdAndType(appId.toString(), "TezApplication");
    dag.setCallerContext(callerContext);
    DAGClient client = tezClient.submitDAG(dag);
    client.waitForCompletionWithStatusUpdates(Sets.newHashSet(StatusGetOpts.GET_COUNTERS));
    TezDAGID tezDAGID = TezDAGID.getInstance(tezClient.getAppMasterApplicationId(), 1);
    return tezDAGID.toString();
  } finally {
    // Always stop the client, even on failure; the old 'if (tezClient != null)'
    // guard was dead code since tezClient had already been dereferenced, and
    // the client leaked when submit/wait threw.
    tezClient.stop();
  }
}
/**
 * Processor which would just throw exception. Used to drive the failed-DAG
 * parsing test; the exception message is asserted on in
 * testParserWithFailedJob, so do not change it.
 */
public static class FailProcessor extends SimpleMRProcessor {
  public FailProcessor(ProcessorContext context) {
    super(context);
  }
  // Always fails, forcing every attempt of this vertex to fail.
  @Override
  public void run() throws Exception {
    throw new Exception("Failing this processor for some reason");
  }
}
/**
 * Common DAG-level assertions valid for both ATS and SimpleHistory data sets.
 *
 * @param dagInfo parsed DAG data
 * @param ats true when the data came from ATS (version info only exists there)
 */
private void verifyDagInfo(DagInfo dagInfo, boolean ats) {
  if (ats) {
    VersionInfo versionInfo = dagInfo.getVersionInfo();
    assertTrue(versionInfo != null); //should be present post 0.5.4
    assertTrue(versionInfo.getVersion() != null);
    assertTrue(versionInfo.getRevision() != null);
    assertTrue(versionInfo.getBuildTime() != null);
  }
  assertTrue(dagInfo.getUserName() != null);
  assertTrue(!dagInfo.getUserName().isEmpty());
  assertTrue(dagInfo.getStartTime() > 0);
  assertTrue(dagInfo.getFinishTimeInterval() > 0);
  // The DAG itself is the reference point, so its own interval starts at 0.
  assertTrue(dagInfo.getStartTimeInterval() == 0);
  assertTrue(dagInfo.getStartTime() > 0);
  if (dagInfo.getStatus().equalsIgnoreCase(DAGState.SUCCEEDED.toString())) {
    assertTrue(dagInfo.getFinishTime() >= dagInfo.getStartTime());
  }
  assertTrue(dagInfo.getFinishTimeInterval() > dagInfo.getStartTimeInterval());
  assertTrue(dagInfo.getStartTime() > dagInfo.getSubmitTime());
  assertTrue(dagInfo.getTimeTaken() > 0);
  // Caller context set in runWordCount must round-trip through the history data.
  assertNotNull(dagInfo.getCallerContext());
  assertEquals("TezExamples", dagInfo.getCallerContext().getContext());
  assertEquals("Tez WordCount Example Job", dagInfo.getCallerContext().getBlob());
  assertNotNull(dagInfo.getCallerContext().getCallerId());
  assertEquals("TezApplication", dagInfo.getCallerContext().getCallerType());
  //Verify all vertices
  for (VertexInfo vertexInfo : dagInfo.getVertices()) {
    verifyVertex(vertexInfo, vertexInfo.getFailedTasksCount() > 0);
  }
  VertexInfo fastestVertex = dagInfo.getFastestVertex();
  assertTrue(fastestVertex != null);
  // BUG FIX: getStatus() returns a String (see the equalsIgnoreCase check
  // above); comparing it with the DAGState enum was always false, so the
  // slowest-vertex assertion never actually ran.
  if (dagInfo.getStatus().equals(DAGState.SUCCEEDED.toString())) {
    assertTrue(dagInfo.getSlowestVertex() != null);
  }
}
/**
 * Vertex-level assertions. Timing and success-count checks only apply to
 * vertices with no failed tasks; failed vertices are verified task-by-task.
 *
 * @param vertexInfo vertex to verify
 * @param hasFailedTasks true when the vertex is expected to carry failed tasks
 */
private void verifyVertex(VertexInfo vertexInfo, boolean hasFailedTasks) {
  assertTrue(vertexInfo != null);
  if (hasFailedTasks) {
    assertTrue(vertexInfo.getFailedTasksCount() > 0);
  }
  assertTrue(vertexInfo.getStartTimeInterval() > 0);
  assertTrue(vertexInfo.getStartTime() > 0);
  assertTrue(vertexInfo.getFinishTimeInterval() > 0);
  assertTrue(vertexInfo.getStartTimeInterval() < vertexInfo.getFinishTimeInterval());
  assertTrue(vertexInfo.getVertexName() != null);
  if (!hasFailedTasks) {
    // A clean vertex: everything succeeded and all durations are populated.
    assertTrue(vertexInfo.getFinishTime() > 0);
    assertTrue(vertexInfo.getFailedTasks().size() == 0);
    assertTrue(vertexInfo.getSucceededTasksCount() == vertexInfo.getSuccessfulTasks().size());
    assertTrue(vertexInfo.getFailedTasksCount() == 0);
    assertTrue(vertexInfo.getAvgTaskDuration() > 0);
    assertTrue(vertexInfo.getMaxTaskDuration() > 0);
    assertTrue(vertexInfo.getMinTaskDuration() > 0);
    assertTrue(vertexInfo.getTimeTaken() > 0);
    assertTrue(vertexInfo.getStatus().equalsIgnoreCase(VertexState.SUCCEEDED.toString()));
    assertTrue(vertexInfo.getCompletedTasksCount() > 0);
    assertTrue(vertexInfo.getFirstTaskToStart() != null);
    assertTrue(vertexInfo.getSucceededTasksCount() > 0);
    assertTrue(vertexInfo.getTasks().size() > 0);
    assertTrue(vertexInfo.getFinishTime() > vertexInfo.getStartTime());
  }
  // Verify succeeded tasks, then the failed ones (killed tasks not exercised).
  for (TaskInfo taskInfo : vertexInfo.getTasks()) {
    if (taskInfo.getStatus().equals(TaskState.SUCCEEDED.toString())) {
      verifyTask(taskInfo, false);
    }
  }
  for (TaskInfo taskInfo : vertexInfo.getFailedTasks()) {
    verifyTask(taskInfo, true);
  }
  assertTrue(vertexInfo.getProcessorClassName() != null);
  assertTrue(vertexInfo.getStatus() != null);
  assertTrue(vertexInfo.getDagInfo() != null);
  assertTrue(vertexInfo.getInitTimeInterval() > 0);
  assertTrue(vertexInfo.getNumTasks() > 0);
}
/**
 * Task-level assertions. When the task has no failed attempts it must have
 * succeeded outright, since killed attempts are not exercised by these tests.
 *
 * @param taskInfo task to verify
 * @param hasFailedAttempts true when the task is expected to carry failed attempts
 */
private void verifyTask(TaskInfo taskInfo, boolean hasFailedAttempts) {
  assertTrue(taskInfo != null);
  assertTrue(taskInfo.getStatus() != null);
  assertTrue(taskInfo.getStartTimeInterval() > 0);
  //Not testing for killed attempts. So if there are no failures, it should succeed
  if (!hasFailedAttempts) {
    assertTrue(taskInfo.getStatus().equals(TaskState.SUCCEEDED.toString()));
    // Absolute times are expected to exceed the relative (interval) times.
    assertTrue(taskInfo.getFinishTimeInterval() > 0 && taskInfo.getFinishTime() > taskInfo
        .getFinishTimeInterval());
    assertTrue(
        taskInfo.getStartTimeInterval() > 0 && taskInfo.getStartTime() > taskInfo.getStartTimeInterval());
    assertTrue(taskInfo.getSuccessfulAttemptId() != null);
    assertTrue(taskInfo.getSuccessfulTaskAttempt() != null);
    assertTrue(taskInfo.getFinishTime() > taskInfo.getStartTime());
  }
  assertTrue(taskInfo.getTaskId() != null);
  // Recurse into every attempt of this task.
  for (TaskAttemptInfo attemptInfo : taskInfo.getTaskAttempts()) {
    verifyTaskAttemptInfo(attemptInfo);
  }
}
/**
 * Per-attempt assertions; the timing/counter checks only apply to attempts
 * that finished successfully.
 */
private void verifyTaskAttemptInfo(TaskAttemptInfo attemptInfo) {
  // BUG FIX: getStatus() returns a String, so comparing it with the
  // TaskAttemptState enum value was always false and this whole block of
  // assertions never executed.
  if (attemptInfo.getStatus() != null && attemptInfo.getStatus()
      .equals(TaskAttemptState.SUCCEEDED.toString())) {
    assertTrue(attemptInfo.getStartTimeInterval() > 0);
    assertTrue(attemptInfo.getFinishTimeInterval() > 0);
    assertTrue(attemptInfo.getCreationTime() > 0);
    assertTrue(attemptInfo.getAllocationTime() > 0);
    assertTrue(attemptInfo.getStartTime() > 0);
    assertTrue(attemptInfo.getFinishTime() > 0);
    assertTrue(attemptInfo.getFinishTime() > attemptInfo.getStartTime());
    // Absolute times must exceed the relative (interval) times.
    assertTrue(attemptInfo.getFinishTime() > attemptInfo.getFinishTimeInterval());
    assertTrue(attemptInfo.getStartTime() > attemptInfo.getStartTimeInterval());
    assertTrue(attemptInfo.getNodeId() != null);
    assertTrue(attemptInfo.getTimeTaken() != -1);
    assertTrue(attemptInfo.getEvents() != null);
    assertTrue(attemptInfo.getTezCounters() != null);
    assertTrue(attemptInfo.getContainer() != null);
  }
  assertTrue(attemptInfo.getTaskInfo() != null);
}
}
|
googleapis/google-cloud-java | 35,634 | java-dialogflow/proto-google-cloud-dialogflow-v2beta1/src/main/java/com/google/cloud/dialogflow/v2beta1/IngestContextReferencesResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/dialogflow/v2beta1/conversation.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.dialogflow.v2beta1;
/**
*
*
* <pre>
* The response message for [ConversationsService.IngestContextReferences][].
* </pre>
*
* Protobuf type {@code google.cloud.dialogflow.v2beta1.IngestContextReferencesResponse}
*/
public final class IngestContextReferencesResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.dialogflow.v2beta1.IngestContextReferencesResponse)
IngestContextReferencesResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use IngestContextReferencesResponse.newBuilder() to construct.
private IngestContextReferencesResponse(
com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private IngestContextReferencesResponse() {}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new IngestContextReferencesResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.dialogflow.v2beta1.ConversationProto
.internal_static_google_cloud_dialogflow_v2beta1_IngestContextReferencesResponse_descriptor;
}
@SuppressWarnings({"rawtypes"})
@java.lang.Override
protected com.google.protobuf.MapFieldReflectionAccessor internalGetMapFieldReflection(
int number) {
switch (number) {
case 1:
return internalGetIngestedContextReferences();
default:
throw new RuntimeException("Invalid map field number: " + number);
}
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.dialogflow.v2beta1.ConversationProto
.internal_static_google_cloud_dialogflow_v2beta1_IngestContextReferencesResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.dialogflow.v2beta1.IngestContextReferencesResponse.class,
com.google.cloud.dialogflow.v2beta1.IngestContextReferencesResponse.Builder.class);
}
public static final int INGESTED_CONTEXT_REFERENCES_FIELD_NUMBER = 1;
private static final class IngestedContextReferencesDefaultEntryHolder {
static final com.google.protobuf.MapEntry<
java.lang.String, com.google.cloud.dialogflow.v2beta1.Conversation.ContextReference>
defaultEntry =
com.google.protobuf.MapEntry
.<java.lang.String,
com.google.cloud.dialogflow.v2beta1.Conversation.ContextReference>
newDefaultInstance(
com.google.cloud.dialogflow.v2beta1.ConversationProto
.internal_static_google_cloud_dialogflow_v2beta1_IngestContextReferencesResponse_IngestedContextReferencesEntry_descriptor,
com.google.protobuf.WireFormat.FieldType.STRING,
"",
com.google.protobuf.WireFormat.FieldType.MESSAGE,
com.google.cloud.dialogflow.v2beta1.Conversation.ContextReference
.getDefaultInstance());
}
@SuppressWarnings("serial")
private com.google.protobuf.MapField<
java.lang.String, com.google.cloud.dialogflow.v2beta1.Conversation.ContextReference>
ingestedContextReferences_;
private com.google.protobuf.MapField<
java.lang.String, com.google.cloud.dialogflow.v2beta1.Conversation.ContextReference>
internalGetIngestedContextReferences() {
if (ingestedContextReferences_ == null) {
return com.google.protobuf.MapField.emptyMapField(
IngestedContextReferencesDefaultEntryHolder.defaultEntry);
}
return ingestedContextReferences_;
}
public int getIngestedContextReferencesCount() {
return internalGetIngestedContextReferences().getMap().size();
}
/**
*
*
* <pre>
* All context references ingested.
* </pre>
*
* <code>
* map<string, .google.cloud.dialogflow.v2beta1.Conversation.ContextReference> ingested_context_references = 1;
* </code>
*/
@java.lang.Override
public boolean containsIngestedContextReferences(java.lang.String key) {
if (key == null) {
throw new NullPointerException("map key");
}
return internalGetIngestedContextReferences().getMap().containsKey(key);
}
/** Use {@link #getIngestedContextReferencesMap()} instead. */
@java.lang.Override
@java.lang.Deprecated
public java.util.Map<
java.lang.String, com.google.cloud.dialogflow.v2beta1.Conversation.ContextReference>
getIngestedContextReferences() {
return getIngestedContextReferencesMap();
}
/**
*
*
* <pre>
* All context references ingested.
* </pre>
*
* <code>
* map<string, .google.cloud.dialogflow.v2beta1.Conversation.ContextReference> ingested_context_references = 1;
* </code>
*/
@java.lang.Override
public java.util.Map<
java.lang.String, com.google.cloud.dialogflow.v2beta1.Conversation.ContextReference>
getIngestedContextReferencesMap() {
return internalGetIngestedContextReferences().getMap();
}
/**
*
*
* <pre>
* All context references ingested.
* </pre>
*
* <code>
* map<string, .google.cloud.dialogflow.v2beta1.Conversation.ContextReference> ingested_context_references = 1;
* </code>
*/
@java.lang.Override
public /* nullable */ com.google.cloud.dialogflow.v2beta1.Conversation.ContextReference
getIngestedContextReferencesOrDefault(
java.lang.String key,
/* nullable */
com.google.cloud.dialogflow.v2beta1.Conversation.ContextReference defaultValue) {
if (key == null) {
throw new NullPointerException("map key");
}
java.util.Map<
java.lang.String, com.google.cloud.dialogflow.v2beta1.Conversation.ContextReference>
map = internalGetIngestedContextReferences().getMap();
return map.containsKey(key) ? map.get(key) : defaultValue;
}
/**
*
*
* <pre>
* All context references ingested.
* </pre>
*
* <code>
* map<string, .google.cloud.dialogflow.v2beta1.Conversation.ContextReference> ingested_context_references = 1;
* </code>
*/
@java.lang.Override
public com.google.cloud.dialogflow.v2beta1.Conversation.ContextReference
getIngestedContextReferencesOrThrow(java.lang.String key) {
if (key == null) {
throw new NullPointerException("map key");
}
java.util.Map<
java.lang.String, com.google.cloud.dialogflow.v2beta1.Conversation.ContextReference>
map = internalGetIngestedContextReferences().getMap();
if (!map.containsKey(key)) {
throw new java.lang.IllegalArgumentException();
}
return map.get(key);
}
  // NOTE(review): protoc-generated message plumbing (DO NOT EDIT).
  // memoizedIsInitialized: -1 = not yet computed, 0 = not initialized, 1 = initialized
  // (see isInitialized() below, which reads/writes exactly those values).
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No required fields in this message, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Serializes the map field (field number 1) as repeated MapEntry messages.
    com.google.protobuf.GeneratedMessageV3.serializeStringMapTo(
        output,
        internalGetIngestedContextReferences(),
        IngestedContextReferencesDefaultEntryHolder.defaultEntry,
        1);
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    // Each map entry is sized as a synthetic MapEntry message with field number 1.
    for (java.util.Map.Entry<
            java.lang.String, com.google.cloud.dialogflow.v2beta1.Conversation.ContextReference>
        entry : internalGetIngestedContextReferences().getMap().entrySet()) {
      com.google.protobuf.MapEntry<
              java.lang.String, com.google.cloud.dialogflow.v2beta1.Conversation.ContextReference>
          ingestedContextReferences__ =
              IngestedContextReferencesDefaultEntryHolder.defaultEntry
                  .newBuilderForType()
                  .setKey(entry.getKey())
                  .setValue(entry.getValue())
                  .build();
      size +=
          com.google.protobuf.CodedOutputStream.computeMessageSize(1, ingestedContextReferences__);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.dialogflow.v2beta1.IngestContextReferencesResponse)) {
      return super.equals(obj);
    }
    com.google.cloud.dialogflow.v2beta1.IngestContextReferencesResponse other =
        (com.google.cloud.dialogflow.v2beta1.IngestContextReferencesResponse) obj;
    if (!internalGetIngestedContextReferences()
        .equals(other.internalGetIngestedContextReferences())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    // Empty map contributes nothing, keeping hash consistent with equals().
    if (!internalGetIngestedContextReferences().getMap().isEmpty()) {
      hash = (37 * hash) + INGESTED_CONTEXT_REFERENCES_FIELD_NUMBER;
      hash = (53 * hash) + internalGetIngestedContextReferences().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // NOTE(review): protoc-generated static parse/builder factory methods (DO NOT EDIT).
  // Standard overload set delegating to PARSER / GeneratedMessageV3 helpers.
  public static com.google.cloud.dialogflow.v2beta1.IngestContextReferencesResponse parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.dialogflow.v2beta1.IngestContextReferencesResponse parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.dialogflow.v2beta1.IngestContextReferencesResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.dialogflow.v2beta1.IngestContextReferencesResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.dialogflow.v2beta1.IngestContextReferencesResponse parseFrom(
      byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.dialogflow.v2beta1.IngestContextReferencesResponse parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.dialogflow.v2beta1.IngestContextReferencesResponse parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.dialogflow.v2beta1.IngestContextReferencesResponse parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.dialogflow.v2beta1.IngestContextReferencesResponse
      parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.dialogflow.v2beta1.IngestContextReferencesResponse
      parseDelimitedFrom(
          java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.dialogflow.v2beta1.IngestContextReferencesResponse parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.dialogflow.v2beta1.IngestContextReferencesResponse parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(
      com.google.cloud.dialogflow.v2beta1.IngestContextReferencesResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // The default instance hands out a fresh Builder; others seed it with their own state.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  // NOTE(review): protoc-generated Builder (DO NOT EDIT). The map field is backed by a
  // MapFieldBuilder that lazily converts between message and builder entries; the exact
  // call order (ensureBuilderMap / onChanged / bitField bookkeeping) is load-bearing.
  /**
   *
   *
   * <pre>
   * The response message for [ConversationsService.IngestContextReferences][].
   * </pre>
   *
   * Protobuf type {@code google.cloud.dialogflow.v2beta1.IngestContextReferencesResponse}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.dialogflow.v2beta1.IngestContextReferencesResponse)
      com.google.cloud.dialogflow.v2beta1.IngestContextReferencesResponseOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.dialogflow.v2beta1.ConversationProto
          .internal_static_google_cloud_dialogflow_v2beta1_IngestContextReferencesResponse_descriptor;
    }
    @SuppressWarnings({"rawtypes"})
    protected com.google.protobuf.MapFieldReflectionAccessor internalGetMapFieldReflection(
        int number) {
      switch (number) {
        case 1:
          return internalGetIngestedContextReferences();
        default:
          throw new RuntimeException("Invalid map field number: " + number);
      }
    }
    @SuppressWarnings({"rawtypes"})
    protected com.google.protobuf.MapFieldReflectionAccessor internalGetMutableMapFieldReflection(
        int number) {
      switch (number) {
        case 1:
          return internalGetMutableIngestedContextReferences();
        default:
          throw new RuntimeException("Invalid map field number: " + number);
      }
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.dialogflow.v2beta1.ConversationProto
          .internal_static_google_cloud_dialogflow_v2beta1_IngestContextReferencesResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.dialogflow.v2beta1.IngestContextReferencesResponse.class,
              com.google.cloud.dialogflow.v2beta1.IngestContextReferencesResponse.Builder.class);
    }
    // Construct using
    // com.google.cloud.dialogflow.v2beta1.IngestContextReferencesResponse.newBuilder()
    private Builder() {}
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      internalGetMutableIngestedContextReferences().clear();
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.dialogflow.v2beta1.ConversationProto
          .internal_static_google_cloud_dialogflow_v2beta1_IngestContextReferencesResponse_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.dialogflow.v2beta1.IngestContextReferencesResponse
        getDefaultInstanceForType() {
      return com.google.cloud.dialogflow.v2beta1.IngestContextReferencesResponse
          .getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.dialogflow.v2beta1.IngestContextReferencesResponse build() {
      com.google.cloud.dialogflow.v2beta1.IngestContextReferencesResponse result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.dialogflow.v2beta1.IngestContextReferencesResponse buildPartial() {
      com.google.cloud.dialogflow.v2beta1.IngestContextReferencesResponse result =
          new com.google.cloud.dialogflow.v2beta1.IngestContextReferencesResponse(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    private void buildPartial0(
        com.google.cloud.dialogflow.v2beta1.IngestContextReferencesResponse result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        // Materializes any pending entry builders into an immutable MapField.
        result.ingestedContextReferences_ =
            internalGetIngestedContextReferences()
                .build(IngestedContextReferencesDefaultEntryHolder.defaultEntry);
      }
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.dialogflow.v2beta1.IngestContextReferencesResponse) {
        return mergeFrom(
            (com.google.cloud.dialogflow.v2beta1.IngestContextReferencesResponse) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    public Builder mergeFrom(
        com.google.cloud.dialogflow.v2beta1.IngestContextReferencesResponse other) {
      if (other
          == com.google.cloud.dialogflow.v2beta1.IngestContextReferencesResponse
              .getDefaultInstance()) return this;
      internalGetMutableIngestedContextReferences()
          .mergeFrom(other.internalGetIngestedContextReferences());
      bitField0_ |= 0x00000001;
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                // Tag 10 = field 1, wire type 2: one serialized MapEntry per map element.
                com.google.protobuf.MapEntry<
                        java.lang.String,
                        com.google.cloud.dialogflow.v2beta1.Conversation.ContextReference>
                    ingestedContextReferences__ =
                        input.readMessage(
                            IngestedContextReferencesDefaultEntryHolder.defaultEntry
                                .getParserForType(),
                            extensionRegistry);
                internalGetMutableIngestedContextReferences()
                    .ensureBuilderMap()
                    .put(
                        ingestedContextReferences__.getKey(),
                        ingestedContextReferences__.getValue());
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    private int bitField0_;
    private static final class IngestedContextReferencesConverter
        implements com.google.protobuf.MapFieldBuilder.Converter<
            java.lang.String,
            com.google.cloud.dialogflow.v2beta1.Conversation.ContextReferenceOrBuilder,
            com.google.cloud.dialogflow.v2beta1.Conversation.ContextReference> {
      @java.lang.Override
      public com.google.cloud.dialogflow.v2beta1.Conversation.ContextReference build(
          com.google.cloud.dialogflow.v2beta1.Conversation.ContextReferenceOrBuilder val) {
        if (val instanceof com.google.cloud.dialogflow.v2beta1.Conversation.ContextReference) {
          return (com.google.cloud.dialogflow.v2beta1.Conversation.ContextReference) val;
        }
        return ((com.google.cloud.dialogflow.v2beta1.Conversation.ContextReference.Builder) val)
            .build();
      }
      @java.lang.Override
      public com.google.protobuf.MapEntry<
              java.lang.String, com.google.cloud.dialogflow.v2beta1.Conversation.ContextReference>
          defaultEntry() {
        return IngestedContextReferencesDefaultEntryHolder.defaultEntry;
      }
    }
    ;
    private static final IngestedContextReferencesConverter ingestedContextReferencesConverter =
        new IngestedContextReferencesConverter();
    private com.google.protobuf.MapFieldBuilder<
            java.lang.String,
            com.google.cloud.dialogflow.v2beta1.Conversation.ContextReferenceOrBuilder,
            com.google.cloud.dialogflow.v2beta1.Conversation.ContextReference,
            com.google.cloud.dialogflow.v2beta1.Conversation.ContextReference.Builder>
        ingestedContextReferences_;
    private com.google.protobuf.MapFieldBuilder<
            java.lang.String,
            com.google.cloud.dialogflow.v2beta1.Conversation.ContextReferenceOrBuilder,
            com.google.cloud.dialogflow.v2beta1.Conversation.ContextReference,
            com.google.cloud.dialogflow.v2beta1.Conversation.ContextReference.Builder>
        internalGetIngestedContextReferences() {
      if (ingestedContextReferences_ == null) {
        // Read path: returns a throwaway empty builder rather than allocating the field.
        return new com.google.protobuf.MapFieldBuilder<>(ingestedContextReferencesConverter);
      }
      return ingestedContextReferences_;
    }
    private com.google.protobuf.MapFieldBuilder<
            java.lang.String,
            com.google.cloud.dialogflow.v2beta1.Conversation.ContextReferenceOrBuilder,
            com.google.cloud.dialogflow.v2beta1.Conversation.ContextReference,
            com.google.cloud.dialogflow.v2beta1.Conversation.ContextReference.Builder>
        internalGetMutableIngestedContextReferences() {
      if (ingestedContextReferences_ == null) {
        ingestedContextReferences_ =
            new com.google.protobuf.MapFieldBuilder<>(ingestedContextReferencesConverter);
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return ingestedContextReferences_;
    }
    public int getIngestedContextReferencesCount() {
      return internalGetIngestedContextReferences().ensureBuilderMap().size();
    }
    /**
     *
     *
     * <pre>
     * All context references ingested.
     * </pre>
     *
     * <code>
     * map<string, .google.cloud.dialogflow.v2beta1.Conversation.ContextReference> ingested_context_references = 1;
     * </code>
     */
    @java.lang.Override
    public boolean containsIngestedContextReferences(java.lang.String key) {
      if (key == null) {
        throw new NullPointerException("map key");
      }
      return internalGetIngestedContextReferences().ensureBuilderMap().containsKey(key);
    }
    /** Use {@link #getIngestedContextReferencesMap()} instead. */
    @java.lang.Override
    @java.lang.Deprecated
    public java.util.Map<
            java.lang.String, com.google.cloud.dialogflow.v2beta1.Conversation.ContextReference>
        getIngestedContextReferences() {
      return getIngestedContextReferencesMap();
    }
    /**
     *
     *
     * <pre>
     * All context references ingested.
     * </pre>
     *
     * <code>
     * map<string, .google.cloud.dialogflow.v2beta1.Conversation.ContextReference> ingested_context_references = 1;
     * </code>
     */
    @java.lang.Override
    public java.util.Map<
            java.lang.String, com.google.cloud.dialogflow.v2beta1.Conversation.ContextReference>
        getIngestedContextReferencesMap() {
      return internalGetIngestedContextReferences().getImmutableMap();
    }
    /**
     *
     *
     * <pre>
     * All context references ingested.
     * </pre>
     *
     * <code>
     * map<string, .google.cloud.dialogflow.v2beta1.Conversation.ContextReference> ingested_context_references = 1;
     * </code>
     */
    @java.lang.Override
    public /* nullable */ com.google.cloud.dialogflow.v2beta1.Conversation.ContextReference
        getIngestedContextReferencesOrDefault(
            java.lang.String key,
            /* nullable */
            com.google.cloud.dialogflow.v2beta1.Conversation.ContextReference defaultValue) {
      if (key == null) {
        throw new NullPointerException("map key");
      }
      java.util.Map<
              java.lang.String,
              com.google.cloud.dialogflow.v2beta1.Conversation.ContextReferenceOrBuilder>
          map = internalGetMutableIngestedContextReferences().ensureBuilderMap();
      return map.containsKey(key)
          ? ingestedContextReferencesConverter.build(map.get(key))
          : defaultValue;
    }
    /**
     *
     *
     * <pre>
     * All context references ingested.
     * </pre>
     *
     * <code>
     * map<string, .google.cloud.dialogflow.v2beta1.Conversation.ContextReference> ingested_context_references = 1;
     * </code>
     */
    @java.lang.Override
    public com.google.cloud.dialogflow.v2beta1.Conversation.ContextReference
        getIngestedContextReferencesOrThrow(java.lang.String key) {
      if (key == null) {
        throw new NullPointerException("map key");
      }
      java.util.Map<
              java.lang.String,
              com.google.cloud.dialogflow.v2beta1.Conversation.ContextReferenceOrBuilder>
          map = internalGetMutableIngestedContextReferences().ensureBuilderMap();
      if (!map.containsKey(key)) {
        throw new java.lang.IllegalArgumentException();
      }
      return ingestedContextReferencesConverter.build(map.get(key));
    }
    public Builder clearIngestedContextReferences() {
      bitField0_ = (bitField0_ & ~0x00000001);
      internalGetMutableIngestedContextReferences().clear();
      return this;
    }
    /**
     *
     *
     * <pre>
     * All context references ingested.
     * </pre>
     *
     * <code>
     * map<string, .google.cloud.dialogflow.v2beta1.Conversation.ContextReference> ingested_context_references = 1;
     * </code>
     */
    public Builder removeIngestedContextReferences(java.lang.String key) {
      if (key == null) {
        throw new NullPointerException("map key");
      }
      internalGetMutableIngestedContextReferences().ensureBuilderMap().remove(key);
      return this;
    }
    /** Use alternate mutation accessors instead. */
    @java.lang.Deprecated
    public java.util.Map<
            java.lang.String, com.google.cloud.dialogflow.v2beta1.Conversation.ContextReference>
        getMutableIngestedContextReferences() {
      bitField0_ |= 0x00000001;
      return internalGetMutableIngestedContextReferences().ensureMessageMap();
    }
    /**
     *
     *
     * <pre>
     * All context references ingested.
     * </pre>
     *
     * <code>
     * map<string, .google.cloud.dialogflow.v2beta1.Conversation.ContextReference> ingested_context_references = 1;
     * </code>
     */
    public Builder putIngestedContextReferences(
        java.lang.String key,
        com.google.cloud.dialogflow.v2beta1.Conversation.ContextReference value) {
      if (key == null) {
        throw new NullPointerException("map key");
      }
      if (value == null) {
        throw new NullPointerException("map value");
      }
      internalGetMutableIngestedContextReferences().ensureBuilderMap().put(key, value);
      bitField0_ |= 0x00000001;
      return this;
    }
    /**
     *
     *
     * <pre>
     * All context references ingested.
     * </pre>
     *
     * <code>
     * map<string, .google.cloud.dialogflow.v2beta1.Conversation.ContextReference> ingested_context_references = 1;
     * </code>
     */
    public Builder putAllIngestedContextReferences(
        java.util.Map<
                java.lang.String, com.google.cloud.dialogflow.v2beta1.Conversation.ContextReference>
            values) {
      // Validate up front so a null key/value leaves the map untouched.
      for (java.util.Map.Entry<
              java.lang.String, com.google.cloud.dialogflow.v2beta1.Conversation.ContextReference>
          e : values.entrySet()) {
        if (e.getKey() == null || e.getValue() == null) {
          throw new NullPointerException();
        }
      }
      internalGetMutableIngestedContextReferences().ensureBuilderMap().putAll(values);
      bitField0_ |= 0x00000001;
      return this;
    }
    /**
     *
     *
     * <pre>
     * All context references ingested.
     * </pre>
     *
     * <code>
     * map<string, .google.cloud.dialogflow.v2beta1.Conversation.ContextReference> ingested_context_references = 1;
     * </code>
     */
    public com.google.cloud.dialogflow.v2beta1.Conversation.ContextReference.Builder
        putIngestedContextReferencesBuilderIfAbsent(java.lang.String key) {
      java.util.Map<
              java.lang.String,
              com.google.cloud.dialogflow.v2beta1.Conversation.ContextReferenceOrBuilder>
          builderMap = internalGetMutableIngestedContextReferences().ensureBuilderMap();
      com.google.cloud.dialogflow.v2beta1.Conversation.ContextReferenceOrBuilder entry =
          builderMap.get(key);
      if (entry == null) {
        entry = com.google.cloud.dialogflow.v2beta1.Conversation.ContextReference.newBuilder();
        builderMap.put(key, entry);
      }
      // An existing immutable message is swapped for its builder so callers can mutate it.
      if (entry instanceof com.google.cloud.dialogflow.v2beta1.Conversation.ContextReference) {
        entry =
            ((com.google.cloud.dialogflow.v2beta1.Conversation.ContextReference) entry).toBuilder();
        builderMap.put(key, entry);
      }
      return (com.google.cloud.dialogflow.v2beta1.Conversation.ContextReference.Builder) entry;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.cloud.dialogflow.v2beta1.IngestContextReferencesResponse)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.dialogflow.v2beta1.IngestContextReferencesResponse)
  // NOTE(review): protoc-generated singleton/parser trailer (DO NOT EDIT).
  private static final com.google.cloud.dialogflow.v2beta1.IngestContextReferencesResponse
      DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.dialogflow.v2beta1.IngestContextReferencesResponse();
  }
  public static com.google.cloud.dialogflow.v2beta1.IngestContextReferencesResponse
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  private static final com.google.protobuf.Parser<IngestContextReferencesResponse> PARSER =
      new com.google.protobuf.AbstractParser<IngestContextReferencesResponse>() {
        @java.lang.Override
        public IngestContextReferencesResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            // Attach whatever was parsed so far as the unfinished message.
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<IngestContextReferencesResponse> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<IngestContextReferencesResponse> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.dialogflow.v2beta1.IngestContextReferencesResponse
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/commons-geometry | 35,809 | commons-geometry-euclidean/src/main/java/org/apache/commons/geometry/euclidean/threed/rotation/QuaternionRotation.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.geometry.euclidean.threed.rotation;
import java.util.Objects;
import java.util.function.DoubleFunction;
import org.apache.commons.geometry.core.internal.GeometryInternalError;
import org.apache.commons.geometry.euclidean.internal.Vectors;
import org.apache.commons.geometry.euclidean.threed.AffineTransformMatrix3D;
import org.apache.commons.geometry.euclidean.threed.Vector3D;
import org.apache.commons.numbers.angle.Angle;
import org.apache.commons.numbers.quaternion.Quaternion;
import org.apache.commons.numbers.quaternion.Slerp;
/**
* Class using a unit-length quaternion to represent
* <a href="https://en.wikipedia.org/wiki/Quaternions_and_spatial_rotation">rotations</a>
* in 3-dimensional Euclidean space.
* The underlying quaternion is in <em>positive polar form</em>: It is normalized and has a
* non-negative scalar component ({@code w}).
*
* @see Quaternion
*/
public final class QuaternionRotation implements Rotation3D {
    /** Threshold value for the dot product of antiparallel vectors. If the dot product of two vectors is
     * less than this value, (adjusted for the lengths of the vectors), then the vectors are considered to be
     * antiparallel (ie, negations of each other).
     */
    private static final double ANTIPARALLEL_DOT_THRESHOLD = 2.0e-15 - 1.0;
    /** Threshold value used to identify singularities when converting from quaternions to
     * axis angle sequences.
     */
    private static final double AXIS_ANGLE_SINGULARITY_THRESHOLD = 0.9999999999;
    /** Instance used to represent the identity rotation, ie a rotation with
     * an angle of zero.
     */
    // Quaternion.ONE has w = 1 and a zero vector part, i.e. a zero-angle rotation.
    private static final QuaternionRotation IDENTITY_INSTANCE = of(Quaternion.ONE);
    /** Unit-length quaternion instance in positive polar form. */
    private final Quaternion quat;
    /** Simple constructor. The given quaternion is converted to positive polar form.
     * @param quat quaternion instance
     * @throws IllegalStateException if the norm of the given components is zero,
     *      NaN, or infinite
     */
    private QuaternionRotation(final Quaternion quat) {
        // positivePolarForm() normalizes and ensures a non-negative scalar (w) component,
        // giving a canonical representative of the rotation.
        this.quat = quat.positivePolarForm();
    }
    /** Get the underlying quaternion instance.
     * The returned quaternion is always in positive polar form (unit norm, non-negative w).
     * @return the quaternion instance
     */
    public Quaternion getQuaternion() {
        return quat;
    }
/**
* Get the axis of rotation as a normalized {@link Vector3D}. The rotation axis
* is not well-defined when the rotation is the identity rotation, ie it has a
* rotation angle of zero. In this case, the vector representing the positive
* x-axis is returned.
*
* @return the axis of rotation
*/
@Override
public Vector3D getAxis() {
final Vector3D axis = Vector3D.of(quat.getX(), quat.getY(), quat.getZ())
.normalizeOrNull();
return axis != null ?
axis :
Vector3D.Unit.PLUS_X;
}
/**
* Get the angle of rotation in radians. The returned value is in the range 0
* through {@code pi}.
*
* @return The rotation angle in the range {@code [0, pi]}.
*/
@Override
public double getAngle() {
return 2 * Math.acos(quat.getW());
}
    /**
     * Get the inverse of this rotation. The returned rotation has the same
     * rotation angle but the opposite rotation axis. If {@code r.apply(u)}
     * is equal to {@code v}, then {@code r.negate().apply(v)} is equal
     * to {@code u}.
     *
     * @return the negation (inverse) of the rotation
     */
    @Override
    public QuaternionRotation inverse() {
        // The conjugate negates the vector part, flipping the axis while preserving
        // the angle; for a unit quaternion this is the multiplicative inverse.
        return new QuaternionRotation(quat.conjugate());
    }
    /**
     * Apply this rotation to the given vector.
     *
     * @param v vector to rotate
     * @return the rotated vector
     */
    @Override
    public Vector3D apply(final Vector3D v) {
        // Computes q * v * q^-1 directly, expanded into scalar arithmetic to avoid
        // constructing intermediate Quaternion instances.
        final double qw = quat.getW();
        final double qx = quat.getX();
        final double qy = quat.getY();
        final double qz = quat.getZ();
        final double x = v.getX();
        final double y = v.getY();
        final double z = v.getZ();
        // calculate the Hamilton product of the quaternion and vector
        // (the vector is treated as a pure quaternion with scalar part 0)
        final double iw = -(qx * x) - (qy * y) - (qz * z);
        final double ix = (qw * x) + (qy * z) - (qz * y);
        final double iy = (qw * y) + (qz * x) - (qx * z);
        final double iz = (qw * z) + (qx * y) - (qy * x);
        // calculate the Hamilton product of the intermediate vector and
        // the inverse quaternion (conjugate: negated vector part)
        return Vector3D.of(
                    (iw * -qx) + (ix * qw) + (iy * -qz) - (iz * -qy),
                    (iw * -qy) - (ix * -qz) + (iy * qw) + (iz * -qx),
                    (iw * -qz) + (ix * -qy) - (iy * -qx) + (iz * qw)
                );
    }
    /** {@inheritDoc}
     *
     * <p>This method simply calls {@code apply(vec)} since rotations treat
     * points and vectors similarly.</p>
     */
    @Override
    public Vector3D applyVector(final Vector3D vec) {
        // Rotations are linear maps with no translation component, so points and
        // vectors transform identically.
        return apply(vec);
    }
    /** {@inheritDoc}
     *
     * <p>This method simply returns true since rotations always preserve the orientation
     * of the space.</p>
     */
    @Override
    public boolean preservesOrientation() {
        // A proper rotation has determinant +1; it never mirrors the space.
        return true;
    }
    /** Return an {@link AffineTransformMatrix3D} representing the same rotation as this
     * instance. The translation column of the returned matrix is zero.
     * @return a transform matrix representing the same rotation as this instance
     */
    public AffineTransformMatrix3D toMatrix() {
        // Standard quaternion-to-rotation-matrix expansion for a unit quaternion.
        final double qw = quat.getW();
        final double qx = quat.getX();
        final double qy = quat.getY();
        final double qz = quat.getZ();
        // pre-calculate products that we'll need
        final double xx = qx * qx;
        final double xy = qx * qy;
        final double xz = qx * qz;
        final double xw = qx * qw;
        final double yy = qy * qy;
        final double yz = qy * qz;
        final double yw = qy * qw;
        final double zz = qz * qz;
        final double zw = qz * qw;
        final double m00 = 1.0 - (2.0 * (yy + zz));
        final double m01 = 2.0 * (xy - zw);
        final double m02 = 2.0 * (xz + yw);
        final double m03 = 0.0;
        final double m10 = 2.0 * (xy + zw);
        final double m11 = 1.0 - (2.0 * (xx + zz));
        final double m12 = 2.0 * (yz - xw);
        final double m13 = 0.0;
        final double m20 = 2.0 * (xz - yw);
        final double m21 = 2.0 * (yz + xw);
        final double m22 = 1.0 - (2.0 * (xx + yy));
        final double m23 = 0.0;
        return AffineTransformMatrix3D.of(
                    m00, m01, m02, m03,
                    m10, m11, m12, m13,
                    m20, m21, m22, m23
                );
    }
/**
* Multiply this instance by the given argument, returning the result as
* a new instance. This is equivalent to the expression {@code t * q} where
* {@code q} is the argument and {@code t} is this instance.
*
* <p>
* Multiplication of quaternions behaves similarly to transformation
* matrices in regard to the order that operations are performed.
* For example, if <code>q<sub>1</sub></code> and <code>q<sub>2</sub></code> are unit
* quaternions, then the quaternion <code>q<sub>r</sub> = q<sub>1</sub>*q<sub>2</sub></code>
* will give the effect of applying the rotation in <code>q<sub>2</sub></code> followed
* by the rotation in <code>q<sub>1</sub></code>. In other words, the rightmost element
* in the multiplication is applied first.
* </p>
*
* @param q quaternion to multiply with the current instance
* @return the result of multiplying this quaternion by the argument
*/
public QuaternionRotation multiply(final QuaternionRotation q) {
final Quaternion product = quat.multiply(q.quat);
return new QuaternionRotation(product);
}
    /** Multiply the argument by this instance, returning the result as
     * a new instance. This is equivalent to the expression {@code q * t} where
     * {@code q} is the argument and {@code t} is this instance.
     *
     * <p>
     * Multiplication of quaternions behaves similarly to transformation
     * matrices in regard to the order that operations are performed.
     * For example, if <code>q<sub>1</sub></code> and <code>q<sub>2</sub></code> are unit
     * quaternions, then the quaternion <code>q<sub>r</sub> = q<sub>1</sub>*q<sub>2</sub></code>
     * will give the effect of applying the rotation in <code>q<sub>2</sub></code> followed
     * by the rotation in <code>q<sub>1</sub></code>. In other words, the rightmost element
     * in the multiplication is applied first.
     * </p>
     *
     * @param q quaternion to multiply by the current instance
     * @return the result of multiplying the argument by the current instance
     */
    public QuaternionRotation premultiply(final QuaternionRotation q) {
        // q * this: this instance's rotation is applied first, then q's.
        return q.multiply(this);
    }
    /**
     * Creates a function that performs a
     * <a href="https://en.wikipedia.org/wiki/Slerp">spherical
     * linear interpolation</a> between this instance and the argument.
     * <p>
     * The argument to the function returned by this method is the
     * interpolation parameter {@code t}.
     * If {@code t = 0}, the rotation is equal to this instance.
     * If {@code t = 1}, the rotation is equal to the {@code end} instance.
     * All other values are interpolated (or extrapolated if {@code t} is
     * outside of the {@code [0, 1]} range).
     *
     * @param end end value of the interpolation
     * @return a function that interpolates between this instance and the
     *      argument.
     *
     * @see org.apache.commons.numbers.quaternion.Slerp
     */
    public DoubleFunction<QuaternionRotation> slerp(final QuaternionRotation end) {
        // The Slerp instance is created once here and captured by the returned lambda,
        // so repeated evaluations at different t values reuse the same precomputation.
        final Slerp s = new Slerp(getQuaternion(), end.getQuaternion());
        return t -> of(s.apply(t));
    }
/** Get a sequence of axis-angle rotations that produce an overall rotation equivalent to this instance.
*
* <p>
* In most cases, the returned rotation sequence will be unique. However, at points of singularity
* (second angle equal to {@code 0} or {@code -pi} for Euler angles and {@code +pi/2} or {@code -pi/2}
* for Tait-Bryan angles), there are an infinite number of possible sequences that produce the same result.
* In these cases, the result is returned that leaves the last rotation equal to 0 (in the case of a relative
* reference frame) or the first rotation equal to 0 (in the case of an absolute reference frame).
* </p>
*
* @param frame the reference frame used to interpret the positions of the rotation axes
* @param axes the sequence of rotation axes
* @return a sequence of axis-angle rotations equivalent to this rotation
*/
public AxisAngleSequence toAxisAngleSequence(final AxisReferenceFrame frame, final AxisSequence axes) {
if (frame == null) {
throw new IllegalArgumentException("Axis reference frame cannot be null");
}
if (axes == null) {
throw new IllegalArgumentException("Axis sequence cannot be null");
}
final double[] angles = getAngles(frame, axes);
return new AxisAngleSequence(frame, axes, angles[0], angles[1], angles[2]);
}
/** Get a sequence of axis-angle rotations that produce an overall rotation equivalent to this instance.
* Each rotation axis is interpreted relative to the rotated coordinate frame (ie, intrinsic rotation).
* @param axes the sequence of rotation axes
* @return a sequence of relative axis-angle rotations equivalent to this rotation
* @see #toAxisAngleSequence(AxisReferenceFrame, AxisSequence)
*/
public AxisAngleSequence toRelativeAxisAngleSequence(final AxisSequence axes) {
return toAxisAngleSequence(AxisReferenceFrame.RELATIVE, axes);
}
/** Get a sequence of axis-angle rotations that produce an overall rotation equivalent to this instance.
* Each rotation axis is interpreted as part of an absolute, unmoving coordinate frame (ie, extrinsic rotation).
* @param axes the sequence of rotation axes
* @return a sequence of absolute axis-angle rotations equivalent to this rotation
* @see #toAxisAngleSequence(AxisReferenceFrame, AxisSequence)
*/
public AxisAngleSequence toAbsoluteAxisAngleSequence(final AxisSequence axes) {
return toAxisAngleSequence(AxisReferenceFrame.ABSOLUTE, axes);
}
/** {@inheritDoc} */
@Override
public int hashCode() {
return quat.hashCode();
}
/** {@inheritDoc} */
@Override
public boolean equals(final Object obj) {
if (this == obj) {
return true;
}
if (!(obj instanceof QuaternionRotation)) {
return false;
}
final QuaternionRotation other = (QuaternionRotation) obj;
return Objects.equals(this.quat, other.quat);
}
/** {@inheritDoc} */
@Override
public String toString() {
return quat.toString();
}
/** Get a sequence of angles around the given axes that produce a rotation equivalent
* to this instance.
* @param frame the reference frame used to define the positions of the axes
* @param axes the axis sequence
* @return a sequence of angles around the given axes that produce a rotation equivalent
* to this instance
*/
private double[] getAngles(final AxisReferenceFrame frame, final AxisSequence axes) {
final AxisSequenceType sequenceType = axes.getType();
final Vector3D axis1 = axes.getAxis1();
final Vector3D axis2 = axes.getAxis2();
final Vector3D axis3 = axes.getAxis3();
if (frame == AxisReferenceFrame.RELATIVE) {
if (sequenceType == AxisSequenceType.TAIT_BRYAN) {
return getRelativeTaitBryanAngles(axis1, axis2, axis3);
} else if (sequenceType == AxisSequenceType.EULER) {
return getRelativeEulerAngles(axis1, axis2);
}
} else if (frame == AxisReferenceFrame.ABSOLUTE) {
if (sequenceType == AxisSequenceType.TAIT_BRYAN) {
return getAbsoluteTaitBryanAngles(axis1, axis2, axis3);
} else if (sequenceType == AxisSequenceType.EULER) {
return getAbsoluteEulerAngles(axis1, axis2);
}
}
// all possibilities should have been covered above
throw new GeometryInternalError();
}
    /** Get a sequence of angles around the given Tait-Bryan axes that produce a rotation equivalent
     * to this instance. The axes are interpreted as being relative to the rotated coordinate frame.
     * @param axis1 first Tait-Bryan axis
     * @param axis2 second Tait-Bryan axis
     * @param axis3 third Tait-Bryan axis
     * @return a sequence of rotation angles around the relative input axes that produce a rotation equivalent
     * to this instance
     */
    private double[] getRelativeTaitBryanAngles(final Vector3D axis1, final Vector3D axis2, final Vector3D axis3) {
        // We can use geometry to get the first and second angles pretty easily here by analyzing the positions
        // of the transformed rotation axes. The third angle is trickier, but we can get it by treating it as
        // if it were the first rotation in the inverse (which it would be).
        final Vector3D vec3 = apply(axis3);
        final Vector3D invVec1 = inverse().apply(axis1);
        // sine of the second angle: projection of the rotated third axis onto axis2 x axis3
        final double angle2Sin = vec3.dot(axis2.cross(axis3));
        // singularity branch: |sin(angle2)| ~ 1 puts the second angle at +/- pi/2, where the first
        // and third rotations are not uniquely determined
        if (angle2Sin < -AXIS_ANGLE_SINGULARITY_THRESHOLD ||
                angle2Sin > AXIS_ANGLE_SINGULARITY_THRESHOLD) {
            final Vector3D vec2 = apply(axis2);
            final double angle1TanY = vec2.dot(axis1.cross(axis2));
            final double angle1TanX = vec2.dot(axis2);
            final double angle2 = angle2Sin > AXIS_ANGLE_SINGULARITY_THRESHOLD ?
                    Angle.PI_OVER_TWO :
                    -Angle.PI_OVER_TWO;
            // resolve the ambiguity by convention: set the third angle to 0
            return new double[] {
                Math.atan2(angle1TanY, angle1TanX),
                angle2,
                0.0
            };
        }
        // general (non-singular) case
        final Vector3D crossAxis13 = axis1.cross(axis3);
        final double angle1TanY = vec3.dot(crossAxis13);
        final double angle1TanX = vec3.dot(axis3);
        // third angle computed from the inverse rotation applied to the first axis
        final double angle3TanY = invVec1.dot(crossAxis13);
        final double angle3TanX = invVec1.dot(axis1);
        return new double[] {
            Math.atan2(angle1TanY, angle1TanX),
            Math.asin(angle2Sin),
            Math.atan2(angle3TanY, angle3TanX)
        };
    }
/** Get a sequence of angles around the given Tait-Bryan axes that produce a rotation equivalent
* to this instance. The axes are interpreted as being part of an absolute (unmoving) coordinate frame.
* @param axis1 first Tait-Bryan axis
* @param axis2 second Tait-Bryan axis
* @param axis3 third Tait-Bryan axis
* @return a sequence of rotation angles around the absolute input axes that produce a rotation equivalent
* to this instance
*/
private double[] getAbsoluteTaitBryanAngles(final Vector3D axis1, final Vector3D axis2, final Vector3D axis3) {
// A relative axis-angle rotation sequence is equivalent to an absolute one with the rotation
// sequence reversed, meaning we can reuse our relative logic here.
return reverseArray(getRelativeTaitBryanAngles(axis3, axis2, axis1));
}
    /** Get a sequence of angles around the given Euler axes that produce a rotation equivalent
     * to this instance. The axes are interpreted as being relative to the rotated coordinate frame. Only
     * the first two axes are needed since, by definition, the first Euler angle axis is repeated as the
     * third axis.
     * @param axis1 first Euler axis
     * @param axis2 second Euler axis
     * @return a sequence of rotation angles around the relative input axes that produce a rotation equivalent
     * to this instance
     */
    private double[] getRelativeEulerAngles(final Vector3D axis1, final Vector3D axis2) {
        // Use the same overall approach as with the Tait-Bryan angles: get the first two angles by looking
        // at the transformed rotation axes and the third by using the inverse.
        final Vector3D crossAxis = axis1.cross(axis2);
        final Vector3D vec1 = apply(axis1);
        final Vector3D invVec1 = inverse().apply(axis1);
        // cosine of the second angle: projection of the rotated first axis onto itself
        final double angle2Cos = vec1.dot(axis1);
        // singularity branch: |cos(angle2)| ~ 1 puts the second angle at 0 or pi, where the first
        // and third rotations occur around the same axis and are not uniquely determined
        if (angle2Cos < -AXIS_ANGLE_SINGULARITY_THRESHOLD ||
                angle2Cos > AXIS_ANGLE_SINGULARITY_THRESHOLD) {
            final Vector3D vec2 = apply(axis2);
            final double angle1TanY = vec2.dot(crossAxis);
            final double angle1TanX = vec2.dot(axis2);
            final double angle2 = angle2Cos > AXIS_ANGLE_SINGULARITY_THRESHOLD ? 0.0 : Math.PI;
            // resolve the ambiguity by convention: set the third angle to 0
            return new double[] {
                Math.atan2(angle1TanY, angle1TanX),
                angle2,
                0.0
            };
        }
        // general (non-singular) case
        final double angle1TanY = vec1.dot(axis2);
        final double angle1TanX = -vec1.dot(crossAxis);
        // third angle computed from the inverse rotation applied to the first axis
        final double angle3TanY = invVec1.dot(axis2);
        final double angle3TanX = invVec1.dot(crossAxis);
        return new double[] {
            Math.atan2(angle1TanY, angle1TanX),
            Math.acos(angle2Cos),
            Math.atan2(angle3TanY, angle3TanX)
        };
    }
/** Get a sequence of angles around the given Euler axes that produce a rotation equivalent
* to this instance. The axes are interpreted as being part of an absolute (unmoving) coordinate frame.
* Only the first two axes are needed since, by definition, the first Euler angle axis is repeated as
* the third axis.
* @param axis1 first Euler axis
* @param axis2 second Euler axis
* @return a sequence of rotation angles around the absolute input axes that produce a rotation equivalent
* to this instance
*/
private double[] getAbsoluteEulerAngles(final Vector3D axis1, final Vector3D axis2) {
// A relative axis-angle rotation sequence is equivalent to an absolute one with the rotation
// sequence reversed, meaning we can reuse our relative logic here.
return reverseArray(getRelativeEulerAngles(axis1, axis2));
}
/** Create a new instance from the given quaternion. The quaternion is normalized and
* converted to positive polar form (ie, with w >= 0).
*
* @param quat the quaternion to use for the rotation
* @return a new instance built from the given quaternion.
* @throws IllegalStateException if the norm of the given components is zero,
* NaN, or infinite
* @see Quaternion#normalize()
* @see Quaternion#positivePolarForm()
*/
public static QuaternionRotation of(final Quaternion quat) {
return new QuaternionRotation(quat);
}
/**
* Create a new instance from the given quaternion values. The inputs are
* normalized and converted to positive polar form (ie, with w >= 0).
*
* @param w quaternion scalar component
* @param x first quaternion vectorial component
* @param y second quaternion vectorial component
* @param z third quaternion vectorial component
* @return a new instance containing the normalized quaterion components
* @throws IllegalStateException if the norm of the given components is zero,
* NaN, or infinite
* @see Quaternion#normalize()
* @see Quaternion#positivePolarForm()
*/
public static QuaternionRotation of(final double w,
final double x,
final double y,
final double z) {
return of(Quaternion.of(w, x, y, z));
}
/** Return an instance representing a rotation of zero.
* @return instance representing a rotation of zero.
*/
public static QuaternionRotation identity() {
return IDENTITY_INSTANCE;
}
/** Create a new instance representing a rotation of {@code angle} radians around
* {@code axis}.
*
* <p>
* Rotation direction follows the right-hand rule, meaning that if one
* places their right hand such that the thumb points in the direction of the vector,
* the curl of the fingers indicates the direction of rotation.
* </p>
*
* <p>
* Note that the returned quaternion will represent the defined rotation but the values
* returned by {@link #getAxis()} and {@link #getAngle()} may not match the ones given here.
* This is because the axis and angle are normalized such that the axis has unit length,
* and the angle lies in the range {@code [0, pi]}. Depending on the inputs, the axis may
* need to be inverted in order for the angle to lie in this range.
* </p>
*
* @param axis the axis of rotation
* @param angle angle of rotation in radians
* @return a new instance representing the defined rotation
*
* @throws IllegalArgumentException if the given axis cannot be normalized or the angle is NaN or infinite
*/
public static QuaternionRotation fromAxisAngle(final Vector3D axis, final double angle) {
// reference formula:
// http://www.euclideanspace.com/maths/geometry/rotations/conversions/angleToQuaternion/index.htm
final Vector3D normAxis = axis.normalize();
if (!Double.isFinite(angle)) {
throw new IllegalArgumentException("Invalid angle: " + angle);
}
final double halfAngle = 0.5 * angle;
final double sinHalfAngle = Math.sin(halfAngle);
final double w = Math.cos(halfAngle);
final double x = sinHalfAngle * normAxis.getX();
final double y = sinHalfAngle * normAxis.getY();
final double z = sinHalfAngle * normAxis.getZ();
return of(w, x, y, z);
}
    /** Return an instance that rotates the first vector to the second.
     *
     * <p>Except for a possible scale factor, if the returned instance is
     * applied to vector {@code u}, it will produce the vector {@code v}. There are an
     * infinite number of such rotations; this method chooses the one with the smallest
     * associated angle, meaning the one whose axis is orthogonal to the {@code (u, v)}
     * plane. If {@code u} and {@code v} are collinear, an arbitrary rotation axis is
     * chosen.</p>
     *
     * @param u origin vector
     * @param v target vector
     * @return a new instance that rotates {@code u} to point in the direction of {@code v}
     * @throws IllegalArgumentException if either vector has a norm of zero, NaN, or infinity
     */
    public static QuaternionRotation createVectorRotation(final Vector3D u, final Vector3D v) {
        final double normProduct = Vectors.checkedNorm(u) * Vectors.checkedNorm(v);
        final double dot = u.dot(v);
        if (dot < ANTIPARALLEL_DOT_THRESHOLD * normProduct) {
            // Special case where u1 = -u2:
            // create a pi angle rotation around
            // an arbitrary unit vector orthogonal to u1
            // (w = 0 corresponds to cos(pi/2), ie a rotation angle of pi)
            final Vector3D axis = u.orthogonal();
            return of(0,
                      axis.getX(),
                      axis.getY(),
                      axis.getZ());
        }
        // General case:
        // (u1, u2) defines a plane so rotate around the normal of the plane
        // w must equal cos(theta/2); we can calculate this directly using values
        // we already have with the identity cos(theta/2) = sqrt((1 + cos(theta)) / 2)
        // and the fact that dot = norm(u1) * norm(u2) * cos(theta).
        final double w = Math.sqrt(0.5 * (1.0 + (dot / normProduct)));
        // The cross product u1 x u2 must be normalized and then multiplied by
        // sin(theta/2) in order to set the vectorial part of the quaternion. To
        // accomplish this, we'll use the following:
        //
        // 1) norm(a x b) = norm(a) * norm(b) * sin(theta)
        // 2) sin(theta/2) = sqrt((1 - cos(theta)) / 2)
        //
        // Our full, combined normalization and sine half angle term factor then becomes:
        //
        // sqrt((1 - cos(theta)) / 2) / (norm(u1) * norm(u2) * sin(theta))
        //
        // This can be simplified to the expression below.
        final double vectorialScaleFactor = 1.0 / (2.0 * w * normProduct);
        final Vector3D axis = u.cross(v);
        return of(w,
                  vectorialScaleFactor * axis.getX(),
                  vectorialScaleFactor * axis.getY(),
                  vectorialScaleFactor * axis.getZ());
    }
    /** Return an instance that rotates the basis defined by the first two vectors into the basis
     * defined by the second two.
     *
     * <p>
     * The given basis vectors do not have to be directly orthogonal. A right-handed orthonormal
     * basis is created from each pair by normalizing the first vector, making the second vector
     * orthogonal to the first, and then taking the cross product. A rotation is then calculated
     * that rotates the first to the second.
     * </p>
     *
     * @param u1 first vector of the source basis
     * @param u2 second vector of the source basis
     * @param v1 first vector of the target basis
     * @param v2 second vector of the target basis
     * @return an instance that rotates the source basis to the target basis
     * @throws IllegalArgumentException if any of the input vectors cannot be normalized
     *      or the vectors defining either basis are collinear
     */
    public static QuaternionRotation createBasisRotation(final Vector3D u1, final Vector3D u2,
            final Vector3D v1, final Vector3D v2) {
        // calculate orthonormalized bases
        // source basis: (a, b, c); target basis: (d, e, f); both right-handed by construction
        final Vector3D a = u1.normalize();
        final Vector3D b = a.orthogonal(u2);
        final Vector3D c = a.cross(b);
        final Vector3D d = v1.normalize();
        final Vector3D e = d.orthogonal(v2);
        final Vector3D f = d.cross(e);
        // create an orthogonal rotation matrix representing the change of basis; this matrix will
        // be the multiplication of the matrix composed of the column vectors d, e, f and the
        // inverse of the matrix composed of the column vectors a, b, c (which is simply the transpose since
        // it's orthogonal).
        // each entry m[i][j] is a high-accuracy sum of three products via linearCombination
        final double m00 = Vectors.linearCombination(d.getX(), a.getX(), e.getX(), b.getX(), f.getX(), c.getX());
        final double m01 = Vectors.linearCombination(d.getX(), a.getY(), e.getX(), b.getY(), f.getX(), c.getY());
        final double m02 = Vectors.linearCombination(d.getX(), a.getZ(), e.getX(), b.getZ(), f.getX(), c.getZ());
        final double m10 = Vectors.linearCombination(d.getY(), a.getX(), e.getY(), b.getX(), f.getY(), c.getX());
        final double m11 = Vectors.linearCombination(d.getY(), a.getY(), e.getY(), b.getY(), f.getY(), c.getY());
        final double m12 = Vectors.linearCombination(d.getY(), a.getZ(), e.getY(), b.getZ(), f.getY(), c.getZ());
        final double m20 = Vectors.linearCombination(d.getZ(), a.getX(), e.getZ(), b.getX(), f.getZ(), c.getX());
        final double m21 = Vectors.linearCombination(d.getZ(), a.getY(), e.getZ(), b.getY(), f.getZ(), c.getY());
        final double m22 = Vectors.linearCombination(d.getZ(), a.getZ(), e.getZ(), b.getZ(), f.getZ(), c.getZ());
        // convert the rotation matrix to quaternion form
        return orthogonalRotationMatrixToQuaternion(
                m00, m01, m02,
                m10, m11, m12,
                m20, m21, m22
        );
    }
/** Create a new instance equivalent to the given sequence of axis-angle rotations.
* @param sequence the axis-angle rotation sequence to convert to a quaternion rotation
* @return instance representing a rotation equivalent to the given axis-angle sequence
*/
public static QuaternionRotation fromAxisAngleSequence(final AxisAngleSequence sequence) {
final AxisSequence axes = sequence.getAxisSequence();
final QuaternionRotation q1 = fromAxisAngle(axes.getAxis1(), sequence.getAngle1());
final QuaternionRotation q2 = fromAxisAngle(axes.getAxis2(), sequence.getAngle2());
final QuaternionRotation q3 = fromAxisAngle(axes.getAxis3(), sequence.getAngle3());
if (sequence.getReferenceFrame() == AxisReferenceFrame.ABSOLUTE) {
return q3.multiply(q2).multiply(q1);
}
return q1.multiply(q2).multiply(q3);
}
    /** Create an instance from an orthogonal rotation matrix.
     *
     * @param m00 matrix entry <code>m<sub>0,0</sub></code>
     * @param m01 matrix entry <code>m<sub>0,1</sub></code>
     * @param m02 matrix entry <code>m<sub>0,2</sub></code>
     * @param m10 matrix entry <code>m<sub>1,0</sub></code>
     * @param m11 matrix entry <code>m<sub>1,1</sub></code>
     * @param m12 matrix entry <code>m<sub>1,2</sub></code>
     * @param m20 matrix entry <code>m<sub>2,0</sub></code>
     * @param m21 matrix entry <code>m<sub>2,1</sub></code>
     * @param m22 matrix entry <code>m<sub>2,2</sub></code>
     * @return an instance representing the same 3D rotation as the given matrix
     */
    private static QuaternionRotation orthogonalRotationMatrixToQuaternion(
            final double m00, final double m01, final double m02,
            final double m10, final double m11, final double m12,
            final double m20, final double m21, final double m22) {
        // reference formula:
        // http://www.euclideanspace.com/maths/geometry/rotations/conversions/matrixToQuaternion/
        // The overall approach here is to take the equations for converting a quaternion to
        // a matrix along with the fact that 1 = x^2 + y^2 + z^2 + w^2 for a normalized quaternion
        // and solve for the various terms. This can theoretically be done using just the diagonal
        // terms from the matrix. However, there are a few issues with this:
        // 1) The term that we end up taking the square root of may be negative.
        // 2) It's ambiguous as to whether we should use a plus or minus for the value of the
        //    square root.
        // We'll address these concerns by only calculating a single term from one of the diagonal
        // elements and then calculate the rest from the non-diagonals, which do not involve
        // a square root. This solves the first issue since we can make sure to choose a diagonal
        // element that will not cause us to take a square root of a negative number. The second
        // issue is solved since only the relative signs between the quaternion terms are important
        // (q and -q represent the same 3D rotation). It therefore doesn't matter whether we choose
        // a plus or minus for our initial square root solution.
        final double trace = m00 + m11 + m22;
        final double w;
        final double x;
        final double y;
        final double z;
        // each branch below selects the largest of the four candidate square-root terms,
        // keeping the computation numerically well-conditioned
        if (trace > 0) {
            // let s = 4*w
            final double s = 2.0 * Math.sqrt(1.0 + trace);
            final double sinv = 1.0 / s;
            x = (m21 - m12) * sinv;
            y = (m02 - m20) * sinv;
            z = (m10 - m01) * sinv;
            w = 0.25 * s;
        } else if ((m00 > m11) && (m00 > m22)) {
            // let s = 4*x
            final double s = 2.0 * Math.sqrt(1.0 + m00 - m11 - m22);
            final double sinv = 1.0 / s;
            x = 0.25 * s;
            y = (m01 + m10) * sinv;
            z = (m02 + m20) * sinv;
            w = (m21 - m12) * sinv;
        } else if (m11 > m22) {
            // let s = 4*y
            final double s = 2.0 * Math.sqrt(1.0 + m11 - m00 - m22);
            final double sinv = 1.0 / s;
            x = (m01 + m10) * sinv;
            y = 0.25 * s;
            z = (m21 + m12) * sinv;
            w = (m02 - m20) * sinv;
        } else {
            // let s = 4*z
            final double s = 2.0 * Math.sqrt(1.0 + m22 - m00 - m11);
            final double sinv = 1.0 / s;
            x = (m02 + m20) * sinv;
            y = (m21 + m12) * sinv;
            z = 0.25 * s;
            w = (m10 - m01) * sinv;
        }
        // of() normalizes and converts to positive polar form
        return of(w, x, y, z);
    }
/** Reverse the elements in {@code arr}. The array is returned.
* @param arr the array to reverse
* @return the input array with the elements reversed
*/
private static double[] reverseArray(final double[] arr) {
final int len = arr.length;
double temp;
int i;
int j;
for (i = 0, j = len - 1; i < len / 2; ++i, --j) {
temp = arr[i];
arr[i] = arr[j];
arr[j] = temp;
}
return arr;
}
}
|
apache/druid | 35,581 | server/src/main/java/org/apache/druid/segment/loading/SegmentLocalCacheManager.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.segment.loading;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Suppliers;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.ListeningExecutorService;
import com.google.common.util.concurrent.MoreExecutors;
import com.google.errorprone.annotations.concurrent.GuardedBy;
import com.google.inject.Inject;
import org.apache.commons.io.IOUtils;
import org.apache.commons.io.output.NullOutputStream;
import org.apache.druid.error.DruidException;
import org.apache.druid.guice.annotations.Json;
import org.apache.druid.java.util.common.FileUtils;
import org.apache.druid.java.util.common.ISE;
import org.apache.druid.java.util.common.concurrent.Execs;
import org.apache.druid.java.util.common.io.Closer;
import org.apache.druid.java.util.emitter.EmittingLogger;
import org.apache.druid.segment.IndexIO;
import org.apache.druid.segment.ReferenceCountedObjectProvider;
import org.apache.druid.segment.ReferenceCountedSegmentProvider;
import org.apache.druid.segment.Segment;
import org.apache.druid.segment.SegmentLazyLoadFailCallback;
import org.apache.druid.timeline.DataSegment;
import org.apache.druid.timeline.SegmentId;
import org.apache.druid.utils.CloseableUtils;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.StandardCopyOption;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.locks.Lock;
import java.util.function.Supplier;
/**
*
*/
public class SegmentLocalCacheManager implements SegmentCacheManager
{
  // name of the marker file written when a download starts; its presence indicates an
  // incomplete download (see usages elsewhere in this file)
  @VisibleForTesting
  static final String DOWNLOAD_START_MARKER_FILE_NAME = "downloadStartMarker";
  private static final EmittingLogger log = new EmittingLogger(SegmentLocalCacheManager.class);
  // loader configuration: virtual-storage mode, page-cache warm-up thread counts, info dir, etc.
  private final SegmentLoaderConfig config;
  // used to read/write segment info files in the info dir
  private final ObjectMapper jsonMapper;
  // storage locations available for caching segment files
  private final List<StorageLocation> locations;
  /**
   * A map between segment and referenceCountingLocks.
   * <p>
   * These locks should be acquired whenever assigning a segment to a location. If different threads try to load
   * segments simultaneously, one of them creates a lock first using {@link #lock(DataSegment)}. And then, all threads
   * compete with each other to get the lock. Finally, the lock should be released using
   * {@link #unlock(DataSegment, ReferenceCountingLock)}. A lock must be acquired any time a {@link SegmentCacheEntry}
   * needs to be assigned to a {@link StorageLocation}.
   * <p>
   * An example usage is:
   * <p>
   * final ReferenceCountingLock lock = lock(dataSegment);
   * synchronized (lock) {
   *   try {
   *     // assign location
   *     ...
   *   }
   *   finally {
   *     unlock(dataSegment, lock);
   *   }
   * }
   */
  private final ConcurrentHashMap<DataSegment, ReferenceCountingLock> segmentLocks = new ConcurrentHashMap<>();
  // strategy used to select which storage location a new segment is assigned to
  private final StorageLocationSelectorStrategy strategy;
  // stored for use when loading segment indexes; usage is outside this chunk
  private final IndexIO indexIO;
  // executor for on-demand segment loads; non-null only when virtual storage is enabled
  private final ListeningExecutorService virtualStorageLoadOnDemandExec;
  // created in the constructor only when the corresponding thread counts are > 0; otherwise null
  private ExecutorService loadOnBootstrapExec = null;
  private ExecutorService loadOnDownloadExec = null;
  /**
   * Creates the cache manager and, depending on configuration, the executors used for
   * on-demand loading (virtual storage mode) or for warming segments into the page cache.
   *
   * @param locations storage locations available for caching segment files
   * @param config    loader configuration
   * @param strategy  strategy used to select the storage location for new segments
   * @param indexIO   segment index IO, stored for later use
   * @param mapper    JSON mapper used to read and write segment info files
   */
  @Inject
  public SegmentLocalCacheManager(
      List<StorageLocation> locations,
      SegmentLoaderConfig config,
      @Nonnull StorageLocationSelectorStrategy strategy,
      IndexIO indexIO,
      @Json ObjectMapper mapper
  )
  {
    this.config = config;
    this.jsonMapper = mapper;
    this.locations = locations;
    this.strategy = strategy;
    this.indexIO = indexIO;
    log.info("Using storage location strategy[%s].", this.strategy.getClass().getSimpleName());
    if (config.isVirtualStorage()) {
      log.info(
          "Using virtual storage mode - on demand load threads: [%d].",
          config.getVirtualStorageLoadThreads()
      );
      // virtual storage manages disk usage itself, so the page-cache warm-up thread
      // settings must not be set at the same time
      if (config.getNumThreadsToLoadSegmentsIntoPageCacheOnDownload() > 0) {
        throw DruidException.defensive("Invalid configuration: virtualStorage is incompatible with numThreadsToLoadSegmentsIntoPageCacheOnDownload");
      }
      if (config.getNumThreadsToLoadSegmentsIntoPageCacheOnBootstrap() > 0) {
        throw DruidException.defensive("Invalid configuration: virtualStorage is incompatible with numThreadsToLoadSegmentsIntoPageCacheOnBootstrap");
      }
      virtualStorageLoadOnDemandExec =
          MoreExecutors.listeningDecorator(
              // probably replace this with virtual threads once minimum version is java 21
              Executors.newFixedThreadPool(
                  config.getVirtualStorageLoadThreads(),
                  Execs.makeThreadFactory("VirtualStorageOnDemandLoadingThread-%s")
              )
          );
    } else {
      log.info(
          "Number of threads to load segments into page cache - on bootstrap: [%d], on download: [%d].",
          config.getNumThreadsToLoadSegmentsIntoPageCacheOnBootstrap(),
          config.getNumThreadsToLoadSegmentsIntoPageCacheOnDownload()
      );
      // page-cache warm-up executors are optional; each is only created when configured
      if (config.getNumThreadsToLoadSegmentsIntoPageCacheOnBootstrap() > 0) {
        loadOnBootstrapExec = Execs.multiThreaded(
            config.getNumThreadsToLoadSegmentsIntoPageCacheOnBootstrap(),
            "Load-SegmentsIntoPageCacheOnBootstrap-%s"
        );
      }
      if (config.getNumThreadsToLoadSegmentsIntoPageCacheOnDownload() > 0) {
        loadOnDownloadExec = Executors.newFixedThreadPool(
            config.getNumThreadsToLoadSegmentsIntoPageCacheOnDownload(),
            Execs.makeThreadFactory("LoadSegmentsIntoPageCacheOnDownload-%s")
        );
      }
      virtualStorageLoadOnDemandExec = null;
    }
  }
@Override
public boolean canHandleSegments()
{
final boolean isLocationsValid = !(locations == null || locations.isEmpty());
final boolean isLocationsConfigValid = !(config.getLocations() == null || config.getLocations().isEmpty());
return isLocationsValid || isLocationsConfigValid;
}
@Override
public List<DataSegment> getCachedSegments() throws IOException
{
if (!canHandleSegments()) {
throw DruidException.defensive(
"canHandleSegments() is false. getCachedSegments() must be invoked only when canHandleSegments() returns true."
);
}
final File infoDir = getEffectiveInfoDir();
FileUtils.mkdirp(infoDir);
final List<DataSegment> cachedSegments = new ArrayList<>();
final File[] segmentsToLoad = infoDir.listFiles();
int ignored = 0;
for (int i = 0; i < segmentsToLoad.length; i++) {
final File file = segmentsToLoad[i];
log.info("Loading segment cache file [%d/%d][%s].", i + 1, segmentsToLoad.length, file);
try {
final DataSegment segment = jsonMapper.readValue(file, DataSegment.class);
boolean removeInfo = false;
if (!segment.getId().toString().equals(file.getName())) {
log.warn("Ignoring cache file[%s] for segment[%s].", file.getPath(), segment.getId());
ignored++;
} else {
removeInfo = true;
final SegmentCacheEntry cacheEntry = new SegmentCacheEntry(segment);
for (StorageLocation location : locations) {
// check for migrate from old nested local storage path format
final File legacyPath = new File(location.getPath(), DataSegmentPusher.getDefaultStorageDir(segment, false));
if (legacyPath.exists()) {
final File destination = cacheEntry.toPotentialLocation(location.getPath());
FileUtils.mkdirp(destination);
final File[] oldFiles = legacyPath.listFiles();
final File[] newFiles = destination.listFiles();
// make sure old files exist and new files do not exist
if (oldFiles != null && oldFiles.length > 0 && newFiles != null && newFiles.length == 0) {
Files.move(legacyPath.toPath(), destination.toPath(), StandardCopyOption.ATOMIC_MOVE);
}
cleanupLegacyCacheLocation(location.getPath(), legacyPath);
}
if (cacheEntry.checkExists(location.getPath())) {
removeInfo = false;
final boolean reserveResult;
if (config.isVirtualStorage()) {
reserveResult = location.reserveWeak(cacheEntry);
} else {
reserveResult = location.reserve(cacheEntry);
}
if (!reserveResult) {
log.makeAlert(
"storage[%s:%,d] has more segments than it is allowed. Currently loading Segment[%s:%,d]. Please increase druid.segmentCache.locations maxSize param",
location.getPath(),
location.availableSizeBytes(),
segment.getId(),
segment.getSize()
).emit();
}
cachedSegments.add(segment);
}
}
}
if (removeInfo) {
final SegmentId segmentId = segment.getId();
log.warn("Unable to find cache file for segment[%s]. Deleting lookup entry.", segmentId);
removeInfoFile(segment);
}
}
catch (Exception e) {
log.makeAlert(e, "Failed to load segment from segment cache file.")
.addData("file", file)
.emit();
}
}
if (ignored > 0) {
log.makeAlert("Ignored misnamed segment cache files on startup.")
.addData("numIgnored", ignored)
.emit();
}
return cachedSegments;
}
@Override
public void storeInfoFile(final DataSegment segment) throws IOException
{
final File segmentInfoCacheFile = new File(getEffectiveInfoDir(), segment.getId().toString());
if (!segmentInfoCacheFile.exists()) {
jsonMapper.writeValue(segmentInfoCacheFile, segment);
}
}
@Override
public void removeInfoFile(final DataSegment segment)
{
final File segmentInfoCacheFile = new File(getEffectiveInfoDir(), segment.getId().toString());
if (!segmentInfoCacheFile.delete()) {
log.warn("Unable to delete cache file[%s] for segment[%s].", segmentInfoCacheFile, segment.getId());
}
}
  /**
   * Returns a reference to the given segment if it is already present in the local cache,
   * or {@link Optional#empty()} without triggering any download otherwise.
   */
  @Override
  public Optional<Segment> acquireCachedSegment(final DataSegment dataSegment)
  {
    final SegmentCacheEntryIdentifier cacheEntryIdentifier = new SegmentCacheEntryIdentifier(dataSegment.getId());
    for (StorageLocation location : locations) {
      // fast path: the entry is pinned ("static") in this location and can be referenced directly
      final SegmentCacheEntry cacheEntry = location.getStaticCacheEntry(cacheEntryIdentifier);
      if (cacheEntry != null) {
        return cacheEntry.acquireReference();
      }
      // weak entries must be held while in use so the location does not evict them
      final StorageLocation.ReservationHold<SegmentCacheEntry> hold =
          location.addWeakReservationHoldIfExists(cacheEntryIdentifier);
      try {
        if (hold != null) {
          if (hold.getEntry().isMounted()) {
            Optional<Segment> segment = hold.getEntry().acquireReference();
            if (segment.isPresent()) {
              // tie the hold's lifetime to the returned segment so the reservation is
              // released when the caller closes the segment
              return ReferenceCountedSegmentProvider.wrapCloseable(
                  (ReferenceCountedSegmentProvider.LeafReference) segment.get(),
                  hold
              );
            }
          }
          // entry exists but is not usable right now; release the hold and try other locations
          hold.close();
        }
      }
      catch (Throwable e) {
        // NOTE(review): the body of the try only throws inside the hold != null branch, so
        // hold cannot be null here; an acquired segment reference may not be released on this
        // path, though — confirm wrapCloseable failure semantics
        hold.close();
        throw e;
      }
    }
    return Optional.empty();
  }
@Override
public AcquireSegmentAction acquireSegment(final DataSegment dataSegment) throws SegmentLoadingException
{
  final SegmentCacheEntryIdentifier identifier = new SegmentCacheEntryIdentifier(dataSegment.getId());
  // Fast path without the per-segment lock: reuse an existing entry if one is tracked anywhere.
  final AcquireSegmentAction acquireExisting = acquireExistingSegment(identifier);
  if (acquireExisting != null) {
    return acquireExisting;
  }
  final ReferenceCountingLock lock = lock(dataSegment);
  synchronized (lock) {
    try {
      // Re-check under the lock in case another thread created the entry between the unlocked
      // check above and acquiring the lock.
      final AcquireSegmentAction retryAcquireExisting = acquireExistingSegment(identifier);
      if (retryAcquireExisting != null) {
        return retryAcquireExisting;
      }
      // No existing entry: ask the placement strategy for candidate locations and take a weak
      // reservation in the first one that accepts it.
      final Iterator<StorageLocation> iterator = strategy.getLocations();
      while (iterator.hasNext()) {
        final StorageLocation location = iterator.next();
        final StorageLocation.ReservationHold<SegmentCacheEntry> hold = location.addWeakReservationHold(
            identifier,
            () -> new SegmentCacheEntry(dataSegment)
        );
        try {
          if (hold != null) {
            // Mounting happens lazily: the supplier downloads/mounts when first invoked.
            return new AcquireSegmentAction(
                makeOnDemandLoadSupplier(hold.getEntry(), location),
                hold
            );
          }
        }
        catch (Throwable t) {
          throw CloseableUtils.closeAndWrapInCatch(t, hold);
        }
      }
      // Every candidate location rejected the reservation: no free space anywhere.
      throw DruidException.forPersona(DruidException.Persona.USER)
                          .ofCategory(DruidException.Category.CAPACITY_EXCEEDED)
                          .build(
                              "Unable to load segment[%s] on demand, ensure enough disk space has been allocated to load all segments involved in the query",
                              dataSegment.getId()
                          );
    }
    finally {
      unlock(dataSegment, lock);
    }
  }
}
/**
 * Tries to build an {@link AcquireSegmentAction} from an entry already tracked by some location.
 * Returns null when no location currently tracks the identifier. A successfully returned action
 * owns the reservation hold; the safety-net {@link Closer} only fires if construction fails.
 */
@Nullable
private AcquireSegmentAction acquireExistingSegment(SegmentCacheEntryIdentifier identifier)
{
  final Closer safetyNet = Closer.create();
  for (StorageLocation location : locations) {
    try {
      // Closer.register tolerates null, so this is safe when the location has no entry.
      final StorageLocation.ReservationHold<SegmentCacheEntry> hold = safetyNet.register(
          location.addWeakReservationHoldIfExists(identifier)
      );
      if (hold != null) {
        if (hold.getEntry().isMounted()) {
          // Already mounted: the future can resolve immediately to the existing provider.
          return new AcquireSegmentAction(
              () -> Futures.immediateFuture(hold.getEntry().referenceProvider),
              hold
          );
        } else {
          // go ahead and mount it, someone else is probably trying this as well, but mount is done under a segment
          // lock and is a no-op if already mounted, and if we win we need it to be mounted
          return new AcquireSegmentAction(
              makeOnDemandLoadSupplier(hold.getEntry(), location),
              hold
          );
        }
      }
    }
    catch (Throwable t) {
      // Release any registered hold before propagating.
      throw CloseableUtils.closeAndWrapInCatch(t, safetyNet);
    }
  }
  return null;
}
@Override
public void load(final DataSegment dataSegment) throws SegmentLoadingException
{
  if (config.isVirtualStorage()) {
    // no-op, we'll do a load when someone asks for the segment
    return;
  }
  final SegmentCacheEntry cacheEntry = new SegmentCacheEntry(dataSegment);
  // Per-segment lock serializes load/bootstrap/getSegmentFiles for the same segment.
  final ReferenceCountingLock lock = lock(dataSegment);
  synchronized (lock) {
    try {
      // Reserve a location (reusing existing on-disk files when present) and mount the segment.
      final SegmentCacheEntry entry = assignLocationAndMount(cacheEntry, SegmentLazyLoadFailCallback.NOOP);
      if (loadOnDownloadExec != null) {
        // Optionally warm the OS page cache in the background.
        loadOnDownloadExec.submit(entry::loadIntoPageCache);
      }
    }
    finally {
      unlock(dataSegment, lock);
    }
  }
}
/**
 * Loads a segment during process bootstrap. In virtual-storage mode only already-reserved
 * entries are mounted; otherwise this behaves like {@link #load} but uses the bootstrap
 * executor and the supplied lazy-load failure callback.
 */
@Override
public void bootstrap(
    final DataSegment dataSegment,
    final SegmentLazyLoadFailCallback loadFailed
) throws SegmentLoadingException
{
  if (config.isVirtualStorage()) {
    // during bootstrap, check if the segment exists in a location and mount it, getCachedSegments already
    // did the reserving for us
    final SegmentCacheEntryIdentifier id = new SegmentCacheEntryIdentifier(dataSegment.getId());
    final ReferenceCountingLock lock = lock(dataSegment);
    synchronized (lock) {
      try {
        for (StorageLocation location : locations) {
          final SegmentCacheEntry entry = location.getCacheEntry(id);
          if (entry != null) {
            entry.lazyLoadCallback = loadFailed;
            entry.mount(location);
          }
        }
      }
      finally {
        unlock(dataSegment, lock);
      }
    }
    return;
  }
  // Non-virtual storage: reserve a location, mount, and optionally warm the page cache on the
  // bootstrap executor.
  final ReferenceCountingLock lock = lock(dataSegment);
  synchronized (lock) {
    try {
      final SegmentCacheEntry entry = assignLocationAndMount(new SegmentCacheEntry(dataSegment), loadFailed);
      if (loadOnBootstrapExec != null) {
        loadOnBootstrapExec.submit(entry::loadIntoPageCache);
      }
    }
    finally {
      unlock(dataSegment, lock);
    }
  }
}
@Nullable
@Override
public File getSegmentFiles(final DataSegment segment)
{
  // Probe entry used only for its identifier; the actual entry (if any) is looked up per
  // location under the per-segment lock.
  final SegmentCacheEntry lookup = new SegmentCacheEntry(segment);
  final ReferenceCountingLock lock = lock(segment);
  synchronized (lock) {
    try {
      for (StorageLocation candidate : locations) {
        final SegmentCacheEntry tracked = candidate.getCacheEntry(lookup.id);
        if (tracked != null) {
          return tracked.storageDir;
        }
      }
    }
    finally {
      unlock(segment, lock);
    }
  }
  // Segment is not tracked by any location.
  return null;
}
@Override
public void drop(final DataSegment segment)
{
  // Release the entry from every location that still tracks it; release() performs the actual
  // unmount and cleanup.
  final SegmentCacheEntryIdentifier identifier = new SegmentCacheEntryIdentifier(segment.getId());
  for (StorageLocation storageLocation : locations) {
    final SegmentCacheEntry tracked = storageLocation.getCacheEntry(identifier);
    if (tracked != null) {
      storageLocation.release(tracked);
    }
  }
}
@Override
public void shutdownBootstrap()
{
  // The bootstrap executor is optional; nothing to do when it was never created.
  if (loadOnBootstrapExec != null) {
    loadOnBootstrapExec.shutdown();
  }
}
@Override
public void shutdown()
{
  // Both executors are optional (created only for certain configurations), so each is
  // null-checked independently before shutdown.
  if (loadOnDownloadExec != null) {
    loadOnDownloadExec.shutdown();
  }
  if (virtualStorageLoadOnDemandExec != null) {
    virtualStorageLoadOnDemandExec.shutdown();
  }
}
/**
 * Exposes the per-segment lock map for tests.
 */
@VisibleForTesting
public ConcurrentHashMap<DataSegment, ReferenceCountingLock> getSegmentLocks()
{
  return segmentLocks;
}
/**
 * Exposes the configured storage locations for tests.
 */
@VisibleForTesting
List<StorageLocation> getLocations()
{
  return locations;
}
/**
 * Checks whether segment files already exist in any configured location. This only tests for
 * the presence of files on disk: it does not verify that the segment is mounted anywhere, nor
 * that the files in that location are complete or valid.
 */
@VisibleForTesting
boolean isSegmentCached(final DataSegment segment)
{
  final SegmentCacheEntry probe = new SegmentCacheEntry(segment);
  return locations.stream().anyMatch(location -> probe.checkExists(location.getPath()));
}
/**
 * Determines the effective segment info directory from the injected configuration, in order of
 * preference:
 * <ul>
 *   <li>{@link SegmentLoaderConfig#getInfoDir()} when explicitly set;</li>
 *   <li>an {@code info_dir} under the first entry of {@link SegmentLoaderConfig#getLocations()};</li>
 *   <li>an {@code info_dir} under the first injected {@link StorageLocation}.</li>
 * </ul>
 *
 * @throws DruidException when none of the above are configured.
 */
private File getEffectiveInfoDir()
{
  if (config.getInfoDir() != null) {
    return config.getInfoDir();
  }
  if (!config.getLocations().isEmpty()) {
    return new File(config.getLocations().get(0).getPath(), "info_dir");
  }
  if (!locations.isEmpty()) {
    return new File(locations.get(0).getPath(), "info_dir");
  }
  throw DruidException.forPersona(DruidException.Persona.OPERATOR)
                      .ofCategory(DruidException.Category.NOT_FOUND)
                      .build("Could not determine infoDir. Make sure 'druid.segmentCache.infoDir' "
                             + "or 'druid.segmentCache.locations' is set correctly.");
}
/**
 * Returns a memoized supplier that mounts the entry in the given location on the on-demand
 * executor and resolves to the entry's reference provider. Memoization ensures only a single
 * mount task is submitted no matter how many times the supplier is invoked.
 * NOTE(review): assumes {@code virtualStorageLoadOnDemandExec} is non-null whenever this is
 * called — TODO confirm it is always created when on-demand loading is possible.
 */
private Supplier<ListenableFuture<ReferenceCountedObjectProvider<Segment>>> makeOnDemandLoadSupplier(
    final SegmentCacheEntry entry,
    final StorageLocation location
)
{
  return Suppliers.memoize(
      () -> virtualStorageLoadOnDemandExec.submit(
          () -> {
            entry.mount(location);
            return entry.referenceProvider;
          }
      )
  );
}
private ReferenceCountingLock lock(final DataSegment dataSegment)
{
  // Atomically create-or-reuse the per-segment lock and bump its reference count, so that
  // unlock() can remove the map entry once the last holder releases it.
  return segmentLocks.compute(
      dataSegment,
      (segment, existing) -> {
        final ReferenceCountingLock result = existing == null ? new ReferenceCountingLock() : existing;
        result.increment();
        return result;
      }
  );
}
private void unlock(final DataSegment dataSegment, final ReferenceCountingLock lock)
{
  // Drop one reference to the per-segment lock, removing the map entry entirely when this was
  // the final holder. The sanity checks guard against mismatched lock/unlock pairs.
  segmentLocks.compute(
      dataSegment,
      (segment, current) -> {
        if (current == null) {
          throw new ISE("Lock has already been removed");
        }
        if (current != lock) {
          throw new ISE("Different lock instance");
        }
        if (current.numReferences == 1) {
          return null;
        }
        current.decrement();
        return current;
      }
  );
}
/**
 * Reserves a storage location for the entry and mounts it there. Prefers a location that
 * already has the segment files on disk; otherwise falls back to the placement strategy's
 * ordering of locations.
 *
 * @throws SegmentLoadingException if the segment could not be mounted in any location.
 */
private SegmentCacheEntry assignLocationAndMount(
    final SegmentCacheEntry cacheEntry,
    final SegmentLazyLoadFailCallback segmentLoadFailCallback
) throws SegmentLoadingException
{
  // First pass: reuse a location that already has the segment files, reserving it if needed.
  try {
    for (StorageLocation location : locations) {
      if (cacheEntry.checkExists(location.getPath())) {
        if (location.isReserved(cacheEntry.id) || location.reserve(cacheEntry)) {
          final SegmentCacheEntry entry = location.getCacheEntry(cacheEntry.id);
          entry.lazyLoadCallback = segmentLoadFailCallback;
          entry.mount(location);
          return entry;
        } else {
          // entry is not reserved, clean it up
          deleteCacheEntryDirectory(cacheEntry.toPotentialLocation(location.getPath()));
        }
      }
    }
  }
  catch (SegmentLoadingException e) {
    log.warn(e, "Failed to load segment[%s] in existing location, trying new location", cacheEntry.id);
  }
  // Second pass: let the placement strategy pick fresh locations, trying each in order.
  final Iterator<StorageLocation> locationsIterator = strategy.getLocations();
  while (locationsIterator.hasNext()) {
    final StorageLocation location = locationsIterator.next();
    if (location.reserve(cacheEntry)) {
      try {
        final SegmentCacheEntry entry = location.getCacheEntry(cacheEntry.id);
        entry.lazyLoadCallback = segmentLoadFailCallback;
        entry.mount(location);
        return entry;
      }
      catch (SegmentLoadingException e) {
        log.warn(e, "Failed to load segment[%s] in location[%s], trying next location", cacheEntry.id, location.getPath());
      }
    }
  }
  throw new SegmentLoadingException("Failed to load segment[%s] in all locations.", cacheEntry.id);
}
/**
 * Deletes a directory, logging the attempt and swallowing (but logging) any failure.
 * This method should only be called under the lock of a {@link #segmentLocks} entry.
 */
private static void deleteCacheEntryDirectory(final File path)
{
  log.info("Deleting directory[%s]", path);
  try {
    FileUtils.deleteDirectory(path);
  }
  catch (Exception e) {
    // Best-effort: a leftover directory is logged rather than propagated.
    log.error(e, "Unable to remove directory[%s]", path);
  }
}
/**
 * Deletes {@code cacheFile} via {@link #deleteCacheEntryDirectory(File)} and then walks up the
 * directory tree, deleting each parent that has become empty, stopping at the first non-empty
 * directory or when {@code baseFile} is reached (which is never deleted). Not thread-safe; it
 * should only be used by a single caller.
 */
private static void cleanupLegacyCacheLocation(final File baseFile, final File cacheFile)
{
  File current = cacheFile;
  while (!current.equals(baseFile)) {
    deleteCacheEntryDirectory(current);
    final File parent = current.getParentFile();
    if (parent == null) {
      return;
    }
    final File[] remaining = parent.listFiles();
    if (remaining != null && remaining.length > 0) {
      // Parent still has other content; stop climbing.
      return;
    }
    current = parent;
  }
}
/**
 * Checks whether segment data in the given cache directory is possibly corrupted.
 *
 * @param dir segment cache directory
 * @return true if the segment files may be damaged (a leftover download marker was found)
 */
private static boolean isPossiblyCorrupted(final File dir)
{
  return hasStartMarker(dir);
}
/**
 * Returns true when {@link #DOWNLOAD_START_MARKER_FILE_NAME} is present under the given
 * directory. The marker is normally deleted after the segment is pulled from deep storage, so
 * its presence suggests an interrupted download and possibly damaged files.
 */
private static boolean hasStartMarker(final File localStorageDir)
{
  return new File(localStorageDir.getPath(), DOWNLOAD_START_MARKER_FILE_NAME).exists();
}
/**
 * Mutable reference counter used as a per-segment mutex object. The count appears to be read
 * and written only from {@code segmentLocks.compute} callbacks, which provide the necessary
 * atomicity — confirm before adding other call sites.
 */
private static final class ReferenceCountingLock
{
  // Number of outstanding lock()/unlock() pairs currently holding this instance.
  private int numReferences;

  private void increment()
  {
    numReferences++;
  }

  private void decrement()
  {
    numReferences--;
  }
}
/**
 * A {@link CacheEntry} for a single {@link DataSegment}. Tracks where (if anywhere) the
 * segment's files are mounted on local disk and hands out reference-counted {@link Segment}
 * instances. Mutable state ({@link #location}, {@link #storageDir}, {@link #referenceProvider})
 * is guarded by synchronizing on {@code this}.
 */
private final class SegmentCacheEntry implements CacheEntry
{
  // Cache identifier derived from the segment id.
  private final SegmentCacheEntryIdentifier id;
  private final DataSegment dataSegment;
  // Relative directory (under a location's base path) that holds this segment's files.
  private final String relativePathString;
  // Invoked if lazy loading fails; consumed (reset to NOOP) by mount().
  private SegmentLazyLoadFailCallback lazyLoadCallback = SegmentLazyLoadFailCallback.NOOP;
  // Location this entry is currently mounted in; null while unmounted.
  private StorageLocation location;
  // Directory containing the mounted segment files; null while unmounted.
  private File storageDir;
  // Non-null exactly while the segment is mounted and usable.
  private ReferenceCountedSegmentProvider referenceProvider;

  private SegmentCacheEntry(final DataSegment dataSegment)
  {
    this.dataSegment = dataSegment;
    this.id = new SegmentCacheEntryIdentifier(dataSegment.getId());
    this.relativePathString = dataSegment.getId().toString();
  }

  @Override
  public SegmentCacheEntryIdentifier getId()
  {
    return id;
  }

  @Override
  public long getSize()
  {
    // Size reported by the segment metadata, not the bytes currently on disk.
    return dataSegment.getSize();
  }

  @Override
  public synchronized boolean isMounted()
  {
    return referenceProvider != null;
  }

  /**
   * Downloads (if needed) and mounts the segment files in the given location, making
   * {@link #acquireReference()} usable. A no-op if already mounted in the same location;
   * defensively fails if mounted elsewhere.
   */
  @Override
  public void mount(StorageLocation mountLocation) throws SegmentLoadingException
  {
    // check to see if we should still be mounting by making sure we are still reserved in the location
    // this is not done under a lock of the location, and that is ok.. we will check again at the end to prevent any
    // orphaned files
    if (!mountLocation.isReserved(this.id) && !mountLocation.isWeakReserved(this.id)) {
      log.debug(
          "aborting mount in location[%s] since entry[%s] is no longer reserved",
          mountLocation.getPath(),
          this.id
      );
      return;
    }
    try {
      synchronized (this) {
        if (location != null) {
          // NOTE(review): this debug message mentions "unmounting old location" but is emitted
          // before the equality check below, so it also fires when the locations match and
          // nothing is unmounted — confirm intent.
          log.debug(
              "already mounted [%s] in location[%s], but asked to load in [%s], unmounting old location",
              id,
              location.getPath(),
              mountLocation.getPath()
          );
          if (!location.equals(mountLocation)) {
            throw DruidException.defensive(
                "already mounted[%s] in location[%s] which is different from requested[%s]",
                id,
                location.getPath(),
                mountLocation.getPath()
            );
          } else {
            log.debug("already mounted [%s] in location[%s]", id, mountLocation.getPath());
            return;
          }
        }
        location = mountLocation;
        storageDir = new File(location.getPath(), relativePathString);
        boolean needsLoad = true;
        if (storageDir.exists()) {
          // Existing files with a leftover download marker may be incomplete; wipe and re-pull.
          if (isPossiblyCorrupted(storageDir)) {
            log.warn(
                "[%s] may be damaged. Delete all the segment files and pull from DeepStorage again.",
                storageDir.getAbsolutePath()
            );
            deleteCacheEntryDirectory(storageDir);
          } else {
            needsLoad = false;
          }
        }
        if (needsLoad) {
          loadInLocationWithStartMarker(dataSegment, storageDir);
        }
        final SegmentizerFactory factory = getSegmentFactory(storageDir);
        final Segment segment = factory.factorize(dataSegment, storageDir, false, lazyLoadCallback);
        // wipe load callback after calling
        lazyLoadCallback = SegmentLazyLoadFailCallback.NOOP;
        referenceProvider = ReferenceCountedSegmentProvider.of(segment);
      }
      // since we do not hold a lock on the location while mounting, make sure that we actually are reserved and
      // should have mounted, otherwise unmount so we don't leave any orphaned files
      if (!mountLocation.isReserved(this.id) && !mountLocation.isWeakReserved(this.id)) {
        log.debug(
            "aborting mount in location[%s] since entry[%s] is no longer reserved",
            mountLocation.getPath(),
            this.id
        );
        unmount();
      }
    }
    catch (SegmentLoadingException e) {
      try {
        log.makeAlert(
            e,
            "Failed to load segment in current location [%s], try next location if any",
            location.getPath().getAbsolutePath()
        ).addData("location", location.getPath().getAbsolutePath()).emit();
        throw new SegmentLoadingException(
            "Failed to load segment[%s] in reserved location[%s]",
            dataSegment.getId(),
            location.getPath().getAbsolutePath()
        );
      }
      finally {
        unmount();
      }
    }
    catch (Throwable t) {
      // Any other failure also unmounts so no partially-initialized state is left behind.
      unmount();
      throw t;
    }
  }

  /**
   * Closes the reference provider and, when {@code deleteOnRemove} is enabled, deletes the
   * segment files. The location's read lock is captured under 'this' but acquired outside of
   * it — presumably to coordinate with writers in the location without holding this entry's
   * monitor while blocking; confirm against StorageLocation's locking contract.
   */
  @Override
  public void unmount()
  {
    final Lock lock;
    synchronized (this) {
      if (location == null) {
        return;
      }
      lock = location.getLock().readLock();
    }
    lock.lock();
    try {
      synchronized (this) {
        if (referenceProvider != null) {
          referenceProvider.close();
          referenceProvider = null;
        }
        if (!config.isDeleteOnRemove()) {
          // Files (and location/storageDir fields) are intentionally kept for reuse.
          return;
        }
        if (storageDir != null) {
          deleteCacheEntryDirectory(storageDir);
          storageDir = null;
          location = null;
        }
      }
    }
    finally {
      lock.unlock();
    }
  }

  // Returns a counted reference to the mounted segment, or empty when not mounted (or when the
  // provider declines to hand out further references).
  public synchronized Optional<Segment> acquireReference()
  {
    if (referenceProvider == null) {
      return Optional.empty();
    }
    return referenceProvider.acquireReference();
  }

  // Reads every segment file once to pull its contents into the OS page cache; failures are
  // only logged.
  public void loadIntoPageCache()
  {
    if (!isMounted()) {
      return;
    }
    synchronized (this) {
      final File[] children = storageDir.listFiles();
      if (children != null) {
        for (File child : children) {
          try (InputStream in = Files.newInputStream(child.toPath())) {
            IOUtils.copy(in, NullOutputStream.NULL_OUTPUT_STREAM);
            log.info("Loaded [%s] into page cache.", child.getAbsolutePath());
          }
          catch (Exception e) {
            log.error(e, "Failed to load [%s] into page cache", child.getAbsolutePath());
          }
        }
      }
    }
  }

  // True when this entry's files exist under the given base path.
  public boolean checkExists(final File location)
  {
    return toPotentialLocation(location).exists();
  }

  // Directory this entry's files would occupy under the given base path.
  public File toPotentialLocation(final File location)
  {
    return new File(location, relativePathString);
  }

  @GuardedBy("this")
  private void loadInLocationWithStartMarker(final DataSegment segment, final File storageDir)
      throws SegmentLoadingException
  {
    // We use a marker to prevent the case where a segment is downloaded, but before the download completes,
    // the parent directories of the segment are removed
    final File downloadStartMarker = new File(storageDir, DOWNLOAD_START_MARKER_FILE_NAME);
    try {
      FileUtils.mkdirp(storageDir);
      if (!downloadStartMarker.createNewFile()) {
        throw new SegmentLoadingException("Was not able to create new download marker for [%s]", storageDir);
      }
      loadInLocation(segment, storageDir);
      // Marker removal signals a complete, trustworthy download (see hasStartMarker()).
      if (!downloadStartMarker.delete()) {
        throw new SegmentLoadingException("Unable to remove marker file for [%s]", storageDir);
      }
    }
    catch (IOException e) {
      throw new SegmentLoadingException(e, "Unable to create marker file for [%s]", storageDir);
    }
  }

  @GuardedBy("this")
  private void loadInLocation(final DataSegment segment, final File storageDir)
      throws SegmentLoadingException
  {
    // LoadSpec isn't materialized until here so that any system can interpret Segment without having to have all the
    // LoadSpec dependencies.
    final LoadSpec loadSpec = jsonMapper.convertValue(segment.getLoadSpec(), LoadSpec.class);
    final LoadSpec.LoadSpecResult result = loadSpec.loadSegment(storageDir);
    if (result.getSize() != segment.getSize()) {
      // Size mismatch is logged but tolerated; the pulled files are still used.
      log.warn(
          "Segment [%s] is different than expected size. Expected [%d] found [%d]",
          segment.getId(),
          segment.getSize(),
          result.getSize()
      );
    }
  }

  @GuardedBy("this")
  private SegmentizerFactory getSegmentFactory(final File segmentFiles) throws SegmentLoadingException
  {
    // factory.json, when present, selects a custom segmentizer; otherwise default to mmap.
    final File factoryJson = new File(segmentFiles, "factory.json");
    final SegmentizerFactory factory;
    if (factoryJson.exists()) {
      try {
        factory = jsonMapper.readValue(factoryJson, SegmentizerFactory.class);
      }
      catch (IOException e) {
        // NOTE(review): the format argument is e.getMessage() rather than the file path, so the
        // message reads "for <exception message>" — consider passing factoryJson instead.
        throw new SegmentLoadingException(e, "Failed to get segment factory for %s", e.getMessage());
      }
    } else {
      factory = new MMappedQueryableSegmentizerFactory(indexIO);
    }
    return factory;
  }

  // Equality is based solely on the wrapped DataSegment, matching hashCode below.
  @Override
  public boolean equals(Object o)
  {
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    SegmentCacheEntry that = (SegmentCacheEntry) o;
    return Objects.equals(dataSegment, that.dataSegment);
  }

  @Override
  public int hashCode()
  {
    return Objects.hashCode(dataSegment);
  }
}
}
|
apache/iceberg | 35,105 | spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/sql/TestAggregatePushDown.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.iceberg.spark.sql;
import static org.assertj.core.api.Assertions.assertThat;
import java.math.BigDecimal;
import java.sql.Date;
import java.sql.Timestamp;
import java.util.Arrays;
import java.util.List;
import java.util.Locale;
import org.apache.iceberg.CatalogUtil;
import org.apache.iceberg.ParameterizedTestExtension;
import org.apache.iceberg.TableProperties;
import org.apache.iceberg.catalog.Namespace;
import org.apache.iceberg.exceptions.AlreadyExistsException;
import org.apache.iceberg.hive.HiveCatalog;
import org.apache.iceberg.hive.TestHiveMetastore;
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap;
import org.apache.iceberg.relocated.com.google.common.collect.Lists;
import org.apache.iceberg.spark.CatalogTestBase;
import org.apache.iceberg.spark.SparkReadOptions;
import org.apache.iceberg.spark.TestBase;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.execution.ExplainMode;
import org.apache.spark.sql.functions;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.TestTemplate;
import org.junit.jupiter.api.extension.ExtendWith;
@ExtendWith(ParameterizedTestExtension.class)
public class TestAggregatePushDown extends CatalogTestBase {
/**
 * Starts an embedded Hive metastore and replaces the shared SparkSession with one that has
 * Hive support and Iceberg aggregate push down enabled, then makes sure the {@code default}
 * namespace exists in the Hive catalog.
 */
@BeforeAll
public static void startMetastoreAndSpark() {
  TestBase.metastore = new TestHiveMetastore();
  metastore.start();
  TestBase.hiveConf = metastore.hiveConf();
  // The inherited session lacks the push down flag, so stop it and build a fresh one.
  TestBase.spark.stop();
  TestBase.spark =
      SparkSession.builder()
          .master("local[2]")
          .config("spark.sql.iceberg.aggregate_pushdown", "true")
          .enableHiveSupport()
          .getOrCreate();
  TestBase.catalog =
      (HiveCatalog)
          CatalogUtil.loadCatalog(
              HiveCatalog.class.getName(), "hive", ImmutableMap.of(), hiveConf);
  try {
    catalog.createNamespace(Namespace.of("default"));
  } catch (AlreadyExistsException ignored) {
    // the default namespace already exists. ignore the create error
  }
}
/** Drops the test table after each test so cases stay independent. */
@AfterEach
public void removeTables() {
  sql("DROP TABLE IF EXISTS %s", tableName);
}
/** Runs the data-type push down scenario against a table partitioned by {@code id}. */
@TestTemplate
public void testDifferentDataTypesAggregatePushDownInPartitionedTable() {
  testDifferentDataTypesAggregatePushDown(true);
}
/** Runs the data-type push down scenario against an unpartitioned table. */
@TestTemplate
public void testDifferentDataTypesAggregatePushDownInNonPartitionedTable() {
  testDifferentDataTypesAggregatePushDown(false);
}
/**
 * Verifies that MIN/MAX/COUNT aggregates over long, int, boolean, float, double, decimal and
 * binary columns are pushed down to the Iceberg scan (every aggregate appears in the EXPLAIN
 * output), and that the query results match the expected values.
 *
 * @param hasPartitionCol whether the table is partitioned by {@code id}
 */
private void testDifferentDataTypesAggregatePushDown(boolean hasPartitionCol) {
  String createTable;
  if (hasPartitionCol) {
    createTable =
        "CREATE TABLE %s (id LONG, int_data INT, boolean_data BOOLEAN, float_data FLOAT, double_data DOUBLE, "
            + "decimal_data DECIMAL(14, 2), binary_data binary) USING iceberg PARTITIONED BY (id)";
  } else {
    createTable =
        "CREATE TABLE %s (id LONG, int_data INT, boolean_data BOOLEAN, float_data FLOAT, double_data DOUBLE, "
            + "decimal_data DECIMAL(14, 2), binary_data binary) USING iceberg";
  }
  sql(createTable, tableName);
  sql(
      "INSERT INTO TABLE %s VALUES "
          + "(1, null, false, null, null, 11.11, X'1111'),"
          + " (1, null, true, 2.222, 2.222222, 22.22, X'2222'),"
          + " (2, 33, false, 3.333, 3.333333, 33.33, X'3333'),"
          + " (2, 44, true, null, 4.444444, 44.44, X'4444'),"
          + " (3, 55, false, 5.555, 5.555555, 55.55, X'5555'),"
          + " (3, null, true, null, 6.666666, 66.66, null) ",
      tableName);
  String select =
      "SELECT count(*), max(id), min(id), count(id), "
          + "max(int_data), min(int_data), count(int_data), "
          + "max(boolean_data), min(boolean_data), count(boolean_data), "
          + "max(float_data), min(float_data), count(float_data), "
          + "max(double_data), min(double_data), count(double_data), "
          + "max(decimal_data), min(decimal_data), count(decimal_data), "
          + "max(binary_data), min(binary_data), count(binary_data) FROM %s";
  List<Object[]> explain = sql("EXPLAIN " + select, tableName);
  String explainString = explain.get(0)[0].toString().toLowerCase(Locale.ROOT);
  // Every aggregate in the query must appear in the plan for push down to be considered
  // successful; checking a list avoids a 22-clause boolean chain that is easy to get wrong.
  List<String> pushedAggregates =
      Arrays.asList(
          "count(*)", "max(id)", "min(id)", "count(id)",
          "max(int_data)", "min(int_data)", "count(int_data)",
          "max(boolean_data)", "min(boolean_data)", "count(boolean_data)",
          "max(float_data)", "min(float_data)", "count(float_data)",
          "max(double_data)", "min(double_data)", "count(double_data)",
          "max(decimal_data)", "min(decimal_data)", "count(decimal_data)",
          "max(binary_data)", "min(binary_data)", "count(binary_data)");
  boolean explainContainsPushDownAggregates =
      pushedAggregates.stream().allMatch(explainString::contains);
  assertThat(explainContainsPushDownAggregates)
      .as("explain should contain the pushed down aggregates")
      .isTrue();
  List<Object[]> actual = sql(select, tableName);
  List<Object[]> expected = Lists.newArrayList();
  expected.add(
      new Object[] {
        6L,
        3L,
        1L,
        6L,
        55,
        33,
        3L,
        true,
        false,
        6L,
        5.555f,
        2.222f,
        3L,
        6.666666,
        2.222222,
        5L,
        new BigDecimal("66.66"),
        new BigDecimal("11.11"),
        6L,
        new byte[] {85, 85},
        new byte[] {17, 17},
        5L
      });
  assertEquals("min/max/count push down", expected, actual);
}
/**
 * Verifies MIN/MAX/COUNT push down for date and timestamp columns on a partitioned table, with
 * nulls present in both columns.
 */
@TestTemplate
public void testDateAndTimestampWithPartition() {
  sql(
      "CREATE TABLE %s (id bigint, data string, d date, ts timestamp) USING iceberg PARTITIONED BY (id)",
      tableName);
  sql(
      "INSERT INTO %s VALUES (1, '1', date('2021-11-10'), null),"
          + "(1, '2', date('2021-11-11'), timestamp('2021-11-11 22:22:22')), "
          + "(2, '3', date('2021-11-12'), timestamp('2021-11-12 22:22:22')), "
          + "(2, '4', date('2021-11-13'), timestamp('2021-11-13 22:22:22')), "
          + "(3, '5', null, timestamp('2021-11-14 22:22:22')), "
          + "(3, '6', date('2021-11-14'), null)",
      tableName);
  String select = "SELECT max(d), min(d), count(d), max(ts), min(ts), count(ts) FROM %s";
  List<Object[]> explainRows = sql("EXPLAIN " + select, tableName);
  String plan = explainRows.get(0)[0].toString().toLowerCase(Locale.ROOT);
  // All six aggregates must show up in the plan for push down to count as successful.
  boolean explainContainsPushDownAggregates =
      plan.contains("max(d)")
          && plan.contains("min(d)")
          && plan.contains("count(d)")
          && plan.contains("max(ts)")
          && plan.contains("min(ts)")
          && plan.contains("count(ts)");
  assertThat(explainContainsPushDownAggregates)
      .as("explain should contain the pushed down aggregates")
      .isTrue();
  List<Object[]> rows = sql(select, tableName);
  List<Object[]> expectedRows = Lists.newArrayList();
  expectedRows.add(
      new Object[] {
        Date.valueOf("2021-11-14"),
        Date.valueOf("2021-11-10"),
        5L,
        Timestamp.valueOf("2021-11-14 22:22:22.0"),
        Timestamp.valueOf("2021-11-11 22:22:22.0"),
        4L
      });
  assertEquals("min/max/count push down", expectedRows, rows);
}
/**
 * SUM cannot be answered from Iceberg metadata, so a query mixing COUNT and SUM must not push
 * down either aggregate.
 */
@TestTemplate
public void testAggregateNotPushDownIfOneCantPushDown() {
  sql("CREATE TABLE %s (id LONG, data DOUBLE) USING iceberg", tableName);
  sql(
      "INSERT INTO TABLE %s VALUES (1, 1111), (1, 2222), (2, 3333), (2, 4444), (3, 5555), (3, 6666) ",
      tableName);
  String select = "SELECT COUNT(data), SUM(data) FROM %s";
  List<Object[]> explainRows = sql("EXPLAIN " + select, tableName);
  String plan = explainRows.get(0)[0].toString().toLowerCase(Locale.ROOT);
  boolean explainContainsPushDownAggregates = plan.contains("count(data)");
  assertThat(explainContainsPushDownAggregates)
      .as("explain should not contain the pushed down aggregates")
      .isFalse();
  List<Object[]> rows = sql(select, tableName);
  List<Object[]> expectedRows = Lists.newArrayList();
  expectedRows.add(new Object[] {6L, 23331.0});
  assertEquals("expected and actual should equal", expectedRows, rows);
}
/**
 * Verifies how per-column metrics modes affect aggregate push down: a column with metrics mode
 * {@code none} cannot serve COUNT from metadata, while {@code counts} supports COUNT but not
 * MAX.
 */
@TestTemplate
public void testAggregatePushDownWithMetricsMode() {
  sql("CREATE TABLE %s (id LONG, data DOUBLE) USING iceberg", tableName);
  sql(
      "ALTER TABLE %s SET TBLPROPERTIES('%s' '%s')",
      tableName, TableProperties.DEFAULT_WRITE_METRICS_MODE, "none");
  sql(
      "ALTER TABLE %s SET TBLPROPERTIES('%s' '%s')",
      tableName, TableProperties.METRICS_MODE_COLUMN_CONF_PREFIX + "id", "counts");
  sql(
      "ALTER TABLE %s SET TBLPROPERTIES('%s' '%s')",
      tableName, TableProperties.METRICS_MODE_COLUMN_CONF_PREFIX + "data", "none");
  sql(
      "INSERT INTO TABLE %s VALUES (1, 1111), (1, 2222), (2, 3333), (2, 4444), (3, 5555), (3, 6666)",
      tableName);
  String select1 = "SELECT COUNT(data) FROM %s";
  List<Object[]> explain1 = sql("EXPLAIN " + select1, tableName);
  String explainString1 = explain1.get(0)[0].toString().toLowerCase(Locale.ROOT);
  boolean explainContainsPushDownAggregates = false;
  if (explainString1.contains("count(data)")) {
    explainContainsPushDownAggregates = true;
  }
  // count(data) is not pushed down because the metrics mode is `none`
  assertThat(explainContainsPushDownAggregates)
      .as("explain should not contain the pushed down aggregates")
      .isFalse();
  List<Object[]> actual1 = sql(select1, tableName);
  List<Object[]> expected1 = Lists.newArrayList();
  expected1.add(new Object[] {6L});
  assertEquals("expected and actual should equal", expected1, actual1);
  String select2 = "SELECT COUNT(id) FROM %s";
  List<Object[]> explain2 = sql("EXPLAIN " + select2, tableName);
  String explainString2 = explain2.get(0)[0].toString().toLowerCase(Locale.ROOT);
  // The flag is reused without a reset here; it is known to be false because of the isFalse
  // assertion above.
  if (explainString2.contains("count(id)")) {
    explainContainsPushDownAggregates = true;
  }
  // count(id) is pushed down because the metrics mode is `counts`
  assertThat(explainContainsPushDownAggregates)
      .as("explain should contain the pushed down aggregates")
      .isTrue();
  List<Object[]> actual2 = sql(select2, tableName);
  List<Object[]> expected2 = Lists.newArrayList();
  expected2.add(new Object[] {6L});
  assertEquals("expected and actual should equal", expected2, actual2);
  String select3 = "SELECT COUNT(id), MAX(id) FROM %s";
  explainContainsPushDownAggregates = false;
  List<Object[]> explain3 = sql("EXPLAIN " + select3, tableName);
  String explainString3 = explain3.get(0)[0].toString().toLowerCase(Locale.ROOT);
  if (explainString3.contains("count(id)")) {
    explainContainsPushDownAggregates = true;
  }
  // COUNT(id), MAX(id) are not pushed down because MAX(id) is not pushed down (metrics mode is
  // `counts`)
  assertThat(explainContainsPushDownAggregates)
      .as("explain should not contain the pushed down aggregates")
      .isFalse();
  List<Object[]> actual3 = sql(select3, tableName);
  List<Object[]> expected3 = Lists.newArrayList();
  expected3.add(new Object[] {6L, 3L});
  assertEquals("expected and actual should equal", expected3, actual3);
}
/**
 * Verifies push down behavior for string columns: with truncated metrics, MAX(data) cannot be
 * pushed down (which also blocks MAX(id) in the same query) while COUNT(data) can; with `full`
 * metrics both COUNT(data) and MAX(data) are pushed down.
 */
@TestTemplate
public void testAggregateNotPushDownForStringType() {
  sql("CREATE TABLE %s (id LONG, data STRING) USING iceberg", tableName);
  sql(
      "INSERT INTO TABLE %s VALUES (1, '1111'), (1, '2222'), (2, '3333'), (2, '4444'), (3, '5555'), (3, '6666') ",
      tableName);
  sql(
      "ALTER TABLE %s SET TBLPROPERTIES('%s' '%s')",
      tableName, TableProperties.DEFAULT_WRITE_METRICS_MODE, "truncate(16)");
  String select1 = "SELECT MAX(id), MAX(data) FROM %s";
  List<Object[]> explain1 = sql("EXPLAIN " + select1, tableName);
  String explainString1 = explain1.get(0)[0].toString().toLowerCase(Locale.ROOT);
  boolean explainContainsPushDownAggregates = false;
  if (explainString1.contains("max(id)")) {
    explainContainsPushDownAggregates = true;
  }
  // MAX over a truncated string column cannot be served from metadata, so nothing is pushed.
  assertThat(explainContainsPushDownAggregates)
      .as("explain should not contain the pushed down aggregates")
      .isFalse();
  List<Object[]> actual1 = sql(select1, tableName);
  List<Object[]> expected1 = Lists.newArrayList();
  expected1.add(new Object[] {3L, "6666"});
  assertEquals("expected and actual should equal", expected1, actual1);
  String select2 = "SELECT COUNT(data) FROM %s";
  List<Object[]> explain2 = sql("EXPLAIN " + select2, tableName);
  String explainString2 = explain2.get(0)[0].toString().toLowerCase(Locale.ROOT);
  // The flag is reused without a reset here; it is known to be false because of the isFalse
  // assertion above.
  if (explainString2.contains("count(data)")) {
    explainContainsPushDownAggregates = true;
  }
  assertThat(explainContainsPushDownAggregates)
      .as("explain should contain the pushed down aggregates")
      .isTrue();
  List<Object[]> actual2 = sql(select2, tableName);
  List<Object[]> expected2 = Lists.newArrayList();
  expected2.add(new Object[] {6L});
  assertEquals("expected and actual should equal", expected2, actual2);
  explainContainsPushDownAggregates = false;
  sql(
      "ALTER TABLE %s SET TBLPROPERTIES('%s' '%s')",
      tableName, TableProperties.DEFAULT_WRITE_METRICS_MODE, "full");
  String select3 = "SELECT count(data), max(data) FROM %s";
  List<Object[]> explain3 = sql("EXPLAIN " + select3, tableName);
  String explainString3 = explain3.get(0)[0].toString().toLowerCase(Locale.ROOT);
  if (explainString3.contains("count(data)") && explainString3.contains("max(data)")) {
    explainContainsPushDownAggregates = true;
  }
  assertThat(explainContainsPushDownAggregates)
      .as("explain should contain the pushed down aggregates")
      .isTrue();
  List<Object[]> actual3 = sql(select3, tableName);
  List<Object[]> expected3 = Lists.newArrayList();
  expected3.add(new Object[] {6L, "6666"});
  assertEquals("expected and actual should equal", expected3, actual3);
}
/**
 * Aggregate push-down must be disabled when the WHERE clause filters on a data (non-partition)
 * column, because the row filter cannot be completely pushed down to the scan.
 */
@TestTemplate
public void testAggregatePushDownWithDataFilter() {
  testAggregatePushDownWithFilter(false);
}
/**
 * Aggregate push-down stays enabled when the WHERE clause filters only on partition columns,
 * because such a filter is completely pushed down to the scan.
 */
@TestTemplate
public void testAggregatePushDownWithPartitionFilter() {
  testAggregatePushDownWithFilter(true);
}
/**
 * Verifies how aggregate push-down interacts with row filters.
 *
 * <p>When the filter references a non-partition column, the filter cannot be completely pushed
 * down, so aggregate push-down must be disabled. When the table is partitioned by {@code id},
 * the {@code id > 1} filter is a pure partition filter, is completely pushed down, and aggregate
 * push-down stays enabled. Either way the query result must be correct.
 *
 * @param partitionFilterOnly true to partition the table by {@code id} so the filter only
 *     touches partition columns (fixes original param-name typo {@code partitionFilerOnly})
 */
private void testAggregatePushDownWithFilter(boolean partitionFilterOnly) {
  String createTable;
  if (!partitionFilterOnly) {
    createTable = "CREATE TABLE %s (id LONG, data INT) USING iceberg";
  } else {
    createTable = "CREATE TABLE %s (id LONG, data INT) USING iceberg PARTITIONED BY (id)";
  }
  sql(createTable, tableName);
  sql(
      "INSERT INTO TABLE %s VALUES"
          + " (1, 11),"
          + " (1, 22),"
          + " (2, 33),"
          + " (2, 44),"
          + " (3, 55),"
          + " (3, 66) ",
      tableName);
  String select = "SELECT MIN(data) FROM %s WHERE id > 1";
  List<Object[]> explain = sql("EXPLAIN " + select, tableName);
  String explainString = explain.get(0)[0].toString().toLowerCase(Locale.ROOT);
  // Push-down happened iff MIN(data) survives into the optimized plan text.
  boolean explainContainsPushDownAggregates = explainString.contains("min(data)");
  if (!partitionFilterOnly) {
    // Filter is not completely pushed down, so aggregates cannot be pushed down.
    assertThat(explainContainsPushDownAggregates)
        .as("explain should not contain the pushed down aggregates")
        .isFalse();
  } else {
    // Filter is completely pushed down, so aggregates can be pushed down.
    // (Original comment wrongly said "not completely pushed down".)
    assertThat(explainContainsPushDownAggregates)
        .as("explain should contain the pushed down aggregates")
        .isTrue();
  }
  List<Object[]> actual = sql(select, tableName);
  List<Object[]> expected = Lists.newArrayList();
  expected.add(new Object[] {33});
  assertEquals("expected and actual should equal", expected, actual);
}
/**
 * Verifies that COUNT and MAX are not pushed down for struct (complex) columns, while the query
 * results remain correct via regular Spark-side aggregation.
 */
@TestTemplate
public void testAggregateWithComplexType() {
  sql("CREATE TABLE %s (id INT, complex STRUCT<c1:INT,c2:STRING>) USING iceberg", tableName);
  sql(
      "INSERT INTO TABLE %s VALUES (1, named_struct(\"c1\", 3, \"c2\", \"v1\")),"
          + "(2, named_struct(\"c1\", 2, \"c2\", \"v2\")), (3, null)",
      tableName);
  String select1 = "SELECT count(complex), count(id) FROM %s";
  List<Object[]> explain = sql("EXPLAIN " + select1, tableName);
  String explainString = explain.get(0)[0].toString().toLowerCase(Locale.ROOT);
  // count(complex) appearing in the plan would mean it was pushed down; it must not be.
  boolean explainContainsPushDownAggregates = explainString.contains("count(complex)");
  assertThat(explainContainsPushDownAggregates)
      .as("count not pushed down for complex types")
      .isFalse();
  List<Object[]> actual = sql(select1, tableName);
  List<Object[]> expected = Lists.newArrayList();
  expected.add(new Object[] {2L, 3L});
  // Bug fix: arguments were swapped (actual, expected); helper expects (expected, actual).
  assertEquals("count not push down", expected, actual);
  String select2 = "SELECT max(complex) FROM %s";
  explain = sql("EXPLAIN " + select2, tableName);
  explainString = explain.get(0)[0].toString().toLowerCase(Locale.ROOT);
  boolean maxPushedDown = explainString.contains("max(complex)");
  assertThat(maxPushedDown).as("max not pushed down for complex types").isFalse();
}
/**
 * Checks COUNT/MAX/MIN push-down on a BIGINT field nested one level inside a struct; the NULL
 * value in the first row is excluded by all three aggregates.
 */
@TestTemplate
public void testAggregationPushdownStructInteger() {
  sql("CREATE TABLE %s (id BIGINT, struct_with_int STRUCT<c1:BIGINT>) USING iceberg", tableName);
  sql("INSERT INTO TABLE %s VALUES (1, named_struct(\"c1\", NULL))", tableName);
  sql("INSERT INTO TABLE %s VALUES (2, named_struct(\"c1\", 2))", tableName);
  sql("INSERT INTO TABLE %s VALUES (3, named_struct(\"c1\", 3))", tableName);
  String aggregateQuery = "SELECT COUNT(%s), MAX(%s), MIN(%s) FROM %s";
  String field = "struct_with_int.c1";
  List<Object[]> rows = sql(aggregateQuery, field, field, field, tableName);
  assertAggregates(rows, 2L, 3L, 2L);
  List<Object[]> explain = sql("EXPLAIN " + aggregateQuery, field, field, field, tableName);
  assertExplainContains(
      explain,
      "count(struct_with_int.c1)",
      "max(struct_with_int.c1)",
      "min(struct_with_int.c1)");
}
/**
 * Checks COUNT/MAX/MIN push-down on a BIGINT field nested four struct levels deep
 * ({@code struct_with_int.c1.c2.c3.c4}); push-down is asserted via explain-plan fragments and
 * the NULL leaf value is excluded by all three aggregates.
 */
@TestTemplate
public void testAggregationPushdownNestedStruct() {
  sql(
      "CREATE TABLE %s (id BIGINT, struct_with_int STRUCT<c1:STRUCT<c2:STRUCT<c3:STRUCT<c4:BIGINT>>>>) USING iceberg",
      tableName);
  sql(
      "INSERT INTO TABLE %s VALUES (1, named_struct(\"c1\", named_struct(\"c2\", named_struct(\"c3\", named_struct(\"c4\", NULL)))))",
      tableName);
  sql(
      "INSERT INTO TABLE %s VALUES (2, named_struct(\"c1\", named_struct(\"c2\", named_struct(\"c3\", named_struct(\"c4\", 2)))))",
      tableName);
  sql(
      "INSERT INTO TABLE %s VALUES (3, named_struct(\"c1\", named_struct(\"c2\", named_struct(\"c3\", named_struct(\"c4\", 3)))))",
      tableName);
  String query = "SELECT COUNT(%s), MAX(%s), MIN(%s) FROM %s";
  String aggField = "struct_with_int.c1.c2.c3.c4";
  // Rows 2 and 3 carry the only non-null leaves, so count=2, max=3, min=2.
  assertAggregates(sql(query, aggField, aggField, aggField, tableName), 2L, 3L, 2L);
  assertExplainContains(
      sql("EXPLAIN " + query, aggField, aggField, aggField, tableName),
      "count(struct_with_int.c1.c2.c3.c4)",
      "max(struct_with_int.c1.c2.c3.c4)",
      "min(struct_with_int.c1.c2.c3.c4)");
}
/**
 * Checks COUNT/MAX/MIN push-down on a TIMESTAMP field nested inside a struct; the NULL row is
 * excluded by all three aggregates.
 */
@TestTemplate
public void testAggregationPushdownStructTimestamp() {
  sql(
      "CREATE TABLE %s (id BIGINT, struct_with_ts STRUCT<c1:TIMESTAMP>) USING iceberg",
      tableName);
  sql("INSERT INTO TABLE %s VALUES (1, named_struct(\"c1\", NULL))", tableName);
  sql(
      "INSERT INTO TABLE %s VALUES (2, named_struct(\"c1\", timestamp('2023-01-30T22:22:22Z')))",
      tableName);
  sql(
      "INSERT INTO TABLE %s VALUES (3, named_struct(\"c1\", timestamp('2023-01-30T22:23:23Z')))",
      tableName);
  String query = "SELECT COUNT(%s), MAX(%s), MIN(%s) FROM %s";
  String aggField = "struct_with_ts.c1";
  // Epoch millis: 1675117403000 = 2023-01-30T22:23:23Z (max), 1675117342000 = 22:22:22Z (min).
  assertAggregates(
      sql(query, aggField, aggField, aggField, tableName),
      2L,
      new Timestamp(1675117403000L),
      new Timestamp(1675117342000L));
  assertExplainContains(
      sql("EXPLAIN " + query, aggField, aggField, aggField, tableName),
      "count(struct_with_ts.c1)",
      "max(struct_with_ts.c1)",
      "min(struct_with_ts.c1)");
}
/**
 * Checks COUNT/MAX/MIN push-down on a column that also feeds a bucket partition transform; the
 * NULL id row is excluded by all three aggregates.
 */
@TestTemplate
public void testAggregationPushdownOnBucketedColumn() {
  sql(
      "CREATE TABLE %s (id BIGINT, struct_with_int STRUCT<c1:INT>) USING iceberg PARTITIONED BY (bucket(8, id))",
      tableName);
  sql("INSERT INTO TABLE %s VALUES (1, named_struct(\"c1\", NULL))", tableName);
  sql("INSERT INTO TABLE %s VALUES (null, named_struct(\"c1\", 2))", tableName);
  sql("INSERT INTO TABLE %s VALUES (2, named_struct(\"c1\", 3))", tableName);
  String aggregateQuery = "SELECT COUNT(%s), MAX(%s), MIN(%s) FROM %s";
  String field = "id";
  List<Object[]> rows = sql(aggregateQuery, field, field, field, tableName);
  assertAggregates(rows, 2L, 2L, 1L);
  List<Object[]> explain = sql("EXPLAIN " + aggregateQuery, field, field, field, tableName);
  assertExplainContains(explain, "count(id)", "max(id)", "min(id)");
}
/**
 * Asserts that the single result row in {@code actual} carries the expected COUNT, MAX and MIN
 * values, in that column order.
 */
private void assertAggregates(
    List<Object[]> actual, Object expectedCount, Object expectedMax, Object expectedMin) {
  Object[] row = actual.get(0);
  assertThat(row[0]).as("Expected and actual count should equal").isEqualTo(expectedCount);
  assertThat(row[1]).as("Expected and actual max should equal").isEqualTo(expectedMax);
  assertThat(row[2]).as("Expected and actual min should equal").isEqualTo(expectedMin);
}
/** Asserts that the lower-cased explain output contains every expected plan fragment. */
private void assertExplainContains(List<Object[]> explain, String... expectedFragments) {
  String plan = explain.get(0)[0].toString().toLowerCase(Locale.ROOT);
  for (String fragment : expectedFragments) {
    assertThat(plan).as("Expected to find plan fragment in explain plan").contains(fragment);
  }
}
/**
 * min/max/count should still be pushed down after a copy-on-write DELETE rewrote the data, and
 * the deleted row (data = 1111) must not influence the results.
 */
@TestTemplate
public void testAggregatePushDownInDeleteCopyOnWrite() {
  sql("CREATE TABLE %s (id LONG, data INT) USING iceberg", tableName);
  sql(
      "INSERT INTO TABLE %s VALUES (1, 1111), (1, 2222), (2, 3333), (2, 4444), (3, 5555), (3, 6666) ",
      tableName);
  sql("DELETE FROM %s WHERE data = 1111", tableName);
  String select = "SELECT max(data), min(data), count(data) FROM %s";
  String plan = sql("EXPLAIN " + select, tableName).get(0)[0].toString().toLowerCase(Locale.ROOT);
  boolean pushedDown =
      plan.contains("max(data)") && plan.contains("min(data)") && plan.contains("count(data)");
  assertThat(pushedDown).as("min/max/count pushed down for deleted").isTrue();
  List<Object[]> actual = sql(select, tableName);
  List<Object[]> expected = Lists.newArrayList();
  expected.add(new Object[] {6666, 2222, 5L});
  assertEquals("min/max/count push down", expected, actual);
}
/**
 * COUNT push-down should work both for a time-travel (VERSION AS OF) read and for a current-table
 * read, with each returning the count as of its own snapshot.
 */
@TestTemplate
public void testAggregatePushDownForTimeTravel() {
  sql("CREATE TABLE %s (id LONG, data INT) USING iceberg", tableName);
  sql(
      "INSERT INTO TABLE %s VALUES (1, 1111), (1, 2222), (2, 3333), (2, 4444), (3, 5555), (3, 6666) ",
      tableName);
  long snapshotId = validationCatalog.loadTable(tableIdent).currentSnapshot().snapshotId();
  List<Object[]> countAtSnapshot = sql("SELECT count(id) FROM %s", tableName);
  sql("INSERT INTO %s VALUES (4, 7777), (5, 8888)", tableName);
  List<Object[]> countAtHead = sql("SELECT count(id) FROM %s", tableName);
  String timeTravelPlan =
      sql("EXPLAIN SELECT count(id) FROM %s VERSION AS OF %s", tableName, snapshotId)
          .get(0)[0]
          .toString()
          .toLowerCase(Locale.ROOT);
  assertThat(timeTravelPlan.contains("count(id)")).as("count pushed down").isTrue();
  List<Object[]> timeTravelResult =
      sql("SELECT count(id) FROM %s VERSION AS OF %s", tableName, snapshotId);
  assertEquals("count push down", countAtSnapshot, timeTravelResult);
  String currentPlan =
      sql("EXPLAIN SELECT count(id) FROM %s", tableName)
          .get(0)[0]
          .toString()
          .toLowerCase(Locale.ROOT);
  assertThat(currentPlan.contains("count(id)")).as("count pushed down").isTrue();
  assertEquals("count push down", countAtHead, sql("SELECT count(id) FROM %s", tableName));
}
/**
 * When every value of a column is NULL, the aggregates should still be pushed down and yield
 * count(*) = 6, max/min = null, count(data) = 0.
 */
@TestTemplate
public void testAllNull() {
  sql("CREATE TABLE %s (id int, data int) USING iceberg PARTITIONED BY (id)", tableName);
  sql(
      "INSERT INTO %s VALUES (1, null),"
          + "(1, null), "
          + "(2, null), "
          + "(2, null), "
          + "(3, null), "
          + "(3, null)",
      tableName);
  String select = "SELECT count(*), max(data), min(data), count(data) FROM %s";
  String plan = sql("EXPLAIN " + select, tableName).get(0)[0].toString().toLowerCase(Locale.ROOT);
  boolean pushedDown =
      plan.contains("max(data)") && plan.contains("min(data)") && plan.contains("count(data)");
  assertThat(pushedDown).as("explain should contain the pushed down aggregates").isTrue();
  List<Object[]> actual = sql(select, tableName);
  List<Object[]> expected = Lists.newArrayList();
  expected.add(new Object[] {6L, null, null, 0L});
  assertEquals("min/max/count push down", expected, actual);
}
/**
 * When a float column contains only NaN values, none of max/min/count may be pushed down
 * (asserted via the explain plan), yet the regular scan must still return NaN results.
 */
@TestTemplate
public void testAllNaN() {
  sql("CREATE TABLE %s (id int, data float) USING iceberg PARTITIONED BY (id)", tableName);
  sql(
      "INSERT INTO %s VALUES (1, float('nan')),"
          + "(1, float('nan')), "
          + "(2, float('nan')), "
          + "(2, float('nan')), "
          + "(3, float('nan')), "
          + "(3, float('nan'))",
      tableName);
  String select = "SELECT count(*), max(data), min(data), count(data) FROM %s";
  String plan = sql("EXPLAIN " + select, tableName).get(0)[0].toString().toLowerCase(Locale.ROOT);
  // Any one of these fragments in the plan would mean an aggregate was pushed down.
  boolean pushedDown =
      plan.contains("max(data)") || plan.contains("min(data)") || plan.contains("count(data)");
  assertThat(pushedDown).as("explain should not contain the pushed down aggregates").isFalse();
  List<Object[]> actual = sql(select, tableName);
  List<Object[]> expected = Lists.newArrayList();
  expected.add(new Object[] {6L, Float.NaN, Float.NaN, 6L});
  assertEquals("expected and actual should equal", expected, actual);
}
/**
 * When a float column mixes NaN with normal values, none of max/min/count may be pushed down
 * (asserted via the explain plan); the regular scan returns max = NaN, min = 1.0.
 */
@TestTemplate
public void testNaN() {
  sql("CREATE TABLE %s (id int, data float) USING iceberg PARTITIONED BY (id)", tableName);
  sql(
      "INSERT INTO %s VALUES (1, float('nan')),"
          + "(1, float('nan')), "
          + "(2, 2), "
          + "(2, float('nan')), "
          + "(3, float('nan')), "
          + "(3, 1)",
      tableName);
  String select = "SELECT count(*), max(data), min(data), count(data) FROM %s";
  String plan = sql("EXPLAIN " + select, tableName).get(0)[0].toString().toLowerCase(Locale.ROOT);
  // Any one of these fragments in the plan would mean an aggregate was pushed down.
  boolean pushedDown =
      plan.contains("max(data)") || plan.contains("min(data)") || plan.contains("count(data)");
  assertThat(pushedDown).as("explain should not contain the pushed down aggregates").isFalse();
  List<Object[]> actual = sql(select, tableName);
  List<Object[]> expected = Lists.newArrayList();
  expected.add(new Object[] {6L, Float.NaN, 1.0F, 6L});
  assertEquals("expected and actual should equal", expected, actual);
}
/**
 * Infinity values (unlike NaN) should keep min/max/count push-down enabled and be returned as
 * +/-infinity in the aggregate results.
 */
@TestTemplate
public void testInfinity() {
  sql(
      "CREATE TABLE %s (id int, data1 float, data2 double, data3 double) USING iceberg PARTITIONED BY (id)",
      tableName);
  sql(
      "INSERT INTO %s VALUES (1, float('-infinity'), double('infinity'), 1.23), "
          + "(1, float('-infinity'), double('infinity'), -1.23), "
          + "(1, float('-infinity'), double('infinity'), double('infinity')), "
          + "(1, float('-infinity'), double('infinity'), 2.23), "
          + "(1, float('-infinity'), double('infinity'), double('-infinity')), "
          + "(1, float('-infinity'), double('infinity'), -2.23)",
      tableName);
  String select =
      "SELECT count(*), max(data1), min(data1), count(data1), max(data2), min(data2), count(data2), max(data3), min(data3), count(data3) FROM %s";
  String plan = sql("EXPLAIN " + select, tableName).get(0)[0].toString().toLowerCase(Locale.ROOT);
  // All nine per-column aggregates must survive into the pushed-down plan.
  boolean pushedDown = true;
  for (String column : new String[] {"data1", "data2", "data3"}) {
    pushedDown =
        pushedDown
            && plan.contains("max(" + column + ")")
            && plan.contains("min(" + column + ")")
            && plan.contains("count(" + column + ")");
  }
  assertThat(pushedDown).as("explain should contain the pushed down aggregates").isTrue();
  List<Object[]> actual = sql(select, tableName);
  List<Object[]> expected = Lists.newArrayList();
  expected.add(
      new Object[] {
        6L,
        Float.NEGATIVE_INFINITY,
        Float.NEGATIVE_INFINITY,
        6L,
        Double.POSITIVE_INFINITY,
        Double.POSITIVE_INFINITY,
        6L,
        Double.POSITIVE_INFINITY,
        Double.NEGATIVE_INFINITY,
        6L
      });
  assertEquals("min/max/count push down", expected, actual);
}
/**
 * Verifies min/max/count push-down for incremental reads: one bounded by start/end snapshot ids
 * and one unbounded (start snapshot only).
 */
@TestTemplate
public void testAggregatePushDownForIncrementalScan() {
  sql("CREATE TABLE %s (id LONG, data INT) USING iceberg", tableName);
  sql(
      "INSERT INTO TABLE %s VALUES (1, 1111), (1, 2222), (2, 3333), (2, 4444), (3, 5555), (3, 6666) ",
      tableName);
  long snapshotId1 = validationCatalog.loadTable(tableIdent).currentSnapshot().snapshotId();
  sql("INSERT INTO %s VALUES (4, 7777), (5, 8888)", tableName);
  long snapshotId2 = validationCatalog.loadTable(tableIdent).currentSnapshot().snapshotId();
  sql("INSERT INTO %s VALUES (6, -7777), (7, 8888)", tableName);
  long snapshotId3 = validationCatalog.loadTable(tableIdent).currentSnapshot().snapshotId();
  sql("INSERT INTO %s VALUES (8, 7777), (9, 9999)", tableName);
  // Bounded incremental scan: only the third insert (snapshotId2 -> snapshotId3) is visible.
  Dataset<Row> pushdownDs =
      spark
          .read()
          .format("iceberg")
          .option(SparkReadOptions.START_SNAPSHOT_ID, snapshotId2)
          .option(SparkReadOptions.END_SNAPSHOT_ID, snapshotId3)
          .load(tableName)
          .agg(functions.min("data"), functions.max("data"), functions.count("data"));
  String explain1 = pushdownDs.queryExecution().explainString(ExplainMode.fromString("simple"));
  // LocalTableScan in the plan indicates the aggregates were answered without a table scan.
  assertThat(explain1).contains("LocalTableScan", "min(data)", "max(data)", "count(data)");
  List<Object[]> expected1 = Lists.newArrayList();
  expected1.add(new Object[] {-7777, 8888, 2L});
  assertEquals("min/max/count push down", expected1, rowsToJava(pushdownDs.collectAsList()));
  // Unbounded incremental scan: all three inserts after snapshotId1 are visible (6 rows).
  Dataset<Row> unboundedPushdownDs =
      spark
          .read()
          .format("iceberg")
          .option(SparkReadOptions.START_SNAPSHOT_ID, snapshotId1)
          .load(tableName)
          .agg(functions.min("data"), functions.max("data"), functions.count("data"));
  String explain2 =
      unboundedPushdownDs.queryExecution().explainString(ExplainMode.fromString("simple"));
  assertThat(explain2).contains("LocalTableScan", "min(data)", "max(data)", "count(data)");
  List<Object[]> expected2 = Lists.newArrayList();
  expected2.add(new Object[] {-7777, 9999, 6L});
  assertEquals(
      "min/max/count push down", expected2, rowsToJava(unboundedPushdownDs.collectAsList()));
}
}
|
google/schemaorg-java | 35,574 | src/main/java/com/google/schemaorg/core/impl/CarImpl.java | /*
* Copyright 2016 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.schemaorg.core;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Multimap;
import com.google.schemaorg.SchemaOrgTypeImpl;
import com.google.schemaorg.ValueType;
import com.google.schemaorg.core.datatype.Date;
import com.google.schemaorg.core.datatype.Number;
import com.google.schemaorg.core.datatype.Text;
import com.google.schemaorg.core.datatype.URL;
import com.google.schemaorg.goog.GoogConstants;
import com.google.schemaorg.goog.PopularityScoreSpecification;
/** Implementation of {@link Car}. */
public class CarImpl extends VehicleImpl implements Car {
// All schema.org property names that Car accepts; computed once at class-load time.
// NOTE(review): this file is generated builder boilerplate — code kept byte-identical.
private static final ImmutableSet<String> PROPERTY_SET = initializePropertySet();

/**
 * Builds the immutable set of property names supported by {@code Car}: the inherited
 * Product/Vehicle properties plus the Google extension properties (detailedDescription,
 * popularityScore).
 */
private static ImmutableSet<String> initializePropertySet() {
  ImmutableSet.Builder<String> builder = ImmutableSet.builder();
  builder.add(CoreConstants.PROPERTY_ADDITIONAL_PROPERTY);
  builder.add(CoreConstants.PROPERTY_ADDITIONAL_TYPE);
  builder.add(CoreConstants.PROPERTY_AGGREGATE_RATING);
  builder.add(CoreConstants.PROPERTY_ALTERNATE_NAME);
  builder.add(CoreConstants.PROPERTY_AUDIENCE);
  builder.add(CoreConstants.PROPERTY_AWARD);
  builder.add(CoreConstants.PROPERTY_AWARDS);
  builder.add(CoreConstants.PROPERTY_BRAND);
  builder.add(CoreConstants.PROPERTY_CARGO_VOLUME);
  builder.add(CoreConstants.PROPERTY_CATEGORY);
  builder.add(CoreConstants.PROPERTY_COLOR);
  builder.add(CoreConstants.PROPERTY_DATE_VEHICLE_FIRST_REGISTERED);
  builder.add(CoreConstants.PROPERTY_DEPTH);
  builder.add(CoreConstants.PROPERTY_DESCRIPTION);
  builder.add(CoreConstants.PROPERTY_DRIVE_WHEEL_CONFIGURATION);
  builder.add(CoreConstants.PROPERTY_FUEL_CONSUMPTION);
  builder.add(CoreConstants.PROPERTY_FUEL_EFFICIENCY);
  builder.add(CoreConstants.PROPERTY_FUEL_TYPE);
  builder.add(CoreConstants.PROPERTY_GTIN12);
  builder.add(CoreConstants.PROPERTY_GTIN13);
  builder.add(CoreConstants.PROPERTY_GTIN14);
  builder.add(CoreConstants.PROPERTY_GTIN8);
  builder.add(CoreConstants.PROPERTY_HEIGHT);
  builder.add(CoreConstants.PROPERTY_IMAGE);
  builder.add(CoreConstants.PROPERTY_IS_ACCESSORY_OR_SPARE_PART_FOR);
  builder.add(CoreConstants.PROPERTY_IS_CONSUMABLE_FOR);
  builder.add(CoreConstants.PROPERTY_IS_RELATED_TO);
  builder.add(CoreConstants.PROPERTY_IS_SIMILAR_TO);
  builder.add(CoreConstants.PROPERTY_ITEM_CONDITION);
  builder.add(CoreConstants.PROPERTY_KNOWN_VEHICLE_DAMAGES);
  builder.add(CoreConstants.PROPERTY_LOGO);
  builder.add(CoreConstants.PROPERTY_MAIN_ENTITY_OF_PAGE);
  builder.add(CoreConstants.PROPERTY_MANUFACTURER);
  builder.add(CoreConstants.PROPERTY_MILEAGE_FROM_ODOMETER);
  builder.add(CoreConstants.PROPERTY_MODEL);
  builder.add(CoreConstants.PROPERTY_MPN);
  builder.add(CoreConstants.PROPERTY_NAME);
  builder.add(CoreConstants.PROPERTY_NUMBER_OF_AIRBAGS);
  builder.add(CoreConstants.PROPERTY_NUMBER_OF_AXLES);
  builder.add(CoreConstants.PROPERTY_NUMBER_OF_DOORS);
  builder.add(CoreConstants.PROPERTY_NUMBER_OF_FORWARD_GEARS);
  builder.add(CoreConstants.PROPERTY_NUMBER_OF_PREVIOUS_OWNERS);
  builder.add(CoreConstants.PROPERTY_OFFERS);
  builder.add(CoreConstants.PROPERTY_POTENTIAL_ACTION);
  builder.add(CoreConstants.PROPERTY_PRODUCT_ID);
  builder.add(CoreConstants.PROPERTY_PRODUCTION_DATE);
  builder.add(CoreConstants.PROPERTY_PURCHASE_DATE);
  builder.add(CoreConstants.PROPERTY_RELEASE_DATE);
  builder.add(CoreConstants.PROPERTY_REVIEW);
  builder.add(CoreConstants.PROPERTY_REVIEWS);
  builder.add(CoreConstants.PROPERTY_SAME_AS);
  builder.add(CoreConstants.PROPERTY_SKU);
  builder.add(CoreConstants.PROPERTY_STEERING_POSITION);
  builder.add(CoreConstants.PROPERTY_URL);
  builder.add(CoreConstants.PROPERTY_VEHICLE_CONFIGURATION);
  builder.add(CoreConstants.PROPERTY_VEHICLE_ENGINE);
  builder.add(CoreConstants.PROPERTY_VEHICLE_IDENTIFICATION_NUMBER);
  builder.add(CoreConstants.PROPERTY_VEHICLE_INTERIOR_COLOR);
  builder.add(CoreConstants.PROPERTY_VEHICLE_INTERIOR_TYPE);
  builder.add(CoreConstants.PROPERTY_VEHICLE_MODEL_DATE);
  builder.add(CoreConstants.PROPERTY_VEHICLE_SEATING_CAPACITY);
  builder.add(CoreConstants.PROPERTY_VEHICLE_TRANSMISSION);
  builder.add(CoreConstants.PROPERTY_WEIGHT);
  builder.add(CoreConstants.PROPERTY_WIDTH);
  builder.add(GoogConstants.PROPERTY_DETAILED_DESCRIPTION);
  builder.add(GoogConstants.PROPERTY_POPULARITY_SCORE);
  return builder.build();
}
static final class BuilderImpl extends SchemaOrgTypeImpl.BuilderImpl<Car.Builder>
implements Car.Builder {
@Override
public Car.Builder addAdditionalProperty(PropertyValue value) {
return addProperty(CoreConstants.PROPERTY_ADDITIONAL_PROPERTY, value);
}
@Override
public Car.Builder addAdditionalProperty(PropertyValue.Builder value) {
return addProperty(CoreConstants.PROPERTY_ADDITIONAL_PROPERTY, value.build());
}
@Override
public Car.Builder addAdditionalProperty(String value) {
return addProperty(CoreConstants.PROPERTY_ADDITIONAL_PROPERTY, Text.of(value));
}
@Override
public Car.Builder addAdditionalType(URL value) {
return addProperty(CoreConstants.PROPERTY_ADDITIONAL_TYPE, value);
}
@Override
public Car.Builder addAdditionalType(String value) {
return addProperty(CoreConstants.PROPERTY_ADDITIONAL_TYPE, Text.of(value));
}
@Override
public Car.Builder addAggregateRating(AggregateRating value) {
return addProperty(CoreConstants.PROPERTY_AGGREGATE_RATING, value);
}
@Override
public Car.Builder addAggregateRating(AggregateRating.Builder value) {
return addProperty(CoreConstants.PROPERTY_AGGREGATE_RATING, value.build());
}
@Override
public Car.Builder addAggregateRating(String value) {
return addProperty(CoreConstants.PROPERTY_AGGREGATE_RATING, Text.of(value));
}
@Override
public Car.Builder addAlternateName(Text value) {
return addProperty(CoreConstants.PROPERTY_ALTERNATE_NAME, value);
}
@Override
public Car.Builder addAlternateName(String value) {
return addProperty(CoreConstants.PROPERTY_ALTERNATE_NAME, Text.of(value));
}
@Override
public Car.Builder addAudience(Audience value) {
return addProperty(CoreConstants.PROPERTY_AUDIENCE, value);
}
@Override
public Car.Builder addAudience(Audience.Builder value) {
return addProperty(CoreConstants.PROPERTY_AUDIENCE, value.build());
}
@Override
public Car.Builder addAudience(String value) {
return addProperty(CoreConstants.PROPERTY_AUDIENCE, Text.of(value));
}
@Override
public Car.Builder addAward(Text value) {
return addProperty(CoreConstants.PROPERTY_AWARD, value);
}
@Override
public Car.Builder addAward(String value) {
return addProperty(CoreConstants.PROPERTY_AWARD, Text.of(value));
}
@Override
public Car.Builder addAwards(Text value) {
return addProperty(CoreConstants.PROPERTY_AWARDS, value);
}
@Override
public Car.Builder addAwards(String value) {
return addProperty(CoreConstants.PROPERTY_AWARDS, Text.of(value));
}
@Override
public Car.Builder addBrand(Brand value) {
return addProperty(CoreConstants.PROPERTY_BRAND, value);
}
@Override
public Car.Builder addBrand(Brand.Builder value) {
return addProperty(CoreConstants.PROPERTY_BRAND, value.build());
}
@Override
public Car.Builder addBrand(Organization value) {
return addProperty(CoreConstants.PROPERTY_BRAND, value);
}
@Override
public Car.Builder addBrand(Organization.Builder value) {
return addProperty(CoreConstants.PROPERTY_BRAND, value.build());
}
@Override
public Car.Builder addBrand(String value) {
return addProperty(CoreConstants.PROPERTY_BRAND, Text.of(value));
}
@Override
public Car.Builder addCargoVolume(QuantitativeValue value) {
return addProperty(CoreConstants.PROPERTY_CARGO_VOLUME, value);
}
@Override
public Car.Builder addCargoVolume(QuantitativeValue.Builder value) {
return addProperty(CoreConstants.PROPERTY_CARGO_VOLUME, value.build());
}
@Override
public Car.Builder addCargoVolume(String value) {
return addProperty(CoreConstants.PROPERTY_CARGO_VOLUME, Text.of(value));
}
@Override
public Car.Builder addCategory(PhysicalActivityCategory value) {
return addProperty(CoreConstants.PROPERTY_CATEGORY, value);
}
@Override
public Car.Builder addCategory(Text value) {
return addProperty(CoreConstants.PROPERTY_CATEGORY, value);
}
@Override
public Car.Builder addCategory(Thing value) {
return addProperty(CoreConstants.PROPERTY_CATEGORY, value);
}
@Override
public Car.Builder addCategory(Thing.Builder value) {
return addProperty(CoreConstants.PROPERTY_CATEGORY, value.build());
}
@Override
public Car.Builder addCategory(String value) {
return addProperty(CoreConstants.PROPERTY_CATEGORY, Text.of(value));
}
@Override
public Car.Builder addColor(Text value) {
return addProperty(CoreConstants.PROPERTY_COLOR, value);
}
@Override
public Car.Builder addColor(String value) {
return addProperty(CoreConstants.PROPERTY_COLOR, Text.of(value));
}
@Override
public Car.Builder addDateVehicleFirstRegistered(Date value) {
return addProperty(CoreConstants.PROPERTY_DATE_VEHICLE_FIRST_REGISTERED, value);
}
@Override
public Car.Builder addDateVehicleFirstRegistered(String value) {
return addProperty(CoreConstants.PROPERTY_DATE_VEHICLE_FIRST_REGISTERED, Text.of(value));
}
@Override
public Car.Builder addDepth(Distance value) {
return addProperty(CoreConstants.PROPERTY_DEPTH, value);
}
@Override
public Car.Builder addDepth(Distance.Builder value) {
return addProperty(CoreConstants.PROPERTY_DEPTH, value.build());
}
@Override
public Car.Builder addDepth(QuantitativeValue value) {
return addProperty(CoreConstants.PROPERTY_DEPTH, value);
}
@Override
public Car.Builder addDepth(QuantitativeValue.Builder value) {
return addProperty(CoreConstants.PROPERTY_DEPTH, value.build());
}
@Override
public Car.Builder addDepth(String value) {
return addProperty(CoreConstants.PROPERTY_DEPTH, Text.of(value));
}
@Override
public Car.Builder addDescription(Text value) {
return addProperty(CoreConstants.PROPERTY_DESCRIPTION, value);
}
@Override
public Car.Builder addDescription(String value) {
return addProperty(CoreConstants.PROPERTY_DESCRIPTION, Text.of(value));
}
@Override
public Car.Builder addDriveWheelConfiguration(DriveWheelConfigurationValue value) {
return addProperty(CoreConstants.PROPERTY_DRIVE_WHEEL_CONFIGURATION, value);
}
@Override
public Car.Builder addDriveWheelConfiguration(Text value) {
return addProperty(CoreConstants.PROPERTY_DRIVE_WHEEL_CONFIGURATION, value);
}
@Override
public Car.Builder addDriveWheelConfiguration(String value) {
return addProperty(CoreConstants.PROPERTY_DRIVE_WHEEL_CONFIGURATION, Text.of(value));
}
@Override
public Car.Builder addFuelConsumption(QuantitativeValue value) {
return addProperty(CoreConstants.PROPERTY_FUEL_CONSUMPTION, value);
}
@Override
public Car.Builder addFuelConsumption(QuantitativeValue.Builder value) {
return addProperty(CoreConstants.PROPERTY_FUEL_CONSUMPTION, value.build());
}
@Override
public Car.Builder addFuelConsumption(String value) {
return addProperty(CoreConstants.PROPERTY_FUEL_CONSUMPTION, Text.of(value));
}
@Override
public Car.Builder addFuelEfficiency(QuantitativeValue value) {
return addProperty(CoreConstants.PROPERTY_FUEL_EFFICIENCY, value);
}
@Override
public Car.Builder addFuelEfficiency(QuantitativeValue.Builder value) {
return addProperty(CoreConstants.PROPERTY_FUEL_EFFICIENCY, value.build());
}
@Override
public Car.Builder addFuelEfficiency(String value) {
return addProperty(CoreConstants.PROPERTY_FUEL_EFFICIENCY, Text.of(value));
}
@Override
public Car.Builder addFuelType(QualitativeValue value) {
return addProperty(CoreConstants.PROPERTY_FUEL_TYPE, value);
}
@Override
public Car.Builder addFuelType(Text value) {
return addProperty(CoreConstants.PROPERTY_FUEL_TYPE, value);
}
@Override
public Car.Builder addFuelType(URL value) {
return addProperty(CoreConstants.PROPERTY_FUEL_TYPE, value);
}
@Override
public Car.Builder addFuelType(String value) {
return addProperty(CoreConstants.PROPERTY_FUEL_TYPE, Text.of(value));
}
@Override
public Car.Builder addGtin12(Text value) {
return addProperty(CoreConstants.PROPERTY_GTIN12, value);
}
@Override
public Car.Builder addGtin12(String value) {
return addProperty(CoreConstants.PROPERTY_GTIN12, Text.of(value));
}
@Override
public Car.Builder addGtin13(Text value) {
return addProperty(CoreConstants.PROPERTY_GTIN13, value);
}
@Override
public Car.Builder addGtin13(String value) {
return addProperty(CoreConstants.PROPERTY_GTIN13, Text.of(value));
}
@Override
public Car.Builder addGtin14(Text value) {
return addProperty(CoreConstants.PROPERTY_GTIN14, value);
}
@Override
public Car.Builder addGtin14(String value) {
return addProperty(CoreConstants.PROPERTY_GTIN14, Text.of(value));
}
@Override
public Car.Builder addGtin8(Text value) {
return addProperty(CoreConstants.PROPERTY_GTIN8, value);
}
@Override
public Car.Builder addGtin8(String value) {
return addProperty(CoreConstants.PROPERTY_GTIN8, Text.of(value));
}
@Override
public Car.Builder addHeight(Distance value) {
return addProperty(CoreConstants.PROPERTY_HEIGHT, value);
}
@Override
public Car.Builder addHeight(Distance.Builder value) {
return addProperty(CoreConstants.PROPERTY_HEIGHT, value.build());
}
@Override
public Car.Builder addHeight(QuantitativeValue value) {
return addProperty(CoreConstants.PROPERTY_HEIGHT, value);
}
@Override
public Car.Builder addHeight(QuantitativeValue.Builder value) {
return addProperty(CoreConstants.PROPERTY_HEIGHT, value.build());
}
@Override
public Car.Builder addHeight(String value) {
return addProperty(CoreConstants.PROPERTY_HEIGHT, Text.of(value));
}
@Override
public Car.Builder addImage(ImageObject value) {
return addProperty(CoreConstants.PROPERTY_IMAGE, value);
}
@Override
public Car.Builder addImage(ImageObject.Builder value) {
return addProperty(CoreConstants.PROPERTY_IMAGE, value.build());
}
@Override
public Car.Builder addImage(URL value) {
return addProperty(CoreConstants.PROPERTY_IMAGE, value);
}
@Override
public Car.Builder addImage(String value) {
return addProperty(CoreConstants.PROPERTY_IMAGE, Text.of(value));
}
@Override
public Car.Builder addIsAccessoryOrSparePartFor(Product value) {
return addProperty(CoreConstants.PROPERTY_IS_ACCESSORY_OR_SPARE_PART_FOR, value);
}
@Override
public Car.Builder addIsAccessoryOrSparePartFor(Product.Builder value) {
return addProperty(CoreConstants.PROPERTY_IS_ACCESSORY_OR_SPARE_PART_FOR, value.build());
}
@Override
public Car.Builder addIsAccessoryOrSparePartFor(String value) {
return addProperty(CoreConstants.PROPERTY_IS_ACCESSORY_OR_SPARE_PART_FOR, Text.of(value));
}
@Override
public Car.Builder addIsConsumableFor(Product value) {
return addProperty(CoreConstants.PROPERTY_IS_CONSUMABLE_FOR, value);
}
@Override
public Car.Builder addIsConsumableFor(Product.Builder value) {
return addProperty(CoreConstants.PROPERTY_IS_CONSUMABLE_FOR, value.build());
}
@Override
public Car.Builder addIsConsumableFor(String value) {
return addProperty(CoreConstants.PROPERTY_IS_CONSUMABLE_FOR, Text.of(value));
}
@Override
public Car.Builder addIsRelatedTo(Product value) {
return addProperty(CoreConstants.PROPERTY_IS_RELATED_TO, value);
}
@Override
public Car.Builder addIsRelatedTo(Product.Builder value) {
return addProperty(CoreConstants.PROPERTY_IS_RELATED_TO, value.build());
}
@Override
public Car.Builder addIsRelatedTo(String value) {
return addProperty(CoreConstants.PROPERTY_IS_RELATED_TO, Text.of(value));
}
@Override
public Car.Builder addIsSimilarTo(Product value) {
return addProperty(CoreConstants.PROPERTY_IS_SIMILAR_TO, value);
}
@Override
public Car.Builder addIsSimilarTo(Product.Builder value) {
return addProperty(CoreConstants.PROPERTY_IS_SIMILAR_TO, value.build());
}
@Override
public Car.Builder addIsSimilarTo(String value) {
return addProperty(CoreConstants.PROPERTY_IS_SIMILAR_TO, Text.of(value));
}
@Override
public Car.Builder addItemCondition(OfferItemCondition value) {
return addProperty(CoreConstants.PROPERTY_ITEM_CONDITION, value);
}
@Override
public Car.Builder addItemCondition(String value) {
return addProperty(CoreConstants.PROPERTY_ITEM_CONDITION, Text.of(value));
}
@Override
public Car.Builder addKnownVehicleDamages(Text value) {
return addProperty(CoreConstants.PROPERTY_KNOWN_VEHICLE_DAMAGES, value);
}
@Override
public Car.Builder addKnownVehicleDamages(String value) {
return addProperty(CoreConstants.PROPERTY_KNOWN_VEHICLE_DAMAGES, Text.of(value));
}
@Override
public Car.Builder addLogo(ImageObject value) {
return addProperty(CoreConstants.PROPERTY_LOGO, value);
}
@Override
public Car.Builder addLogo(ImageObject.Builder value) {
return addProperty(CoreConstants.PROPERTY_LOGO, value.build());
}
@Override
public Car.Builder addLogo(URL value) {
return addProperty(CoreConstants.PROPERTY_LOGO, value);
}
@Override
public Car.Builder addLogo(String value) {
return addProperty(CoreConstants.PROPERTY_LOGO, Text.of(value));
}
@Override
public Car.Builder addMainEntityOfPage(CreativeWork value) {
return addProperty(CoreConstants.PROPERTY_MAIN_ENTITY_OF_PAGE, value);
}
@Override
public Car.Builder addMainEntityOfPage(CreativeWork.Builder value) {
return addProperty(CoreConstants.PROPERTY_MAIN_ENTITY_OF_PAGE, value.build());
}
@Override
public Car.Builder addMainEntityOfPage(URL value) {
return addProperty(CoreConstants.PROPERTY_MAIN_ENTITY_OF_PAGE, value);
}
@Override
public Car.Builder addMainEntityOfPage(String value) {
return addProperty(CoreConstants.PROPERTY_MAIN_ENTITY_OF_PAGE, Text.of(value));
}
@Override
public Car.Builder addManufacturer(Organization value) {
return addProperty(CoreConstants.PROPERTY_MANUFACTURER, value);
}
@Override
public Car.Builder addManufacturer(Organization.Builder value) {
return addProperty(CoreConstants.PROPERTY_MANUFACTURER, value.build());
}
@Override
public Car.Builder addManufacturer(String value) {
return addProperty(CoreConstants.PROPERTY_MANUFACTURER, Text.of(value));
}
@Override
public Car.Builder addMileageFromOdometer(QuantitativeValue value) {
return addProperty(CoreConstants.PROPERTY_MILEAGE_FROM_ODOMETER, value);
}
@Override
public Car.Builder addMileageFromOdometer(QuantitativeValue.Builder value) {
return addProperty(CoreConstants.PROPERTY_MILEAGE_FROM_ODOMETER, value.build());
}
@Override
public Car.Builder addMileageFromOdometer(String value) {
return addProperty(CoreConstants.PROPERTY_MILEAGE_FROM_ODOMETER, Text.of(value));
}
@Override
public Car.Builder addModel(ProductModel value) {
return addProperty(CoreConstants.PROPERTY_MODEL, value);
}
@Override
public Car.Builder addModel(ProductModel.Builder value) {
return addProperty(CoreConstants.PROPERTY_MODEL, value.build());
}
@Override
public Car.Builder addModel(Text value) {
return addProperty(CoreConstants.PROPERTY_MODEL, value);
}
@Override
public Car.Builder addModel(String value) {
return addProperty(CoreConstants.PROPERTY_MODEL, Text.of(value));
}
@Override
public Car.Builder addMpn(Text value) {
return addProperty(CoreConstants.PROPERTY_MPN, value);
}
@Override
public Car.Builder addMpn(String value) {
return addProperty(CoreConstants.PROPERTY_MPN, Text.of(value));
}
@Override
public Car.Builder addName(Text value) {
return addProperty(CoreConstants.PROPERTY_NAME, value);
}
@Override
public Car.Builder addName(String value) {
return addProperty(CoreConstants.PROPERTY_NAME, Text.of(value));
}
@Override
public Car.Builder addNumberOfAirbags(Number value) {
return addProperty(CoreConstants.PROPERTY_NUMBER_OF_AIRBAGS, value);
}
@Override
public Car.Builder addNumberOfAirbags(Text value) {
return addProperty(CoreConstants.PROPERTY_NUMBER_OF_AIRBAGS, value);
}
@Override
public Car.Builder addNumberOfAirbags(String value) {
return addProperty(CoreConstants.PROPERTY_NUMBER_OF_AIRBAGS, Text.of(value));
}
@Override
public Car.Builder addNumberOfAxles(Number value) {
return addProperty(CoreConstants.PROPERTY_NUMBER_OF_AXLES, value);
}
@Override
public Car.Builder addNumberOfAxles(QuantitativeValue value) {
return addProperty(CoreConstants.PROPERTY_NUMBER_OF_AXLES, value);
}
@Override
public Car.Builder addNumberOfAxles(QuantitativeValue.Builder value) {
return addProperty(CoreConstants.PROPERTY_NUMBER_OF_AXLES, value.build());
}
@Override
public Car.Builder addNumberOfAxles(String value) {
return addProperty(CoreConstants.PROPERTY_NUMBER_OF_AXLES, Text.of(value));
}
@Override
public Car.Builder addNumberOfDoors(Number value) {
return addProperty(CoreConstants.PROPERTY_NUMBER_OF_DOORS, value);
}
@Override
public Car.Builder addNumberOfDoors(QuantitativeValue value) {
return addProperty(CoreConstants.PROPERTY_NUMBER_OF_DOORS, value);
}
@Override
public Car.Builder addNumberOfDoors(QuantitativeValue.Builder value) {
return addProperty(CoreConstants.PROPERTY_NUMBER_OF_DOORS, value.build());
}
@Override
public Car.Builder addNumberOfDoors(String value) {
return addProperty(CoreConstants.PROPERTY_NUMBER_OF_DOORS, Text.of(value));
}
@Override
public Car.Builder addNumberOfForwardGears(Number value) {
return addProperty(CoreConstants.PROPERTY_NUMBER_OF_FORWARD_GEARS, value);
}
@Override
public Car.Builder addNumberOfForwardGears(QuantitativeValue value) {
return addProperty(CoreConstants.PROPERTY_NUMBER_OF_FORWARD_GEARS, value);
}
@Override
public Car.Builder addNumberOfForwardGears(QuantitativeValue.Builder value) {
return addProperty(CoreConstants.PROPERTY_NUMBER_OF_FORWARD_GEARS, value.build());
}
@Override
public Car.Builder addNumberOfForwardGears(String value) {
return addProperty(CoreConstants.PROPERTY_NUMBER_OF_FORWARD_GEARS, Text.of(value));
}
@Override
public Car.Builder addNumberOfPreviousOwners(Number value) {
return addProperty(CoreConstants.PROPERTY_NUMBER_OF_PREVIOUS_OWNERS, value);
}
@Override
public Car.Builder addNumberOfPreviousOwners(QuantitativeValue value) {
return addProperty(CoreConstants.PROPERTY_NUMBER_OF_PREVIOUS_OWNERS, value);
}
@Override
public Car.Builder addNumberOfPreviousOwners(QuantitativeValue.Builder value) {
return addProperty(CoreConstants.PROPERTY_NUMBER_OF_PREVIOUS_OWNERS, value.build());
}
@Override
public Car.Builder addNumberOfPreviousOwners(String value) {
return addProperty(CoreConstants.PROPERTY_NUMBER_OF_PREVIOUS_OWNERS, Text.of(value));
}
@Override
public Car.Builder addOffers(Offer value) {
return addProperty(CoreConstants.PROPERTY_OFFERS, value);
}
@Override
public Car.Builder addOffers(Offer.Builder value) {
return addProperty(CoreConstants.PROPERTY_OFFERS, value.build());
}
@Override
public Car.Builder addOffers(String value) {
return addProperty(CoreConstants.PROPERTY_OFFERS, Text.of(value));
}
@Override
public Car.Builder addPotentialAction(Action value) {
return addProperty(CoreConstants.PROPERTY_POTENTIAL_ACTION, value);
}
@Override
public Car.Builder addPotentialAction(Action.Builder value) {
return addProperty(CoreConstants.PROPERTY_POTENTIAL_ACTION, value.build());
}
@Override
public Car.Builder addPotentialAction(String value) {
return addProperty(CoreConstants.PROPERTY_POTENTIAL_ACTION, Text.of(value));
}
@Override
public Car.Builder addProductID(Text value) {
return addProperty(CoreConstants.PROPERTY_PRODUCT_ID, value);
}
@Override
public Car.Builder addProductID(String value) {
return addProperty(CoreConstants.PROPERTY_PRODUCT_ID, Text.of(value));
}
@Override
public Car.Builder addProductionDate(Date value) {
return addProperty(CoreConstants.PROPERTY_PRODUCTION_DATE, value);
}
@Override
public Car.Builder addProductionDate(String value) {
return addProperty(CoreConstants.PROPERTY_PRODUCTION_DATE, Text.of(value));
}
@Override
public Car.Builder addPurchaseDate(Date value) {
return addProperty(CoreConstants.PROPERTY_PURCHASE_DATE, value);
}
@Override
public Car.Builder addPurchaseDate(String value) {
return addProperty(CoreConstants.PROPERTY_PURCHASE_DATE, Text.of(value));
}
@Override
public Car.Builder addReleaseDate(Date value) {
return addProperty(CoreConstants.PROPERTY_RELEASE_DATE, value);
}
@Override
public Car.Builder addReleaseDate(String value) {
return addProperty(CoreConstants.PROPERTY_RELEASE_DATE, Text.of(value));
}
@Override
public Car.Builder addReview(Review value) {
return addProperty(CoreConstants.PROPERTY_REVIEW, value);
}
@Override
public Car.Builder addReview(Review.Builder value) {
return addProperty(CoreConstants.PROPERTY_REVIEW, value.build());
}
@Override
public Car.Builder addReview(String value) {
return addProperty(CoreConstants.PROPERTY_REVIEW, Text.of(value));
}
@Override
public Car.Builder addReviews(Review value) {
return addProperty(CoreConstants.PROPERTY_REVIEWS, value);
}
@Override
public Car.Builder addReviews(Review.Builder value) {
return addProperty(CoreConstants.PROPERTY_REVIEWS, value.build());
}
@Override
public Car.Builder addReviews(String value) {
return addProperty(CoreConstants.PROPERTY_REVIEWS, Text.of(value));
}
@Override
public Car.Builder addSameAs(URL value) {
return addProperty(CoreConstants.PROPERTY_SAME_AS, value);
}
@Override
public Car.Builder addSameAs(String value) {
return addProperty(CoreConstants.PROPERTY_SAME_AS, Text.of(value));
}
@Override
public Car.Builder addSku(Text value) {
return addProperty(CoreConstants.PROPERTY_SKU, value);
}
@Override
public Car.Builder addSku(String value) {
return addProperty(CoreConstants.PROPERTY_SKU, Text.of(value));
}
@Override
public Car.Builder addSteeringPosition(SteeringPositionValue value) {
return addProperty(CoreConstants.PROPERTY_STEERING_POSITION, value);
}
@Override
public Car.Builder addSteeringPosition(String value) {
return addProperty(CoreConstants.PROPERTY_STEERING_POSITION, Text.of(value));
}
@Override
public Car.Builder addUrl(URL value) {
return addProperty(CoreConstants.PROPERTY_URL, value);
}
@Override
public Car.Builder addUrl(String value) {
return addProperty(CoreConstants.PROPERTY_URL, Text.of(value));
}
@Override
public Car.Builder addVehicleConfiguration(Text value) {
return addProperty(CoreConstants.PROPERTY_VEHICLE_CONFIGURATION, value);
}
@Override
public Car.Builder addVehicleConfiguration(String value) {
return addProperty(CoreConstants.PROPERTY_VEHICLE_CONFIGURATION, Text.of(value));
}
@Override
public Car.Builder addVehicleEngine(EngineSpecification value) {
return addProperty(CoreConstants.PROPERTY_VEHICLE_ENGINE, value);
}
@Override
public Car.Builder addVehicleEngine(EngineSpecification.Builder value) {
return addProperty(CoreConstants.PROPERTY_VEHICLE_ENGINE, value.build());
}
@Override
public Car.Builder addVehicleEngine(String value) {
return addProperty(CoreConstants.PROPERTY_VEHICLE_ENGINE, Text.of(value));
}
@Override
public Car.Builder addVehicleIdentificationNumber(Text value) {
return addProperty(CoreConstants.PROPERTY_VEHICLE_IDENTIFICATION_NUMBER, value);
}
@Override
public Car.Builder addVehicleIdentificationNumber(String value) {
return addProperty(CoreConstants.PROPERTY_VEHICLE_IDENTIFICATION_NUMBER, Text.of(value));
}
@Override
public Car.Builder addVehicleInteriorColor(Text value) {
return addProperty(CoreConstants.PROPERTY_VEHICLE_INTERIOR_COLOR, value);
}
@Override
public Car.Builder addVehicleInteriorColor(String value) {
return addProperty(CoreConstants.PROPERTY_VEHICLE_INTERIOR_COLOR, Text.of(value));
}
@Override
public Car.Builder addVehicleInteriorType(Text value) {
return addProperty(CoreConstants.PROPERTY_VEHICLE_INTERIOR_TYPE, value);
}
@Override
public Car.Builder addVehicleInteriorType(String value) {
return addProperty(CoreConstants.PROPERTY_VEHICLE_INTERIOR_TYPE, Text.of(value));
}
@Override
public Car.Builder addVehicleModelDate(Date value) {
return addProperty(CoreConstants.PROPERTY_VEHICLE_MODEL_DATE, value);
}
@Override
public Car.Builder addVehicleModelDate(String value) {
return addProperty(CoreConstants.PROPERTY_VEHICLE_MODEL_DATE, Text.of(value));
}
@Override
public Car.Builder addVehicleSeatingCapacity(Number value) {
return addProperty(CoreConstants.PROPERTY_VEHICLE_SEATING_CAPACITY, value);
}
@Override
public Car.Builder addVehicleSeatingCapacity(QuantitativeValue value) {
return addProperty(CoreConstants.PROPERTY_VEHICLE_SEATING_CAPACITY, value);
}
@Override
public Car.Builder addVehicleSeatingCapacity(QuantitativeValue.Builder value) {
return addProperty(CoreConstants.PROPERTY_VEHICLE_SEATING_CAPACITY, value.build());
}
@Override
public Car.Builder addVehicleSeatingCapacity(String value) {
return addProperty(CoreConstants.PROPERTY_VEHICLE_SEATING_CAPACITY, Text.of(value));
}
@Override
public Car.Builder addVehicleTransmission(QualitativeValue value) {
return addProperty(CoreConstants.PROPERTY_VEHICLE_TRANSMISSION, value);
}
@Override
public Car.Builder addVehicleTransmission(Text value) {
return addProperty(CoreConstants.PROPERTY_VEHICLE_TRANSMISSION, value);
}
@Override
public Car.Builder addVehicleTransmission(URL value) {
return addProperty(CoreConstants.PROPERTY_VEHICLE_TRANSMISSION, value);
}
@Override
public Car.Builder addVehicleTransmission(String value) {
return addProperty(CoreConstants.PROPERTY_VEHICLE_TRANSMISSION, Text.of(value));
}
@Override
public Car.Builder addWeight(QuantitativeValue value) {
return addProperty(CoreConstants.PROPERTY_WEIGHT, value);
}
@Override
public Car.Builder addWeight(QuantitativeValue.Builder value) {
return addProperty(CoreConstants.PROPERTY_WEIGHT, value.build());
}
@Override
public Car.Builder addWeight(String value) {
return addProperty(CoreConstants.PROPERTY_WEIGHT, Text.of(value));
}
@Override
public Car.Builder addWidth(Distance value) {
return addProperty(CoreConstants.PROPERTY_WIDTH, value);
}
@Override
public Car.Builder addWidth(Distance.Builder value) {
return addProperty(CoreConstants.PROPERTY_WIDTH, value.build());
}
@Override
public Car.Builder addWidth(QuantitativeValue value) {
return addProperty(CoreConstants.PROPERTY_WIDTH, value);
}
@Override
public Car.Builder addWidth(QuantitativeValue.Builder value) {
return addProperty(CoreConstants.PROPERTY_WIDTH, value.build());
}
@Override
public Car.Builder addWidth(String value) {
return addProperty(CoreConstants.PROPERTY_WIDTH, Text.of(value));
}
@Override
public Car.Builder addDetailedDescription(Article value) {
return addProperty(GoogConstants.PROPERTY_DETAILED_DESCRIPTION, value);
}
@Override
public Car.Builder addDetailedDescription(Article.Builder value) {
return addProperty(GoogConstants.PROPERTY_DETAILED_DESCRIPTION, value.build());
}
@Override
public Car.Builder addDetailedDescription(String value) {
return addProperty(GoogConstants.PROPERTY_DETAILED_DESCRIPTION, Text.of(value));
}
@Override
public Car.Builder addPopularityScore(PopularityScoreSpecification value) {
return addProperty(GoogConstants.PROPERTY_POPULARITY_SCORE, value);
}
@Override
public Car.Builder addPopularityScore(PopularityScoreSpecification.Builder value) {
return addProperty(GoogConstants.PROPERTY_POPULARITY_SCORE, value.build());
}
@Override
public Car.Builder addPopularityScore(String value) {
return addProperty(GoogConstants.PROPERTY_POPULARITY_SCORE, Text.of(value));
}
@Override
public Car build() {
return new CarImpl(properties, reverseMap);
}
}
public CarImpl(Multimap<String, ValueType> properties, Multimap<String, Thing> reverseMap) {
super(properties, reverseMap);
}
@Override
public String getFullTypeName() {
return CoreConstants.TYPE_CAR;
}
@Override
public boolean includesProperty(String property) {
return PROPERTY_SET.contains(CoreConstants.NAMESPACE + property)
|| PROPERTY_SET.contains(GoogConstants.NAMESPACE + property)
|| PROPERTY_SET.contains(property);
}
}
|
openjdk/jdk8 | 35,565 | jdk/src/share/classes/com/sun/org/apache/xml/internal/security/utils/XMLUtils.java | /*
* reserved comment block
* DO NOT REMOVE OR ALTER!
*/
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.sun.org.apache.xml.internal.security.utils;
import java.io.IOException;
import java.io.OutputStream;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import com.sun.org.apache.xml.internal.security.c14n.CanonicalizationException;
import com.sun.org.apache.xml.internal.security.c14n.Canonicalizer;
import com.sun.org.apache.xml.internal.security.c14n.InvalidCanonicalizerException;
import org.w3c.dom.Attr;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NamedNodeMap;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.w3c.dom.ProcessingInstruction;
import org.w3c.dom.Text;
/**
* DOM and XML accessibility and comfort functions.
*
* @author Christian Geuer-Pollmann
*/
public class XMLUtils {
private static boolean ignoreLineBreaks =
AccessController.doPrivileged(new PrivilegedAction<Boolean>() {
public Boolean run() {
return Boolean.valueOf(Boolean.getBoolean
("com.sun.org.apache.xml.internal.security.ignoreLineBreaks"));
}
}).booleanValue();
private static volatile String dsPrefix = "ds";
private static volatile String ds11Prefix = "dsig11";
private static volatile String xencPrefix = "xenc";
private static volatile String xenc11Prefix = "xenc11";
/** {@link org.apache.commons.logging} logging facility */
private static final java.util.logging.Logger log =
java.util.logging.Logger.getLogger(XMLUtils.class.getName());
/**
* Constructor XMLUtils
*
*/
private XMLUtils() {
// we don't allow instantiation
}
/**
* Set the prefix for the digital signature namespace
* @param prefix the new prefix for the digital signature namespace
*/
public static void setDsPrefix(String prefix) {
dsPrefix = prefix;
}
/**
* Set the prefix for the digital signature 1.1 namespace
* @param prefix the new prefix for the digital signature 1.1 namespace
*/
public static void setDs11Prefix(String prefix) {
ds11Prefix = prefix;
}
/**
* Set the prefix for the encryption namespace
* @param prefix the new prefix for the encryption namespace
*/
public static void setXencPrefix(String prefix) {
xencPrefix = prefix;
}
/**
* Set the prefix for the encryption namespace 1.1
* @param prefix the new prefix for the encryption namespace 1.1
*/
public static void setXenc11Prefix(String prefix) {
xenc11Prefix = prefix;
}
public static Element getNextElement(Node el) {
Node node = el;
while ((node != null) && (node.getNodeType() != Node.ELEMENT_NODE)) {
node = node.getNextSibling();
}
return (Element)node;
}
/**
* @param rootNode
* @param result
* @param exclude
* @param com whether comments or not
*/
public static void getSet(Node rootNode, Set<Node> result, Node exclude, boolean com) {
if ((exclude != null) && isDescendantOrSelf(exclude, rootNode)) {
return;
}
getSetRec(rootNode, result, exclude, com);
}
@SuppressWarnings("fallthrough")
private static void getSetRec(final Node rootNode, final Set<Node> result,
final Node exclude, final boolean com) {
if (rootNode == exclude) {
return;
}
switch (rootNode.getNodeType()) {
case Node.ELEMENT_NODE:
result.add(rootNode);
Element el = (Element)rootNode;
if (el.hasAttributes()) {
NamedNodeMap nl = el.getAttributes();
for (int i = 0;i < nl.getLength(); i++) {
result.add(nl.item(i));
}
}
//no return keep working
case Node.DOCUMENT_NODE:
for (Node r = rootNode.getFirstChild(); r != null; r = r.getNextSibling()) {
if (r.getNodeType() == Node.TEXT_NODE) {
result.add(r);
while ((r != null) && (r.getNodeType() == Node.TEXT_NODE)) {
r = r.getNextSibling();
}
if (r == null) {
return;
}
}
getSetRec(r, result, exclude, com);
}
return;
case Node.COMMENT_NODE:
if (com) {
result.add(rootNode);
}
return;
case Node.DOCUMENT_TYPE_NODE:
return;
default:
result.add(rootNode);
}
}
/**
* Outputs a DOM tree to an {@link OutputStream}.
*
* @param contextNode root node of the DOM tree
* @param os the {@link OutputStream}
*/
public static void outputDOM(Node contextNode, OutputStream os) {
XMLUtils.outputDOM(contextNode, os, false);
}
/**
* Outputs a DOM tree to an {@link OutputStream}. <I>If an Exception is
* thrown during execution, it's StackTrace is output to System.out, but the
* Exception is not re-thrown.</I>
*
* @param contextNode root node of the DOM tree
* @param os the {@link OutputStream}
* @param addPreamble
*/
public static void outputDOM(Node contextNode, OutputStream os, boolean addPreamble) {
try {
if (addPreamble) {
os.write("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n".getBytes("UTF-8"));
}
os.write(Canonicalizer.getInstance(
Canonicalizer.ALGO_ID_C14N_WITH_COMMENTS).canonicalizeSubtree(contextNode)
);
} catch (IOException ex) {
if (log.isLoggable(java.util.logging.Level.FINE)) {
log.log(java.util.logging.Level.FINE, ex.getMessage(), ex);
}
}
catch (InvalidCanonicalizerException ex) {
if (log.isLoggable(java.util.logging.Level.FINE)) {
log.log(java.util.logging.Level.FINE, ex.getMessage(), ex);
}
} catch (CanonicalizationException ex) {
if (log.isLoggable(java.util.logging.Level.FINE)) {
log.log(java.util.logging.Level.FINE, ex.getMessage(), ex);
}
}
}
/**
* Serializes the <CODE>contextNode</CODE> into the OutputStream, <I>but
* suppresses all Exceptions</I>.
* <BR />
* NOTE: <I>This should only be used for debugging purposes,
* NOT in a production environment; this method ignores all exceptions,
* so you won't notice if something goes wrong. If you're asking what is to
* be used in a production environment, simply use the code inside the
* <code>try{}</code> statement, but handle the Exceptions appropriately.</I>
*
* @param contextNode
* @param os
*/
public static void outputDOMc14nWithComments(Node contextNode, OutputStream os) {
try {
os.write(Canonicalizer.getInstance(
Canonicalizer.ALGO_ID_C14N_WITH_COMMENTS).canonicalizeSubtree(contextNode)
);
} catch (IOException ex) {
if (log.isLoggable(java.util.logging.Level.FINE)) {
log.log(java.util.logging.Level.FINE, ex.getMessage(), ex);
}
// throw new RuntimeException(ex.getMessage());
} catch (InvalidCanonicalizerException ex) {
if (log.isLoggable(java.util.logging.Level.FINE)) {
log.log(java.util.logging.Level.FINE, ex.getMessage(), ex);
}
// throw new RuntimeException(ex.getMessage());
} catch (CanonicalizationException ex) {
if (log.isLoggable(java.util.logging.Level.FINE)) {
log.log(java.util.logging.Level.FINE, ex.getMessage(), ex);
}
// throw new RuntimeException(ex.getMessage());
}
}
/**
* Method getFullTextChildrenFromElement
*
* @param element
* @return the string of children
*/
public static String getFullTextChildrenFromElement(Element element) {
StringBuilder sb = new StringBuilder();
Node child = element.getFirstChild();
while (child != null) {
if (child.getNodeType() == Node.TEXT_NODE) {
sb.append(((Text)child).getData());
}
child = child.getNextSibling();
}
return sb.toString();
}
/**
* Creates an Element in the XML Signature specification namespace.
*
* @param doc the factory Document
* @param elementName the local name of the Element
* @return the Element
*/
public static Element createElementInSignatureSpace(Document doc, String elementName) {
if (doc == null) {
throw new RuntimeException("Document is null");
}
if ((dsPrefix == null) || (dsPrefix.length() == 0)) {
return doc.createElementNS(Constants.SignatureSpecNS, elementName);
}
return doc.createElementNS(Constants.SignatureSpecNS, dsPrefix + ":" + elementName);
}
/**
* Creates an Element in the XML Signature 1.1 specification namespace.
*
* @param doc the factory Document
* @param elementName the local name of the Element
* @return the Element
*/
public static Element createElementInSignature11Space(Document doc, String elementName) {
if (doc == null) {
throw new RuntimeException("Document is null");
}
if ((ds11Prefix == null) || (ds11Prefix.length() == 0)) {
return doc.createElementNS(Constants.SignatureSpec11NS, elementName);
}
return doc.createElementNS(Constants.SignatureSpec11NS, ds11Prefix + ":" + elementName);
}
/**
* Creates an Element in the XML Encryption specification namespace.
*
* @param doc the factory Document
* @param elementName the local name of the Element
* @return the Element
*/
public static Element createElementInEncryptionSpace(Document doc, String elementName) {
if (doc == null) {
throw new RuntimeException("Document is null");
}
if ((xencPrefix == null) || (xencPrefix.length() == 0)) {
return doc.createElementNS(EncryptionConstants.EncryptionSpecNS, elementName);
}
return
doc.createElementNS(
EncryptionConstants.EncryptionSpecNS, xencPrefix + ":" + elementName
);
}
/**
* Creates an Element in the XML Encryption 1.1 specification namespace.
*
* @param doc the factory Document
* @param elementName the local name of the Element
* @return the Element
*/
public static Element createElementInEncryption11Space(Document doc, String elementName) {
if (doc == null) {
throw new RuntimeException("Document is null");
}
if ((xenc11Prefix == null) || (xenc11Prefix.length() == 0)) {
return doc.createElementNS(EncryptionConstants.EncryptionSpec11NS, elementName);
}
return
doc.createElementNS(
EncryptionConstants.EncryptionSpec11NS, xenc11Prefix + ":" + elementName
);
}
/**
* Returns true if the element is in XML Signature namespace and the local
* name equals the supplied one.
*
* @param element
* @param localName
* @return true if the element is in XML Signature namespace and the local name equals
* the supplied one
*/
public static boolean elementIsInSignatureSpace(Element element, String localName) {
if (element == null){
return false;
}
return Constants.SignatureSpecNS.equals(element.getNamespaceURI())
&& element.getLocalName().equals(localName);
}
/**
* Returns true if the element is in XML Signature 1.1 namespace and the local
* name equals the supplied one.
*
* @param element
* @param localName
* @return true if the element is in XML Signature namespace and the local name equals
* the supplied one
*/
public static boolean elementIsInSignature11Space(Element element, String localName) {
if (element == null) {
return false;
}
return Constants.SignatureSpec11NS.equals(element.getNamespaceURI())
&& element.getLocalName().equals(localName);
}
/**
* Returns true if the element is in XML Encryption namespace and the local
* name equals the supplied one.
*
* @param element
* @param localName
* @return true if the element is in XML Encryption namespace and the local name
* equals the supplied one
*/
public static boolean elementIsInEncryptionSpace(Element element, String localName) {
if (element == null){
return false;
}
return EncryptionConstants.EncryptionSpecNS.equals(element.getNamespaceURI())
&& element.getLocalName().equals(localName);
}
/**
* Returns true if the element is in XML Encryption 1.1 namespace and the local
* name equals the supplied one.
*
* @param element
* @param localName
* @return true if the element is in XML Encryption 1.1 namespace and the local name
* equals the supplied one
*/
public static boolean elementIsInEncryption11Space(Element element, String localName) {
if (element == null){
return false;
}
return EncryptionConstants.EncryptionSpec11NS.equals(element.getNamespaceURI())
&& element.getLocalName().equals(localName);
}
/**
* This method returns the owner document of a particular node.
* This method is necessary because it <I>always</I> returns a
* {@link Document}. {@link Node#getOwnerDocument} returns <CODE>null</CODE>
* if the {@link Node} is a {@link Document}.
*
* @param node
* @return the owner document of the node
*/
public static Document getOwnerDocument(Node node) {
if (node.getNodeType() == Node.DOCUMENT_NODE) {
return (Document) node;
}
try {
return node.getOwnerDocument();
} catch (NullPointerException npe) {
throw new NullPointerException(I18n.translate("endorsed.jdk1.4.0")
+ " Original message was \""
+ npe.getMessage() + "\"");
}
}
/**
* This method returns the first non-null owner document of the Nodes in this Set.
* This method is necessary because it <I>always</I> returns a
* {@link Document}. {@link Node#getOwnerDocument} returns <CODE>null</CODE>
* if the {@link Node} is a {@link Document}.
*
* @param xpathNodeSet
* @return the owner document
*/
public static Document getOwnerDocument(Set<Node> xpathNodeSet) {
NullPointerException npe = null;
for (Node node : xpathNodeSet) {
int nodeType = node.getNodeType();
if (nodeType == Node.DOCUMENT_NODE) {
return (Document) node;
}
try {
if (nodeType == Node.ATTRIBUTE_NODE) {
return ((Attr)node).getOwnerElement().getOwnerDocument();
}
return node.getOwnerDocument();
} catch (NullPointerException e) {
npe = e;
}
}
throw new NullPointerException(I18n.translate("endorsed.jdk1.4.0")
+ " Original message was \""
+ (npe == null ? "" : npe.getMessage()) + "\"");
}
/**
* Method createDSctx
*
* @param doc
* @param prefix
* @param namespace
* @return the element.
*/
public static Element createDSctx(Document doc, String prefix, String namespace) {
if ((prefix == null) || (prefix.trim().length() == 0)) {
throw new IllegalArgumentException("You must supply a prefix");
}
Element ctx = doc.createElementNS(null, "namespaceContext");
ctx.setAttributeNS(Constants.NamespaceSpecNS, "xmlns:" + prefix.trim(), namespace);
return ctx;
}
/**
* Method addReturnToElement
*
* @param e
*/
public static void addReturnToElement(Element e) {
if (!ignoreLineBreaks) {
Document doc = e.getOwnerDocument();
e.appendChild(doc.createTextNode("\n"));
}
}
public static void addReturnToElement(Document doc, HelperNodeList nl) {
if (!ignoreLineBreaks) {
nl.appendChild(doc.createTextNode("\n"));
}
}
public static void addReturnBeforeChild(Element e, Node child) {
if (!ignoreLineBreaks) {
Document doc = e.getOwnerDocument();
e.insertBefore(doc.createTextNode("\n"), child);
}
}
/**
* Method convertNodelistToSet
*
* @param xpathNodeSet
* @return the set with the nodelist
*/
public static Set<Node> convertNodelistToSet(NodeList xpathNodeSet) {
if (xpathNodeSet == null) {
return new HashSet<Node>();
}
int length = xpathNodeSet.getLength();
Set<Node> set = new HashSet<Node>(length);
for (int i = 0; i < length; i++) {
set.add(xpathNodeSet.item(i));
}
return set;
}
    /**
     * This method spreads all namespace attributes in a DOM document to their
     * children. This is needed because the XML Signature XPath transform
     * must evaluate the XPath against all nodes in the input, even against
     * XPath namespace nodes. Through a bug in XalanJ2, the namespace nodes are
     * not fully visible in the Xalan XPath model, so we have to do this by
     * hand in DOM spaces so that the nodes become visible in XPath space.
     *
     * @param doc
     * @see <A HREF="http://nagoya.apache.org/bugzilla/show_bug.cgi?id=2650">
     * Namespace axis resolution is not XPath compliant </A>
     */
    public static void circumventBug2650(Document doc) {
        Element documentElement = doc.getDocumentElement();
        // if the document element has no xmlns definition, we add xmlns=""
        // so that the default-namespace declaration is also propagated below.
        Attr xmlnsAttr =
            documentElement.getAttributeNodeNS(Constants.NamespaceSpecNS, "xmlns");
        if (xmlnsAttr == null) {
            documentElement.setAttributeNS(Constants.NamespaceSpecNS, "xmlns", "");
        }
        // Delegate the actual recursive attribute propagation.
        XMLUtils.circumventBug2650internal(doc);
    }
    /**
     * This is the work horse for {@link #circumventBug2650}: walks the whole
     * document iteratively in document order and copies every xmlns/xmlns:*
     * attribute of each element onto those child elements that do not already
     * declare the same prefix.
     *
     * @param node the root of the subtree to process (normally the Document)
     * @see <A HREF="http://nagoya.apache.org/bugzilla/show_bug.cgi?id=2650">
     * Namespace axis resolution is not XPath compliant </A>
     */
    @SuppressWarnings("fallthrough")
    private static void circumventBug2650internal(Node node) {
        Node parent = null;
        Node sibling = null;
        final String namespaceNs = Constants.NamespaceSpecNS;
        do {
            // ELEMENT_NODE intentionally falls through to the descend logic
            // below (hence the @SuppressWarnings("fallthrough") above).
            switch (node.getNodeType()) {
            case Node.ELEMENT_NODE :
                Element element = (Element) node;
                if (!element.hasChildNodes()) {
                    break;
                }
                if (element.hasAttributes()) {
                    NamedNodeMap attributes = element.getAttributes();
                    int attributesLength = attributes.getLength();
                    for (Node child = element.getFirstChild(); child!=null;
                        child = child.getNextSibling()) {
                        if (child.getNodeType() != Node.ELEMENT_NODE) {
                            continue;
                        }
                        Element childElement = (Element) child;
                        for (int i = 0; i < attributesLength; i++) {
                            Attr currentAttr = (Attr) attributes.item(i);
                            // only propagate namespace declarations
                            if (!namespaceNs.equals(currentAttr.getNamespaceURI())) {
                                continue;
                            }
                            // child's own declaration for this prefix wins
                            if (childElement.hasAttributeNS(namespaceNs,
                                                            currentAttr.getLocalName())) {
                                continue;
                            }
                            childElement.setAttributeNS(namespaceNs,
                                                        currentAttr.getName(),
                                                        currentAttr.getNodeValue());
                        }
                    }
                }
            case Node.ENTITY_REFERENCE_NODE :
            case Node.DOCUMENT_NODE :
                // descend into this node's children first
                parent = node;
                sibling = node.getFirstChild();
                break;
            }
            // no child: climb until a next sibling exists (document-order walk)
            while ((sibling == null) && (parent != null)) {
                sibling = parent.getNextSibling();
                parent = parent.getParentNode();
            }
            if (sibling == null) {
                return;
            }
            node = sibling;
            sibling = node.getNextSibling();
        } while (true);
    }
/**
* @param sibling
* @param nodeName
* @param number
* @return nodes with the constraint
*/
public static Element selectDsNode(Node sibling, String nodeName, int number) {
while (sibling != null) {
if (Constants.SignatureSpecNS.equals(sibling.getNamespaceURI())
&& sibling.getLocalName().equals(nodeName)) {
if (number == 0){
return (Element)sibling;
}
number--;
}
sibling = sibling.getNextSibling();
}
return null;
}
/**
* @param sibling
* @param nodeName
* @param number
* @return nodes with the constraint
*/
public static Element selectDs11Node(Node sibling, String nodeName, int number) {
while (sibling != null) {
if (Constants.SignatureSpec11NS.equals(sibling.getNamespaceURI())
&& sibling.getLocalName().equals(nodeName)) {
if (number == 0){
return (Element)sibling;
}
number--;
}
sibling = sibling.getNextSibling();
}
return null;
}
/**
* @param sibling
* @param nodeName
* @param number
* @return nodes with the constrain
*/
public static Element selectXencNode(Node sibling, String nodeName, int number) {
while (sibling != null) {
if (EncryptionConstants.EncryptionSpecNS.equals(sibling.getNamespaceURI())
&& sibling.getLocalName().equals(nodeName)) {
if (number == 0){
return (Element)sibling;
}
number--;
}
sibling = sibling.getNextSibling();
}
return null;
}
/**
* @param sibling
* @param nodeName
* @param number
* @return nodes with the constrain
*/
public static Text selectDsNodeText(Node sibling, String nodeName, int number) {
Node n = selectDsNode(sibling,nodeName,number);
if (n == null) {
return null;
}
n = n.getFirstChild();
while (n != null && n.getNodeType() != Node.TEXT_NODE) {
n = n.getNextSibling();
}
return (Text)n;
}
/**
* @param sibling
* @param nodeName
* @param number
* @return nodes with the constrain
*/
public static Text selectDs11NodeText(Node sibling, String nodeName, int number) {
Node n = selectDs11Node(sibling,nodeName,number);
if (n == null) {
return null;
}
n = n.getFirstChild();
while (n != null && n.getNodeType() != Node.TEXT_NODE) {
n = n.getNextSibling();
}
return (Text)n;
}
/**
* @param sibling
* @param uri
* @param nodeName
* @param number
* @return nodes with the constrain
*/
public static Text selectNodeText(Node sibling, String uri, String nodeName, int number) {
Node n = selectNode(sibling,uri,nodeName,number);
if (n == null) {
return null;
}
n = n.getFirstChild();
while (n != null && n.getNodeType() != Node.TEXT_NODE) {
n = n.getNextSibling();
}
return (Text)n;
}
/**
* @param sibling
* @param uri
* @param nodeName
* @param number
* @return nodes with the constrain
*/
public static Element selectNode(Node sibling, String uri, String nodeName, int number) {
while (sibling != null) {
if (sibling.getNamespaceURI() != null && sibling.getNamespaceURI().equals(uri)
&& sibling.getLocalName().equals(nodeName)) {
if (number == 0){
return (Element)sibling;
}
number--;
}
sibling = sibling.getNextSibling();
}
return null;
}
    /**
     * Returns all siblings (starting with {@code sibling} itself) that are
     * XML Signature ({@code ds:}) elements with the given local name.
     *
     * @param sibling the first node of the sibling run to search; may be null
     * @param nodeName the required local name
     * @return the matching elements in document order (possibly empty)
     */
    public static Element[] selectDsNodes(Node sibling, String nodeName) {
        return selectNodes(sibling, Constants.SignatureSpecNS, nodeName);
    }
    /**
     * Returns all siblings (starting with {@code sibling} itself) that are
     * XML Signature 1.1 ({@code dsig11:}) elements with the given local name.
     *
     * @param sibling the first node of the sibling run to search; may be null
     * @param nodeName the required local name
     * @return the matching elements in document order (possibly empty)
     */
    public static Element[] selectDs11Nodes(Node sibling, String nodeName) {
        return selectNodes(sibling, Constants.SignatureSpec11NS, nodeName);
    }
/**
* @param sibling
* @param uri
* @param nodeName
* @return nodes with the constraint
*/
public static Element[] selectNodes(Node sibling, String uri, String nodeName) {
List<Element> list = new ArrayList<Element>();
while (sibling != null) {
if (sibling.getNamespaceURI() != null && sibling.getNamespaceURI().equals(uri)
&& sibling.getLocalName().equals(nodeName)) {
list.add((Element)sibling);
}
sibling = sibling.getNextSibling();
}
return list.toArray(new Element[list.size()]);
}
/**
* @param signatureElement
* @param inputSet
* @return nodes with the constrain
*/
public static Set<Node> excludeNodeFromSet(Node signatureElement, Set<Node> inputSet) {
Set<Node> resultSet = new HashSet<Node>();
Iterator<Node> iterator = inputSet.iterator();
while (iterator.hasNext()) {
Node inputNode = iterator.next();
if (!XMLUtils.isDescendantOrSelf(signatureElement, inputNode)) {
resultSet.add(inputNode);
}
}
return resultSet;
}
/**
* Method getStrFromNode
*
* @param xpathnode
* @return the string for the node.
*/
public static String getStrFromNode(Node xpathnode) {
if (xpathnode.getNodeType() == Node.TEXT_NODE) {
// we iterate over all siblings of the context node because eventually,
// the text is "polluted" with pi's or comments
StringBuilder sb = new StringBuilder();
for (Node currentSibling = xpathnode.getParentNode().getFirstChild();
currentSibling != null;
currentSibling = currentSibling.getNextSibling()) {
if (currentSibling.getNodeType() == Node.TEXT_NODE) {
sb.append(((Text) currentSibling).getData());
}
}
return sb.toString();
} else if (xpathnode.getNodeType() == Node.ATTRIBUTE_NODE) {
return ((Attr) xpathnode).getNodeValue();
} else if (xpathnode.getNodeType() == Node.PROCESSING_INSTRUCTION_NODE) {
return ((ProcessingInstruction) xpathnode).getNodeValue();
}
return null;
}
/**
* Returns true if the descendantOrSelf is on the descendant-or-self axis
* of the context node.
*
* @param ctx
* @param descendantOrSelf
* @return true if the node is descendant
*/
public static boolean isDescendantOrSelf(Node ctx, Node descendantOrSelf) {
if (ctx == descendantOrSelf) {
return true;
}
Node parent = descendantOrSelf;
while (true) {
if (parent == null) {
return false;
}
if (parent == ctx) {
return true;
}
if (parent.getNodeType() == Node.ATTRIBUTE_NODE) {
parent = ((Attr) parent).getOwnerElement();
} else {
parent = parent.getParentNode();
}
}
}
    /**
     * @return whether the helper methods suppress emitting "\n" pretty-printing
     *         text nodes (see {@link #addReturnToElement})
     */
    public static boolean ignoreLineBreaks() {
        return ignoreLineBreaks;
    }
/**
* Returns the attribute value for the attribute with the specified name.
* Returns null if there is no such attribute, or
* the empty string if the attribute value is empty.
*
* <p>This works around a limitation of the DOM
* <code>Element.getAttributeNode</code> method, which does not distinguish
* between an unspecified attribute and an attribute with a value of
* "" (it returns "" for both cases).
*
* @param elem the element containing the attribute
* @param name the name of the attribute
* @return the attribute value (may be null if unspecified)
*/
public static String getAttributeValue(Element elem, String name) {
Attr attr = elem.getAttributeNodeNS(null, name);
return (attr == null) ? null : attr.getValue();
}
/**
* This method is a tree-search to help prevent against wrapping attacks. It checks that no
* two Elements have ID Attributes that match the "value" argument, if this is the case then
* "false" is returned. Note that a return value of "true" does not necessarily mean that
* a matching Element has been found, just that no wrapping attack has been detected.
*/
public static boolean protectAgainstWrappingAttack(Node startNode, String value) {
Node startParent = startNode.getParentNode();
Node processedNode = null;
Element foundElement = null;
String id = value.trim();
if (id.charAt(0) == '#') {
id = id.substring(1);
}
while (startNode != null) {
if (startNode.getNodeType() == Node.ELEMENT_NODE) {
Element se = (Element) startNode;
NamedNodeMap attributes = se.getAttributes();
if (attributes != null) {
for (int i = 0; i < attributes.getLength(); i++) {
Attr attr = (Attr)attributes.item(i);
if (attr.isId() && id.equals(attr.getValue())) {
if (foundElement == null) {
// Continue searching to find duplicates
foundElement = attr.getOwnerElement();
} else {
log.log(java.util.logging.Level.FINE, "Multiple elements with the same 'Id' attribute value!");
return false;
}
}
}
}
}
processedNode = startNode;
startNode = startNode.getFirstChild();
// no child, this node is done.
if (startNode == null) {
// close node processing, get sibling
startNode = processedNode.getNextSibling();
}
// no more siblings, get parent, all children
// of parent are processed.
while (startNode == null) {
processedNode = processedNode.getParentNode();
if (processedNode == startParent) {
return true;
}
// close parent node processing (processed node now)
startNode = processedNode.getNextSibling();
}
}
return true;
}
/**
* This method is a tree-search to help prevent against wrapping attacks. It checks that no other
* Element than the given "knownElement" argument has an ID attribute that matches the "value"
* argument, which is the ID value of "knownElement". If this is the case then "false" is returned.
*/
public static boolean protectAgainstWrappingAttack(
Node startNode, Element knownElement, String value
) {
Node startParent = startNode.getParentNode();
Node processedNode = null;
String id = value.trim();
if (id.charAt(0) == '#') {
id = id.substring(1);
}
while (startNode != null) {
if (startNode.getNodeType() == Node.ELEMENT_NODE) {
Element se = (Element) startNode;
NamedNodeMap attributes = se.getAttributes();
if (attributes != null) {
for (int i = 0; i < attributes.getLength(); i++) {
Attr attr = (Attr)attributes.item(i);
if (attr.isId() && id.equals(attr.getValue()) && se != knownElement) {
log.log(java.util.logging.Level.FINE, "Multiple elements with the same 'Id' attribute value!");
return false;
}
}
}
}
processedNode = startNode;
startNode = startNode.getFirstChild();
// no child, this node is done.
if (startNode == null) {
// close node processing, get sibling
startNode = processedNode.getNextSibling();
}
// no more siblings, get parent, all children
// of parent are processed.
while (startNode == null) {
processedNode = processedNode.getParentNode();
if (processedNode == startParent) {
return true;
}
// close parent node processing (processed node now)
startNode = processedNode.getNextSibling();
}
}
return true;
}
}
|
apache/harmony | 35,584 | classlib/modules/swing/src/test/api/java.injected/javax/swing/JComboBoxTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* @author Anton Avtamonov
*/
package javax.swing;
import java.awt.Component;
import java.awt.EventQueue;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.ItemEvent;
import java.awt.event.ItemListener;
import java.awt.event.KeyEvent;
import java.util.ArrayList;
import java.util.List;
import java.util.Vector;
import javax.swing.event.ListDataListener;
import javax.swing.event.PopupMenuEvent;
import javax.swing.event.PopupMenuListener;
import javax.swing.plaf.ComboBoxUI;
import javax.swing.plaf.basic.BasicComboBoxEditor;
@SuppressWarnings("serial")
public class JComboBoxTest extends SwingTestCase {
    // Fixture under test; re-created for every test method in setUp().
    private JComboBox comboBox;
    // Frame used by tests that need a displayable combo box; stays null otherwise.
    private JFrame frame;
    /** Creates the test case; not-implemented failures are tolerated (Harmony policy). */
    public JComboBoxTest(final String name) {
        super(name);
        setIgnoreNotImplemented(true);
    }
    /** Builds a fresh combo box and attaches a property-change recorder before each test. */
    @Override
    protected void setUp() throws Exception {
        comboBox = new JComboBox();
        propertyChangeController = new PropertyChangeController();
        comboBox.addPropertyChangeListener(propertyChangeController);
    }
    /** Releases the fixture and disposes the frame if a test created one. */
    @Override
    protected void tearDown() throws Exception {
        comboBox = null;
        propertyChangeController = null;
        if (frame != null) {
            frame.dispose();
            frame = null;
        }
    }
    /** Checks the model set up by each JComboBox constructor variant. */
    public void testJComboBox() throws Exception {
        assertNotNull(comboBox.dataModel);
        assertEquals(comboBox.dataModel, comboBox.getModel());
        assertTrue(comboBox.dataModel instanceof DefaultComboBoxModel);
        DefaultComboBoxModel newModel = new DefaultComboBoxModel();
        comboBox = new JComboBox(newModel);
        assertEquals(newModel, comboBox.getModel());
        comboBox = new JComboBox(new Object[] { "1", "2", "3" });
        assertEquals(3, comboBox.getModel().getSize());
        Vector<String> newData = new Vector<String>();
        newData.add("1");
        newData.add("2");
        comboBox = new JComboBox(newData);
        assertEquals(2, comboBox.getModel().getSize());
        // construction must not fire property changes on the original listener
        assertFalse(propertyChangeController.isChanged());
    }
    /** Verifies that setUI() installs the supplied ComboBoxUI delegate. */
    public void testSetUI() throws Exception {
        assertNotNull(comboBox.getUI());
        ComboBoxUI newUI = new ComboBoxUI() {
            @Override
            public boolean isFocusTraversable(final JComboBox arg0) {
                return false;
            }
            @Override
            public boolean isPopupVisible(final JComboBox arg0) {
                return false;
            }
            @Override
            public void setPopupVisible(final JComboBox arg0, final boolean arg1) {
            }
        };
        comboBox.setUI(newUI);
        assertEquals(newUI, comboBox.getUI());
    }
    /** Verifies the UI delegate class id string. */
    public void testGetUIClassID() throws Exception {
        assertEquals("ComboBoxUI", comboBox.getUIClassID());
    }
    /** setModel() replaces the model, fires "model", and rejects null. */
    public void testGetSetModel() throws Exception {
        assertNotNull(comboBox.getModel());
        DefaultComboBoxModel newModel = new DefaultComboBoxModel();
        comboBox.setModel(newModel);
        assertEquals(newModel, comboBox.getModel());
        assertTrue(propertyChangeController.isChanged("model"));
        testExceptionalCase(new ExceptionalCase() {
            @Override
            public void exceptionalAction() throws Exception {
                comboBox.setModel(null);
            }
        });
    }
    /** lightWeightPopupEnabled defaults to true; a real change fires the property. */
    public void testSetIsLightWeightPopupEnabled() throws Exception {
        assertTrue(comboBox.isLightWeightPopupEnabled());
        comboBox.setLightWeightPopupEnabled(true);
        // setting the same value again must not fire
        assertFalse(propertyChangeController.isChanged());
        comboBox.setLightWeightPopupEnabled(false);
        assertFalse(comboBox.isLightWeightPopupEnabled());
        assertTrue(propertyChangeController.isChanged("lightWeightPopupEnabled"));
    }
    /** editable defaults to false and fires "editable" on change. */
    public void testSetIsEditable() throws Exception {
        assertFalse(comboBox.isEditable());
        comboBox.setEditable(true);
        assertTrue(comboBox.isEditable());
        assertTrue(propertyChangeController.isChanged("editable"));
    }
    /** maximumRowCount defaults to 8 and accepts even negative values verbatim. */
    public void testGetSetMaximumRowCount() throws Exception {
        assertEquals(8, comboBox.getMaximumRowCount());
        comboBox.setMaximumRowCount(-3);
        assertEquals(-3, comboBox.getMaximumRowCount());
        assertTrue(propertyChangeController.isChanged("maximumRowCount"));
        propertyChangeController.reset();
        comboBox.setMaximumRowCount(5);
        assertEquals(5, comboBox.getMaximumRowCount());
        assertTrue(propertyChangeController.isChanged("maximumRowCount"));
    }
    /** setRenderer() replaces the renderer and fires "renderer". */
    public void testGetSetRenderer() throws Exception {
        assertNotNull(comboBox.getRenderer());
        DefaultListCellRenderer newRenderer = new DefaultListCellRenderer();
        comboBox.setRenderer(newRenderer);
        assertEquals(newRenderer, comboBox.getRenderer());
        assertTrue(propertyChangeController.isChanged("renderer"));
    }
    /** setEditor() replaces the editor, fires "editor", and accepts null. */
    public void testGetSetEditor() throws Exception {
        assertNotNull(comboBox.getEditor());
        ComboBoxEditor newEditor = new BasicComboBoxEditor();
        comboBox.setEditor(newEditor);
        assertEquals(newEditor, comboBox.getEditor());
        assertTrue(propertyChangeController.isChanged("editor"));
        comboBox.setEditor(null);
        assertNull(comboBox.getEditor());
    }
    /**
     * Selection semantics of setSelectedItem(): on a non-editable box an item
     * absent from the model is ignored, on an editable box it is accepted with
     * index -1; item/action events fire accordingly.
     */
    public void testSetGetSelectedItem() throws Exception {
        ItemController itemController = new ItemController();
        comboBox.addItemListener(itemController);
        ActionController actionController = new ActionController();
        comboBox.addActionListener(actionController);
        assertNull(comboBox.getSelectedItem());
        // non-editable + empty model: selection request is silently dropped
        comboBox.setSelectedItem("a");
        assertNull(comboBox.getSelectedItem());
        assertEquals(-1, comboBox.getSelectedIndex());
        assertNull(actionController.getEvent());
        assertTrue(itemController.getEvents().isEmpty());
        actionController.reset();
        itemController.reset();
        // editable: arbitrary values become the selection (index stays -1)
        comboBox.setEditable(true);
        comboBox.setSelectedItem("a");
        assertEquals("a", comboBox.getSelectedItem());
        assertEquals(-1, comboBox.getSelectedIndex());
        assertEquals(1, itemController.getEvents().size());
        assertEquals(ItemEvent.SELECTED, itemController.getEvents().get(0).getStateChange());
        assertEquals(ItemEvent.ITEM_STATE_CHANGED, itemController.getEvents().get(0).getID());
        assertNotNull(actionController.getEvent());
        actionController.reset();
        itemController.reset();
        // replacing the selection fires DESELECTED then SELECTED
        comboBox.setSelectedItem("b");
        assertEquals("b", comboBox.getSelectedItem());
        assertEquals(-1, comboBox.getSelectedIndex());
        assertEquals(2, itemController.getEvents().size());
        assertEquals(ItemEvent.DESELECTED, itemController.getEvents().get(0).getStateChange());
        assertEquals(ItemEvent.ITEM_STATE_CHANGED, itemController.getEvents().get(0).getID());
        assertEquals(ItemEvent.SELECTED, itemController.getEvents().get(1).getStateChange());
        assertEquals(ItemEvent.ITEM_STATE_CHANGED, itemController.getEvents().get(1).getID());
        assertNotNull(actionController.getEvent());
        actionController.reset();
        itemController.reset();
        // re-selecting the same value fires only an action event
        assertEquals("b", comboBox.getSelectedItem());
        comboBox.setSelectedItem("b");
        assertTrue(itemController.getEvents().isEmpty());
        assertNotNull(actionController.getEvent());
        actionController.reset();
        itemController.reset();
        // back to non-editable with a populated model
        comboBox.setEditable(false);
        comboBox.addItem("a");
        comboBox.addItem("b");
        assertEquals(1, comboBox.getSelectedIndex());
        comboBox.setSelectedItem("c");
        assertEquals("b", comboBox.getSelectedItem());
        assertEquals(1, comboBox.getSelectedIndex());
        assertNull(actionController.getEvent());
        assertTrue(itemController.getEvents().isEmpty());
        assertEquals(1, comboBox.getSelectedIndex());
        comboBox.setSelectedItem("b");
        assertEquals(1, comboBox.getSelectedIndex());
        assertNotNull(actionController.getEvent());
        assertTrue(itemController.getEvents().isEmpty());
        comboBox.setSelectedItem("a");
        assertEquals("a", comboBox.getSelectedItem());
        assertEquals(0, comboBox.getSelectedIndex());
        assertNotNull(actionController.getEvent());
        assertEquals(2, itemController.getEvents().size());
        // removing the selected item shifts selection to the next entry
        comboBox.removeItem("a");
        assertEquals("b", comboBox.getSelectedItem());
        assertEquals(0, comboBox.getSelectedIndex());
    }
    /** Index-based selection: bounds checking and behavior on item removal. */
    public void testGetSetSelectedIndex() throws Exception {
        assertEquals(-1, comboBox.getSelectedIndex());
        testExceptionalCase(new IllegalArgumentCase() {
            @Override
            public void exceptionalAction() throws Exception {
                comboBox.setSelectedIndex(0);
            }
        });
        testExceptionalCase(new IllegalArgumentCase() {
            @Override
            public void exceptionalAction() throws Exception {
                comboBox.setSelectedIndex(-2);
            }
        });
        comboBox.setSelectedIndex(-1);
        assertEquals(-1, comboBox.getSelectedIndex());
        comboBox.addItem("a");
        comboBox.addItem("b");
        assertEquals(0, comboBox.getSelectedIndex());
        assertEquals("a", comboBox.getSelectedItem());
        comboBox.setSelectedIndex(0);
        assertEquals(0, comboBox.getSelectedIndex());
        assertEquals("a", comboBox.getSelectedItem());
        comboBox.removeItem("a");
        assertEquals(0, comboBox.getSelectedIndex());
        assertEquals("b", comboBox.getSelectedItem());
        comboBox.addItem("c");
        comboBox.addItem("d");
        comboBox.addItem("e");
        comboBox.setSelectedItem("d");
        assertEquals(2, comboBox.getSelectedIndex());
        comboBox.removeItem("d");
        assertEquals(1, comboBox.getSelectedIndex());
        assertEquals("c", comboBox.getSelectedItem());
        // an editable selection not in the model reports index -1
        comboBox.setEditable(true);
        comboBox.setSelectedItem("f");
        assertEquals(-1, comboBox.getSelectedIndex());
    }
    /** prototypeDisplayValue defaults to null and fires its property change. */
    public void testPrototypeDisplayValue() throws Exception {
        assertNull(comboBox.getPrototypeDisplayValue());
        comboBox.setPrototypeDisplayValue("a");
        assertEquals("a", comboBox.getPrototypeDisplayValue());
        assertTrue(propertyChangeController.isChanged("prototypeDisplayValue"));
    }
    /**
     * addItem(): the first item becomes the selection (firing events); later
     * items do not; a non-MutableComboBoxModel makes addItem fail.
     */
    public void testAddItem() throws Exception {
        ItemController itemController = new ItemController();
        comboBox.addItemListener(itemController);
        ActionController actionController = new ActionController();
        comboBox.addActionListener(actionController);
        assertNull(comboBox.getSelectedItem());
        assertEquals(-1, comboBox.getSelectedIndex());
        comboBox.addItem("a");
        assertEquals(1, comboBox.getModel().getSize());
        assertEquals(1, itemController.getEvents().size());
        assertNotNull(actionController.getEvent());
        assertEquals("a", comboBox.getSelectedItem());
        assertEquals(0, comboBox.getSelectedIndex());
        itemController.reset();
        actionController.reset();
        comboBox.addItem("b");
        assertEquals(0, itemController.getEvents().size());
        assertNull(actionController.getEvent());
        // a plain ComboBoxModel (not MutableComboBoxModel) cannot be added to
        ComboBoxModel immutableModel = new ComboBoxModel() {
            public Object getSelectedItem() {
                return null;
            }
            public void setSelectedItem(final Object value) {
            }
            public void addListDataListener(final ListDataListener l) {
            }
            public Object getElementAt(final int index) {
                return null;
            }
            public int getSize() {
                return 0;
            }
            public void removeListDataListener(final ListDataListener l) {
            }
        };
        comboBox.setModel(immutableModel);
        testExceptionalCase(new ExceptionalCase() {
            @Override
            public void exceptionalAction() throws Exception {
                comboBox.addItem("a");
            }
        });
    }
    /**
     * insertItemAt(): unlike addItem(), inserting never changes the selection
     * or fires events; a non-MutableComboBoxModel makes it fail.
     */
    public void testInsertItemAt() throws Exception {
        ItemController itemController = new ItemController();
        comboBox.addItemListener(itemController);
        ActionController actionController = new ActionController();
        comboBox.addActionListener(actionController);
        assertNull(comboBox.getSelectedItem());
        assertEquals(-1, comboBox.getSelectedIndex());
        comboBox.insertItemAt("a", 0);
        assertEquals(1, comboBox.getModel().getSize());
        assertEquals(0, itemController.getEvents().size());
        assertNull(actionController.getEvent());
        assertNull(comboBox.getSelectedItem());
        assertEquals(-1, comboBox.getSelectedIndex());
        itemController.reset();
        actionController.reset();
        comboBox.insertItemAt("b", 1);
        assertEquals(0, itemController.getEvents().size());
        assertNull(actionController.getEvent());
        // a plain ComboBoxModel (not MutableComboBoxModel) cannot be inserted into
        ComboBoxModel immutableModel = new ComboBoxModel() {
            public Object getSelectedItem() {
                return null;
            }
            public void setSelectedItem(final Object value) {
            }
            public void addListDataListener(final ListDataListener l) {
            }
            public Object getElementAt(final int index) {
                return null;
            }
            public int getSize() {
                return 0;
            }
            public void removeListDataListener(final ListDataListener l) {
            }
        };
        comboBox.setModel(immutableModel);
        testExceptionalCase(new ExceptionalCase() {
            @Override
            public void exceptionalAction() throws Exception {
                comboBox.insertItemAt("c", 0);
            }
        });
    }
    /** removeItem(): removing the selected item reselects and fires; removing an absent item is a no-op. */
    public void testRemoveItem() throws Exception {
        ItemController itemController = new ItemController();
        comboBox.addItemListener(itemController);
        ActionController actionController = new ActionController();
        comboBox.addActionListener(actionController);
        comboBox.addItem("a");
        comboBox.addItem("b");
        assertEquals("a", comboBox.getSelectedItem());
        itemController.reset();
        actionController.reset();
        comboBox.removeItem("a");
        assertEquals("b", comboBox.getSelectedItem());
        assertEquals(1, comboBox.getModel().getSize());
        assertEquals(2, itemController.getEvents().size());
        assertNotNull(actionController.getEvent());
        itemController.reset();
        actionController.reset();
        comboBox.removeItem("a");
        assertEquals(1, comboBox.getModel().getSize());
        assertEquals(0, itemController.getEvents().size());
        assertNull(actionController.getEvent());
    }
    /** removeItemAt(): selection follows removal; out-of-range index fails. */
    public void testRemoveItemAt() throws Exception {
        ItemController itemController = new ItemController();
        comboBox.addItemListener(itemController);
        ActionController actionController = new ActionController();
        comboBox.addActionListener(actionController);
        comboBox.addItem("a");
        comboBox.addItem("b");
        assertEquals("a", comboBox.getSelectedItem());
        itemController.reset();
        actionController.reset();
        comboBox.removeItemAt(0);
        assertEquals("b", comboBox.getSelectedItem());
        assertEquals(1, comboBox.getModel().getSize());
        assertEquals(2, itemController.getEvents().size());
        assertNotNull(actionController.getEvent());
        itemController.reset();
        actionController.reset();
        comboBox.removeItemAt(0);
        assertEquals(0, comboBox.getModel().getSize());
        assertNull(comboBox.getSelectedItem());
        assertEquals(1, itemController.getEvents().size());
        assertNotNull(actionController.getEvent());
        testExceptionalCase(new ExceptionalCase() {
            @Override
            public void exceptionalAction() throws Exception {
                comboBox.removeItemAt(0);
            }
        });
    }
    /** getItemCount() tracks additions and removals. */
    public void testGetItemCount() throws Exception {
        assertEquals(0, comboBox.getItemCount());
        comboBox.addItem("a");
        assertEquals(1, comboBox.getItemCount());
        comboBox.addItem("b");
        assertEquals(2, comboBox.getItemCount());
        comboBox.removeItem("b");
        assertEquals(1, comboBox.getItemCount());
    }
    /** getItemAt() returns null (not an exception) for out-of-range indices. */
    public void testGetItemAt() throws Exception {
        assertNull(comboBox.getItemAt(0));
        assertNull(comboBox.getItemAt(-1));
        comboBox.addItem("a");
        comboBox.addItem("b");
        assertEquals("a", comboBox.getItemAt(0));
        assertEquals("b", comboBox.getItemAt(1));
        assertNull(comboBox.getItemAt(2));
    }
    /** Item listeners: registration, event dispatch via fireItemStateChanged, removal. */
    public void testAddRemoveGetFireItemListener() throws Exception {
        // uninstall the UI so no UI-installed listeners skew the counts
        comboBox.getUI().uninstallUI(comboBox);
        assertEquals(0, comboBox.getItemListeners().length);
        ItemController l = new ItemController();
        comboBox.addItemListener(l);
        assertEquals(1, comboBox.getItemListeners().length);
        comboBox.addItemListener(new ItemController());
        assertEquals(2, comboBox.getItemListeners().length);
        comboBox.fireItemStateChanged(new ItemEvent(comboBox, ItemEvent.ITEM_STATE_CHANGED,
            "a", ItemEvent.SELECTED));
        assertEquals(1, l.getEvents().size());
        assertEquals(comboBox, l.getEvents().get(0).getSource());
        comboBox.removeItemListener(l);
        assertEquals(1, comboBox.getItemListeners().length);
    }
    /** Action listeners: registration, event dispatch via fireActionEvent, removal. */
    public void testAddRemoveGetFireActionListener() throws Exception {
        comboBox.getUI().uninstallUI(comboBox);
        // NOTE(review): checks getItemListeners() here, likely a copy-paste of the
        // previous test (getActionListeners() was probably intended); harmless
        // since both arrays are empty after uninstallUI — confirm.
        assertEquals(0, comboBox.getItemListeners().length);
        ActionController l = new ActionController();
        comboBox.addActionListener(l);
        assertEquals(1, comboBox.getActionListeners().length);
        comboBox.addActionListener(new ActionController());
        assertEquals(2, comboBox.getActionListeners().length);
        comboBox.fireActionEvent();
        assertNotNull(l.getEvent());
        assertEquals(comboBox, l.getEvent().getSource());
        comboBox.removeActionListener(l);
        assertEquals(1, comboBox.getActionListeners().length);
    }
    /** Popup-menu listeners: registration and the three fire methods. */
    public void testAddRemoveGetFirePopupListener() throws Exception {
        comboBox.getUI().uninstallUI(comboBox);
        // NOTE(review): same likely copy-paste as above (getPopupMenuListeners()
        // was probably intended); harmless after uninstallUI — confirm.
        assertEquals(0, comboBox.getItemListeners().length);
        PopupMenuController l = new PopupMenuController();
        comboBox.addPopupMenuListener(l);
        assertEquals(1, comboBox.getPopupMenuListeners().length);
        comboBox.addPopupMenuListener(new PopupMenuController());
        assertEquals(2, comboBox.getPopupMenuListeners().length);
        comboBox.firePopupMenuCanceled();
        assertNotNull(l.getEvent());
        assertEquals(PopupMenuController.CANCELLED, l.getEventType());
        l.reset();
        comboBox.firePopupMenuWillBecomeVisible();
        assertNotNull(l.getEvent());
        assertEquals(PopupMenuController.VISIBLE, l.getEventType());
        assertEquals(comboBox, l.getEvent().getSource());
        l.reset();
        comboBox.firePopupMenuWillBecomeInvisible();
        assertNotNull(l.getEvent());
        assertEquals(PopupMenuController.INVISIBLE, l.getEventType());
        comboBox.removePopupMenuListener(l);
        assertEquals(1, comboBox.getPopupMenuListeners().length);
    }
    /** actionCommand defaults to "comboBoxChanged" and is carried by fired events. */
    public void testGetSetActionCommand() throws Exception {
        assertEquals("comboBoxChanged", comboBox.getActionCommand());
        comboBox.setActionCommand("anotherCommand");
        assertEquals("anotherCommand", comboBox.getActionCommand());
        // actionCommand is not a bound property
        assertFalse(propertyChangeController.isChanged());
        ActionController actionController = new ActionController();
        comboBox.addActionListener(actionController);
        comboBox.addItem("any");
        assertEquals("anotherCommand", actionController.getEvent().getActionCommand());
    }
public void testGetSelectedObjects() throws Exception {
assertNull(comboBox.getSelectedItem());
assertEquals(0, comboBox.getSelectedObjects().length);
comboBox.setEditable(true);
comboBox.setSelectedItem("a");
assertEquals("a", comboBox.getSelectedObjects()[0]);
}
public void testSetEnabled() throws Exception {
assertTrue(comboBox.isEnabled());
comboBox.setEnabled(false);
assertFalse(comboBox.isEnabled());
assertTrue(propertyChangeController.isChanged("enabled"));
}
public void testSetGetAction() throws Exception {
assertEquals(0, comboBox.getActionListeners().length);
assertNull(comboBox.getAction());
TestAction action = new TestAction();
comboBox.setAction(action);
assertTrue(propertyChangeController.isChanged("action"));
assertEquals(action, comboBox.getAction());
assertEquals(1, comboBox.getActionListeners().length);
propertyChangeController.reset();
comboBox.setAction(action);
assertFalse(propertyChangeController.isChanged());
assertEquals(1, comboBox.getActionListeners().length);
action.reset();
comboBox.fireActionEvent();
assertEquals(1, action.getEvents().size());
action.reset();
comboBox.addActionListener(action);
comboBox.fireActionEvent();
assertEquals(2, action.getEvents().size());
assertEquals(2, comboBox.getActionListeners().length);
action.reset();
comboBox.setAction(null);
assertNull(comboBox.getAction());
comboBox.fireActionEvent();
assertEquals(1, action.getEvents().size());
assertEquals(1, comboBox.getActionListeners().length);
action.reset();
comboBox.setAction(action);
comboBox.fireActionEvent();
assertEquals(1, action.getEvents().size());
assertEquals(1, comboBox.getActionListeners().length);
}
public void testIsSetPopupVisible() throws Exception {
createVisibleComboBox();
assertFalse(comboBox.isPopupVisible());
assertFalse(comboBox.getUI().isPopupVisible(comboBox));
PopupMenuController pmc = new PopupMenuController();
comboBox.addPopupMenuListener(pmc);
comboBox.setPopupVisible(true);
assertTrue(comboBox.isPopupVisible());
assertTrue(comboBox.getUI().isPopupVisible(comboBox));
assertNotNull(pmc.getEvent());
assertEquals(PopupMenuController.VISIBLE, pmc.getEventType());
pmc.reset();
comboBox.getUI().setPopupVisible(comboBox, false);
assertFalse(comboBox.isPopupVisible());
assertNotNull(pmc.getEvent());
assertEquals(PopupMenuController.INVISIBLE, pmc.getEventType());
}
public void testShowHidePopup() throws Exception {
createVisibleComboBox();
assertFalse(comboBox.isPopupVisible());
comboBox.showPopup();
assertTrue(comboBox.isPopupVisible());
comboBox.hidePopup();
assertFalse(comboBox.isPopupVisible());
}
public void testCreateDefaultKeySelectionManager() throws Exception {
JComboBox.KeySelectionManager ksm = comboBox.createDefaultKeySelectionManager();
assertNotNull(ksm);
comboBox.setKeySelectionManager(null);
comboBox.selectWithKeyChar('a');
assertNotNull(comboBox.getKeySelectionManager());
}
    public void testDefaultKeySelectionManager() throws Exception {
        // The default manager matches on the first character of an item and
        // returns -1 when the model is empty or nothing matches.
        JComboBox.KeySelectionManager ksm = comboBox.createDefaultKeySelectionManager();
        DefaultComboBoxModel model = new DefaultComboBoxModel();
        assertEquals(-1, ksm.selectionForKey('a', model));
        model.addElement("a 0");
        model.addElement("b 0");
        model.addElement(" b 0");
        assertEquals(0, ksm.selectionForKey('a', model));
        assertEquals(1, ksm.selectionForKey('b', model));
        // A leading space is matched literally, not trimmed.
        assertEquals(2, ksm.selectionForKey(' ', model));
    }
    @SuppressWarnings("deprecation")
    public void testProcessKeyEvent() throws Exception {
        // processKeyEvent() routes printable keys to the key selection manager
        // and hides a visible popup on VK_TAB.
        comboBox.setKeySelectionManager(null);
        createVisibleComboBox();
        PopupMenuController pmc = new PopupMenuController();
        comboBox.addPopupMenuListener(pmc);
        // With an empty model a printable key neither selects nor touches the popup.
        KeyEvent event = new KeyEvent(comboBox, KeyEvent.KEY_PRESSED, EventQueue
                .getMostRecentEventTime(), 0, KeyEvent.VK_A);
        comboBox.processKeyEvent(event);
        assertNull(pmc.getEvent());
        event = new KeyEvent(comboBox, KeyEvent.KEY_PRESSED, EventQueue
                .getMostRecentEventTime(), 0, KeyEvent.VK_TAB);
        comboBox.processKeyEvent(event);
        assertNull(pmc.getEvent());
        comboBox.setPopupVisible(true);
        assertNotNull(pmc.getEvent());
        assertEquals(PopupMenuController.VISIBLE, pmc.getEventType());
        pmc.reset();
        comboBox.addItem("a");
        comboBox.addItem("b");
        // Printable keys select the matching item via the default manager
        // without firing popup events.
        event = new KeyEvent(comboBox, KeyEvent.KEY_PRESSED, EventQueue
                .getMostRecentEventTime(), 0, KeyEvent.VK_A);
        comboBox.processKeyEvent(event);
        assertNull(pmc.getEvent());
        assertEquals("a", comboBox.getSelectedItem());
        pmc.reset();
        event = new KeyEvent(comboBox, KeyEvent.KEY_PRESSED, EventQueue
                .getMostRecentEventTime(), 0, KeyEvent.VK_B);
        comboBox.processKeyEvent(event);
        assertNull(pmc.getEvent());
        assertEquals("b", comboBox.getSelectedItem());
        pmc.reset();
        event = new KeyEvent(comboBox, KeyEvent.KEY_PRESSED, EventQueue
                .getMostRecentEventTime(), 0, KeyEvent.VK_A);
        comboBox.processKeyEvent(event);
        assertNull(pmc.getEvent());
        assertEquals("a", comboBox.getSelectedItem());
        pmc.reset();
        // VK_TAB hides the visible popup and must not change the selection.
        event = new KeyEvent(comboBox, KeyEvent.KEY_PRESSED, EventQueue
                .getMostRecentEventTime(), 0, KeyEvent.VK_TAB);
        comboBox.processKeyEvent(event);
        assertNotNull(pmc.getEvent());
        assertEquals(PopupMenuController.INVISIBLE, pmc.getEventType());
        assertEquals("a", comboBox.getSelectedItem());
        // With a manager that never matches, key presses leave everything alone.
        comboBox.setKeySelectionManager(new JComboBox.KeySelectionManager() {
            public int selectionForKey(final char key, final ComboBoxModel model) {
                return -1;
            }
        });
        pmc.reset();
        event = new KeyEvent(comboBox, KeyEvent.KEY_PRESSED, EventQueue
                .getMostRecentEventTime(), 0, KeyEvent.VK_TAB);
        comboBox.processKeyEvent(event);
        assertNull(pmc.getEvent());
        assertEquals("a", comboBox.getSelectedItem());
        pmc.reset();
        event = new KeyEvent(comboBox, KeyEvent.KEY_PRESSED, EventQueue
                .getMostRecentEventTime(), 0, KeyEvent.VK_A);
        comboBox.processKeyEvent(event);
        assertNull(pmc.getEvent());
        assertEquals("a", comboBox.getSelectedItem());
        pmc.reset();
        event = new KeyEvent(comboBox, KeyEvent.KEY_PRESSED, EventQueue
                .getMostRecentEventTime(), 0, KeyEvent.VK_B);
        comboBox.processKeyEvent(event);
        assertNull(pmc.getEvent());
        assertEquals("a", comboBox.getSelectedItem());
    }
    public void testSelectWithKeyChar() throws Exception {
        // selectWithKeyChar() is case-insensitive and cycles through the items
        // whose text starts with the typed character.
        comboBox.setKeySelectionManager(null);
        PopupMenuController pmc = new PopupMenuController();
        comboBox.addPopupMenuListener(pmc);
        assertFalse(comboBox.selectWithKeyChar('a'));
        assertNull(pmc.getEvent());
        comboBox.addItem("a1");
        comboBox.addItem("a2");
        comboBox.addItem("a3");
        comboBox.addItem("b1");
        assertEquals("a1", comboBox.getSelectedItem());
        assertFalse(comboBox.selectWithKeyChar('c'));
        // Matching is case-insensitive and advances to the next match.
        assertTrue(comboBox.selectWithKeyChar('A'));
        assertEquals("a2", comboBox.getSelectedItem());
        assertNull(pmc.getEvent());
        assertTrue(comboBox.selectWithKeyChar('a'));
        assertEquals("a3", comboBox.getSelectedItem());
        assertNull(pmc.getEvent());
        // Wraps around to the first match after the last one.
        assertTrue(comboBox.selectWithKeyChar('A'));
        assertEquals("a1", comboBox.getSelectedItem());
        assertNull(pmc.getEvent());
        assertTrue(comboBox.selectWithKeyChar('b'));
        assertEquals("b1", comboBox.getSelectedItem());
        assertNull(pmc.getEvent());
        assertTrue(comboBox.selectWithKeyChar('b'));
        assertEquals("b1", comboBox.getSelectedItem());
        assertNull(pmc.getEvent());
        // A custom manager returning -1 means "no selection": returns false.
        comboBox.setKeySelectionManager(new JComboBox.KeySelectionManager() {
            public int selectionForKey(final char key, final ComboBoxModel model) {
                return -1;
            }
        });
        assertFalse(comboBox.selectWithKeyChar('a'));
        assertEquals("b1", comboBox.getSelectedItem());
        assertFalse(comboBox.selectWithKeyChar('b'));
        assertEquals("b1", comboBox.getSelectedItem());
        // A manager returning a fixed index always selects that index.
        comboBox.setKeySelectionManager(new JComboBox.KeySelectionManager() {
            public int selectionForKey(final char key, final ComboBoxModel model) {
                return 1;
            }
        });
        assertTrue(comboBox.selectWithKeyChar('a'));
        assertEquals("a2", comboBox.getSelectedItem());
        assertTrue(comboBox.selectWithKeyChar('b'));
        assertEquals("a2", comboBox.getSelectedItem());
        assertTrue(comboBox.selectWithKeyChar('c'));
        assertEquals("a2", comboBox.getSelectedItem());
    }
    public void testGetSetKeySelectionManager() throws Exception {
        // The manager is never null by default, and replacing it does not fire
        // a property change event.
        assertNotNull(comboBox.getKeySelectionManager());
        JComboBox.KeySelectionManager manager = new JComboBox.KeySelectionManager() {
            public int selectionForKey(final char key, final ComboBoxModel model) {
                return 0;
            }
        };
        comboBox.setKeySelectionManager(manager);
        assertEquals(manager, comboBox.getKeySelectionManager());
        assertFalse(propertyChangeController.isChanged());
    }
    public void testCreateActionPropertyChangeListener() throws Exception {
        // The combo box tracks the enabled state of its *current* Action only;
        // a replaced or removed Action must stop affecting the combo box.
        Action action1 = new AbstractAction() {
            public void actionPerformed(final ActionEvent e) {
            }
        };
        comboBox.setAction(action1);
        assertTrue(comboBox.isEnabled());
        action1.setEnabled(false);
        assertFalse(comboBox.isEnabled());
        action1.setEnabled(true);
        assertTrue(comboBox.isEnabled());
        Action action2 = new AbstractAction() {
            public void actionPerformed(final ActionEvent e) {
            }
        };
        comboBox.setAction(action2);
        // action1 is no longer the combo's action, so disabling it has no effect.
        action1.setEnabled(false);
        assertTrue(comboBox.isEnabled());
        action2.setEnabled(false);
        assertFalse(comboBox.isEnabled());
        action2.setEnabled(true);
        assertTrue(comboBox.isEnabled());
        comboBox.setAction(null);
        assertTrue(comboBox.isEnabled());
        action2.setEnabled(false);
        assertTrue(comboBox.isEnabled());
    }
    public void testConfigurePropertiesFromAction() throws Exception {
        // Installing an Action overwrites the tooltip and enabled state from
        // the action's properties; clearing it resets the tooltip to null.
        comboBox.setToolTipText("combo tooltip");
        comboBox.setEnabled(false);
        assertEquals("combo tooltip", comboBox.getToolTipText());
        assertFalse(comboBox.isEnabled());
        Action action = new AbstractAction() {
            public void actionPerformed(final ActionEvent e) {
            }
        };
        action.putValue(Action.SHORT_DESCRIPTION, "action tooltip");
        comboBox.setAction(action);
        assertEquals("action tooltip", comboBox.getToolTipText());
        assertTrue(comboBox.isEnabled());
        comboBox.setAction(null);
        assertNull(comboBox.getToolTipText());
        assertTrue(comboBox.isEnabled());
    }
    public void testInstallAncestorListener() throws Exception {
        // The constructor installs exactly one AncestorListener.
        assertEquals(1, comboBox.getAncestorListeners().length);
    }
    public void testHarmony5223() throws Exception {
        // Regression test for HARMONY-5223: setting an editor whose
        // getEditorComponent() returns null must not throw.
        ComboBoxEditor editor = new NullComboBoxEditor();
        comboBox.setEditor(editor);
        assertEquals(editor, comboBox.getEditor());
    }
    // Editor stub whose editor component is null; used by testHarmony5223 to
    // verify that JComboBox tolerates a null editor component.
    public class NullComboBoxEditor extends BasicComboBoxEditor {
        public NullComboBoxEditor() {
            super();
        }
        public Component getEditorComponent() {
            // Intentionally null — this is the condition under test.
            return null;
        }
    }
private class ActionController implements ActionListener {
private ActionEvent event;
public void actionPerformed(final ActionEvent e) {
event = e;
}
public void reset() {
event = null;
}
public ActionEvent getEvent() {
return event;
}
}
private class TestAction extends AbstractAction {
private List<ActionEvent> events = new ArrayList<ActionEvent>();
public void actionPerformed(final ActionEvent e) {
events.add(e);
}
public void reset() {
events.clear();
}
public List<ActionEvent> getEvents() {
return events;
}
}
private class ItemController implements ItemListener {
private List<ItemEvent> eventList = new ArrayList<ItemEvent>();
public void itemStateChanged(final ItemEvent e) {
eventList.add(e);
}
public void reset() {
eventList.clear();
}
public List<ItemEvent> getEvents() {
return eventList;
}
}
private static class PopupMenuController implements PopupMenuListener {
public static final int CANCELLED = 0;
public static final int VISIBLE = 1;
public static final int INVISIBLE = 2;
private PopupMenuEvent event;
private int eventType = -1;
public void reset() {
event = null;
eventType = -1;
}
public PopupMenuEvent getEvent() {
return event;
}
public int getEventType() {
return eventType;
}
public void popupMenuCanceled(final PopupMenuEvent e) {
event = e;
eventType = CANCELLED;
}
public void popupMenuWillBecomeInvisible(final PopupMenuEvent e) {
event = e;
eventType = INVISIBLE;
}
public void popupMenuWillBecomeVisible(final PopupMenuEvent e) {
event = e;
eventType = VISIBLE;
}
}
    @SuppressWarnings("deprecation")
    private void createVisibleComboBox() {
        // Realizes the combo box on screen so UI-dependent behavior (popup
        // visibility, key handling) works. Frame.show() is deprecated but kept
        // as in the original test, hence the suppression.
        frame = new JFrame();
        frame.getContentPane().add(comboBox);
        frame.show();
    }
}
|
apache/iceberg | 35,106 | spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/sql/TestAggregatePushDown.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.iceberg.spark.sql;
import static org.assertj.core.api.Assertions.assertThat;
import java.math.BigDecimal;
import java.sql.Date;
import java.sql.Timestamp;
import java.util.Arrays;
import java.util.List;
import java.util.Locale;
import org.apache.iceberg.CatalogUtil;
import org.apache.iceberg.ParameterizedTestExtension;
import org.apache.iceberg.TableProperties;
import org.apache.iceberg.catalog.Namespace;
import org.apache.iceberg.exceptions.AlreadyExistsException;
import org.apache.iceberg.hive.HiveCatalog;
import org.apache.iceberg.hive.TestHiveMetastore;
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap;
import org.apache.iceberg.relocated.com.google.common.collect.Lists;
import org.apache.iceberg.spark.CatalogTestBase;
import org.apache.iceberg.spark.SparkReadOptions;
import org.apache.iceberg.spark.TestBase;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.execution.ExplainMode;
import org.apache.spark.sql.functions;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.TestTemplate;
import org.junit.jupiter.api.extension.ExtendWith;
@ExtendWith(ParameterizedTestExtension.class)
public class TestAggregatePushDown extends CatalogTestBase {
  @BeforeAll
  public static void startMetastoreAndSpark() {
    // Boots an embedded Hive metastore and replaces the shared SparkSession
    // with one that enables aggregate pushdown and Hive support, then loads a
    // HiveCatalog backed by that metastore.
    TestBase.metastore = new TestHiveMetastore();
    metastore.start();
    TestBase.hiveConf = metastore.hiveConf();
    TestBase.spark.close();
    TestBase.spark =
        SparkSession.builder()
            .master("local[2]")
            .config("spark.sql.iceberg.aggregate_pushdown", "true")
            .enableHiveSupport()
            .getOrCreate();
    TestBase.catalog =
        (HiveCatalog)
            CatalogUtil.loadCatalog(
                HiveCatalog.class.getName(), "hive", ImmutableMap.of(), hiveConf);
    try {
      catalog.createNamespace(Namespace.of("default"));
    } catch (AlreadyExistsException ignored) {
      // the default namespace already exists. ignore the create error
    }
  }
  @AfterEach
  public void removeTables() {
    // Drop the per-test table so each test starts from a clean catalog.
    sql("DROP TABLE IF EXISTS %s", tableName);
  }
  @TestTemplate
  public void testDifferentDataTypesAggregatePushDownInPartitionedTable() {
    // Same scenario as the non-partitioned variant, but with a partition column.
    testDifferentDataTypesAggregatePushDown(true);
  }
  @TestTemplate
  public void testDifferentDataTypesAggregatePushDownInNonPartitionedTable() {
    // Same scenario as the partitioned variant, but without a partition column.
    testDifferentDataTypesAggregatePushDown(false);
  }
@SuppressWarnings("checkstyle:CyclomaticComplexity")
private void testDifferentDataTypesAggregatePushDown(boolean hasPartitionCol) {
String createTable;
if (hasPartitionCol) {
createTable =
"CREATE TABLE %s (id LONG, int_data INT, boolean_data BOOLEAN, float_data FLOAT, double_data DOUBLE, "
+ "decimal_data DECIMAL(14, 2), binary_data binary) USING iceberg PARTITIONED BY (id)";
} else {
createTable =
"CREATE TABLE %s (id LONG, int_data INT, boolean_data BOOLEAN, float_data FLOAT, double_data DOUBLE, "
+ "decimal_data DECIMAL(14, 2), binary_data binary) USING iceberg";
}
sql(createTable, tableName);
sql(
"INSERT INTO TABLE %s VALUES "
+ "(1, null, false, null, null, 11.11, X'1111'),"
+ " (1, null, true, 2.222, 2.222222, 22.22, X'2222'),"
+ " (2, 33, false, 3.333, 3.333333, 33.33, X'3333'),"
+ " (2, 44, true, null, 4.444444, 44.44, X'4444'),"
+ " (3, 55, false, 5.555, 5.555555, 55.55, X'5555'),"
+ " (3, null, true, null, 6.666666, 66.66, null) ",
tableName);
String select =
"SELECT count(*), max(id), min(id), count(id), "
+ "max(int_data), min(int_data), count(int_data), "
+ "max(boolean_data), min(boolean_data), count(boolean_data), "
+ "max(float_data), min(float_data), count(float_data), "
+ "max(double_data), min(double_data), count(double_data), "
+ "max(decimal_data), min(decimal_data), count(decimal_data), "
+ "max(binary_data), min(binary_data), count(binary_data) FROM %s";
List<Object[]> explain = sql("EXPLAIN " + select, tableName);
String explainString = explain.get(0)[0].toString().toLowerCase(Locale.ROOT);
boolean explainContainsPushDownAggregates = false;
if (explainString.contains("count(*)")
&& explainString.contains("max(id)")
&& explainString.contains("min(id)")
&& explainString.contains("count(id)")
&& explainString.contains("max(int_data)")
&& explainString.contains("min(int_data)")
&& explainString.contains("count(int_data)")
&& explainString.contains("max(boolean_data)")
&& explainString.contains("min(boolean_data)")
&& explainString.contains("count(boolean_data)")
&& explainString.contains("max(float_data)")
&& explainString.contains("min(float_data)")
&& explainString.contains("count(float_data)")
&& explainString.contains("max(double_data)")
&& explainString.contains("min(double_data)")
&& explainString.contains("count(double_data)")
&& explainString.contains("max(decimal_data)")
&& explainString.contains("min(decimal_data)")
&& explainString.contains("count(decimal_data)")
&& explainString.contains("max(binary_data)")
&& explainString.contains("min(binary_data)")
&& explainString.contains("count(binary_data)")) {
explainContainsPushDownAggregates = true;
}
assertThat(explainContainsPushDownAggregates)
.as("explain should contain the pushed down aggregates")
.isTrue();
List<Object[]> actual = sql(select, tableName);
List<Object[]> expected = Lists.newArrayList();
expected.add(
new Object[] {
6L,
3L,
1L,
6L,
55,
33,
3L,
true,
false,
6L,
5.555f,
2.222f,
3L,
6.666666,
2.222222,
5L,
new BigDecimal("66.66"),
new BigDecimal("11.11"),
6L,
new byte[] {85, 85},
new byte[] {17, 17},
5L
});
assertEquals("min/max/count push down", expected, actual);
}
@TestTemplate
public void testDateAndTimestampWithPartition() {
sql(
"CREATE TABLE %s (id bigint, data string, d date, ts timestamp) USING iceberg PARTITIONED BY (id)",
tableName);
sql(
"INSERT INTO %s VALUES (1, '1', date('2021-11-10'), null),"
+ "(1, '2', date('2021-11-11'), timestamp('2021-11-11 22:22:22')), "
+ "(2, '3', date('2021-11-12'), timestamp('2021-11-12 22:22:22')), "
+ "(2, '4', date('2021-11-13'), timestamp('2021-11-13 22:22:22')), "
+ "(3, '5', null, timestamp('2021-11-14 22:22:22')), "
+ "(3, '6', date('2021-11-14'), null)",
tableName);
String select = "SELECT max(d), min(d), count(d), max(ts), min(ts), count(ts) FROM %s";
List<Object[]> explain = sql("EXPLAIN " + select, tableName);
String explainString = explain.get(0)[0].toString().toLowerCase(Locale.ROOT);
boolean explainContainsPushDownAggregates = false;
if (explainString.contains("max(d)")
&& explainString.contains("min(d)")
&& explainString.contains("count(d)")
&& explainString.contains("max(ts)")
&& explainString.contains("min(ts)")
&& explainString.contains("count(ts)")) {
explainContainsPushDownAggregates = true;
}
assertThat(explainContainsPushDownAggregates)
.as("explain should contain the pushed down aggregates")
.isTrue();
List<Object[]> actual = sql(select, tableName);
List<Object[]> expected = Lists.newArrayList();
expected.add(
new Object[] {
Date.valueOf("2021-11-14"),
Date.valueOf("2021-11-10"),
5L,
Timestamp.valueOf("2021-11-14 22:22:22.0"),
Timestamp.valueOf("2021-11-11 22:22:22.0"),
4L
});
assertEquals("min/max/count push down", expected, actual);
}
  @TestTemplate
  public void testAggregateNotPushDownIfOneCantPushDown() {
    // SUM is not supported for pushdown, so the whole aggregate list
    // (including the otherwise pushable COUNT) stays in Spark.
    sql("CREATE TABLE %s (id LONG, data DOUBLE) USING iceberg", tableName);
    sql(
        "INSERT INTO TABLE %s VALUES (1, 1111), (1, 2222), (2, 3333), (2, 4444), (3, 5555), (3, 6666) ",
        tableName);
    String select = "SELECT COUNT(data), SUM(data) FROM %s";
    List<Object[]> explain = sql("EXPLAIN " + select, tableName);
    String explainString = explain.get(0)[0].toString().toLowerCase(Locale.ROOT);
    boolean explainContainsPushDownAggregates = false;
    if (explainString.contains("count(data)")) {
      explainContainsPushDownAggregates = true;
    }
    assertThat(explainContainsPushDownAggregates)
        .as("explain should not contain the pushed down aggregates")
        .isFalse();
    List<Object[]> actual = sql(select, tableName);
    List<Object[]> expected = Lists.newArrayList();
    expected.add(new Object[] {6L, 23331.0});
    assertEquals("expected and actual should equal", expected, actual);
  }
@TestTemplate
public void testAggregatePushDownWithMetricsMode() {
sql("CREATE TABLE %s (id LONG, data DOUBLE) USING iceberg", tableName);
sql(
"ALTER TABLE %s SET TBLPROPERTIES('%s' '%s')",
tableName, TableProperties.DEFAULT_WRITE_METRICS_MODE, "none");
sql(
"ALTER TABLE %s SET TBLPROPERTIES('%s' '%s')",
tableName, TableProperties.METRICS_MODE_COLUMN_CONF_PREFIX + "id", "counts");
sql(
"ALTER TABLE %s SET TBLPROPERTIES('%s' '%s')",
tableName, TableProperties.METRICS_MODE_COLUMN_CONF_PREFIX + "data", "none");
sql(
"INSERT INTO TABLE %s VALUES (1, 1111), (1, 2222), (2, 3333), (2, 4444), (3, 5555), (3, 6666)",
tableName);
String select1 = "SELECT COUNT(data) FROM %s";
List<Object[]> explain1 = sql("EXPLAIN " + select1, tableName);
String explainString1 = explain1.get(0)[0].toString().toLowerCase(Locale.ROOT);
boolean explainContainsPushDownAggregates = false;
if (explainString1.contains("count(data)")) {
explainContainsPushDownAggregates = true;
}
// count(data) is not pushed down because the metrics mode is `none`
assertThat(explainContainsPushDownAggregates)
.as("explain should not contain the pushed down aggregates")
.isFalse();
List<Object[]> actual1 = sql(select1, tableName);
List<Object[]> expected1 = Lists.newArrayList();
expected1.add(new Object[] {6L});
assertEquals("expected and actual should equal", expected1, actual1);
String select2 = "SELECT COUNT(id) FROM %s";
List<Object[]> explain2 = sql("EXPLAIN " + select2, tableName);
String explainString2 = explain2.get(0)[0].toString().toLowerCase(Locale.ROOT);
if (explainString2.contains("count(id)")) {
explainContainsPushDownAggregates = true;
}
// count(id) is pushed down because the metrics mode is `counts`
assertThat(explainContainsPushDownAggregates)
.as("explain should contain the pushed down aggregates")
.isTrue();
List<Object[]> actual2 = sql(select2, tableName);
List<Object[]> expected2 = Lists.newArrayList();
expected2.add(new Object[] {6L});
assertEquals("expected and actual should equal", expected2, actual2);
String select3 = "SELECT COUNT(id), MAX(id) FROM %s";
explainContainsPushDownAggregates = false;
List<Object[]> explain3 = sql("EXPLAIN " + select3, tableName);
String explainString3 = explain3.get(0)[0].toString().toLowerCase(Locale.ROOT);
if (explainString3.contains("count(id)")) {
explainContainsPushDownAggregates = true;
}
// COUNT(id), MAX(id) are not pushed down because MAX(id) is not pushed down (metrics mode is
// `counts`)
assertThat(explainContainsPushDownAggregates)
.as("explain should not contain the pushed down aggregates")
.isFalse();
List<Object[]> actual3 = sql(select3, tableName);
List<Object[]> expected3 = Lists.newArrayList();
expected3.add(new Object[] {6L, 3L});
assertEquals("expected and actual should equal", expected3, actual3);
}
@TestTemplate
public void testAggregateNotPushDownForStringType() {
sql("CREATE TABLE %s (id LONG, data STRING) USING iceberg", tableName);
sql(
"INSERT INTO TABLE %s VALUES (1, '1111'), (1, '2222'), (2, '3333'), (2, '4444'), (3, '5555'), (3, '6666') ",
tableName);
sql(
"ALTER TABLE %s SET TBLPROPERTIES('%s' '%s')",
tableName, TableProperties.DEFAULT_WRITE_METRICS_MODE, "truncate(16)");
String select1 = "SELECT MAX(id), MAX(data) FROM %s";
List<Object[]> explain1 = sql("EXPLAIN " + select1, tableName);
String explainString1 = explain1.get(0)[0].toString().toLowerCase(Locale.ROOT);
boolean explainContainsPushDownAggregates = false;
if (explainString1.contains("max(id)")) {
explainContainsPushDownAggregates = true;
}
assertThat(explainContainsPushDownAggregates)
.as("explain should not contain the pushed down aggregates")
.isFalse();
List<Object[]> actual1 = sql(select1, tableName);
List<Object[]> expected1 = Lists.newArrayList();
expected1.add(new Object[] {3L, "6666"});
assertEquals("expected and actual should equal", expected1, actual1);
String select2 = "SELECT COUNT(data) FROM %s";
List<Object[]> explain2 = sql("EXPLAIN " + select2, tableName);
String explainString2 = explain2.get(0)[0].toString().toLowerCase(Locale.ROOT);
if (explainString2.contains("count(data)")) {
explainContainsPushDownAggregates = true;
}
assertThat(explainContainsPushDownAggregates)
.as("explain should contain the pushed down aggregates")
.isTrue();
List<Object[]> actual2 = sql(select2, tableName);
List<Object[]> expected2 = Lists.newArrayList();
expected2.add(new Object[] {6L});
assertEquals("expected and actual should equal", expected2, actual2);
explainContainsPushDownAggregates = false;
sql(
"ALTER TABLE %s SET TBLPROPERTIES('%s' '%s')",
tableName, TableProperties.DEFAULT_WRITE_METRICS_MODE, "full");
String select3 = "SELECT count(data), max(data) FROM %s";
List<Object[]> explain3 = sql("EXPLAIN " + select3, tableName);
String explainString3 = explain3.get(0)[0].toString().toLowerCase(Locale.ROOT);
if (explainString3.contains("count(data)") && explainString3.contains("max(data)")) {
explainContainsPushDownAggregates = true;
}
assertThat(explainContainsPushDownAggregates)
.as("explain should contain the pushed down aggregates")
.isTrue();
List<Object[]> actual3 = sql(select3, tableName);
List<Object[]> expected3 = Lists.newArrayList();
expected3.add(new Object[] {6L, "6666"});
assertEquals("expected and actual should equal", expected3, actual3);
}
  @TestTemplate
  public void testAggregatePushDownWithDataFilter() {
    // A filter on a data column blocks aggregate pushdown.
    testAggregatePushDownWithFilter(false);
  }
  @TestTemplate
  public void testAggregatePushDownWithPartitionFilter() {
    // A filter only on the partition column allows aggregate pushdown.
    testAggregatePushDownWithFilter(true);
  }
private void testAggregatePushDownWithFilter(boolean partitionFilerOnly) {
String createTable;
if (!partitionFilerOnly) {
createTable = "CREATE TABLE %s (id LONG, data INT) USING iceberg";
} else {
createTable = "CREATE TABLE %s (id LONG, data INT) USING iceberg PARTITIONED BY (id)";
}
sql(createTable, tableName);
sql(
"INSERT INTO TABLE %s VALUES"
+ " (1, 11),"
+ " (1, 22),"
+ " (2, 33),"
+ " (2, 44),"
+ " (3, 55),"
+ " (3, 66) ",
tableName);
String select = "SELECT MIN(data) FROM %s WHERE id > 1";
List<Object[]> explain = sql("EXPLAIN " + select, tableName);
String explainString = explain.get(0)[0].toString().toLowerCase(Locale.ROOT);
boolean explainContainsPushDownAggregates = false;
if (explainString.contains("min(data)")) {
explainContainsPushDownAggregates = true;
}
if (!partitionFilerOnly) {
// Filters are not completely pushed down, we can't push down aggregates
assertThat(explainContainsPushDownAggregates)
.as("explain should not contain the pushed down aggregates")
.isFalse();
} else {
// Filters are not completely pushed down, we can push down aggregates
assertThat(explainContainsPushDownAggregates)
.as("explain should contain the pushed down aggregates")
.isTrue();
}
List<Object[]> actual = sql(select, tableName);
List<Object[]> expected = Lists.newArrayList();
expected.add(new Object[] {33});
assertEquals("expected and actual should equal", expected, actual);
}
@TestTemplate
public void testAggregateWithComplexType() {
sql("CREATE TABLE %s (id INT, complex STRUCT<c1:INT,c2:STRING>) USING iceberg", tableName);
sql(
"INSERT INTO TABLE %s VALUES (1, named_struct(\"c1\", 3, \"c2\", \"v1\")),"
+ "(2, named_struct(\"c1\", 2, \"c2\", \"v2\")), (3, null)",
tableName);
String select1 = "SELECT count(complex), count(id) FROM %s";
List<Object[]> explain = sql("EXPLAIN " + select1, tableName);
String explainString = explain.get(0)[0].toString().toLowerCase(Locale.ROOT);
boolean explainContainsPushDownAggregates = false;
if (explainString.contains("count(complex)")) {
explainContainsPushDownAggregates = true;
}
assertThat(explainContainsPushDownAggregates)
.as("count not pushed down for complex types")
.isFalse();
List<Object[]> actual = sql(select1, tableName);
List<Object[]> expected = Lists.newArrayList();
expected.add(new Object[] {2L, 3L});
assertEquals("count not push down", actual, expected);
String select2 = "SELECT max(complex) FROM %s";
explain = sql("EXPLAIN " + select2, tableName);
explainString = explain.get(0)[0].toString().toLowerCase(Locale.ROOT);
explainContainsPushDownAggregates = false;
if (explainString.contains("max(complex)")) {
explainContainsPushDownAggregates = true;
}
assertThat(explainContainsPushDownAggregates)
.as("max not pushed down for complex types")
.isFalse();
}
  @TestTemplate
  public void testAggregationPushdownStructInteger() {
    // COUNT/MAX/MIN on a struct's leaf field (one level deep) are pushed down.
    sql("CREATE TABLE %s (id BIGINT, struct_with_int STRUCT<c1:BIGINT>) USING iceberg", tableName);
    sql("INSERT INTO TABLE %s VALUES (1, named_struct(\"c1\", NULL))", tableName);
    sql("INSERT INTO TABLE %s VALUES (2, named_struct(\"c1\", 2))", tableName);
    sql("INSERT INTO TABLE %s VALUES (3, named_struct(\"c1\", 3))", tableName);
    String query = "SELECT COUNT(%s), MAX(%s), MIN(%s) FROM %s";
    String aggField = "struct_with_int.c1";
    // The NULL leaf value is excluded: count=2, max=3, min=2.
    assertAggregates(sql(query, aggField, aggField, aggField, tableName), 2L, 3L, 2L);
    assertExplainContains(
        sql("EXPLAIN " + query, aggField, aggField, aggField, tableName),
        "count(struct_with_int.c1)",
        "max(struct_with_int.c1)",
        "min(struct_with_int.c1)");
  }
  @TestTemplate
  public void testAggregationPushdownNestedStruct() {
    // Pushdown must also work for a leaf field nested four structs deep.
    sql(
        "CREATE TABLE %s (id BIGINT, struct_with_int STRUCT<c1:STRUCT<c2:STRUCT<c3:STRUCT<c4:BIGINT>>>>) USING iceberg",
        tableName);
    sql(
        "INSERT INTO TABLE %s VALUES (1, named_struct(\"c1\", named_struct(\"c2\", named_struct(\"c3\", named_struct(\"c4\", NULL)))))",
        tableName);
    sql(
        "INSERT INTO TABLE %s VALUES (2, named_struct(\"c1\", named_struct(\"c2\", named_struct(\"c3\", named_struct(\"c4\", 2)))))",
        tableName);
    sql(
        "INSERT INTO TABLE %s VALUES (3, named_struct(\"c1\", named_struct(\"c2\", named_struct(\"c3\", named_struct(\"c4\", 3)))))",
        tableName);
    String query = "SELECT COUNT(%s), MAX(%s), MIN(%s) FROM %s";
    String aggField = "struct_with_int.c1.c2.c3.c4";
    // The NULL leaf value is excluded: count=2, max=3, min=2.
    assertAggregates(sql(query, aggField, aggField, aggField, tableName), 2L, 3L, 2L);
    assertExplainContains(
        sql("EXPLAIN " + query, aggField, aggField, aggField, tableName),
        "count(struct_with_int.c1.c2.c3.c4)",
        "max(struct_with_int.c1.c2.c3.c4)",
        "min(struct_with_int.c1.c2.c3.c4)");
  }
  @TestTemplate
  public void testAggregationPushdownStructTimestamp() {
    // Pushdown on a TIMESTAMP leaf inside a struct.
    sql(
        "CREATE TABLE %s (id BIGINT, struct_with_ts STRUCT<c1:TIMESTAMP>) USING iceberg",
        tableName);
    sql("INSERT INTO TABLE %s VALUES (1, named_struct(\"c1\", NULL))", tableName);
    sql(
        "INSERT INTO TABLE %s VALUES (2, named_struct(\"c1\", timestamp('2023-01-30T22:22:22Z')))",
        tableName);
    sql(
        "INSERT INTO TABLE %s VALUES (3, named_struct(\"c1\", timestamp('2023-01-30T22:23:23Z')))",
        tableName);
    String query = "SELECT COUNT(%s), MAX(%s), MIN(%s) FROM %s";
    String aggField = "struct_with_ts.c1";
    // Epoch-millis constants correspond to the two inserted instants
    // (assumes the Spark session resolves the 'Z' literals as UTC — the
    // existing expectations encode that).
    assertAggregates(
        sql(query, aggField, aggField, aggField, tableName),
        2L,
        new Timestamp(1675117403000L),
        new Timestamp(1675117342000L));
    assertExplainContains(
        sql("EXPLAIN " + query, aggField, aggField, aggField, tableName),
        "count(struct_with_ts.c1)",
        "max(struct_with_ts.c1)",
        "min(struct_with_ts.c1)");
  }
  @TestTemplate
  public void testAggregationPushdownOnBucketedColumn() {
    // Aggregates on a column used in a bucket partition transform still push down.
    sql(
        "CREATE TABLE %s (id BIGINT, struct_with_int STRUCT<c1:INT>) USING iceberg PARTITIONED BY (bucket(8, id))",
        tableName);
    sql("INSERT INTO TABLE %s VALUES (1, named_struct(\"c1\", NULL))", tableName);
    sql("INSERT INTO TABLE %s VALUES (null, named_struct(\"c1\", 2))", tableName);
    sql("INSERT INTO TABLE %s VALUES (2, named_struct(\"c1\", 3))", tableName);
    String query = "SELECT COUNT(%s), MAX(%s), MIN(%s) FROM %s";
    String aggField = "id";
    // The null id is excluded: count=2, max=2, min=1.
    assertAggregates(sql(query, aggField, aggField, aggField, tableName), 2L, 2L, 1L);
    assertExplainContains(
        sql("EXPLAIN " + query, aggField, aggField, aggField, tableName),
        "count(id)",
        "max(id)",
        "min(id)");
  }
private void assertAggregates(
List<Object[]> actual, Object expectedCount, Object expectedMax, Object expectedMin) {
Object actualCount = actual.get(0)[0];
Object actualMax = actual.get(0)[1];
Object actualMin = actual.get(0)[2];
assertThat(actualCount).as("Expected and actual count should equal").isEqualTo(expectedCount);
assertThat(actualMax).as("Expected and actual max should equal").isEqualTo(expectedMax);
assertThat(actualMin).as("Expected and actual min should equal").isEqualTo(expectedMin);
}
private void assertExplainContains(List<Object[]> explain, String... expectedFragments) {
String explainString = explain.get(0)[0].toString().toLowerCase(Locale.ROOT);
Arrays.stream(expectedFragments)
.forEach(
fragment ->
assertThat(explainString)
.as("Expected to find plan fragment in explain plan")
.contains(fragment));
}
  @TestTemplate
  public void testAggregatePushDownInDeleteCopyOnWrite() {
    // Copy-on-write deletes rewrite data files, so the file metrics stay
    // accurate and min/max/count can still be pushed down after a DELETE.
    sql("CREATE TABLE %s (id LONG, data INT) USING iceberg", tableName);
    sql(
        "INSERT INTO TABLE %s VALUES (1, 1111), (1, 2222), (2, 3333), (2, 4444), (3, 5555), (3, 6666) ",
        tableName);
    sql("DELETE FROM %s WHERE data = 1111", tableName);
    String select = "SELECT max(data), min(data), count(data) FROM %s";
    List<Object[]> explain = sql("EXPLAIN " + select, tableName);
    String explainString = explain.get(0)[0].toString().toLowerCase(Locale.ROOT);
    boolean explainContainsPushDownAggregates = false;
    if (explainString.contains("max(data)")
        && explainString.contains("min(data)")
        && explainString.contains("count(data)")) {
      explainContainsPushDownAggregates = true;
    }
    assertThat(explainContainsPushDownAggregates)
        .as("min/max/count pushed down for deleted")
        .isTrue();
    List<Object[]> actual = sql(select, tableName);
    List<Object[]> expected = Lists.newArrayList();
    // Row (1, 1111) is gone, so min becomes 2222 and the count drops to 5.
    expected.add(new Object[] {6666, 2222, 5L});
    assertEquals("min/max/count push down", expected, actual);
  }
  @TestTemplate
  public void testAggregatePushDownForTimeTravel() {
    // count(id) must be pushed down both for a VERSION AS OF (time travel)
    // query against an older snapshot and for the current table state, and
    // each must see its own snapshot's row count.
    sql("CREATE TABLE %s (id LONG, data INT) USING iceberg", tableName);
    sql(
        "INSERT INTO TABLE %s VALUES (1, 1111), (1, 2222), (2, 3333), (2, 4444), (3, 5555), (3, 6666) ",
        tableName);
    long snapshotId = validationCatalog.loadTable(tableIdent).currentSnapshot().snapshotId();
    List<Object[]> expected1 = sql("SELECT count(id) FROM %s", tableName);
    sql("INSERT INTO %s VALUES (4, 7777), (5, 8888)", tableName);
    List<Object[]> expected2 = sql("SELECT count(id) FROM %s", tableName);
    List<Object[]> explain1 =
        sql("EXPLAIN SELECT count(id) FROM %s VERSION AS OF %s", tableName, snapshotId);
    String explainString1 = explain1.get(0)[0].toString().toLowerCase(Locale.ROOT);
    boolean explainContainsPushDownAggregates1 = false;
    if (explainString1.contains("count(id)")) {
      explainContainsPushDownAggregates1 = true;
    }
    assertThat(explainContainsPushDownAggregates1).as("count pushed down").isTrue();
    List<Object[]> actual1 =
        sql("SELECT count(id) FROM %s VERSION AS OF %s", tableName, snapshotId);
    assertEquals("count push down", expected1, actual1);
    List<Object[]> explain2 = sql("EXPLAIN SELECT count(id) FROM %s", tableName);
    String explainString2 = explain2.get(0)[0].toString().toLowerCase(Locale.ROOT);
    boolean explainContainsPushDownAggregates2 = false;
    if (explainString2.contains("count(id)")) {
      explainContainsPushDownAggregates2 = true;
    }
    assertThat(explainContainsPushDownAggregates2).as("count pushed down").isTrue();
    List<Object[]> actual2 = sql("SELECT count(id) FROM %s", tableName);
    assertEquals("count push down", expected2, actual2);
  }
  @TestTemplate
  public void testAllNull() {
    // All data values are null: push-down should still happen, yielding
    // null min/max and a zero non-null count.
    sql("CREATE TABLE %s (id int, data int) USING iceberg PARTITIONED BY (id)", tableName);
    sql(
        "INSERT INTO %s VALUES (1, null),"
            + "(1, null), "
            + "(2, null), "
            + "(2, null), "
            + "(3, null), "
            + "(3, null)",
        tableName);
    String select = "SELECT count(*), max(data), min(data), count(data) FROM %s";
    List<Object[]> explain = sql("EXPLAIN " + select, tableName);
    String explainString = explain.get(0)[0].toString().toLowerCase(Locale.ROOT);
    boolean explainContainsPushDownAggregates = false;
    if (explainString.contains("max(data)")
        && explainString.contains("min(data)")
        && explainString.contains("count(data)")) {
      explainContainsPushDownAggregates = true;
    }
    assertThat(explainContainsPushDownAggregates)
        .as("explain should contain the pushed down aggregates")
        .isTrue();
    List<Object[]> actual = sql(select, tableName);
    List<Object[]> expected = Lists.newArrayList();
    expected.add(new Object[] {6L, null, null, 0L});
    assertEquals("min/max/count push down", expected, actual);
  }
  @TestTemplate
  public void testAllNaN() {
    // All values are NaN: the explain check uses || because NONE of the
    // aggregates may be pushed down; Spark must compute them itself.
    sql("CREATE TABLE %s (id int, data float) USING iceberg PARTITIONED BY (id)", tableName);
    sql(
        "INSERT INTO %s VALUES (1, float('nan')),"
            + "(1, float('nan')), "
            + "(2, float('nan')), "
            + "(2, float('nan')), "
            + "(3, float('nan')), "
            + "(3, float('nan'))",
        tableName);
    String select = "SELECT count(*), max(data), min(data), count(data) FROM %s";
    List<Object[]> explain = sql("EXPLAIN " + select, tableName);
    String explainString = explain.get(0)[0].toString().toLowerCase(Locale.ROOT);
    boolean explainContainsPushDownAggregates = false;
    if (explainString.contains("max(data)")
        || explainString.contains("min(data)")
        || explainString.contains("count(data)")) {
      explainContainsPushDownAggregates = true;
    }
    assertThat(explainContainsPushDownAggregates)
        .as("explain should not contain the pushed down aggregates")
        .isFalse();
    List<Object[]> actual = sql(select, tableName);
    List<Object[]> expected = Lists.newArrayList();
    expected.add(new Object[] {6L, Float.NaN, Float.NaN, 6L});
    assertEquals("expected and actual should equal", expected, actual);
  }
  @TestTemplate
  public void testNaN() {
    // Mixed NaN and numeric values: push-down is rejected (|| check) and the
    // fallback computation still returns correct min/max.
    sql("CREATE TABLE %s (id int, data float) USING iceberg PARTITIONED BY (id)", tableName);
    sql(
        "INSERT INTO %s VALUES (1, float('nan')),"
            + "(1, float('nan')), "
            + "(2, 2), "
            + "(2, float('nan')), "
            + "(3, float('nan')), "
            + "(3, 1)",
        tableName);
    String select = "SELECT count(*), max(data), min(data), count(data) FROM %s";
    List<Object[]> explain = sql("EXPLAIN " + select, tableName);
    String explainString = explain.get(0)[0].toString().toLowerCase(Locale.ROOT);
    boolean explainContainsPushDownAggregates = false;
    if (explainString.contains("max(data)")
        || explainString.contains("min(data)")
        || explainString.contains("count(data)")) {
      explainContainsPushDownAggregates = true;
    }
    assertThat(explainContainsPushDownAggregates)
        .as("explain should not contain the pushed down aggregates")
        .isFalse();
    List<Object[]> actual = sql(select, tableName);
    List<Object[]> expected = Lists.newArrayList();
    // Spark treats NaN as larger than any other float value.
    expected.add(new Object[] {6L, Float.NaN, 1.0F, 6L});
    assertEquals("expected and actual should equal", expected, actual);
  }
  @TestTemplate
  public void testInfinity() {
    // Unlike NaN, +/-infinity values order normally, so min/max/count
    // push-down is expected for all three columns.
    sql(
        "CREATE TABLE %s (id int, data1 float, data2 double, data3 double) USING iceberg PARTITIONED BY (id)",
        tableName);
    sql(
        "INSERT INTO %s VALUES (1, float('-infinity'), double('infinity'), 1.23), "
            + "(1, float('-infinity'), double('infinity'), -1.23), "
            + "(1, float('-infinity'), double('infinity'), double('infinity')), "
            + "(1, float('-infinity'), double('infinity'), 2.23), "
            + "(1, float('-infinity'), double('infinity'), double('-infinity')), "
            + "(1, float('-infinity'), double('infinity'), -2.23)",
        tableName);
    String select =
        "SELECT count(*), max(data1), min(data1), count(data1), max(data2), min(data2), count(data2), max(data3), min(data3), count(data3) FROM %s";
    List<Object[]> explain = sql("EXPLAIN " + select, tableName);
    String explainString = explain.get(0)[0].toString().toLowerCase(Locale.ROOT);
    boolean explainContainsPushDownAggregates = false;
    if (explainString.contains("max(data1)")
        && explainString.contains("min(data1)")
        && explainString.contains("count(data1)")
        && explainString.contains("max(data2)")
        && explainString.contains("min(data2)")
        && explainString.contains("count(data2)")
        && explainString.contains("max(data3)")
        && explainString.contains("min(data3)")
        && explainString.contains("count(data3)")) {
      explainContainsPushDownAggregates = true;
    }
    assertThat(explainContainsPushDownAggregates)
        .as("explain should contain the pushed down aggregates")
        .isTrue();
    List<Object[]> actual = sql(select, tableName);
    List<Object[]> expected = Lists.newArrayList();
    expected.add(
        new Object[] {
          6L,
          Float.NEGATIVE_INFINITY,
          Float.NEGATIVE_INFINITY,
          6L,
          Double.POSITIVE_INFINITY,
          Double.POSITIVE_INFINITY,
          6L,
          Double.POSITIVE_INFINITY,
          Double.NEGATIVE_INFINITY,
          6L
        });
    assertEquals("min/max/count push down", expected, actual);
  }
  @TestTemplate
  public void testAggregatePushDownForIncrementalScan() {
    // Verifies aggregate push-down for incremental reads: both a bounded
    // (start + end snapshot) and an unbounded (start only) range. The
    // "LocalTableScan" fragment shows the result came purely from metadata.
    sql("CREATE TABLE %s (id LONG, data INT) USING iceberg", tableName);
    sql(
        "INSERT INTO TABLE %s VALUES (1, 1111), (1, 2222), (2, 3333), (2, 4444), (3, 5555), (3, 6666) ",
        tableName);
    long snapshotId1 = validationCatalog.loadTable(tableIdent).currentSnapshot().snapshotId();
    sql("INSERT INTO %s VALUES (4, 7777), (5, 8888)", tableName);
    long snapshotId2 = validationCatalog.loadTable(tableIdent).currentSnapshot().snapshotId();
    sql("INSERT INTO %s VALUES (6, -7777), (7, 8888)", tableName);
    long snapshotId3 = validationCatalog.loadTable(tableIdent).currentSnapshot().snapshotId();
    sql("INSERT INTO %s VALUES (8, 7777), (9, 9999)", tableName);
    // Bounded range: only rows appended between snapshot 2 and snapshot 3.
    Dataset<Row> pushdownDs =
        spark
            .read()
            .format("iceberg")
            .option(SparkReadOptions.START_SNAPSHOT_ID, snapshotId2)
            .option(SparkReadOptions.END_SNAPSHOT_ID, snapshotId3)
            .load(tableName)
            .agg(functions.min("data"), functions.max("data"), functions.count("data"));
    String explain1 = pushdownDs.queryExecution().explainString(ExplainMode.fromString("simple"));
    assertThat(explain1).contains("LocalTableScan", "min(data)", "max(data)", "count(data)");
    List<Object[]> expected1 = Lists.newArrayList();
    expected1.add(new Object[] {-7777, 8888, 2L});
    assertEquals("min/max/count push down", expected1, rowsToJava(pushdownDs.collectAsList()));
    // Unbounded range: everything appended after snapshot 1.
    Dataset<Row> unboundedPushdownDs =
        spark
            .read()
            .format("iceberg")
            .option(SparkReadOptions.START_SNAPSHOT_ID, snapshotId1)
            .load(tableName)
            .agg(functions.min("data"), functions.max("data"), functions.count("data"));
    String explain2 =
        unboundedPushdownDs.queryExecution().explainString(ExplainMode.fromString("simple"));
    assertThat(explain2).contains("LocalTableScan", "min(data)", "max(data)", "count(data)");
    List<Object[]> expected2 = Lists.newArrayList();
    expected2.add(new Object[] {-7777, 9999, 6L});
    assertEquals(
        "min/max/count push down", expected2, rowsToJava(unboundedPushdownDs.collectAsList()));
  }
}
|
apache/iceberg | 35,106 | spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/sql/TestAggregatePushDown.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.iceberg.spark.sql;
import static org.assertj.core.api.Assertions.assertThat;
import java.math.BigDecimal;
import java.sql.Date;
import java.sql.Timestamp;
import java.util.Arrays;
import java.util.List;
import java.util.Locale;
import org.apache.iceberg.CatalogUtil;
import org.apache.iceberg.ParameterizedTestExtension;
import org.apache.iceberg.TableProperties;
import org.apache.iceberg.catalog.Namespace;
import org.apache.iceberg.exceptions.AlreadyExistsException;
import org.apache.iceberg.hive.HiveCatalog;
import org.apache.iceberg.hive.TestHiveMetastore;
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap;
import org.apache.iceberg.relocated.com.google.common.collect.Lists;
import org.apache.iceberg.spark.CatalogTestBase;
import org.apache.iceberg.spark.SparkReadOptions;
import org.apache.iceberg.spark.TestBase;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.execution.ExplainMode;
import org.apache.spark.sql.functions;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.TestTemplate;
import org.junit.jupiter.api.extension.ExtendWith;
@ExtendWith(ParameterizedTestExtension.class)
public class TestAggregatePushDown extends CatalogTestBase {
  @BeforeAll
  public static void startMetastoreAndSpark() {
    // Replaces the shared Spark session with one that has aggregate
    // push-down enabled and Hive support, backed by a fresh in-process
    // Hive metastore and a HiveCatalog.
    TestBase.metastore = new TestHiveMetastore();
    metastore.start();
    TestBase.hiveConf = metastore.hiveConf();
    TestBase.spark.close();
    TestBase.spark =
        SparkSession.builder()
            .master("local[2]")
            .config("spark.sql.iceberg.aggregate_pushdown", "true")
            .enableHiveSupport()
            .getOrCreate();
    TestBase.catalog =
        (HiveCatalog)
            CatalogUtil.loadCatalog(
                HiveCatalog.class.getName(), "hive", ImmutableMap.of(), hiveConf);
    try {
      catalog.createNamespace(Namespace.of("default"));
    } catch (AlreadyExistsException ignored) {
      // the default namespace already exists. ignore the create error
    }
  }
  @AfterEach
  public void removeTables() {
    // Drop the per-test table so every test starts from a clean catalog.
    sql("DROP TABLE IF EXISTS %s", tableName);
  }
  @TestTemplate
  public void testDifferentDataTypesAggregatePushDownInPartitionedTable() {
    // Partitioned-table variant of the shared data-type push-down scenario.
    testDifferentDataTypesAggregatePushDown(true);
  }
  @TestTemplate
  public void testDifferentDataTypesAggregatePushDownInNonPartitionedTable() {
    // Unpartitioned-table variant of the shared data-type push-down scenario.
    testDifferentDataTypesAggregatePushDown(false);
  }
  @SuppressWarnings("checkstyle:CyclomaticComplexity")
  private void testDifferentDataTypesAggregatePushDown(boolean hasPartitionCol) {
    // Shared scenario: verifies min/max/count push-down across long, int,
    // boolean, float, double, decimal and binary columns, with or without
    // a partition column on id.
    String createTable;
    if (hasPartitionCol) {
      createTable =
          "CREATE TABLE %s (id LONG, int_data INT, boolean_data BOOLEAN, float_data FLOAT, double_data DOUBLE, "
              + "decimal_data DECIMAL(14, 2), binary_data binary) USING iceberg PARTITIONED BY (id)";
    } else {
      createTable =
          "CREATE TABLE %s (id LONG, int_data INT, boolean_data BOOLEAN, float_data FLOAT, double_data DOUBLE, "
              + "decimal_data DECIMAL(14, 2), binary_data binary) USING iceberg";
    }
    sql(createTable, tableName);
    sql(
        "INSERT INTO TABLE %s VALUES "
            + "(1, null, false, null, null, 11.11, X'1111'),"
            + " (1, null, true, 2.222, 2.222222, 22.22, X'2222'),"
            + " (2, 33, false, 3.333, 3.333333, 33.33, X'3333'),"
            + " (2, 44, true, null, 4.444444, 44.44, X'4444'),"
            + " (3, 55, false, 5.555, 5.555555, 55.55, X'5555'),"
            + " (3, null, true, null, 6.666666, 66.66, null) ",
        tableName);
    String select =
        "SELECT count(*), max(id), min(id), count(id), "
            + "max(int_data), min(int_data), count(int_data), "
            + "max(boolean_data), min(boolean_data), count(boolean_data), "
            + "max(float_data), min(float_data), count(float_data), "
            + "max(double_data), min(double_data), count(double_data), "
            + "max(decimal_data), min(decimal_data), count(decimal_data), "
            + "max(binary_data), min(binary_data), count(binary_data) FROM %s";
    List<Object[]> explain = sql("EXPLAIN " + select, tableName);
    String explainString = explain.get(0)[0].toString().toLowerCase(Locale.ROOT);
    boolean explainContainsPushDownAggregates = false;
    // Every aggregate must appear in the plan for push-down to be confirmed.
    if (explainString.contains("count(*)")
        && explainString.contains("max(id)")
        && explainString.contains("min(id)")
        && explainString.contains("count(id)")
        && explainString.contains("max(int_data)")
        && explainString.contains("min(int_data)")
        && explainString.contains("count(int_data)")
        && explainString.contains("max(boolean_data)")
        && explainString.contains("min(boolean_data)")
        && explainString.contains("count(boolean_data)")
        && explainString.contains("max(float_data)")
        && explainString.contains("min(float_data)")
        && explainString.contains("count(float_data)")
        && explainString.contains("max(double_data)")
        && explainString.contains("min(double_data)")
        && explainString.contains("count(double_data)")
        && explainString.contains("max(decimal_data)")
        && explainString.contains("min(decimal_data)")
        && explainString.contains("count(decimal_data)")
        && explainString.contains("max(binary_data)")
        && explainString.contains("min(binary_data)")
        && explainString.contains("count(binary_data)")) {
      explainContainsPushDownAggregates = true;
    }
    assertThat(explainContainsPushDownAggregates)
        .as("explain should contain the pushed down aggregates")
        .isTrue();
    List<Object[]> actual = sql(select, tableName);
    List<Object[]> expected = Lists.newArrayList();
    expected.add(
        new Object[] {
          6L,
          3L,
          1L,
          6L,
          55,
          33,
          3L,
          true,
          false,
          6L,
          5.555f,
          2.222f,
          3L,
          6.666666,
          2.222222,
          5L,
          new BigDecimal("66.66"),
          new BigDecimal("11.11"),
          6L,
          new byte[] {85, 85},
          new byte[] {17, 17},
          5L
        });
    assertEquals("min/max/count push down", expected, actual);
  }
  @TestTemplate
  public void testDateAndTimestampWithPartition() {
    // Verifies min/max/count push-down for DATE and TIMESTAMP columns
    // (each containing nulls) on a partitioned table.
    sql(
        "CREATE TABLE %s (id bigint, data string, d date, ts timestamp) USING iceberg PARTITIONED BY (id)",
        tableName);
    sql(
        "INSERT INTO %s VALUES (1, '1', date('2021-11-10'), null),"
            + "(1, '2', date('2021-11-11'), timestamp('2021-11-11 22:22:22')), "
            + "(2, '3', date('2021-11-12'), timestamp('2021-11-12 22:22:22')), "
            + "(2, '4', date('2021-11-13'), timestamp('2021-11-13 22:22:22')), "
            + "(3, '5', null, timestamp('2021-11-14 22:22:22')), "
            + "(3, '6', date('2021-11-14'), null)",
        tableName);
    String select = "SELECT max(d), min(d), count(d), max(ts), min(ts), count(ts) FROM %s";
    List<Object[]> explain = sql("EXPLAIN " + select, tableName);
    String explainString = explain.get(0)[0].toString().toLowerCase(Locale.ROOT);
    boolean explainContainsPushDownAggregates = false;
    if (explainString.contains("max(d)")
        && explainString.contains("min(d)")
        && explainString.contains("count(d)")
        && explainString.contains("max(ts)")
        && explainString.contains("min(ts)")
        && explainString.contains("count(ts)")) {
      explainContainsPushDownAggregates = true;
    }
    assertThat(explainContainsPushDownAggregates)
        .as("explain should contain the pushed down aggregates")
        .isTrue();
    List<Object[]> actual = sql(select, tableName);
    List<Object[]> expected = Lists.newArrayList();
    expected.add(
        new Object[] {
          Date.valueOf("2021-11-14"),
          Date.valueOf("2021-11-10"),
          5L,
          Timestamp.valueOf("2021-11-14 22:22:22.0"),
          Timestamp.valueOf("2021-11-11 22:22:22.0"),
          4L
        });
    assertEquals("min/max/count push down", expected, actual);
  }
@TestTemplate
public void testAggregateNotPushDownIfOneCantPushDown() {
sql("CREATE TABLE %s (id LONG, data DOUBLE) USING iceberg", tableName);
sql(
"INSERT INTO TABLE %s VALUES (1, 1111), (1, 2222), (2, 3333), (2, 4444), (3, 5555), (3, 6666) ",
tableName);
String select = "SELECT COUNT(data), SUM(data) FROM %s";
List<Object[]> explain = sql("EXPLAIN " + select, tableName);
String explainString = explain.get(0)[0].toString().toLowerCase(Locale.ROOT);
boolean explainContainsPushDownAggregates = false;
if (explainString.contains("count(data)")) {
explainContainsPushDownAggregates = true;
}
assertThat(explainContainsPushDownAggregates)
.as("explain should not contain the pushed down aggregates")
.isFalse();
List<Object[]> actual = sql(select, tableName);
List<Object[]> expected = Lists.newArrayList();
expected.add(new Object[] {6L, 23331.0});
assertEquals("expected and actual should equal", expected, actual);
}
  @TestTemplate
  public void testAggregatePushDownWithMetricsMode() {
    // Verifies that the per-column metrics mode controls push-down:
    // `none` blocks push-down, `counts` allows COUNT but not MAX/MIN.
    sql("CREATE TABLE %s (id LONG, data DOUBLE) USING iceberg", tableName);
    sql(
        "ALTER TABLE %s SET TBLPROPERTIES('%s' '%s')",
        tableName, TableProperties.DEFAULT_WRITE_METRICS_MODE, "none");
    sql(
        "ALTER TABLE %s SET TBLPROPERTIES('%s' '%s')",
        tableName, TableProperties.METRICS_MODE_COLUMN_CONF_PREFIX + "id", "counts");
    sql(
        "ALTER TABLE %s SET TBLPROPERTIES('%s' '%s')",
        tableName, TableProperties.METRICS_MODE_COLUMN_CONF_PREFIX + "data", "none");
    sql(
        "INSERT INTO TABLE %s VALUES (1, 1111), (1, 2222), (2, 3333), (2, 4444), (3, 5555), (3, 6666)",
        tableName);
    String select1 = "SELECT COUNT(data) FROM %s";
    List<Object[]> explain1 = sql("EXPLAIN " + select1, tableName);
    String explainString1 = explain1.get(0)[0].toString().toLowerCase(Locale.ROOT);
    boolean explainContainsPushDownAggregates = false;
    if (explainString1.contains("count(data)")) {
      explainContainsPushDownAggregates = true;
    }
    // count(data) is not pushed down because the metrics mode is `none`
    assertThat(explainContainsPushDownAggregates)
        .as("explain should not contain the pushed down aggregates")
        .isFalse();
    List<Object[]> actual1 = sql(select1, tableName);
    List<Object[]> expected1 = Lists.newArrayList();
    expected1.add(new Object[] {6L});
    assertEquals("expected and actual should equal", expected1, actual1);
    String select2 = "SELECT COUNT(id) FROM %s";
    List<Object[]> explain2 = sql("EXPLAIN " + select2, tableName);
    String explainString2 = explain2.get(0)[0].toString().toLowerCase(Locale.ROOT);
    // flag is still false here (asserted above), so no reset is needed
    if (explainString2.contains("count(id)")) {
      explainContainsPushDownAggregates = true;
    }
    // count(id) is pushed down because the metrics mode is `counts`
    assertThat(explainContainsPushDownAggregates)
        .as("explain should contain the pushed down aggregates")
        .isTrue();
    List<Object[]> actual2 = sql(select2, tableName);
    List<Object[]> expected2 = Lists.newArrayList();
    expected2.add(new Object[] {6L});
    assertEquals("expected and actual should equal", expected2, actual2);
    String select3 = "SELECT COUNT(id), MAX(id) FROM %s";
    explainContainsPushDownAggregates = false;
    List<Object[]> explain3 = sql("EXPLAIN " + select3, tableName);
    String explainString3 = explain3.get(0)[0].toString().toLowerCase(Locale.ROOT);
    if (explainString3.contains("count(id)")) {
      explainContainsPushDownAggregates = true;
    }
    // COUNT(id), MAX(id) are not pushed down because MAX(id) is not pushed down (metrics mode is
    // `counts`)
    assertThat(explainContainsPushDownAggregates)
        .as("explain should not contain the pushed down aggregates")
        .isFalse();
    List<Object[]> actual3 = sql(select3, tableName);
    List<Object[]> expected3 = Lists.newArrayList();
    expected3.add(new Object[] {6L, 3L});
    assertEquals("expected and actual should equal", expected3, actual3);
  }
  @TestTemplate
  public void testAggregateNotPushDownForStringType() {
    // With truncate(16) metrics, string MAX cannot be pushed down (the
    // metric is a truncated bound); COUNT still can, and `full` metrics
    // re-enable MAX push-down.
    sql("CREATE TABLE %s (id LONG, data STRING) USING iceberg", tableName);
    sql(
        "INSERT INTO TABLE %s VALUES (1, '1111'), (1, '2222'), (2, '3333'), (2, '4444'), (3, '5555'), (3, '6666') ",
        tableName);
    sql(
        "ALTER TABLE %s SET TBLPROPERTIES('%s' '%s')",
        tableName, TableProperties.DEFAULT_WRITE_METRICS_MODE, "truncate(16)");
    String select1 = "SELECT MAX(id), MAX(data) FROM %s";
    List<Object[]> explain1 = sql("EXPLAIN " + select1, tableName);
    String explainString1 = explain1.get(0)[0].toString().toLowerCase(Locale.ROOT);
    boolean explainContainsPushDownAggregates = false;
    if (explainString1.contains("max(id)")) {
      explainContainsPushDownAggregates = true;
    }
    assertThat(explainContainsPushDownAggregates)
        .as("explain should not contain the pushed down aggregates")
        .isFalse();
    List<Object[]> actual1 = sql(select1, tableName);
    List<Object[]> expected1 = Lists.newArrayList();
    expected1.add(new Object[] {3L, "6666"});
    assertEquals("expected and actual should equal", expected1, actual1);
    String select2 = "SELECT COUNT(data) FROM %s";
    List<Object[]> explain2 = sql("EXPLAIN " + select2, tableName);
    String explainString2 = explain2.get(0)[0].toString().toLowerCase(Locale.ROOT);
    // flag is still false here (asserted above), so no reset is needed
    if (explainString2.contains("count(data)")) {
      explainContainsPushDownAggregates = true;
    }
    assertThat(explainContainsPushDownAggregates)
        .as("explain should contain the pushed down aggregates")
        .isTrue();
    List<Object[]> actual2 = sql(select2, tableName);
    List<Object[]> expected2 = Lists.newArrayList();
    expected2.add(new Object[] {6L});
    assertEquals("expected and actual should equal", expected2, actual2);
    explainContainsPushDownAggregates = false;
    sql(
        "ALTER TABLE %s SET TBLPROPERTIES('%s' '%s')",
        tableName, TableProperties.DEFAULT_WRITE_METRICS_MODE, "full");
    String select3 = "SELECT count(data), max(data) FROM %s";
    List<Object[]> explain3 = sql("EXPLAIN " + select3, tableName);
    String explainString3 = explain3.get(0)[0].toString().toLowerCase(Locale.ROOT);
    if (explainString3.contains("count(data)") && explainString3.contains("max(data)")) {
      explainContainsPushDownAggregates = true;
    }
    assertThat(explainContainsPushDownAggregates)
        .as("explain should contain the pushed down aggregates")
        .isTrue();
    List<Object[]> actual3 = sql(select3, tableName);
    List<Object[]> expected3 = Lists.newArrayList();
    expected3.add(new Object[] {6L, "6666"});
    assertEquals("expected and actual should equal", expected3, actual3);
  }
  @TestTemplate
  public void testAggregatePushDownWithDataFilter() {
    // Data (non-partition) filter: aggregates should NOT be pushed down.
    testAggregatePushDownWithFilter(false);
  }
  @TestTemplate
  public void testAggregatePushDownWithPartitionFilter() {
    // Partition-only filter: aggregates SHOULD be pushed down.
    testAggregatePushDownWithFilter(true);
  }
private void testAggregatePushDownWithFilter(boolean partitionFilerOnly) {
String createTable;
if (!partitionFilerOnly) {
createTable = "CREATE TABLE %s (id LONG, data INT) USING iceberg";
} else {
createTable = "CREATE TABLE %s (id LONG, data INT) USING iceberg PARTITIONED BY (id)";
}
sql(createTable, tableName);
sql(
"INSERT INTO TABLE %s VALUES"
+ " (1, 11),"
+ " (1, 22),"
+ " (2, 33),"
+ " (2, 44),"
+ " (3, 55),"
+ " (3, 66) ",
tableName);
String select = "SELECT MIN(data) FROM %s WHERE id > 1";
List<Object[]> explain = sql("EXPLAIN " + select, tableName);
String explainString = explain.get(0)[0].toString().toLowerCase(Locale.ROOT);
boolean explainContainsPushDownAggregates = false;
if (explainString.contains("min(data)")) {
explainContainsPushDownAggregates = true;
}
if (!partitionFilerOnly) {
// Filters are not completely pushed down, we can't push down aggregates
assertThat(explainContainsPushDownAggregates)
.as("explain should not contain the pushed down aggregates")
.isFalse();
} else {
// Filters are not completely pushed down, we can push down aggregates
assertThat(explainContainsPushDownAggregates)
.as("explain should contain the pushed down aggregates")
.isTrue();
}
List<Object[]> actual = sql(select, tableName);
List<Object[]> expected = Lists.newArrayList();
expected.add(new Object[] {33});
assertEquals("expected and actual should equal", expected, actual);
}
@TestTemplate
public void testAggregateWithComplexType() {
sql("CREATE TABLE %s (id INT, complex STRUCT<c1:INT,c2:STRING>) USING iceberg", tableName);
sql(
"INSERT INTO TABLE %s VALUES (1, named_struct(\"c1\", 3, \"c2\", \"v1\")),"
+ "(2, named_struct(\"c1\", 2, \"c2\", \"v2\")), (3, null)",
tableName);
String select1 = "SELECT count(complex), count(id) FROM %s";
List<Object[]> explain = sql("EXPLAIN " + select1, tableName);
String explainString = explain.get(0)[0].toString().toLowerCase(Locale.ROOT);
boolean explainContainsPushDownAggregates = false;
if (explainString.contains("count(complex)")) {
explainContainsPushDownAggregates = true;
}
assertThat(explainContainsPushDownAggregates)
.as("count not pushed down for complex types")
.isFalse();
List<Object[]> actual = sql(select1, tableName);
List<Object[]> expected = Lists.newArrayList();
expected.add(new Object[] {2L, 3L});
assertEquals("count not push down", actual, expected);
String select2 = "SELECT max(complex) FROM %s";
explain = sql("EXPLAIN " + select2, tableName);
explainString = explain.get(0)[0].toString().toLowerCase(Locale.ROOT);
explainContainsPushDownAggregates = false;
if (explainString.contains("max(complex)")) {
explainContainsPushDownAggregates = true;
}
assertThat(explainContainsPushDownAggregates)
.as("max not pushed down for complex types")
.isFalse();
}
  @TestTemplate
  public void testAggregationPushdownStructInteger() {
    // A numeric leaf field inside a struct supports push-down even though
    // the struct itself does not.
    sql("CREATE TABLE %s (id BIGINT, struct_with_int STRUCT<c1:BIGINT>) USING iceberg", tableName);
    sql("INSERT INTO TABLE %s VALUES (1, named_struct(\"c1\", NULL))", tableName);
    sql("INSERT INTO TABLE %s VALUES (2, named_struct(\"c1\", 2))", tableName);
    sql("INSERT INTO TABLE %s VALUES (3, named_struct(\"c1\", 3))", tableName);
    String query = "SELECT COUNT(%s), MAX(%s), MIN(%s) FROM %s";
    String aggField = "struct_with_int.c1";
    assertAggregates(sql(query, aggField, aggField, aggField, tableName), 2L, 3L, 2L);
    assertExplainContains(
        sql("EXPLAIN " + query, aggField, aggField, aggField, tableName),
        "count(struct_with_int.c1)",
        "max(struct_with_int.c1)",
        "min(struct_with_int.c1)");
  }
  @TestTemplate
  public void testAggregationPushdownNestedStruct() {
    // Push-down also works for a leaf field nested four structs deep.
    sql(
        "CREATE TABLE %s (id BIGINT, struct_with_int STRUCT<c1:STRUCT<c2:STRUCT<c3:STRUCT<c4:BIGINT>>>>) USING iceberg",
        tableName);
    sql(
        "INSERT INTO TABLE %s VALUES (1, named_struct(\"c1\", named_struct(\"c2\", named_struct(\"c3\", named_struct(\"c4\", NULL)))))",
        tableName);
    sql(
        "INSERT INTO TABLE %s VALUES (2, named_struct(\"c1\", named_struct(\"c2\", named_struct(\"c3\", named_struct(\"c4\", 2)))))",
        tableName);
    sql(
        "INSERT INTO TABLE %s VALUES (3, named_struct(\"c1\", named_struct(\"c2\", named_struct(\"c3\", named_struct(\"c4\", 3)))))",
        tableName);
    String query = "SELECT COUNT(%s), MAX(%s), MIN(%s) FROM %s";
    String aggField = "struct_with_int.c1.c2.c3.c4";
    assertAggregates(sql(query, aggField, aggField, aggField, tableName), 2L, 3L, 2L);
    assertExplainContains(
        sql("EXPLAIN " + query, aggField, aggField, aggField, tableName),
        "count(struct_with_int.c1.c2.c3.c4)",
        "max(struct_with_int.c1.c2.c3.c4)",
        "min(struct_with_int.c1.c2.c3.c4)");
  }
  @TestTemplate
  public void testAggregationPushdownStructTimestamp() {
    // Push-down for a TIMESTAMP leaf field inside a struct.
    sql(
        "CREATE TABLE %s (id BIGINT, struct_with_ts STRUCT<c1:TIMESTAMP>) USING iceberg",
        tableName);
    sql("INSERT INTO TABLE %s VALUES (1, named_struct(\"c1\", NULL))", tableName);
    sql(
        "INSERT INTO TABLE %s VALUES (2, named_struct(\"c1\", timestamp('2023-01-30T22:22:22Z')))",
        tableName);
    sql(
        "INSERT INTO TABLE %s VALUES (3, named_struct(\"c1\", timestamp('2023-01-30T22:23:23Z')))",
        tableName);
    String query = "SELECT COUNT(%s), MAX(%s), MIN(%s) FROM %s";
    String aggField = "struct_with_ts.c1";
    // Epoch millis correspond to the two inserted timestamps above.
    assertAggregates(
        sql(query, aggField, aggField, aggField, tableName),
        2L,
        new Timestamp(1675117403000L),
        new Timestamp(1675117342000L));
    assertExplainContains(
        sql("EXPLAIN " + query, aggField, aggField, aggField, tableName),
        "count(struct_with_ts.c1)",
        "max(struct_with_ts.c1)",
        "min(struct_with_ts.c1)");
  }
  @TestTemplate
  public void testAggregationPushdownOnBucketedColumn() {
    // Push-down on a column used as a bucket partition source; the null id
    // row is excluded from count/max/min.
    sql(
        "CREATE TABLE %s (id BIGINT, struct_with_int STRUCT<c1:INT>) USING iceberg PARTITIONED BY (bucket(8, id))",
        tableName);
    sql("INSERT INTO TABLE %s VALUES (1, named_struct(\"c1\", NULL))", tableName);
    sql("INSERT INTO TABLE %s VALUES (null, named_struct(\"c1\", 2))", tableName);
    sql("INSERT INTO TABLE %s VALUES (2, named_struct(\"c1\", 3))", tableName);
    String query = "SELECT COUNT(%s), MAX(%s), MIN(%s) FROM %s";
    String aggField = "id";
    assertAggregates(sql(query, aggField, aggField, aggField, tableName), 2L, 2L, 1L);
    assertExplainContains(
        sql("EXPLAIN " + query, aggField, aggField, aggField, tableName),
        "count(id)",
        "max(id)",
        "min(id)");
  }
  private void assertAggregates(
      List<Object[]> actual, Object expectedCount, Object expectedMax, Object expectedMin) {
    // Checks the single row returned by a "COUNT, MAX, MIN" query.
    Object actualCount = actual.get(0)[0];
    Object actualMax = actual.get(0)[1];
    Object actualMin = actual.get(0)[2];
    assertThat(actualCount).as("Expected and actual count should equal").isEqualTo(expectedCount);
    assertThat(actualMax).as("Expected and actual max should equal").isEqualTo(expectedMax);
    assertThat(actualMin).as("Expected and actual min should equal").isEqualTo(expectedMin);
  }
  private void assertExplainContains(List<Object[]> explain, String... expectedFragments) {
    // Asserts each fragment appears in the (lower-cased) EXPLAIN output.
    String explainString = explain.get(0)[0].toString().toLowerCase(Locale.ROOT);
    Arrays.stream(expectedFragments)
        .forEach(
            fragment ->
                assertThat(explainString)
                    .as("Expected to find plan fragment in explain plan")
                    .contains(fragment));
  }
  @TestTemplate
  public void testAggregatePushDownInDeleteCopyOnWrite() {
    // Verifies that min/max/count aggregates are still pushed down after a
    // copy-on-write DELETE has rewritten the affected data files.
    sql("CREATE TABLE %s (id LONG, data INT) USING iceberg", tableName);
    sql(
        "INSERT INTO TABLE %s VALUES (1, 1111), (1, 2222), (2, 3333), (2, 4444), (3, 5555), (3, 6666) ",
        tableName);
    sql("DELETE FROM %s WHERE data = 1111", tableName);
    String select = "SELECT max(data), min(data), count(data) FROM %s";
    List<Object[]> explain = sql("EXPLAIN " + select, tableName);
    String explainString = explain.get(0)[0].toString().toLowerCase(Locale.ROOT);
    boolean explainContainsPushDownAggregates = false;
    if (explainString.contains("max(data)")
        && explainString.contains("min(data)")
        && explainString.contains("count(data)")) {
      explainContainsPushDownAggregates = true;
    }
    assertThat(explainContainsPushDownAggregates)
        .as("min/max/count pushed down for deleted")
        .isTrue();
    List<Object[]> actual = sql(select, tableName);
    List<Object[]> expected = Lists.newArrayList();
    expected.add(new Object[] {6666, 2222, 5L});
    assertEquals("min/max/count push down", expected, actual);
  }
  @TestTemplate
  public void testAggregatePushDownForTimeTravel() {
    // Verifies count(id) push-down both for a VERSION AS OF (time-travel)
    // query against an older snapshot and for the current-table query.
    sql("CREATE TABLE %s (id LONG, data INT) USING iceberg", tableName);
    sql(
        "INSERT INTO TABLE %s VALUES (1, 1111), (1, 2222), (2, 3333), (2, 4444), (3, 5555), (3, 6666) ",
        tableName);
    long snapshotId = validationCatalog.loadTable(tableIdent).currentSnapshot().snapshotId();
    // Baseline counts taken before and after the second insert.
    List<Object[]> expected1 = sql("SELECT count(id) FROM %s", tableName);
    sql("INSERT INTO %s VALUES (4, 7777), (5, 8888)", tableName);
    List<Object[]> expected2 = sql("SELECT count(id) FROM %s", tableName);
    List<Object[]> explain1 =
        sql("EXPLAIN SELECT count(id) FROM %s VERSION AS OF %s", tableName, snapshotId);
    String explainString1 = explain1.get(0)[0].toString().toLowerCase(Locale.ROOT);
    boolean explainContainsPushDownAggregates1 = false;
    if (explainString1.contains("count(id)")) {
      explainContainsPushDownAggregates1 = true;
    }
    assertThat(explainContainsPushDownAggregates1).as("count pushed down").isTrue();
    List<Object[]> actual1 =
        sql("SELECT count(id) FROM %s VERSION AS OF %s", tableName, snapshotId);
    assertEquals("count push down", expected1, actual1);
    List<Object[]> explain2 = sql("EXPLAIN SELECT count(id) FROM %s", tableName);
    String explainString2 = explain2.get(0)[0].toString().toLowerCase(Locale.ROOT);
    boolean explainContainsPushDownAggregates2 = false;
    if (explainString2.contains("count(id)")) {
      explainContainsPushDownAggregates2 = true;
    }
    assertThat(explainContainsPushDownAggregates2).as("count pushed down").isTrue();
    List<Object[]> actual2 = sql("SELECT count(id) FROM %s", tableName);
    assertEquals("count push down", expected2, actual2);
  }
@TestTemplate
public void testAllNull() {
  // min/max/count push-down must stay correct when every value of the
  // aggregated column is NULL: count(data) is 0, min/max are NULL.
  sql("CREATE TABLE %s (id int, data int) USING iceberg PARTITIONED BY (id)", tableName);
  sql(
      "INSERT INTO %s VALUES (1, null),"
          + "(1, null), "
          + "(2, null), "
          + "(2, null), "
          + "(3, null), "
          + "(3, null)",
      tableName);
  String select = "SELECT count(*), max(data), min(data), count(data) FROM %s";
  List<Object[]> explain = sql("EXPLAIN " + select, tableName);
  String explainString = explain.get(0)[0].toString().toLowerCase(Locale.ROOT);
  // Idiom: derive the flag directly from the condition instead of the
  // assign-in-if pattern; push-down is expected for an all-NULL int column.
  boolean explainContainsPushDownAggregates =
      explainString.contains("max(data)")
          && explainString.contains("min(data)")
          && explainString.contains("count(data)");
  assertThat(explainContainsPushDownAggregates)
      .as("explain should contain the pushed down aggregates")
      .isTrue();
  List<Object[]> actual = sql(select, tableName);
  List<Object[]> expected = Lists.newArrayList();
  // count(*) counts NULL rows; count(data) does not.
  expected.add(new Object[] {6L, null, null, 0L});
  assertEquals("min/max/count push down", expected, actual);
}
@TestTemplate
public void testAllNaN() {
  // For a float column containing only NaN values, min/max/count must NOT be
  // pushed down (the plan must not mention any of the aggregates), yet the
  // query result must still be correct.
  sql("CREATE TABLE %s (id int, data float) USING iceberg PARTITIONED BY (id)", tableName);
  sql(
      "INSERT INTO %s VALUES (1, float('nan')),"
          + "(1, float('nan')), "
          + "(2, float('nan')), "
          + "(2, float('nan')), "
          + "(3, float('nan')), "
          + "(3, float('nan'))",
      tableName);
  String select = "SELECT count(*), max(data), min(data), count(data) FROM %s";
  List<Object[]> explain = sql("EXPLAIN " + select, tableName);
  String explainString = explain.get(0)[0].toString().toLowerCase(Locale.ROOT);
  // Idiom: derive the flag directly from the condition; note the OR — the
  // test fails if ANY aggregate leaked into the plan.
  boolean explainContainsPushDownAggregates =
      explainString.contains("max(data)")
          || explainString.contains("min(data)")
          || explainString.contains("count(data)");
  assertThat(explainContainsPushDownAggregates)
      .as("explain should not contain the pushed down aggregates")
      .isFalse();
  List<Object[]> actual = sql(select, tableName);
  List<Object[]> expected = Lists.newArrayList();
  expected.add(new Object[] {6L, Float.NaN, Float.NaN, 6L});
  assertEquals("expected and actual should equal", expected, actual);
}
@TestTemplate
public void testNaN() {
  // A float column with a mix of NaN and real values: aggregates must not be
  // pushed down, and the computed min must ignore NaN ordering pitfalls
  // (max is NaN, min is the smallest real value).
  sql("CREATE TABLE %s (id int, data float) USING iceberg PARTITIONED BY (id)", tableName);
  sql(
      "INSERT INTO %s VALUES (1, float('nan')),"
          + "(1, float('nan')), "
          + "(2, 2), "
          + "(2, float('nan')), "
          + "(3, float('nan')), "
          + "(3, 1)",
      tableName);
  String select = "SELECT count(*), max(data), min(data), count(data) FROM %s";
  List<Object[]> explain = sql("EXPLAIN " + select, tableName);
  String explainString = explain.get(0)[0].toString().toLowerCase(Locale.ROOT);
  // Idiom: derive the flag directly from the condition; OR — any aggregate
  // in the plan means an unwanted push-down happened.
  boolean explainContainsPushDownAggregates =
      explainString.contains("max(data)")
          || explainString.contains("min(data)")
          || explainString.contains("count(data)");
  assertThat(explainContainsPushDownAggregates)
      .as("explain should not contain the pushed down aggregates")
      .isFalse();
  List<Object[]> actual = sql(select, tableName);
  List<Object[]> expected = Lists.newArrayList();
  expected.add(new Object[] {6L, Float.NaN, 1.0F, 6L});
  assertEquals("expected and actual should equal", expected, actual);
}
@TestTemplate
public void testInfinity() {
  // Infinity (unlike NaN) orders normally, so min/max/count push-down is
  // expected for all three columns: all -inf, all +inf, and mixed
  // finite/±inf values.
  sql(
      "CREATE TABLE %s (id int, data1 float, data2 double, data3 double) USING iceberg PARTITIONED BY (id)",
      tableName);
  sql(
      "INSERT INTO %s VALUES (1, float('-infinity'), double('infinity'), 1.23), "
          + "(1, float('-infinity'), double('infinity'), -1.23), "
          + "(1, float('-infinity'), double('infinity'), double('infinity')), "
          + "(1, float('-infinity'), double('infinity'), 2.23), "
          + "(1, float('-infinity'), double('infinity'), double('-infinity')), "
          + "(1, float('-infinity'), double('infinity'), -2.23)",
      tableName);
  String select =
      "SELECT count(*), max(data1), min(data1), count(data1), max(data2), min(data2), count(data2), max(data3), min(data3), count(data3) FROM %s";
  List<Object[]> explain = sql("EXPLAIN " + select, tableName);
  String explainString = explain.get(0)[0].toString().toLowerCase(Locale.ROOT);
  // Idiom: all nine aggregates must appear in the plan; derive the flag
  // directly from the conjunction instead of the assign-in-if pattern.
  boolean explainContainsPushDownAggregates =
      explainString.contains("max(data1)")
          && explainString.contains("min(data1)")
          && explainString.contains("count(data1)")
          && explainString.contains("max(data2)")
          && explainString.contains("min(data2)")
          && explainString.contains("count(data2)")
          && explainString.contains("max(data3)")
          && explainString.contains("min(data3)")
          && explainString.contains("count(data3)");
  assertThat(explainContainsPushDownAggregates)
      .as("explain should contain the pushed down aggregates")
      .isTrue();
  List<Object[]> actual = sql(select, tableName);
  List<Object[]> expected = Lists.newArrayList();
  expected.add(
      new Object[] {
        6L,
        Float.NEGATIVE_INFINITY,
        Float.NEGATIVE_INFINITY,
        6L,
        Double.POSITIVE_INFINITY,
        Double.POSITIVE_INFINITY,
        6L,
        Double.POSITIVE_INFINITY,
        Double.NEGATIVE_INFINITY,
        6L
      });
  assertEquals("min/max/count push down", expected, actual);
}
@TestTemplate
public void testAggregatePushDownForIncrementalScan() {
  // Verifies min/max/count push-down for incremental reads driven by
  // START_SNAPSHOT_ID / END_SNAPSHOT_ID options: both a bounded range
  // (snapshot2..snapshot3] and an unbounded range (snapshot1..current].
  sql("CREATE TABLE %s (id LONG, data INT) USING iceberg", tableName);
  sql(
      "INSERT INTO TABLE %s VALUES (1, 1111), (1, 2222), (2, 3333), (2, 4444), (3, 5555), (3, 6666) ",
      tableName);
  long snapshotId1 = validationCatalog.loadTable(tableIdent).currentSnapshot().snapshotId();
  sql("INSERT INTO %s VALUES (4, 7777), (5, 8888)", tableName);
  long snapshotId2 = validationCatalog.loadTable(tableIdent).currentSnapshot().snapshotId();
  sql("INSERT INTO %s VALUES (6, -7777), (7, 8888)", tableName);
  long snapshotId3 = validationCatalog.loadTable(tableIdent).currentSnapshot().snapshotId();
  sql("INSERT INTO %s VALUES (8, 7777), (9, 9999)", tableName);

  // Bounded incremental scan: only the (6, -7777), (7, 8888) append is read.
  Dataset<Row> pushdownDs =
      spark
          .read()
          .format("iceberg")
          .option(SparkReadOptions.START_SNAPSHOT_ID, snapshotId2)
          .option(SparkReadOptions.END_SNAPSHOT_ID, snapshotId3)
          .load(tableName)
          .agg(functions.min("data"), functions.max("data"), functions.count("data"));
  // "LocalTableScan" in the plan means the aggregate was answered from
  // metadata rather than a full scan.
  String explain1 = pushdownDs.queryExecution().explainString(ExplainMode.fromString("simple"));
  assertThat(explain1).contains("LocalTableScan", "min(data)", "max(data)", "count(data)");
  List<Object[]> expected1 = Lists.newArrayList();
  expected1.add(new Object[] {-7777, 8888, 2L});
  assertEquals("min/max/count push down", expected1, rowsToJava(pushdownDs.collectAsList()));

  // Unbounded incremental scan: everything appended after snapshot1.
  Dataset<Row> unboundedPushdownDs =
      spark
          .read()
          .format("iceberg")
          .option(SparkReadOptions.START_SNAPSHOT_ID, snapshotId1)
          .load(tableName)
          .agg(functions.min("data"), functions.max("data"), functions.count("data"));
  String explain2 =
      unboundedPushdownDs.queryExecution().explainString(ExplainMode.fromString("simple"));
  assertThat(explain2).contains("LocalTableScan", "min(data)", "max(data)", "count(data)");
  List<Object[]> expected2 = Lists.newArrayList();
  expected2.add(new Object[] {-7777, 9999, 6L});
  assertEquals(
      "min/max/count push down", expected2, rowsToJava(unboundedPushdownDs.collectAsList()));
}
}
|
apache/james-project | 35,280 | third-party/rspamd/src/test/java/org/apache/james/rspamd/route/FeedMessageRouteTest.java | /****************************************************************
* Licensed to the Apache Software Foundation (ASF) under one *
* or more contributor license agreements. See the NOTICE file *
* distributed with this work for additional information *
* regarding copyright ownership. The ASF licenses this file *
* to you under the Apache License, Version 2.0 (the *
* "License"); you may not use this file except in compliance *
* with the License. You may obtain a copy of the License at *
* *
* http://www.apache.org/licenses/LICENSE-2.0 *
* *
* Unless required by applicable law or agreed to in writing, *
* software distributed under the License is distributed on an *
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY *
* KIND, either express or implied. See the License for the *
* specific language governing permissions and limitations *
* under the License. *
****************************************************************/
package org.apache.james.rspamd.route;
import static io.restassured.RestAssured.given;
import static io.restassured.http.ContentType.JSON;
import static org.apache.james.rspamd.RspamdExtension.PASSWORD;
import static org.apache.james.rspamd.route.FeedMessageRoute.BASE_PATH;
import static org.apache.james.rspamd.task.FeedHamToRspamdTaskTest.ALICE_INBOX_MAILBOX;
import static org.apache.james.rspamd.task.FeedHamToRspamdTaskTest.BOB_INBOX_MAILBOX;
import static org.apache.james.rspamd.task.FeedSpamToRspamdTaskTest.ALICE;
import static org.apache.james.rspamd.task.FeedSpamToRspamdTaskTest.ALICE_SPAM_MAILBOX;
import static org.apache.james.rspamd.task.FeedSpamToRspamdTaskTest.BOB;
import static org.apache.james.rspamd.task.FeedSpamToRspamdTaskTest.BOB_SPAM_MAILBOX;
import static org.apache.james.rspamd.task.FeedSpamToRspamdTaskTest.NOW;
import static org.apache.james.rspamd.task.FeedSpamToRspamdTaskTest.ONE_DAY_IN_SECOND;
import static org.apache.james.rspamd.task.FeedSpamToRspamdTaskTest.THREE_DAYS_IN_SECOND;
import static org.apache.james.rspamd.task.FeedSpamToRspamdTaskTest.TWO_DAYS_IN_SECOND;
import static org.eclipse.jetty.http.HttpStatus.BAD_REQUEST_400;
import static org.hamcrest.Matchers.allOf;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.lessThan;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.mock;
import java.io.ByteArrayInputStream;
import java.time.temporal.ChronoUnit;
import java.util.Date;
import java.util.Optional;
import java.util.stream.IntStream;
import jakarta.mail.Flags;
import org.apache.james.domainlist.api.DomainList;
import org.apache.james.json.DTOConverter;
import org.apache.james.junit.categories.Unstable;
import org.apache.james.mailbox.MailboxSession;
import org.apache.james.mailbox.MessageIdManager;
import org.apache.james.mailbox.exception.MailboxException;
import org.apache.james.mailbox.inmemory.InMemoryMailboxManager;
import org.apache.james.mailbox.inmemory.manager.InMemoryIntegrationResources;
import org.apache.james.mailbox.model.MailboxPath;
import org.apache.james.mailbox.store.MailboxSessionMapperFactory;
import org.apache.james.rspamd.RspamdExtension;
import org.apache.james.rspamd.client.RspamdClientConfiguration;
import org.apache.james.rspamd.client.RspamdHttpClient;
import org.apache.james.rspamd.task.FeedHamToRspamdTask;
import org.apache.james.rspamd.task.FeedHamToRspamdTaskAdditionalInformationDTO;
import org.apache.james.rspamd.task.FeedSpamToRspamdTask;
import org.apache.james.rspamd.task.FeedSpamToRspamdTaskAdditionalInformationDTO;
import org.apache.james.rspamd.task.RunningOptions;
import org.apache.james.task.Hostname;
import org.apache.james.task.MemoryTaskManager;
import org.apache.james.user.api.UsersRepository;
import org.apache.james.user.memory.MemoryUsersRepository;
import org.apache.james.util.DurationParser;
import org.apache.james.utils.UpdatableTickingClock;
import org.apache.james.webadmin.WebAdminServer;
import org.apache.james.webadmin.WebAdminUtils;
import org.apache.james.webadmin.routes.TasksRoutes;
import org.apache.james.webadmin.utils.JsonTransformer;
import org.eclipse.jetty.http.HttpStatus;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Nested;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.RegisterExtension;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;
import org.mockito.Mockito;
import com.github.fge.lambdas.Throwing;
import io.restassured.RestAssured;
@Tag(Unstable.TAG)
public class FeedMessageRouteTest {
// Shared Rspamd container/extension for the whole test class.
@RegisterExtension
static RspamdExtension rspamdExtension = new RspamdExtension();

private InMemoryMailboxManager mailboxManager;
private WebAdminServer webAdminServer;
private MemoryTaskManager taskManager;
// Controllable clock so tests can position "now" relative to message dates.
private UpdatableTickingClock clock;
@BeforeEach
void setUp() throws Exception {
  // Wire an in-memory mailbox stack, two provisioned users (BOB/ALICE) with
  // spam + inbox mailboxes, and a WebAdmin server exposing the feed-message
  // route plus the task routes used to await task completion.
  clock = new UpdatableTickingClock(NOW);
  InMemoryIntegrationResources inMemoryIntegrationResources = InMemoryIntegrationResources.builder()
      .preProvisionnedFakeAuthenticator()
      .fakeAuthorizator()
      .inVmEventBus()
      .defaultAnnotationLimits()
      .defaultMessageParser()
      .scanningSearchIndex()
      .noPreDeletionHooks()
      .storeQuotaManager()
      .updatableClock(clock)
      .build();
  mailboxManager = inMemoryIntegrationResources.getMailboxManager();
  // Domain list stub: accept any domain so user provisioning never fails.
  DomainList domainList = mock(DomainList.class);
  Mockito.when(domainList.containsDomain(any())).thenReturn(true);
  UsersRepository usersRepository = MemoryUsersRepository.withVirtualHosting(domainList);
  usersRepository.addUser(BOB, "anyPassword");
  usersRepository.addUser(ALICE, "anyPassword");
  mailboxManager.createMailbox(BOB_SPAM_MAILBOX, mailboxManager.createSystemSession(BOB));
  mailboxManager.createMailbox(BOB_INBOX_MAILBOX, mailboxManager.createSystemSession(BOB));
  mailboxManager.createMailbox(ALICE_SPAM_MAILBOX, mailboxManager.createSystemSession(ALICE));
  mailboxManager.createMailbox(ALICE_INBOX_MAILBOX, mailboxManager.createSystemSession(ALICE));
  taskManager = new MemoryTaskManager(new Hostname("foo"));
  JsonTransformer jsonTransformer = new JsonTransformer();
  // Rspamd client points at the container started by the class-level extension.
  RspamdClientConfiguration rspamdConfiguration = new RspamdClientConfiguration(rspamdExtension.getBaseUrl(), PASSWORD, Optional.empty());
  RspamdHttpClient client = new RspamdHttpClient(rspamdConfiguration);
  MessageIdManager messageIdManager = inMemoryIntegrationResources.getMessageIdManager();
  MailboxSessionMapperFactory mapperFactory = mailboxManager.getMapperFactory();
  // Task routes need the DTO modules to serialize both task types' details.
  TasksRoutes tasksRoutes = new TasksRoutes(taskManager, jsonTransformer, DTOConverter.of(FeedSpamToRspamdTaskAdditionalInformationDTO.SERIALIZATION_MODULE,
      FeedHamToRspamdTaskAdditionalInformationDTO.SERIALIZATION_MODULE));
  FeedMessageRoute feedMessageRoute = new FeedMessageRoute(taskManager, mailboxManager, usersRepository, client, jsonTransformer, clock,
      messageIdManager, mapperFactory, rspamdConfiguration);
  webAdminServer = WebAdminUtils.createWebAdminServer(feedMessageRoute, tasksRoutes).start();
  // All RestAssured calls default to the feed-message base path.
  RestAssured.requestSpecification = WebAdminUtils.buildRequestSpecification(webAdminServer)
      .setBasePath(BASE_PATH)
      .build();
}
@AfterEach
void stop() {
  // Tear down the HTTP server first, then the task manager it depends on.
  webAdminServer.destroy();
  taskManager.stop();
}
// Appends a single message with random content to the given mailbox, using a
// system session for the mailbox owner and the supplied internal date.
// NOTE(review): getBytes() uses the platform default charset; content is ASCII
// here so it is harmless, but StandardCharsets.UTF_8 would be safer — confirm.
private void appendMessage(MailboxPath mailboxPath, Date internalDate) throws MailboxException {
  MailboxSession session = mailboxManager.createSystemSession(mailboxPath.getUser());
  mailboxManager.getMailbox(mailboxPath, session)
      .appendMessage(new ByteArrayInputStream(String.format("random content %4.3f", Math.random()).getBytes()),
          internalDate,
          session,
          true,
          new Flags());
}
// Tests for action=reportSpam: task reporting, running-option plumbing
// (period, messagesPerSecond, samplingProbability, classifiedAsSpam) and
// parameter validation errors.
@Nested
class FeedSpam {
  @Test
  void taskShouldReportAllSpamMessagesOfAllUsersByDefault() throws MailboxException {
    appendMessage(BOB_SPAM_MAILBOX, Date.from(NOW));
    appendMessage(ALICE_SPAM_MAILBOX, Date.from(NOW));
    String taskId = given()
        .queryParam("action", "reportSpam")
        .post()
        .jsonPath()
        .get("taskId");
    given()
        .basePath(TasksRoutes.BASE)
    .when()
        .get(taskId + "/await")
    .then()
        .body("status", is("completed"))
        .body("additionalInformation.type", is(FeedSpamToRspamdTask.TASK_TYPE.asString()))
        .body("additionalInformation.spamMessageCount", is(2))
        .body("additionalInformation.reportedSpamMessageCount", is(2))
        .body("additionalInformation.errorCount", is(0))
        .body("additionalInformation.runningOptions.messagesPerSecond", is(RunningOptions.DEFAULT_MESSAGES_PER_SECOND))
        .body("additionalInformation.runningOptions.rspamdTimeoutInSeconds", is((int) RunningOptions.DEFAULT_RSPAMD_TIMEOUT.toSeconds()))
        .body("additionalInformation.runningOptions.periodInSecond", is(nullValue()))
        .body("additionalInformation.runningOptions.samplingProbability", is((float) RunningOptions.DEFAULT_SAMPLING_PROBABILITY));
  }

  @Test
  void taskShouldDisplayClassifiedAsSpamRunningOption() throws MailboxException {
    appendMessage(BOB_SPAM_MAILBOX, Date.from(NOW));
    appendMessage(ALICE_SPAM_MAILBOX, Date.from(NOW));
    String taskId = given()
        .queryParam("action", "reportSpam")
        .queryParam("classifiedAsSpam", "false")
        .post()
        .jsonPath()
        .get("taskId");
    given()
        .basePath(TasksRoutes.BASE)
    .when()
        .get(taskId + "/await")
    .then()
        .body("status", is("completed"))
        .body("additionalInformation.type", is(FeedSpamToRspamdTask.TASK_TYPE.asString()))
        .body("additionalInformation.spamMessageCount", is(2))
        .body("additionalInformation.reportedSpamMessageCount", is(2))
        .body("additionalInformation.errorCount", is(0))
        .body("additionalInformation.runningOptions.classifiedAsSpam", is(false))
        .body("additionalInformation.runningOptions.messagesPerSecond", is(RunningOptions.DEFAULT_MESSAGES_PER_SECOND))
        .body("additionalInformation.runningOptions.periodInSecond", is(nullValue()))
        .body("additionalInformation.runningOptions.samplingProbability", is((float) RunningOptions.DEFAULT_SAMPLING_PROBABILITY));
  }

  @Test
  void taskShouldCountAndReportOnlyMailInPeriod() throws MailboxException {
    // Only the one-day-old message falls inside the two-day period;
    // the clock is moved so the stored messages carry the intended dates.
    clock.setInstant(NOW.minusSeconds(THREE_DAYS_IN_SECOND));
    appendMessage(BOB_SPAM_MAILBOX, Date.from(NOW.minusSeconds(THREE_DAYS_IN_SECOND)));
    clock.setInstant(NOW.minusSeconds(ONE_DAY_IN_SECOND));
    appendMessage(ALICE_SPAM_MAILBOX, Date.from(NOW.minusSeconds(ONE_DAY_IN_SECOND)));
    String taskId = given()
        .queryParam("action", "reportSpam")
        .queryParam("period", TWO_DAYS_IN_SECOND)
        .post()
        .jsonPath()
        .get("taskId");
    given()
        .basePath(TasksRoutes.BASE)
    .when()
        .get(taskId + "/await")
    .then()
        .body("status", is("completed"))
        .body("additionalInformation.type", is(FeedSpamToRspamdTask.TASK_TYPE.asString()))
        .body("additionalInformation.spamMessageCount", is(1))
        .body("additionalInformation.reportedSpamMessageCount", is(1))
        .body("additionalInformation.errorCount", is(0))
        .body("additionalInformation.runningOptions.messagesPerSecond", is(RunningOptions.DEFAULT_MESSAGES_PER_SECOND))
        .body("additionalInformation.runningOptions.periodInSecond", is(172800))
        .body("additionalInformation.runningOptions.samplingProbability", is((float) RunningOptions.DEFAULT_SAMPLING_PROBABILITY));
  }

  @Test
  void taskWithAverageSamplingProbabilityShouldNotReportAllSpamMessages() {
    // Probabilistic: with p=0.5 over 10 messages, expect strictly between
    // 0 and 10 reported (the class is tagged Unstable for this reason).
    // NOTE(review): the lambda parameter `any` shadows Mockito's static
    // import `any` — consider renaming it.
    IntStream.range(0, 10)
        .forEach(Throwing.intConsumer(any -> appendMessage(BOB_SPAM_MAILBOX, Date.from(NOW.minusSeconds(ONE_DAY_IN_SECOND)))));
    String taskId = given()
        .queryParam("action", "reportSpam")
        .queryParam("samplingProbability", 0.5)
        .post()
        .jsonPath()
        .get("taskId");
    given()
        .basePath(TasksRoutes.BASE)
    .when()
        .get(taskId + "/await")
    .then()
        .body("status", is("completed"))
        .body("additionalInformation.type", is(FeedSpamToRspamdTask.TASK_TYPE.asString()))
        .body("additionalInformation.spamMessageCount", is(10))
        .body("additionalInformation.reportedSpamMessageCount", is(allOf(greaterThan(0), lessThan(10))))
        .body("additionalInformation.errorCount", is(0))
        .body("additionalInformation.runningOptions.messagesPerSecond", is(RunningOptions.DEFAULT_MESSAGES_PER_SECOND))
        .body("additionalInformation.runningOptions.periodInSecond", is(nullValue()))
        .body("additionalInformation.runningOptions.samplingProbability", is(0.5F));
  }

  @Test
  void feedMessageShouldReturnErrorWhenInvalidAction() {
    given()
        .queryParam("action", "invalid")
        .post()
    .then()
        .statusCode(BAD_REQUEST_400)
        .contentType(JSON)
        .body("statusCode", is(BAD_REQUEST_400))
        .body("type", is("InvalidArgument"))
        .body("message", is("Invalid arguments supplied in the user request"))
        .body("details", is("'action' is missing or must be 'reportSpam' or 'reportHam'"));
  }

  @Test
  void feedMessageTaskShouldReturnErrorWhenMissingAction() {
    given()
        .post()
    .then()
        .statusCode(BAD_REQUEST_400)
        .contentType(JSON)
        .body("statusCode", is(BAD_REQUEST_400))
        .body("type", is("InvalidArgument"))
        .body("message", is("Invalid arguments supplied in the user request"))
        .body("details", is("'action' is missing or must be 'reportSpam' or 'reportHam'"));
  }

  @Test
  void feedSpamShouldReturnTaskId() {
    given()
        .queryParam("action", "reportSpam")
        .post()
    .then()
        .statusCode(HttpStatus.CREATED_201)
        .body("taskId", notNullValue());
  }

  @Test
  void feedSpamShouldReturnDetail() {
    // With no messages stored, the task completes with all counters at 0
    // and default running options.
    String taskId = given()
        .queryParam("action", "reportSpam")
        .post()
        .jsonPath()
        .get("taskId");
    given()
        .basePath(TasksRoutes.BASE)
    .when()
        .get(taskId + "/await")
    .then()
        .body("status", is("completed"))
        .body("taskId", is(notNullValue()))
        .body("type", is(FeedSpamToRspamdTask.TASK_TYPE.asString()))
        .body("startedDate", is(notNullValue()))
        .body("submitDate", is(notNullValue()))
        .body("completedDate", is(notNullValue()))
        .body("additionalInformation.type", is(FeedSpamToRspamdTask.TASK_TYPE.asString()))
        .body("additionalInformation.timestamp", is(notNullValue()))
        .body("additionalInformation.spamMessageCount", is(0))
        .body("additionalInformation.reportedSpamMessageCount", is(0))
        .body("additionalInformation.errorCount", is(0))
        .body("additionalInformation.runningOptions.messagesPerSecond", is(RunningOptions.DEFAULT_MESSAGES_PER_SECOND))
        .body("additionalInformation.runningOptions.periodInSecond", is(nullValue()))
        .body("additionalInformation.runningOptions.samplingProbability", is((float) RunningOptions.DEFAULT_SAMPLING_PROBABILITY));
  }

  // Period accepts raw seconds and duration strings ("1d", "1day", ...).
  @ParameterizedTest
  @ValueSource(strings = {"3600", "3600 seconds", "1d", "1day"})
  void feedSpamShouldAcceptPeriodParam(String period) {
    String taskId = given()
        .queryParam("action", "reportSpam")
        .queryParam("period", period)
        .post()
        .jsonPath()
        .get("taskId");
    given()
        .basePath(TasksRoutes.BASE)
    .when()
        .get(taskId + "/await")
    .then()
        .body("additionalInformation.runningOptions.periodInSecond", is((int) DurationParser.parse(period, ChronoUnit.SECONDS).toSeconds()));
  }

  @ParameterizedTest
  @ValueSource(strings = {"-1", "0", "1 t"})
  void feedSpamShouldReturnErrorWhenPeriodInvalid(String period) {
    given()
        .queryParam("action", "reportSpam")
        .queryParam("period", period)
        .post()
    .then()
        .statusCode(BAD_REQUEST_400)
        .contentType(JSON)
        .body("statusCode", is(BAD_REQUEST_400))
        .body("type", is("InvalidArgument"))
        .body("message", is("Invalid arguments supplied in the user request"));
  }

  @Test
  void feedSpamShouldAcceptMessagesPerSecondParam() {
    String taskId = given()
        .queryParam("action", "reportSpam")
        .queryParam("messagesPerSecond", 20)
        .post()
        .jsonPath()
        .get("taskId");
    given()
        .basePath(TasksRoutes.BASE)
    .when()
        .get(taskId + "/await")
    .then()
        .body("additionalInformation.runningOptions.messagesPerSecond", is(20));
  }

  @ParameterizedTest
  @ValueSource(doubles = {-1, -0.1, 1.1})
  void feedSpamShouldReturnErrorWhenMessagesPerSecondInvalid(double messagesPerSecond) {
    given()
        .queryParam("action", "reportSpam")
        .queryParam("messagesPerSecond", messagesPerSecond)
        .post()
    .then()
        .statusCode(BAD_REQUEST_400)
        .contentType(JSON)
        .body("statusCode", is(BAD_REQUEST_400))
        .body("type", is("InvalidArgument"))
        .body("message", is("Invalid arguments supplied in the user request"))
        .body("details", containsString("messagesPerSecond"));
  }

  @Test
  void feedSpamShouldAcceptSamplingProbabilityParam() {
    String taskId = given()
        .queryParam("action", "reportSpam")
        .queryParam("samplingProbability", 0.8)
        .post()
        .jsonPath()
        .get("taskId");
    given()
        .basePath(TasksRoutes.BASE)
    .when()
        .get(taskId + "/await")
    .then()
        .body("additionalInformation.runningOptions.samplingProbability", is(0.8F));
  }

  // Sampling probability must be within [0, 1].
  @ParameterizedTest
  @ValueSource(doubles = {-1, -0.1, 1.1})
  void feedSpamShouldReturnErrorWhenSamplingProbabilityInvalid(double samplingProbability) {
    given()
        .queryParam("action", "reportSpam")
        .queryParam("samplingProbability", samplingProbability)
        .post()
    .then()
        .statusCode(BAD_REQUEST_400)
        .contentType(JSON)
        .body("statusCode", is(BAD_REQUEST_400))
        .body("type", is("InvalidArgument"))
        .body("message", is("Invalid arguments supplied in the user request"))
        .body("details", containsString("samplingProbability"));
  }
}
// Tests for action=reportHam — mirrors FeedSpam: task reporting,
// running-option plumbing and parameter validation errors.
@Nested
class FeedHam {
  @Test
  void taskShouldReportAllHamMessagesOfAllUsersByDefault() throws MailboxException {
    appendMessage(BOB_INBOX_MAILBOX, Date.from(NOW));
    appendMessage(ALICE_INBOX_MAILBOX, Date.from(NOW));
    String taskId = given()
        .queryParam("action", "reportHam")
        .post()
        .jsonPath()
        .get("taskId");
    given()
        .basePath(TasksRoutes.BASE)
    .when()
        .get(taskId + "/await")
    .then()
        .body("status", is("completed"))
        .body("additionalInformation.type", is(FeedHamToRspamdTask.TASK_TYPE.asString()))
        .body("additionalInformation.hamMessageCount", is(2))
        .body("additionalInformation.reportedHamMessageCount", is(2))
        .body("additionalInformation.errorCount", is(0))
        .body("additionalInformation.runningOptions.rspamdTimeoutInSeconds", is((int) RunningOptions.DEFAULT_RSPAMD_TIMEOUT.toSeconds()))
        .body("additionalInformation.runningOptions.messagesPerSecond", is(RunningOptions.DEFAULT_MESSAGES_PER_SECOND))
        .body("additionalInformation.runningOptions.periodInSecond", is(nullValue()))
        .body("additionalInformation.runningOptions.samplingProbability", is((float) RunningOptions.DEFAULT_SAMPLING_PROBABILITY));
  }

  @Test
  void taskShouldDisplayClassifiedAsSpamRunningOption() throws MailboxException {
    appendMessage(BOB_INBOX_MAILBOX, Date.from(NOW));
    appendMessage(ALICE_INBOX_MAILBOX, Date.from(NOW));
    String taskId = given()
        .queryParam("action", "reportHam")
        .queryParam("classifiedAsSpam", "true")
        .post()
        .jsonPath()
        .get("taskId");
    given()
        .basePath(TasksRoutes.BASE)
    .when()
        .get(taskId + "/await")
    .then()
        .body("status", is("completed"))
        .body("additionalInformation.type", is(FeedHamToRspamdTask.TASK_TYPE.asString()))
        .body("additionalInformation.hamMessageCount", is(2))
        .body("additionalInformation.reportedHamMessageCount", is(2))
        .body("additionalInformation.errorCount", is(0))
        .body("additionalInformation.runningOptions.classifiedAsSpam", is(true))
        .body("additionalInformation.runningOptions.messagesPerSecond", is(RunningOptions.DEFAULT_MESSAGES_PER_SECOND))
        .body("additionalInformation.runningOptions.periodInSecond", is(nullValue()))
        .body("additionalInformation.runningOptions.samplingProbability", is((float) RunningOptions.DEFAULT_SAMPLING_PROBABILITY));
  }

  @Test
  void taskShouldCountAndReportOnlyMailInPeriod() throws MailboxException {
    // Only the one-day-old message falls inside the two-day period.
    clock.setInstant(NOW.minusSeconds(THREE_DAYS_IN_SECOND));
    appendMessage(BOB_INBOX_MAILBOX, Date.from(NOW.minusSeconds(THREE_DAYS_IN_SECOND)));
    clock.setInstant(NOW.minusSeconds(ONE_DAY_IN_SECOND));
    appendMessage(ALICE_INBOX_MAILBOX, Date.from(NOW.minusSeconds(ONE_DAY_IN_SECOND)));
    String taskId = given()
        .queryParam("action", "reportHam")
        .queryParam("period", TWO_DAYS_IN_SECOND)
        .post()
        .jsonPath()
        .get("taskId");
    given()
        .basePath(TasksRoutes.BASE)
    .when()
        .get(taskId + "/await")
    .then()
        .body("status", is("completed"))
        .body("additionalInformation.type", is(FeedHamToRspamdTask.TASK_TYPE.asString()))
        .body("additionalInformation.hamMessageCount", is(1))
        .body("additionalInformation.reportedHamMessageCount", is(1))
        .body("additionalInformation.errorCount", is(0))
        .body("additionalInformation.runningOptions.messagesPerSecond", is(RunningOptions.DEFAULT_MESSAGES_PER_SECOND))
        .body("additionalInformation.runningOptions.periodInSecond", is(172800))
        .body("additionalInformation.runningOptions.samplingProbability", is((float) RunningOptions.DEFAULT_SAMPLING_PROBABILITY));
  }

  @Test
  void taskWithAverageSamplingProbabilityShouldNotReportAllHamMessages() {
    // Probabilistic: expect strictly between 0 and 10 reported with p=0.5.
    // NOTE(review): the lambda parameter `any` shadows Mockito's static
    // import `any` — consider renaming it.
    IntStream.range(0, 10)
        .forEach(Throwing.intConsumer(any -> appendMessage(BOB_INBOX_MAILBOX, Date.from(NOW.minusSeconds(ONE_DAY_IN_SECOND)))));
    String taskId = given()
        .queryParam("action", "reportHam")
        .queryParam("samplingProbability", 0.5)
        .post()
        .jsonPath()
        .get("taskId");
    given()
        .basePath(TasksRoutes.BASE)
    .when()
        .get(taskId + "/await")
    .then()
        .body("status", is("completed"))
        .body("additionalInformation.type", is(FeedHamToRspamdTask.TASK_TYPE.asString()))
        .body("additionalInformation.hamMessageCount", is(10))
        .body("additionalInformation.reportedHamMessageCount", is(allOf(greaterThan(0), lessThan(10))))
        .body("additionalInformation.errorCount", is(0))
        .body("additionalInformation.runningOptions.messagesPerSecond", is(RunningOptions.DEFAULT_MESSAGES_PER_SECOND))
        .body("additionalInformation.runningOptions.periodInSecond", is(nullValue()))
        .body("additionalInformation.runningOptions.samplingProbability", is(0.5F));
  }

  @Test
  void feedMessageShouldReturnErrorWhenInvalidAction() {
    given()
        .queryParam("action", "invalid")
        .post()
    .then()
        .statusCode(BAD_REQUEST_400)
        .contentType(JSON)
        .body("statusCode", is(BAD_REQUEST_400))
        .body("type", is("InvalidArgument"))
        .body("message", is("Invalid arguments supplied in the user request"))
        .body("details", is("'action' is missing or must be 'reportSpam' or 'reportHam'"));
  }

  @Test
  void feedMessageTaskShouldReturnErrorWhenMissingAction() {
    given()
        .post()
    .then()
        .statusCode(BAD_REQUEST_400)
        .contentType(JSON)
        .body("statusCode", is(BAD_REQUEST_400))
        .body("type", is("InvalidArgument"))
        .body("message", is("Invalid arguments supplied in the user request"))
        .body("details", is("'action' is missing or must be 'reportSpam' or 'reportHam'"));
  }

  @Test
  void feedHamShouldReturnTaskId() {
    given()
        .queryParam("action", "reportHam")
        .post()
    .then()
        .statusCode(HttpStatus.CREATED_201)
        .body("taskId", notNullValue());
  }

  @Test
  void feedHamShouldReturnDetail() {
    // With no messages stored, the task completes with all counters at 0
    // and default running options.
    String taskId = given()
        .queryParam("action", "reportHam")
        .post()
        .jsonPath()
        .get("taskId");
    given()
        .basePath(TasksRoutes.BASE)
    .when()
        .get(taskId + "/await")
    .then()
        .body("status", is("completed"))
        .body("taskId", is(notNullValue()))
        .body("type", is(FeedHamToRspamdTask.TASK_TYPE.asString()))
        .body("startedDate", is(notNullValue()))
        .body("submitDate", is(notNullValue()))
        .body("completedDate", is(notNullValue()))
        .body("additionalInformation.type", is(FeedHamToRspamdTask.TASK_TYPE.asString()))
        .body("additionalInformation.timestamp", is(notNullValue()))
        .body("additionalInformation.hamMessageCount", is(0))
        .body("additionalInformation.reportedHamMessageCount", is(0))
        .body("additionalInformation.errorCount", is(0))
        .body("additionalInformation.runningOptions.messagesPerSecond", is(RunningOptions.DEFAULT_MESSAGES_PER_SECOND))
        .body("additionalInformation.runningOptions.periodInSecond", is(nullValue()))
        .body("additionalInformation.runningOptions.samplingProbability", is((float) RunningOptions.DEFAULT_SAMPLING_PROBABILITY));
  }

  // Period accepts raw seconds and duration strings ("1d", "1day", ...).
  @ParameterizedTest
  @ValueSource(strings = {"3600", "3600 seconds", "1d", "1day"})
  void feedHamShouldAcceptPeriodParam(String period) {
    String taskId = given()
        .queryParam("action", "reportHam")
        .queryParam("period", period)
        .post()
        .jsonPath()
        .get("taskId");
    given()
        .basePath(TasksRoutes.BASE)
    .when()
        .get(taskId + "/await")
    .then()
        .body("additionalInformation.runningOptions.periodInSecond", is((int) DurationParser.parse(period, ChronoUnit.SECONDS).toSeconds()));
  }

  @ParameterizedTest
  @ValueSource(strings = {"-1", "0", "1 t"})
  void feedHamShouldReturnErrorWhenPeriodInvalid(String period) {
    given()
        .queryParam("action", "reportHam")
        .queryParam("period", period)
        .post()
    .then()
        .statusCode(BAD_REQUEST_400)
        .contentType(JSON)
        .body("statusCode", is(BAD_REQUEST_400))
        .body("type", is("InvalidArgument"))
        .body("message", is("Invalid arguments supplied in the user request"));
  }

  @Test
  void feedHamShouldAcceptMessagesPerSecondParam() {
    String taskId = given()
        .queryParam("action", "reportHam")
        .queryParam("messagesPerSecond", 20)
        .post()
        .jsonPath()
        .get("taskId");
    given()
        .basePath(TasksRoutes.BASE)
    .when()
        .get(taskId + "/await")
    .then()
        .body("additionalInformation.runningOptions.messagesPerSecond", is(20));
  }

  @ParameterizedTest
  @ValueSource(doubles = {-1, -0.1, 1.1})
  void feedHamShouldReturnErrorWhenMessagesPerSecondInvalid(double messagesPerSecond) {
    given()
        .queryParam("action", "reportHam")
        .queryParam("messagesPerSecond", messagesPerSecond)
        .post()
    .then()
        .statusCode(BAD_REQUEST_400)
        .contentType(JSON)
        .body("statusCode", is(BAD_REQUEST_400))
        .body("type", is("InvalidArgument"))
        .body("message", is("Invalid arguments supplied in the user request"))
        .body("details", containsString("messagesPerSecond"));
  }

  @Test
  void feedHamShouldAcceptSamplingProbabilityParam() {
    String taskId = given()
        .queryParam("action", "reportHam")
        .queryParam("samplingProbability", 0.8)
        .post()
        .jsonPath()
        .get("taskId");
    given()
        .basePath(TasksRoutes.BASE)
    .when()
        .get(taskId + "/await")
    .then()
        .body("additionalInformation.runningOptions.samplingProbability", is(0.8F));
  }

  // Sampling probability must be within [0, 1].
  @ParameterizedTest
  @ValueSource(doubles = {-1, -0.1, 1.1})
  void feedHamShouldReturnErrorWhenSamplingProbabilityInvalid(double samplingProbability) {
    given()
        .queryParam("action", "reportHam")
        .queryParam("samplingProbability", samplingProbability)
        .post()
    .then()
        .statusCode(BAD_REQUEST_400)
        .contentType(JSON)
        .body("statusCode", is(BAD_REQUEST_400))
        .body("type", is("InvalidArgument"))
        .body("message", is("Invalid arguments supplied in the user request"))
        .body("details", containsString("samplingProbability"));
  }
}
@ParameterizedTest
@ValueSource(doubles = {-1, -0.1, 1.1})
void routeShouldReturnErrorWhenRspamdTimeoutInvalid(double rspamdTimeout) {
given()
.queryParam("action", "reportSpam")
.queryParam("rspamdTimeout", rspamdTimeout)
.post()
.then()
.statusCode(BAD_REQUEST_400)
.contentType(JSON)
.body("statusCode", is(BAD_REQUEST_400))
.body("type", is("InvalidArgument"))
.body("message", is("Invalid arguments supplied in the user request"));
}
@Test
void taskShouldDisplayRspamdTimeoutAsSpamRunningOption() {
String taskId = given()
.queryParam("action", "reportSpam")
.queryParam("rspamdTimeout", 13)
.post()
.jsonPath()
.get("taskId");
given()
.basePath(TasksRoutes.BASE)
.when()
.get(taskId + "/await")
.then()
.body("status", is("completed"))
.body("additionalInformation.type", is(FeedSpamToRspamdTask.TASK_TYPE.asString()))
.body("additionalInformation.spamMessageCount", is(0))
.body("additionalInformation.reportedSpamMessageCount", is(0))
.body("additionalInformation.errorCount", is(0))
.body("additionalInformation.runningOptions.messagesPerSecond", is(RunningOptions.DEFAULT_MESSAGES_PER_SECOND))
.body("additionalInformation.runningOptions.rspamdTimeoutInSeconds", is(13))
.body("additionalInformation.runningOptions.periodInSecond", is(nullValue()))
.body("additionalInformation.runningOptions.samplingProbability", is((float) RunningOptions.DEFAULT_SAMPLING_PROBABILITY));
}
}
|
googleapis/google-cloud-java | 35,468 | java-securesourcemanager/proto-google-cloud-securesourcemanager-v1/src/main/java/com/google/cloud/securesourcemanager/v1/ListIssuesResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/securesourcemanager/v1/secure_source_manager.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.securesourcemanager.v1;
/**
*
*
* <pre>
* The response to list issues.
* </pre>
*
* Protobuf type {@code google.cloud.securesourcemanager.v1.ListIssuesResponse}
*/
public final class ListIssuesResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.securesourcemanager.v1.ListIssuesResponse)
ListIssuesResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListIssuesResponse.newBuilder() to construct.
private ListIssuesResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListIssuesResponse() {
issues_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListIssuesResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.securesourcemanager.v1.SecureSourceManagerProto
.internal_static_google_cloud_securesourcemanager_v1_ListIssuesResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.securesourcemanager.v1.SecureSourceManagerProto
.internal_static_google_cloud_securesourcemanager_v1_ListIssuesResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.securesourcemanager.v1.ListIssuesResponse.class,
com.google.cloud.securesourcemanager.v1.ListIssuesResponse.Builder.class);
}
public static final int ISSUES_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.securesourcemanager.v1.Issue> issues_;
/**
*
*
* <pre>
* The list of issues.
* </pre>
*
* <code>repeated .google.cloud.securesourcemanager.v1.Issue issues = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.securesourcemanager.v1.Issue> getIssuesList() {
return issues_;
}
/**
*
*
* <pre>
* The list of issues.
* </pre>
*
* <code>repeated .google.cloud.securesourcemanager.v1.Issue issues = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.securesourcemanager.v1.IssueOrBuilder>
getIssuesOrBuilderList() {
return issues_;
}
/**
*
*
* <pre>
* The list of issues.
* </pre>
*
* <code>repeated .google.cloud.securesourcemanager.v1.Issue issues = 1;</code>
*/
@java.lang.Override
public int getIssuesCount() {
return issues_.size();
}
/**
*
*
* <pre>
* The list of issues.
* </pre>
*
* <code>repeated .google.cloud.securesourcemanager.v1.Issue issues = 1;</code>
*/
@java.lang.Override
public com.google.cloud.securesourcemanager.v1.Issue getIssues(int index) {
return issues_.get(index);
}
/**
*
*
* <pre>
* The list of issues.
* </pre>
*
* <code>repeated .google.cloud.securesourcemanager.v1.Issue issues = 1;</code>
*/
@java.lang.Override
public com.google.cloud.securesourcemanager.v1.IssueOrBuilder getIssuesOrBuilder(int index) {
return issues_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < issues_.size(); i++) {
output.writeMessage(1, issues_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < issues_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, issues_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.securesourcemanager.v1.ListIssuesResponse)) {
return super.equals(obj);
}
com.google.cloud.securesourcemanager.v1.ListIssuesResponse other =
(com.google.cloud.securesourcemanager.v1.ListIssuesResponse) obj;
if (!getIssuesList().equals(other.getIssuesList())) return false;
if (!getNextPageToken().equals(other.getNextPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getIssuesCount() > 0) {
hash = (37 * hash) + ISSUES_FIELD_NUMBER;
hash = (53 * hash) + getIssuesList().hashCode();
}
hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getNextPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.securesourcemanager.v1.ListIssuesResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.securesourcemanager.v1.ListIssuesResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.securesourcemanager.v1.ListIssuesResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.securesourcemanager.v1.ListIssuesResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.securesourcemanager.v1.ListIssuesResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.securesourcemanager.v1.ListIssuesResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.securesourcemanager.v1.ListIssuesResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.securesourcemanager.v1.ListIssuesResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.securesourcemanager.v1.ListIssuesResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.securesourcemanager.v1.ListIssuesResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.securesourcemanager.v1.ListIssuesResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.securesourcemanager.v1.ListIssuesResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.securesourcemanager.v1.ListIssuesResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* The response to list issues.
* </pre>
*
* Protobuf type {@code google.cloud.securesourcemanager.v1.ListIssuesResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.securesourcemanager.v1.ListIssuesResponse)
com.google.cloud.securesourcemanager.v1.ListIssuesResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.securesourcemanager.v1.SecureSourceManagerProto
.internal_static_google_cloud_securesourcemanager_v1_ListIssuesResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.securesourcemanager.v1.SecureSourceManagerProto
.internal_static_google_cloud_securesourcemanager_v1_ListIssuesResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.securesourcemanager.v1.ListIssuesResponse.class,
com.google.cloud.securesourcemanager.v1.ListIssuesResponse.Builder.class);
}
// Construct using com.google.cloud.securesourcemanager.v1.ListIssuesResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (issuesBuilder_ == null) {
issues_ = java.util.Collections.emptyList();
} else {
issues_ = null;
issuesBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.securesourcemanager.v1.SecureSourceManagerProto
.internal_static_google_cloud_securesourcemanager_v1_ListIssuesResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.securesourcemanager.v1.ListIssuesResponse getDefaultInstanceForType() {
return com.google.cloud.securesourcemanager.v1.ListIssuesResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.securesourcemanager.v1.ListIssuesResponse build() {
com.google.cloud.securesourcemanager.v1.ListIssuesResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.securesourcemanager.v1.ListIssuesResponse buildPartial() {
com.google.cloud.securesourcemanager.v1.ListIssuesResponse result =
new com.google.cloud.securesourcemanager.v1.ListIssuesResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.cloud.securesourcemanager.v1.ListIssuesResponse result) {
if (issuesBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
issues_ = java.util.Collections.unmodifiableList(issues_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.issues_ = issues_;
} else {
result.issues_ = issuesBuilder_.build();
}
}
private void buildPartial0(com.google.cloud.securesourcemanager.v1.ListIssuesResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.securesourcemanager.v1.ListIssuesResponse) {
return mergeFrom((com.google.cloud.securesourcemanager.v1.ListIssuesResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.securesourcemanager.v1.ListIssuesResponse other) {
if (other == com.google.cloud.securesourcemanager.v1.ListIssuesResponse.getDefaultInstance())
return this;
if (issuesBuilder_ == null) {
if (!other.issues_.isEmpty()) {
if (issues_.isEmpty()) {
issues_ = other.issues_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureIssuesIsMutable();
issues_.addAll(other.issues_);
}
onChanged();
}
} else {
if (!other.issues_.isEmpty()) {
if (issuesBuilder_.isEmpty()) {
issuesBuilder_.dispose();
issuesBuilder_ = null;
issues_ = other.issues_;
bitField0_ = (bitField0_ & ~0x00000001);
issuesBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getIssuesFieldBuilder()
: null;
} else {
issuesBuilder_.addAllMessages(other.issues_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.cloud.securesourcemanager.v1.Issue m =
input.readMessage(
com.google.cloud.securesourcemanager.v1.Issue.parser(), extensionRegistry);
if (issuesBuilder_ == null) {
ensureIssuesIsMutable();
issues_.add(m);
} else {
issuesBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.cloud.securesourcemanager.v1.Issue> issues_ =
java.util.Collections.emptyList();
private void ensureIssuesIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
issues_ = new java.util.ArrayList<com.google.cloud.securesourcemanager.v1.Issue>(issues_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.securesourcemanager.v1.Issue,
com.google.cloud.securesourcemanager.v1.Issue.Builder,
com.google.cloud.securesourcemanager.v1.IssueOrBuilder>
issuesBuilder_;
/**
*
*
* <pre>
* The list of issues.
* </pre>
*
* <code>repeated .google.cloud.securesourcemanager.v1.Issue issues = 1;</code>
*/
public java.util.List<com.google.cloud.securesourcemanager.v1.Issue> getIssuesList() {
if (issuesBuilder_ == null) {
return java.util.Collections.unmodifiableList(issues_);
} else {
return issuesBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* The list of issues.
* </pre>
*
* <code>repeated .google.cloud.securesourcemanager.v1.Issue issues = 1;</code>
*/
public int getIssuesCount() {
if (issuesBuilder_ == null) {
return issues_.size();
} else {
return issuesBuilder_.getCount();
}
}
/**
*
*
* <pre>
* The list of issues.
* </pre>
*
* <code>repeated .google.cloud.securesourcemanager.v1.Issue issues = 1;</code>
*/
public com.google.cloud.securesourcemanager.v1.Issue getIssues(int index) {
if (issuesBuilder_ == null) {
return issues_.get(index);
} else {
return issuesBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* The list of issues.
* </pre>
*
* <code>repeated .google.cloud.securesourcemanager.v1.Issue issues = 1;</code>
*/
public Builder setIssues(int index, com.google.cloud.securesourcemanager.v1.Issue value) {
if (issuesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureIssuesIsMutable();
issues_.set(index, value);
onChanged();
} else {
issuesBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The list of issues.
* </pre>
*
* <code>repeated .google.cloud.securesourcemanager.v1.Issue issues = 1;</code>
*/
public Builder setIssues(
int index, com.google.cloud.securesourcemanager.v1.Issue.Builder builderForValue) {
if (issuesBuilder_ == null) {
ensureIssuesIsMutable();
issues_.set(index, builderForValue.build());
onChanged();
} else {
issuesBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of issues.
* </pre>
*
* <code>repeated .google.cloud.securesourcemanager.v1.Issue issues = 1;</code>
*/
public Builder addIssues(com.google.cloud.securesourcemanager.v1.Issue value) {
if (issuesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureIssuesIsMutable();
issues_.add(value);
onChanged();
} else {
issuesBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* The list of issues.
* </pre>
*
* <code>repeated .google.cloud.securesourcemanager.v1.Issue issues = 1;</code>
*/
public Builder addIssues(int index, com.google.cloud.securesourcemanager.v1.Issue value) {
if (issuesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureIssuesIsMutable();
issues_.add(index, value);
onChanged();
} else {
issuesBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The list of issues.
* </pre>
*
* <code>repeated .google.cloud.securesourcemanager.v1.Issue issues = 1;</code>
*/
public Builder addIssues(
com.google.cloud.securesourcemanager.v1.Issue.Builder builderForValue) {
if (issuesBuilder_ == null) {
ensureIssuesIsMutable();
issues_.add(builderForValue.build());
onChanged();
} else {
issuesBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of issues.
* </pre>
*
* <code>repeated .google.cloud.securesourcemanager.v1.Issue issues = 1;</code>
*/
public Builder addIssues(
int index, com.google.cloud.securesourcemanager.v1.Issue.Builder builderForValue) {
if (issuesBuilder_ == null) {
ensureIssuesIsMutable();
issues_.add(index, builderForValue.build());
onChanged();
} else {
issuesBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of issues.
* </pre>
*
* <code>repeated .google.cloud.securesourcemanager.v1.Issue issues = 1;</code>
*/
public Builder addAllIssues(
java.lang.Iterable<? extends com.google.cloud.securesourcemanager.v1.Issue> values) {
if (issuesBuilder_ == null) {
ensureIssuesIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, issues_);
onChanged();
} else {
issuesBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* The list of issues.
* </pre>
*
* <code>repeated .google.cloud.securesourcemanager.v1.Issue issues = 1;</code>
*/
public Builder clearIssues() {
if (issuesBuilder_ == null) {
issues_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
issuesBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* The list of issues.
* </pre>
*
* <code>repeated .google.cloud.securesourcemanager.v1.Issue issues = 1;</code>
*/
public Builder removeIssues(int index) {
if (issuesBuilder_ == null) {
ensureIssuesIsMutable();
issues_.remove(index);
onChanged();
} else {
issuesBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* The list of issues.
* </pre>
*
* <code>repeated .google.cloud.securesourcemanager.v1.Issue issues = 1;</code>
*/
public com.google.cloud.securesourcemanager.v1.Issue.Builder getIssuesBuilder(int index) {
return getIssuesFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* The list of issues.
* </pre>
*
* <code>repeated .google.cloud.securesourcemanager.v1.Issue issues = 1;</code>
*/
public com.google.cloud.securesourcemanager.v1.IssueOrBuilder getIssuesOrBuilder(int index) {
if (issuesBuilder_ == null) {
return issues_.get(index);
} else {
return issuesBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* The list of issues.
* </pre>
*
* <code>repeated .google.cloud.securesourcemanager.v1.Issue issues = 1;</code>
*/
public java.util.List<? extends com.google.cloud.securesourcemanager.v1.IssueOrBuilder>
getIssuesOrBuilderList() {
if (issuesBuilder_ != null) {
return issuesBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(issues_);
}
}
/**
*
*
* <pre>
* The list of issues.
* </pre>
*
* <code>repeated .google.cloud.securesourcemanager.v1.Issue issues = 1;</code>
*/
public com.google.cloud.securesourcemanager.v1.Issue.Builder addIssuesBuilder() {
return getIssuesFieldBuilder()
.addBuilder(com.google.cloud.securesourcemanager.v1.Issue.getDefaultInstance());
}
/**
*
*
* <pre>
* The list of issues.
* </pre>
*
* <code>repeated .google.cloud.securesourcemanager.v1.Issue issues = 1;</code>
*/
public com.google.cloud.securesourcemanager.v1.Issue.Builder addIssuesBuilder(int index) {
return getIssuesFieldBuilder()
.addBuilder(index, com.google.cloud.securesourcemanager.v1.Issue.getDefaultInstance());
}
/**
*
*
* <pre>
* The list of issues.
* </pre>
*
* <code>repeated .google.cloud.securesourcemanager.v1.Issue issues = 1;</code>
*/
public java.util.List<com.google.cloud.securesourcemanager.v1.Issue.Builder>
getIssuesBuilderList() {
return getIssuesFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.securesourcemanager.v1.Issue,
com.google.cloud.securesourcemanager.v1.Issue.Builder,
com.google.cloud.securesourcemanager.v1.IssueOrBuilder>
getIssuesFieldBuilder() {
if (issuesBuilder_ == null) {
issuesBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.securesourcemanager.v1.Issue,
com.google.cloud.securesourcemanager.v1.Issue.Builder,
com.google.cloud.securesourcemanager.v1.IssueOrBuilder>(
issues_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
issues_ = null;
}
return issuesBuilder_;
}
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.securesourcemanager.v1.ListIssuesResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.securesourcemanager.v1.ListIssuesResponse)
private static final com.google.cloud.securesourcemanager.v1.ListIssuesResponse DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.securesourcemanager.v1.ListIssuesResponse();
}
public static com.google.cloud.securesourcemanager.v1.ListIssuesResponse getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ListIssuesResponse> PARSER =
new com.google.protobuf.AbstractParser<ListIssuesResponse>() {
@java.lang.Override
public ListIssuesResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ListIssuesResponse> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListIssuesResponse> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.securesourcemanager.v1.ListIssuesResponse getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/sdk-platform-java | 35,517 | java-common-protos/proto-google-common-protos/src/main/java/com/google/api/Control.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/api/control.proto
// Protobuf Java Version: 3.25.8
package com.google.api;
/**
*
*
* <pre>
* Selects and configures the service controller used by the service.
*
* Example:
*
* control:
* environment: servicecontrol.googleapis.com
* </pre>
*
* Protobuf type {@code google.api.Control}
*/
// NOTE: protoc-generated message class (see the "DO NOT EDIT" file header). Hand edits are
// lost on regeneration — change google/api/control.proto and regenerate instead.
public final class Control extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.api.Control)
    ControlOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use Control.newBuilder() to construct.
  private Control(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private Control() {
    environment_ = "";
    methodPolicies_ = java.util.Collections.emptyList();
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    // Invoked reflectively by the protobuf runtime; the parameter exists only to
    // disambiguate this overload and is never used.
    return new Control();
  }
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.api.ControlProto.internal_static_google_api_Control_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.api.ControlProto.internal_static_google_api_Control_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.api.Control.class, com.google.api.Control.Builder.class);
  }
  public static final int ENVIRONMENT_FIELD_NUMBER = 1;
  @SuppressWarnings("serial")
  // Holds either a java.lang.String or a ByteString; lazily decoded/encoded on access
  // and cached in place (standard generated-protobuf lazy string representation).
  private volatile java.lang.Object environment_ = "";
  /**
   *
   *
   * <pre>
   * The service controller environment to use. If empty, no control plane
   * feature (like quota and billing) will be enabled. The recommended value for
   * most services is servicecontrol.googleapis.com
   * </pre>
   *
   * <code>string environment = 1;</code>
   *
   * @return The environment.
   */
  @java.lang.Override
  public java.lang.String getEnvironment() {
    java.lang.Object ref = environment_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // First String access: decode the UTF-8 ByteString once and cache the result.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      environment_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * The service controller environment to use. If empty, no control plane
   * feature (like quota and billing) will be enabled. The recommended value for
   * most services is servicecontrol.googleapis.com
   * </pre>
   *
   * <code>string environment = 1;</code>
   *
   * @return The bytes for environment.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getEnvironmentBytes() {
    java.lang.Object ref = environment_;
    if (ref instanceof java.lang.String) {
      // First bytes access: encode the String once and cache the ByteString.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      environment_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int METHOD_POLICIES_FIELD_NUMBER = 4;
  @SuppressWarnings("serial")
  // Immutable snapshot list in a built message (frozen in buildPartialRepeatedFields).
  private java.util.List<com.google.api.MethodPolicy> methodPolicies_;
  /**
   *
   *
   * <pre>
   * Defines policies applying to the API methods of the service.
   * </pre>
   *
   * <code>repeated .google.api.MethodPolicy method_policies = 4;</code>
   */
  @java.lang.Override
  public java.util.List<com.google.api.MethodPolicy> getMethodPoliciesList() {
    return methodPolicies_;
  }
  /**
   *
   *
   * <pre>
   * Defines policies applying to the API methods of the service.
   * </pre>
   *
   * <code>repeated .google.api.MethodPolicy method_policies = 4;</code>
   */
  @java.lang.Override
  public java.util.List<? extends com.google.api.MethodPolicyOrBuilder>
      getMethodPoliciesOrBuilderList() {
    return methodPolicies_;
  }
  /**
   *
   *
   * <pre>
   * Defines policies applying to the API methods of the service.
   * </pre>
   *
   * <code>repeated .google.api.MethodPolicy method_policies = 4;</code>
   */
  @java.lang.Override
  public int getMethodPoliciesCount() {
    return methodPolicies_.size();
  }
  /**
   *
   *
   * <pre>
   * Defines policies applying to the API methods of the service.
   * </pre>
   *
   * <code>repeated .google.api.MethodPolicy method_policies = 4;</code>
   */
  @java.lang.Override
  public com.google.api.MethodPolicy getMethodPolicies(int index) {
    return methodPolicies_.get(index);
  }
  /**
   *
   *
   * <pre>
   * Defines policies applying to the API methods of the service.
   * </pre>
   *
   * <code>repeated .google.api.MethodPolicy method_policies = 4;</code>
   */
  @java.lang.Override
  public com.google.api.MethodPolicyOrBuilder getMethodPoliciesOrBuilder(int index) {
    return methodPolicies_.get(index);
  }
  // Memoized isInitialized() result: -1 = not computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No required fields in this proto3 message, so always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // proto3 default elision: field 1 is only serialized when non-empty.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(environment_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, environment_);
    }
    for (int i = 0; i < methodPolicies_.size(); i++) {
      output.writeMessage(4, methodPolicies_.get(i));
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // memoizedSize of -1 means "not yet computed"; otherwise return the cached value.
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(environment_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, environment_);
    }
    for (int i = 0; i < methodPolicies_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(4, methodPolicies_.get(i));
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.api.Control)) {
      return super.equals(obj);
    }
    com.google.api.Control other = (com.google.api.Control) obj;
    if (!getEnvironment().equals(other.getEnvironment())) return false;
    if (!getMethodPoliciesList().equals(other.getMethodPoliciesList())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    // Standard generated-protobuf hash: prime-multiplied fold over the descriptor,
    // each set field (tag number then value), and the unknown-field set.
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + ENVIRONMENT_FIELD_NUMBER;
    hash = (53 * hash) + getEnvironment().hashCode();
    if (getMethodPoliciesCount() > 0) {
      hash = (37 * hash) + METHOD_POLICIES_FIELD_NUMBER;
      hash = (53 * hash) + getMethodPoliciesList().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  public static com.google.api.Control parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.api.Control parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.api.Control parseFrom(com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.api.Control parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.api.Control parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.api.Control parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.api.Control parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.api.Control parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.api.Control parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.api.Control parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.api.Control parseFrom(com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.api.Control parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.api.Control prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Selects and configures the service controller used by the service.
   *
   * Example:
   *
   * control:
   * environment: servicecontrol.googleapis.com
   * </pre>
   *
   * Protobuf type {@code google.api.Control}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.api.Control)
      com.google.api.ControlOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.api.ControlProto.internal_static_google_api_Control_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.api.ControlProto.internal_static_google_api_Control_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.api.Control.class, com.google.api.Control.Builder.class);
    }
    // Construct using com.google.api.Control.newBuilder()
    private Builder() {}
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      environment_ = "";
      if (methodPoliciesBuilder_ == null) {
        methodPolicies_ = java.util.Collections.emptyList();
      } else {
        methodPolicies_ = null;
        methodPoliciesBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000002);
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.api.ControlProto.internal_static_google_api_Control_descriptor;
    }
    @java.lang.Override
    public com.google.api.Control getDefaultInstanceForType() {
      return com.google.api.Control.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.api.Control build() {
      com.google.api.Control result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.api.Control buildPartial() {
      com.google.api.Control result = new com.google.api.Control(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    private void buildPartialRepeatedFields(com.google.api.Control result) {
      if (methodPoliciesBuilder_ == null) {
        if (((bitField0_ & 0x00000002) != 0)) {
          // Freeze the builder's mutable list into the built message and drop the
          // mutability bit so later builder mutations copy-on-write a new list.
          methodPolicies_ = java.util.Collections.unmodifiableList(methodPolicies_);
          bitField0_ = (bitField0_ & ~0x00000002);
        }
        result.methodPolicies_ = methodPolicies_;
      } else {
        result.methodPolicies_ = methodPoliciesBuilder_.build();
      }
    }
    private void buildPartial0(com.google.api.Control result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.environment_ = environment_;
      }
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.api.Control) {
        return mergeFrom((com.google.api.Control) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    public Builder mergeFrom(com.google.api.Control other) {
      if (other == com.google.api.Control.getDefaultInstance()) return this;
      if (!other.getEnvironment().isEmpty()) {
        environment_ = other.environment_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (methodPoliciesBuilder_ == null) {
        if (!other.methodPolicies_.isEmpty()) {
          if (methodPolicies_.isEmpty()) {
            // Share the other message's immutable list; the cleared mutability bit
            // forces a copy before any future mutation.
            methodPolicies_ = other.methodPolicies_;
            bitField0_ = (bitField0_ & ~0x00000002);
          } else {
            ensureMethodPoliciesIsMutable();
            methodPolicies_.addAll(other.methodPolicies_);
          }
          onChanged();
        }
      } else {
        if (!other.methodPolicies_.isEmpty()) {
          if (methodPoliciesBuilder_.isEmpty()) {
            methodPoliciesBuilder_.dispose();
            methodPoliciesBuilder_ = null;
            methodPolicies_ = other.methodPolicies_;
            bitField0_ = (bitField0_ & ~0x00000002);
            methodPoliciesBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getMethodPoliciesFieldBuilder()
                    : null;
          } else {
            methodPoliciesBuilder_.addAllMessages(other.methodPolicies_);
          }
        }
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          // Wire tags: 10 = field 1 (environment, length-delimited string),
          // 34 = field 4 (method_policies, length-delimited message).
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                environment_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 34:
              {
                com.google.api.MethodPolicy m =
                    input.readMessage(com.google.api.MethodPolicy.parser(), extensionRegistry);
                if (methodPoliciesBuilder_ == null) {
                  ensureMethodPoliciesIsMutable();
                  methodPolicies_.add(m);
                } else {
                  methodPoliciesBuilder_.addMessage(m);
                }
                break;
              } // case 34
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Bit 0x00000001 = environment set; bit 0x00000002 = methodPolicies_ is a
    // builder-owned mutable list (as opposed to a shared immutable one).
    private int bitField0_;
    private java.lang.Object environment_ = "";
    /**
     *
     *
     * <pre>
     * The service controller environment to use. If empty, no control plane
     * feature (like quota and billing) will be enabled. The recommended value for
     * most services is servicecontrol.googleapis.com
     * </pre>
     *
     * <code>string environment = 1;</code>
     *
     * @return The environment.
     */
    public java.lang.String getEnvironment() {
      java.lang.Object ref = environment_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        environment_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * The service controller environment to use. If empty, no control plane
     * feature (like quota and billing) will be enabled. The recommended value for
     * most services is servicecontrol.googleapis.com
     * </pre>
     *
     * <code>string environment = 1;</code>
     *
     * @return The bytes for environment.
     */
    public com.google.protobuf.ByteString getEnvironmentBytes() {
      java.lang.Object ref = environment_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        environment_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * The service controller environment to use. If empty, no control plane
     * feature (like quota and billing) will be enabled. The recommended value for
     * most services is servicecontrol.googleapis.com
     * </pre>
     *
     * <code>string environment = 1;</code>
     *
     * @param value The environment to set.
     * @return This builder for chaining.
     */
    public Builder setEnvironment(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      environment_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The service controller environment to use. If empty, no control plane
     * feature (like quota and billing) will be enabled. The recommended value for
     * most services is servicecontrol.googleapis.com
     * </pre>
     *
     * <code>string environment = 1;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearEnvironment() {
      environment_ = getDefaultInstance().getEnvironment();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The service controller environment to use. If empty, no control plane
     * feature (like quota and billing) will be enabled. The recommended value for
     * most services is servicecontrol.googleapis.com
     * </pre>
     *
     * <code>string environment = 1;</code>
     *
     * @param value The bytes for environment to set.
     * @return This builder for chaining.
     */
    public Builder setEnvironmentBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      environment_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    private java.util.List<com.google.api.MethodPolicy> methodPolicies_ =
        java.util.Collections.emptyList();
    // Copy-on-write: switch to a builder-owned mutable ArrayList the first time
    // the list is modified after being shared or frozen.
    private void ensureMethodPoliciesIsMutable() {
      if (!((bitField0_ & 0x00000002) != 0)) {
        methodPolicies_ = new java.util.ArrayList<com.google.api.MethodPolicy>(methodPolicies_);
        bitField0_ |= 0x00000002;
      }
    }
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.api.MethodPolicy,
            com.google.api.MethodPolicy.Builder,
            com.google.api.MethodPolicyOrBuilder>
        methodPoliciesBuilder_;
    /**
     *
     *
     * <pre>
     * Defines policies applying to the API methods of the service.
     * </pre>
     *
     * <code>repeated .google.api.MethodPolicy method_policies = 4;</code>
     */
    public java.util.List<com.google.api.MethodPolicy> getMethodPoliciesList() {
      if (methodPoliciesBuilder_ == null) {
        return java.util.Collections.unmodifiableList(methodPolicies_);
      } else {
        return methodPoliciesBuilder_.getMessageList();
      }
    }
    /**
     *
     *
     * <pre>
     * Defines policies applying to the API methods of the service.
     * </pre>
     *
     * <code>repeated .google.api.MethodPolicy method_policies = 4;</code>
     */
    public int getMethodPoliciesCount() {
      if (methodPoliciesBuilder_ == null) {
        return methodPolicies_.size();
      } else {
        return methodPoliciesBuilder_.getCount();
      }
    }
    /**
     *
     *
     * <pre>
     * Defines policies applying to the API methods of the service.
     * </pre>
     *
     * <code>repeated .google.api.MethodPolicy method_policies = 4;</code>
     */
    public com.google.api.MethodPolicy getMethodPolicies(int index) {
      if (methodPoliciesBuilder_ == null) {
        return methodPolicies_.get(index);
      } else {
        return methodPoliciesBuilder_.getMessage(index);
      }
    }
    /**
     *
     *
     * <pre>
     * Defines policies applying to the API methods of the service.
     * </pre>
     *
     * <code>repeated .google.api.MethodPolicy method_policies = 4;</code>
     */
    public Builder setMethodPolicies(int index, com.google.api.MethodPolicy value) {
      if (methodPoliciesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureMethodPoliciesIsMutable();
        methodPolicies_.set(index, value);
        onChanged();
      } else {
        methodPoliciesBuilder_.setMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Defines policies applying to the API methods of the service.
     * </pre>
     *
     * <code>repeated .google.api.MethodPolicy method_policies = 4;</code>
     */
    public Builder setMethodPolicies(
        int index, com.google.api.MethodPolicy.Builder builderForValue) {
      if (methodPoliciesBuilder_ == null) {
        ensureMethodPoliciesIsMutable();
        methodPolicies_.set(index, builderForValue.build());
        onChanged();
      } else {
        methodPoliciesBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Defines policies applying to the API methods of the service.
     * </pre>
     *
     * <code>repeated .google.api.MethodPolicy method_policies = 4;</code>
     */
    public Builder addMethodPolicies(com.google.api.MethodPolicy value) {
      if (methodPoliciesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureMethodPoliciesIsMutable();
        methodPolicies_.add(value);
        onChanged();
      } else {
        methodPoliciesBuilder_.addMessage(value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Defines policies applying to the API methods of the service.
     * </pre>
     *
     * <code>repeated .google.api.MethodPolicy method_policies = 4;</code>
     */
    public Builder addMethodPolicies(int index, com.google.api.MethodPolicy value) {
      if (methodPoliciesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureMethodPoliciesIsMutable();
        methodPolicies_.add(index, value);
        onChanged();
      } else {
        methodPoliciesBuilder_.addMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Defines policies applying to the API methods of the service.
     * </pre>
     *
     * <code>repeated .google.api.MethodPolicy method_policies = 4;</code>
     */
    public Builder addMethodPolicies(com.google.api.MethodPolicy.Builder builderForValue) {
      if (methodPoliciesBuilder_ == null) {
        ensureMethodPoliciesIsMutable();
        methodPolicies_.add(builderForValue.build());
        onChanged();
      } else {
        methodPoliciesBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Defines policies applying to the API methods of the service.
     * </pre>
     *
     * <code>repeated .google.api.MethodPolicy method_policies = 4;</code>
     */
    public Builder addMethodPolicies(
        int index, com.google.api.MethodPolicy.Builder builderForValue) {
      if (methodPoliciesBuilder_ == null) {
        ensureMethodPoliciesIsMutable();
        methodPolicies_.add(index, builderForValue.build());
        onChanged();
      } else {
        methodPoliciesBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Defines policies applying to the API methods of the service.
     * </pre>
     *
     * <code>repeated .google.api.MethodPolicy method_policies = 4;</code>
     */
    public Builder addAllMethodPolicies(
        java.lang.Iterable<? extends com.google.api.MethodPolicy> values) {
      if (methodPoliciesBuilder_ == null) {
        ensureMethodPoliciesIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, methodPolicies_);
        onChanged();
      } else {
        methodPoliciesBuilder_.addAllMessages(values);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Defines policies applying to the API methods of the service.
     * </pre>
     *
     * <code>repeated .google.api.MethodPolicy method_policies = 4;</code>
     */
    public Builder clearMethodPolicies() {
      if (methodPoliciesBuilder_ == null) {
        methodPolicies_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000002);
        onChanged();
      } else {
        methodPoliciesBuilder_.clear();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Defines policies applying to the API methods of the service.
     * </pre>
     *
     * <code>repeated .google.api.MethodPolicy method_policies = 4;</code>
     */
    public Builder removeMethodPolicies(int index) {
      if (methodPoliciesBuilder_ == null) {
        ensureMethodPoliciesIsMutable();
        methodPolicies_.remove(index);
        onChanged();
      } else {
        methodPoliciesBuilder_.remove(index);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Defines policies applying to the API methods of the service.
     * </pre>
     *
     * <code>repeated .google.api.MethodPolicy method_policies = 4;</code>
     */
    public com.google.api.MethodPolicy.Builder getMethodPoliciesBuilder(int index) {
      return getMethodPoliciesFieldBuilder().getBuilder(index);
    }
    /**
     *
     *
     * <pre>
     * Defines policies applying to the API methods of the service.
     * </pre>
     *
     * <code>repeated .google.api.MethodPolicy method_policies = 4;</code>
     */
    public com.google.api.MethodPolicyOrBuilder getMethodPoliciesOrBuilder(int index) {
      if (methodPoliciesBuilder_ == null) {
        return methodPolicies_.get(index);
      } else {
        return methodPoliciesBuilder_.getMessageOrBuilder(index);
      }
    }
    /**
     *
     *
     * <pre>
     * Defines policies applying to the API methods of the service.
     * </pre>
     *
     * <code>repeated .google.api.MethodPolicy method_policies = 4;</code>
     */
    public java.util.List<? extends com.google.api.MethodPolicyOrBuilder>
        getMethodPoliciesOrBuilderList() {
      if (methodPoliciesBuilder_ != null) {
        return methodPoliciesBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(methodPolicies_);
      }
    }
    /**
     *
     *
     * <pre>
     * Defines policies applying to the API methods of the service.
     * </pre>
     *
     * <code>repeated .google.api.MethodPolicy method_policies = 4;</code>
     */
    public com.google.api.MethodPolicy.Builder addMethodPoliciesBuilder() {
      return getMethodPoliciesFieldBuilder()
          .addBuilder(com.google.api.MethodPolicy.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * Defines policies applying to the API methods of the service.
     * </pre>
     *
     * <code>repeated .google.api.MethodPolicy method_policies = 4;</code>
     */
    public com.google.api.MethodPolicy.Builder addMethodPoliciesBuilder(int index) {
      return getMethodPoliciesFieldBuilder()
          .addBuilder(index, com.google.api.MethodPolicy.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * Defines policies applying to the API methods of the service.
     * </pre>
     *
     * <code>repeated .google.api.MethodPolicy method_policies = 4;</code>
     */
    public java.util.List<com.google.api.MethodPolicy.Builder> getMethodPoliciesBuilderList() {
      return getMethodPoliciesFieldBuilder().getBuilderList();
    }
    // Lazily creates the RepeatedFieldBuilderV3; once created it owns the list and
    // methodPolicies_ is nulled out so only one representation is live at a time.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.api.MethodPolicy,
            com.google.api.MethodPolicy.Builder,
            com.google.api.MethodPolicyOrBuilder>
        getMethodPoliciesFieldBuilder() {
      if (methodPoliciesBuilder_ == null) {
        methodPoliciesBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.api.MethodPolicy,
                com.google.api.MethodPolicy.Builder,
                com.google.api.MethodPolicyOrBuilder>(
                methodPolicies_,
                ((bitField0_ & 0x00000002) != 0),
                getParentForChildren(),
                isClean());
        methodPolicies_ = null;
      }
      return methodPoliciesBuilder_;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.api.Control)
  }
  // @@protoc_insertion_point(class_scope:google.api.Control)
  private static final com.google.api.Control DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.api.Control();
  }
  public static com.google.api.Control getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  private static final com.google.protobuf.Parser<Control> PARSER =
      new com.google.protobuf.AbstractParser<Control>() {
        @java.lang.Override
        public Control parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            // Attach the partially-parsed message so callers can inspect what was read.
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<Control> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<Control> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.api.Control getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
google/j2objc | 35,320 | jre_emul/android/platform/libcore/harmony-tests/src/test/java/org/apache/harmony/tests/javax/xml/parsers/DocumentBuilderFactoryTest.java | /*
* Copyright (C) 2007 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.harmony.tests.javax.xml.parsers;
import junit.framework.TestCase;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.ErrorHandler;
import org.xml.sax.SAXException;
import org.xml.sax.SAXParseException;
import org.xml.sax.SAXNotRecognizedException;
import org.xml.sax.SAXNotSupportedException;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.IOException;
import java.net.URL;
import java.net.MalformedURLException;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import javax.xml.XMLConstants;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.FactoryConfigurationError;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.stream.StreamSource;
import javax.xml.validation.Schema;
import javax.xml.validation.SchemaFactory;
import javax.xml.validation.SchemaFactoryLoader;
public class DocumentBuilderFactoryTest extends TestCase {
DocumentBuilderFactory dbf;
List<String> cdataElements;
List<String> textElements;
List<String> commentElements;
protected void setUp() throws Exception {
super.setUp();
dbf = DocumentBuilderFactory.newInstance();
cdataElements = new ArrayList<String>();
textElements = new ArrayList<String>();
commentElements = new ArrayList<String>();
}
protected void tearDown() throws Exception {
dbf = null;
cdataElements = null;
textElements = null;
commentElements = null;
super.tearDown();
}
/**
* javax.xml.parsers.DocumentBuilderFactory#DocumentBuilderFactory().
*/
public void test_Constructor() {
try {
new DocumentBuilderFactoryChild();
} catch (Exception e) {
fail("Unexpected exception " + e.toString());
}
}
/**
* javax.xml.parsers.DocumentBuilderFactory#getAttribute(String).
*/
// public void test_getAttributeLjava_lang_String() {
// String[] attributes = {
// "http://java.sun.com/xml/jaxp/properties/schemaLanguage",
// "http://java.sun.com/xml/jaxp/properties/schemaSource" };
// Object[] values = { "http://www.w3.org/2001/XMLSchema", "source" };
//
// try {
// for (int i = 0; i < attributes.length; i++) {
// dbf.setAttribute(attributes[i], values[i]);
// assertEquals(values[i], dbf.getAttribute(attributes[i]));
// }
// } catch (IllegalArgumentException e) {
// fail("Unexpected IllegalArgumentException" + e.getMessage());
// } catch (Exception e) {
// fail("Unexpected exception" + e.getMessage());
// }
//
// try {
// for (int i = 0; i < attributes.length; i++) {
// dbf.setAttribute(null, null);
// fail("NullPointerException expected");
// }
// } catch (NullPointerException e) {
// // expected
// }
//
// String[] badAttributes = {"bad1", "bad2", ""};
// try {
// for (int i = 0; i < badAttributes.length; i++) {
// dbf.getAttribute(badAttributes[i]);
// fail("IllegalArgumentException expected");
// }
// } catch (IllegalArgumentException e) {
// // expected
// }
// }
/**
* javax.xml.parsers.DocumentBuilderFactory#getFeature(String).
*/
// TODO Fails on JDK. Why?
// public void test_getFeatureLjava_lang_String() {
// String[] features = { "http://xml.org/sax/features/namespaces",
// "http://xml.org/sax/features/validation",
// "http://xml.org/sax/features/external-general-entities" };
// try {
// for (int i = 0; i < features.length; i++) {
// dbf.setFeature(features[i], true);
// assertTrue(dbf.getFeature(features[i]));
// }
// } catch (ParserConfigurationException e) {
// fail("Unexpected ParserConfigurationException " + e.getMessage());
// }
//
// try {
// for (int i = 0; i < features.length; i++) {
// dbf.setFeature(features[i], false);
// assertFalse(dbf.getFeature(features[i]));
// }
// } catch (ParserConfigurationException e) {
// fail("Unexpected ParserConfigurationException " + e.getMessage());
// }
//
// try {
// for (int i = 0; i < features.length; i++) {
// dbf.setFeature(null, false);
// fail("NullPointerException expected");
// }
// } catch (NullPointerException e) {
// // expected
// } catch (ParserConfigurationException e) {
// fail("Unexpected ParserConfigurationException" + e.getMessage());
// }
//
// String[] badFeatures = {"bad1", "bad2", ""};
// try {
// for (int i = 0; i < badFeatures.length; i++) {
// dbf.getFeature(badFeatures[i]);
// fail("ParserConfigurationException expected");
// }
// } catch (ParserConfigurationException e) {
// // expected
// }
//
// }
/**
* javax.xml.parsers.DocumentBuilderFactory#getSchema().
* TBD getSchemas() IS NOT SUPPORTED
*/
/* public void test_getSchema() {
assertNull(dbf.getSchema());
SchemaFactory sf =
SchemaFactory.newInstance(XMLConstants.W3C_XML_SCHEMA_NS_URI);
try {
Schema schema = sf.newSchema();
dbf.setSchema(schema);
assertNotNull(dbf.getSchema());
} catch (SAXException sax) {
fail("Unexpected exception " + sax.toString());
}
}
*/
/**
* javax.xml.parsers.DocumentBuilderFactory#isCoalescing().
*/
public void test_isCoalescing() {
dbf.setCoalescing(true);
assertTrue(dbf.isCoalescing());
dbf.setCoalescing(false);
assertFalse(dbf.isCoalescing());
}
/**
* javax.xml.parsers.DocumentBuilderFactory#isExpandEntityReferences().
*/
public void test_isExpandEntityReferences() {
dbf.setExpandEntityReferences(true);
assertTrue(dbf.isExpandEntityReferences());
dbf.setExpandEntityReferences(false);
assertFalse(dbf.isExpandEntityReferences());
}
/**
* javax.xml.parsers.DocumentBuilderFactory#isIgnoringComments().
*/
public void test_isIgnoringComments() {
dbf.setIgnoringComments(true);
assertTrue(dbf.isIgnoringComments());
dbf.setIgnoringComments(false);
assertFalse(dbf.isIgnoringComments());
}
/**
* javax.xml.parsers.DocumentBuilderFactory#isIgnoringElementContentWhitespace().
*/
public void test_isIgnoringElementContentWhitespace() {
dbf.setIgnoringElementContentWhitespace(true);
assertTrue(dbf.isIgnoringElementContentWhitespace());
dbf.setIgnoringElementContentWhitespace(false);
assertFalse(dbf.isIgnoringElementContentWhitespace());
}
/**
* javax.xml.parsers.DocumentBuilderFactory#isNamespaceAware().
*/
public void test_isNamespaceAware() {
dbf.setNamespaceAware(true);
assertTrue(dbf.isNamespaceAware());
dbf.setNamespaceAware(false);
assertFalse(dbf.isNamespaceAware());
}
public void test_setIsValidating() {
dbf.setValidating(true);
assertTrue(dbf.isValidating());
dbf.setValidating(false);
assertFalse(dbf.isValidating());
}
//TODO(b/314228778): Fix JavaLangUnsupportedOperationException:
// This parser does not support specification
// public void test_isSetXIncludeAware() {
// dbf.setXIncludeAware(true);
// assertTrue(dbf.isXIncludeAware());
// dbf.setXIncludeAware(false);
// assertFalse(dbf.isXIncludeAware());
// }
/**
* javax.xml.parsers.DocumentBuilderFactory#newInstance().
*/
public void test_newInstance() {
String className = null;
try {
// case 1: Try to obtain a new instance of factory by default.
DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
assertNotNull(dbf);
// case 2: Try to create a new instance of factory using
// property DATATYPEFACTORY_PROPERTY
className = System.getProperty("javax.xml.parsers.DocumentBuilderFactory");
System.setProperty("javax.xml.parsers.DocumentBuilderFactory",
"org.apache.harmony.xml.parsers.DocumentBuilderFactoryImpl");
dbf = DocumentBuilderFactory.newInstance();
assertNotNull(dbf);
assertTrue(dbf instanceof org.apache.harmony.xml.parsers.DocumentBuilderFactoryImpl);
// case 3: Try to create a new instance of factory using Property
String keyValuePair = "javax.xml.parsers.DocumentBuilderFactory"
+ "=" + "org.apache.harmony.xml.parsers.DocumentBuilderFactoryImpl";
ByteArrayInputStream bis = new ByteArrayInputStream(keyValuePair
.getBytes());
Properties prop = System.getProperties();
prop.load(bis);
dbf = DocumentBuilderFactory.newInstance();
assertNotNull(dbf);
assertTrue(dbf instanceof org.apache.harmony.xml.parsers.DocumentBuilderFactoryImpl);
// case 4: Check FactoryConfiguration error
System.setProperty("javax.xml.parsers.DocumentBuilderFactory", "");
try {
DocumentBuilderFactory.newInstance();
} catch (FactoryConfigurationError fce) {
// expected
}
} catch (Exception e) {
fail("Unexpected exception " + e.toString());
} finally {
// Set default value of Datatype factory,
// because of this test modifies it.
if (className == null) {
System.clearProperty("javax.xml.parsers.DocumentBuilderFactory");
} else {
System.setProperty("javax.xml.parsers.DocumentBuilderFactory",
className);
}
}
}
public void test_newDocumentBuilder() {
// Ordinary case
try {
DocumentBuilder db = dbf.newDocumentBuilder();
assertTrue(db instanceof DocumentBuilder);
db.parse(getClass().getResourceAsStream("/simple.xml"));
} catch(Exception e) {
throw new RuntimeException("Unexpected exception", e);
}
// Exception case
dbf.setValidating(true);
try {
DocumentBuilder db = dbf.newDocumentBuilder();
} catch(ParserConfigurationException e) {
// Expected, since Android doesn't have a validating parser.
}
}
/**
* javax.xml.parsers.DocumentBuilderFactory#setAttribute(java.lang.String,
* java.lang.Object).
*/
// public void test_setAttributeLjava_lang_StringLjava_lang_Object() {
// String[] attributes = {
// "http://java.sun.com/xml/jaxp/properties/schemaLanguage",
// "http://java.sun.com/xml/jaxp/properties/schemaSource" };
// Object[] values = { "http://www.w3.org/2001/XMLSchema", "source" };
//
// try {
// for (int i = 0; i < attributes.length; i++) {
// dbf.setAttribute(attributes[i], values[i]);
// assertEquals(values[i], dbf.getAttribute(attributes[i]));
// }
// } catch (IllegalArgumentException e) {
// fail("Unexpected IllegalArgumentException" + e.getMessage());
// } catch (Exception e) {
// fail("Unexpected exception" + e.getMessage());
// }
//
// String[] badAttributes = {"bad1", "bad2", ""};
// try {
// for (int i = 0; i < badAttributes.length; i++) {
// dbf.setAttribute(badAttributes[i], "");
// fail("IllegalArgumentException expected");
// }
// } catch (IllegalArgumentException iae) {
// // expected
// }
//
// try {
// for (int i = 0; i < attributes.length; i++) {
// dbf.setAttribute(null, null);
// fail("NullPointerException expected");
// }
// } catch (NullPointerException e) {
// // expected
// }
// }
/**
* javax.xml.parsers.DocumentBuilderFactory#setCoalescing(boolean).
*/
public void test_setCoalescingZ() {
dbf.setCoalescing(true);
assertTrue(dbf.isCoalescing());
textElements.clear();
cdataElements.clear();
Exception parseException = null;
DocumentBuilder parser = null;
try {
parser = dbf.newDocumentBuilder();
ValidationErrorHandler errorHandler = new ValidationErrorHandler();
parser.setErrorHandler(errorHandler);
Document document = parser.parse(getClass().getResourceAsStream(
"/recipt.xml"));
parseException = errorHandler.getFirstException();
goThroughDocument((Node) document, "");
assertTrue(textElements
.contains("BeefParmesan<title>withGarlicAngelHairPasta</title>"));
} catch (Exception ex) {
parseException = ex;
}
parser.setErrorHandler(null);
if (parseException != null) {
fail("Unexpected exception " + parseException.getMessage());
}
dbf.setCoalescing(false);
assertFalse(dbf.isCoalescing());
textElements.clear();
cdataElements.clear();
try {
parser = dbf.newDocumentBuilder();
ValidationErrorHandler errorHandler = new ValidationErrorHandler();
parser.setErrorHandler(errorHandler);
Document document = parser.parse(getClass().getResourceAsStream(
"/recipt.xml"));
parseException = errorHandler.getFirstException();
goThroughDocument((Node) document, "");
assertFalse(textElements
.contains("BeefParmesan<title>withGarlicAngelHairPasta</title>"));
} catch (Exception ex) {
parseException = ex;
}
parser.setErrorHandler(null);
if (parseException != null) {
fail("Unexpected exception " + parseException.getMessage());
}
}
/**
* javax.xml.parsers.DocumentBuilderFactory#setExpandEntityReferences(boolean).
*/
public void test_setExpandEntityReferencesZ() {
dbf.setExpandEntityReferences(true);
assertTrue(dbf.isExpandEntityReferences());
Exception parseException = null;
DocumentBuilder parser = null;
try {
parser = dbf.newDocumentBuilder();
ValidationErrorHandler errorHandler = new ValidationErrorHandler();
parser.setErrorHandler(errorHandler);
Document document = parser.parse(getClass().getResourceAsStream(
"/recipt.xml"));
parseException = errorHandler.getFirstException();
assertNotNull(document);
} catch (Exception ex) {
parseException = ex;
}
parser.setErrorHandler(null);
if (parseException != null) {
fail("Unexpected exception " + parseException.getMessage());
}
dbf.setExpandEntityReferences(false);
assertFalse(dbf.isExpandEntityReferences());
try {
parser = dbf.newDocumentBuilder();
ValidationErrorHandler errorHandler = new ValidationErrorHandler();
parser.setErrorHandler(errorHandler);
Document document = parser.parse(getClass().getResourceAsStream(
"/recipt.xml"));
parseException = errorHandler.getFirstException();
assertNotNull(document);
} catch (Exception ex) {
parseException = ex;
}
parser.setErrorHandler(null);
if (parseException != null) {
fail("Unexpected exception " + parseException.getMessage());
}
}
/**
* javax.xml.parsers.DocumentBuilderFactory#setFeature(java.lang.String).
*/
public void test_getSetFeatureLjava_lang_String() {
String[] features = { "http://xml.org/sax/features/namespaces",
"http://xml.org/sax/features/validation" };
try {
for (int i = 0; i < features.length; i++) {
dbf.setFeature(features[i], true);
assertTrue(dbf.getFeature(features[i]));
}
} catch (ParserConfigurationException e) {
fail("Unexpected ParserConfigurationException" + e.getMessage());
}
try {
for (int i = 0; i < features.length; i++) {
dbf.setFeature(features[i], false);
assertFalse(dbf.getFeature(features[i]));
}
} catch (ParserConfigurationException e) {
fail("Unexpected ParserConfigurationException" + e.getMessage());
}
try {
for (int i = 0; i < features.length; i++) {
dbf.setFeature(null, false);
fail("NullPointerException expected");
}
} catch (NullPointerException e) {
// expected
} catch (ParserConfigurationException e) {
fail("Unexpected ParserConfigurationException" + e.getMessage());
}
String[] badFeatures = { "bad1", "bad2", "" };
try {
for (int i = 0; i < badFeatures.length; i++) {
dbf.setFeature(badFeatures[i], false);
fail("ParserConfigurationException expected");
}
} catch (ParserConfigurationException e) {
// expected
}
}
/**
* javax.xml.parsers.DocumentBuilderFactory#setIgnoringComments(boolean).
*/
public void test_setIgnoringCommentsZ() {
commentElements.clear();
dbf.setIgnoringComments(true);
assertTrue(dbf.isIgnoringComments());
try {
DocumentBuilder parser = dbf.newDocumentBuilder();
Document document = parser.parse(getClass().getResourceAsStream(
"/recipt.xml"));
goThroughDocument((Node) document, "");
assertFalse(commentElements.contains("comment1"));
assertFalse(commentElements.contains("comment2"));
} catch (IOException e) {
fail("Unexpected IOException " + e.getMessage());
} catch (ParserConfigurationException e) {
fail("Unexpected ParserConfigurationException " + e.getMessage());
} catch (SAXException e) {
fail("Unexpected SAXException " + e.getMessage());
}
commentElements.clear();
dbf.setIgnoringComments(false);
assertFalse(dbf.isIgnoringComments());
try {
DocumentBuilder parser = dbf.newDocumentBuilder();
Document document = parser.parse(getClass().getResourceAsStream(
"/recipt.xml"));
goThroughDocument((Node) document, "");
assertTrue(commentElements.contains("comment1"));
assertTrue(commentElements.contains("comment2"));
} catch (IOException e) {
fail("Unexpected IOException " + e.getMessage());
} catch (ParserConfigurationException e) {
fail("Unexpected ParserConfigurationException " + e.getMessage());
} catch (SAXException e) {
fail("Unexpected SAXException " + e.getMessage());
}
}
/**
* javax.xml.parsers.DocumentBuilderFactory#setIgnoringElementContentWhitespace(boolean).
*/
public void test_setIgnoringElementContentWhitespaceZ() {
dbf.setIgnoringElementContentWhitespace(true);
assertTrue(dbf.isIgnoringElementContentWhitespace());
try {
DocumentBuilder parser = dbf.newDocumentBuilder();
Document document = parser.parse(getClass().getResourceAsStream(
"/recipt.xml"));
assertNotNull(document);
} catch (IOException e) {
fail("Unexpected IOException " + e.getMessage());
} catch (ParserConfigurationException e) {
fail("Unexpected ParserConfigurationException " + e.getMessage());
} catch (SAXException e) {
fail("Unexpected SAXException " + e.getMessage());
}
dbf.setIgnoringElementContentWhitespace(false);
assertFalse(dbf.isIgnoringElementContentWhitespace());
try {
DocumentBuilder parser = dbf.newDocumentBuilder();
Document document = parser.parse(getClass().getResourceAsStream(
"/recipt.xml"));
assertNotNull(document);
} catch (IOException e) {
fail("Unexpected IOException " + e.getMessage());
} catch (ParserConfigurationException e) {
fail("Unexpected ParserConfigurationException " + e.getMessage());
} catch (SAXException e) {
fail("Unexpected SAXException " + e.getMessage());
}
}
/**
* javax.xml.parsers.DocumentBuilderFactory#setNamespaceAware(boolean).
*/
public void test_setNamespaceAwareZ() {
dbf.setNamespaceAware(true);
assertTrue(dbf.isNamespaceAware());
try {
DocumentBuilder parser = dbf.newDocumentBuilder();
Document document = parser.parse(getClass().getResourceAsStream(
"/recipt.xml"));
assertNotNull(document);
} catch (IOException e) {
fail("Unexpected IOException " + e.getMessage());
} catch (ParserConfigurationException e) {
fail("Unexpected ParserConfigurationException " + e.getMessage());
} catch (SAXException e) {
fail("Unexpected SAXException " + e.getMessage());
}
dbf.setNamespaceAware(false);
assertFalse(dbf.isNamespaceAware());
try {
DocumentBuilder parser = dbf.newDocumentBuilder();
Document document = parser.parse(getClass().getResourceAsStream(
"/recipt.xml"));
assertNotNull(document);
} catch (IOException e) {
fail("Unexpected IOException " + e.getMessage());
} catch (ParserConfigurationException e) {
fail("Unexpected ParserConfigurationException " + e.getMessage());
} catch (SAXException e) {
fail("Unexpected SAXException " + e.getMessage());
}
}
public void test_getSetAttribute() {
// Android SAX implementation doesn't support attributes, so
// we can only make sure the expected exception is thrown.
try {
dbf.setAttribute("foo", new Object());
fail("IllegalArgumentException expected");
} catch (IllegalArgumentException e) {
// Expected
}
try {
dbf.getAttribute("foo");
fail("IllegalArgumentException expected");
} catch (IllegalArgumentException e) {
// Expected
}
}
/**
* javax.xml.parsers.DocumentBuilderFactory#setSchema(javax.xml.validation.Schema).
*/
//TODO(b/265202484): Fix JavaLangIllegalArgumentException: http://www.w3.org/2001/XMLSchema
// public void test_setSchemaLjavax_xml_validation_Schema()
// throws MalformedURLException, SAXException, SAXNotSupportedException {
// String language = XMLConstants.W3C_XML_SCHEMA_NS_URI;
// String propName = SchemaFactory.class.getName() + ":" + language;
// String className =
// "org.apache.harmony.tests.javax.xml.parsers.MockSchemaFactory";
// // Test `SchemaFactory.newInstance` methods. Set system property for
// // schema language to make `newInstance` use the mock implementation of
// // `SchemaFactory`.
// System.setProperty(propName, className);
// SchemaFactory f1 = SchemaFactory.newInstance(language);
// SchemaFactory f2 = SchemaFactory.newInstance(language, className, null);
// // Test `SchemaFactory.newSchema` method for `File`, `URL` and `Source`.
// try {
// Schema s = f1.newSchema();
// s = f1.newSchema(new File("test.dtd"));
// s = f1.newSchema(new URL("https://test.org"));
// s = f1.newSchema(new StreamSource());
// dbf.setSchema(s);
// assertNotNull(dbf.getSchema());
// } catch (UnsupportedOperationException e) {
// // Expected.
// }
// // Test `newFactory` method for the mock `SchemaFactoryLoader`.
// SchemaFactoryLoader loader = new MockSchemaFactoryLoader();
// assertNull(loader.newFactory(language));
// // Test `getFeature` (the mock implementation always throws).
// try {
// f2.getFeature("woot");
// } catch (SAXNotRecognizedException e) {
// // Expected.
// }
// // Test `setFeature` (the mock implementation always throws).
// try {
// f2.setFeature("woot", true);
// } catch (SAXNotRecognizedException e) {
// // Expected.
// }
// // Test `getProperty` (the mock implementation always throws).
// try {
// f2.getProperty("woot");
// } catch (SAXNotRecognizedException e) {
// // Expected.
// }
// // Test `setProperty` (the mock implementation always throws).
// try {
// f2.setProperty("woot", null);
// } catch (SAXNotRecognizedException e) {
// // Expected.
// }
// }
/**
* javax.xml.parsers.DocumentBuilderFactory#setValidating(boolean).
*/
// public void test_setValidatingZ() {
// Exception parseException = null;
// DocumentBuilder parser = null;
// Document document = null;
//
// ValidationErrorHandler errorHandler = new ValidationErrorHandler();
//
// dbf.setValidating(false);
// assertFalse(dbf.isValidating());
//
// // case 1: Validation is not set. Correct xml-file
// try {
//
// parser = dbf.newDocumentBuilder();
// parser.setErrorHandler(errorHandler);
//
// document = parser.parse(getClass().getResourceAsStream(
// "/recipt.xml"));
//
// parseException = errorHandler.getFirstException();
//
// assertNotNull(document);
//
// document = parser.parse(getClass().getResourceAsStream(
// "/reciptWrong.xml"));
//
// parseException = errorHandler.getFirstException();
//
// assertNotNull(document);
//
// } catch (Exception ex) {
// parseException = ex;
// }
// parser.setErrorHandler(null);
//
// if (parseException != null) {
// fail("Unexpected exception " + parseException.getMessage());
// }
//
// // case 2: Validation is not set. Wrong xml-file
// try {
//
// parser = dbf.newDocumentBuilder();
// parser.setErrorHandler(errorHandler);
//
// document = parser.parse(getClass().getResourceAsStream(
// "/reciptWrong.xml"));
// parseException = errorHandler.getFirstException();
//
// assertNotNull(document);
//
// } catch (Exception ex) {
// parseException = ex;
// }
// parser.setErrorHandler(null);
//
// if (parseException != null) {
// fail("Unexpected exception " + parseException.getMessage());
// }
//
// // case 3: Validation is set. Correct xml-file
// dbf.setValidating(true);
// assertTrue(dbf.isValidating());
//
// try {
//
// parser = dbf.newDocumentBuilder();
// parser.setErrorHandler(errorHandler);
//
// document = parser.parse(getClass().getResourceAsStream(
// "/recipt.xml"));
// parseException = errorHandler.getFirstException();
//
// assertNotNull(document);
//
// } catch (Exception ex) {
// parseException = ex;
// }
// parser.setErrorHandler(null);
//
// if (parseException != null) {
// fail("Unexpected exception " + parseException.getMessage());
// }
//
// // case 4: Validation is set. Wrong xml-file
// try {
//
// parser = dbf.newDocumentBuilder();
// parser.setErrorHandler(errorHandler);
//
// document = parser.parse(getClass().getResourceAsStream(
// "/reciptWrong.xml"));
// parseException = errorHandler.getFirstException();
//
// assertNotNull(document);
//
// } catch (Exception ex) {
// parseException = ex;
// }
// parser.setErrorHandler(null);
//
// if (parseException == null) {
// fail("Unexpected exception " + parseException.getMessage());
// } else {
// assertTrue(parseException
// .getMessage()
// .contains(
// "The content of element type \"collection\" must match \"(description,recipe+)\""));
// }
//
// }
/**
* javax.xml.parsers.DocumentBuilderFactory#setXIncludeAware().
*/
// public void test_setXIncludeAware() {
// dbf.setXIncludeAware(true);
// assertTrue(dbf.isXIncludeAware());
//
// try {
// DocumentBuilder parser = dbf.newDocumentBuilder();
//
// Document document = parser.parse(getClass().getResourceAsStream(
// "/recipt.xml"));
//
// assertNotNull(document);
//
// } catch (IOException e) {
// fail("Unexpected IOException " + e.getMessage());
// } catch (ParserConfigurationException e) {
// fail("Unexpected ParserConfigurationException " + e.getMessage());
// } catch (SAXException e) {
// fail("Unexpected SAXException " + e.getMessage());
// }
//
// dbf.setXIncludeAware(false);
// assertFalse(dbf.isXIncludeAware());
//
// try {
// DocumentBuilder parser = dbf.newDocumentBuilder();
//
// Document document = parser.parse(getClass().getResourceAsStream(
// "/recipt.xml"));
//
// assertNotNull(document);
//
// } catch (IOException e) {
// fail("Unexpected IOException " + e.getMessage());
// } catch (ParserConfigurationException e) {
// fail("Unexpected ParserConfigurationException " + e.getMessage());
// } catch (SAXException e) {
// fail("Unexpected SAXException " + e.getMessage());
// }
// }
private void goThroughDocument(Node node, String indent) {
String value = node.getNodeValue();
if (value != null) {
value = value.replaceAll(" ", "");
value = value.replaceAll("\n", "");
}
switch (node.getNodeType()) {
case Node.CDATA_SECTION_NODE:
cdataElements.add(value);
// System.out.println(indent + "CDATA_SECTION_NODE " + value);
break;
case Node.COMMENT_NODE:
commentElements.add(value);
// System.out.println(indent + "COMMENT_NODE " + value);
break;
case Node.DOCUMENT_FRAGMENT_NODE:
// System.out.println(indent + "DOCUMENT_FRAGMENT_NODE " + value);
break;
case Node.DOCUMENT_NODE:
// System.out.println(indent + "DOCUMENT_NODE " + value);
break;
case Node.DOCUMENT_TYPE_NODE:
// System.out.println(indent + "DOCUMENT_TYPE_NODE " + value);
break;
case Node.ELEMENT_NODE:
// System.out.println(indent + "ELEMENT_NODE " + value);
break;
case Node.ENTITY_NODE:
// System.out.println(indent + "ENTITY_NODE " + value);
break;
case Node.ENTITY_REFERENCE_NODE:
// System.out.println(indent + "ENTITY_REFERENCE_NODE " + value);
break;
case Node.NOTATION_NODE:
// System.out.println(indent + "NOTATION_NODE " + value);
break;
case Node.PROCESSING_INSTRUCTION_NODE:
// System.out.println(indent + "PROCESSING_INSTRUCTION_NODE " +
// value);
break;
case Node.TEXT_NODE:
textElements.add(value);
// System.out.println(indent + "TEXT_NODE " + value);
break;
default:
// System.out.println(indent + "Unknown node " + value);
break;
}
NodeList list = node.getChildNodes();
for (int i = 0; i < list.getLength(); i++)
goThroughDocument(list.item(i), indent + " ");
}
private class ValidationErrorHandler implements ErrorHandler {
private SAXException parseException;
private int errorCount;
private int warningCount;
public ValidationErrorHandler() {
parseException = null;
errorCount = 0;
warningCount = 0;
}
public void error(SAXParseException ex) {
errorCount++;
if (parseException == null) {
parseException = ex;
}
}
public void warning(SAXParseException ex) {
warningCount++;
}
public void fatalError(SAXParseException ex) {
if (parseException == null) {
parseException = ex;
}
}
public SAXException getFirstException() {
return parseException;
}
}
private class DocumentBuilderFactoryChild extends DocumentBuilderFactory {
public DocumentBuilderFactoryChild() {
super();
}
public Object getAttribute(String name) {
return null;
}
public boolean getFeature(String name) {
return false;
}
public DocumentBuilder newDocumentBuilder() {
return null;
}
public void setAttribute(String name, Object value) {
}
public void setFeature(String name, boolean value) {
}
}
}
|
googleapis/google-cloud-java | 35,485 | java-alloydb/proto-google-cloud-alloydb-v1beta/src/main/java/com/google/cloud/alloydb/v1beta/SslConfig.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/alloydb/v1beta/resources.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.alloydb.v1beta;
/**
*
*
* <pre>
* SSL configuration.
* </pre>
*
* Protobuf type {@code google.cloud.alloydb.v1beta.SslConfig}
*/
public final class SslConfig extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.alloydb.v1beta.SslConfig)
SslConfigOrBuilder {
private static final long serialVersionUID = 0L;
// Use SslConfig.newBuilder() to construct.
private SslConfig(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private SslConfig() {
sslMode_ = 0;
caSource_ = 0;
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new SslConfig();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.alloydb.v1beta.ResourcesProto
.internal_static_google_cloud_alloydb_v1beta_SslConfig_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.alloydb.v1beta.ResourcesProto
.internal_static_google_cloud_alloydb_v1beta_SslConfig_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.alloydb.v1beta.SslConfig.class,
com.google.cloud.alloydb.v1beta.SslConfig.Builder.class);
}
/**
*
*
* <pre>
* SSL mode options.
* </pre>
*
* Protobuf enum {@code google.cloud.alloydb.v1beta.SslConfig.SslMode}
*/
public enum SslMode implements com.google.protobuf.ProtocolMessageEnum {
/**
*
*
* <pre>
* SSL mode is not specified. Defaults to ENCRYPTED_ONLY.
* </pre>
*
* <code>SSL_MODE_UNSPECIFIED = 0;</code>
*/
SSL_MODE_UNSPECIFIED(0),
/**
*
*
* <pre>
* SSL connections are optional. CA verification not enforced.
* </pre>
*
* <code>SSL_MODE_ALLOW = 1 [deprecated = true];</code>
*/
@java.lang.Deprecated
SSL_MODE_ALLOW(1),
/**
*
*
* <pre>
* SSL connections are required. CA verification not enforced.
* Clients may use locally self-signed certificates (default psql client
* behavior).
* </pre>
*
* <code>SSL_MODE_REQUIRE = 2 [deprecated = true];</code>
*/
@java.lang.Deprecated
SSL_MODE_REQUIRE(2),
/**
*
*
* <pre>
* SSL connections are required. CA verification enforced.
* Clients must have certificates signed by a Cluster CA, for example, using
* GenerateClientCertificate.
* </pre>
*
* <code>SSL_MODE_VERIFY_CA = 3 [deprecated = true];</code>
*/
@java.lang.Deprecated
SSL_MODE_VERIFY_CA(3),
/**
*
*
* <pre>
* SSL connections are optional. CA verification not enforced.
* </pre>
*
* <code>ALLOW_UNENCRYPTED_AND_ENCRYPTED = 4;</code>
*/
ALLOW_UNENCRYPTED_AND_ENCRYPTED(4),
/**
*
*
* <pre>
* SSL connections are required. CA verification not enforced.
* </pre>
*
* <code>ENCRYPTED_ONLY = 5;</code>
*/
ENCRYPTED_ONLY(5),
UNRECOGNIZED(-1),
;
/**
*
*
* <pre>
* SSL mode is not specified. Defaults to ENCRYPTED_ONLY.
* </pre>
*
* <code>SSL_MODE_UNSPECIFIED = 0;</code>
*/
public static final int SSL_MODE_UNSPECIFIED_VALUE = 0;
/**
*
*
* <pre>
* SSL connections are optional. CA verification not enforced.
* </pre>
*
* <code>SSL_MODE_ALLOW = 1 [deprecated = true];</code>
*/
@java.lang.Deprecated public static final int SSL_MODE_ALLOW_VALUE = 1;
/**
*
*
* <pre>
* SSL connections are required. CA verification not enforced.
* Clients may use locally self-signed certificates (default psql client
* behavior).
* </pre>
*
* <code>SSL_MODE_REQUIRE = 2 [deprecated = true];</code>
*/
@java.lang.Deprecated public static final int SSL_MODE_REQUIRE_VALUE = 2;
/**
*
*
* <pre>
* SSL connections are required. CA verification enforced.
* Clients must have certificates signed by a Cluster CA, for example, using
* GenerateClientCertificate.
* </pre>
*
* <code>SSL_MODE_VERIFY_CA = 3 [deprecated = true];</code>
*/
@java.lang.Deprecated public static final int SSL_MODE_VERIFY_CA_VALUE = 3;
/**
*
*
* <pre>
* SSL connections are optional. CA verification not enforced.
* </pre>
*
* <code>ALLOW_UNENCRYPTED_AND_ENCRYPTED = 4;</code>
*/
public static final int ALLOW_UNENCRYPTED_AND_ENCRYPTED_VALUE = 4;
/**
*
*
* <pre>
* SSL connections are required. CA verification not enforced.
* </pre>
*
* <code>ENCRYPTED_ONLY = 5;</code>
*/
public static final int ENCRYPTED_ONLY_VALUE = 5;
public final int getNumber() {
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalArgumentException(
"Can't get the number of an unknown enum value.");
}
return value;
}
/**
* @param value The numeric wire value of the corresponding enum entry.
* @return The enum associated with the given numeric wire value.
* @deprecated Use {@link #forNumber(int)} instead.
*/
@java.lang.Deprecated
public static SslMode valueOf(int value) {
return forNumber(value);
}
/**
* @param value The numeric wire value of the corresponding enum entry.
* @return The enum associated with the given numeric wire value.
*/
public static SslMode forNumber(int value) {
switch (value) {
case 0:
return SSL_MODE_UNSPECIFIED;
case 1:
return SSL_MODE_ALLOW;
case 2:
return SSL_MODE_REQUIRE;
case 3:
return SSL_MODE_VERIFY_CA;
case 4:
return ALLOW_UNENCRYPTED_AND_ENCRYPTED;
case 5:
return ENCRYPTED_ONLY;
default:
return null;
}
}
public static com.google.protobuf.Internal.EnumLiteMap<SslMode> internalGetValueMap() {
return internalValueMap;
}
private static final com.google.protobuf.Internal.EnumLiteMap<SslMode> internalValueMap =
new com.google.protobuf.Internal.EnumLiteMap<SslMode>() {
public SslMode findValueByNumber(int number) {
return SslMode.forNumber(number);
}
};
public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() {
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalStateException(
"Can't get the descriptor of an unrecognized enum value.");
}
return getDescriptor().getValues().get(ordinal());
}
public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() {
return getDescriptor();
}
public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() {
return com.google.cloud.alloydb.v1beta.SslConfig.getDescriptor().getEnumTypes().get(0);
}
private static final SslMode[] VALUES = values();
public static SslMode valueOf(com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
}
if (desc.getIndex() == -1) {
return UNRECOGNIZED;
}
return VALUES[desc.getIndex()];
}
private final int value;
private SslMode(int value) {
this.value = value;
}
// @@protoc_insertion_point(enum_scope:google.cloud.alloydb.v1beta.SslConfig.SslMode)
}
  /**
   *
   *
   * <pre>
   * Certificate Authority (CA) source for SSL/TLS certificates.
   * </pre>
   *
   * Protobuf enum {@code google.cloud.alloydb.v1beta.SslConfig.CaSource}
   */
  // NOTE(review): protoc-generated code — regenerate from the .proto rather
  // than hand-editing; getValueDescriptor() relies on ordinal() matching the
  // descriptor's value index.
  public enum CaSource implements com.google.protobuf.ProtocolMessageEnum {
    /**
     *
     *
     * <pre>
     * Certificate Authority (CA) source not specified. Defaults to
     * CA_SOURCE_MANAGED.
     * </pre>
     *
     * <code>CA_SOURCE_UNSPECIFIED = 0;</code>
     */
    CA_SOURCE_UNSPECIFIED(0),
    /**
     *
     *
     * <pre>
     * Certificate Authority (CA) managed by the AlloyDB Cluster.
     * </pre>
     *
     * <code>CA_SOURCE_MANAGED = 1;</code>
     */
    CA_SOURCE_MANAGED(1),
    // Sentinel for wire values this generated code does not know about;
    // carries no valid number (getNumber() throws for it).
    UNRECOGNIZED(-1),
    ;

    /**
     *
     *
     * <pre>
     * Certificate Authority (CA) source not specified. Defaults to
     * CA_SOURCE_MANAGED.
     * </pre>
     *
     * <code>CA_SOURCE_UNSPECIFIED = 0;</code>
     */
    public static final int CA_SOURCE_UNSPECIFIED_VALUE = 0;

    /**
     *
     *
     * <pre>
     * Certificate Authority (CA) managed by the AlloyDB Cluster.
     * </pre>
     *
     * <code>CA_SOURCE_MANAGED = 1;</code>
     */
    public static final int CA_SOURCE_MANAGED_VALUE = 1;

    public final int getNumber() {
      if (this == UNRECOGNIZED) {
        throw new java.lang.IllegalArgumentException(
            "Can't get the number of an unknown enum value.");
      }
      return value;
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static CaSource valueOf(int value) {
      return forNumber(value);
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     */
    public static CaSource forNumber(int value) {
      switch (value) {
        case 0:
          return CA_SOURCE_UNSPECIFIED;
        case 1:
          return CA_SOURCE_MANAGED;
        default:
          return null;
      }
    }

    public static com.google.protobuf.Internal.EnumLiteMap<CaSource> internalGetValueMap() {
      return internalValueMap;
    }

    private static final com.google.protobuf.Internal.EnumLiteMap<CaSource> internalValueMap =
        new com.google.protobuf.Internal.EnumLiteMap<CaSource>() {
          public CaSource findValueByNumber(int number) {
            return CaSource.forNumber(number);
          }
        };

    public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() {
      if (this == UNRECOGNIZED) {
        throw new java.lang.IllegalStateException(
            "Can't get the descriptor of an unrecognized enum value.");
      }
      return getDescriptor().getValues().get(ordinal());
    }

    public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() {
      return getDescriptor();
    }

    public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() {
      // Index 1: CaSource is the second nested enum of SslConfig.
      return com.google.cloud.alloydb.v1beta.SslConfig.getDescriptor().getEnumTypes().get(1);
    }

    private static final CaSource[] VALUES = values();

    public static CaSource valueOf(com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
      }
      if (desc.getIndex() == -1) {
        return UNRECOGNIZED;
      }
      return VALUES[desc.getIndex()];
    }

    private final int value;

    private CaSource(int value) {
      this.value = value;
    }

    // @@protoc_insertion_point(enum_scope:google.cloud.alloydb.v1beta.SslConfig.CaSource)
  }
  public static final int SSL_MODE_FIELD_NUMBER = 1;
  // Stored as the raw wire value (int, not the enum) so that numbers outside
  // the known enum range survive a parse/serialize round trip.
  private int sslMode_ = 0;

  /**
   *
   *
   * <pre>
   * Optional. SSL mode. Specifies client-server SSL/TLS connection behavior.
   * </pre>
   *
   * <code>
   * .google.cloud.alloydb.v1beta.SslConfig.SslMode ssl_mode = 1 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return The enum numeric value on the wire for sslMode.
   */
  @java.lang.Override
  public int getSslModeValue() {
    return sslMode_;
  }

  /**
   *
   *
   * <pre>
   * Optional. SSL mode. Specifies client-server SSL/TLS connection behavior.
   * </pre>
   *
   * <code>
   * .google.cloud.alloydb.v1beta.SslConfig.SslMode ssl_mode = 1 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return The sslMode.
   */
  @java.lang.Override
  public com.google.cloud.alloydb.v1beta.SslConfig.SslMode getSslMode() {
    // Unknown wire numbers map to UNRECOGNIZED rather than null.
    com.google.cloud.alloydb.v1beta.SslConfig.SslMode result =
        com.google.cloud.alloydb.v1beta.SslConfig.SslMode.forNumber(sslMode_);
    return result == null ? com.google.cloud.alloydb.v1beta.SslConfig.SslMode.UNRECOGNIZED : result;
  }

  public static final int CA_SOURCE_FIELD_NUMBER = 2;
  // Raw wire value; see sslMode_ above for why this is an int.
  private int caSource_ = 0;

  /**
   *
   *
   * <pre>
   * Optional. Certificate Authority (CA) source. Only CA_SOURCE_MANAGED is
   * supported currently, and is the default value.
   * </pre>
   *
   * <code>
   * .google.cloud.alloydb.v1beta.SslConfig.CaSource ca_source = 2 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return The enum numeric value on the wire for caSource.
   */
  @java.lang.Override
  public int getCaSourceValue() {
    return caSource_;
  }

  /**
   *
   *
   * <pre>
   * Optional. Certificate Authority (CA) source. Only CA_SOURCE_MANAGED is
   * supported currently, and is the default value.
   * </pre>
   *
   * <code>
   * .google.cloud.alloydb.v1beta.SslConfig.CaSource ca_source = 2 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return The caSource.
   */
  @java.lang.Override
  public com.google.cloud.alloydb.v1beta.SslConfig.CaSource getCaSource() {
    // Unknown wire numbers map to UNRECOGNIZED rather than null.
    com.google.cloud.alloydb.v1beta.SslConfig.CaSource result =
        com.google.cloud.alloydb.v1beta.SslConfig.CaSource.forNumber(caSource_);
    return result == null
        ? com.google.cloud.alloydb.v1beta.SslConfig.CaSource.UNRECOGNIZED
        : result;
  }
  // Memoized isInitialized() result: -1 = not yet computed, 1 = initialized,
  // 0 = not initialized.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    // No required fields in this message, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // proto3 semantics: fields holding the default (zero) enum value are
    // omitted from the wire.
    if (sslMode_
        != com.google.cloud.alloydb.v1beta.SslConfig.SslMode.SSL_MODE_UNSPECIFIED.getNumber()) {
      output.writeEnum(1, sslMode_);
    }
    if (caSource_
        != com.google.cloud.alloydb.v1beta.SslConfig.CaSource.CA_SOURCE_UNSPECIFIED.getNumber()) {
      output.writeEnum(2, caSource_);
    }
    getUnknownFields().writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    // Size is memoized; -1 means "not yet computed". Mirrors writeTo()'s
    // default-skipping logic exactly.
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (sslMode_
        != com.google.cloud.alloydb.v1beta.SslConfig.SslMode.SSL_MODE_UNSPECIFIED.getNumber()) {
      size += com.google.protobuf.CodedOutputStream.computeEnumSize(1, sslMode_);
    }
    if (caSource_
        != com.google.cloud.alloydb.v1beta.SslConfig.CaSource.CA_SOURCE_UNSPECIFIED.getNumber()) {
      size += com.google.protobuf.CodedOutputStream.computeEnumSize(2, caSource_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.alloydb.v1beta.SslConfig)) {
      return super.equals(obj);
    }
    com.google.cloud.alloydb.v1beta.SslConfig other =
        (com.google.cloud.alloydb.v1beta.SslConfig) obj;

    // Compares raw wire values, so unknown enum numbers also participate.
    if (sslMode_ != other.sslMode_) return false;
    if (caSource_ != other.caSource_) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    // Memoized; 0 means "not computed yet".
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + SSL_MODE_FIELD_NUMBER;
    hash = (53 * hash) + sslMode_;
    hash = (37 * hash) + CA_SOURCE_FIELD_NUMBER;
    hash = (53 * hash) + caSource_;
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points. Every overload ultimately
  // delegates to the static PARSER (byte-array/ByteBuffer variants) or to the
  // GeneratedMessageV3 stream helpers.
  public static com.google.cloud.alloydb.v1beta.SslConfig parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.alloydb.v1beta.SslConfig parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.alloydb.v1beta.SslConfig parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.alloydb.v1beta.SslConfig parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.alloydb.v1beta.SslConfig parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.alloydb.v1beta.SslConfig parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.alloydb.v1beta.SslConfig parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.alloydb.v1beta.SslConfig parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // "Delimited" variants read a varint length prefix before the message body.
  public static com.google.cloud.alloydb.v1beta.SslConfig parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.alloydb.v1beta.SslConfig parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.alloydb.v1beta.SslConfig parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.alloydb.v1beta.SslConfig parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(com.google.cloud.alloydb.v1beta.SslConfig prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    // The default instance yields a fresh Builder; anything else seeds the
    // Builder with this message's fields.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * SSL configuration.
   * </pre>
   *
   * Protobuf type {@code google.cloud.alloydb.v1beta.SslConfig}
   */
  // NOTE(review): protoc-generated builder — regenerate from the .proto
  // rather than hand-editing.
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.alloydb.v1beta.SslConfig)
      com.google.cloud.alloydb.v1beta.SslConfigOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.alloydb.v1beta.ResourcesProto
          .internal_static_google_cloud_alloydb_v1beta_SslConfig_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.alloydb.v1beta.ResourcesProto
          .internal_static_google_cloud_alloydb_v1beta_SslConfig_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.alloydb.v1beta.SslConfig.class,
              com.google.cloud.alloydb.v1beta.SslConfig.Builder.class);
    }

    // Construct using com.google.cloud.alloydb.v1beta.SslConfig.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      sslMode_ = 0;
      caSource_ = 0;
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.alloydb.v1beta.ResourcesProto
          .internal_static_google_cloud_alloydb_v1beta_SslConfig_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.alloydb.v1beta.SslConfig getDefaultInstanceForType() {
      return com.google.cloud.alloydb.v1beta.SslConfig.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.alloydb.v1beta.SslConfig build() {
      com.google.cloud.alloydb.v1beta.SslConfig result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.alloydb.v1beta.SslConfig buildPartial() {
      com.google.cloud.alloydb.v1beta.SslConfig result =
          new com.google.cloud.alloydb.v1beta.SslConfig(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies only the fields whose presence bit is set in bitField0_.
    private void buildPartial0(com.google.cloud.alloydb.v1beta.SslConfig result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.sslMode_ = sslMode_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.caSource_ = caSource_;
      }
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.alloydb.v1beta.SslConfig) {
        return mergeFrom((com.google.cloud.alloydb.v1beta.SslConfig) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.cloud.alloydb.v1beta.SslConfig other) {
      // Merging the default instance is a no-op; non-default (non-zero)
      // fields of `other` overwrite this builder's values.
      if (other == com.google.cloud.alloydb.v1beta.SslConfig.getDefaultInstance()) return this;
      if (other.sslMode_ != 0) {
        setSslModeValue(other.getSslModeValue());
      }
      if (other.caSource_ != 0) {
        setCaSourceValue(other.getCaSourceValue());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 8:
              {
                sslMode_ = input.readEnum();
                bitField0_ |= 0x00000001;
                break;
              } // case 8
            case 16:
              {
                caSource_ = input.readEnum();
                bitField0_ |= 0x00000002;
                break;
              } // case 16
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    // Field-presence bits: bit 0 = sslMode_, bit 1 = caSource_.
    private int bitField0_;

    private int sslMode_ = 0;

    /**
     *
     *
     * <pre>
     * Optional. SSL mode. Specifies client-server SSL/TLS connection behavior.
     * </pre>
     *
     * <code>
     * .google.cloud.alloydb.v1beta.SslConfig.SslMode ssl_mode = 1 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @return The enum numeric value on the wire for sslMode.
     */
    @java.lang.Override
    public int getSslModeValue() {
      return sslMode_;
    }

    /**
     *
     *
     * <pre>
     * Optional. SSL mode. Specifies client-server SSL/TLS connection behavior.
     * </pre>
     *
     * <code>
     * .google.cloud.alloydb.v1beta.SslConfig.SslMode ssl_mode = 1 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @param value The enum numeric value on the wire for sslMode to set.
     * @return This builder for chaining.
     */
    public Builder setSslModeValue(int value) {
      sslMode_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Optional. SSL mode. Specifies client-server SSL/TLS connection behavior.
     * </pre>
     *
     * <code>
     * .google.cloud.alloydb.v1beta.SslConfig.SslMode ssl_mode = 1 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @return The sslMode.
     */
    @java.lang.Override
    public com.google.cloud.alloydb.v1beta.SslConfig.SslMode getSslMode() {
      com.google.cloud.alloydb.v1beta.SslConfig.SslMode result =
          com.google.cloud.alloydb.v1beta.SslConfig.SslMode.forNumber(sslMode_);
      return result == null
          ? com.google.cloud.alloydb.v1beta.SslConfig.SslMode.UNRECOGNIZED
          : result;
    }

    /**
     *
     *
     * <pre>
     * Optional. SSL mode. Specifies client-server SSL/TLS connection behavior.
     * </pre>
     *
     * <code>
     * .google.cloud.alloydb.v1beta.SslConfig.SslMode ssl_mode = 1 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @param value The sslMode to set.
     * @return This builder for chaining.
     */
    public Builder setSslMode(com.google.cloud.alloydb.v1beta.SslConfig.SslMode value) {
      if (value == null) {
        throw new NullPointerException();
      }
      bitField0_ |= 0x00000001;
      sslMode_ = value.getNumber();
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Optional. SSL mode. Specifies client-server SSL/TLS connection behavior.
     * </pre>
     *
     * <code>
     * .google.cloud.alloydb.v1beta.SslConfig.SslMode ssl_mode = 1 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearSslMode() {
      bitField0_ = (bitField0_ & ~0x00000001);
      sslMode_ = 0;
      onChanged();
      return this;
    }

    private int caSource_ = 0;

    /**
     *
     *
     * <pre>
     * Optional. Certificate Authority (CA) source. Only CA_SOURCE_MANAGED is
     * supported currently, and is the default value.
     * </pre>
     *
     * <code>
     * .google.cloud.alloydb.v1beta.SslConfig.CaSource ca_source = 2 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @return The enum numeric value on the wire for caSource.
     */
    @java.lang.Override
    public int getCaSourceValue() {
      return caSource_;
    }

    /**
     *
     *
     * <pre>
     * Optional. Certificate Authority (CA) source. Only CA_SOURCE_MANAGED is
     * supported currently, and is the default value.
     * </pre>
     *
     * <code>
     * .google.cloud.alloydb.v1beta.SslConfig.CaSource ca_source = 2 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @param value The enum numeric value on the wire for caSource to set.
     * @return This builder for chaining.
     */
    public Builder setCaSourceValue(int value) {
      caSource_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Optional. Certificate Authority (CA) source. Only CA_SOURCE_MANAGED is
     * supported currently, and is the default value.
     * </pre>
     *
     * <code>
     * .google.cloud.alloydb.v1beta.SslConfig.CaSource ca_source = 2 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @return The caSource.
     */
    @java.lang.Override
    public com.google.cloud.alloydb.v1beta.SslConfig.CaSource getCaSource() {
      com.google.cloud.alloydb.v1beta.SslConfig.CaSource result =
          com.google.cloud.alloydb.v1beta.SslConfig.CaSource.forNumber(caSource_);
      return result == null
          ? com.google.cloud.alloydb.v1beta.SslConfig.CaSource.UNRECOGNIZED
          : result;
    }

    /**
     *
     *
     * <pre>
     * Optional. Certificate Authority (CA) source. Only CA_SOURCE_MANAGED is
     * supported currently, and is the default value.
     * </pre>
     *
     * <code>
     * .google.cloud.alloydb.v1beta.SslConfig.CaSource ca_source = 2 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @param value The caSource to set.
     * @return This builder for chaining.
     */
    public Builder setCaSource(com.google.cloud.alloydb.v1beta.SslConfig.CaSource value) {
      if (value == null) {
        throw new NullPointerException();
      }
      bitField0_ |= 0x00000002;
      caSource_ = value.getNumber();
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Optional. Certificate Authority (CA) source. Only CA_SOURCE_MANAGED is
     * supported currently, and is the default value.
     * </pre>
     *
     * <code>
     * .google.cloud.alloydb.v1beta.SslConfig.CaSource ca_source = 2 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearCaSource() {
      bitField0_ = (bitField0_ & ~0x00000002);
      caSource_ = 0;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.alloydb.v1beta.SslConfig)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.alloydb.v1beta.SslConfig)
  // Singleton default instance; mergeFrom(SslConfig) treats it as the
  // identity value.
  private static final com.google.cloud.alloydb.v1beta.SslConfig DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.alloydb.v1beta.SslConfig();
  }

  public static com.google.cloud.alloydb.v1beta.SslConfig getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<SslConfig> PARSER =
      new com.google.protobuf.AbstractParser<SslConfig>() {
        @java.lang.Override
        public SslConfig parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            // Attach the partially-parsed message so callers can inspect it.
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<SslConfig> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<SslConfig> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.alloydb.v1beta.SslConfig getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/hama | 35,448 | core/src/main/java/org/apache/hama/util/Bytes.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hama.util;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.math.BigInteger;
import java.nio.ByteBuffer;
import java.util.Comparator;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.io.RawComparator;
import org.apache.hadoop.io.WritableComparator;
import org.apache.hadoop.io.WritableUtils;
import org.apache.hama.Constants;
/**
* Utility class that handles byte arrays, conversions to/from other types,
* comparisons, hash code generation, manufacturing keys for HashMaps or
* HashSets, etc.
*/
public class Bytes {
  private static final Log LOG = LogFactory.getLog(Bytes.class);

  // All SIZEOF_* constants are widths in bytes, derived from the boxed
  // types' SIZE (bits) divided by Byte.SIZE (8).

  /**
   * Size of boolean in bytes
   */
  public static final int SIZEOF_BOOLEAN = Byte.SIZE / Byte.SIZE;

  /**
   * Size of byte in bytes (same as SIZEOF_BOOLEAN: one byte)
   */
  public static final int SIZEOF_BYTE = SIZEOF_BOOLEAN;

  /**
   * Size of char in bytes
   */
  public static final int SIZEOF_CHAR = Character.SIZE / Byte.SIZE;

  /**
   * Size of double in bytes
   */
  public static final int SIZEOF_DOUBLE = Double.SIZE / Byte.SIZE;

  /**
   * Size of float in bytes
   */
  public static final int SIZEOF_FLOAT = Float.SIZE / Byte.SIZE;

  /**
   * Size of int in bytes
   */
  public static final int SIZEOF_INT = Integer.SIZE / Byte.SIZE;

  /**
   * Size of long in bytes
   */
  public static final int SIZEOF_LONG = Long.SIZE / Byte.SIZE;

  /**
   * Size of short in bytes
   */
  public static final int SIZEOF_SHORT = Short.SIZE / Byte.SIZE;

  /**
   * Estimate of size cost to pay beyond payload in jvm for instance of byte [].
   * Estimate based on study of jhat and jprofiler numbers.
   */
  // JHat says BU is 56 bytes.
  // SizeOf which uses java.lang.instrument says 24 bytes. (3 longs?)
  public static final int ESTIMATED_HEAP_TAX = 16;
  /**
   * Byte array comparator class.
   */
  public static class ByteArrayComparator implements RawComparator<byte[]> {
    /**
     * Constructor
     */
    public ByteArrayComparator() {
      super();
    }

    // Both overloads delegate to Bytes.compareTo (defined elsewhere in this
    // file); presumably an unsigned lexicographic byte ordering — confirm
    // against compareTo before relying on ordering semantics.
    @Override
    public int compare(byte[] left, byte[] right) {
      return compareTo(left, right);
    }

    @Override
    public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
      return compareTo(b1, s1, l1, b2, s2, l2);
    }
  }
  /**
   * Pass this to TreeMaps where byte [] are keys.
   */
  // NOTE(review): these are mutable public statics that read as constants;
  // they should probably be declared final — confirm no caller reassigns
  // them before tightening.
  public static Comparator<byte[]> BYTES_COMPARATOR = new ByteArrayComparator();

  /**
   * Use comparing byte arrays, byte-by-byte
   */
  public static RawComparator<byte[]> BYTES_RAWCOMPARATOR = new ByteArrayComparator();
/**
* Read byte-array written with a WritableableUtils.vint prefix.
*
* @param in Input to read from.
* @return byte array read off <code>in</code>
* @throws IOException e
*/
public static byte[] readByteArray(final DataInput in) throws IOException {
int len = WritableUtils.readVInt(in);
if (len < 0) {
throw new NegativeArraySizeException(Integer.toString(len));
}
byte[] result = new byte[len];
in.readFully(result, 0, len);
return result;
}
/**
* Read byte-array written with a WritableableUtils.vint prefix. IOException
* is converted to a RuntimeException.
*
* @param in Input to read from.
* @return byte array read off <code>in</code>
*/
public static byte[] readByteArrayThrowsRuntime(final DataInput in) {
try {
return readByteArray(in);
} catch (Exception e) {
throw new RuntimeException(e);
}
}
/**
* Write byte-array with a WritableableUtils.vint prefix.
*
* @param out output stream to be written to
* @param b array to write
* @throws IOException e
*/
public static void writeByteArray(final DataOutput out, final byte[] b)
throws IOException {
if (b == null) {
WritableUtils.writeVInt(out, 0);
} else {
writeByteArray(out, b, 0, b.length);
}
}
  /**
   * Write byte-array to out with a vint length prefix.
   *
   * (The vint prefix must be emitted before the payload — this ordering is
   * the wire format consumed by {@link #readByteArray(DataInput)}.)
   *
   * @param out output stream
   * @param b array
   * @param offset offset into array
   * @param length length past offset
   * @throws IOException e
   */
  public static void writeByteArray(final DataOutput out, final byte[] b,
      final int offset, final int length) throws IOException {
    WritableUtils.writeVInt(out, length);
    out.write(b, offset, length);
  }
/**
* Write byte-array from src to tgt with a vint length prefix.
*
* @param tgt target array
* @param tgtOffset offset into target array
* @param src source array
* @param srcOffset source offset
* @param srcLength source length
* @return New offset in src array.
*/
public static int writeByteArray(final byte[] tgt, final int tgtOffset,
final byte[] src, final int srcOffset, final int srcLength) {
byte[] vint = vintToBytes(srcLength);
System.arraycopy(vint, 0, tgt, tgtOffset, vint.length);
int offset = tgtOffset + vint.length;
System.arraycopy(src, srcOffset, tgt, offset, srcLength);
return offset + srcLength;
}
/**
* Put bytes at the specified byte array position.
*
* @param tgtBytes the byte array
* @param tgtOffset position in the array
* @param srcBytes array to write out
* @param srcOffset source offset
* @param srcLength source length
* @return incremented offset
*/
public static int putBytes(byte[] tgtBytes, int tgtOffset, byte[] srcBytes,
int srcOffset, int srcLength) {
System.arraycopy(srcBytes, srcOffset, tgtBytes, tgtOffset, srcLength);
return tgtOffset + srcLength;
}
/**
* Write a single byte out to the specified byte array position.
*
* @param bytes the byte array
* @param offset position in the array
* @param b byte to write out
* @return incremented offset
*/
public static int putByte(byte[] bytes, int offset, byte b) {
bytes[offset] = b;
return offset + 1;
}
/**
* Returns a new byte array, copied from the passed ByteBuffer.
*
* @param bb A ByteBuffer
* @return the byte array
*/
public static byte[] toBytes(ByteBuffer bb) {
int length = bb.limit();
byte[] result = new byte[length];
System.arraycopy(bb.array(), bb.arrayOffset(), result, 0, length);
return result;
}
/**
* @param b Presumed UTF-8 encoded byte array.
* @return String made from <code>b</code>
*/
public static String toString(final byte[] b) {
if (b == null) {
return null;
}
return toString(b, 0, b.length);
}
  /**
   * Joins two byte arrays together using a separator. Both arrays must be
   * non-null (no null checks are performed here).
   *
   * @param b1 The first byte array.
   * @param sep The separator to use.
   * @param b2 The second byte array.
   * @return the decoded <code>b1</code>, then <code>sep</code>, then the
   *         decoded <code>b2</code>
   */
  public static String toString(final byte[] b1, String sep, final byte[] b2) {
    return toString(b1, 0, b1.length) + sep + toString(b2, 0, b2.length);
  }
  /**
   * This method will convert utf8 encoded bytes into a string. If an
   * UnsupportedEncodingException occurs, this method will eat it and return
   * null instead.
   *
   * @param b Presumed UTF-8 encoded byte array.
   * @param off offset into array
   * @param len length of utf-8 sequence
   * @return String made from <code>b</code> or null
   */
  public static String toString(final byte[] b, int off, int len) {
    // Keep the null check ahead of the length check: a null array must map
    // to null even when len == 0.
    if (b == null) {
      return null;
    }
    if (len == 0) {
      return "";
    }
    try {
      return new String(b, off, len, Constants.UTF8_ENCODING);
    } catch (UnsupportedEncodingException e) {
      // Effectively unreachable: UTF-8 is a mandatory charset on every JVM.
      LOG.error("UTF-8 not supported?", e);
      return null;
    }
  }
  /**
   * Write a printable representation of a byte array.
   *
   * (Convenience overload covering the whole array; <code>b</code> must be
   * non-null.)
   *
   * @param b byte array
   * @return string
   * @see #toStringBinary(byte[], int, int)
   */
  public static String toStringBinary(final byte[] b) {
    return toStringBinary(b, 0, b.length);
  }
/**
* Write a printable representation of a byte array. Non-printable characters
* are hex escaped in the format \\x%02X, eg: \x00 \x05 etc
*
* @param b array to write out
* @param off offset to start at
* @param len length to write
* @return string output
*/
public static String toStringBinary(final byte[] b, int off, int len) {
StringBuilder result = new StringBuilder();
try {
String first = new String(b, off, len, "ISO-8859-1");
for (int i = 0; i < first.length(); ++i) {
int ch = first.charAt(i) & 0xFF;
if ((ch >= '0' && ch <= '9') || (ch >= 'A' && ch <= 'Z')
|| (ch >= 'a' && ch <= 'z') || ch == ',' || ch == '_' || ch == '-'
|| ch == ':' || ch == ' ' || ch == '<' || ch == '>' || ch == '='
|| ch == '/' || ch == '.') {
result.append(first.charAt(i));
} else {
result.append(String.format("\\x%02X", ch));
}
}
} catch (UnsupportedEncodingException e) {
LOG.error("ISO-8859-1 not supported?", e);
}
return result.toString();
}
private static boolean isHexDigit(char c) {
return (c >= 'A' && c <= 'F') || (c >= '0' && c <= '9');
}
/**
* Takes a ASCII digit in the range A-F0-9 and returns the corresponding
* integer/ordinal value.
*
* @param ch The hex digit.
* @return The converted hex value as a byte.
*/
public static byte toBinaryFromHex(byte ch) {
if (ch >= 'A' && ch <= 'F')
return (byte) ((byte) 10 + (byte) (ch - 'A'));
// else
return (byte) (ch - '0');
}
public static byte[] toBytesBinary(String in) {
// this may be bigger than we need, but lets be safe.
byte[] b = new byte[in.length()];
int size = 0;
for (int i = 0; i < in.length(); ++i) {
char ch = in.charAt(i);
if (ch == '\\') {
// begin hex escape:
char next = in.charAt(i + 1);
if (next != 'x') {
// invalid escape sequence, ignore this one.
b[size++] = (byte) ch;
continue;
}
// ok, take next 2 hex digits.
char hd1 = in.charAt(i + 2);
char hd2 = in.charAt(i + 3);
// they need to be A-F0-9:
if (!isHexDigit(hd1) || !isHexDigit(hd2)) {
// bogus escape code, ignore:
continue;
}
// turn hex ASCII digit -> number
byte d = (byte) ((toBinaryFromHex((byte) hd1) << 4) + toBinaryFromHex((byte) hd2));
b[size++] = d;
i += 3; // skip 3
} else {
b[size++] = (byte) ch;
}
}
// resize:
byte[] b2 = new byte[size];
System.arraycopy(b, 0, b2, 0, size);
return b2;
}
/**
* Converts a string to a UTF-8 byte array.
*
* @param s string
* @return the byte array
*/
public static byte[] toBytes(String s) {
try {
return s.getBytes(Constants.UTF8_ENCODING);
} catch (UnsupportedEncodingException e) {
LOG.error("UTF-8 not supported?", e);
return null;
}
}
/**
* Convert a boolean to a byte array. True becomes -1 and false becomes 0.
*
* @param b value
* @return <code>b</code> encoded in a byte array.
*/
public static byte[] toBytes(final boolean b) {
return new byte[] { b ? (byte) -1 : (byte) 0 };
}
/**
* Reverses {@link #toBytes(boolean)}
*
* @param b array
* @return True or false.
*/
public static boolean toBoolean(final byte[] b) {
if (b.length != 1) {
throw new IllegalArgumentException("Array has wrong size: " + b.length);
}
return b[0] != (byte) 0;
}
/**
* Convert a long value to a byte array using big-endian.
*
* @param pVal value to convert
* @return the byte array
*/
public static byte[] toBytes(long pVal) {
long val = pVal;
byte[] b = new byte[8];
for (int i = 7; i > 0; i--) {
b[i] = (byte) val;
val >>>= 8;
}
b[0] = (byte) val;
return b;
}
/**
* Converts a byte array to a long value. Reverses {@link #toBytes(long)}
*
* @param bytes array
* @return the long value
*/
public static long toLong(byte[] bytes) {
return toLong(bytes, 0, SIZEOF_LONG);
}
  /**
   * Converts a byte array to a long value. Assumes there will be
   * {@link #SIZEOF_LONG} big-endian bytes available at <code>offset</code>.
   *
   * @param bytes bytes
   * @param offset offset into the array at which the value starts
   * @return the long value
   */
  public static long toLong(byte[] bytes, int offset) {
    return toLong(bytes, offset, SIZEOF_LONG);
  }
/**
* Converts a byte array to a long value.
*
* @param bytes array of bytes
* @param offset offset into array
* @param length length of data (must be {@link #SIZEOF_LONG})
* @return the long value
* @throws IllegalArgumentException if length is not {@link #SIZEOF_LONG} or
* if there's not enough room in the array at the offset indicated.
*/
public static long toLong(byte[] bytes, int offset, final int length) {
if (length != SIZEOF_LONG || offset + length > bytes.length) {
throw explainWrongLengthOrOffset(bytes, offset, length, SIZEOF_LONG);
}
long l = 0;
for (int i = offset; i < offset + length; i++) {
l <<= 8;
l ^= bytes[i] & 0xFF;
}
return l;
}
private static IllegalArgumentException explainWrongLengthOrOffset(
final byte[] bytes, final int offset, final int length,
final int expectedLength) {
String reason;
if (length != expectedLength) {
reason = "Wrong length: " + length + ", expected " + expectedLength;
} else {
reason = "offset (" + offset + ") + length (" + length + ") exceed the"
+ " capacity of the array: " + bytes.length;
}
return new IllegalArgumentException(reason);
}
/**
* Put a long value out to the specified byte array position.
*
* @param bytes the byte array
* @param offset position in the array
* @param pVal long to write out
* @return incremented offset
* @throws IllegalArgumentException if the byte array given doesn't have
* enough room at the offset specified.
*/
public static int putLong(byte[] bytes, int offset, long pVal) {
long val = pVal;
if (bytes.length - offset < SIZEOF_LONG) {
throw new IllegalArgumentException("Not enough room to put a long at"
+ " offset " + offset + " in a " + bytes.length + " byte array");
}
for (int i = offset + 7; i > offset; i--) {
bytes[i] = (byte) val;
val >>>= 8;
}
bytes[offset] = (byte) val;
return offset + SIZEOF_LONG;
}
  /**
   * Presumes float encoded as IEEE 754 floating-point "single format",
   * big-endian, starting at index 0.
   *
   * @param bytes byte array
   * @return Float made from passed byte array.
   */
  public static float toFloat(byte[] bytes) {
    return toFloat(bytes, 0);
  }
/**
* Presumes float encoded as IEEE 754 floating-point "single format"
*
* @param bytes array to convert
* @param offset offset into array
* @return Float made from passed byte array.
*/
public static float toFloat(byte[] bytes, int offset) {
return Float.intBitsToFloat(toInt(bytes, offset, SIZEOF_INT));
}
/**
* @param bytes byte array
* @param offset offset to write to
* @param f float value
* @return New offset in <code>bytes</code>
*/
public static int putFloat(byte[] bytes, int offset, float f) {
return putInt(bytes, offset, Float.floatToRawIntBits(f));
}
/**
* @param f float value
* @return the float represented as byte []
*/
public static byte[] toBytes(final float f) {
// Encode it as int
return Bytes.toBytes(Float.floatToRawIntBits(f));
}
  /**
   * Decodes the first {@link #SIZEOF_LONG} bytes as an IEEE 754 double.
   *
   * @param bytes byte array
   * @return Return double made from passed bytes.
   */
  public static double toDouble(final byte[] bytes) {
    return toDouble(bytes, 0);
  }
/**
* @param bytes byte array
* @param offset offset where double is
* @return Return double made from passed bytes.
*/
public static double toDouble(final byte[] bytes, final int offset) {
return Double.longBitsToDouble(toLong(bytes, offset, SIZEOF_LONG));
}
/**
* @param bytes byte array
* @param offset offset to write to
* @param d value
* @return New offset into array <code>bytes</code>
*/
public static int putDouble(byte[] bytes, int offset, double d) {
return putLong(bytes, offset, Double.doubleToLongBits(d));
}
/**
* Serialize a double as the IEEE 754 double format output. The resultant
* array will be 8 bytes long.
*
* @param d value
* @return the double represented as byte []
*/
public static byte[] toBytes(final double d) {
// Encode it as a long
return Bytes.toBytes(Double.doubleToRawLongBits(d));
}
/**
* Convert an int value to a byte array
*
* @param pVal value
* @return the byte array
*/
public static byte[] toBytes(int pVal) {
int val = pVal;
byte[] b = new byte[4];
for (int i = 3; i > 0; i--) {
b[i] = (byte) val;
val >>>= 8;
}
b[0] = (byte) val;
return b;
}
/**
* Converts a byte array to an int value
*
* @param bytes byte array
* @return the int value
*/
public static int toInt(byte[] bytes) {
return toInt(bytes, 0, SIZEOF_INT);
}
  /**
   * Converts a byte array to an int value, reading {@link #SIZEOF_INT}
   * big-endian bytes starting at <code>offset</code>.
   *
   * @param bytes byte array
   * @param offset offset into array
   * @return the int value
   */
  public static int toInt(byte[] bytes, int offset) {
    return toInt(bytes, offset, SIZEOF_INT);
  }
/**
* Converts a byte array to an int value
*
* @param bytes byte array
* @param offset offset into array
* @param length length of int (has to be {@link #SIZEOF_INT})
* @return the int value
* @throws IllegalArgumentException if length is not {@link #SIZEOF_INT} or if
* there's not enough room in the array at the offset indicated.
*/
public static int toInt(byte[] bytes, int offset, final int length) {
if (length != SIZEOF_INT || offset + length > bytes.length) {
throw explainWrongLengthOrOffset(bytes, offset, length, SIZEOF_INT);
}
int n = 0;
for (int i = offset; i < (offset + length); i++) {
n <<= 8;
n ^= bytes[i] & 0xFF;
}
return n;
}
/**
* Put an int value out to the specified byte array position.
*
* @param bytes the byte array
* @param offset position in the array
* @param pVal int to write out
* @return incremented offset
* @throws IllegalArgumentException if the byte array given doesn't have
* enough room at the offset specified.
*/
public static int putInt(byte[] bytes, int offset, int pVal) {
int val = pVal;
if (bytes.length - offset < SIZEOF_INT) {
throw new IllegalArgumentException("Not enough room to put an int at"
+ " offset " + offset + " in a " + bytes.length + " byte array");
}
for (int i = offset + 3; i > offset; i--) {
bytes[i] = (byte) val;
val >>>= 8;
}
bytes[offset] = (byte) val;
return offset + SIZEOF_INT;
}
/**
* Convert a short value to a byte array of {@link #SIZEOF_SHORT} bytes long.
*
* @param pVal value
* @return the byte array
*/
public static byte[] toBytes(short pVal) {
short val = pVal;
byte[] b = new byte[SIZEOF_SHORT];
b[1] = (byte) val;
val >>= 8;
b[0] = (byte) val;
return b;
}
/**
* Converts a byte array to a short value
*
* @param bytes byte array
* @return the short value
*/
public static short toShort(byte[] bytes) {
return toShort(bytes, 0, SIZEOF_SHORT);
}
  /**
   * Converts a byte array to a short value, reading {@link #SIZEOF_SHORT}
   * big-endian bytes starting at <code>offset</code>.
   *
   * @param bytes byte array
   * @param offset offset into array
   * @return the short value
   */
  public static short toShort(byte[] bytes, int offset) {
    return toShort(bytes, offset, SIZEOF_SHORT);
  }
/**
* Converts a byte array to a short value
*
* @param bytes byte array
* @param offset offset into array
* @param length length, has to be {@link #SIZEOF_SHORT}
* @return the short value
* @throws IllegalArgumentException if length is not {@link #SIZEOF_SHORT} or
* if there's not enough room in the array at the offset indicated.
*/
public static short toShort(byte[] bytes, int offset, final int length) {
if (length != SIZEOF_SHORT || offset + length > bytes.length) {
throw explainWrongLengthOrOffset(bytes, offset, length, SIZEOF_SHORT);
}
short n = 0;
n ^= bytes[offset] & 0xFF;
n <<= 8;
n ^= bytes[offset + 1] & 0xFF;
return n;
}
/**
* Put a short value out to the specified byte array position.
*
* @param bytes the byte array
* @param offset position in the array
* @param pVal short to write out
* @return incremented offset
* @throws IllegalArgumentException if the byte array given doesn't have
* enough room at the offset specified.
*/
public static int putShort(byte[] bytes, int offset, short pVal) {
short val = pVal;
if (bytes.length - offset < SIZEOF_SHORT) {
throw new IllegalArgumentException("Not enough room to put a short at"
+ " offset " + offset + " in a " + bytes.length + " byte array");
}
bytes[offset + 1] = (byte) val;
val >>= 8;
bytes[offset] = (byte) val;
return offset + SIZEOF_SHORT;
}
  /**
   * Serializes a long into Hadoop's variable-length integer ("vint")
   * encoding: values in [-112, 127] occupy a single byte; anything else is a
   * marker byte encoding sign and payload width, followed by the magnitude
   * bytes, most significant first.
   *
   * @param vint Integer to make a vint of.
   * @return Vint as bytes array.
   */
  public static byte[] vintToBytes(final long vint) {
    long i = vint;
    // WritableUtils knows the encoded width for this value (1..9 bytes).
    int size = WritableUtils.getVIntSize(i);
    byte[] result = new byte[size];
    int offset = 0;
    if (i >= -112 && i <= 127) {
      // Small values are stored directly in one byte.
      result[offset] = (byte) i;
      return result;
    }
    // The marker starts at -112 (positive) or -120 (negative) and is
    // decremented once per payload byte.
    int len = -112;
    if (i < 0) {
      i ^= -1L; // take one's complement'
      len = -120;
    }
    long tmp = i;
    while (tmp != 0) {
      tmp = tmp >> 8;
      len--;
    }
    result[offset++] = (byte) len;
    // Recover the payload byte count from the marker.
    len = (len < -120) ? -(len + 120) : -(len + 112);
    // Emit the magnitude big-endian, one byte per remaining position.
    for (int idx = len; idx != 0; idx--) {
      int shiftbits = (idx - 1) * 8;
      long mask = 0xFFL << shiftbits;
      result[offset++] = (byte) ((i & mask) >> shiftbits);
    }
    return result;
  }
  /**
   * Decodes a Hadoop vint (see {@link #vintToBytes(long)}) that starts at the
   * beginning of the passed buffer.
   *
   * @param buffer buffer to convert
   * @return the decoded value as a long
   */
  public static long bytesToVint(final byte[] buffer) {
    int offset = 0;
    byte firstByte = buffer[offset++];
    // Total width of the encoding, derived from the marker byte (1..9).
    int len = WritableUtils.decodeVIntSize(firstByte);
    if (len == 1) {
      // Single-byte encoding: the marker byte is the value itself.
      return firstByte;
    }
    long i = 0;
    for (int idx = 0; idx < len - 1; idx++) {
      byte b = buffer[offset++];
      i = i << 8;
      i = i | (b & 0xFF);
    }
    // Negative values were stored as the one's complement of the magnitude.
    return (WritableUtils.isNegativeVInt(firstByte) ? ~i : i);
  }
  /**
   * Reads a zero-compressed encoded long from input stream and returns it.
   *
   * @param buffer Binary array
   * @param offset Offset into array at which vint begins.
   * @throws java.io.IOException declared for caller convenience; this
   *     in-memory implementation does not itself raise it
   * @return deserialized long from stream.
   */
  public static long readVLong(final byte[] buffer, final int offset)
      throws IOException {
    byte firstByte = buffer[offset];
    // Total width of the encoding, derived from the marker byte (1..9).
    int len = WritableUtils.decodeVIntSize(firstByte);
    if (len == 1) {
      // Single-byte encoding: the marker byte is the value itself.
      return firstByte;
    }
    long i = 0;
    for (int idx = 0; idx < len - 1; idx++) {
      byte b = buffer[offset + 1 + idx];
      i = i << 8;
      i = i | (b & 0xFF);
    }
    // Negative values were stored as the one's complement of the magnitude.
    return (WritableUtils.isNegativeVInt(firstByte) ? ~i : i);
  }
  /**
   * Lexicographically compare two whole byte arrays, treating each byte as
   * unsigned.
   *
   * @param left left operand
   * @param right right operand
   * @return 0 if equal, < 0 if left is less than right, etc.
   */
  public static int compareTo(final byte[] left, final byte[] right) {
    return compareTo(left, 0, left.length, right, 0, right.length);
  }
/**
* Lexographically compare two arrays.
*
* @param b1 left operand
* @param b2 right operand
* @param s1 Where to start comparing in the left buffer
* @param s2 Where to start comparing in the right buffer
* @param l1 How much to compare from the left buffer
* @param l2 How much to compare from the right buffer
* @return 0 if equal, < 0 if left is less than right, etc.
*/
public static int compareTo(byte[] b1, int s1, int l1, byte[] b2, int s2,
int l2) {
// Bring WritableComparator code local
int end1 = s1 + l1;
int end2 = s2 + l2;
for (int i = s1, j = s2; i < end1 && j < end2; i++, j++) {
int a = (b1[i] & 0xff);
int b = (b2[j] & 0xff);
if (a != b) {
return a - b;
}
}
return l1 - l2;
}
/**
* @param left left operand
* @param right right operand
* @return True if equal
*/
public static boolean equals(final byte[] left, final byte[] right) {
// Could use Arrays.equals?
// noinspection SimplifiableConditionalExpression
return left == null
&& right == null
|| (!(left == null || right == null || (left.length != right.length)) && compareTo(
left, right) == 0);
}
  /**
   * Hashes the full array via
   * {@link WritableComparator#hashBytes(byte[], int)} — the same hash that
   * {@link org.apache.hadoop.io.Text} uses when calculating its hash code.
   *
   * @param b bytes to hash
   * @return hash of all of <code>b</code>
   */
  public static int hashCode(final byte[] b) {
    return hashCode(b, b.length);
  }
  /**
   * Hashes the first <code>length</code> bytes of <code>b</code> via
   * {@link WritableComparator#hashBytes(byte[], int)} — the same hash that
   * {@link org.apache.hadoop.io.Text} uses when calculating its hash code.
   *
   * @param b value
   * @param length number of leading bytes to hash
   * @return the hash value
   */
  public static int hashCode(final byte[] b, final int length) {
    return WritableComparator.hashBytes(b, length);
  }
  /**
   * Produces a boxed hash of <code>b</code>, suitable for use as a Map key.
   *
   * @param b bytes to hash
   * @return A hash of <code>b</code> as an Integer that can be used as key in
   *     Maps.
   */
  public static Integer mapKey(final byte[] b) {
    return hashCode(b);
  }
  /**
   * Produces a boxed hash of the first <code>length</code> bytes of
   * <code>b</code>, suitable for use as a Map key.
   *
   * @param b bytes to hash
   * @param length length to hash
   * @return A hash of <code>b</code> as an Integer that can be used as key in
   *     Maps.
   */
  public static Integer mapKey(final byte[] b, final int length) {
    return hashCode(b, length);
  }
/**
* @param a lower half
* @param b upper half
* @return New array that has a in lower half and b in upper half.
*/
public static byte[] add(final byte[] a, final byte[] b) {
return add(a, b, Constants.EMPTY_BYTE_ARRAY);
}
/**
* @param a first third
* @param b second third
* @param c third third
* @return New array made from a, b and c
*/
public static byte[] add(final byte[] a, final byte[] b, final byte[] c) {
byte[] result = new byte[a.length + b.length + c.length];
System.arraycopy(a, 0, result, 0, a.length);
System.arraycopy(b, 0, result, a.length, b.length);
System.arraycopy(c, 0, result, a.length + b.length, c.length);
return result;
}
/**
* @param a array
* @param length amount of bytes to grab
* @return First <code>length</code> bytes from <code>a</code>
*/
public static byte[] head(final byte[] a, final int length) {
if (a.length < length) {
return null;
}
byte[] result = new byte[length];
System.arraycopy(a, 0, result, 0, length);
return result;
}
/**
* @param a array
* @param length amount of bytes to snarf
* @return Last <code>length</code> bytes from <code>a</code>
*/
public static byte[] tail(final byte[] a, final int length) {
if (a.length < length) {
return null;
}
byte[] result = new byte[length];
System.arraycopy(a, a.length - length, result, 0, length);
return result;
}
/**
* @param a array
* @param length new array size
* @return Value in <code>a</code> plus <code>length</code> prepended 0 bytes
*/
public static byte[] padHead(final byte[] a, final int length) {
byte[] padding = new byte[length];
for (int i = 0; i < length; i++) {
padding[i] = 0;
}
return add(padding, a);
}
/**
* @param a array
* @param length new array size
* @return Value in <code>a</code> plus <code>length</code> appended 0 bytes
*/
public static byte[] padTail(final byte[] a, final int length) {
byte[] padding = new byte[length];
for (int i = 0; i < length; i++) {
padding[i] = 0;
}
return add(a, padding);
}
/**
* Split passed range. Expensive operation relatively. Uses BigInteger math.
*
* @param a Beginning of range
* @param b End of range
* @param num Number of times to split range. Pass 1 if you want to split the
* range in two; i.e. one split.
* @return Array of dividing values
*/
public static byte[][] split(final byte[] a, final byte[] b, final int num) {
byte[] aPadded;
byte[] bPadded;
if (a.length < b.length) {
aPadded = padTail(a, b.length - a.length);
bPadded = b;
} else if (b.length < a.length) {
aPadded = a;
bPadded = padTail(b, a.length - b.length);
} else {
aPadded = a;
bPadded = b;
}
if (compareTo(aPadded, bPadded) >= 0) {
throw new IllegalArgumentException("b <= a");
}
if (num <= 0) {
throw new IllegalArgumentException("num cannot be < 0");
}
byte[] prependHeader = { 1, 0 };
BigInteger startBI = new BigInteger(add(prependHeader, aPadded));
BigInteger stopBI = new BigInteger(add(prependHeader, bPadded));
BigInteger diffBI = stopBI.subtract(startBI);
BigInteger splitsBI = BigInteger.valueOf(num + 1);
if (diffBI.compareTo(splitsBI) < 0) {
return null;
}
BigInteger intervalBI;
try {
intervalBI = diffBI.divide(splitsBI);
} catch (Exception e) {
LOG.error("Exception caught during division", e);
return null;
}
byte[][] result = new byte[num + 2][];
result[0] = a;
for (int i = 1; i <= num; i++) {
BigInteger curBI = startBI
.add(intervalBI.multiply(BigInteger.valueOf(i)));
byte[] padded = curBI.toByteArray();
if (padded[1] == 0)
padded = tail(padded, padded.length - 2);
else
padded = tail(padded, padded.length - 1);
result[i] = padded;
}
result[num + 1] = b;
return result;
}
/**
* @param t operands
* @return Array of byte arrays made from passed array of Text
*/
public static byte[][] toByteArrays(final String[] t) {
byte[][] result = new byte[t.length][];
for (int i = 0; i < t.length; i++) {
result[i] = Bytes.toBytes(t[i]);
}
return result;
}
  /**
   * Wraps the UTF-8 bytes of <code>column</code> as the sole element of a
   * two-dimensional byte array.
   *
   * @param column operand
   * @return A byte array of a byte array where first and only entry is
   *     <code>column</code>
   */
  public static byte[][] toByteArrays(final String column) {
    return toByteArrays(toBytes(column));
  }
/**
* @param column operand
* @return A byte array of a byte array where first and only entry is
* <code>column</code>
*/
public static byte[][] toByteArrays(final byte[] column) {
byte[][] result = new byte[1][];
result[0] = column;
return result;
}
  /**
   * Binary search for keys in indexes.
   *
   * @param arr array of byte arrays to search for; assumed sorted consistently
   *     with <code>comparator</code> — TODO confirm with callers
   * @param key the key you want to find
   * @param offset the offset in the key you want to find
   * @param length the length of the key
   * @param comparator a comparator to compare.
   * @return index of key if present; otherwise
   *     <code>-(insertionPoint + 1)</code>, mirroring the
   *     {@link java.util.Arrays#binarySearch(Object[], Object)} convention
   */
  public static int binarySearch(byte[][] arr, byte[] key, int offset,
      int length, RawComparator<byte[]> comparator) {
    int low = 0;
    int high = arr.length - 1;
    while (low <= high) {
      // Unsigned shift avoids overflow for very large low+high.
      int mid = (low + high) >>> 1;
      // we have to compare in this order, because the comparator order
      // has special logic when the 'left side' is a special key.
      int cmp = comparator.compare(key, offset, length, arr[mid], 0,
          arr[mid].length);
      // key lives above the midpoint
      if (cmp > 0)
        low = mid + 1;
      // key lives below the midpoint
      else if (cmp < 0)
        high = mid - 1;
      // BAM. how often does this really happen?
      else
        return mid;
    }
    return -(low + 1);
  }
  /**
   * Bytewise binary increment/deincrement of long contained in byte array on
   * given amount.
   *
   * @param value - array of bytes containing long (length <= SIZEOF_LONG)
   * @param amount value will be incremented on (deincremented if negative)
   * @return array of bytes containing incremented long (length == SIZEOF_LONG)
   * @throws IOException declared for API compatibility; the length check
   *     below actually raises IllegalArgumentException instead
   * @throws IllegalArgumentException if value.length > SIZEOF_LONG
   */
  public static byte[] incrementBytes(byte[] value, long amount)
      throws IOException {
    byte[] val = value;
    if (val.length < SIZEOF_LONG) {
      // Hopefully this doesn't happen too often.
      // Sign-extend short input to a full 8-byte two's-complement value.
      byte[] newvalue;
      if (val[0] < 0) {
        newvalue = new byte[] { -1, -1, -1, -1, -1, -1, -1, -1 };
      } else {
        newvalue = new byte[SIZEOF_LONG];
      }
      System.arraycopy(val, 0, newvalue, newvalue.length - val.length,
          val.length);
      val = newvalue;
    } else if (val.length > SIZEOF_LONG) {
      throw new IllegalArgumentException("Increment Bytes - value too big: "
          + val.length);
    }
    if (amount == 0)
      return val;
    // Dispatch on the sign of the stored value (top bit of the first byte).
    if (val[0] < 0) {
      return binaryIncrementNeg(val, amount);
    }
    return binaryIncrementPos(val, amount);
  }
  /* Increment/deincrement helper for a value whose top byte is non-negative.
   * Walks the array least-significant byte first, adding one byte of the
   * amount at a time and propagating carry/borrow through `amo`; stops as
   * soon as nothing remains to propagate. Mutates and returns `value`. */
  private static byte[] binaryIncrementPos(byte[] value, long amount) {
    long amo = amount;
    int sign = 1;
    if (amount < 0) {
      amo = -amount;
      sign = -1;
    }
    for (int i = 0; i < value.length; i++) {
      // Current byte of the absolute amount, restored to its signed role.
      int cur = ((int) amo % 256) * sign;
      amo = (amo >> 8);
      int val = value[value.length - i - 1] & 0x0ff;
      int total = val + cur;
      if (total > 255) {
        // Carry into the next more-significant byte.
        amo += sign;
        total %= 256;
      } else if (total < 0) {
        // Borrow from the next more-significant byte.
        amo -= sign;
      }
      value[value.length - i - 1] = (byte) total;
      if (amo == 0)
        return value;
    }
    return value;
  }
  /* Increment/deincrement helper for a value whose top byte is negative.
   * Mirrors binaryIncrementPos but works against the two's-complement of each
   * byte, propagating carry/borrow through `amo` from the least-significant
   * byte upward. Mutates and returns `value`. */
  private static byte[] binaryIncrementNeg(byte[] value, long amount) {
    long amo = amount;
    int sign = 1;
    if (amount < 0) {
      amo = -amount;
      sign = -1;
    }
    for (int i = 0; i < value.length; i++) {
      // Current byte of the absolute amount, restored to its signed role.
      int cur = ((int) amo % 256) * sign;
      amo = (amo >> 8);
      // Two's-complement of the stored byte (~x + 1).
      int val = ((~value[value.length - i - 1]) & 0x0ff) + 1;
      int total = cur - val;
      if (total >= 0) {
        // Carry into the next more-significant byte.
        amo += sign;
      } else if (total < -256) {
        // Borrow from the next more-significant byte.
        amo -= sign;
        total %= 256;
      }
      value[value.length - i - 1] = (byte) total;
      if (amo == 0)
        return value;
    }
    return value;
  }
}
|
apache/eventmesh | 35,677 | eventmesh-runtime/src/main/java/org/apache/eventmesh/runtime/metrics/http/HttpMetrics.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.eventmesh.runtime.metrics.http;
import org.apache.eventmesh.metrics.api.model.InstrumentFurther;
import org.apache.eventmesh.metrics.api.model.LongCounterMetric;
import org.apache.eventmesh.metrics.api.model.Metric;
import org.apache.eventmesh.metrics.api.model.ObservableDoubleGaugeMetric;
import org.apache.eventmesh.metrics.api.model.ObservableLongGaugeMetric;
import org.apache.eventmesh.runtime.metrics.MetricInstrumentUnit;
import org.apache.commons.collections4.MapUtils;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.DelayQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.atomic.AtomicLong;
import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.api.common.AttributesBuilder;
public class HttpMetrics {
private static Attributes EMPTY = Attributes.builder().build();
private static final int STATIC_PERIOD = 30 * 1000;
private static final String HTTP_METRICS_NAME_PREFIX = "eventmesh.http.";
private static final String METRIC_NAME = "HTTP";
private float wholeCost = 0f;
private final AtomicLong wholeRequestNum = new AtomicLong(0);
//cumulative value
private final AtomicLong httpDiscard = new AtomicLong(0);
private LongCounterMetric httpDiscardMetric;
private final AtomicLong maxCost = new AtomicLong(0);
private final AtomicLong httpRequestPerSecond = new AtomicLong(0);
private final LinkedList<Integer> httpRequestTPSSnapshots = new LinkedList<>();
private float httpDecodeTimeCost = 0f;
private final AtomicLong httpDecodeNum = new AtomicLong(0);
private final AtomicLong sendBatchMsgNumPerSecond = new AtomicLong(0);
private final AtomicLong sendBatchMsgNumSum = new AtomicLong(0);
private LongCounterMetric sendBatchMsgNumSumMetric;
private final AtomicLong sendBatchMsgFailNumSum = new AtomicLong(0);
private LongCounterMetric sendBatchMsgFailNumSumMetric;
// This is a cumulative value
private final AtomicLong sendBatchMsgDiscardNumSum = new AtomicLong(0);
private LongCounterMetric sendBatchMsgDiscardNumSumMetric;
private final LinkedList<Integer> sendBatchMsgTPSSnapshots = new LinkedList<Integer>();
private final AtomicLong sendMsgNumSum = new AtomicLong(0);
private LongCounterMetric sendMsgNumSumMetric;
private final AtomicLong sendMsgFailNumSum = new AtomicLong(0);
private LongCounterMetric sendMsgFailNumSumMetric;
private final AtomicLong replyMsgNumSum = new AtomicLong(0);
private LongCounterMetric replyMsgNumSumMetric;
private final AtomicLong replyMsgFailNumSum = new AtomicLong(0);
private LongCounterMetric replyMsgFailNumSumMetric;
private final AtomicLong sendMsgNumPerSecond = new AtomicLong(0);
private final LinkedList<Integer> sendMsgTPSSnapshots = new LinkedList<Integer>();
private float wholePushCost = 0f;
private final AtomicLong wholePushRequestNum = new AtomicLong(0);
private final AtomicLong maxHttpPushLatency = new AtomicLong(0);
private final AtomicLong pushMsgNumPerSecond = new AtomicLong(0);
private final LinkedList<Integer> pushMsgTPSSnapshots = new LinkedList<Integer>();
private final AtomicLong httpPushMsgNumSum = new AtomicLong(0);
private LongCounterMetric httpPushMsgNumSumMetric;
private final AtomicLong httpPushFailNumSum = new AtomicLong(0);
private LongCounterMetric httpPushFailNumSumMetric;
private float batchSend2MQWholeCost = 0f;
private final AtomicLong batchSend2MQNum = new AtomicLong(0);
private float send2MQWholeCost = 0f;
private final AtomicLong send2MQNum = new AtomicLong(0);
private float reply2MQWholeCost = 0f;
private final AtomicLong reply2MQNum = new AtomicLong(0);
// execute metrics
private final ThreadPoolExecutor batchMsgExecutor;
private final ThreadPoolExecutor sendMsgExecutor;
private final ThreadPoolExecutor pushMsgExecutor;
private final DelayQueue<?> httpFailedQueue;
private final Map<String, String> labelMap;
private final Map<String, Metric> metrics = new HashMap<>(32);
private ObservableDoubleGaugeMetric avgHttpBodyDecodeCostMetric;
private ObservableDoubleGaugeMetric maxHttpTpsMetric;
private ObservableDoubleGaugeMetric avgHttpTpsMetric;
private ObservableLongGaugeMetric maxHttpCostMetric;
private ObservableDoubleGaugeMetric avgHttpCostMetric;
private ObservableDoubleGaugeMetric maxBatchSendMsgTpsMetric;
private ObservableDoubleGaugeMetric avgBatchSendMsgTpsMetric;
private ObservableDoubleGaugeMetric sumBatchFailRateMetric;
private ObservableDoubleGaugeMetric maxSendMsgTpsMetric;
private ObservableDoubleGaugeMetric avgSendMsgTpsMetric;
private ObservableDoubleGaugeMetric sumFailRateMetric;
private ObservableDoubleGaugeMetric maxPushMsgTpsMetric;
private ObservableDoubleGaugeMetric avgPushMsgTpsMetric;
private ObservableDoubleGaugeMetric pushSumFailRateMetric;
private ObservableDoubleGaugeMetric maxClientLatencyMetric;
private ObservableDoubleGaugeMetric avgClientLatencyMetric;
private ObservableLongGaugeMetric batchMsgQMetric;
private ObservableLongGaugeMetric sendMsgQMetric;
private ObservableLongGaugeMetric pushMsgQMetric;
private ObservableLongGaugeMetric httpRetryQMetric;
private ObservableDoubleGaugeMetric batchAvgSend2MQCostMetric;
private ObservableDoubleGaugeMetric avgSend2MQCostMetric;
private ObservableDoubleGaugeMetric avgReply2MQCostMetric;
    /**
     * Creates the HTTP metrics holder and initializes its metric instruments
     * via {@code initMetrics()}.
     *
     * @param batchMsgExecutor executor for batch-send requests
     * @param sendMsgExecutor executor for single-send requests
     * @param pushMsgExecutor executor for push requests
     * @param httpFailedQueue delay queue of failed/retry items
     * @param labelMap extra metric labels; may be null (treated as empty)
     */
    public HttpMetrics(final ThreadPoolExecutor batchMsgExecutor,
        final ThreadPoolExecutor sendMsgExecutor,
        final ThreadPoolExecutor pushMsgExecutor,
        final DelayQueue<?> httpFailedQueue,
        final Map<String, String> labelMap) {
        this.batchMsgExecutor = batchMsgExecutor;
        this.sendMsgExecutor = sendMsgExecutor;
        this.pushMsgExecutor = pushMsgExecutor;
        this.httpFailedQueue = httpFailedQueue;
        // Never keep a null label map: replaced with an empty one.
        this.labelMap = Optional.ofNullable(labelMap).orElse(new HashMap<>(0));
        initMetrics();
    }
private void initMetrics() {
InstrumentFurther furtherHttpDiscard = new InstrumentFurther();
furtherHttpDiscard.setUnit(MetricInstrumentUnit.SINGLETON);
furtherHttpDiscard.setDescription("Http request discard num.");
furtherHttpDiscard.setName(HTTP_METRICS_NAME_PREFIX + "request.discard.num");
httpDiscardMetric = new LongCounterMetric(furtherHttpDiscard, METRIC_NAME);
metrics.put("httpDiscardMetric", httpDiscardMetric);
//sum of batch send message number
InstrumentFurther furtherSendBatchMsgNumSum = new InstrumentFurther();
furtherSendBatchMsgNumSum.setUnit(MetricInstrumentUnit.SINGLETON);
furtherSendBatchMsgNumSum.setDescription("Sum of batch send message number.");
furtherSendBatchMsgNumSum.setName(HTTP_METRICS_NAME_PREFIX + "batch.send.message.num");
sendBatchMsgNumSumMetric = new LongCounterMetric(furtherSendBatchMsgNumSum, METRIC_NAME);
metrics.put("sendBatchMsgNumSumMetric", sendBatchMsgNumSumMetric);
//sum of batch send message fail message number.
InstrumentFurther furtherSendBatchMsgFailNumSum = new InstrumentFurther();
furtherSendBatchMsgFailNumSum.setUnit(MetricInstrumentUnit.SINGLETON);
furtherSendBatchMsgFailNumSum.setDescription("Sum of batch send message fail message number.");
furtherSendBatchMsgFailNumSum.setName(HTTP_METRICS_NAME_PREFIX + "batch.send.message.fail.num");
sendBatchMsgFailNumSumMetric = new LongCounterMetric(furtherSendBatchMsgFailNumSum, METRIC_NAME);
metrics.put("sendBatchMsgFailNumSumMetric", sendBatchMsgFailNumSumMetric);
//sum of send batch message discard number.
InstrumentFurther furtherSendBatchMsgDiscardNumSum = new InstrumentFurther();
furtherSendBatchMsgDiscardNumSum.setUnit(MetricInstrumentUnit.SINGLETON);
furtherSendBatchMsgDiscardNumSum.setDescription("Sum of batch send message fail message number.");
furtherSendBatchMsgDiscardNumSum.setName(HTTP_METRICS_NAME_PREFIX + "batch.send.message.discard.num");
sendBatchMsgDiscardNumSumMetric = new LongCounterMetric(furtherSendBatchMsgDiscardNumSum, METRIC_NAME);
metrics.put("sendBatchMsgDiscardNumSumMetric", sendBatchMsgDiscardNumSumMetric);
//Sum of send message number.
InstrumentFurther furtherSendMsgNumSum = new InstrumentFurther();
furtherSendMsgNumSum.setUnit(MetricInstrumentUnit.SINGLETON);
furtherSendMsgNumSum.setDescription("Sum of send message number.");
furtherSendMsgNumSum.setName(HTTP_METRICS_NAME_PREFIX + "send.message.num");
sendMsgNumSumMetric = new LongCounterMetric(furtherSendMsgNumSum, METRIC_NAME);
metrics.put("sendMsgNumSumMetric", sendMsgNumSumMetric);
//Sum of send message fail number.
InstrumentFurther furtherSendMsgFailNumSum = new InstrumentFurther();
furtherSendMsgFailNumSum.setUnit(MetricInstrumentUnit.SINGLETON);
furtherSendMsgFailNumSum.setDescription("Sum of send message fail number.");
furtherSendMsgFailNumSum.setName(HTTP_METRICS_NAME_PREFIX + "send.message.fail.num");
sendMsgFailNumSumMetric = new LongCounterMetric(furtherSendMsgFailNumSum, METRIC_NAME);
metrics.put("sendMsgFailNumSumMetric", sendMsgFailNumSumMetric);
//Sum of reply message number.
InstrumentFurther furtherReplyMsgNumSum = new InstrumentFurther();
furtherReplyMsgNumSum.setUnit(MetricInstrumentUnit.SINGLETON);
furtherReplyMsgNumSum.setDescription("Sum of reply message number.");
furtherReplyMsgNumSum.setName(HTTP_METRICS_NAME_PREFIX + "reply.message.num");
replyMsgNumSumMetric = new LongCounterMetric(furtherReplyMsgNumSum, METRIC_NAME);
metrics.put("replyMsgNumSumMetric", replyMsgNumSumMetric);
//Sum of reply message fail number.
InstrumentFurther furtherReplyMsgFailNumSum = new InstrumentFurther();
furtherReplyMsgFailNumSum.setUnit(MetricInstrumentUnit.SINGLETON);
furtherReplyMsgFailNumSum.setDescription("Sum of reply message fail number.");
furtherReplyMsgFailNumSum.setName(HTTP_METRICS_NAME_PREFIX + "reply.message.fail.num");
replyMsgFailNumSumMetric = new LongCounterMetric(furtherReplyMsgFailNumSum, METRIC_NAME);
metrics.put("replyMsgFailNumSumMetric", replyMsgFailNumSumMetric);
//Sum of http push message number.
InstrumentFurther furtherHttpPushMsgNumSum = new InstrumentFurther();
furtherHttpPushMsgNumSum.setUnit(MetricInstrumentUnit.SINGLETON);
furtherHttpPushMsgNumSum.setDescription("Sum of http push message number.");
furtherHttpPushMsgNumSum.setName(HTTP_METRICS_NAME_PREFIX + "push.message.num");
httpPushMsgNumSumMetric = new LongCounterMetric(furtherHttpPushMsgNumSum, METRIC_NAME);
metrics.put("httpPushMsgNumSumMetric", httpPushMsgNumSumMetric);
//Sum of http push message fail number.
InstrumentFurther furtherHttpPushFailNumSum = new InstrumentFurther();
furtherHttpPushFailNumSum.setUnit(MetricInstrumentUnit.SINGLETON);
furtherHttpPushFailNumSum.setDescription("Sum of http push message fail number.");
furtherHttpPushFailNumSum.setName(HTTP_METRICS_NAME_PREFIX + "push.message.fail.num");
httpPushFailNumSumMetric = new LongCounterMetric(furtherHttpPushFailNumSum, METRIC_NAME);
metrics.put("httpPushFailNumSumMetric", httpPushFailNumSumMetric);
//avg body decode cost time of http
InstrumentFurther furtherHttpDecode = new InstrumentFurther();
furtherHttpDecode.setUnit(MetricInstrumentUnit.MILLISECONDS);
furtherHttpDecode.setDescription("Avg body decode cost time of http");
furtherHttpDecode.setName(HTTP_METRICS_NAME_PREFIX + "body.decode.cost.avg");
avgHttpBodyDecodeCostMetric = new ObservableDoubleGaugeMetric(furtherHttpDecode, METRIC_NAME, () -> this.avgHTTPBodyDecodeCost());
avgHttpBodyDecodeCostMetric.putAll(labelMap);
metrics.put("avgHttpBodyDecodeCostMetric", avgHttpBodyDecodeCostMetric);
//Max TPS of HTTP.
InstrumentFurther furtherMaxHttpTps = new InstrumentFurther();
furtherMaxHttpTps.setUnit(MetricInstrumentUnit.TPS);
furtherMaxHttpTps.setDescription("Max TPS of HTTP.");
furtherMaxHttpTps.setName(HTTP_METRICS_NAME_PREFIX + "request.tps.max");
maxHttpTpsMetric = new ObservableDoubleGaugeMetric(furtherMaxHttpTps, METRIC_NAME, () -> this.maxHTTPTPS());
maxHttpTpsMetric.putAll(labelMap);
metrics.put("maxHttpTpsMetric", maxHttpTpsMetric);
//Avg TPS of HTTP.
InstrumentFurther furtherAvgHttpTps = new InstrumentFurther();
furtherAvgHttpTps.setUnit(MetricInstrumentUnit.TPS);
furtherAvgHttpTps.setDescription("Avg TPS of HTTP.");
furtherAvgHttpTps.setName(HTTP_METRICS_NAME_PREFIX + "request.tps.avg");
avgHttpTpsMetric = new ObservableDoubleGaugeMetric(furtherAvgHttpTps, METRIC_NAME, () -> this.avgHTTPTPS());
avgHttpTpsMetric.putAll(labelMap);
metrics.put("avgHttpTpsMetric", avgHttpTpsMetric);
//max cost of HTTP.
InstrumentFurther furtherMaxCostHttpTps = new InstrumentFurther();
furtherMaxCostHttpTps.setUnit(MetricInstrumentUnit.MILLISECONDS);
furtherMaxCostHttpTps.setDescription("Max cost of HTTP.");
furtherMaxCostHttpTps.setName(HTTP_METRICS_NAME_PREFIX + "request.cost.max");
maxHttpCostMetric = new ObservableLongGaugeMetric(furtherMaxCostHttpTps, METRIC_NAME, () -> this.maxHTTPCost());
maxHttpCostMetric.putAll(labelMap);
metrics.put("maxHttpCostMetric", maxHttpCostMetric);
//Avg cost of HTTP.
InstrumentFurther furtherAvgHttpCost = new InstrumentFurther();
furtherAvgHttpCost.setUnit(MetricInstrumentUnit.TPS);
furtherAvgHttpCost.setDescription("Avg cost of HTTP.");
furtherAvgHttpCost.setName(HTTP_METRICS_NAME_PREFIX + "request.cost.avg");
avgHttpCostMetric = new ObservableDoubleGaugeMetric(furtherAvgHttpCost, METRIC_NAME, () -> this.avgHTTPCost());
avgHttpCostMetric.putAll(labelMap);
metrics.put("avgHttpCostMetric", avgHttpCostMetric);
//Max of batch send message tps
InstrumentFurther furtherMaxBatchSendMsgTps = new InstrumentFurther();
furtherMaxBatchSendMsgTps.setUnit(MetricInstrumentUnit.TPS);
furtherMaxBatchSendMsgTps.setDescription("Max of batch send message tps");
furtherMaxBatchSendMsgTps.setName(HTTP_METRICS_NAME_PREFIX + "batch.send.message.tps.max");
maxBatchSendMsgTpsMetric = new ObservableDoubleGaugeMetric(furtherMaxBatchSendMsgTps, METRIC_NAME, () -> this.maxSendBatchMsgTPS());
maxBatchSendMsgTpsMetric.putAll(labelMap);
metrics.put("maxBatchSendMsgTpsMetric", maxBatchSendMsgTpsMetric);
//Avg of batch send message tps.
InstrumentFurther furtherAvgBatchSendMsgTps = new InstrumentFurther();
furtherAvgBatchSendMsgTps.setUnit(MetricInstrumentUnit.TPS);
furtherAvgBatchSendMsgTps.setDescription("Avg of batch send message tps.");
furtherAvgBatchSendMsgTps.setName(HTTP_METRICS_NAME_PREFIX + "batch.send.message.tps.avg");
avgBatchSendMsgTpsMetric = new ObservableDoubleGaugeMetric(furtherAvgBatchSendMsgTps, METRIC_NAME, () -> this.avgSendBatchMsgTPS());
avgBatchSendMsgTpsMetric.putAll(labelMap);
metrics.put("avgBatchSendMsgTpsMetric", avgBatchSendMsgTpsMetric);
//Send batch message fail rate.
InstrumentFurther furtherSumBatchFailRate = new InstrumentFurther();
furtherSumBatchFailRate.setUnit(MetricInstrumentUnit.PERCENT);
furtherSumBatchFailRate.setDescription("Send batch message fail rate.");
furtherSumBatchFailRate.setName(HTTP_METRICS_NAME_PREFIX + "batch.send.message.fail.rate");
sumBatchFailRateMetric = new ObservableDoubleGaugeMetric(furtherSumBatchFailRate, METRIC_NAME, () -> this.getSendBatchMsgFailRate());
sumBatchFailRateMetric.putAll(labelMap);
metrics.put("sumBatchFailRateMetric", sumBatchFailRateMetric);
//Max of send message tps
InstrumentFurther furtherMaxSendMsgTps = new InstrumentFurther();
furtherMaxSendMsgTps.setUnit(MetricInstrumentUnit.TPS);
furtherMaxSendMsgTps.setDescription("Max of send message tps");
furtherMaxSendMsgTps.setName(HTTP_METRICS_NAME_PREFIX + "send.message.tps.max");
maxSendMsgTpsMetric = new ObservableDoubleGaugeMetric(furtherMaxSendMsgTps, METRIC_NAME, () -> this.maxSendMsgTPS());
maxSendMsgTpsMetric.putAll(labelMap);
metrics.put("maxSendMsgTpsMetric", maxSendMsgTpsMetric);
//Avg of send message tps
InstrumentFurther furtherAvgSendMsgTps = new InstrumentFurther();
furtherAvgSendMsgTps.setUnit(MetricInstrumentUnit.TPS);
furtherAvgSendMsgTps.setDescription("Avg of send message tps");
furtherAvgSendMsgTps.setName(HTTP_METRICS_NAME_PREFIX + "send.message.tps.avg");
avgSendMsgTpsMetric = new ObservableDoubleGaugeMetric(furtherAvgSendMsgTps, METRIC_NAME, () -> this.avgSendMsgTPS());
avgSendMsgTpsMetric.putAll(labelMap);
metrics.put("avgSendMsgTpsMetric", avgSendMsgTpsMetric);
//Send message fail rate.
InstrumentFurther furtherSumFailRate = new InstrumentFurther();
furtherSumFailRate.setUnit(MetricInstrumentUnit.PERCENT);
furtherSumFailRate.setDescription("Send message fail rate.");
furtherSumFailRate.setName(HTTP_METRICS_NAME_PREFIX + "send.message.fail.rate");
sumFailRateMetric = new ObservableDoubleGaugeMetric(furtherSumFailRate, METRIC_NAME, () -> this.getSendBatchMsgFailRate());
sumFailRateMetric.putAll(labelMap);
metrics.put("sumFailRateMetric", sumFailRateMetric);
//Max of push message tps.
InstrumentFurther furtherMaxPushMsgTps = new InstrumentFurther();
furtherMaxPushMsgTps.setUnit(MetricInstrumentUnit.TPS);
furtherMaxPushMsgTps.setDescription("Max of push message tps.");
furtherMaxPushMsgTps.setName(HTTP_METRICS_NAME_PREFIX + "push.message.tps.max");
maxPushMsgTpsMetric = new ObservableDoubleGaugeMetric(furtherMaxPushMsgTps, METRIC_NAME, () -> this.maxPushMsgTPS());
maxPushMsgTpsMetric.putAll(labelMap);
metrics.put("maxPushMsgTpsMetric", maxPushMsgTpsMetric);
//Avg of push message tps.
InstrumentFurther furtherAvgPushMsgTps = new InstrumentFurther();
furtherAvgPushMsgTps.setUnit(MetricInstrumentUnit.TPS);
furtherAvgPushMsgTps.setDescription("Avg of push message tps.");
furtherAvgPushMsgTps.setName(HTTP_METRICS_NAME_PREFIX + "push.message.tps.avg");
avgPushMsgTpsMetric = new ObservableDoubleGaugeMetric(furtherAvgPushMsgTps, METRIC_NAME, () -> this.avgPushMsgTPS());
avgPushMsgTpsMetric.putAll(labelMap);
metrics.put("avgPushMsgTpsMetric", avgPushMsgTpsMetric);
//Http push message fail rate.
InstrumentFurther furtherPushSumFailRate = new InstrumentFurther();
furtherPushSumFailRate.setUnit(MetricInstrumentUnit.PERCENT);
furtherPushSumFailRate.setDescription("Http push message fail rate.");
furtherPushSumFailRate.setName(HTTP_METRICS_NAME_PREFIX + "push.message.fail.rate");
pushSumFailRateMetric = new ObservableDoubleGaugeMetric(furtherPushSumFailRate, METRIC_NAME, () -> this.getHttpPushMsgFailRate());
pushSumFailRateMetric.putAll(labelMap);
metrics.put("pushSumFailRateMetric", pushSumFailRateMetric);
//Max of http push latency.
InstrumentFurther furtherMaxClientLatency = new InstrumentFurther();
furtherMaxClientLatency.setUnit(MetricInstrumentUnit.MILLISECONDS);
furtherMaxClientLatency.setDescription("Max of http push latency.");
furtherMaxClientLatency.setName(HTTP_METRICS_NAME_PREFIX + "push.latency.max");
maxClientLatencyMetric = new ObservableDoubleGaugeMetric(furtherMaxClientLatency, METRIC_NAME, () -> this.maxHTTPPushLatency());
maxClientLatencyMetric.putAll(labelMap);
metrics.put("maxClientLatencyMetric", maxClientLatencyMetric);
//Avg of http push latency.
InstrumentFurther furtherAvgClientLatency = new InstrumentFurther();
furtherAvgClientLatency.setUnit(MetricInstrumentUnit.MILLISECONDS);
furtherAvgClientLatency.setDescription("Avg of http push latency.");
furtherAvgClientLatency.setName(HTTP_METRICS_NAME_PREFIX + "push.latency.avg");
avgClientLatencyMetric = new ObservableDoubleGaugeMetric(furtherAvgClientLatency, METRIC_NAME, () -> this.avgHTTPPushLatency());
avgClientLatencyMetric.putAll(labelMap);
metrics.put("avgClientLatencyMetric", avgClientLatencyMetric);
//Size of batch message queue.
InstrumentFurther furtherBatchMsgQ = new InstrumentFurther();
furtherBatchMsgQ.setUnit(MetricInstrumentUnit.SINGLETON);
furtherBatchMsgQ.setDescription("Size of batch message queue.");
furtherBatchMsgQ.setName(HTTP_METRICS_NAME_PREFIX + "batch.message.queue.size");
batchMsgQMetric = new ObservableLongGaugeMetric(furtherBatchMsgQ, METRIC_NAME, () -> this.getBatchMsgQueueSize());
batchMsgQMetric.putAll(labelMap);
metrics.put("batchMsgQMetric", batchMsgQMetric);
//Size of send message queue.
InstrumentFurther furtherSendMsgQ = new InstrumentFurther();
furtherSendMsgQ.setUnit(MetricInstrumentUnit.SINGLETON);
furtherSendMsgQ.setDescription("Size of send message queue.");
furtherSendMsgQ.setName(HTTP_METRICS_NAME_PREFIX + "send.message.queue.size");
sendMsgQMetric = new ObservableLongGaugeMetric(furtherSendMsgQ, METRIC_NAME, () -> this.getSendMsgQueueSize());
sendMsgQMetric.putAll(labelMap);
metrics.put("sendMsgQMetric", sendMsgQMetric);
//Size of push message queue.
InstrumentFurther furtherPushMsgQ = new InstrumentFurther();
furtherPushMsgQ.setUnit(MetricInstrumentUnit.SINGLETON);
furtherPushMsgQ.setDescription("Size of push message queue.");
furtherPushMsgQ.setName(HTTP_METRICS_NAME_PREFIX + "push.message.queue.size");
pushMsgQMetric = new ObservableLongGaugeMetric(furtherPushMsgQ, METRIC_NAME, () -> this.getPushMsgQueueSize());
pushMsgQMetric.putAll(labelMap);
metrics.put("pushMsgQMetric", pushMsgQMetric);
//Size of http retry queue.
InstrumentFurther furtherHttpRetryQ = new InstrumentFurther();
furtherHttpRetryQ.setUnit(MetricInstrumentUnit.SINGLETON);
furtherHttpRetryQ.setDescription("Size of http retry queue.");
furtherHttpRetryQ.setName(HTTP_METRICS_NAME_PREFIX + "retry.queue.size");
httpRetryQMetric = new ObservableLongGaugeMetric(furtherHttpRetryQ, METRIC_NAME, () -> this.getHttpRetryQueueSize());
httpRetryQMetric.putAll(labelMap);
metrics.put("httpRetryQMetric", httpRetryQMetric);
//Avg of batch send message cost.
InstrumentFurther furtherBatchAvgSend2MQCost = new InstrumentFurther();
furtherBatchAvgSend2MQCost.setUnit(MetricInstrumentUnit.MILLISECONDS);
furtherBatchAvgSend2MQCost.setDescription("Avg of batch send message cost.");
furtherBatchAvgSend2MQCost.setName(HTTP_METRICS_NAME_PREFIX + "batch.send.message.cost.avg");
batchAvgSend2MQCostMetric = new ObservableDoubleGaugeMetric(furtherBatchAvgSend2MQCost, METRIC_NAME, () -> this.avgBatchSendMsgCost());
batchAvgSend2MQCostMetric.putAll(labelMap);
metrics.put("avgClientLatencyMetric", batchAvgSend2MQCostMetric);
//Avg of send message cost.
InstrumentFurther furtherAvgSend2MQCost = new InstrumentFurther();
furtherAvgSend2MQCost.setUnit(MetricInstrumentUnit.TPS);
furtherAvgSend2MQCost.setDescription("Avg of send message cost.");
furtherAvgSend2MQCost.setName(HTTP_METRICS_NAME_PREFIX + "send.message.cost.avg");
avgSend2MQCostMetric = new ObservableDoubleGaugeMetric(furtherAvgSend2MQCost, METRIC_NAME, () -> this.avgSendMsgCost());
avgSend2MQCostMetric.putAll(labelMap);
metrics.put("avgSend2MQCostMetric", avgSend2MQCostMetric);
//Avg of reply message cost.
InstrumentFurther furtherAvgReply2MQCost = new InstrumentFurther();
furtherAvgReply2MQCost.setUnit(MetricInstrumentUnit.TPS);
furtherAvgReply2MQCost.setDescription("Avg of reply message cost.");
furtherAvgReply2MQCost.setName(HTTP_METRICS_NAME_PREFIX + "reply.message.cost.avg");
avgReply2MQCostMetric = new ObservableDoubleGaugeMetric(furtherAvgReply2MQCost, METRIC_NAME, () -> this.avgReplyMsgCost());
avgReply2MQCostMetric.putAll(labelMap);
metrics.put("avgReply2MQCostMetric", avgReply2MQCostMetric);
}
/** Returns all registered summary metrics (counters and observable gauges). */
public Collection<Metric> getMetrics() {
return metrics.values();
}
/** Average HTTP request handling cost in ms; 0 when no request was recorded. */
public double avgHTTPCost() {
return (wholeRequestNum.longValue() == 0L) ? 0f : wholeCost / wholeRequestNum.longValue();
}
/** Maximum observed HTTP request handling cost in ms. */
public long maxHTTPCost() {
return maxCost.longValue();
}
/** Total number of discarded HTTP requests. */
public long getHttpDiscard() {
return httpDiscard.longValue();
}
/** Counts one incoming HTTP request toward the current one-second TPS bucket. */
public void recordHTTPRequest() {
httpRequestPerSecond.incrementAndGet();
}
/**
 * Records a discarded HTTP request: bumps the local counter and the exported
 * OpenTelemetry counter.
 */
public void recordHTTPDiscard() {
    httpDiscard.incrementAndGet();
    // Pass labelMap directly, consistent with the other record* methods in this
    // class; buildAttributes only reads the map, so the defensive HashMap copy
    // the original made here was unnecessary allocation on a hot path.
    httpDiscardMetric.getInstrument().add(1, buildAttributes(labelMap));
}
/**
 * Converts a label map into OpenTelemetry {@link Attributes}; returns the
 * shared EMPTY instance for a null or empty map.
 */
private static Attributes buildAttributes(final Map<String, String> attributes) {
if (MapUtils.isEmpty(attributes)) {
return EMPTY;
}
AttributesBuilder attributesBuilder = Attributes.builder();
attributes.forEach(attributesBuilder::put);
return attributesBuilder.build();
}
/**
 * Snapshots the HTTP request count of the elapsed second into the TPS window
 * and resets the per-second counter; the window keeps STATIC_PERIOD/1000 samples.
 */
public void snapshotHTTPTPS() {
Integer tps = httpRequestPerSecond.intValue();
httpRequestTPSSnapshots.add(tps);
httpRequestPerSecond.set(0);
if (httpRequestTPSSnapshots.size() > STATIC_PERIOD / 1000) {
httpRequestTPSSnapshots.removeFirst();
}
}
/** Maximum HTTP request TPS over the snapshot window; 0 before the first snapshot. */
public double maxHTTPTPS() {
    // Collections.max throws NoSuchElementException on an empty collection;
    // the gauge callback can fire before the first snapshot is taken.
    if (httpRequestTPSSnapshots.isEmpty()) {
        return 0d;
    }
    return Collections.max(httpRequestTPSSnapshots);
}
/** Average HTTP request TPS over the snapshot window. */
public double avgHTTPTPS() {
return avg(httpRequestTPSSnapshots);
}
/**
 * Accumulates one request's request-to-response cost (ms) and tracks the max.
 * The three updates are not atomic as a group; sampled values may be slightly stale.
 */
public void recordHTTPReqResTimeCost(long cost) {
wholeRequestNum.incrementAndGet();
wholeCost = wholeCost + cost;
if (cost > maxCost.longValue()) {
maxCost.set(cost);
}
}
/** Resets the HTTP request-cost and body-decode statistics. */
public void httpStatInfoClear() {
wholeRequestNum.set(0L);
wholeCost = 0f;
maxCost.set(0L);
httpDecodeNum.set(0L);
httpDecodeTimeCost = 0f;
}
/** Accumulates one HTTP body decode cost sample (ms). */
public void recordDecodeTimeCost(long cost) {
httpDecodeNum.incrementAndGet();
httpDecodeTimeCost = httpDecodeTimeCost + cost;
}
/** Average HTTP body decode cost in ms; 0 when nothing was decoded. */
public double avgHTTPBodyDecodeCost() {
return (httpDecodeNum.longValue() == 0L) ? 0d : (double) httpDecodeTimeCost / httpDecodeNum.longValue();
}
/** Adds {@code delta} discarded batch-send messages to the sum and the exported counter. */
public void recordSendBatchMsgDiscard(long delta) {
sendBatchMsgDiscardNumSum.addAndGet(delta);
sendBatchMsgDiscardNumSumMetric.getInstrument().add(delta, buildAttributes(labelMap));
}
/**
 * Snapshots the batch-send message count of the elapsed second into the TPS
 * window and resets the per-second counter.
 */
public void snapshotSendBatchMsgTPS() {
Integer tps = sendBatchMsgNumPerSecond.intValue();
sendBatchMsgTPSSnapshots.add(tps);
sendBatchMsgNumPerSecond.set(0);
if (sendBatchMsgTPSSnapshots.size() > STATIC_PERIOD / 1000) {
sendBatchMsgTPSSnapshots.removeFirst();
}
}
/** Maximum batch-send TPS over the snapshot window; 0 before the first snapshot. */
public double maxSendBatchMsgTPS() {
    // Collections.max throws NoSuchElementException on an empty collection;
    // the gauge callback can fire before the first snapshot is taken.
    if (sendBatchMsgTPSSnapshots.isEmpty()) {
        return 0d;
    }
    return Collections.max(sendBatchMsgTPSSnapshots);
}
/** Average batch-send TPS over the snapshot window. */
public double avgSendBatchMsgTPS() {
return avg(sendBatchMsgTPSSnapshots);
}
/** Adds {@code delta} batch-sent messages to the per-second bucket, the sum, and the exported counter. */
public void recordSendBatchMsg(long delta) {
sendBatchMsgNumPerSecond.addAndGet(delta);
sendBatchMsgNumSum.addAndGet(delta);
sendBatchMsgNumSumMetric.getInstrument().add(delta, buildAttributes(labelMap));
}
/** Adds {@code delta} failed batch-sent messages to the sum and the exported counter. */
public void recordSendBatchMsgFailed(long delta) {
sendBatchMsgFailNumSum.getAndAdd(delta);
sendBatchMsgFailNumSumMetric.getInstrument().add(delta, buildAttributes(labelMap));
}
/** Total batch-sent messages since the last {@link #cleanSendBatchStat()}. */
public long getSendBatchMsgNumSum() {
return sendBatchMsgNumSum.longValue();
}
/** Total failed batch-sent messages since the last {@link #cleanSendBatchStat()}. */
public long getSendBatchMsgFailNumSum() {
return sendBatchMsgFailNumSum.longValue();
}
/** Ratio of failed to total batch-sent messages; 0 when nothing was sent. */
public double getSendBatchMsgFailRate() {
return (sendBatchMsgNumSum.longValue() == 0L) ? 0f : sendBatchMsgFailNumSum.floatValue() / sendBatchMsgNumSum.longValue();
}
/** Resets the batch-send success and failure sums. */
public void cleanSendBatchStat() {
sendBatchMsgNumSum.set(0L);
sendBatchMsgFailNumSum.set(0L);
}
/** Total discarded batch-send messages (not reset by cleanSendBatchStat). */
public long getSendBatchMsgDiscardNumSum() {
return sendBatchMsgDiscardNumSum.longValue();
}
/**
 * Snapshots the send-message count of the elapsed second into the TPS window
 * and resets the per-second counter.
 */
public void snapshotSendMsgTPS() {
Integer tps = sendMsgNumPerSecond.intValue();
sendMsgTPSSnapshots.add(tps);
sendMsgNumPerSecond.set(0);
if (sendMsgTPSSnapshots.size() > STATIC_PERIOD / 1000) {
sendMsgTPSSnapshots.removeFirst();
}
}
/** Maximum send-message TPS over the snapshot window; 0 before the first snapshot. */
public double maxSendMsgTPS() {
    // Collections.max throws NoSuchElementException on an empty collection;
    // the gauge callback can fire before the first snapshot is taken.
    if (sendMsgTPSSnapshots.isEmpty()) {
        return 0d;
    }
    return Collections.max(sendMsgTPSSnapshots);
}
/** Average send-message TPS over the snapshot window. */
public double avgSendMsgTPS() {
return avg(sendMsgTPSSnapshots);
}
/** Counts one sent message toward the per-second bucket, the sum, and the exported counter. */
public void recordSendMsg() {
sendMsgNumPerSecond.incrementAndGet();
sendMsgNumSum.incrementAndGet();
sendMsgNumSumMetric.getInstrument().add(1, buildAttributes(labelMap));
}
/** Counts one reply message in the sum and the exported counter. */
public void recordReplyMsg() {
replyMsgNumSum.incrementAndGet();
replyMsgNumSumMetric.getInstrument().add(1, buildAttributes(labelMap));
}
/** Counts one failed reply message in the sum and the exported counter. */
public void recordReplyMsgFailed() {
replyMsgFailNumSum.incrementAndGet();
replyMsgFailNumSumMetric.getInstrument().add(1, buildAttributes(labelMap));
}
/** Total reply messages since the last {@link #cleanSendMsgStat()}. */
public long getReplyMsgNumSum() {
return replyMsgNumSum.longValue();
}
/** Total failed reply messages since the last {@link #cleanSendMsgStat()}. */
public long getReplyMsgFailNumSum() {
return replyMsgFailNumSum.longValue();
}
/** Total sent messages since the last {@link #cleanSendMsgStat()}. */
public long getSendMsgNumSum() {
return sendMsgNumSum.longValue();
}
/** Total failed sent messages since the last {@link #cleanSendMsgStat()}. */
public long getSendMsgFailNumSum() {
return sendMsgFailNumSum.longValue();
}
/** Ratio of failed to total sent messages; 0 when nothing was sent. */
public float getSendMsgFailRate() {
return (sendMsgNumSum.longValue() == 0L) ? 0f : sendMsgFailNumSum.floatValue() / sendMsgNumSum.longValue();
}
/** Counts one failed send in the sum and the exported counter. */
public void recordSendMsgFailed() {
sendMsgFailNumSum.incrementAndGet();
sendMsgFailNumSumMetric.getInstrument().add(1, buildAttributes(labelMap));
}
/** Resets send and reply success/failure sums. */
public void cleanSendMsgStat() {
sendMsgNumSum.set(0L);
replyMsgNumSum.set(0L);
sendMsgFailNumSum.set(0L);
replyMsgFailNumSum.set(0L);
}
/**
 * Snapshots the push-message count of the elapsed second into the TPS window
 * and resets the per-second counter.
 */
public void snapshotPushMsgTPS() {
Integer tps = pushMsgNumPerSecond.intValue();
pushMsgTPSSnapshots.add(tps);
pushMsgNumPerSecond.set(0);
if (pushMsgTPSSnapshots.size() > STATIC_PERIOD / 1000) {
pushMsgTPSSnapshots.removeFirst();
}
}
/** Accumulates one HTTP push latency sample (ms) and tracks the max. */
public void recordHTTPPushTimeCost(long cost) {
wholePushRequestNum.incrementAndGet();
wholePushCost = wholePushCost + cost;
if (cost > maxHttpPushLatency.longValue()) {
maxHttpPushLatency.set(cost);
}
}
/** Average HTTP push latency in ms; 0 when no push was recorded. */
public double avgHTTPPushLatency() {
return (wholePushRequestNum.longValue() == 0L) ? 0f : wholePushCost / wholePushRequestNum.longValue();
}
/** Maximum observed HTTP push latency in ms. */
public double maxHTTPPushLatency() {
return maxHttpPushLatency.floatValue();
}
/** Maximum push-message TPS over the snapshot window; 0 before the first snapshot. */
public double maxPushMsgTPS() {
    // Collections.max throws NoSuchElementException on an empty collection;
    // the gauge callback can fire before the first snapshot is taken.
    if (pushMsgTPSSnapshots.isEmpty()) {
        return 0d;
    }
    return Collections.max(pushMsgTPSSnapshots);
}
/** Average push-message TPS over the snapshot window. */
public double avgPushMsgTPS() {
return avg(pushMsgTPSSnapshots);
}
/** Counts one pushed message toward the per-second bucket, the sum, and the exported counter. */
public void recordPushMsg() {
pushMsgNumPerSecond.incrementAndGet();
httpPushMsgNumSum.incrementAndGet();
httpPushMsgNumSumMetric.getInstrument().add(1, buildAttributes(labelMap));
}
/** Total pushed messages since the last {@link #cleanHttpPushMsgStat()}. */
public long getHttpPushMsgNumSum() {
return httpPushMsgNumSum.longValue();
}
/** Total failed pushes since the last {@link #cleanHttpPushMsgStat()}. */
public long getHttpPushFailNumSum() {
return httpPushFailNumSum.longValue();
}
/** Ratio of failed to total pushed messages; 0 when nothing was pushed. */
public double getHttpPushMsgFailRate() {
return (httpPushMsgNumSum.longValue() == 0L) ? 0f : httpPushFailNumSum.floatValue() / httpPushMsgNumSum.longValue();
}
/**
 * Records one failed HTTP push to a client.
 *
 * <p>Fix: the original body incremented {@code sendMsgFailNumSum} /
 * {@code sendMsgFailNumSumMetric} (copy-paste from {@code recordSendMsgFailed}),
 * so {@code httpPushFailNumSum} stayed 0 forever and
 * {@link #getHttpPushMsgFailRate()} always reported 0.
 */
public void recordHttpPushMsgFailed() {
    httpPushFailNumSum.incrementAndGet();
    httpPushFailNumSumMetric.getInstrument().add(1, buildAttributes(labelMap));
}
/**
 * Resets the push success/failure sums.
 * NOTE(review): this also resets wholeRequestNum/wholeCost/maxCost, which are
 * the HTTP *request* cost statistics (also cleared by httpStatInfoClear()),
 * rather than the push-latency fields (wholePushRequestNum/wholePushCost/
 * maxHttpPushLatency) — confirm this overlap is intentional.
 */
public void cleanHttpPushMsgStat() {
httpPushFailNumSum.set(0L);
httpPushMsgNumSum.set(0L);
wholeRequestNum.set(0L);
wholeCost = 0f;
maxCost.set(0L);
}
/** Accumulates one batch send-to-MQ cost sample (ms). */
public void recordBatchSendMsgCost(long cost) {
batchSend2MQNum.incrementAndGet();
batchSend2MQWholeCost = batchSend2MQWholeCost + cost;
}
/** Average batch send-to-MQ cost in ms; 0 when no sample was recorded. */
public double avgBatchSendMsgCost() {
return (batchSend2MQNum.intValue() == 0) ? 0f : batchSend2MQWholeCost / batchSend2MQNum.intValue();
}
/** Accumulates one send-to-MQ cost sample (ms). */
public void recordSendMsgCost(long cost) {
send2MQNum.incrementAndGet();
send2MQWholeCost = send2MQWholeCost + cost;
}
/** Average send-to-MQ cost in ms; 0 when no sample was recorded. */
public double avgSendMsgCost() {
return (send2MQNum.intValue() == 0) ? 0f : send2MQWholeCost / send2MQNum.intValue();
}
/** Accumulates one reply-to-MQ cost sample (ms). */
public void recordReplyMsgCost(long cost) {
reply2MQNum.incrementAndGet();
reply2MQWholeCost = reply2MQWholeCost + cost;
}
/** Average reply-to-MQ cost in ms; 0 when no sample was recorded. */
public double avgReplyMsgCost() {
return (reply2MQNum.intValue() == 0) ? 0f : reply2MQWholeCost / reply2MQNum.intValue();
}
/** Resets all send/reply-to-MQ cost statistics. */
public void send2MQStatInfoClear() {
batchSend2MQWholeCost = 0f;
batchSend2MQNum.set(0L);
send2MQWholeCost = 0f;
send2MQNum.set(0L);
reply2MQWholeCost = 0f;
reply2MQNum.set(0L);
}
/** Current depth of the batch-message executor queue. */
public long getBatchMsgQueueSize() {
return batchMsgExecutor.getQueue().size();
}
/** Current depth of the send-message executor queue. */
public long getSendMsgQueueSize() {
return sendMsgExecutor.getQueue().size();
}
/** Current depth of the push-message executor queue. */
public long getPushMsgQueueSize() {
return pushMsgExecutor.getQueue().size();
}
/** Current depth of the HTTP retry queue. */
public long getHttpRetryQueueSize() {
return httpFailedQueue.size();
}
/**
 * Arithmetic mean of the recorded per-second TPS samples; 0 when the window is empty.
 */
private float avg(LinkedList<Integer> linkedList) {
    if (linkedList.isEmpty()) {
        return 0.0f;
    }
    // Plain loop with a long accumulator: avoids the Optional.get() of
    // stream().reduce(Integer::sum) and the int-overflow of an int sum.
    long sum = 0L;
    for (int sample : linkedList) {
        sum += sample;
    }
    return (float) sum / linkedList.size();
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.paimon.benchmark.metric.cpu;
import org.apache.paimon.benchmark.metric.cpu.clock.Clock;
import org.apache.paimon.benchmark.metric.cpu.clock.SystemClock;
import org.apache.commons.io.IOUtils;
import org.apache.commons.io.filefilter.AndFileFilter;
import org.apache.commons.io.filefilter.DirectoryFileFilter;
import org.apache.commons.io.filefilter.RegexFileFilter;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.lang3.StringUtils;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileFilter;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStreamReader;
import java.math.BigInteger;
import java.nio.charset.Charset;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Queue;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* A Proc file-system based ProcessTree. Works only on Linux. Based on ProcfsBasedProcessTree from
* YARN project.
*/
public class ProcfsBasedProcessTree {
private static final String PROCFS = "/proc/";
private static final String SELF = "self";
// Matches one line of /proc/<pid>/stat (see proc(5)); captures the pid, the
// parenthesized command name, ppid/pgrp/session ids, utime/stime, and vsize/rss.
private static final Pattern PROCFS_STAT_FILE_FORMAT =
Pattern.compile(
"^([\\d-]+)\\s\\((.*)\\)\\s[^\\s]\\s([\\d-]+)\\s([\\d-]+)\\s"
+ "([\\d-]+)\\s([\\d-]+\\s){7}(\\d+)\\s(\\d+)\\s([\\d-]+\\s){7}(\\d+)\\s"
+ "(\\d+)(\\s[\\d-]+){15}");
// Sentinel for values that cannot be determined on this platform.
public static final int UNAVAILABLE = -1;
public static final String PROCFS_STAT_FILE = "stat";
public static final String PROCFS_CMDLINE_FILE = "cmdline";
public static final long PAGE_SIZE = SysInfoLinux.PAGE_SIZE;
public static final long JIFFY_LENGTH_IN_MILLIS =
SysInfoLinux.JIFFY_LENGTH_IN_MILLIS; // in millisecond
// Tracks cumulative CPU jiffies between updateProcessTree() calls.
private final CpuTimeTracker cpuTimeTracker;
private Clock clock;
/** Field names that appear in /proc/<pid>/smaps memory-mapping entries. */
enum MemInfo {
SIZE("Size"),
RSS("Rss"),
PSS("Pss"),
SHARED_CLEAN("Shared_Clean"),
SHARED_DIRTY("Shared_Dirty"),
PRIVATE_CLEAN("Private_Clean"),
PRIVATE_DIRTY("Private_Dirty"),
REFERENCED("Referenced"),
ANONYMOUS("Anonymous"),
ANON_HUGE_PAGES("AnonHugePages"),
SWAP("swap"),
KERNEL_PAGE_SIZE("kernelPageSize"),
MMU_PAGE_SIZE("mmuPageSize"),
INVALID("invalid");
// Field name as it appears in the smaps file.
private String name;
private MemInfo(String name) {
this.name = name;
}
/** Case-insensitive lookup by smaps field name; returns INVALID when unknown. */
public static MemInfo getMemInfoByName(String name) {
String searchName = StringUtils.trimToNull(name);
for (MemInfo info : MemInfo.values()) {
if (info.name.trim().equalsIgnoreCase(searchName)) {
return info;
}
}
return INVALID;
}
}
public static final String SMAPS = "smaps";
public static final int KB_TO_BYTES = 1024;
private static final String KB = "kB";
// Mappings with these shared permissions are skipped when computing smaps RSS.
private static final String READ_ONLY_WITH_SHARED_PERMISSION = "r--s";
private static final String READ_EXECUTE_WITH_SHARED_PERMISSION = "r-xs";
// Matches the "start-end perms" header line of an smaps mapping entry.
private static final Pattern ADDRESS_PATTERN =
Pattern.compile("([[a-f]|(0-9)]*)-([[a-f]|(0-9)]*)(\\s)*([rxwps\\-]*)");
// Matches a "Field:  <value> ..." detail line of an smaps entry.
private static final Pattern MEM_INFO_PATTERN = Pattern.compile("(^[A-Z].*):[\\s ]*(\\d+).*");
// When true, RSS is derived from /proc/<pid>/smaps instead of the stat rss pages.
private boolean smapsEnabled;
protected Map<String, ProcessTreeSmapMemInfo> processSMAPTree =
new HashMap<String, ProcessTreeSmapMemInfo>();
// to enable testing, using this variable which can be configured
// to a test directory.
private String procfsDir;
private static String deadPid = "-1";
// Root pid of this tree; equals deadPid when the supplied pid was invalid.
private String pid = deadPid;
private static Pattern numberPattern = Pattern.compile("[1-9][0-9]*");
private long cpuTime = UNAVAILABLE;
protected Map<String, ProcessInfo> processTree = new HashMap<String, ProcessInfo>();
/** Builds a tree rooted at the current JVM process (resolves /proc/self). */
public ProcfsBasedProcessTree() throws IOException {
this(new File(PROCFS, SELF).getCanonicalFile().getName());
}
/** Builds a tree rooted at the current JVM process, optionally using smaps for RSS. */
public ProcfsBasedProcessTree(boolean smapsEnabled) throws IOException {
this(new File(PROCFS, SELF).getCanonicalFile().getName(), smapsEnabled);
}
/** Builds a tree rooted at {@code pid} over the real /proc file system. */
public ProcfsBasedProcessTree(String pid) {
this(pid, PROCFS);
}
/** Builds a tree rooted at {@code pid}, optionally using smaps for RSS. */
public ProcfsBasedProcessTree(String pid, boolean smapsEnabled) {
this(pid, PROCFS, SystemClock.getInstance(), smapsEnabled);
}
/** Builds a tree rooted at {@code pid} over a custom procfs dir (for tests). */
public ProcfsBasedProcessTree(String pid, String procfsDir) {
this(pid, procfsDir, SystemClock.getInstance(), false);
}
/**
 * Build a new process tree rooted at the pid.
 *
 * <p>This method is provided mainly for testing purposes, where the root of the proc file
 * system can be adjusted.
 *
 * @param pid root of the process tree
 * @param procfsDir the root of a proc file system - only used for testing.
 * @param clock clock for controlling time for testing
 */
public ProcfsBasedProcessTree(String pid, String procfsDir, Clock clock, boolean smapsEnabled) {
this.clock = clock;
this.pid = getValidPID(pid);
this.procfsDir = procfsDir;
this.cpuTimeTracker = new CpuTimeTracker(JIFFY_LENGTH_IN_MILLIS);
this.smapsEnabled = smapsEnabled;
}
/** Enables or disables smaps-based RSS accounting. */
public void setSmapsEnabled(boolean smapsEnabled) {
this.smapsEnabled = smapsEnabled;
}
/**
 * Update process-tree with latest state. If the root-process is not alive, tree will be empty.
 */
public void updateProcessTree() {
if (!pid.equals(deadPid)) {
// Get the list of processes
List<String> processList = getProcessList();
Map<String, ProcessInfo> allProcessInfo = new HashMap<String, ProcessInfo>();
// cache the processTree to get the age for processes
Map<String, ProcessInfo> oldProcs = new HashMap<String, ProcessInfo>(processTree);
processTree.clear();
ProcessInfo me = null;
for (String proc : processList) {
// Get information for each process
ProcessInfo pInfo = new ProcessInfo(proc);
if (constructProcessInfo(pInfo, procfsDir) != null) {
allProcessInfo.put(proc, pInfo);
if (proc.equals(this.pid)) {
me = pInfo; // cache 'me'
processTree.put(proc, pInfo);
}
}
}
// Root process vanished between listing /proc and reading its stat file.
if (me == null) {
return;
}
// Add each process to its parent.
for (Map.Entry<String, ProcessInfo> entry : allProcessInfo.entrySet()) {
String pID = entry.getKey();
if (!"1".equals(pID)) {
ProcessInfo pInfo = entry.getValue();
String ppid = pInfo.getPpid();
// If parent is init and process is not session leader,
// attach to sessionID
if ("1".equals(ppid)) {
String sid = pInfo.getSessionId().toString();
if (!pID.equals(sid)) {
ppid = sid;
}
}
ProcessInfo parentPInfo = allProcessInfo.get(ppid);
if (parentPInfo != null) {
parentPInfo.addChild(pInfo);
}
}
}
// now start constructing the process-tree: breadth-first walk of the
// descendants of 'me', adding each to processTree.
List<ProcessInfo> children = me.getChildren();
Queue<ProcessInfo> pInfoQueue = new ArrayDeque<ProcessInfo>(children);
while (!pInfoQueue.isEmpty()) {
ProcessInfo pInfo = pInfoQueue.remove();
if (!processTree.containsKey(pInfo.getPid())) {
processTree.put(pInfo.getPid(), pInfo);
}
pInfoQueue.addAll(pInfo.getChildren());
}
// update age values and compute the number of jiffies since last update
for (Map.Entry<String, ProcessInfo> procs : processTree.entrySet()) {
ProcessInfo oldInfo = oldProcs.get(procs.getKey());
if (procs.getValue() != null) {
procs.getValue().updateJiffy(oldInfo);
if (oldInfo != null) {
procs.getValue().updateAge(oldInfo);
}
}
}
if (smapsEnabled) {
// Update smaps info
processSMAPTree.clear();
for (ProcessInfo p : processTree.values()) {
if (p != null) {
// Get information for each process
ProcessTreeSmapMemInfo memInfo = new ProcessTreeSmapMemInfo(p.getPid());
constructProcessSMAPInfo(memInfo, procfsDir);
processSMAPTree.put(p.getPid(), memInfo);
}
}
}
}
}
/**
 * Verify that the given process id is same as its process group id.
 *
 * @return true if the process id matches else return false.
 */
public boolean checkPidPgrpidForMatch() {
// Delegates to the static variant using this tree's root pid and the real /proc.
return checkPidPgrpidForMatch(pid, PROCFS);
}
/**
 * Checks whether {@code _pid} equals its own process-group id, reading the
 * process info from {@code procfs}.
 *
 * @return true when the ids match, or when the process info cannot be read
 *     (group leader already finished execution; no warning is issued).
 */
public static boolean checkPidPgrpidForMatch(String _pid, String procfs) {
    ProcessInfo info = constructProcessInfo(new ProcessInfo(_pid), procfs);
    if (info == null) {
        // Process already exited between discovery and the stat read.
        return true;
    }
    return info.getPgrpId().toString().equals(_pid);
}
// Row format used by getProcessTreeDump() for each process in the tree.
private static final String PROCESSTREE_DUMP_FORMAT = "\t|- %s %s %d %d %s %d %d %d %d %s%n";
/** Returns an unmodifiable snapshot of the pids currently in the process tree. */
public List<String> getCurrentProcessIDs() {
    List<String> pids = new ArrayList<>(processTree.keySet());
    return Collections.unmodifiableList(pids);
}
/**
* Get a dump of the process-tree.
*
* @return a string concatenating the dump of information of all the processes in the
* process-tree
*/
    public String getProcessTreeDump() {
        StringBuilder ret = new StringBuilder();
        // The header.
        ret.append(
                String.format(
                        "\t|- PID PPID PGRPID SESSID CMD_NAME "
                                + "USER_MODE_TIME(MILLIS) SYSTEM_TIME(MILLIS) VMEM_USAGE(BYTES) "
                                + "RSSMEM_USAGE(PAGES) FULL_CMD_LINE%n"));
        // One row per live process, formatted with PROCESSTREE_DUMP_FORMAT to match the header.
        for (ProcessInfo p : processTree.values()) {
            if (p != null) {
                ret.append(
                        String.format(
                                PROCESSTREE_DUMP_FORMAT,
                                p.getPid(),
                                p.getPpid(),
                                p.getPgrpId(),
                                p.getSessionId(),
                                p.getName(),
                                p.getUtime(),
                                p.getStime(),
                                p.getVmem(),
                                p.getRssmemPage(),
                                p.getCmdLine(procfsDir)));
            }
        }
        return ret.toString();
    }
public long getVirtualMemorySize() {
return getVirtualMemorySize(0);
}
public long getVirtualMemorySize(int olderThanAge) {
long total = 0L;
boolean isAvailable = false;
for (ProcessInfo p : processTree.values()) {
if (p != null) {
isAvailable = true;
if (p.getAge() > olderThanAge) {
total += p.getVmem();
}
}
}
return isAvailable ? total : UNAVAILABLE;
}
    // Convenience overload: RSS of all processes regardless of age.
    public long getRssMemorySize() {
        return getRssMemorySize(0);
    }
    public long getRssMemorySize(int olderThanAge) {
        // Without a valid page size we cannot convert pages to bytes.
        if (PAGE_SIZE < 0) {
            return UNAVAILABLE;
        }
        // When smaps is enabled, compute the (more accurate) smaps-based figure instead.
        if (smapsEnabled) {
            return getSmapBasedRssMemorySize(olderThanAge);
        }
        boolean isAvailable = false;
        long totalPages = 0;
        for (ProcessInfo p : processTree.values()) {
            if (p != null) {
                isAvailable = true;
                if (p.getAge() > olderThanAge) {
                    totalPages += p.getRssmemPage();
                }
            }
        }
        return isAvailable ? totalPages * PAGE_SIZE : UNAVAILABLE; // convert # pages to byte
    }
/**
* Get the resident set size (RSS) memory used by all the processes in the process-tree that are
* older than the passed in age. RSS is calculated based on SMAP information. Skip mappings with
* "r--s", "r-xs" permissions to get real RSS usage of the process.
*
* @param olderThanAge processes above this age are included in the memory addition
* @return rss memory used by the process-tree in bytes, for processes older than this age.
* return {@link #UNAVAILABLE} if it cannot be calculated.
*/
    private long getSmapBasedRssMemorySize(int olderThanAge) {
        long total = UNAVAILABLE;
        for (ProcessInfo p : processTree.values()) {
            if (p != null) {
                // set resource to 0 instead of UNAVAILABLE as soon as we see any process
                if (total == UNAVAILABLE) {
                    total = 0;
                }
                if (p.getAge() > olderThanAge) {
                    ProcessTreeSmapMemInfo procMemInfo = processSMAPTree.get(p.getPid());
                    if (procMemInfo != null) {
                        for (ProcessSmapMemoryInfo info : procMemInfo.getMemoryInfoList()) {
                            // Do not account for r--s or r-xs mappings: those are shared
                            // read-only/executable mappings, not reclaimable process memory.
                            if (info.getPermission()
                                            .trim()
                                            .equalsIgnoreCase(READ_ONLY_WITH_SHARED_PERMISSION)
                                    || info.getPermission()
                                            .trim()
                                            .equalsIgnoreCase(
                                                    READ_EXECUTE_WITH_SHARED_PERMISSION)) {
                                continue;
                            }
                            // Account for anonymous to know the amount of
                            // memory reclaimable by killing the process
                            total += info.anonymous;
                        }
                    }
                }
            }
        }
        // smaps values are in kB; convert only when we actually accumulated something.
        if (total > 0) {
            total *= KB_TO_BYTES; // convert to bytes
        }
        return total; // size
    }
    // Returns cumulative CPU time (ms) of the whole tree; UNAVAILABLE if jiffy length is unknown.
    public long getCumulativeCpuTime() {
        if (JIFFY_LENGTH_IN_MILLIS < 0) {
            return UNAVAILABLE;
        }
        long incJiffies = 0;
        boolean isAvailable = false;
        for (ProcessInfo p : processTree.values()) {
            if (p != null) {
                // data is available
                isAvailable = true;
                // dtime is the per-process jiffy delta since the previous updateProcessTree().
                incJiffies += p.getDtime();
            }
        }
        if (isAvailable) {
            // reset cpuTime to 0 instead of UNAVAILABLE on first successful reading
            if (cpuTime == UNAVAILABLE) {
                cpuTime = 0L;
            }
            // Accumulate so CPU time of already-exited children is not lost.
            cpuTime += incJiffies * JIFFY_LENGTH_IN_MILLIS;
        }
        return cpuTime;
    }
private BigInteger getTotalProcessJiffies() {
BigInteger totalStime = BigInteger.ZERO;
long totalUtime = 0;
for (ProcessInfo p : processTree.values()) {
if (p != null) {
totalUtime += p.getUtime();
totalStime = totalStime.add(p.getStime());
}
}
return totalStime.add(BigInteger.valueOf(totalUtime));
}
/**
* Get the CPU usage by all the processes in the process-tree in Unix. Note: UNAVAILABLE will be
* returned in case when CPU usage is not available. It is NOT advised to return any other error
* code.
*
* @return percentage CPU usage since the process-tree was created, {@link #UNAVAILABLE} if CPU
* usage cannot be calculated or not available.
*/
    public float getCpuUsagePercent() {
        BigInteger processTotalJiffies = getTotalProcessJiffies();
        // Feed the current jiffy total and wall-clock time into the tracker, which
        // computes the usage percentage from the deltas between calls.
        cpuTimeTracker.updateElapsedJiffies(processTotalJiffies, clock.absoluteTimeMillis());
        return cpuTimeTracker.getCpuTrackerUsagePercent();
    }
private static String getValidPID(String pid) {
if (pid == null) {
return deadPid;
}
Matcher m = numberPattern.matcher(pid);
if (m.matches()) {
return pid;
}
return deadPid;
}
/** Get the list of all processes in the system. */
    private List<String> getProcessList() {
        List<String> processList = Collections.emptyList();
        // A process shows up in procfs as a directory whose name is all digits.
        FileFilter procListFileFilter =
                new AndFileFilter(DirectoryFileFilter.INSTANCE, new RegexFileFilter(numberPattern));
        File dir = new File(procfsDir);
        // listFiles may return null (e.g. procfsDir unreadable); ArrayUtils handles that.
        File[] processDirs = dir.listFiles(procListFileFilter);
        if (ArrayUtils.isNotEmpty(processDirs)) {
            processList = new ArrayList<String>(processDirs.length);
            for (File processDir : processDirs) {
                processList.add(processDir.getName());
            }
        }
        return processList;
    }
/**
* Construct the ProcessInfo using the process' PID and procfs rooted at the specified directory
* and return the same. It is provided mainly to assist testing purposes.
*
* <p>Returns null on failing to read from procfs,
*
* @param pinfo ProcessInfo that needs to be updated
* @param procfsDir root of the proc file system
* @return updated ProcessInfo, null on errors.
*/
private static ProcessInfo constructProcessInfo(ProcessInfo pinfo, String procfsDir) {
ProcessInfo ret = null;
// Read "procfsDir/<pid>/stat" file - typically /proc/<pid>/stat
BufferedReader in = null;
InputStreamReader fReader = null;
try {
File pidDir = new File(procfsDir, pinfo.getPid());
fReader =
new InputStreamReader(
new FileInputStream(new File(pidDir, PROCFS_STAT_FILE)),
Charset.forName("UTF-8"));
in = new BufferedReader(fReader);
} catch (FileNotFoundException f) {
// The process vanished in the interim!
return ret;
}
ret = pinfo;
try {
String str = in.readLine(); // only one line
Matcher m = PROCFS_STAT_FILE_FORMAT.matcher(str);
boolean mat = m.find();
if (mat) {
String processName = "(" + m.group(2) + ")";
// Set (name) (ppid) (pgrpId) (session) (utime) (stime) (vsize) (rss)
pinfo.updateProcessInfo(
processName,
m.group(3),
Integer.parseInt(m.group(4)),
Integer.parseInt(m.group(5)),
Long.parseLong(m.group(7)),
new BigInteger(m.group(8)),
Long.parseLong(m.group(10)),
Long.parseLong(m.group(11)));
} else {
ret = null;
}
} catch (IOException io) {
ret = null;
} finally {
// Close the streams
try {
fReader.close();
try {
in.close();
} catch (IOException i) {
}
} catch (IOException i) {
}
}
return ret;
}
/**
* Returns a string printing PIDs of process present in the ProcfsBasedProcessTree. Output
* format : [pid pid ..]
*/
@Override
public String toString() {
StringBuffer pTree = new StringBuffer("[ ");
for (String p : processTree.keySet()) {
pTree.append(p);
pTree.append(" ");
}
return pTree.substring(0, pTree.length()) + "]";
}
/** Returns boolean indicating whether pid is in process tree. */
    public boolean contains(String pid) {
        // Membership test against the snapshot built by the last updateProcessTree() call.
        return processTree.containsKey(pid);
    }
/** Class containing information of a process. */
private static class ProcessInfo {
private String pid; // process-id
private String name; // command name
private Integer pgrpId; // process group-id
private String ppid; // parent process-id
private Integer sessionId; // session-id
private Long vmem; // virtual memory usage
private Long rssmemPage; // rss memory usage in # of pages
private Long utime = 0L; // # of jiffies in user mode
private final BigInteger MAX_LONG = BigInteger.valueOf(Long.MAX_VALUE);
private BigInteger stime = new BigInteger("0"); // # of jiffies in kernel mode
// how many times has this process been seen alive
private int age;
// # of jiffies used since last update:
private Long dtime = 0L;
// dtime = (utime + stime) - (utimeOld + stimeOld)
// We need this to compute the cumulative CPU time
// because the subprocess may finish earlier than root process
private List<ProcessInfo> children = new ArrayList<ProcessInfo>(); // list of children
public ProcessInfo(String pid) {
this.pid = pid;
// seeing this the first time.
this.age = 1;
}
public String getPid() {
return pid;
}
public String getName() {
return name;
}
public Integer getPgrpId() {
return pgrpId;
}
public String getPpid() {
return ppid;
}
public Integer getSessionId() {
return sessionId;
}
public Long getVmem() {
return vmem;
}
public Long getUtime() {
return utime;
}
public BigInteger getStime() {
return stime;
}
public Long getDtime() {
return dtime;
}
public Long getRssmemPage() { // get rss # of pages
return rssmemPage;
}
public int getAge() {
return age;
}
public void updateProcessInfo(
String name,
String ppid,
Integer pgrpId,
Integer sessionId,
Long utime,
BigInteger stime,
Long vmem,
Long rssmem) {
this.name = name;
this.ppid = ppid;
this.pgrpId = pgrpId;
this.sessionId = sessionId;
this.utime = utime;
this.stime = stime;
this.vmem = vmem;
this.rssmemPage = rssmem;
}
public void updateJiffy(ProcessInfo oldInfo) {
if (oldInfo == null) {
BigInteger sum = this.stime.add(BigInteger.valueOf(this.utime));
if (sum.compareTo(MAX_LONG) > 0) {
this.dtime = 0L;
} else {
this.dtime = sum.longValue();
}
return;
}
this.dtime =
(this.utime - oldInfo.utime + this.stime.subtract(oldInfo.stime).longValue());
}
public void updateAge(ProcessInfo oldInfo) {
this.age = oldInfo.age + 1;
}
public boolean addChild(ProcessInfo p) {
return children.add(p);
}
public List<ProcessInfo> getChildren() {
return children;
}
public String getCmdLine(String procfsDir) {
String ret = "N/A";
if (pid == null) {
return ret;
}
BufferedReader in = null;
InputStreamReader fReader = null;
try {
fReader =
new InputStreamReader(
new FileInputStream(
new File(new File(procfsDir, pid), PROCFS_CMDLINE_FILE)),
Charset.forName("UTF-8"));
} catch (FileNotFoundException f) {
// The process vanished in the interim!
return ret;
}
in = new BufferedReader(fReader);
try {
ret = in.readLine(); // only one line
if (ret == null) {
ret = "N/A";
} else {
ret = ret.replace('\0', ' '); // Replace each null char with a space
if (ret.isEmpty()) {
// The cmdline might be empty because the process is swapped out or
// is a zombie.
ret = "N/A";
}
}
} catch (IOException io) {
ret = "N/A";
} finally {
// Close the streams
try {
fReader.close();
try {
in.close();
} catch (IOException i) {
}
} catch (IOException i) {
}
}
return ret;
}
}
/**
* Update memory related information
*
* @param pInfo
* @param procfsDir
*/
    private static void constructProcessSMAPInfo(ProcessTreeSmapMemInfo pInfo, String procfsDir) {
        BufferedReader in = null;
        InputStreamReader fReader = null;
        try {
            File pidDir = new File(procfsDir, pInfo.getPid());
            File file = new File(pidDir, SMAPS);
            if (!file.exists()) {
                return;
            }
            fReader = new InputStreamReader(new FileInputStream(file), Charset.forName("UTF-8"));
            in = new BufferedReader(fReader);
            ProcessSmapMemoryInfo memoryMappingInfo = null;
            List<String> lines = IOUtils.readLines(in);
            for (String line : lines) {
                line = line.trim();
                try {
                    // An address line starts a new mapping; subsequent "Key: value" lines
                    // belong to the most recently seen mapping.
                    Matcher address = ADDRESS_PATTERN.matcher(line);
                    if (address.find()) {
                        memoryMappingInfo = new ProcessSmapMemoryInfo(line);
                        memoryMappingInfo.setPermission(address.group(4));
                        pInfo.getMemoryInfoList().add(memoryMappingInfo);
                        continue;
                    }
                    Matcher memInfo = MEM_INFO_PATTERN.matcher(line);
                    if (memInfo.find()) {
                        String key = memInfo.group(1).trim();
                        String value = memInfo.group(2);
                        if (memoryMappingInfo != null) {
                            memoryMappingInfo.setMemInfo(key, value);
                        }
                    }
                } catch (Throwable t) {
                    // Deliberately best-effort: a malformed line must not abort the scan.
                }
            }
        } catch (Throwable t) {
            // Best-effort: the process (or its smaps file) may vanish at any time.
        } finally {
            IOUtils.closeQuietly(in);
        }
    }
/** Placeholder for process's SMAPS information */
static class ProcessTreeSmapMemInfo {
private String pid;
private List<ProcessSmapMemoryInfo> memoryInfoList;
public ProcessTreeSmapMemInfo(String pid) {
this.pid = pid;
this.memoryInfoList = new LinkedList<ProcessSmapMemoryInfo>();
}
public List<ProcessSmapMemoryInfo> getMemoryInfoList() {
return memoryInfoList;
}
public String getPid() {
return pid;
}
public String toString() {
StringBuilder sb = new StringBuilder();
for (ProcessSmapMemoryInfo info : memoryInfoList) {
sb.append("\n");
sb.append(info);
}
return sb.toString();
}
}
/**
*
*
* <pre>
* Private Pages : Pages that were mapped only by the process
* Shared Pages : Pages that were shared with other processes
*
* Clean Pages : Pages that have not been modified since they were mapped
* Dirty Pages : Pages that have been modified since they were mapped
*
* Private RSS = Private Clean Pages + Private Dirty Pages
* Shared RSS = Shared Clean Pages + Shared Dirty Pages
* RSS = Private RSS + Shared RSS
* PSS = The count of all pages mapped uniquely by the process,
* plus a fraction of each shared page, said fraction to be
* proportional to the number of processes which have mapped the page.
*
* </pre>
*/
static class ProcessSmapMemoryInfo {
private int size;
private int rss;
private int pss;
private int sharedClean;
private int sharedDirty;
private int privateClean;
private int privateDirty;
private int anonymous;
private int referenced;
private String regionName;
private String permission;
public ProcessSmapMemoryInfo(String name) {
this.regionName = name;
}
public String getName() {
return regionName;
}
public void setPermission(String permission) {
this.permission = permission;
}
public String getPermission() {
return permission;
}
public int getSize() {
return size;
}
public int getRss() {
return rss;
}
public int getPss() {
return pss;
}
public int getSharedClean() {
return sharedClean;
}
public int getSharedDirty() {
return sharedDirty;
}
public int getPrivateClean() {
return privateClean;
}
public int getPrivateDirty() {
return privateDirty;
}
public int getReferenced() {
return referenced;
}
public int getAnonymous() {
return anonymous;
}
public void setMemInfo(String key, String value) {
MemInfo info = MemInfo.getMemInfoByName(key);
int val = 0;
try {
val = Integer.parseInt(value.trim());
} catch (NumberFormatException ne) {
return;
}
if (info == null) {
return;
}
switch (info) {
case SIZE:
size = val;
break;
case RSS:
rss = val;
break;
case PSS:
pss = val;
break;
case SHARED_CLEAN:
sharedClean = val;
break;
case SHARED_DIRTY:
sharedDirty = val;
break;
case PRIVATE_CLEAN:
privateClean = val;
break;
case PRIVATE_DIRTY:
privateDirty = val;
break;
case REFERENCED:
referenced = val;
break;
case ANONYMOUS:
anonymous = val;
break;
default:
break;
}
}
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("\t").append(this.getName()).append("\n");
sb.append("\t").append(MemInfo.SIZE.name + ":" + this.getSize()).append(" kB\n");
sb.append("\t").append(MemInfo.PSS.name + ":" + this.getPss()).append(" kB\n");
sb.append("\t").append(MemInfo.RSS.name + ":" + this.getRss()).append(" kB\n");
sb.append("\t")
.append(MemInfo.SHARED_CLEAN.name + ":" + this.getSharedClean())
.append(" kB\n");
sb.append("\t")
.append(MemInfo.SHARED_DIRTY.name + ":" + this.getSharedDirty())
.append(" kB\n");
sb.append("\t")
.append(MemInfo.PRIVATE_CLEAN.name + ":" + this.getPrivateClean())
.append(" kB\n");
sb.append("\t")
.append(MemInfo.PRIVATE_DIRTY.name + ":" + this.getPrivateDirty())
.append(" kB\n");
sb.append("\t")
.append(MemInfo.REFERENCED.name + ":" + this.getReferenced())
.append(" kB\n");
sb.append("\t")
.append(MemInfo.ANONYMOUS.name + ":" + this.getAnonymous())
.append(" kB\n");
return sb.toString();
}
}
/**
* Test the {@link ProcfsBasedProcessTree}
*
* @param args
*/
    public static void main(String[] args) {
        if (args.length != 1) {
            System.out.println("Provide <pid of process to monitor>");
            return;
        }
        System.out.println("Creating ProcfsBasedProcessTree for process " + args[0]);
        ProcfsBasedProcessTree procfsBasedProcessTree = new ProcfsBasedProcessTree(args[0]);
        procfsBasedProcessTree.updateProcessTree();
        System.out.println(procfsBasedProcessTree.getProcessTreeDump());
        System.out.println("Get cpu usage " + procfsBasedProcessTree.getCpuUsagePercent());
        try {
            // Sleep so we can compute the CPU usage from two samples taken some time apart.
            Thread.sleep(500L);
        } catch (InterruptedException e) {
            // do nothing
        }
        // Second sample: usage/memory figures below reflect the delta since the first scan.
        procfsBasedProcessTree.updateProcessTree();
        System.out.println(procfsBasedProcessTree.getProcessTreeDump());
        System.out.println("Cpu usage " + procfsBasedProcessTree.getCpuUsagePercent());
        System.out.println("Vmem usage in bytes " + procfsBasedProcessTree.getVirtualMemorySize());
        System.out.println("Rss mem usage in bytes " + procfsBasedProcessTree.getRssMemorySize());
    }
}
|
apache/flink | 35,035 | flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/optimize/program/DynamicPartitionPruningProgramTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.plan.optimize.program;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.TableConfig;
import org.apache.flink.table.api.config.OptimizerConfigOptions;
import org.apache.flink.table.catalog.ObjectPath;
import org.apache.flink.table.catalog.exceptions.TableNotExistException;
import org.apache.flink.table.catalog.stats.CatalogTableStatistics;
import org.apache.flink.table.planner.factories.TestValuesCatalog;
import org.apache.flink.table.planner.utils.BatchTableTestUtil;
import org.apache.flink.table.planner.utils.TableTestBase;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import java.util.ArrayList;
import java.util.List;
import static org.apache.flink.table.api.Expressions.col;
/**
* Tests for rules that extend {@link FlinkDynamicPartitionPruningProgram} to create {@link
* org.apache.flink.table.planner.plan.nodes.physical.batch.BatchPhysicalDynamicFilteringTableSourceScan}.
*/
class DynamicPartitionPruningProgramTest extends TableTestBase {
private final BatchTableTestUtil util = batchTestUtil(TableConfig.getDefault());
private final TestValuesCatalog catalog =
new TestValuesCatalog("testCatalog", "test_database", true);
    @BeforeEach
    void setup() {
        catalog.open();
        util.tableEnv().registerCatalog("testCatalog", catalog);
        util.tableEnv().useCatalog("testCatalog");
        TableConfig tableConfig = util.tableEnv().getConfig();
        // All tests in this class exercise dynamic partition pruning, so enable it globally.
        tableConfig.set(OptimizerConfigOptions.TABLE_OPTIMIZER_DYNAMIC_FILTERING_ENABLED, true);
        // partition fact table.
        util.tableEnv()
                .executeSql(
                        "CREATE TABLE fact_part (\n"
                                + "  id BIGINT,\n"
                                + "  name STRING,\n"
                                + "  amount BIGINT,\n"
                                + "  price BIGINT,\n"
                                + "  fact_date_sk BIGINT\n"
                                + ") PARTITIONED BY (fact_date_sk)\n"
                                + "WITH (\n"
                                + " 'connector' = 'values',\n"
                                + " 'runtime-source' = 'NewSource',\n"
                                + " 'partition-list' = 'fact_date_sk:1990;fact_date_sk:1991;fact_date_sk:1992',\n"
                                + " 'dynamic-filtering-fields' = 'fact_date_sk;amount',\n"
                                + " 'bounded' = 'true'\n"
                                + ")");
        // dim table (not partitioned; acts as the filtering/build side).
        util.tableEnv()
                .executeSql(
                        "CREATE TABLE dim (\n"
                                + "  id BIGINT,\n"
                                + "  male BOOLEAN,\n"
                                + "  amount BIGINT,\n"
                                + "  price BIGINT,\n"
                                + "  dim_date_sk BIGINT\n"
                                + ") WITH (\n"
                                + " 'connector' = 'values',\n"
                                + " 'runtime-source' = 'NewSource',\n"
                                + " 'bounded' = 'true'\n"
                                + ")");
    }
    @Test
    void testLargeQueryPlanShouldNotOutOfMemoryWithTableApi() {
        // TABLE_OPTIMIZER_DYNAMIC_FILTERING_ENABLED is already enabled.
        // Build 100 small sources and union them all to produce a very large plan;
        // planning must complete without exhausting memory.
        List<String> selectStmts = new ArrayList<>();
        for (int i = 0; i < 100; i++) {
            util.tableEnv()
                    .executeSql(
                            "CREATE TABLE IF NOT EXISTS table"
                                    + i
                                    + "(att STRING,filename STRING) "
                                    + "with("
                                    + " 'connector' = 'values', "
                                    + " 'runtime-source' = 'NewSource', "
                                    + " 'bounded' = 'true'"
                                    + ")");
            selectStmts.add("select att,filename from table" + i);
        }
        final String countName = "CNM";
        Table allUnionTable = util.tableEnv().sqlQuery(String.join(" UNION ALL ", selectStmts));
        // Self-join the union with its own aggregation to further blow up the plan size.
        Table res =
                allUnionTable.join(
                        allUnionTable
                                .groupBy(col("att"))
                                .select(col("att"), col("att").count().as(countName))
                                .filter(col(countName).isGreater(1))
                                .select(col("att").as("l_key")),
                        col("att").isEqual(col("l_key")));
        util.verifyExecPlan(res);
        // clear resources
        for (int i = 0; i < 100; i++) {
            util.tableEnv().executeSql("DROP TABLE IF EXISTS table" + i);
        }
    }
    @Test
    void testLargeQueryPlanShouldNotOutOfMemoryWithSqlApi() {
        // TABLE_OPTIMIZER_DYNAMIC_FILTERING_ENABLED is already enabled.
        // Same scenario as the Table API variant above, expressed purely in SQL.
        List<String> selectStmts = new ArrayList<>();
        for (int i = 0; i < 100; i++) {
            util.tableEnv()
                    .executeSql(
                            "CREATE TABLE IF NOT EXISTS table"
                                    + i
                                    + "(att STRING,filename STRING) "
                                    + "with("
                                    + " 'connector' = 'values', "
                                    + " 'runtime-source' = 'NewSource', "
                                    + " 'bounded' = 'true'"
                                    + ")");
            selectStmts.add("select att,filename from table" + i);
        }
        final String countName = "CNM";
        final String unionSelectStmts = String.join(" UNION ALL ", selectStmts);
        final String groupedUnionStmt =
                String.format(
                        "SELECT att as l_key, COUNT(att) AS %s "
                                + "FROM (%s) "
                                + "GROUP BY att "
                                + "HAVING COUNT(att) > 1 ",
                        countName, unionSelectStmts);
        final String joinedUnionStmt =
                String.format(
                        "SELECT * FROM (%s) as t1 INNER JOIN (%s) as t2 ON t1.att = t2.l_key",
                        unionSelectStmts, groupedUnionStmt);
        Table resultTable = util.tableEnv().sqlQuery(joinedUnionStmt);
        util.verifyExecPlan(resultTable);
        // clear resources
        for (int i = 0; i < 100; i++) {
            util.tableEnv().executeSql("DROP TABLE IF EXISTS table" + i);
        }
    }
    @Test
    void testDimTableFilteringFieldsNotInJoinKey() {
        // fact_part.id not in dynamic-filtering-fields, so dynamic partition pruning will not
        // succeed.
        String query =
                "Select * from dim, fact_part where fact_part.id = dim.id and dim.price < 500";
        util.verifyRelPlan(query);
    }
    @Test
    void testDimTableWithoutFilter() {
        // If dim side without filters, dynamic partition pruning will not succeed.
        String query =
                "Select * from dim, fact_part where fact_part.fact_date_sk = dim.dim_date_sk"
                        + " and fact_part.price > 100";
        util.verifyRelPlan(query);
    }
    @Test
    void testDimTableWithUnsuitableFilter() {
        // For filters in dim table side, they need to filter enough partitions. A filter such
        // as IS NOT NULL is too weak, so dynamic partition pruning will not succeed.
        String query =
                "Select * from dim join fact_part on fact_part.fact_date_sk = dim.dim_date_sk where dim.id is not null";
        util.verifyRelPlan(query);
    }
    @Test
    void testFactTableIsNotPartitionTable() {
        // non-partition fact table. Dynamic partition pruning will not succeed if fact side is not
        // a partition table.
        util.tableEnv()
                .executeSql(
                        "CREATE TABLE none_part_fact (\n"
                                + "  id BIGINT,\n"
                                + "  name STRING,\n"
                                + "  amount BIGINT,\n"
                                + "  price BIGINT,\n"
                                + "  fact_date_sk BIGINT\n"
                                + ") WITH (\n"
                                + " 'connector' = 'values',\n"
                                + " 'runtime-source' = 'NewSource',\n"
                                + " 'dynamic-filtering-fields' = 'fact_date_sk;amount',\n"
                                + " 'bounded' = 'true'\n"
                                + ")");
        String query =
                "Select * from dim, none_part_fact where none_part_fact.fact_date_sk = dim.dim_date_sk"
                        + " and dim.price < 500";
        util.verifyRelPlan(query);
    }
    @Test
    void testFactTableIsLegacySource() {
        // Legacy SourceFunction-based sources do not support dynamic filtering, so DPP
        // will not be applied here.
        util.tableEnv()
                .executeSql(
                        "CREATE TABLE legacy_source (\n"
                                + "  id BIGINT,\n"
                                + "  name STRING,\n"
                                + "  amount BIGINT,\n"
                                + "  price BIGINT,\n"
                                + "  fact_date_sk BIGINT\n"
                                + ") PARTITIONED BY (fact_date_sk)\n"
                                + "WITH (\n"
                                + " 'connector' = 'values',\n"
                                + " 'runtime-source' = 'SourceFunction',\n"
                                + " 'partition-list' = 'fact_date_sk:1990;fact_date_sk:1991;fact_date_sk:1992',\n"
                                + " 'dynamic-filtering-fields' = 'fact_date_sk;amount',\n"
                                + " 'bounded' = 'true'\n"
                                + ")");
        String query =
                "Select * from dim, legacy_source where legacy_source.fact_date_sk = dim.dim_date_sk"
                        + " and dim.price < 500";
        util.verifyRelPlan(query);
    }
    @Test
    void testDimTableWithFilterPushDown() {
        // Even though have filter push down, dynamic partition pruning will succeed.
        String query =
                "Select * from fact_part join (Select * from dim) t1"
                        + " on fact_part.fact_date_sk = dim_date_sk where t1.price < 500";
        util.verifyRelPlan(query);
    }
    @Test
    void testJoinKeyIsDynamicFilterFieldNotPartitionKey() {
        // Not only partition key, but also dynamic filtering field in join key will succeed in
        // dynamic partition pruning.
        String query =
                "Select * from dim, fact_part where fact_part.amount = dim.amount and dim.price < 500";
        util.verifyRelPlan(query);
    }
    @Test
    void testDynamicFilteringFactInRightRule() throws TableNotExistException {
        // Base rule: small dim statistics make it the build side; fact table on the right.
        CatalogTableStatistics tableStatistics = new CatalogTableStatistics(1, 1, 1, 1);
        catalog.alterTableStatistics(
                new ObjectPath("test_database", "dim"), tableStatistics, false);
        String query =
                "Select * from dim, fact_part where fact_part.fact_date_sk = dim.dim_date_sk and dim.price < 500";
        util.verifyRelPlan(query);
    }
    @Test
    void testDynamicFilteringFactInLeftRule() throws TableNotExistException {
        // Base rule: same as above, but with the fact table on the left of the join.
        CatalogTableStatistics tableStatistics = new CatalogTableStatistics(1, 1, 1, 1);
        catalog.alterTableStatistics(
                new ObjectPath("test_database", "dim"), tableStatistics, false);
        String query =
                "Select * from fact_part, dim where fact_part.fact_date_sk = dim.dim_date_sk and dim.price < 500";
        util.verifyRelPlan(query);
    }
    @Test
    void testDynamicFilteringFactInRightWithExchangeRule() {
        // Base rule, fact side behind an Exchange node, fact on the right.
        String query =
                "Select * from dim, fact_part where fact_part.fact_date_sk = dim.dim_date_sk and dim.price < 500";
        util.verifyRelPlan(query);
    }
    @Test
    void testDynamicFilteringFactInLeftWithExchangeRule() {
        // Base rule, fact side behind an Exchange node, fact on the left.
        String query =
                "Select * from fact_part, dim where fact_part.fact_date_sk = dim.dim_date_sk and dim.price < 500";
        util.verifyRelPlan(query);
    }
    @Test
    void testDynamicFilteringFactInRightWithCalcRule() throws TableNotExistException {
        // Base rule, with a Calc (fact_part.price > 200) on the fact side, fact on the right.
        CatalogTableStatistics tableStatistics = new CatalogTableStatistics(1, 1, 1, 1);
        catalog.alterTableStatistics(
                new ObjectPath("test_database", "dim"), tableStatistics, false);
        String query =
                "Select * from dim, fact_part where fact_part.fact_date_sk = dim.dim_date_sk"
                        + " and fact_part.price > 200 and dim.price < 500";
        util.verifyRelPlan(query);
    }
    @Test
    void testDynamicFilteringFactInLeftWithCalcRule() throws TableNotExistException {
        // Base rule, with a Calc on the fact side, fact on the left.
        CatalogTableStatistics tableStatistics = new CatalogTableStatistics(1, 1, 1, 1);
        catalog.alterTableStatistics(
                new ObjectPath("test_database", "dim"), tableStatistics, false);
        String query =
                "Select * from fact_part, dim where fact_part.fact_date_sk = dim.dim_date_sk"
                        + " and fact_part.price > 200 and dim.price < 500";
        util.verifyRelPlan(query);
    }
    @Test
    void testDynamicFilteringFactInRightWithExchangeAndCalcRule() {
        // Base rule, Exchange + Calc on the fact side, fact on the right.
        String query =
                "Select * from dim, fact_part where fact_part.fact_date_sk = dim.dim_date_sk"
                        + " and fact_part.price > 200 and dim.price < 500";
        util.verifyRelPlan(query);
    }
    @Test
    void testDynamicFilteringFactInLeftWithExchangeAndCalcRule() {
        // Base rule, Exchange + Calc on the fact side, fact on the left.
        String query =
                "Select * from fact_part, dim where fact_part.fact_date_sk = dim.dim_date_sk"
                        + " and fact_part.price > 200 and dim.price < 500";
        util.verifyRelPlan(query);
    }
    @Test
    void testComplexCalcInFactSide() {
        // Although the partition key is renamed through a projection, dynamic partition
        // pruning can still be successfully applied.
        String query =
                "Select * from dim join (select fact_date_sk as fact_date_sk1, price + 1 as price1 from fact_part) t1"
                        + " on t1.fact_date_sk1 = dim_date_sk and t1.price1 > 200 and dim.price < 500";
        util.verifyRelPlan(query);
    }
    @Test
    void testPartitionKeysIsComputeColumnsInFactSide() {
        // The partition key is transformed (fact_date_sk + 1), so dynamic filtering will not
        // succeed for this query.
        String query =
                "Select * from dim join (select fact_date_sk + 1 as fact_date_sk1, price + 1 as price1 from fact_part) t1"
                        + " on t1.fact_date_sk1 = dim_date_sk and t1.price1 > 200 and dim.price < 500";
        util.verifyRelPlan(query);
    }
    @Test
    void testPartitionKeysOrderIsChangedInFactSide() {
        // Reordering columns in the projection keeps DPP applicable; dynamic filtering will
        // succeed for this query.
        String query =
                "Select * from dim join (select fact_date_sk, id, name, amount, price from fact_part) t1"
                        + " on t1.fact_date_sk = dim_date_sk and t1.price > 200 and dim.price < 500";
        util.verifyRelPlan(query);
    }
    @Test
    void testPartitionKeysNameIsChangedInFactSide() {
        // Renaming the partition key in the projection keeps DPP applicable; dynamic filtering
        // will succeed for this query.
        String query =
                "Select * from dim join (select id, name, amount, price, fact_date_sk as fact_date_sk1 from fact_part) t1"
                        + " on t1.fact_date_sk1 = dim_date_sk and t1.price > 200 and dim.price < 500";
        util.verifyRelPlan(query);
    }
    @Test
    void testDynamicFilteringFieldIsComputeColumnsInFactSide() throws TableNotExistException {
        CatalogTableStatistics tableStatistics = new CatalogTableStatistics(1, 1, 1, 1);
        catalog.alterTableStatistics(
                new ObjectPath("test_database", "dim"), tableStatistics, false);
        // in this case. amount + 1 as amount is not a partition key, will succeed.
        String query =
                "Select * from dim join (select fact_date_sk, amount + 1 as amount from fact_part) t1 on"
                        + " fact_date_sk = dim_date_sk and t1.amount = dim.amount where dim.price < 500";
        util.verifyRelPlan(query);
    }
    @Test
    void testLeftOuterJoinWithFactInLeft() {
        // left outer join with fact in left will not succeed. Because if fact in left, filtering
        // condition is useless (all fact rows survive the join anyway).
        String query =
                "Select * from fact_part left outer join dim on fact_part.fact_date_sk = dim.dim_date_sk"
                        + " where dim.price < 500";
        util.verifyRelPlan(query);
    }
    @Test
    void testLeftOutJoinWithFactInRight() {
        // left outer join with fact in right will succeed.
        String query =
                "Select * from dim left outer join fact_part on fact_part.fact_date_sk = dim.dim_date_sk"
                        + " where dim.price < 500";
        util.verifyRelPlan(query);
    }
    @Test
    void testSemiJoin() {
        // Now dynamic partition pruning support semi join, this query will succeed.
        String query =
                "Select * from fact_part where fact_part.fact_date_sk in"
                        + " (select dim_date_sk from dim where dim.price < 500)";
        util.verifyRelPlan(query);
    }
    @Test
    void testFullOuterJoin() {
        // Now dynamic partition pruning don't support full outer join.
        String query =
                "Select * from fact_part full outer join"
                        + " (select * from dim where dim.price < 500) on fact_date_sk = dim_date_sk";
        util.verifyRelPlan(query);
    }
    @Test
    void testAntiJoin() {
        // Now dynamic partition prune don't support anti join.
        String query =
                "Select * from fact_part where not exists"
                        + " (select dim_date_sk from dim where dim.price < 500)";
        util.verifyRelPlan(query);
    }
    @Test
    void testMultiJoin() {
        // Another table joined with the fact table in addition to dim.
        util.tableEnv()
                .executeSql(
                        "CREATE TABLE sales (\n"
                                + "  id BIGINT,\n"
                                + "  amount BIGINT,\n"
                                + "  price BIGINT\n"
                                + ") WITH (\n"
                                + " 'connector' = 'values',\n"
                                + " 'bounded' = 'true'\n"
                                + ")");
        String query =
                "Select * from fact_part, dim, sales where fact_part.id = sales.id and"
                        + " fact_part.fact_date_sk = dim.dim_date_sk and dim.price < 500 and dim.amount > 100";
        util.verifyRelPlan(query);
    }
    @Test
    void testComplexDimSideWithJoinInDimSide() {
        // TODO: DPP will not succeed with a complex (multi-join) dim side yet.
        util.tableEnv()
                .executeSql(
                        "CREATE TABLE sales (\n"
                                + "  id BIGINT,\n"
                                + "  amount BIGINT,\n"
                                + "  price BIGINT\n"
                                + ") WITH (\n"
                                + " 'connector' = 'values',\n"
                                + " 'bounded' = 'true'\n"
                                + ")");
        util.tableEnv()
                .executeSql(
                        "CREATE TABLE item (\n"
                                + "  id BIGINT,\n"
                                + "  amount BIGINT,\n"
                                + "  price BIGINT\n"
                                + ") WITH (\n"
                                + " 'connector' = 'values',\n"
                                + " 'bounded' = 'true'\n"
                                + ")");
        String query =
                "Select * from fact_part join"
                        + " (select * from dim, sales, item where"
                        + " dim.id = sales.id and sales.id = item.id and dim.price < 500 and sales.price > 300) dimSide"
                        + " on fact_part.fact_date_sk = dimSide.dim_date_sk";
        util.verifyRelPlan(query);
    }
    @Test
    void testComplexDimSideWithAggInDimSide() {
        // A dim side containing an aggregation will not succeed in this version; it may be
        // improved later.
        util.tableEnv()
                .executeSql(
                        "CREATE TABLE sales (\n"
                                + "  id BIGINT,\n"
                                + "  amount BIGINT,\n"
                                + "  price BIGINT\n"
                                + ") WITH (\n"
                                + " 'connector' = 'values',\n"
                                + " 'bounded' = 'true'\n"
                                + ")");
        String query =
                "Select * from fact_part join"
                        + " (select dim_date_sk, sum(dim.price) from dim where"
                        + " dim.price < 500 group by dim_date_sk) dimSide"
                        + " on fact_part.fact_date_sk = dimSide.dim_date_sk";
        util.verifyRelPlan(query);
    }
    @Test
    void testDppWithoutJoinReorder() {
        // DPP will succeed even with join reorder disabled.
        String ddl =
                "CREATE TABLE test_database.item (\n"
                        + "  id BIGINT,\n"
                        + "  amount BIGINT,\n"
                        + "  price BIGINT\n"
                        + ") WITH (\n"
                        + " 'connector' = 'values',\n"
                        + " 'bounded' = 'true'\n"
                        + ")";
        util.tableEnv().executeSql(ddl);
        TableConfig tableConfig = util.tableEnv().getConfig();
        // Join reorder is explicitly disabled for this test.
        tableConfig.set(OptimizerConfigOptions.TABLE_OPTIMIZER_JOIN_REORDER_ENABLED, false);
        String query =
                "Select * from fact_part, item, dim"
                        + " where fact_part.fact_date_sk = dim.dim_date_sk"
                        + " and fact_part.id = item.id"
                        + " and dim.id = item.id "
                        + " and dim.price < 500 and dim.price > 300";
        util.verifyRelPlan(query);
    }
    @Test
    void testDppWithSubQuery() {
        // DPP will succeed when the dim-side filter comes from nested sub-queries.
        String ddl =
                "CREATE TABLE test_database.item (\n"
                        + "  id BIGINT,\n"
                        + "  amount BIGINT,\n"
                        + "  price BIGINT\n"
                        + ") WITH (\n"
                        + " 'connector' = 'values',\n"
                        + " 'bounded' = 'true'\n"
                        + ")";
        util.tableEnv().executeSql(ddl);
        TableConfig tableConfig = util.tableEnv().getConfig();
        // Join reorder is explicitly disabled for this test.
        tableConfig.set(OptimizerConfigOptions.TABLE_OPTIMIZER_JOIN_REORDER_ENABLED, false);
        String query =
                "Select * from fact_part, item, dim"
                        + " where fact_part.id = item.id"
                        + " and dim.price in (select price from dim where amount = (select amount from dim where amount = 2000))"
                        + " and fact_part.fact_date_sk = dim.dim_date_sk";
        util.verifyRelPlan(query);
    }
@Test
void testDppWithUnionInFactSide() {
// Dpp will success.
String ddl =
"CREATE TABLE test_database.item (\n"
+ " id BIGINT,\n"
+ " amount BIGINT,\n"
+ " price BIGINT\n"
+ ") WITH (\n"
+ " 'connector' = 'values',\n"
+ " 'bounded' = 'true'\n"
+ ")";
util.tableEnv().executeSql(ddl);
String query =
"Select * from (select id, fact_date_sk, amount + 1 as amount1 from fact_part where price = 1 union all "
+ "select id, fact_date_sk, amount + 1 from fact_part where price = 2) fact_part2, item, dim"
+ " where fact_part2.fact_date_sk = dim.dim_date_sk"
+ " and fact_part2.id = item.id"
+ " and dim.price < 500 and dim.price > 300";
util.verifyRelPlan(query);
}
@Test
void testDppWithAggInFactSideAndJoinKeyInGrouping() {
// Dpp will success
String ddl =
"CREATE TABLE test_database.item (\n"
+ " id BIGINT,\n"
+ " amount BIGINT,\n"
+ " price BIGINT\n"
+ ") WITH (\n"
+ " 'connector' = 'values',\n"
+ " 'bounded' = 'true'\n"
+ ")";
util.tableEnv().executeSql(ddl);
String query =
"Select * from (Select fact_date_sk, item.amount, sum(fact_part.price) from fact_part "
+ "join item on fact_part.id = item.id group by fact_date_sk, item.amount) t1 "
+ "join dim on t1.fact_date_sk = dim.dim_date_sk where dim.price < 500 and dim.price > 300 ";
util.verifyRelPlan(query);
}
@Test
void testDppWithAggInFactSideAndJoinKeyInGroupFunction() {
// Dpp will not success because join key in group function.
String ddl =
"CREATE TABLE test_database.item (\n"
+ " id BIGINT,\n"
+ " amount BIGINT,\n"
+ " price BIGINT\n"
+ ") WITH (\n"
+ " 'connector' = 'values',\n"
+ " 'bounded' = 'true'\n"
+ ")";
util.tableEnv().executeSql(ddl);
String query =
"Select * from (Select fact_part.id, item.amount, fact_part.name, sum(fact_part.price), sum(item.price), sum(fact_date_sk) as fact_date_sk1 "
+ "from fact_part join item on fact_part.id = item.id "
+ "group by fact_part.id, fact_part.name, item.amount) t1 "
+ "join dim on t1.fact_date_sk1 = dim.dim_date_sk where dim.price < 500 and dim.price > 300 ";
util.verifyRelPlan(query);
}
@Test
void testDppWithAggInFactSideWithAggPushDownEnable() {
// Dpp will not success while fact side source support agg push down and source agg push
// down enabled is true.
String ddl =
"CREATE TABLE test_database.item (\n"
+ " id BIGINT,\n"
+ " amount BIGINT,\n"
+ " price BIGINT\n"
+ ") WITH (\n"
+ " 'connector' = 'values',\n"
+ " 'bounded' = 'true'\n"
+ ")";
util.tableEnv().executeSql(ddl);
String query =
"Select * from (Select id, amount, fact_date_sk, count(name), sum(price) "
+ "from fact_part where fact_date_sk > 100 group by id, amount, fact_date_sk) t1 "
+ "join dim on t1.fact_date_sk = dim.dim_date_sk where dim.price < 500 and dim.price > 300 ";
util.verifyRelPlan(query);
}
@Test
void testDppWithAggInFactSideWithAggPushDownDisable() {
// Dpp will success while fact side source disables agg push down
util.tableEnv()
.executeSql(
"CREATE TABLE fact_part_without_agg_push_down (\n"
+ " id BIGINT,\n"
+ " name STRING,\n"
+ " amount BIGINT,\n"
+ " price BIGINT,\n"
+ " fact_date_sk BIGINT\n"
+ ") PARTITIONED BY (fact_date_sk)\n"
+ "WITH (\n"
+ " 'connector' = 'values',\n"
+ " 'runtime-source' = 'NewSource',\n"
+ " 'partition-list' = 'fact_date_sk:1990;fact_date_sk:1991;fact_date_sk:1992',\n"
+ " 'dynamic-filtering-fields' = 'fact_date_sk;amount',\n"
+ " 'bounded' = 'true',\n"
+ " 'enable-aggregate-push-down' = 'false'\n"
+ ")");
String query =
"Select * from (Select id, amount, fact_date_sk, count(name), sum(price) "
+ "from fact_part_without_agg_push_down where fact_date_sk > 100 group by id, amount, fact_date_sk) t1 "
+ "join dim on t1.fact_date_sk = dim.dim_date_sk where dim.price < 500 and dim.price > 300 ";
util.verifyRelPlan(query);
}
@Test
void testDPPWithFactSideJoinKeyChanged() {
// If partition keys changed in fact side. DPP factor will not success.
String ddl =
"CREATE TABLE test_database.item (\n"
+ " id BIGINT,\n"
+ " amount BIGINT,\n"
+ " price BIGINT\n"
+ ") WITH (\n"
+ " 'connector' = 'values',\n"
+ " 'bounded' = 'true'\n"
+ ")";
util.tableEnv().executeSql(ddl);
String query =
"Select * from (select fact_date_sk + 1 as fact_date_sk, id from fact_part) fact_part1 join item on "
+ "fact_part1.id = item.id"
+ " join dim on fact_part1.fact_date_sk = dim.dim_date_sk"
+ " where dim.price < 500 and dim.price > 300";
util.verifyRelPlan(query);
}
@Test
void testDPPWithDimSideJoinKeyChanged() {
// Although partition keys changed in dim side. DPP will success.
String ddl =
"CREATE TABLE test_database.item (\n"
+ " id BIGINT,\n"
+ " amount BIGINT,\n"
+ " price BIGINT\n"
+ ") WITH (\n"
+ " 'connector' = 'values',\n"
+ " 'bounded' = 'true'\n"
+ ")";
util.tableEnv().executeSql(ddl);
String query =
"Select * from fact_part join item on fact_part.id = item.id"
+ " join (select dim_date_sk + 1 as dim_date_sk, price from dim) dim1"
+ " on fact_part.fact_date_sk = dim1.dim_date_sk"
+ " where dim1.price < 500 and dim1.price > 300";
util.verifyRelPlan(query);
}
@Test
void testDPPWithJoinKeysNotIncludePartitionKeys() {
// If join keys of partition table join with dim table not include partition keys, dpp will
// not success.
String ddl =
"CREATE TABLE test_database.item (\n"
+ " id BIGINT,\n"
+ " amount BIGINT,\n"
+ " price BIGINT\n"
+ ") WITH (\n"
+ " 'connector' = 'values',\n"
+ " 'bounded' = 'true'\n"
+ ")";
util.tableEnv().executeSql(ddl);
String query =
"Select * from fact_part, item, dim"
+ " where fact_part.id = dim.id"
+ " and fact_part.id = item.id"
+ " and dim.id = item.id "
+ " and dim.price < 500 and dim.price > 300";
util.verifyRelPlan(query);
}
@Test
void testDppFactSideCannotReuseWithSameCommonSource() {
String query =
"SELECT * FROM(\n"
+ " Select fact_part.id, fact_part.price, fact_part.amount from fact_part join (Select * from dim) t1"
+ " on fact_part.fact_date_sk = dim_date_sk where t1.price < 500\n"
+ " UNION ALL Select fact_part.id, fact_part.price, fact_part.amount from fact_part)";
util.verifyExecPlan(query);
}
@Test
void testDimSideReuseAfterProjectionPushdown() {
util.tableEnv()
.executeSql(
"CREATE TABLE fact_part2 (\n"
+ " id BIGINT,\n"
+ " name STRING,\n"
+ " amount BIGINT,\n"
+ " price BIGINT,\n"
+ " fact_date_sk BIGINT\n"
+ ") PARTITIONED BY (fact_date_sk)\n"
+ "WITH (\n"
+ " 'connector' = 'values',\n"
+ " 'runtime-source' = 'NewSource',\n"
+ " 'partition-list' = 'fact_date_sk:1990;fact_date_sk:1991;fact_date_sk:1992',\n"
+ " 'dynamic-filtering-fields' = 'fact_date_sk;amount',\n"
+ " 'bounded' = 'true'\n"
+ ")");
String query =
"SELECT /*+ BROADCAST(dim) */ fact3.* FROM\n"
+ "(SELECT /*+ BROADCAST(dim) */ fact.id, fact.price, fact.amount FROM (\n"
+ " SELECT id, price, amount, fact_date_sk FROM fact_part "
+ " UNION ALL SELECT id, price, amount, fact_date_sk FROM fact_part2) fact, dim\n"
+ " WHERE fact_date_sk = dim_date_sk"
+ " and dim.price < 500 and dim.price > 300)\n fact3 JOIN dim"
+ " ON fact3.amount = dim.id AND dim.amount < 10";
util.verifyExecPlan(query);
}
}
|
oracle/graal | 35,768 | substratevm/src/com.oracle.graal.pointsto/src/com/oracle/graal/pointsto/ObjectScanner.java | /*
* Copyright (c) 2016, 2021, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package com.oracle.graal.pointsto;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Comparator;
import java.util.Deque;
import java.util.IdentityHashMap;
import java.util.Map;
import java.util.concurrent.ConcurrentLinkedDeque;
import java.util.concurrent.atomic.AtomicInteger;
import org.graalvm.word.WordBase;
import com.oracle.graal.pointsto.constraints.UnsupportedFeatureException;
import com.oracle.graal.pointsto.heap.HeapSnapshotVerifier;
import com.oracle.graal.pointsto.heap.ImageHeapArray;
import com.oracle.graal.pointsto.heap.ImageHeapConstant;
import com.oracle.graal.pointsto.heap.ImageHeapScanner;
import com.oracle.graal.pointsto.meta.AnalysisField;
import com.oracle.graal.pointsto.meta.AnalysisMethod;
import com.oracle.graal.pointsto.meta.AnalysisType;
import com.oracle.graal.pointsto.reports.ReportUtils;
import com.oracle.graal.pointsto.util.AnalysisError;
import com.oracle.graal.pointsto.util.CompletionExecutor;
import jdk.graal.compiler.graph.NodeSourcePosition;
import jdk.vm.ci.code.BytecodePosition;
import jdk.vm.ci.meta.Constant;
import jdk.vm.ci.meta.JavaConstant;
import jdk.vm.ci.meta.JavaKind;
import jdk.vm.ci.meta.ResolvedJavaField;
import jdk.vm.ci.meta.ResolvedJavaMethod;
/**
* Provides functionality for scanning constant objects.
*
* The scanning is done in parallel. The set of visited elements is a special data structure whose
* structure can be reused over multiple scanning iterations to save CPU resources. (For details
* {@link ReusableSet}).
*/
public class ObjectScanner {
    /** Indentation appended after a line break inside multi-line scan messages. */
    private static final String INDENTATION_AFTER_NEWLINE = "  ";

    /** The analysis context this scanner operates in. */
    protected final BigBang bb;
    /** Set of already-visited objects; its structure is reusable across scanning iterations. */
    private final ReusableSet scannedObjects;
    /** Executor for parallel scanning; {@code null} when the serial worklist is used instead. */
    private final CompletionExecutor executor;
    /** Serial fallback worklist; {@code null} when an executor is supplied. */
    private final Deque<WorklistEntry> worklist;
    /** Observer notified about every scanned field value, array element and constant. */
    private final ObjectScanningObserver scanningObserver;
public ObjectScanner(BigBang bb, CompletionExecutor executor, ReusableSet scannedObjects, ObjectScanningObserver scanningObserver) {
this.bb = bb;
this.scanningObserver = scanningObserver;
if (executor != null) {
this.executor = executor;
this.worklist = null;
} else {
this.executor = null;
this.worklist = new ConcurrentLinkedDeque<>();
}
this.scannedObjects = scannedObjects;
}
    /** Scans all embedded roots known to the universe, plus the static root fields. */
    public void scanBootImageHeapRoots() {
        scanBootImageHeapRoots(bb.getUniverse().getEmbeddedRoots());
    }

    /** Scans the given embedded constants (plus static root fields) in no particular order. */
    public void scanBootImageHeapRoots(Map<Constant, Object> embeddedConstants) {
        scanBootImageHeapRoots(null, null, embeddedConstants);
    }

    /** Scans the universe roots using the given orderings, e.g. for deterministic reporting. */
    public void scanBootImageHeapRoots(Comparator<AnalysisField> fieldComparator, Comparator<Object> embeddedRootComparator) {
        scanBootImageHeapRoots(fieldComparator, embeddedRootComparator, bb.getUniverse().getEmbeddedRoots());
    }
public void scanBootImageHeapRoots(Comparator<AnalysisField> fieldComparator, Comparator<Object> embeddedRootComparator, Map<Constant, Object> embeddedRoots) {
// scan the original roots
// the original roots are all the static fields, of object type, that were accessed
Collection<AnalysisField> fields = bb.getUniverse().getFields();
if (fieldComparator != null) {
ArrayList<AnalysisField> fieldsList = new ArrayList<>(fields);
fieldsList.sort(fieldComparator);
fields = fieldsList;
}
for (AnalysisField field : fields) {
if (Modifier.isStatic(field.getModifiers()) && field.isRead()) {
execute(() -> scanStaticFieldRoot(field));
}
}
// scan the constant nodes
if (embeddedRootComparator != null) {
embeddedRoots.entrySet().stream()
.sorted(Map.Entry.comparingByValue(embeddedRootComparator))
.filter(entry -> entry.getKey() instanceof JavaConstant)
.forEach(entry -> execute(() -> scanEmbeddedRoot((JavaConstant) entry.getKey(), entry.getValue())));
} else {
embeddedRoots.entrySet().stream()
.filter(entry -> entry.getKey() instanceof JavaConstant)
.forEach(entry -> execute(() -> scanEmbeddedRoot((JavaConstant) entry.getKey(), entry.getValue())));
}
finish();
}
private void execute(Runnable runnable) {
if (executor != null) {
executor.execute(debug -> runnable.run());
} else {
runnable.run();
}
}
    /**
     * Scans one constant embedded in a method graph. Unsupported-feature and type-not-found
     * errors are recorded against the embedding method instead of being propagated.
     *
     * @param root the embedded constant
     * @param position the position (e.g. graph node source position or method) where it was found
     */
    protected void scanEmbeddedRoot(JavaConstant root, Object position) {
        if (root instanceof ImageHeapConstant ihc && ihc.getHostedObject() == null) {
            /* Skip embedded simulated constants. */
            return;
        }
        EmbeddedRootScan reason = new EmbeddedRootScan(position, root);
        try {
            /* Notify the observer first, then follow the constant's references. */
            scanningObserver.forEmbeddedRoot(root, reason);
            scanConstant(root, reason);
        } catch (UnsupportedFeatureException | AnalysisError.TypeNotFoundError ex) {
            bb.getUnsupportedFeatures().addMessage(reason.toString(), reason.getMethod(), ex.getMessage(), null, ex);
        }
    }
/**
* Scans the value of a root field.
*
* @param field the scanned root field
*/
protected final void scanStaticFieldRoot(AnalysisField field) {
if (!field.installableInLayer()) {
// skip fields not installable in this layer
return;
}
scanField(field, null, null);
}
    /**
     * Scans the value of a field given a receiver object: reads the (hosted) value, notifies the
     * observer according to the value's kind, and then follows object references. Errors of
     * unsupported-feature kind are recorded rather than propagated.
     *
     * @param field the scanned field
     * @param receiver the receiver object, {@code null} for static fields
     * @param prevReason the reason this field is scanned, used to build error backtraces
     */
    protected void scanField(AnalysisField field, JavaConstant receiver, ScanReason prevReason) {
        ScanReason reason = new FieldScan(field, receiver, prevReason);
        try {
            if (!bb.getUniverse().getHeapScanner().isValueAvailable(field, receiver)) {
                /* The value is not available yet. */
                return;
            }
            assert isUnwrapped(receiver) : receiver;
            JavaConstant fieldValue = readFieldValue(field, receiver);
            if (fieldValue instanceof ImageHeapConstant ihc && ihc.getHostedObject() == null) {
                /* Skip reachable simulated constants. */
                return;
            }
            if (fieldValue == null) {
                // A null JavaConstant (as opposed to a null-valued constant) means the read failed.
                StringBuilder backtrace = new StringBuilder();
                buildObjectBacktrace(bb, reason, backtrace);
                throw AnalysisError.shouldNotReachHere("Could not find field " + field.format("%H.%n") +
                                (receiver == null ? "" : " on " + constantType(bb, receiver).toJavaName()) +
                                System.lineSeparator() + backtrace);
            }
            if (fieldValue.getJavaKind() == JavaKind.Object && bb.getHostVM().isRelocatedPointer(fieldValue)) {
                scanningObserver.forRelocatedPointerFieldValue(receiver, field, fieldValue, reason);
            } else if (fieldValue.isNull()) {
                scanningObserver.forNullFieldValue(receiver, field, reason);
            } else if (fieldValue.getJavaKind() == JavaKind.Object) {
                /* First notify the observer about the field value... */
                scanningObserver.forNonNullFieldValue(receiver, field, fieldValue, reason);
                /*
                 * ... and only then scan the new value, i.e., follow its references. The order is
                 * important for observers that expect to see the receiver before any of its
                 * referenced elements are being scanned.
                 */
                scanConstant(fieldValue, reason);
            } else if (fieldValue.getJavaKind().isPrimitive()) {
                scanningObserver.forPrimitiveFieldValue(receiver, field, fieldValue, reason);
            }
        } catch (UnsupportedFeatureException | AnalysisError.TypeNotFoundError ex) {
            unsupportedFeatureDuringFieldScan(bb, field, receiver, ex, reason);
        } catch (AnalysisError analysisError) {
            // Unwrap UnsupportedFeatureException that was wrapped into an AnalysisError.
            if (analysisError.getCause() instanceof UnsupportedFeatureException ex) {
                unsupportedFeatureDuringFieldScan(bb, field, receiver, ex, reason);
            } else {
                throw analysisError;
            }
        }
    }
    /**
     * Reads the hosted value of {@code field} on {@code receiver}, applying object replacement.
     * Must only be called with hosted receivers.
     */
    protected JavaConstant readFieldValue(AnalysisField field, JavaConstant receiver) {
        /* The object scanner processes hosted values. We must not see shadow heap values here. */
        AnalysisError.guarantee(!(receiver instanceof ImageHeapConstant));
        return bb.getUniverse().getHostedValuesProvider().readFieldValueWithReplacement(field, receiver);
    }
/**
* Must unwrap the receiver if it is an ImageHeapConstant to scan the hosted value, if any, for
* verification, otherwise the verification just compares shadow heap with shadow heap for
* embedded roots, which is completely useless.
*/
private static JavaConstant maybeUnwrap(JavaConstant receiver) {
if (receiver instanceof ImageHeapConstant heapConstant && heapConstant.getHostedObject() != null) {
return heapConstant.getHostedObject();
}
return receiver;
}
private static boolean isUnwrapped(JavaConstant receiver) {
if (receiver instanceof ImageHeapConstant heapConstant) {
// Non hosted backed ImageHeapConstant is considered unwrapped
return heapConstant.getHostedObject() == null;
}
return true;
}
    /**
     * Scans a constant array, one element at a time, notifying the observer for each element.
     * Handles both shadow-heap arrays ({@link ImageHeapArray}) and hosted object arrays.
     *
     * @param array the array to be scanned
     * @param prevReason the reason the array is scanned, used for error backtraces
     */
    protected final void scanArray(JavaConstant array, ScanReason prevReason) {
        assert isUnwrapped(array) : array;
        AnalysisType arrayType = bb.getMetaAccess().lookupJavaType(array);
        ScanReason reason = new ArrayScan(arrayType, array, prevReason);
        if (array instanceof ImageHeapConstant) {
            /* Shadow-heap array: only object arrays have elements worth scanning. */
            if (!arrayType.getComponentType().isPrimitive()) {
                ImageHeapArray heapArray = (ImageHeapArray) array;
                for (int idx = 0; idx < heapArray.getLength(); idx++) {
                    final JavaConstant element = heapArray.readElementValue(idx);
                    if (element.isNull()) {
                        scanningObserver.forNullArrayElement(array, arrayType, idx, reason);
                    } else {
                        scanArrayElement(array, arrayType, reason, idx, element);
                    }
                }
            }
        } else {
            /* Hosted array: convert each element to a constant, applying object replacement. */
            Object[] arrayObject = (Object[]) constantAsObject(bb, array);
            for (int idx = 0; idx < arrayObject.length; idx++) {
                Object e = arrayObject[idx];
                if (e == null) {
                    scanningObserver.forNullArrayElement(array, arrayType, idx, reason);
                } else {
                    try {
                        JavaConstant element = bb.getUniverse().replaceObjectWithConstant(e);
                        scanArrayElement(array, arrayType, reason, idx, element);
                    } catch (UnsupportedFeatureException | AnalysisError.TypeNotFoundError ex) {
                        unsupportedFeatureDuringConstantScan(bb, bb.getUniverse().getHostedValuesProvider().forObject(e), ex, reason);
                    }
                }
            }
        }
    }
    /** Notifies the observer about one non-null array element, then follows its references. */
    private void scanArrayElement(JavaConstant array, AnalysisType arrayType, ScanReason reason, int idx, JavaConstant elementConstant) {
        AnalysisType elementType = bb.getMetaAccess().lookupJavaType(elementConstant);
        /* First notify the observer about the array element value... */
        scanningObserver.forNonNullArrayElement(array, arrayType, elementConstant, elementType, idx, reason);
        /*
         * ... and only then scan the new value, i.e., follow its references. The order is important
         * for observers that expect to see the receiver before any of its referenced elements are
         * being scanned.
         */
        scanConstant(elementConstant, reason);
    }
    /**
     * Scans a constant: notifies the observer and schedules scanning of the constant's fields or
     * array elements. Null, primitive and word-typed values are ignored. Each distinct object is
     * processed at most once, guarded by {@code scannedObjects}.
     */
    public void scanConstant(JavaConstant value, ScanReason reason) {
        if (value.isNull() || value.getJavaKind().isPrimitive() || bb.getMetaAccess().isInstanceOf(value, WordBase.class)) {
            return;
        }
        JavaConstant unwrappedValue = maybeUnwrap(value);
        /* Deduplicate on the hosted object when available, else on the shadow-heap constant. */
        Object valueObj = unwrappedValue instanceof ImageHeapConstant ? unwrappedValue : constantAsObject(bb, unwrappedValue);
        if (scannedObjects.putAndAcquire(valueObj) == null) {
            try {
                scanningObserver.forScannedConstant(unwrappedValue, reason);
            } finally {
                // The follow-up scan is scheduled even if the observer throws.
                scannedObjects.release(valueObj);
                WorklistEntry worklistEntry = new WorklistEntry(unwrappedValue, reason);
                if (executor != null) {
                    executor.execute(debug -> doScan(worklistEntry));
                } else {
                    worklist.push(worklistEntry);
                }
            }
        }
    }
    /**
     * Use the constant hashCode as a key for the unsupported feature to register only one error
     * message if the constant is reachable from multiple places.
     */
    public static void unsupportedFeatureDuringConstantScan(BigBang bb, JavaConstant constant, Throwable e, ScanReason reason) {
        unsupportedFeature(bb, String.valueOf(receiverHashCode(constant)), e.getMessage(), reason);
    }

    /**
     * Use the field format and receiver hashCode as a key for the unsupported feature to register
     * only one error message if the value is reachable from multiple places. For example both the
     * heap scanning and the heap verification would scan a field that contains an illegal value.
     */
    public static void unsupportedFeatureDuringFieldScan(BigBang bb, AnalysisField field, JavaConstant receiver, Throwable e, ScanReason reason) {
        unsupportedFeature(bb, (receiver != null ? receiverHashCode(receiver) + "_" : "") + field.format("%H.%n"), e.getMessage(), reason);
    }

    /**
     * Records an unsupported feature hit while constant-folding a field load during parsing of
     * {@code parsedMethod} at {@code bci}; keyed like {@link #unsupportedFeatureDuringFieldScan}.
     */
    public static void unsupportedFeatureDuringFieldFolding(BigBang bb, AnalysisField field, JavaConstant receiver, Throwable e, AnalysisMethod parsedMethod, int bci) {
        ScanReason reason = new FieldConstantFold(field, parsedMethod, bci, receiver, new MethodParsing(parsedMethod));
        unsupportedFeature(bb, (receiver != null ? receiverHashCode(receiver) + "_" : "") + field.format("%H.%n"), e.getMessage(), reason);
    }
/**
* The {@link ImageHeapScanner} may find issue when scanning the {@link ImageHeapConstant}
* whereas the {@link HeapSnapshotVerifier} may find issues when scanning the original hosted
* objects. Use a consistent hash code as a key to map them to the same error message.
*/
private static int receiverHashCode(JavaConstant receiver) {
if (receiver instanceof ImageHeapConstant) {
JavaConstant hostedObject = ((ImageHeapConstant) receiver).getHostedObject();
if (hostedObject != null) {
return hostedObject.hashCode();
}
}
return receiver.hashCode();
}
    /**
     * Registers one unsupported-feature message under {@code key}, attaching a backtrace of the
     * scan reasons and, when available, the method that embeds the root constant.
     */
    public static void unsupportedFeature(BigBang bb, String key, String message, ScanReason reason) {
        StringBuilder objectBacktrace = new StringBuilder();
        AnalysisMethod method = buildObjectBacktrace(bb, reason, objectBacktrace);
        bb.getUnsupportedFeatures().addMessage(key, method, message, objectBacktrace.toString());
    }
    /** Indentation used for each backtrace line appended by buildObjectBacktrace. */
    static final String indent = "  ";
public static AnalysisMethod buildObjectBacktrace(BigBang bb, ScanReason reason, StringBuilder objectBacktrace) {
return buildObjectBacktrace(bb, reason, objectBacktrace, "Object was reached by");
}
public static AnalysisMethod buildObjectBacktrace(BigBang bb, ScanReason reason, StringBuilder objectBacktrace, String header) {
ScanReason cur = reason;
objectBacktrace.append(header);
objectBacktrace.append(System.lineSeparator()).append(indent).append(cur.toString(bb));
ScanReason rootReason = cur;
cur = cur.getPrevious();
while (cur != null) {
objectBacktrace.append(System.lineSeparator()).append(indent).append(cur.toString(bb));
ScanReason previous = cur.getPrevious();
rootReason = previous;
cur = previous;
}
if (rootReason instanceof EmbeddedRootScan) {
/* The root constant was found during scanning of 'method'. */
return ((EmbeddedRootScan) rootReason).getMethod();
}
/* The root constant was not found during method scanning. */
return null;
}
    /** Returns a human-readable description of {@code constant}, including its toString value. */
    public static String asString(BigBang bb, JavaConstant constant) {
        return asString(bb, constant, true);
    }

    /**
     * Returns a human-readable description of {@code constant}: type name plus identity hash of
     * the hosted object, optionally followed by a length-limited toString value.
     *
     * @param appendToString whether to append the (truncated) toString of the hosted object
     */
    public static String asString(BigBang bb, JavaConstant constant, boolean appendToString) {
        if (constant == null || constant.isNull()) {
            return "null";
        }
        AnalysisType type = bb.getMetaAccess().lookupJavaType(constant);
        JavaConstant hosted = constant;
        if (constant instanceof ImageHeapConstant heapConstant) {
            JavaConstant hostedObject = heapConstant.getHostedObject();
            if (hostedObject == null) {
                // Simulated constant without a hosted counterpart: describe it by its class.
                // Checkstyle: allow Class.getSimpleName
                return constant.getClass().getSimpleName() + "<" + type.toJavaName() + ">";
                // Checkstyle: disallow Class.getSimpleName
            }
            hosted = hostedObject;
        }
        if (hosted.getJavaKind().isPrimitive()) {
            return hosted.toValueString();
        }
        Object obj = constantAsObject(bb, hosted);
        String str = type.toJavaName() + '@' + Integer.toHexString(System.identityHashCode(obj));
        if (appendToString) {
            try {
                str += ": " + limit(obj.toString(), 80).replace(System.lineSeparator(), "");
            } catch (Throwable e) {
                // ignore any error in creating the string representation
            }
        }
        return str;
    }
public static String limit(String value, int length) {
StringBuilder buf = new StringBuilder(value);
if (buf.length() > length) {
buf.setLength(length);
buf.append("...");
}
return buf.toString();
}
    /**
     * Processes one constant entry. If the constant has an instance class then it scans its fields,
     * using the constant as a receiver. If the constant has an array class then it scans the array
     * element constants.
     */
    private void doScan(WorklistEntry entry) {
        try {
            AnalysisType type = bb.getMetaAccess().lookupJavaType(entry.constant);
            type.registerAsReachable(entry.reason);
            if (type.isInstanceClass()) {
                /* Scan constant's instance fields. */
                for (ResolvedJavaField javaField : type.getInstanceFields(true)) {
                    AnalysisField field = (AnalysisField) javaField;
                    if (field.isRead()) {
                        assert !Modifier.isStatic(field.getModifiers()) : field;
                        scanField(field, entry.constant, entry.reason);
                    }
                }
            } else if (type.isArray() && type.getComponentType().getJavaKind() == JavaKind.Object) {
                /* Scan the array elements. Primitive arrays have no references to follow. */
                scanArray(entry.constant, entry.reason);
            }
        } catch (UnsupportedFeatureException | AnalysisError.TypeNotFoundError ex) {
            unsupportedFeatureDuringConstantScan(bb, entry.constant, ex, entry.reason);
        }
    }
/**
* Process all consequences for scanned fields. This is done in parallel. Buckets of fields are
* emitted into the {@code exec}, to mitigate the calling overhead.
*
* Processing fields can issue new fields to be scanned so we always add the check for workitems
* at the end of the worklist.
*/
protected void finish() {
if (executor == null) {
while (!worklist.isEmpty()) {
int size = worklist.size();
for (int i = 0; i < size; i++) {
doScan(worklist.remove());
}
}
}
}
    /** Returns the analysis type of {@code constant}. */
    public static AnalysisType constantType(BigBang bb, JavaConstant constant) {
        return bb.getMetaAccess().lookupJavaType(constant);
    }

    /** Returns the hosted object wrapped by {@code constant}. */
    public static Object constantAsObject(BigBang bb, JavaConstant constant) {
        return bb.getSnippetReflectionProvider().asObject(Object.class, constant);
    }
    /** A pending scan task: a constant plus the reason it is being scanned. */
    static class WorklistEntry {
        /** The constant to be scanned. */
        private final JavaConstant constant;
        /**
         * The reason this constant was scanned, i.e., either reached from a method scan, from a
         * static field, from an instance field resolved on another constant, or from a constant
         * array indexing.
         */
        private final ScanReason reason;

        WorklistEntry(JavaConstant constant, ScanReason reason) {
            this.constant = constant;
            this.reason = reason;
        }

        public ScanReason getReason() {
            return reason;
        }
    }
    /**
     * Describes why a constant is being scanned. Reasons form a chain via {@link #getPrevious()}
     * that can be rendered as a backtrace for error reporting.
     */
    public abstract static class ScanReason {
        /** The reason that led to this one; may be null or {@link OtherReason#UNKNOWN}. */
        final ScanReason previous;
        /** The constant this reason refers to; may be null for synthetic reasons. */
        final JavaConstant constant;

        protected ScanReason(ScanReason previous, JavaConstant constant) {
            this.previous = previous;
            this.constant = constant;
        }

        public ScanReason getPrevious() {
            /*
             * Not all created heap constants can become reachable, hence some of them start with an
             * unknown reachability reason. The reason becomes available only when the constant is
             * linked in the object graph, i.e., it becomes reachable. If the ScanReason object was
             * created before the constant was marked as reachable then its previous field is set to
             * UNKNOWN. If that's the case fallback to the constant reachability reason.
             */
            if (previous == OtherReason.UNKNOWN) {
                if (constant instanceof ImageHeapConstant heapConstant && heapConstant.getReachableReason() instanceof ScanReason parentReason) {
                    return parentReason;
                }
            }
            return previous;
        }

        @SuppressWarnings("unused")
        public String toString(BigBang bb) {
            return toString();
        }
    }
    /** A free-form scan reason described only by a message string. */
    public static class OtherReason extends ScanReason {
        /** Scan performed after the heap was sealed. */
        public static final ScanReason LATE_SCAN = new OtherReason("late scan, after sealing heap");
        /** Placeholder for constants whose reachability reason is not yet known. */
        public static final ScanReason UNKNOWN = new OtherReason("manually created constant");
        /** Scan triggered explicitly, outside the regular analysis iteration. */
        public static final ScanReason RESCAN = new OtherReason("manually triggered rescan");
        /** Scan originating from a class (hub) constant. */
        public static final ScanReason HUB = new OtherReason("scanning a class constant");
        /** Constant persisted from a previous layer. */
        public static final ScanReason PERSISTED = new OtherReason("persisted");

        final String reason;

        public OtherReason(String reason) {
            super(null, null);
            this.reason = reason;
        }

        @Override
        public String toString() {
            return reason;
        }
    }
    /** Reason recording that a constant was reached by reading a field. */
    public static class FieldScan extends ScanReason {
        final AnalysisField field;

        /** Infers a previous reason from the receiver constant or the field's read-by reason. */
        private static ScanReason previous(AnalysisField field, JavaConstant receiver) {
            /*
             * Since there is no previous reason we try to infer one either from the receiver
             * constant or from the field read-by reason.
             */
            Object reason;
            if (receiver instanceof ImageHeapConstant heapConstant) {
                AnalysisError.guarantee(heapConstant.isReachable());
                reason = heapConstant.getReachableReason();
            } else {
                reason = field.getReadBy();
            }
            if (reason instanceof ScanReason scanReason) {
                return scanReason;
            } else if (reason instanceof BytecodePosition position) {
                ResolvedJavaMethod readingMethod = position.getMethod();
                return new MethodParsing((AnalysisMethod) readingMethod);
            } else if (reason instanceof AnalysisMethod method) {
                return new MethodParsing(method);
            } else if (reason != null) {
                // Fallback: wrap any other reason object into a textual reason.
                return new OtherReason("registered as read because: " + reason);
            }
            return null;
        }

        public FieldScan(AnalysisField field) {
            this(field, null, previous(field, null));
        }

        public FieldScan(AnalysisField field, JavaConstant receiver) {
            this(field, receiver, previous(field, receiver));
        }

        public FieldScan(AnalysisField field, JavaConstant receiver, ScanReason previous) {
            super(previous, receiver);
            this.field = field;
        }

        public AnalysisField getField() {
            return field;
        }

        /** Returns a source location for the field read, derived from the read-by reason. */
        public String location() {
            Object readBy = field.getReadBy();
            if (readBy instanceof BytecodePosition) {
                BytecodePosition position = (BytecodePosition) readBy;
                return position.getMethod().asStackTraceElement(position.getBCI()).toString();
            } else if (readBy instanceof AnalysisMethod) {
                return ((AnalysisMethod) readBy).asStackTraceElement(0).toString();
            } else {
                return "<unknown-location>";
            }
        }

        @Override
        public String toString(BigBang bb) {
            if (field.isStatic()) {
                return "reading static field " + field.format("%H.%n") + System.lineSeparator() + "    at " + location();
            } else {
                /* Instance field scans must have a receiver, hence the 'of'. */
                return "reading field " + field.format("%H.%n") + " of constant " + System.lineSeparator() + INDENTATION_AFTER_NEWLINE + asString(bb, constant);
            }
        }

        @Override
        public String toString() {
            return field.format("%H.%n");
        }
    }
    /** Reason recording that a constant was reached while constant-folding a field load. */
    public static class FieldConstantFold extends ScanReason {
        final AnalysisField field;
        /** The method being parsed when the fold was attempted. */
        private final AnalysisMethod parsedMethod;
        /** Bytecode index of the folded field access within {@link #parsedMethod}. */
        private final int bci;

        public FieldConstantFold(AnalysisField field, AnalysisMethod parsedMethod, int bci, JavaConstant receiver, ScanReason previous) {
            super(previous, receiver);
            this.field = field;
            this.parsedMethod = parsedMethod;
            this.bci = bci;
        }

        @Override
        public String toString(BigBang bb) {
            StackTraceElement location = parsedMethod.asStackTraceElement(bci);
            if (field.isStatic()) {
                return "trying to constant fold static field " + field.format("%H.%n") + System.lineSeparator() + "    at " + location;
            } else {
                /* Instance field scans must have a receiver, hence the 'of'. */
                return "trying to constant fold field " + field.format("%H.%n") + " of constant " + System.lineSeparator() +
                                INDENTATION_AFTER_NEWLINE + asString(bb, constant) + System.lineSeparator() + "    at " + location;
            }
        }

        @Override
        public String toString() {
            return field.format("%H.%n");
        }
    }
/** Scan reason: a constant was reached because a method is being parsed. */
public static class MethodParsing extends ScanReason {
    final AnalysisMethod method;

    public MethodParsing(AnalysisMethod method) {
        this(method, null);
    }

    public MethodParsing(AnalysisMethod method, ScanReason previous) {
        super(previous, null);
        this.method = method;
    }

    /** Returns the method whose parsing triggered the scan. */
    public AnalysisMethod getMethod() {
        return method;
    }

    @Override
    public String toString() {
        /* Header line followed by the chain of parsing-context frames. */
        String header = "parsing method " + method.asStackTraceElement(0) + " reachable via the parsing context";
        return header + ReportUtils.parsingContext(method, indent + indent);
    }
}
/** Scan reason: a constant was reached by indexing into an array constant. */
public static class ArrayScan extends ScanReason {
    final AnalysisType arrayType;
    final int idx;

    /** Convenience constructor for scans where the element index is unknown (-1). */
    public ArrayScan(AnalysisType arrayType, JavaConstant array, ScanReason previous) {
        this(arrayType, array, previous, -1);
    }

    public ArrayScan(AnalysisType arrayType, JavaConstant array, ScanReason previous, int idx) {
        super(previous, array);
        this.arrayType = arrayType;
        this.idx = idx;
    }

    @Override
    public String toString(BigBang bb) {
        /* Only mention the index when one was recorded. */
        String indexSuffix = "";
        if (idx != -1) {
            indexSuffix = " at index " + idx;
        }
        return "indexing into array " + asString(bb, constant) + indexSuffix;
    }

    @Override
    public String toString() {
        return arrayType.toJavaName(true);
    }
}
/**
 * Scan reason: a root constant embedded in the graph (e.g. via a node source position or
 * directly by a method) is being scanned.
 */
public static class EmbeddedRootScan extends ScanReason {
    private final BytecodePosition position;
    private final AnalysisMethod method;
    private final Object reason;

    public EmbeddedRootScan(Object reason, JavaConstant root) {
        this(root, reason, rootScanReason(reason));
    }

    private EmbeddedRootScan(JavaConstant root, Object reason, ScanReason previous) {
        super(previous, root);
        this.reason = reason;
        /* Derive position/method from the reason when its type carries that information. */
        if (reason instanceof NodeSourcePosition sourcePosition) {
            this.position = sourcePosition;
            this.method = (AnalysisMethod) sourcePosition.getMethod();
        } else if (reason instanceof AnalysisMethod analysisMethod) {
            this.method = analysisMethod;
            this.position = null;
        } else {
            this.method = null;
            this.position = null;
        }
    }

    /** Returns the raw embedding reason this scan was created from. */
    public Object getReason() {
        return reason;
    }

    /** Returns the embedding method, or {@code null} if none could be derived. */
    public AnalysisMethod getMethod() {
        return method;
    }

    @Override
    public String toString(BigBang bb) {
        return "scanning root constant " + asString(bb, constant) + " embedded in" + System.lineSeparator() + INDENTATION_AFTER_NEWLINE + asStackTraceElement();
    }

    @Override
    public String toString() {
        return asStackTraceElement();
    }

    /* Builds the parent reason for the embedding context. */
    private static ScanReason rootScanReason(Object reason) {
        if (reason instanceof NodeSourcePosition position) {
            return new MethodParsing((AnalysisMethod) position.getMethod());
        }
        return new OtherReason(reason.toString());
    }

    /* Formats the embedding location; position and method are set together, so when a
     * position exists its method is non-null. */
    private String asStackTraceElement() {
        if (position == null && method == null) {
            return "<unknown>";
        }
        int bci = position != null ? position.getBCI() : 0;
        return String.valueOf(method.asStackTraceElement(bci));
    }
}
/**
 * This data structure atomically keeps track of whether an object has already been put
 * into the set. It takes advantage of the fact that each typeflow iteration adds more
 * objects to the set but never removes elements. Since insertions into maps are
 * expensive, we keep the map around over multiple iterations and only update the
 * AtomicInteger sequence number after each iteration.
 *
 * Furthermore, it serializes concurrent puts of the same object: the first caller to put
 * an object holds it until {@code release} is called with that object. Each object
 * therefore goes through two states:
 * <li>In flight: counter = sequence - 1
 * <li>Committed: counter = sequence
 *
 * While an object is in flight, all other calls to {@code putAndAcquire} with that object
 * block until {@code release} is called with it.
 */
public static final class ReusableSet {
    /**
     * The storage of atomic integers. During analysis the constant count for rather large
     * programs such as the JS interpreter are 90k objects. Hence we use 64k as a good start.
     */
    private final IdentityHashMap<Object, AtomicInteger> store = new IdentityHashMap<>(65536);
    /*
     * Sequence number for the current iteration. An entry whose counter equals sequence is
     * committed; sequence - 1 marks it as in flight. reset() advances by 2 so counters left
     * over from a previous iteration can match neither the new committed nor the new
     * in-flight value.
     */
    private int sequence = 0;

    /**
     * Atomically records {@code object} as seen in the current iteration.
     *
     * @return {@code null} if this caller acquired the object (first to put it this
     *         iteration) and must later call {@link #release(Object)}; otherwise the object
     *         itself, after spin-waiting for any concurrent in-flight put to be released
     */
    public Object putAndAcquire(Object object) {
        IdentityHashMap<Object, AtomicInteger> map = this.store;
        AtomicInteger i = map.get(object);
        int seq = this.sequence;
        int inflightSequence = seq - 1;
        while (true) {
            if (i != null) {
                int current = i.get();
                if (current == seq) {
                    return object; // Found and is already released
                } else {
                    // Stale counter from an earlier iteration: try to claim it via CAS.
                    if (current != inflightSequence && i.compareAndSet(current, inflightSequence)) {
                        return null; // We have successfully acquired
                    } else { // Someone else has acquired
                        while (i.get() != seq) { // Wait until released
                            Thread.yield();
                        }
                        return object; // Object has been released
                    }
                }
            } else {
                AtomicInteger newSequence = new AtomicInteger(inflightSequence);
                // IdentityHashMap is not thread-safe, so all structural writes synchronize
                // on the map itself.
                synchronized (map) {
                    i = map.putIfAbsent(object, newSequence);
                    if (i == null) {
                        return null; // Our in-flight entry was inserted; we hold the acquire
                    } else {
                        continue; // Lost the insertion race; re-examine the winner's entry
                    }
                }
            }
        }
    }

    /**
     * Commits {@code o} after a successful acquire by setting its counter to the current
     * sequence, which lets any spin-waiting callers in {@link #putAndAcquire(Object)}
     * proceed.
     */
    public void release(Object o) {
        IdentityHashMap<Object, AtomicInteger> map = this.store;
        AtomicInteger i = map.get(o);
        if (i == null) {
            // We have missed a value likely someone else has updated the map at the same time.
            // Now synchronize
            synchronized (map) {
                i = map.get(o);
            }
        }
        // NOTE(review): assumes the entry exists by now (i.e. putAndAcquire inserted it
        // before release is called); otherwise this throws NPE — confirm callers always
        // pair acquire/release.
        i.set(sequence);
    }

    /** Starts a new iteration; see the comment on {@code sequence} for why the step is 2. */
    public void reset() {
        sequence += 2;
    }
}
}
|
googleapis/google-cloud-java | 35,437 | java-dialogflow-cx/proto-google-cloud-dialogflow-cx-v3/src/main/java/com/google/cloud/dialogflow/cx/v3/QueryResultOrBuilder.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/dialogflow/cx/v3/session.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.dialogflow.cx.v3;
public interface QueryResultOrBuilder
extends
// @@protoc_insertion_point(interface_extends:google.cloud.dialogflow.cx.v3.QueryResult)
com.google.protobuf.MessageOrBuilder {
/**
*
*
* <pre>
* If [natural language text][google.cloud.dialogflow.cx.v3.TextInput] was
* provided as input, this field will contain a copy of the text.
* </pre>
*
* <code>string text = 1;</code>
*
* @return Whether the text field is set.
*/
boolean hasText();
/**
*
*
* <pre>
* If [natural language text][google.cloud.dialogflow.cx.v3.TextInput] was
* provided as input, this field will contain a copy of the text.
* </pre>
*
* <code>string text = 1;</code>
*
* @return The text.
*/
java.lang.String getText();
/**
*
*
* <pre>
* If [natural language text][google.cloud.dialogflow.cx.v3.TextInput] was
* provided as input, this field will contain a copy of the text.
* </pre>
*
* <code>string text = 1;</code>
*
* @return The bytes for text.
*/
com.google.protobuf.ByteString getTextBytes();
/**
*
*
* <pre>
* If an [intent][google.cloud.dialogflow.cx.v3.IntentInput] was provided as
* input, this field will contain a copy of the intent identifier. Format:
* `projects/<ProjectID>/locations/<LocationID>/agents/<AgentID>/intents/<IntentID>`.
* </pre>
*
* <code>string trigger_intent = 11 [(.google.api.resource_reference) = { ... }</code>
*
* @return Whether the triggerIntent field is set.
*/
boolean hasTriggerIntent();
/**
*
*
* <pre>
* If an [intent][google.cloud.dialogflow.cx.v3.IntentInput] was provided as
* input, this field will contain a copy of the intent identifier. Format:
* `projects/<ProjectID>/locations/<LocationID>/agents/<AgentID>/intents/<IntentID>`.
* </pre>
*
* <code>string trigger_intent = 11 [(.google.api.resource_reference) = { ... }</code>
*
* @return The triggerIntent.
*/
java.lang.String getTriggerIntent();
/**
*
*
* <pre>
* If an [intent][google.cloud.dialogflow.cx.v3.IntentInput] was provided as
* input, this field will contain a copy of the intent identifier. Format:
* `projects/<ProjectID>/locations/<LocationID>/agents/<AgentID>/intents/<IntentID>`.
* </pre>
*
* <code>string trigger_intent = 11 [(.google.api.resource_reference) = { ... }</code>
*
* @return The bytes for triggerIntent.
*/
com.google.protobuf.ByteString getTriggerIntentBytes();
/**
*
*
* <pre>
* If [natural language speech
* audio][google.cloud.dialogflow.cx.v3.AudioInput] was provided as input,
* this field will contain the transcript for the audio.
* </pre>
*
* <code>string transcript = 12;</code>
*
* @return Whether the transcript field is set.
*/
boolean hasTranscript();
/**
*
*
* <pre>
* If [natural language speech
* audio][google.cloud.dialogflow.cx.v3.AudioInput] was provided as input,
* this field will contain the transcript for the audio.
* </pre>
*
* <code>string transcript = 12;</code>
*
* @return The transcript.
*/
java.lang.String getTranscript();
/**
*
*
* <pre>
* If [natural language speech
* audio][google.cloud.dialogflow.cx.v3.AudioInput] was provided as input,
* this field will contain the transcript for the audio.
* </pre>
*
* <code>string transcript = 12;</code>
*
* @return The bytes for transcript.
*/
com.google.protobuf.ByteString getTranscriptBytes();
/**
*
*
* <pre>
* If an [event][google.cloud.dialogflow.cx.v3.EventInput] was provided as
* input, this field will contain the name of the event.
* </pre>
*
* <code>string trigger_event = 14;</code>
*
* @return Whether the triggerEvent field is set.
*/
boolean hasTriggerEvent();
/**
*
*
* <pre>
* If an [event][google.cloud.dialogflow.cx.v3.EventInput] was provided as
* input, this field will contain the name of the event.
* </pre>
*
* <code>string trigger_event = 14;</code>
*
* @return The triggerEvent.
*/
java.lang.String getTriggerEvent();
/**
*
*
* <pre>
* If an [event][google.cloud.dialogflow.cx.v3.EventInput] was provided as
* input, this field will contain the name of the event.
* </pre>
*
* <code>string trigger_event = 14;</code>
*
* @return The bytes for triggerEvent.
*/
com.google.protobuf.ByteString getTriggerEventBytes();
/**
*
*
* <pre>
* If a [DTMF][google.cloud.dialogflow.cx.v3.DtmfInput] was provided as
* input, this field will contain a copy of the
* [DtmfInput][google.cloud.dialogflow.cx.v3.DtmfInput].
* </pre>
*
* <code>.google.cloud.dialogflow.cx.v3.DtmfInput dtmf = 23;</code>
*
* @return Whether the dtmf field is set.
*/
boolean hasDtmf();
/**
*
*
* <pre>
* If a [DTMF][google.cloud.dialogflow.cx.v3.DtmfInput] was provided as
* input, this field will contain a copy of the
* [DtmfInput][google.cloud.dialogflow.cx.v3.DtmfInput].
* </pre>
*
* <code>.google.cloud.dialogflow.cx.v3.DtmfInput dtmf = 23;</code>
*
* @return The dtmf.
*/
com.google.cloud.dialogflow.cx.v3.DtmfInput getDtmf();
/**
*
*
* <pre>
* If a [DTMF][google.cloud.dialogflow.cx.v3.DtmfInput] was provided as
* input, this field will contain a copy of the
* [DtmfInput][google.cloud.dialogflow.cx.v3.DtmfInput].
* </pre>
*
* <code>.google.cloud.dialogflow.cx.v3.DtmfInput dtmf = 23;</code>
*/
com.google.cloud.dialogflow.cx.v3.DtmfInputOrBuilder getDtmfOrBuilder();
/**
*
*
* <pre>
* The language that was triggered during intent detection.
* See [Language
* Support](https://cloud.google.com/dialogflow/cx/docs/reference/language)
* for a list of the currently supported language codes.
* </pre>
*
* <code>string language_code = 2;</code>
*
* @return The languageCode.
*/
java.lang.String getLanguageCode();
/**
*
*
* <pre>
* The language that was triggered during intent detection.
* See [Language
* Support](https://cloud.google.com/dialogflow/cx/docs/reference/language)
* for a list of the currently supported language codes.
* </pre>
*
* <code>string language_code = 2;</code>
*
* @return The bytes for languageCode.
*/
com.google.protobuf.ByteString getLanguageCodeBytes();
/**
*
*
* <pre>
* The collected [session
* parameters][google.cloud.dialogflow.cx.v3.SessionInfo.parameters].
*
* Depending on your protocol or client library language, this is a
* map, associative array, symbol table, dictionary, or JSON object
* composed of a collection of (MapKey, MapValue) pairs:
*
* * MapKey type: string
* * MapKey value: parameter name
* * MapValue type: If parameter's entity type is a composite entity then use
* map, otherwise, depending on the parameter value type, it could be one of
* string, number, boolean, null, list or map.
* * MapValue value: If parameter's entity type is a composite entity then use
* map from composite entity property names to property values, otherwise,
* use parameter value.
* </pre>
*
* <code>.google.protobuf.Struct parameters = 3;</code>
*
* @return Whether the parameters field is set.
*/
boolean hasParameters();
/**
*
*
* <pre>
* The collected [session
* parameters][google.cloud.dialogflow.cx.v3.SessionInfo.parameters].
*
* Depending on your protocol or client library language, this is a
* map, associative array, symbol table, dictionary, or JSON object
* composed of a collection of (MapKey, MapValue) pairs:
*
* * MapKey type: string
* * MapKey value: parameter name
* * MapValue type: If parameter's entity type is a composite entity then use
* map, otherwise, depending on the parameter value type, it could be one of
* string, number, boolean, null, list or map.
* * MapValue value: If parameter's entity type is a composite entity then use
* map from composite entity property names to property values, otherwise,
* use parameter value.
* </pre>
*
* <code>.google.protobuf.Struct parameters = 3;</code>
*
* @return The parameters.
*/
com.google.protobuf.Struct getParameters();
/**
*
*
* <pre>
* The collected [session
* parameters][google.cloud.dialogflow.cx.v3.SessionInfo.parameters].
*
* Depending on your protocol or client library language, this is a
* map, associative array, symbol table, dictionary, or JSON object
* composed of a collection of (MapKey, MapValue) pairs:
*
* * MapKey type: string
* * MapKey value: parameter name
* * MapValue type: If parameter's entity type is a composite entity then use
* map, otherwise, depending on the parameter value type, it could be one of
* string, number, boolean, null, list or map.
* * MapValue value: If parameter's entity type is a composite entity then use
* map from composite entity property names to property values, otherwise,
* use parameter value.
* </pre>
*
* <code>.google.protobuf.Struct parameters = 3;</code>
*/
com.google.protobuf.StructOrBuilder getParametersOrBuilder();
/**
*
*
* <pre>
* The list of rich messages returned to the client. Responses vary from
* simple text messages to more sophisticated, structured payloads used
* to drive complex logic.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.cx.v3.ResponseMessage response_messages = 4;</code>
*/
java.util.List<com.google.cloud.dialogflow.cx.v3.ResponseMessage> getResponseMessagesList();
/**
*
*
* <pre>
* The list of rich messages returned to the client. Responses vary from
* simple text messages to more sophisticated, structured payloads used
* to drive complex logic.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.cx.v3.ResponseMessage response_messages = 4;</code>
*/
com.google.cloud.dialogflow.cx.v3.ResponseMessage getResponseMessages(int index);
/**
*
*
* <pre>
* The list of rich messages returned to the client. Responses vary from
* simple text messages to more sophisticated, structured payloads used
* to drive complex logic.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.cx.v3.ResponseMessage response_messages = 4;</code>
*/
int getResponseMessagesCount();
/**
*
*
* <pre>
* The list of rich messages returned to the client. Responses vary from
* simple text messages to more sophisticated, structured payloads used
* to drive complex logic.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.cx.v3.ResponseMessage response_messages = 4;</code>
*/
java.util.List<? extends com.google.cloud.dialogflow.cx.v3.ResponseMessageOrBuilder>
getResponseMessagesOrBuilderList();
/**
*
*
* <pre>
* The list of rich messages returned to the client. Responses vary from
* simple text messages to more sophisticated, structured payloads used
* to drive complex logic.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.cx.v3.ResponseMessage response_messages = 4;</code>
*/
com.google.cloud.dialogflow.cx.v3.ResponseMessageOrBuilder getResponseMessagesOrBuilder(
int index);
/**
*
*
* <pre>
* The list of webhook ids in the order of call sequence.
* </pre>
*
* <code>repeated string webhook_ids = 25;</code>
*
* @return A list containing the webhookIds.
*/
java.util.List<java.lang.String> getWebhookIdsList();
/**
*
*
* <pre>
* The list of webhook ids in the order of call sequence.
* </pre>
*
* <code>repeated string webhook_ids = 25;</code>
*
* @return The count of webhookIds.
*/
int getWebhookIdsCount();
/**
*
*
* <pre>
* The list of webhook ids in the order of call sequence.
* </pre>
*
* <code>repeated string webhook_ids = 25;</code>
*
* @param index The index of the element to return.
* @return The webhookIds at the given index.
*/
java.lang.String getWebhookIds(int index);
/**
*
*
* <pre>
* The list of webhook ids in the order of call sequence.
* </pre>
*
* <code>repeated string webhook_ids = 25;</code>
*
* @param index The index of the value to return.
* @return The bytes of the webhookIds at the given index.
*/
com.google.protobuf.ByteString getWebhookIdsBytes(int index);
/**
*
*
* <pre>
* The list of webhook display names in the order of call sequence.
* </pre>
*
* <code>repeated string webhook_display_names = 26;</code>
*
* @return A list containing the webhookDisplayNames.
*/
java.util.List<java.lang.String> getWebhookDisplayNamesList();
/**
*
*
* <pre>
* The list of webhook display names in the order of call sequence.
* </pre>
*
* <code>repeated string webhook_display_names = 26;</code>
*
* @return The count of webhookDisplayNames.
*/
int getWebhookDisplayNamesCount();
/**
*
*
* <pre>
* The list of webhook display names in the order of call sequence.
* </pre>
*
* <code>repeated string webhook_display_names = 26;</code>
*
* @param index The index of the element to return.
* @return The webhookDisplayNames at the given index.
*/
java.lang.String getWebhookDisplayNames(int index);
/**
*
*
* <pre>
* The list of webhook display names in the order of call sequence.
* </pre>
*
* <code>repeated string webhook_display_names = 26;</code>
*
* @param index The index of the value to return.
* @return The bytes of the webhookDisplayNames at the given index.
*/
com.google.protobuf.ByteString getWebhookDisplayNamesBytes(int index);
/**
*
*
* <pre>
* The list of webhook latencies in the order of call sequence.
* </pre>
*
* <code>repeated .google.protobuf.Duration webhook_latencies = 27;</code>
*/
java.util.List<com.google.protobuf.Duration> getWebhookLatenciesList();
/**
*
*
* <pre>
* The list of webhook latencies in the order of call sequence.
* </pre>
*
* <code>repeated .google.protobuf.Duration webhook_latencies = 27;</code>
*/
com.google.protobuf.Duration getWebhookLatencies(int index);
/**
*
*
* <pre>
* The list of webhook latencies in the order of call sequence.
* </pre>
*
* <code>repeated .google.protobuf.Duration webhook_latencies = 27;</code>
*/
int getWebhookLatenciesCount();
/**
*
*
* <pre>
* The list of webhook latencies in the order of call sequence.
* </pre>
*
* <code>repeated .google.protobuf.Duration webhook_latencies = 27;</code>
*/
java.util.List<? extends com.google.protobuf.DurationOrBuilder>
getWebhookLatenciesOrBuilderList();
/**
*
*
* <pre>
* The list of webhook latencies in the order of call sequence.
* </pre>
*
* <code>repeated .google.protobuf.Duration webhook_latencies = 27;</code>
*/
com.google.protobuf.DurationOrBuilder getWebhookLatenciesOrBuilder(int index);
/**
*
*
* <pre>
* The list of webhook tags in the order of call sequence.
* </pre>
*
* <code>repeated string webhook_tags = 29;</code>
*
* @return A list containing the webhookTags.
*/
java.util.List<java.lang.String> getWebhookTagsList();
/**
*
*
* <pre>
* The list of webhook tags in the order of call sequence.
* </pre>
*
* <code>repeated string webhook_tags = 29;</code>
*
* @return The count of webhookTags.
*/
int getWebhookTagsCount();
/**
*
*
* <pre>
* The list of webhook tags in the order of call sequence.
* </pre>
*
* <code>repeated string webhook_tags = 29;</code>
*
* @param index The index of the element to return.
* @return The webhookTags at the given index.
*/
java.lang.String getWebhookTags(int index);
/**
*
*
* <pre>
* The list of webhook tags in the order of call sequence.
* </pre>
*
* <code>repeated string webhook_tags = 29;</code>
*
* @param index The index of the value to return.
* @return The bytes of the webhookTags at the given index.
*/
com.google.protobuf.ByteString getWebhookTagsBytes(int index);
/**
*
*
* <pre>
* The list of webhook call status in the order of call sequence.
* </pre>
*
* <code>repeated .google.rpc.Status webhook_statuses = 13;</code>
*/
java.util.List<com.google.rpc.Status> getWebhookStatusesList();
/**
*
*
* <pre>
* The list of webhook call status in the order of call sequence.
* </pre>
*
* <code>repeated .google.rpc.Status webhook_statuses = 13;</code>
*/
com.google.rpc.Status getWebhookStatuses(int index);
/**
*
*
* <pre>
* The list of webhook call status in the order of call sequence.
* </pre>
*
* <code>repeated .google.rpc.Status webhook_statuses = 13;</code>
*/
int getWebhookStatusesCount();
/**
*
*
* <pre>
* The list of webhook call status in the order of call sequence.
* </pre>
*
* <code>repeated .google.rpc.Status webhook_statuses = 13;</code>
*/
java.util.List<? extends com.google.rpc.StatusOrBuilder> getWebhookStatusesOrBuilderList();
/**
*
*
* <pre>
* The list of webhook call status in the order of call sequence.
* </pre>
*
* <code>repeated .google.rpc.Status webhook_statuses = 13;</code>
*/
com.google.rpc.StatusOrBuilder getWebhookStatusesOrBuilder(int index);
/**
*
*
* <pre>
* The list of webhook payload in
* [WebhookResponse.payload][google.cloud.dialogflow.cx.v3.WebhookResponse.payload],
* in the order of call sequence. If some webhook call fails or doesn't return
* any payload, an empty `Struct` would be used instead.
* </pre>
*
* <code>repeated .google.protobuf.Struct webhook_payloads = 6;</code>
*/
java.util.List<com.google.protobuf.Struct> getWebhookPayloadsList();
/**
*
*
* <pre>
* The list of webhook payload in
* [WebhookResponse.payload][google.cloud.dialogflow.cx.v3.WebhookResponse.payload],
* in the order of call sequence. If some webhook call fails or doesn't return
* any payload, an empty `Struct` would be used instead.
* </pre>
*
* <code>repeated .google.protobuf.Struct webhook_payloads = 6;</code>
*/
com.google.protobuf.Struct getWebhookPayloads(int index);
/**
*
*
* <pre>
* The list of webhook payload in
* [WebhookResponse.payload][google.cloud.dialogflow.cx.v3.WebhookResponse.payload],
* in the order of call sequence. If some webhook call fails or doesn't return
* any payload, an empty `Struct` would be used instead.
* </pre>
*
* <code>repeated .google.protobuf.Struct webhook_payloads = 6;</code>
*/
int getWebhookPayloadsCount();
/**
*
*
* <pre>
* The list of webhook payload in
* [WebhookResponse.payload][google.cloud.dialogflow.cx.v3.WebhookResponse.payload],
* in the order of call sequence. If some webhook call fails or doesn't return
* any payload, an empty `Struct` would be used instead.
* </pre>
*
* <code>repeated .google.protobuf.Struct webhook_payloads = 6;</code>
*/
java.util.List<? extends com.google.protobuf.StructOrBuilder> getWebhookPayloadsOrBuilderList();
/**
*
*
* <pre>
* The list of webhook payload in
* [WebhookResponse.payload][google.cloud.dialogflow.cx.v3.WebhookResponse.payload],
* in the order of call sequence. If some webhook call fails or doesn't return
* any payload, an empty `Struct` would be used instead.
* </pre>
*
* <code>repeated .google.protobuf.Struct webhook_payloads = 6;</code>
*/
com.google.protobuf.StructOrBuilder getWebhookPayloadsOrBuilder(int index);
/**
*
*
* <pre>
* The current [Page][google.cloud.dialogflow.cx.v3.Page]. Some, not all
* fields are filled in this message, including but not limited to `name` and
* `display_name`.
* </pre>
*
* <code>.google.cloud.dialogflow.cx.v3.Page current_page = 7;</code>
*
* @return Whether the currentPage field is set.
*/
boolean hasCurrentPage();
/**
*
*
* <pre>
* The current [Page][google.cloud.dialogflow.cx.v3.Page]. Some, not all
* fields are filled in this message, including but not limited to `name` and
* `display_name`.
* </pre>
*
* <code>.google.cloud.dialogflow.cx.v3.Page current_page = 7;</code>
*
* @return The currentPage.
*/
com.google.cloud.dialogflow.cx.v3.Page getCurrentPage();
/**
*
*
* <pre>
* The current [Page][google.cloud.dialogflow.cx.v3.Page]. Some, not all
* fields are filled in this message, including but not limited to `name` and
* `display_name`.
* </pre>
*
* <code>.google.cloud.dialogflow.cx.v3.Page current_page = 7;</code>
*/
com.google.cloud.dialogflow.cx.v3.PageOrBuilder getCurrentPageOrBuilder();
/**
*
*
* <pre>
* The current [Flow][google.cloud.dialogflow.cx.v3.Flow]. Some, not all
* fields are filled in this message, including but not limited to `name` and
* `display_name`.
* </pre>
*
* <code>.google.cloud.dialogflow.cx.v3.Flow current_flow = 31;</code>
*
* @return Whether the currentFlow field is set.
*/
boolean hasCurrentFlow();
/**
*
*
* <pre>
* The current [Flow][google.cloud.dialogflow.cx.v3.Flow]. Some, not all
* fields are filled in this message, including but not limited to `name` and
* `display_name`.
* </pre>
*
* <code>.google.cloud.dialogflow.cx.v3.Flow current_flow = 31;</code>
*
* @return The currentFlow.
*/
com.google.cloud.dialogflow.cx.v3.Flow getCurrentFlow();
/**
*
*
* <pre>
* The current [Flow][google.cloud.dialogflow.cx.v3.Flow]. Some, not all
* fields are filled in this message, including but not limited to `name` and
* `display_name`.
* </pre>
*
* <code>.google.cloud.dialogflow.cx.v3.Flow current_flow = 31;</code>
*/
com.google.cloud.dialogflow.cx.v3.FlowOrBuilder getCurrentFlowOrBuilder();
/**
*
*
* <pre>
* The [Intent][google.cloud.dialogflow.cx.v3.Intent] that matched the
* conversational query. Some, not all fields are filled in this message,
* including but not limited to: `name` and `display_name`. This field is
* deprecated, please use
* [QueryResult.match][google.cloud.dialogflow.cx.v3.QueryResult.match]
* instead.
* </pre>
*
* <code>.google.cloud.dialogflow.cx.v3.Intent intent = 8 [deprecated = true];</code>
*
* @deprecated google.cloud.dialogflow.cx.v3.QueryResult.intent is deprecated. See
* google/cloud/dialogflow/cx/v3/session.proto;l=1049
* @return Whether the intent field is set.
*/
@java.lang.Deprecated
boolean hasIntent();
/**
*
*
* <pre>
* The [Intent][google.cloud.dialogflow.cx.v3.Intent] that matched the
* conversational query. Some, not all fields are filled in this message,
* including but not limited to: `name` and `display_name`. This field is
* deprecated, please use
* [QueryResult.match][google.cloud.dialogflow.cx.v3.QueryResult.match]
* instead.
* </pre>
*
* <code>.google.cloud.dialogflow.cx.v3.Intent intent = 8 [deprecated = true];</code>
*
* @deprecated google.cloud.dialogflow.cx.v3.QueryResult.intent is deprecated. See
* google/cloud/dialogflow/cx/v3/session.proto;l=1049
* @return The intent.
*/
@java.lang.Deprecated
com.google.cloud.dialogflow.cx.v3.Intent getIntent();
/**
*
*
* <pre>
* The [Intent][google.cloud.dialogflow.cx.v3.Intent] that matched the
* conversational query. Some, not all fields are filled in this message,
* including but not limited to: `name` and `display_name`. This field is
* deprecated, please use
* [QueryResult.match][google.cloud.dialogflow.cx.v3.QueryResult.match]
* instead.
* </pre>
*
* <code>.google.cloud.dialogflow.cx.v3.Intent intent = 8 [deprecated = true];</code>
*/
@java.lang.Deprecated
com.google.cloud.dialogflow.cx.v3.IntentOrBuilder getIntentOrBuilder();
/**
*
*
* <pre>
* The intent detection confidence. Values range from 0.0 (completely
* uncertain) to 1.0 (completely certain).
* This value is for informational purpose only and is only used to
* help match the best intent within the classification threshold.
* This value may change for the same end-user expression at any time due to a
* model retraining or change in implementation.
* This field is deprecated, please use
* [QueryResult.match][google.cloud.dialogflow.cx.v3.QueryResult.match]
* instead.
* </pre>
*
* <code>float intent_detection_confidence = 9 [deprecated = true];</code>
*
* @deprecated google.cloud.dialogflow.cx.v3.QueryResult.intent_detection_confidence is
* deprecated. See google/cloud/dialogflow/cx/v3/session.proto;l=1060
* @return The intentDetectionConfidence.
*/
@java.lang.Deprecated
float getIntentDetectionConfidence();
/**
*
*
* <pre>
* Intent match result, could be an intent or an event.
* </pre>
*
* <code>.google.cloud.dialogflow.cx.v3.Match match = 15;</code>
*
* @return Whether the match field is set.
*/
boolean hasMatch();
/**
*
*
* <pre>
* Intent match result, could be an intent or an event.
* </pre>
*
* <code>.google.cloud.dialogflow.cx.v3.Match match = 15;</code>
*
* @return The match.
*/
com.google.cloud.dialogflow.cx.v3.Match getMatch();
/**
*
*
* <pre>
* Intent match result, could be an intent or an event.
* </pre>
*
* <code>.google.cloud.dialogflow.cx.v3.Match match = 15;</code>
*/
com.google.cloud.dialogflow.cx.v3.MatchOrBuilder getMatchOrBuilder();
/**
*
*
* <pre>
* The free-form diagnostic info. For example, this field could contain
* webhook call latency. The fields of this data can change without notice,
* so you should not write code that depends on its structure.
*
* One of the fields is called "Alternative Matched Intents", which may
* aid with debugging. The following describes these intent results:
*
* - The list is empty if no intent was matched to end-user input.
* - Only intents that are referenced in the currently active flow are
* included.
* - The matched intent is included.
* - Other intents that could have matched end-user input, but did not match
* because they are referenced by intent routes that are out of
* [scope](https://cloud.google.com/dialogflow/cx/docs/concept/handler#scope),
* are included.
* - Other intents referenced by intent routes in scope that matched end-user
* input, but had a lower confidence score.
* </pre>
*
* <code>.google.protobuf.Struct diagnostic_info = 10;</code>
*
* @return Whether the diagnosticInfo field is set.
*/
boolean hasDiagnosticInfo();
/**
*
*
* <pre>
* The free-form diagnostic info. For example, this field could contain
* webhook call latency. The fields of this data can change without notice,
* so you should not write code that depends on its structure.
*
* One of the fields is called "Alternative Matched Intents", which may
* aid with debugging. The following describes these intent results:
*
* - The list is empty if no intent was matched to end-user input.
* - Only intents that are referenced in the currently active flow are
* included.
* - The matched intent is included.
* - Other intents that could have matched end-user input, but did not match
* because they are referenced by intent routes that are out of
* [scope](https://cloud.google.com/dialogflow/cx/docs/concept/handler#scope),
* are included.
* - Other intents referenced by intent routes in scope that matched end-user
* input, but had a lower confidence score.
* </pre>
*
* <code>.google.protobuf.Struct diagnostic_info = 10;</code>
*
* @return The diagnosticInfo.
*/
com.google.protobuf.Struct getDiagnosticInfo();
/**
*
*
* <pre>
* The free-form diagnostic info. For example, this field could contain
* webhook call latency. The fields of this data can change without notice,
* so you should not write code that depends on its structure.
*
* One of the fields is called "Alternative Matched Intents", which may
* aid with debugging. The following describes these intent results:
*
* - The list is empty if no intent was matched to end-user input.
* - Only intents that are referenced in the currently active flow are
* included.
* - The matched intent is included.
* - Other intents that could have matched end-user input, but did not match
* because they are referenced by intent routes that are out of
* [scope](https://cloud.google.com/dialogflow/cx/docs/concept/handler#scope),
* are included.
* - Other intents referenced by intent routes in scope that matched end-user
* input, but had a lower confidence score.
* </pre>
*
* <code>.google.protobuf.Struct diagnostic_info = 10;</code>
*/
com.google.protobuf.StructOrBuilder getDiagnosticInfoOrBuilder();
  /**
   *
   *
   * <pre>
   * The sentiment analysis result, which depends on
   * [`analyze_query_text_sentiment`]
   * [google.cloud.dialogflow.cx.v3.QueryParameters.analyze_query_text_sentiment],
   * specified in the request.
   * </pre>
   *
   * <code>.google.cloud.dialogflow.cx.v3.SentimentAnalysisResult sentiment_analysis_result = 17;
   * </code>
   *
   * @return Whether the sentimentAnalysisResult field is set.
   */
  boolean hasSentimentAnalysisResult();
  /**
   *
   *
   * <pre>
   * The sentiment analysis result, which depends on
   * [`analyze_query_text_sentiment`]
   * [google.cloud.dialogflow.cx.v3.QueryParameters.analyze_query_text_sentiment],
   * specified in the request.
   * </pre>
   *
   * <code>.google.cloud.dialogflow.cx.v3.SentimentAnalysisResult sentiment_analysis_result = 17;
   * </code>
   *
   * @return The sentimentAnalysisResult.
   */
  com.google.cloud.dialogflow.cx.v3.SentimentAnalysisResult getSentimentAnalysisResult();
  /**
   *
   *
   * <pre>
   * The sentiment analysis result, which depends on
   * [`analyze_query_text_sentiment`]
   * [google.cloud.dialogflow.cx.v3.QueryParameters.analyze_query_text_sentiment],
   * specified in the request.
   * </pre>
   *
   * <code>.google.cloud.dialogflow.cx.v3.SentimentAnalysisResult sentiment_analysis_result = 17;
   * </code>
   */
  com.google.cloud.dialogflow.cx.v3.SentimentAnalysisResultOrBuilder
      getSentimentAnalysisResultOrBuilder();
/**
*
*
* <pre>
* Returns the current advanced settings including IVR settings. Even though
* the operations configured by these settings are performed by Dialogflow,
* the client may need to perform special logic at the moment. For example, if
* Dialogflow exports audio to Google Cloud Storage, then the client may need
* to wait for the resulting object to appear in the bucket before proceeding.
* </pre>
*
* <code>.google.cloud.dialogflow.cx.v3.AdvancedSettings advanced_settings = 21;</code>
*
* @return Whether the advancedSettings field is set.
*/
boolean hasAdvancedSettings();
/**
*
*
* <pre>
* Returns the current advanced settings including IVR settings. Even though
* the operations configured by these settings are performed by Dialogflow,
* the client may need to perform special logic at the moment. For example, if
* Dialogflow exports audio to Google Cloud Storage, then the client may need
* to wait for the resulting object to appear in the bucket before proceeding.
* </pre>
*
* <code>.google.cloud.dialogflow.cx.v3.AdvancedSettings advanced_settings = 21;</code>
*
* @return The advancedSettings.
*/
com.google.cloud.dialogflow.cx.v3.AdvancedSettings getAdvancedSettings();
/**
*
*
* <pre>
* Returns the current advanced settings including IVR settings. Even though
* the operations configured by these settings are performed by Dialogflow,
* the client may need to perform special logic at the moment. For example, if
* Dialogflow exports audio to Google Cloud Storage, then the client may need
* to wait for the resulting object to appear in the bucket before proceeding.
* </pre>
*
* <code>.google.cloud.dialogflow.cx.v3.AdvancedSettings advanced_settings = 21;</code>
*/
com.google.cloud.dialogflow.cx.v3.AdvancedSettingsOrBuilder getAdvancedSettingsOrBuilder();
/**
*
*
* <pre>
* Indicates whether the Thumbs up/Thumbs down rating controls are need to be
* shown for the response in the Dialogflow Messenger widget.
* </pre>
*
* <code>bool allow_answer_feedback = 32;</code>
*
* @return The allowAnswerFeedback.
*/
boolean getAllowAnswerFeedback();
/**
*
*
* <pre>
* Optional. Data store connection feature output signals.
* Filled only when data stores are involved in serving the query.
* </pre>
*
* <code>
* .google.cloud.dialogflow.cx.v3.DataStoreConnectionSignals data_store_connection_signals = 35 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return Whether the dataStoreConnectionSignals field is set.
*/
boolean hasDataStoreConnectionSignals();
/**
*
*
* <pre>
* Optional. Data store connection feature output signals.
* Filled only when data stores are involved in serving the query.
* </pre>
*
* <code>
* .google.cloud.dialogflow.cx.v3.DataStoreConnectionSignals data_store_connection_signals = 35 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return The dataStoreConnectionSignals.
*/
com.google.cloud.dialogflow.cx.v3.DataStoreConnectionSignals getDataStoreConnectionSignals();
/**
*
*
* <pre>
* Optional. Data store connection feature output signals.
* Filled only when data stores are involved in serving the query.
* </pre>
*
* <code>
* .google.cloud.dialogflow.cx.v3.DataStoreConnectionSignals data_store_connection_signals = 35 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
com.google.cloud.dialogflow.cx.v3.DataStoreConnectionSignalsOrBuilder
getDataStoreConnectionSignalsOrBuilder();
com.google.cloud.dialogflow.cx.v3.QueryResult.QueryCase getQueryCase();
}
|
googleapis/google-cloud-java | 35,867 | java-dialogflow/google-cloud-dialogflow/src/main/java/com/google/cloud/dialogflow/v2beta1/SessionsClient.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.dialogflow.v2beta1;
import com.google.api.core.ApiFuture;
import com.google.api.core.ApiFutures;
import com.google.api.core.BetaApi;
import com.google.api.gax.core.BackgroundResource;
import com.google.api.gax.paging.AbstractFixedSizeCollection;
import com.google.api.gax.paging.AbstractPage;
import com.google.api.gax.paging.AbstractPagedListResponse;
import com.google.api.gax.rpc.BidiStreamingCallable;
import com.google.api.gax.rpc.PageContext;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.cloud.dialogflow.v2beta1.stub.SessionsStub;
import com.google.cloud.dialogflow.v2beta1.stub.SessionsStubSettings;
import com.google.cloud.location.GetLocationRequest;
import com.google.cloud.location.ListLocationsRequest;
import com.google.cloud.location.ListLocationsResponse;
import com.google.cloud.location.Location;
import com.google.common.util.concurrent.MoreExecutors;
import java.io.IOException;
import java.util.List;
import java.util.concurrent.TimeUnit;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
* Service Description: A service used for session interactions.
*
* <p>For more information, see the [API interactions
* guide](https://cloud.google.com/dialogflow/docs/api-overview).
*
* <p>This class provides the ability to make remote calls to the backing service through method
* calls that map to API methods. Sample code to get started:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (SessionsClient sessionsClient = SessionsClient.create()) {
* SessionName session = SessionName.ofProjectSessionName("[PROJECT]", "[SESSION]");
* QueryInput queryInput = QueryInput.newBuilder().build();
* DetectIntentResponse response = sessionsClient.detectIntent(session, queryInput);
* }
* }</pre>
*
* <p>Note: close() needs to be called on the SessionsClient object to clean up resources such as
* threads. In the example above, try-with-resources is used, which automatically calls close().
*
* <table>
* <caption>Methods</caption>
* <tr>
* <th>Method</th>
* <th>Description</th>
* <th>Method Variants</th>
* </tr>
* <tr>
* <td><p> DetectIntent</td>
* <td><p> Processes a natural language query and returns structured, actionable data as a result. This method is not idempotent, because it may cause contexts and session entity types to be updated, which in turn might affect results of future queries.
* <p> If you might use [Agent Assist](https://cloud.google.com/dialogflow/docs/#aa) or other CCAI products now or in the future, consider using [AnalyzeContent][google.cloud.dialogflow.v2beta1.Participants.AnalyzeContent] instead of `DetectIntent`. `AnalyzeContent` has additional functionality for Agent Assist and other CCAI products.
* <p> Note: Always use agent versions for production traffic. See [Versions and environments](https://cloud.google.com/dialogflow/es/docs/agents-versions).</td>
* <td>
* <p>Request object method variants only take one parameter, a request object, which must be constructed before the call.</p>
* <ul>
* <li><p> detectIntent(DetectIntentRequest request)
* </ul>
* <p>"Flattened" method variants have converted the fields of the request object into function parameters to enable multiple ways to call the same method.</p>
* <ul>
* <li><p> detectIntent(SessionName session, QueryInput queryInput)
* <li><p> detectIntent(String session, QueryInput queryInput)
* </ul>
* <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p>
* <ul>
* <li><p> detectIntentCallable()
* </ul>
* </td>
* </tr>
* <tr>
* <td><p> StreamingDetectIntent</td>
* <td><p> Processes a natural language query in audio format in a streaming fashion and returns structured, actionable data as a result. This method is only available via the gRPC API (not REST).
* <p> If you might use [Agent Assist](https://cloud.google.com/dialogflow/docs/#aa) or other CCAI products now or in the future, consider using [StreamingAnalyzeContent][google.cloud.dialogflow.v2beta1.Participants.StreamingAnalyzeContent] instead of `StreamingDetectIntent`. `StreamingAnalyzeContent` has additional functionality for Agent Assist and other CCAI products.
* <p> Note: Always use agent versions for production traffic. See [Versions and environments](https://cloud.google.com/dialogflow/es/docs/agents-versions).</td>
* <td>
* <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p>
* <ul>
* <li><p> streamingDetectIntentCallable()
* </ul>
* </td>
* </tr>
* <tr>
* <td><p> ListLocations</td>
* <td><p> Lists information about the supported locations for this service.</td>
* <td>
* <p>Request object method variants only take one parameter, a request object, which must be constructed before the call.</p>
* <ul>
* <li><p> listLocations(ListLocationsRequest request)
* </ul>
* <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p>
* <ul>
* <li><p> listLocationsPagedCallable()
* <li><p> listLocationsCallable()
* </ul>
* </td>
* </tr>
* <tr>
* <td><p> GetLocation</td>
* <td><p> Gets information about a location.</td>
* <td>
* <p>Request object method variants only take one parameter, a request object, which must be constructed before the call.</p>
* <ul>
* <li><p> getLocation(GetLocationRequest request)
* </ul>
* <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p>
* <ul>
* <li><p> getLocationCallable()
* </ul>
* </td>
* </tr>
* </table>
*
* <p>See the individual methods for example code.
*
* <p>Many parameters require resource names to be formatted in a particular way. To assist with
* these names, this class includes a format method for each type of name, and additionally a parse
* method to extract the individual identifiers contained within names that are returned.
*
* <p>This class can be customized by passing in a custom instance of SessionsSettings to create().
* For example:
*
* <p>To customize credentials:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* SessionsSettings sessionsSettings =
* SessionsSettings.newBuilder()
* .setCredentialsProvider(FixedCredentialsProvider.create(myCredentials))
* .build();
* SessionsClient sessionsClient = SessionsClient.create(sessionsSettings);
* }</pre>
*
* <p>To customize the endpoint:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* SessionsSettings sessionsSettings =
* SessionsSettings.newBuilder().setEndpoint(myEndpoint).build();
* SessionsClient sessionsClient = SessionsClient.create(sessionsSettings);
* }</pre>
*
* <p>To use REST (HTTP1.1/JSON) transport (instead of gRPC) for sending and receiving requests over
* the wire:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* SessionsSettings sessionsSettings = SessionsSettings.newHttpJsonBuilder().build();
* SessionsClient sessionsClient = SessionsClient.create(sessionsSettings);
* }</pre>
*
* <p>Please refer to the GitHub repository's samples for more quickstart code snippets.
*/
@BetaApi
@Generated("by gapic-generator-java")
public class SessionsClient implements BackgroundResource {
private final SessionsSettings settings;
private final SessionsStub stub;
  /**
   * Constructs an instance of SessionsClient with default settings.
   *
   * @return a new client backed by default {@code SessionsSettings}
   * @throws IOException if the client settings or transport cannot be initialized
   */
  public static final SessionsClient create() throws IOException {
    return create(SessionsSettings.newBuilder().build());
  }
  /**
   * Constructs an instance of SessionsClient, using the given settings. The channels are created
   * based on the settings passed in, or defaults for any settings that are not set.
   *
   * @param settings the settings to configure the client with
   * @return a new client configured from {@code settings}
   * @throws IOException if the client settings or transport cannot be initialized
   */
  public static final SessionsClient create(SessionsSettings settings) throws IOException {
    return new SessionsClient(settings);
  }
  /**
   * Constructs an instance of SessionsClient, using the given stub for making calls. This is for
   * advanced usage - prefer using create(SessionsSettings).
   *
   * @param stub the transport stub that all RPCs will be issued through
   * @return a new client backed directly by {@code stub}; {@link #getSettings()} will return
   *     {@code null} for such a client
   */
  public static final SessionsClient create(SessionsStub stub) {
    return new SessionsClient(stub);
  }
  /**
   * Constructs an instance of SessionsClient, using the given settings. This is protected so that
   * it is easy to make a subclass, but otherwise, the static factory methods should be preferred.
   *
   * @param settings the settings the client and its stub are configured from
   * @throws IOException if the stub cannot be created from the settings
   */
  protected SessionsClient(SessionsSettings settings) throws IOException {
    this.settings = settings;
    this.stub = ((SessionsStubSettings) settings.getStubSettings()).createStub();
  }
  /**
   * Constructs an instance of SessionsClient using a pre-built stub. Because no settings are
   * supplied, {@link #getSettings()} returns {@code null} for clients built this way.
   *
   * @param stub the transport stub that all RPCs will be issued through
   */
  protected SessionsClient(SessionsStub stub) {
    this.settings = null;
    this.stub = stub;
  }
  /**
   * Returns the settings this client was created with, or {@code null} if the client was
   * constructed directly from a stub.
   */
  public final SessionsSettings getSettings() {
    return settings;
  }
  /** Returns the underlying transport stub through which all RPCs of this client are issued. */
  public SessionsStub getStub() {
    return stub;
  }
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Processes a natural language query and returns structured, actionable data as a result. This
* method is not idempotent, because it may cause contexts and session entity types to be updated,
* which in turn might affect results of future queries.
*
* <p>If you might use [Agent Assist](https://cloud.google.com/dialogflow/docs/#aa) or other CCAI
* products now or in the future, consider using
* [AnalyzeContent][google.cloud.dialogflow.v2beta1.Participants.AnalyzeContent] instead of
* `DetectIntent`. `AnalyzeContent` has additional functionality for Agent Assist and other CCAI
* products.
*
* <p>Note: Always use agent versions for production traffic. See [Versions and
* environments](https://cloud.google.com/dialogflow/es/docs/agents-versions).
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (SessionsClient sessionsClient = SessionsClient.create()) {
* SessionName session = SessionName.ofProjectSessionName("[PROJECT]", "[SESSION]");
* QueryInput queryInput = QueryInput.newBuilder().build();
* DetectIntentResponse response = sessionsClient.detectIntent(session, queryInput);
* }
* }</pre>
*
* @param session Required. The name of the session this query is sent to. Supported formats: -
* `projects/<Project ID>/agent/sessions/<Session ID>, - `projects/<Project
* ID>/locations/<Location ID>/agent/sessions/<Session ID>`, -
* `projects/<Project ID>/agent/environments/<Environment ID>/users/<User
* ID>/sessions/<Session ID>`, - `projects/<Project ID>/locations/<Location
* ID>/agent/environments/<Environment ID>/users/<User ID>/sessions/<Session
* ID>`,
* <p>If `Location ID` is not specified we assume default 'us' location. If `Environment ID`
* is not specified, we assume default 'draft' environment (`Environment ID` might be referred
* to as environment name at some places). If `User ID` is not specified, we are using "-".
* It's up to the API caller to choose an appropriate `Session ID` and `User Id`. They can be
* a random number or some type of user and session identifiers (preferably hashed). The
* length of the `Session ID` and `User ID` must not exceed 36 characters. For more
* information, see the [API interactions
* guide](https://cloud.google.com/dialogflow/docs/api-overview).
* <p>Note: Always use agent versions for production traffic. See [Versions and
* environments](https://cloud.google.com/dialogflow/es/docs/agents-versions).
* @param queryInput Required. The input specification. It can be set to:
* <p>1. an audio config which instructs the speech recognizer how to process the speech
* audio,
* <p>2. a conversational query in the form of text, or
* <p>3. an event that specifies which intent to trigger.
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
public final DetectIntentResponse detectIntent(SessionName session, QueryInput queryInput) {
DetectIntentRequest request =
DetectIntentRequest.newBuilder()
.setSession(session == null ? null : session.toString())
.setQueryInput(queryInput)
.build();
return detectIntent(request);
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Processes a natural language query and returns structured, actionable data as a result. This
* method is not idempotent, because it may cause contexts and session entity types to be updated,
* which in turn might affect results of future queries.
*
* <p>If you might use [Agent Assist](https://cloud.google.com/dialogflow/docs/#aa) or other CCAI
* products now or in the future, consider using
* [AnalyzeContent][google.cloud.dialogflow.v2beta1.Participants.AnalyzeContent] instead of
* `DetectIntent`. `AnalyzeContent` has additional functionality for Agent Assist and other CCAI
* products.
*
* <p>Note: Always use agent versions for production traffic. See [Versions and
* environments](https://cloud.google.com/dialogflow/es/docs/agents-versions).
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (SessionsClient sessionsClient = SessionsClient.create()) {
* String session = SessionName.ofProjectSessionName("[PROJECT]", "[SESSION]").toString();
* QueryInput queryInput = QueryInput.newBuilder().build();
* DetectIntentResponse response = sessionsClient.detectIntent(session, queryInput);
* }
* }</pre>
*
* @param session Required. The name of the session this query is sent to. Supported formats: -
* `projects/<Project ID>/agent/sessions/<Session ID>, - `projects/<Project
* ID>/locations/<Location ID>/agent/sessions/<Session ID>`, -
* `projects/<Project ID>/agent/environments/<Environment ID>/users/<User
* ID>/sessions/<Session ID>`, - `projects/<Project ID>/locations/<Location
* ID>/agent/environments/<Environment ID>/users/<User ID>/sessions/<Session
* ID>`,
* <p>If `Location ID` is not specified we assume default 'us' location. If `Environment ID`
* is not specified, we assume default 'draft' environment (`Environment ID` might be referred
* to as environment name at some places). If `User ID` is not specified, we are using "-".
* It's up to the API caller to choose an appropriate `Session ID` and `User Id`. They can be
* a random number or some type of user and session identifiers (preferably hashed). The
* length of the `Session ID` and `User ID` must not exceed 36 characters. For more
* information, see the [API interactions
* guide](https://cloud.google.com/dialogflow/docs/api-overview).
* <p>Note: Always use agent versions for production traffic. See [Versions and
* environments](https://cloud.google.com/dialogflow/es/docs/agents-versions).
* @param queryInput Required. The input specification. It can be set to:
* <p>1. an audio config which instructs the speech recognizer how to process the speech
* audio,
* <p>2. a conversational query in the form of text, or
* <p>3. an event that specifies which intent to trigger.
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
public final DetectIntentResponse detectIntent(String session, QueryInput queryInput) {
DetectIntentRequest request =
DetectIntentRequest.newBuilder().setSession(session).setQueryInput(queryInput).build();
return detectIntent(request);
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Processes a natural language query and returns structured, actionable data as a result. This
* method is not idempotent, because it may cause contexts and session entity types to be updated,
* which in turn might affect results of future queries.
*
* <p>If you might use [Agent Assist](https://cloud.google.com/dialogflow/docs/#aa) or other CCAI
* products now or in the future, consider using
* [AnalyzeContent][google.cloud.dialogflow.v2beta1.Participants.AnalyzeContent] instead of
* `DetectIntent`. `AnalyzeContent` has additional functionality for Agent Assist and other CCAI
* products.
*
* <p>Note: Always use agent versions for production traffic. See [Versions and
* environments](https://cloud.google.com/dialogflow/es/docs/agents-versions).
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (SessionsClient sessionsClient = SessionsClient.create()) {
* DetectIntentRequest request =
* DetectIntentRequest.newBuilder()
* .setSession(SessionName.ofProjectSessionName("[PROJECT]", "[SESSION]").toString())
* .setQueryParams(QueryParameters.newBuilder().build())
* .setQueryInput(QueryInput.newBuilder().build())
* .setOutputAudioConfig(OutputAudioConfig.newBuilder().build())
* .setOutputAudioConfigMask(FieldMask.newBuilder().build())
* .setInputAudio(ByteString.EMPTY)
* .build();
* DetectIntentResponse response = sessionsClient.detectIntent(request);
* }
* }</pre>
*
* @param request The request object containing all of the parameters for the API call.
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
public final DetectIntentResponse detectIntent(DetectIntentRequest request) {
return detectIntentCallable().call(request);
}
  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Processes a natural language query and returns structured, actionable data as a result. This
   * method is not idempotent, because it may cause contexts and session entity types to be updated,
   * which in turn might affect results of future queries.
   *
   * <p>If you might use [Agent Assist](https://cloud.google.com/dialogflow/docs/#aa) or other CCAI
   * products now or in the future, consider using
   * [AnalyzeContent][google.cloud.dialogflow.v2beta1.Participants.AnalyzeContent] instead of
   * `DetectIntent`. `AnalyzeContent` has additional functionality for Agent Assist and other CCAI
   * products.
   *
   * <p>Note: Always use agent versions for production traffic. See [Versions and
   * environments](https://cloud.google.com/dialogflow/es/docs/agents-versions).
   *
   * <p>Sample code:
   *
   * <pre>{@code
   * // This snippet has been automatically generated and should be regarded as a code template only.
   * // It will require modifications to work:
   * // - It may require correct/in-range values for request initialization.
   * // - It may require specifying regional endpoints when creating the service client as shown in
   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
   * try (SessionsClient sessionsClient = SessionsClient.create()) {
   *   DetectIntentRequest request =
   *       DetectIntentRequest.newBuilder()
   *           .setSession(SessionName.ofProjectSessionName("[PROJECT]", "[SESSION]").toString())
   *           .setQueryParams(QueryParameters.newBuilder().build())
   *           .setQueryInput(QueryInput.newBuilder().build())
   *           .setOutputAudioConfig(OutputAudioConfig.newBuilder().build())
   *           .setOutputAudioConfigMask(FieldMask.newBuilder().build())
   *           .setInputAudio(ByteString.EMPTY)
   *           .build();
   *   ApiFuture<DetectIntentResponse> future =
   *       sessionsClient.detectIntentCallable().futureCall(request);
   *   // Do something.
   *   DetectIntentResponse response = future.get();
   * }
   * }</pre>
   *
   * @return a callable accepting {@link DetectIntentRequest} and producing {@link
   *     DetectIntentResponse}; supports both blocking {@code call} and asynchronous {@code
   *     futureCall}
   */
  public final UnaryCallable<DetectIntentRequest, DetectIntentResponse> detectIntentCallable() {
    return stub.detectIntentCallable();
  }
  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Processes a natural language query in audio format in a streaming fashion and returns
   * structured, actionable data as a result. This method is only available via the gRPC API (not
   * REST).
   *
   * <p>If you might use [Agent Assist](https://cloud.google.com/dialogflow/docs/#aa) or other CCAI
   * products now or in the future, consider using
   * [StreamingAnalyzeContent][google.cloud.dialogflow.v2beta1.Participants.StreamingAnalyzeContent]
   * instead of `StreamingDetectIntent`. `StreamingAnalyzeContent` has additional functionality for
   * Agent Assist and other CCAI products.
   *
   * <p>Note: Always use agent versions for production traffic. See [Versions and
   * environments](https://cloud.google.com/dialogflow/es/docs/agents-versions).
   *
   * <p>Sample code:
   *
   * <pre>{@code
   * // This snippet has been automatically generated and should be regarded as a code template only.
   * // It will require modifications to work:
   * // - It may require correct/in-range values for request initialization.
   * // - It may require specifying regional endpoints when creating the service client as shown in
   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
   * try (SessionsClient sessionsClient = SessionsClient.create()) {
   *   BidiStream<StreamingDetectIntentRequest, StreamingDetectIntentResponse> bidiStream =
   *       sessionsClient.streamingDetectIntentCallable().call();
   *   StreamingDetectIntentRequest request =
   *       StreamingDetectIntentRequest.newBuilder()
   *           .setSession(SessionName.ofProjectSessionName("[PROJECT]", "[SESSION]").toString())
   *           .setQueryParams(QueryParameters.newBuilder().build())
   *           .setQueryInput(QueryInput.newBuilder().build())
   *           .setSingleUtterance(true)
   *           .setOutputAudioConfig(OutputAudioConfig.newBuilder().build())
   *           .setOutputAudioConfigMask(FieldMask.newBuilder().build())
   *           .setInputAudio(ByteString.EMPTY)
   *           .setEnableDebuggingInfo(true)
   *           .build();
   *   bidiStream.send(request);
   *   for (StreamingDetectIntentResponse response : bidiStream) {
   *     // Do something when a response is received.
   *   }
   * }
   * }</pre>
   *
   * @return a bidirectional streaming callable exchanging {@link StreamingDetectIntentRequest}
   *     and {@link StreamingDetectIntentResponse} messages over gRPC
   */
  public final BidiStreamingCallable<StreamingDetectIntentRequest, StreamingDetectIntentResponse>
      streamingDetectIntentCallable() {
    return stub.streamingDetectIntentCallable();
  }
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Lists information about the supported locations for this service.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (SessionsClient sessionsClient = SessionsClient.create()) {
* ListLocationsRequest request =
* ListLocationsRequest.newBuilder()
* .setName("name3373707")
* .setFilter("filter-1274492040")
* .setPageSize(883849137)
* .setPageToken("pageToken873572522")
* .build();
* for (Location element : sessionsClient.listLocations(request).iterateAll()) {
* // doThingsWith(element);
* }
* }
* }</pre>
*
* @param request The request object containing all of the parameters for the API call.
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
public final ListLocationsPagedResponse listLocations(ListLocationsRequest request) {
return listLocationsPagedCallable().call(request);
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Lists information about the supported locations for this service.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (SessionsClient sessionsClient = SessionsClient.create()) {
* ListLocationsRequest request =
* ListLocationsRequest.newBuilder()
* .setName("name3373707")
* .setFilter("filter-1274492040")
* .setPageSize(883849137)
* .setPageToken("pageToken873572522")
* .build();
* ApiFuture<Location> future = sessionsClient.listLocationsPagedCallable().futureCall(request);
* // Do something.
* for (Location element : future.get().iterateAll()) {
* // doThingsWith(element);
* }
* }
* }</pre>
*/
public final UnaryCallable<ListLocationsRequest, ListLocationsPagedResponse>
listLocationsPagedCallable() {
return stub.listLocationsPagedCallable();
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Lists information about the supported locations for this service.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (SessionsClient sessionsClient = SessionsClient.create()) {
* ListLocationsRequest request =
* ListLocationsRequest.newBuilder()
* .setName("name3373707")
* .setFilter("filter-1274492040")
* .setPageSize(883849137)
* .setPageToken("pageToken873572522")
* .build();
* while (true) {
* ListLocationsResponse response = sessionsClient.listLocationsCallable().call(request);
* for (Location element : response.getLocationsList()) {
* // doThingsWith(element);
* }
* String nextPageToken = response.getNextPageToken();
* if (!Strings.isNullOrEmpty(nextPageToken)) {
* request = request.toBuilder().setPageToken(nextPageToken).build();
* } else {
* break;
* }
* }
* }
* }</pre>
*/
public final UnaryCallable<ListLocationsRequest, ListLocationsResponse> listLocationsCallable() {
return stub.listLocationsCallable();
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Gets information about a location.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (SessionsClient sessionsClient = SessionsClient.create()) {
* GetLocationRequest request = GetLocationRequest.newBuilder().setName("name3373707").build();
* Location response = sessionsClient.getLocation(request);
* }
* }</pre>
*
* @param request The request object containing all of the parameters for the API call.
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
public final Location getLocation(GetLocationRequest request) {
return getLocationCallable().call(request);
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Gets information about a location.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (SessionsClient sessionsClient = SessionsClient.create()) {
* GetLocationRequest request = GetLocationRequest.newBuilder().setName("name3373707").build();
* ApiFuture<Location> future = sessionsClient.getLocationCallable().futureCall(request);
* // Do something.
* Location response = future.get();
* }
* }</pre>
*/
public final UnaryCallable<GetLocationRequest, Location> getLocationCallable() {
return stub.getLocationCallable();
}
  // BackgroundResource lifecycle methods: every call below forwards to the
  // transport stub, which owns the underlying channels and executors.
  @Override
  public final void close() {
    stub.close();
  }
  @Override
  public void shutdown() {
    stub.shutdown();
  }
  @Override
  public boolean isShutdown() {
    return stub.isShutdown();
  }
  @Override
  public boolean isTerminated() {
    return stub.isTerminated();
  }
  @Override
  public void shutdownNow() {
    stub.shutdownNow();
  }
  // Blocks up to the given timeout; propagates the stub's InterruptedException.
  @Override
  public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException {
    return stub.awaitTermination(duration, unit);
  }
public static class ListLocationsPagedResponse
extends AbstractPagedListResponse<
ListLocationsRequest,
ListLocationsResponse,
Location,
ListLocationsPage,
ListLocationsFixedSizeCollection> {
public static ApiFuture<ListLocationsPagedResponse> createAsync(
PageContext<ListLocationsRequest, ListLocationsResponse, Location> context,
ApiFuture<ListLocationsResponse> futureResponse) {
ApiFuture<ListLocationsPage> futurePage =
ListLocationsPage.createEmptyPage().createPageAsync(context, futureResponse);
return ApiFutures.transform(
futurePage,
input -> new ListLocationsPagedResponse(input),
MoreExecutors.directExecutor());
}
private ListLocationsPagedResponse(ListLocationsPage page) {
super(page, ListLocationsFixedSizeCollection.createEmptyCollection());
}
}
  // One page of ListLocations results; createEmptyPage() supplies the prototype
  // instance used by the paged-response factory above.
  public static class ListLocationsPage
      extends AbstractPage<
          ListLocationsRequest, ListLocationsResponse, Location, ListLocationsPage> {
    private ListLocationsPage(
        PageContext<ListLocationsRequest, ListLocationsResponse, Location> context,
        ListLocationsResponse response) {
      super(context, response);
    }
    // Prototype page with no context/response; used only as a factory seed.
    private static ListLocationsPage createEmptyPage() {
      return new ListLocationsPage(null, null);
    }
    @Override
    protected ListLocationsPage createPage(
        PageContext<ListLocationsRequest, ListLocationsResponse, Location> context,
        ListLocationsResponse response) {
      return new ListLocationsPage(context, response);
    }
    @Override
    public ApiFuture<ListLocationsPage> createPageAsync(
        PageContext<ListLocationsRequest, ListLocationsResponse, Location> context,
        ApiFuture<ListLocationsResponse> futureResponse) {
      return super.createPageAsync(context, futureResponse);
    }
  }
  // Fixed-size grouping of Location elements for expandToFixedSizeCollection().
  public static class ListLocationsFixedSizeCollection
      extends AbstractFixedSizeCollection<
          ListLocationsRequest,
          ListLocationsResponse,
          Location,
          ListLocationsPage,
          ListLocationsFixedSizeCollection> {
    private ListLocationsFixedSizeCollection(List<ListLocationsPage> pages, int collectionSize) {
      super(pages, collectionSize);
    }
    private static ListLocationsFixedSizeCollection createEmptyCollection() {
      return new ListLocationsFixedSizeCollection(null, 0);
    }
    @Override
    protected ListLocationsFixedSizeCollection createCollection(
        List<ListLocationsPage> pages, int collectionSize) {
      return new ListLocationsFixedSizeCollection(pages, collectionSize);
    }
  }
}
|
googleapis/google-cloud-java | 35,589 | java-compute/proto-google-cloud-compute-v1/src/main/java/com/google/cloud/compute/v1/SecurityPolicyRulePreconfiguredWafConfig.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/compute/v1/compute.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.compute.v1;
/**
*
*
* <pre>
* </pre>
*
* Protobuf type {@code google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfig}
*/
public final class SecurityPolicyRulePreconfiguredWafConfig
extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfig)
SecurityPolicyRulePreconfiguredWafConfigOrBuilder {
private static final long serialVersionUID = 0L;
  // Use SecurityPolicyRulePreconfiguredWafConfig.newBuilder() to construct.
  private SecurityPolicyRulePreconfiguredWafConfig(
      com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Default-instance constructor: starts with an empty exclusions list.
  private SecurityPolicyRulePreconfiguredWafConfig() {
    exclusions_ = java.util.Collections.emptyList();
  }
  // Reflection hook used by the protobuf runtime to allocate new instances.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new SecurityPolicyRulePreconfiguredWafConfig();
  }
  // Descriptor and accessor-table plumbing, defined in the generated Compute class.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.compute.v1.Compute
        .internal_static_google_cloud_compute_v1_SecurityPolicyRulePreconfiguredWafConfig_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.compute.v1.Compute
        .internal_static_google_cloud_compute_v1_SecurityPolicyRulePreconfiguredWafConfig_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfig.class,
            com.google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfig.Builder.class);
  }
  public static final int EXCLUSIONS_FIELD_NUMBER = 208665701;
  // Backing list for the repeated `exclusions` field; immutable on a built message.
  @SuppressWarnings("serial")
  private java.util.List<
          com.google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfigExclusion>
      exclusions_;
/**
*
*
* <pre>
* A list of exclusions to apply during preconfigured WAF evaluation.
* </pre>
*
* <code>
* repeated .google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfigExclusion exclusions = 208665701;
* </code>
*/
  // Returns the (immutable) backing list directly.
  @java.lang.Override
  public java.util.List<
          com.google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfigExclusion>
      getExclusionsList() {
    return exclusions_;
  }
/**
*
*
* <pre>
* A list of exclusions to apply during preconfigured WAF evaluation.
* </pre>
*
* <code>
* repeated .google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfigExclusion exclusions = 208665701;
* </code>
*/
  // Same backing list, viewed through the read-only OrBuilder interface.
  @java.lang.Override
  public java.util.List<
          ? extends
              com.google.cloud.compute.v1
                  .SecurityPolicyRulePreconfiguredWafConfigExclusionOrBuilder>
      getExclusionsOrBuilderList() {
    return exclusions_;
  }
/**
*
*
* <pre>
* A list of exclusions to apply during preconfigured WAF evaluation.
* </pre>
*
* <code>
* repeated .google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfigExclusion exclusions = 208665701;
* </code>
*/
  // Number of exclusion entries.
  @java.lang.Override
  public int getExclusionsCount() {
    return exclusions_.size();
  }
/**
*
*
* <pre>
* A list of exclusions to apply during preconfigured WAF evaluation.
* </pre>
*
* <code>
* repeated .google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfigExclusion exclusions = 208665701;
* </code>
*/
  // Exclusion at `index`; the list throws IndexOutOfBoundsException when out of range.
  @java.lang.Override
  public com.google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfigExclusion
      getExclusions(int index) {
    return exclusions_.get(index);
  }
/**
*
*
* <pre>
* A list of exclusions to apply during preconfigured WAF evaluation.
* </pre>
*
* <code>
* repeated .google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfigExclusion exclusions = 208665701;
* </code>
*/
  // OrBuilder view of the exclusion at `index`.
  @java.lang.Override
  public com.google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfigExclusionOrBuilder
      getExclusionsOrBuilder(int index) {
    return exclusions_.get(index);
  }
  // Tri-state cache for isInitialized(): -1 unknown, 0 false, 1 true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // This message has no required fields, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Emit each exclusion under field number 208665701, then any unknown fields.
    for (int i = 0; i < exclusions_.size(); i++) {
      output.writeMessage(208665701, exclusions_.get(i));
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // Memoized; -1 marks "not yet computed".
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    for (int i = 0; i < exclusions_.size(); i++) {
      size +=
          com.google.protobuf.CodedOutputStream.computeMessageSize(208665701, exclusions_.get(i));
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfig)) {
      return super.equals(obj);
    }
    com.google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfig other =
        (com.google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfig) obj;
    // Messages are equal when their exclusions and unknown fields both match.
    if (!getExclusionsList().equals(other.getExclusionsList())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    // Only mix in the repeated field when it is non-empty, mirroring equals().
    if (getExclusionsCount() > 0) {
      hash = (37 * hash) + EXCLUSIONS_FIELD_NUMBER;
      hash = (53 * hash) + getExclusionsList().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parsing entry points; all overloads delegate to PARSER.
  public static com.google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfig parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfig parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfig parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfig parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfig parseFrom(
      byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfig parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  // Stream overloads go through GeneratedMessageV3 helpers so IOExceptions
  // surface unchanged rather than being wrapped.
  public static com.google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfig parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfig parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfig
      parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfig
      parseDelimitedFrom(
          java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfig parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfig parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  // Builders always start from the shared default instance.
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(
      com.google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfig prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // The default instance yields a fresh builder; others seed it with their state.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* </pre>
*
* Protobuf type {@code google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfig}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfig)
com.google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfigOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.compute.v1.Compute
          .internal_static_google_cloud_compute_v1_SecurityPolicyRulePreconfiguredWafConfig_descriptor;
    }
    // Shares the accessor table with the enclosing message type.
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.compute.v1.Compute
          .internal_static_google_cloud_compute_v1_SecurityPolicyRulePreconfiguredWafConfig_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfig.class,
              com.google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfig.Builder.class);
    }
    // Construct using
    // com.google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfig.newBuilder()
    private Builder() {}
    // Parent-aware constructor used when this builder is nested in another builder.
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      // Reset the repeated field: drop the plain list, or clear the nested builder.
      if (exclusionsBuilder_ == null) {
        exclusions_ = java.util.Collections.emptyList();
      } else {
        exclusions_ = null;
        exclusionsBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.compute.v1.Compute
          .internal_static_google_cloud_compute_v1_SecurityPolicyRulePreconfiguredWafConfig_descriptor;
    }
    // Shared default instance; never mutated.
    @java.lang.Override
    public com.google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfig
        getDefaultInstanceForType() {
      return com.google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfig
          .getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfig build() {
      // This message has no required fields, but the standard generated
      // initialization check is kept for uniformity.
      com.google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfig result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfig buildPartial() {
      com.google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfig result =
          new com.google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfig(this);
      // Repeated fields are copied first, then bit-guarded singular fields.
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    private void buildPartialRepeatedFields(
        com.google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfig result) {
      if (exclusionsBuilder_ == null) {
        // Freeze the locally-held list exactly once, then hand it to the message.
        if (((bitField0_ & 0x00000001) != 0)) {
          exclusions_ = java.util.Collections.unmodifiableList(exclusions_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.exclusions_ = exclusions_;
      } else {
        result.exclusions_ = exclusionsBuilder_.build();
      }
    }
private void buildPartial0(
com.google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfig result) {
int from_bitField0_ = bitField0_;
}
    // Generic reflection-based overrides: all defer to GeneratedMessageV3.Builder.
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      // Use the typed merge when possible; otherwise fall back to reflection.
      if (other instanceof com.google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfig) {
        return mergeFrom(
            (com.google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfig) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    public Builder mergeFrom(
        com.google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfig other) {
      if (other
          == com.google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfig
              .getDefaultInstance()) return this;
      if (exclusionsBuilder_ == null) {
        // List mode: adopt the other message's (immutable) list when ours is
        // empty; otherwise append into our own mutable copy.
        if (!other.exclusions_.isEmpty()) {
          if (exclusions_.isEmpty()) {
            exclusions_ = other.exclusions_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureExclusionsIsMutable();
            exclusions_.addAll(other.exclusions_);
          }
          onChanged();
        }
      } else {
        // Builder mode: an empty nested builder may be dropped back to list
        // mode (cheaper), unless the runtime forces field builders.
        if (!other.exclusions_.isEmpty()) {
          if (exclusionsBuilder_.isEmpty()) {
            exclusionsBuilder_.dispose();
            exclusionsBuilder_ = null;
            exclusions_ = other.exclusions_;
            bitField0_ = (bitField0_ & ~0x00000001);
            exclusionsBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getExclusionsFieldBuilder()
                    : null;
          } else {
            exclusionsBuilder_.addAllMessages(other.exclusions_);
          }
        }
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    // No required fields exist, so any builder state is valid.
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            // 1669325610 == (208665701 << 3) | 2: the `exclusions` field,
            // length-delimited wire type.
            case 1669325610:
              {
                com.google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfigExclusion m =
                    input.readMessage(
                        com.google.cloud.compute.v1
                            .SecurityPolicyRulePreconfiguredWafConfigExclusion.parser(),
                        extensionRegistry);
                if (exclusionsBuilder_ == null) {
                  ensureExclusionsIsMutable();
                  exclusions_.add(m);
                } else {
                  exclusionsBuilder_.addMessage(m);
                }
                break;
              } // case 1669325610
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    private int bitField0_;
    // Local copy of the repeated field; bit 0x1 of bitField0_ tracks whether
    // exclusions_ is a private mutable ArrayList (vs. a shared immutable list).
    private java.util.List<
            com.google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfigExclusion>
        exclusions_ = java.util.Collections.emptyList();
    private void ensureExclusionsIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        exclusions_ =
            new java.util.ArrayList<
                com.google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfigExclusion>(
                exclusions_);
        bitField0_ |= 0x00000001;
      }
    }
    // Lazily-created nested builder; once non-null it owns the field contents.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfigExclusion,
            com.google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfigExclusion.Builder,
            com.google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfigExclusionOrBuilder>
        exclusionsBuilder_;
/**
*
*
* <pre>
* A list of exclusions to apply during preconfigured WAF evaluation.
* </pre>
*
* <code>
* repeated .google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfigExclusion exclusions = 208665701;
* </code>
*/
    public java.util.List<
            com.google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfigExclusion>
        getExclusionsList() {
      // Never expose the internal mutable list directly.
      if (exclusionsBuilder_ == null) {
        return java.util.Collections.unmodifiableList(exclusions_);
      } else {
        return exclusionsBuilder_.getMessageList();
      }
    }
/**
*
*
* <pre>
* A list of exclusions to apply during preconfigured WAF evaluation.
* </pre>
*
* <code>
* repeated .google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfigExclusion exclusions = 208665701;
* </code>
*/
    public int getExclusionsCount() {
      // Size comes from whichever representation currently owns the field.
      if (exclusionsBuilder_ == null) {
        return exclusions_.size();
      } else {
        return exclusionsBuilder_.getCount();
      }
    }
/**
*
*
* <pre>
* A list of exclusions to apply during preconfigured WAF evaluation.
* </pre>
*
* <code>
* repeated .google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfigExclusion exclusions = 208665701;
* </code>
*/
    public com.google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfigExclusion
        getExclusions(int index) {
      // Read from the list or the nested builder, whichever is active.
      if (exclusionsBuilder_ == null) {
        return exclusions_.get(index);
      } else {
        return exclusionsBuilder_.getMessage(index);
      }
    }
/**
*
*
* <pre>
* A list of exclusions to apply during preconfigured WAF evaluation.
* </pre>
*
* <code>
* repeated .google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfigExclusion exclusions = 208665701;
* </code>
*/
    public Builder setExclusions(
        int index,
        com.google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfigExclusion value) {
      // Replaces the element at `index`; rejects null values.
      if (exclusionsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureExclusionsIsMutable();
        exclusions_.set(index, value);
        onChanged();
      } else {
        exclusionsBuilder_.setMessage(index, value);
      }
      return this;
    }
/**
*
*
* <pre>
* A list of exclusions to apply during preconfigured WAF evaluation.
* </pre>
*
* <code>
* repeated .google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfigExclusion exclusions = 208665701;
* </code>
*/
    public Builder setExclusions(
        int index,
        com.google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfigExclusion.Builder
            builderForValue) {
      // Builder overload: the value is built exactly once before insertion.
      if (exclusionsBuilder_ == null) {
        ensureExclusionsIsMutable();
        exclusions_.set(index, builderForValue.build());
        onChanged();
      } else {
        exclusionsBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }
/**
*
*
* <pre>
* A list of exclusions to apply during preconfigured WAF evaluation.
* </pre>
*
* <code>
* repeated .google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfigExclusion exclusions = 208665701;
* </code>
*/
    public Builder addExclusions(
        com.google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfigExclusion value) {
      // Appends one exclusion; rejects null values.
      if (exclusionsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureExclusionsIsMutable();
        exclusions_.add(value);
        onChanged();
      } else {
        exclusionsBuilder_.addMessage(value);
      }
      return this;
    }
/**
*
*
* <pre>
* A list of exclusions to apply during preconfigured WAF evaluation.
* </pre>
*
* <code>
* repeated .google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfigExclusion exclusions = 208665701;
* </code>
*/
    public Builder addExclusions(
        int index,
        com.google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfigExclusion value) {
      // Inserts at `index`, shifting later elements; rejects null values.
      if (exclusionsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureExclusionsIsMutable();
        exclusions_.add(index, value);
        onChanged();
      } else {
        exclusionsBuilder_.addMessage(index, value);
      }
      return this;
    }
/**
*
*
* <pre>
* A list of exclusions to apply during preconfigured WAF evaluation.
* </pre>
*
* <code>
* repeated .google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfigExclusion exclusions = 208665701;
* </code>
*/
    public Builder addExclusions(
        com.google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfigExclusion.Builder
            builderForValue) {
      // Builder overload: builds the value once, then appends it.
      if (exclusionsBuilder_ == null) {
        ensureExclusionsIsMutable();
        exclusions_.add(builderForValue.build());
        onChanged();
      } else {
        exclusionsBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }
/**
*
*
* <pre>
* A list of exclusions to apply during preconfigured WAF evaluation.
* </pre>
*
* <code>
* repeated .google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfigExclusion exclusions = 208665701;
* </code>
*/
    public Builder addExclusions(
        int index,
        com.google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfigExclusion.Builder
            builderForValue) {
      // Builder overload: builds the value once, then inserts at `index`.
      if (exclusionsBuilder_ == null) {
        ensureExclusionsIsMutable();
        exclusions_.add(index, builderForValue.build());
        onChanged();
      } else {
        exclusionsBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }
/**
*
*
* <pre>
* A list of exclusions to apply during preconfigured WAF evaluation.
* </pre>
*
* <code>
* repeated .google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfigExclusion exclusions = 208665701;
* </code>
*/
    public Builder addAllExclusions(
        java.lang.Iterable<
                ? extends
                    com.google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfigExclusion>
            values) {
      // Bulk append; AbstractMessageLite.Builder.addAll also null-checks elements.
      if (exclusionsBuilder_ == null) {
        ensureExclusionsIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, exclusions_);
        onChanged();
      } else {
        exclusionsBuilder_.addAllMessages(values);
      }
      return this;
    }
/**
 *
 *
 * <pre>
 * A list of exclusions to apply during preconfigured WAF evaluation.
 * </pre>
 *
 * <code>
 * repeated .google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfigExclusion exclusions = 208665701;
 * </code>
 */
public Builder clearExclusions() {
  if (exclusionsBuilder_ == null) {
    // Reset to the shared empty list and clear the has-been-modified bit for this field.
    exclusions_ = java.util.Collections.emptyList();
    bitField0_ = (bitField0_ & ~0x00000001);
    onChanged();
  } else {
    exclusionsBuilder_.clear();
  }
  return this;
}
/**
 *
 *
 * <pre>
 * A list of exclusions to apply during preconfigured WAF evaluation.
 * </pre>
 *
 * <code>
 * repeated .google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfigExclusion exclusions = 208665701;
 * </code>
 */
public Builder removeExclusions(int index) {
  if (exclusionsBuilder_ == null) {
    // Remove from the local list (may throw IndexOutOfBoundsException).
    ensureExclusionsIsMutable();
    exclusions_.remove(index);
    onChanged();
  } else {
    exclusionsBuilder_.remove(index);
  }
  return this;
}
/**
 *
 *
 * <pre>
 * A list of exclusions to apply during preconfigured WAF evaluation.
 * </pre>
 *
 * <code>
 * repeated .google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfigExclusion exclusions = 208665701;
 * </code>
 */
public com.google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfigExclusion.Builder
    getExclusionsBuilder(int index) {
  // Forces creation of the field builder: from here on exclusionsBuilder_ owns the list.
  return getExclusionsFieldBuilder().getBuilder(index);
}
/**
 *
 *
 * <pre>
 * A list of exclusions to apply during preconfigured WAF evaluation.
 * </pre>
 *
 * <code>
 * repeated .google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfigExclusion exclusions = 208665701;
 * </code>
 */
public com.google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfigExclusionOrBuilder
    getExclusionsOrBuilder(int index) {
  if (exclusionsBuilder_ == null) {
    // Read-only access does not force builder creation; serve from the local list.
    return exclusions_.get(index);
  } else {
    return exclusionsBuilder_.getMessageOrBuilder(index);
  }
}
/**
 *
 *
 * <pre>
 * A list of exclusions to apply during preconfigured WAF evaluation.
 * </pre>
 *
 * <code>
 * repeated .google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfigExclusion exclusions = 208665701;
 * </code>
 */
public java.util.List<
        ? extends
            com.google.cloud.compute.v1
                .SecurityPolicyRulePreconfiguredWafConfigExclusionOrBuilder>
    getExclusionsOrBuilderList() {
  if (exclusionsBuilder_ != null) {
    return exclusionsBuilder_.getMessageOrBuilderList();
  } else {
    // Wrap the local list so callers cannot mutate builder state through the view.
    return java.util.Collections.unmodifiableList(exclusions_);
  }
}
/**
 *
 *
 * <pre>
 * A list of exclusions to apply during preconfigured WAF evaluation.
 * </pre>
 *
 * <code>
 * repeated .google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfigExclusion exclusions = 208665701;
 * </code>
 */
public com.google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfigExclusion.Builder
    addExclusionsBuilder() {
  // Appends a builder seeded from the default instance and returns it for in-place editing.
  return getExclusionsFieldBuilder()
      .addBuilder(
          com.google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfigExclusion
              .getDefaultInstance());
}
/**
 *
 *
 * <pre>
 * A list of exclusions to apply during preconfigured WAF evaluation.
 * </pre>
 *
 * <code>
 * repeated .google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfigExclusion exclusions = 208665701;
 * </code>
 */
public com.google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfigExclusion.Builder
    addExclusionsBuilder(int index) {
  // Same as addExclusionsBuilder(), but inserts the new builder at the given index.
  return getExclusionsFieldBuilder()
      .addBuilder(
          index,
          com.google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfigExclusion
              .getDefaultInstance());
}
/**
 *
 *
 * <pre>
 * A list of exclusions to apply during preconfigured WAF evaluation.
 * </pre>
 *
 * <code>
 * repeated .google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfigExclusion exclusions = 208665701;
 * </code>
 */
public java.util.List<
        com.google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfigExclusion.Builder>
    getExclusionsBuilderList() {
  return getExclusionsFieldBuilder().getBuilderList();
}
// Lazily creates the RepeatedFieldBuilderV3 for the exclusions field. After the first
// call, exclusions_ is nulled out and the builder becomes the single source of truth.
private com.google.protobuf.RepeatedFieldBuilderV3<
        com.google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfigExclusion,
        com.google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfigExclusion.Builder,
        com.google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfigExclusionOrBuilder>
    getExclusionsFieldBuilder() {
  if (exclusionsBuilder_ == null) {
    exclusionsBuilder_ =
        new com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfigExclusion,
            com.google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfigExclusion
                .Builder,
            com.google.cloud.compute.v1
                .SecurityPolicyRulePreconfiguredWafConfigExclusionOrBuilder>(
            // Hand over the current list; the bit flag tells the builder whether it was mutated.
            exclusions_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
    exclusions_ = null;
  }
  return exclusionsBuilder_;
}
// Standard generated passthroughs: unknown-field handling is implemented by the superclass.
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfig)
}
// @@protoc_insertion_point(class_scope:google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfig)
// The canonical immutable default (all-fields-unset) instance for this message type.
private static final com.google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfig
    DEFAULT_INSTANCE;
static {
  DEFAULT_INSTANCE = new com.google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfig();
}
// Returns the shared default instance; safe to use as a sentinel and for newBuilderForType.
public static com.google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfig
    getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Parser singleton used by parseFrom/parseDelimitedFrom and the runtime.
private static final com.google.protobuf.Parser<SecurityPolicyRulePreconfiguredWafConfig> PARSER =
    new com.google.protobuf.AbstractParser<SecurityPolicyRulePreconfiguredWafConfig>() {
      @java.lang.Override
      public SecurityPolicyRulePreconfiguredWafConfig parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Attach whatever was parsed so far so callers can inspect the partial message.
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          // Wrap plain I/O failures in the protobuf-specific exception type.
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
// Static accessor for the parser singleton.
public static com.google.protobuf.Parser<SecurityPolicyRulePreconfiguredWafConfig> parser() {
  return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<SecurityPolicyRulePreconfiguredWafConfig> getParserForType() {
  return PARSER;
}
@java.lang.Override
public com.google.cloud.compute.v1.SecurityPolicyRulePreconfiguredWafConfig
    getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
|
apache/harmony | 35,821 | classlib/modules/misc/src/main/java/org/apache/harmony/misc/accessors/ObjectAccessor.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.harmony.misc.accessors;
import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.lang.reflect.Member;
import java.lang.reflect.Method;
import java.util.Hashtable;
/**
 * Provides direct access to classes and objects. This class makes it possible to overcome
 * certain limitations of the reflection API, such as setting constant fields or
 * allocating objects without calling their constructors. The following groups of operations
 * are supported:
* <ul>
* <li><b>getField/MethodID,getStaticField/MethodID</b> - used to get ID for methods and fields.
* <li><b>getXXX/setXXX</b> - used to read and write non-static fields in objects
* (XXX stands for a field type);
* <li><b>getStaticXXX/setStaticXXX</b> - used to read and write static fields in classes
* (XXX stands for a field type);
* <li><b>invokeStaticXXX</b> - used to call static methods in a class (XXX means return type);
* <li><b>invokeVirtualXXX</b> - used to call virtual methods for object (XXX means return type);
* <li><b>invokeNonVirtualXXX</b> - used to call non-virtual methods for
* the given class and object (XXX means return type);
* <li><b>allocateObject, newInstance</b> - provides a fine control over object
* construction;
* <li><b>hasStaticInitializer</b> - informational methods about class;
* <li><b>monitorEnter/Exit</b> - enter/exit monitor associated with the given object
* </ul>
 * Fields and methods are identified in the class with the help of IDs whose actual meaning
 * is implementation dependent.
 * Depending on the platform, IDs may represent real offsets in physical memory,
 * though this is not always guaranteed. Unlike the {@link ArrayAccessor} class, users should not rely on
 * any correspondence between IDs and the memory address space. However, it is guaranteed that IDs, once
 * obtained, are valid during the whole lifetime of the given class and can equally be
 * applied to all its instances.
 * <p>
 * No security checks are made while reading and writing an object's fields, or while calling an object's methods. In addition to
 * variables, this class also allows setting the values of constant fields within an object.
* <p>
* For accessing Array objects, please use the {@link ArrayAccessor} class.
*
* @see ArrayAccessor
*/
public class ObjectAccessor {
/**
 * The lazily created singleton instance; guarded by the class lock in
 * {@link #getInstance()}.
 */
private static ObjectAccessor instance;

/**
 * Returns the singleton {@code ObjectAccessor}, loading the native
 * "accessors" library on first use.
 * <p>
 * Synchronized: the original unsynchronized lazy initialization allowed two
 * concurrent first callers to each observe {@code instance == null} and
 * create separate instances, breaking the singleton contract.
 *
 * @return the shared {@code ObjectAccessor} instance
 */
static synchronized ObjectAccessor getInstance() {
    if (instance == null) {
        System.loadLibrary("accessors"); //$NON-NLS-1$
        instance = new ObjectAccessor();
    }
    return instance;
}

/** Private constructor: instances are obtained only via {@link #getInstance()}. */
private ObjectAccessor(){}

// Raw Hashtable kept for source compatibility with callers later in this file.
// NOTE(review): not referenced in this chunk — presumably a method-ID cache; verify.
private Hashtable methods = new Hashtable();
//Boolean field access

/**
 * Reads a boolean field for the given object.
 * Use the {@link #getFieldID(Field)} method to obtain the ID for the specific field
 * within a class. For static fields, use the {@link #getStaticFieldID(Class, String)} method.
 * @param o object whose field needs to be read
 * @param fieldID field ID.
 * @return boolean field value
 * @see #setBoolean(Object, long, boolean)
 */
public final native boolean getBoolean(Object o, long fieldID);

/**
 * Reads a static boolean field for the given class.
 * Use the {@link #getStaticFieldID(Class, String)} method to obtain the ID for the specific field
 * within a class.
 * @param c class whose field needs to be read
 * @param fieldID field ID.
 * @return boolean field value
 * @see #setStaticBoolean(Class, long, boolean)
 */
public final native boolean getStaticBoolean(Class c, long fieldID);

/**
 * Writes a boolean field for the given object.
 * Use the {@link #getFieldID(Field)} method to obtain the ID for the specific field
 * within a class. For static fields, use the {@link #getStaticFieldID(Class, String)} method.
 * @param o object whose field needs to be set
 * @param fieldID field ID
 * @param value field value to be set
 * @see #getBoolean(Object, long)
 */
public final native void setBoolean(Object o, long fieldID, boolean value);

/**
 * Writes a static boolean field for the given class.
 * Use the {@link #getStaticFieldID(Class, String)} method to obtain the ID for the specific field
 * within a class.
 * @param c class whose field needs to be set
 * @param fieldID field ID
 * @param value field value to be set
 * @see #getStaticBoolean(Class, long)
 */
public final native void setStaticBoolean(Class c, long fieldID, boolean value);
//Byte field access

/**
 * Reads a byte field for the given object.
 * Use the {@link #getFieldID(Field)} method to obtain the ID for the specific field
 * within a class. For static fields, use the {@link #getStaticFieldID(Class, String)} method.
 * @param o object whose field needs to be read
 * @param fieldID field ID.
 * @return byte field value
 * @see #setByte(Object, long, byte)
 */
public final native byte getByte(Object o, long fieldID);

/**
 * Reads a static byte field for the given class.
 * Use the {@link #getStaticFieldID(Class, String)} method to obtain the ID for the specific field
 * within a class.
 * @param c class whose field needs to be read
 * @param fieldID field ID.
 * @return byte field value
 * @see #setStaticByte(Class, long, byte)
 */
public final native byte getStaticByte(Class c, long fieldID);

/**
 * Writes a byte field for the given object.
 * Use the {@link #getFieldID(Field)} method to obtain the ID for the specific field
 * within a class. For static fields, use the {@link #getStaticFieldID(Class, String)} method.
 * @param o object whose field needs to be set
 * @param fieldID field ID
 * @param value field value to be set
 * @see #getByte(Object, long)
 */
public final native void setByte(Object o, long fieldID, byte value);

/**
 * Writes a static byte field for the given class.
 * Use the {@link #getStaticFieldID(Class, String)} method to obtain the ID for the specific field
 * within a class.
 * @param c class whose field needs to be set
 * @param fieldID field ID
 * @param value field value to be set
 * @see #getStaticByte(Class, long)
 */
public final native void setStaticByte(Class c, long fieldID, byte value);
//Char field access

/**
 * Reads a char field for the given object.
 * Use the {@link #getFieldID(Field)} method to obtain the ID for the specific field
 * within a class. For static fields, use the {@link #getStaticFieldID(Class, String)} method.
 * @param o object whose field needs to be read
 * @param fieldID field ID.
 * @return char field value
 * @see #setChar(Object, long, char)
 */
public final native char getChar(Object o, long fieldID);

/**
 * Reads a static char field for the given class.
 * Use the {@link #getStaticFieldID(Class, String)} method to obtain the ID for the specific field
 * within a class.
 * @param c class whose field needs to be read
 * @param fieldID field ID.
 * @return char field value
 * @see #setStaticChar(Class, long, char)
 */
public final native char getStaticChar(Class c, long fieldID);

/**
 * Writes a char field for the given object.
 * Use the {@link #getFieldID(Field)} method to obtain the ID for the specific field
 * within a class. For static fields, use the {@link #getStaticFieldID(Class, String)} method.
 * @param o object whose field needs to be set
 * @param fieldID field ID
 * @param value field value to be set
 * @see #getChar(Object, long)
 */
public final native void setChar(Object o, long fieldID, char value);

/**
 * Writes a static char field for the given class.
 * Use the {@link #getStaticFieldID(Class, String)} method to obtain the ID for the specific field
 * within a class.
 * @param c class whose field needs to be set
 * @param fieldID field ID
 * @param value field value to be set
 * @see #getStaticChar(Class, long)
 */
public final native void setStaticChar(Class c, long fieldID, char value);
//Short field access

/**
 * Reads a short field for the given object.
 * Use the {@link #getFieldID(Field)} method to obtain the ID for the specific field
 * within a class. For static fields, use the {@link #getStaticFieldID(Class, String)} method.
 * @param o object whose field needs to be read
 * @param fieldID field ID.
 * @return short field value
 * @see #setShort(Object, long, short)
 */
public final native short getShort(Object o, long fieldID);

/**
 * Reads a static short field for the given class.
 * Use the {@link #getStaticFieldID(Class, String)} method to obtain the ID for the specific field
 * within a class.
 * @param c class whose field needs to be read
 * @param fieldID field ID.
 * @return short field value
 * @see #setStaticShort(Class, long, short)
 */
public final native short getStaticShort(Class c, long fieldID);

/**
 * Writes a short field for the given object.
 * Use the {@link #getFieldID(Field)} method to obtain the ID for the specific field
 * within a class. For static fields, use the {@link #getStaticFieldID(Class, String)} method.
 * @param o object whose field needs to be set
 * @param fieldID field ID
 * @param value field value to be set
 * @see #getShort(Object, long)
 */
public final native void setShort(Object o, long fieldID, short value);

/**
 * Writes a static short field for the given class.
 * Use the {@link #getStaticFieldID(Class, String)} method to obtain the ID for the specific field
 * within a class.
 * @param c class whose field needs to be set
 * @param fieldID field ID
 * @param value field value to be set
 * @see #getStaticShort(Class, long)
 */
public final native void setStaticShort(Class c, long fieldID, short value);
//Int field access

/**
 * Reads an int field for the given object.
 * Use the {@link #getFieldID(Field)} method to obtain the ID for the specific field
 * within a class. For static fields, use the {@link #getStaticFieldID(Class, String)} method.
 * @param o object whose field needs to be read
 * @param fieldID field ID.
 * @return int field value
 * @see #setInt(Object, long, int)
 */
public final native int getInt(Object o, long fieldID);

/**
 * Reads a static int field for the given class.
 * Use the {@link #getStaticFieldID(Class, String)} method to obtain the ID for the specific field
 * within a class.
 * @param c class whose field needs to be read
 * @param fieldID field ID.
 * @return int field value
 * @see #setStaticInt(Class, long, int)
 */
public final native int getStaticInt(Class c, long fieldID);

/**
 * Writes an int field for the given object.
 * Use the {@link #getFieldID(Field)} method to obtain the ID for the specific field
 * within a class. For static fields, use the {@link #getStaticFieldID(Class, String)} method.
 * @param o object whose field needs to be set
 * @param fieldID field ID
 * @param value field value to be set
 * @see #getInt(Object, long)
 */
public final native void setInt(Object o, long fieldID, int value);

/**
 * Writes a static int field for the given class.
 * Use the {@link #getStaticFieldID(Class, String)} method to obtain the ID for the specific field
 * within a class.
 * @param c class whose field needs to be set
 * @param fieldID field ID
 * @param value field value to be set
 * @see #getStaticInt(Class, long)
 */
public final native void setStaticInt(Class c, long fieldID, int value);
//Long field access

/**
 * Reads a long field for the given object.
 * Use the {@link #getFieldID(Field)} method to obtain the ID for the specific field
 * within a class. For static fields, use the {@link #getStaticFieldID(Class, String)} method.
 * @param o object whose field needs to be read
 * @param fieldID field ID.
 * @return long field value
 * @see #setLong(Object, long, long)
 */
public final native long getLong(Object o, long fieldID);

/**
 * Reads a static long field for the given class.
 * Use the {@link #getStaticFieldID(Class, String)} method to obtain the ID for the specific field
 * within a class.
 * @param c class whose field needs to be read
 * @param fieldID field ID.
 * @return long field value
 * @see #setStaticLong(Class, long, long)
 */
public final native long getStaticLong(Class c, long fieldID);

/**
 * Writes a long field for the given object.
 * Use the {@link #getFieldID(Field)} method to obtain the ID for the specific field
 * within a class. For static fields, use the {@link #getStaticFieldID(Class, String)} method.
 * @param o object whose field needs to be set
 * @param fieldID field ID
 * @param value field value to be set
 * @see #getLong(Object, long)
 */
public final native void setLong(Object o, long fieldID, long value);

/**
 * Writes a static long field for the given class.
 * Use the {@link #getStaticFieldID(Class, String)} method to obtain the ID for the specific field
 * within a class.
 * @param c class whose field needs to be set
 * @param fieldID field ID
 * @param value field value to be set
 * @see #getStaticLong(Class, long)
 */
public final native void setStaticLong(Class c, long fieldID, long value);
//Float field access

/**
 * Reads a float field for the given object.
 * Use the {@link #getFieldID(Field)} method to obtain the ID for the specific field
 * within a class. For static fields, use the {@link #getStaticFieldID(Class, String)} method.
 * @param o object whose field needs to be read
 * @param fieldID field ID.
 * @return float field value
 * @see #setFloat(Object, long, float)
 */
public final native float getFloat(Object o, long fieldID);

/**
 * Reads a static float field for the given class.
 * Use the {@link #getStaticFieldID(Class, String)} method to obtain the ID for the specific field
 * within a class.
 * @param c class whose field needs to be read
 * @param fieldID field ID.
 * @return float field value
 * @see #setStaticFloat(Class, long, float)
 */
public final native float getStaticFloat(Class c, long fieldID);

/**
 * Writes a float field for the given object.
 * Use the {@link #getFieldID(Field)} method to obtain the ID for the specific field
 * within a class. For static fields, use the {@link #getStaticFieldID(Class, String)} method.
 * @param o object whose field needs to be set
 * @param fieldID field ID
 * @param value field value to be set
 * @see #getFloat(Object, long)
 */
public final native void setFloat(Object o, long fieldID, float value);

/**
 * Writes a static float field for the given class.
 * Use the {@link #getStaticFieldID(Class, String)} method to obtain the ID for the specific field
 * within a class.
 * @param c class whose field needs to be set
 * @param fieldID field ID
 * @param value field value to be set
 * @see #getStaticFloat(Class, long)
 */
public final native void setStaticFloat(Class c, long fieldID, float value);
//Double field access

/**
 * Reads a double field for the given object.
 * Use the {@link #getFieldID(Field)} method to obtain the ID for the specific field
 * within a class. For static fields, use the {@link #getStaticFieldID(Class, String)} method.
 * @param o object whose field needs to be read
 * @param fieldID field ID.
 * @return double field value
 * @see #setDouble(Object, long, double)
 */
public final native double getDouble(Object o, long fieldID);

/**
 * Reads a static double field for the given class.
 * Use the {@link #getStaticFieldID(Class, String)} method to obtain the ID for the specific field
 * within a class.
 * @param c class whose field needs to be read
 * @param fieldID field ID.
 * @return double field value
 * @see #setStaticDouble(Class, long, double)
 */
public final native double getStaticDouble(Class c, long fieldID);

/**
 * Writes a double field for the given object.
 * Use the {@link #getFieldID(Field)} method to obtain the ID for the specific field
 * within a class. For static fields, use the {@link #getStaticFieldID(Class, String)} method.
 * @param o object whose field needs to be set
 * @param fieldID field ID
 * @param value field value to be set
 * @see #getDouble(Object, long)
 */
public final native void setDouble(Object o, long fieldID, double value);

/**
 * Writes a static double field for the given class.
 * Use the {@link #getStaticFieldID(Class, String)} method to obtain the ID for the specific field
 * within a class.
 * @param c class whose field needs to be set
 * @param fieldID field ID
 * @param value field value to be set
 * @see #getStaticDouble(Class, long)
 */
public final native void setStaticDouble(Class c, long fieldID, double value);
//Object field access

/**
 * Reads an Object field for the given object.
 * Use the {@link #getFieldID(Field)} method to obtain the ID for the specific field
 * within a class. For static fields, use the {@link #getStaticFieldID(Class, String)} method.
 * @param o object whose field needs to be read
 * @param fieldID field ID.
 * @return Object field value
 * @see #setObject(Object, long, Object)
 */
public final native Object getObject(Object o, long fieldID);

/**
 * Reads a static Object field for the given class.
 * Use the {@link #getStaticFieldID(Class, String)} method to obtain the ID for the specific field
 * within a class.
 * @param c class whose field needs to be read
 * @param fieldID field ID.
 * @return Object field value
 * @see #setStaticObject(Class, long, Object)
 */
public final native Object getStaticObject(Class c, long fieldID);

/**
 * Writes an Object field for the given object. No type check is performed before the write.
 * Use the {@link #getFieldID(Field)} method to obtain the ID for the specific field
 * within a class. For static fields, use the {@link #getStaticFieldID(Class, String)} method.
 * @param o object whose field needs to be set
 * @param fieldID field ID
 * @param value field value to be set
 * @see #getObject(Object, long)
 */
public final native void setObject(Object o, long fieldID, Object value);

/**
 * Writes a static Object field for the given class. No type check is performed before the write.
 * Use the {@link #getStaticFieldID(Class, String)} method to obtain the ID for the specific field
 * within a class.
 * @param c class whose field needs to be set
 * @param fieldID field ID
 * @param value field value to be set
 * @see #getStaticObject(Class, long)
 */
public final native void setStaticObject(Class c, long fieldID, Object value);
/**
 * Returns the ID for the field with the given name, searching the class
 * itself and then each superclass in turn.
 * For static fields, use the {@link #getStaticFieldID(Class, String)} method.
 *
 * @param c class containing the field
 * @param name field name
 * @return field ID
 * @throws NoSuchFieldError if no class in the hierarchy declares the field
 * @throws RuntimeException wrapping a SecurityException from reflection
 */
public final long getFieldID(Class c, String name) {
    // Iterative walk up the superclass chain (equivalent to the recursive lookup).
    Class current = c;
    while (true) {
        try {
            return getFieldID(current.getDeclaredField(name));
        } catch (SecurityException e) {
            throw new RuntimeException(e);
        } catch (NoSuchFieldException e) {
            Class parent = current.getSuperclass();
            if (parent == null) {
                // Top of the hierarchy reached without finding the field.
                throw new NoSuchFieldError(e.getMessage());
            }
            current = parent;
        }
    }
}
/**
 * Returns the ID for a field with the given name and type.
 * This may be faster than getting the field using its name only.
 * For static fields, use the {@link #getStaticFieldID(Class, String)} method.
 *
 * @param c class containing field
 * @param name field name
 * @param type field type
 * @return field ID
 * @throws NoSuchFieldError if the field could not be found
 */
public final long getFieldID(Class c, String name, Class type) {
    // Delegates to the (Class, String, String) overload with a JNI-style type descriptor.
    return getFieldID(c, name, getSystemName(type));
}
/**
 * Returns the ID for the static field with the given name.
 * Delegates to {@link #getFieldID(Class, String)}: field IDs do not
 * distinguish static from instance fields in this implementation.
 *
 * @param c class containing static field
 * @param name field name
 * @return field ID
 * @throws NoSuchFieldError if the field could not be found
 */
public final long getStaticFieldID(Class c, String name) {
    return getFieldID(c, name);
}
/**
 * Returns the ID for the static field with the given name and type.
 * This may be faster than getting the field using its name only.
 *
 * @param c class containing static field
 * @param name field name
 * @param type field type
 * @return field ID
 * @throws NoSuchFieldError if the field could not be found
 */
public final long getStaticFieldID(Class c, String name, Class type) {
    // Delegates to the (Class, String, String) overload with a JNI-style type descriptor.
    return getStaticFieldID(c, name, getSystemName(type));
}
/**
 * Returns the ID for the reflected field.
 * Implemented natively; the returned value is opaque and implementation dependent.
 *
 * @param f reflected field
 * @return field ID
 */
public final native long getFieldID(Field f);
/**
 * Returns the ID for the reflected static field.
 * Default implementation delegates to the
 * {@link #getFieldID(Field)} call.
 *
 * @param f reflected field
 * @return field ID
 */
public final long getStaticFieldID(Field f) {
    return getFieldID(f);
}
/**
 * Returns the ID for the specified method or constructor.
 * Use class constants for primitive parameter types. For example,
 * for <code>byte</code> type use the {@link java.lang.Byte#TYPE} class.
 * @param c a class the method belongs to
 * @param name method name or <code>null</code> in case of constructor
 * @param parameterTypes array of parameter types.
 * @return method ID
 * @throws NoSuchMethodError if the method could not be found
 * @see #invokeVirtualVoid(Object, long, Object[])
 * @see #invokeNonVirtualVoid(Class, Object, long, Object[])
 * @see #newInstance(Class, long, Object[])
 */
public final long getMethodID(Class c, String name, Class[] parameterTypes) {
    // Delegates to the private resolver with isStatic == false.
    return getMethodID(c, name, parameterTypes, false);
}
/**
 * Builds a JNI-style "(&lt;params&gt;)&lt;ret&gt;" signature for the requested method or
 * constructor and resolves its ID via the native lookup.
 *
 * @param c class declaring the method
 * @param name method name, or {@code null} for a constructor
 * @param parameterTypes parameter types (use e.g. {@link java.lang.Byte#TYPE} for primitives)
 * @param isStatic whether to resolve a static method ID
 * @return method ID
 * @throws NoSuchMethodError if the method could not be found
 * @throws RuntimeException wrapping a SecurityException from reflection
 */
private static long getMethodID(Class c, String name, Class[] parameterTypes, boolean isStatic) {
    try {
        // Use a StringBuilder: the original built the descriptor with repeated
        // String += in a loop, which is quadratic in the signature length.
        StringBuilder sig = new StringBuilder("("); //$NON-NLS-1$
        for (int i = 0; i < parameterTypes.length; i++) {
            sig.append(getSystemName(parameterTypes[i]));
        }
        sig.append(')');
        if (name == null) {
            // Constructors are named "<init>" and always return void.
            name = "<init>"; //$NON-NLS-1$
            sig.append('V');
        } else {
            Method m = c.getDeclaredMethod(name, parameterTypes);
            sig.append(getSystemName(m.getReturnType()));
        }
        String signature = sig.toString();
        return isStatic ? getStaticMethodID0(c, name, signature) : getMethodID0(c, name, signature);
    } catch (SecurityException e) {
        throw new RuntimeException(e);
    } catch (NoSuchMethodException e) {
        throw new NoSuchMethodError(e.getMessage());
    }
}
/**
 * Returns the ID for the specified static method.
 * Use class constants for primitive parameter types. For example,
 * for <code>byte</code> type use the {@link java.lang.Byte#TYPE} class.
 * @param c a class the method belongs to
 * @param name method name or <code>null</code> in case of constructor
 * @param parameterTypes array of parameter types.
 * @return static method ID
 * @throws NoSuchMethodError if the method could not be found
 * @see #invokeVirtualVoid(Object, long, Object[])
 */
public final long getStaticMethodID(Class c, String name, Class[] parameterTypes) {
    // Delegates to the private resolver with isStatic == true.
    return getMethodID(c, name, parameterTypes, true);
}
/**
 * Returns the ID for the reflected constructor.
 * @param c reflected constructor
 * @return method ID
 * @see #invokeVirtualVoid(Object, long, Object[])
 * @see #newInstance(Class, long, Object[])
 */
public final long getMethodID(Constructor c) {
    // Constructor is a Member; resolved by the native Member-based lookup.
    return getMethodID0(c);
}
/**
 * Returns the ID for the reflected method.
 * @param m reflected method
 * @return method ID
 * @see #invokeVirtualVoid(Object, long, Object[])
 * @see #invokeNonVirtualVoid(Class, Object, long, Object[])
 * @see #invokeStaticVoid(Class, long, Object[])
 * @see #newInstance(Class, long, Object[])
 */
public final long getMethodID(Method m) {
    // Method is a Member; resolved by the native Member-based lookup.
    return getMethodID0(m);
}
// Native resolvers: look up an instance/static method ID from a name plus a
// JNI-style signature string, or directly from a reflected Member.
private static native long getMethodID0(Class c, String name, String sig);
private static native long getStaticMethodID0(Class c, String name, String sig);
private static native long getMethodID0(Member m);
/**
 * Maps each argument to its runtime class; {@code null} arguments map to {@code null}.
 *
 * @param args argument array (must be non-null)
 * @return array of the arguments' classes, same length and order as {@code args}
 */
private Class[] objectArrayTypes(Object[] args) {
    final int count = args.length;
    Class[] types = new Class[count];
    for (int i = 0; i < count; i++) {
        Object arg = args[i];
        types[i] = (arg == null) ? null : arg.getClass();
    }
    return types;
}
/**
 * Invokes a static void method with the given ID without security check.
 * Primitive type arguments should be wrapped with the appropriate objects.
 * For example, a byte value should be wrapped with a {@link java.lang.Byte}.
 * Use the {@link #getStaticMethodID(Class, String, Class[])} call to obtain the method ID.
 * @param c a class where the method is defined
 * @param methodID method ID
 * @param args array of arguments
 * @see #getMethodID(Class, String, Class[])
 */
public final native void invokeStaticVoid(Class c, long methodID, Object[] args);
/**
 * Invokes a void method or constructor with the given ID without security check.
 * Primitive type arguments should be wrapped with the appropriate objects.
 * For example, a byte value should be wrapped with a {@link java.lang.Byte}.
 * Use the {@link #getMethodID(Class, String, Class[])} call to obtain the method ID.
 * @param obj the object on which the method or constructor is invoked
 * @param methodID method ID
 * @param args array of arguments
 * @see #getMethodID(Class, String, Class[])
 */
public final native void invokeVirtualVoid(Object obj, long methodID, Object[] args);
/**
 * Invokes a non-virtual void method or constructor with the given ID without security check,
 * i.e. dispatches to the implementation declared in class {@code c} rather than the
 * receiver's dynamic type.
 * Primitive type arguments should be wrapped with the appropriate objects.
 * For example, a byte value should be wrapped with a {@link java.lang.Byte}.
 * Use the {@link #getMethodID(Class, String, Class[])} call to obtain the method ID.
 * @param c a class where the method or constructor is defined
 * @param obj the object on which the method or constructor is invoked
 * @param methodID method ID
 * @param args array of arguments
 * @see #getMethodID(Class, String, Class[])
 */
public final native void invokeNonVirtualVoid(Class c, Object obj, long methodID, Object[] args);
    /**
     * Invokes a static reference-type method with the given ID, bypassing
     * security and access checks.
     * Primitive-type arguments must be passed boxed; for example, a
     * {@code byte} value is passed as a {@link java.lang.Byte}.
     * Use the {@link #getMethodID(Class, String, Class[])} call to obtain the method ID.
     * NOTE(review): for static methods the ID presumably comes from the
     * static lookup variant ({@code getStaticMethodID0}) — confirm.
     * @param c the class that declares the method
     * @param methodID method ID
     * @param args array of arguments (boxed where primitive)
     * @see #getMethodID(Class, String, Class[])
     */
    public final native Object invokeStaticObject(Class c, long methodID, Object[] args);
    /**
     * Invokes a reference-type method with the given ID, bypassing security
     * and access checks.
     * Primitive-type arguments must be passed boxed; for example, a
     * {@code byte} value is passed as a {@link java.lang.Byte}.
     * Use the {@link #getMethodID(Class, String, Class[])} call to obtain the method ID.
     * @param obj the object whose method is invoked
     * @param methodID method ID
     * @param args array of arguments (boxed where primitive)
     * @see #getMethodID(Class, String, Class[])
     */
    public final native Object invokeVirtualObject(Object obj, long methodID, Object[] args);
    /**
     * Invokes a non-virtual reference-type method with the given ID,
     * bypassing security and access checks.
     * Primitive-type arguments must be passed boxed; for example, a
     * {@code byte} value is passed as a {@link java.lang.Byte}.
     * Use the {@link #getMethodID(Class, String, Class[])} call to obtain the method ID.
     * @param c the class that declares the method
     * @param obj the object whose method is invoked
     * @param methodID method ID
     * @param args array of arguments (boxed where primitive)
     * @see #getMethodID(Class, String, Class[])
     */
    public final native Object invokeNonVirtualObject(Class c, Object obj, long methodID, Object[] args);
    /**
     * Invokes a static long method with the given ID, bypassing security and
     * access checks.
     * Primitive-type arguments must be passed boxed; for example, a
     * {@code byte} value is passed as a {@link java.lang.Byte}.
     * Use the {@link #getMethodID(Class, String, Class[])} call to obtain the method ID.
     * NOTE(review): for static methods the ID presumably comes from the
     * static lookup variant ({@code getStaticMethodID0}) — confirm.
     * @param c the class that declares the method
     * @param methodID method ID
     * @param args array of arguments (boxed where primitive)
     * @see #getMethodID(Class, String, Class[])
     */
    public final native long invokeStaticLong(Class c, long methodID, Object[] args);
    /**
     * Invokes a long method with the given ID, bypassing security and access
     * checks.
     * Primitive-type arguments must be passed boxed; for example, a
     * {@code byte} value is passed as a {@link java.lang.Byte}.
     * Use the {@link #getMethodID(Class, String, Class[])} call to obtain the method ID.
     * @param obj the object whose method is invoked
     * @param methodID method ID
     * @param args array of arguments (boxed where primitive)
     * @see #getMethodID(Class, String, Class[])
     */
    public final native long invokeVirtualLong(Object obj, long methodID, Object[] args);
    /**
     * Invokes a non-virtual long method with the given ID, bypassing security
     * and access checks.
     * Primitive-type arguments must be passed boxed; for example, a
     * {@code byte} value is passed as a {@link java.lang.Byte}.
     * Use the {@link #getMethodID(Class, String, Class[])} call to obtain the method ID.
     * @param c the class that declares the method
     * @param obj the object whose method is invoked
     * @param methodID method ID
     * @param args array of arguments (boxed where primitive)
     * @see #getMethodID(Class, String, Class[])
     */
    public final native long invokeNonVirtualLong(Class c, Object obj, long methodID, Object[] args);
    /**
     * Allocates a new object of the given class without calling any
     * constructor. A constructor can be invoked separately via
     * {@link #invokeNonVirtualVoid(Class, Object, long, Object[])}.
     * @param c the class whose instance is to be allocated
     * @return the allocated (uninitialized) object
     */
    public final native Object allocateObject(Class c);
    /**
     * Allocates a new object of class {@code c} and invokes the constructor
     * with the given ID and args, bypassing security and access checks.
     * Primitive-type arguments must be passed boxed; for example, a
     * {@code byte} value is passed as a {@link java.lang.Byte}.
     * Use the {@link #getMethodID(Class, String, Class[])} call to obtain the constructor ID.
     * @param c the class whose instance is to be created
     * @param methodID constructor ID
     * @param args array of constructor arguments (boxed where primitive)
     * @return the newly constructed object
     */
    public final native Object newInstance(Class c, long methodID, Object[] args);
    /**
     * Allocates a new object of class {@code c} and invokes its no-arg
     * constructor, bypassing security and access checks.
     *
     * @param c the class whose instance is to be created
     * @return the newly constructed object
     */
    public final native Object newInstance(Class c);
    /**
     * Determines whether the given class has a static initializer.
     * @param c the class to inspect
     * @return true if class c has a static initializer, false otherwise
     */
    public final native boolean hasStaticInitializer(Class c);
    /**
     * Executes the {@code monitorenter} bytecode operation: acquires the
     * monitor of object {@code o}.
     * @param o object whose monitor is acquired
     */
    public native void monitorEnter(Object o);
    /**
     * Executes the {@code monitorexit} bytecode operation: releases the
     * monitor of object {@code o}.
     * @param o object whose monitor is released
     */
    public native void monitorExit(Object o);
private static final String getSystemName(Class cls) {
if (cls == boolean.class) {
return "Z"; //$NON-NLS-1$
} else if (cls == char.class) {
return "C"; //$NON-NLS-1$
} else if (cls == byte.class) {
return "B"; //$NON-NLS-1$
} else if (cls == short.class) {
return "S"; //$NON-NLS-1$
} else if (cls == int.class) {
return "I"; //$NON-NLS-1$
} else if (cls == long.class) {
return "J"; //$NON-NLS-1$
} else if (cls == float.class) {
return "F"; //$NON-NLS-1$
} else if (cls == double.class) {
return "D"; //$NON-NLS-1$
} else if (cls == void.class) {
return "V"; //$NON-NLS-1$
} else { // Object type.
String className = cls.getName().replace('.', '/');
// Add reference to non-array reference types.
return (cls.isArray() ? className : ('L' + className + ';'));
}
}
    // Looks up the native field ID of an instance field by name and
    // JNI-style type descriptor.
    private final native long getFieldID(Class c, String name, String sig);
    // Looks up the native field ID of a static field by name and
    // JNI-style type descriptor.
    private final native long getStaticFieldID(Class c, String name, String sig);
    /**
     * Creates a global reference to the given object and returns it as a
     * native handle; presumably it must be freed with
     * {@link #releaseGlobalReference(long)} to avoid pinning the object — confirm.
     * @param o object to reference
     * @return native handle of the global reference
     */
    public final native long getGlobalReference(Object o);
    /**
     * Releases a global reference previously obtained from
     * {@link #getGlobalReference(Object)}.
     * @param ref native handle to release
     */
    public final native void releaseGlobalReference(long ref);
    /**
     * Returns the object referred to by the given global-reference handle.
     * @param ref native handle obtained from {@link #getGlobalReference(Object)}
     * @return the referenced object
     */
    public final native Object getObjectFromReference(long ref);
}
|
googleapis/google-cloud-java | 35,561 | java-analytics-admin/proto-google-analytics-admin-v1alpha/src/main/java/com/google/analytics/admin/v1alpha/UpdateSearchAds360LinkRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/analytics/admin/v1alpha/analytics_admin.proto
// Protobuf Java Version: 3.25.8
package com.google.analytics.admin.v1alpha;
/**
*
*
* <pre>
* Request message for UpdateSearchAds360Link RPC.
* </pre>
*
* Protobuf type {@code google.analytics.admin.v1alpha.UpdateSearchAds360LinkRequest}
*/
public final class UpdateSearchAds360LinkRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.analytics.admin.v1alpha.UpdateSearchAds360LinkRequest)
UpdateSearchAds360LinkRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use UpdateSearchAds360LinkRequest.newBuilder() to construct.
private UpdateSearchAds360LinkRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private UpdateSearchAds360LinkRequest() {}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new UpdateSearchAds360LinkRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.analytics.admin.v1alpha.AnalyticsAdminProto
.internal_static_google_analytics_admin_v1alpha_UpdateSearchAds360LinkRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.analytics.admin.v1alpha.AnalyticsAdminProto
.internal_static_google_analytics_admin_v1alpha_UpdateSearchAds360LinkRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.analytics.admin.v1alpha.UpdateSearchAds360LinkRequest.class,
com.google.analytics.admin.v1alpha.UpdateSearchAds360LinkRequest.Builder.class);
}
private int bitField0_;
public static final int SEARCH_ADS_360_LINK_FIELD_NUMBER = 1;
private com.google.analytics.admin.v1alpha.SearchAds360Link searchAds360Link_;
/**
*
*
* <pre>
* The SearchAds360Link to update
* </pre>
*
* <code>.google.analytics.admin.v1alpha.SearchAds360Link search_ads_360_link = 1;</code>
*
* @return Whether the searchAds360Link field is set.
*/
@java.lang.Override
public boolean hasSearchAds360Link() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* The SearchAds360Link to update
* </pre>
*
* <code>.google.analytics.admin.v1alpha.SearchAds360Link search_ads_360_link = 1;</code>
*
* @return The searchAds360Link.
*/
@java.lang.Override
public com.google.analytics.admin.v1alpha.SearchAds360Link getSearchAds360Link() {
return searchAds360Link_ == null
? com.google.analytics.admin.v1alpha.SearchAds360Link.getDefaultInstance()
: searchAds360Link_;
}
/**
*
*
* <pre>
* The SearchAds360Link to update
* </pre>
*
* <code>.google.analytics.admin.v1alpha.SearchAds360Link search_ads_360_link = 1;</code>
*/
@java.lang.Override
public com.google.analytics.admin.v1alpha.SearchAds360LinkOrBuilder
getSearchAds360LinkOrBuilder() {
return searchAds360Link_ == null
? com.google.analytics.admin.v1alpha.SearchAds360Link.getDefaultInstance()
: searchAds360Link_;
}
public static final int UPDATE_MASK_FIELD_NUMBER = 2;
private com.google.protobuf.FieldMask updateMask_;
/**
*
*
* <pre>
* Required. The list of fields to be updated. Omitted fields will not be
* updated. To replace the entire entity, use one path with the string "*" to
* match all fields.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the updateMask field is set.
*/
@java.lang.Override
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. The list of fields to be updated. Omitted fields will not be
* updated. To replace the entire entity, use one path with the string "*" to
* match all fields.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The updateMask.
*/
@java.lang.Override
public com.google.protobuf.FieldMask getUpdateMask() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
/**
*
*
* <pre>
* Required. The list of fields to be updated. Omitted fields will not be
* updated. To replace the entire entity, use one path with the string "*" to
* match all fields.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getSearchAds360Link());
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeMessage(2, getUpdateMask());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getSearchAds360Link());
}
if (((bitField0_ & 0x00000002) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.analytics.admin.v1alpha.UpdateSearchAds360LinkRequest)) {
return super.equals(obj);
}
com.google.analytics.admin.v1alpha.UpdateSearchAds360LinkRequest other =
(com.google.analytics.admin.v1alpha.UpdateSearchAds360LinkRequest) obj;
if (hasSearchAds360Link() != other.hasSearchAds360Link()) return false;
if (hasSearchAds360Link()) {
if (!getSearchAds360Link().equals(other.getSearchAds360Link())) return false;
}
if (hasUpdateMask() != other.hasUpdateMask()) return false;
if (hasUpdateMask()) {
if (!getUpdateMask().equals(other.getUpdateMask())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasSearchAds360Link()) {
hash = (37 * hash) + SEARCH_ADS_360_LINK_FIELD_NUMBER;
hash = (53 * hash) + getSearchAds360Link().hashCode();
}
if (hasUpdateMask()) {
hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
hash = (53 * hash) + getUpdateMask().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.analytics.admin.v1alpha.UpdateSearchAds360LinkRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.analytics.admin.v1alpha.UpdateSearchAds360LinkRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.analytics.admin.v1alpha.UpdateSearchAds360LinkRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.analytics.admin.v1alpha.UpdateSearchAds360LinkRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.analytics.admin.v1alpha.UpdateSearchAds360LinkRequest parseFrom(
byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.analytics.admin.v1alpha.UpdateSearchAds360LinkRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.analytics.admin.v1alpha.UpdateSearchAds360LinkRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.analytics.admin.v1alpha.UpdateSearchAds360LinkRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.analytics.admin.v1alpha.UpdateSearchAds360LinkRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.analytics.admin.v1alpha.UpdateSearchAds360LinkRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.analytics.admin.v1alpha.UpdateSearchAds360LinkRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.analytics.admin.v1alpha.UpdateSearchAds360LinkRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.analytics.admin.v1alpha.UpdateSearchAds360LinkRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request message for UpdateSearchAds360Link RPC.
* </pre>
*
* Protobuf type {@code google.analytics.admin.v1alpha.UpdateSearchAds360LinkRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.analytics.admin.v1alpha.UpdateSearchAds360LinkRequest)
com.google.analytics.admin.v1alpha.UpdateSearchAds360LinkRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.analytics.admin.v1alpha.AnalyticsAdminProto
.internal_static_google_analytics_admin_v1alpha_UpdateSearchAds360LinkRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.analytics.admin.v1alpha.AnalyticsAdminProto
.internal_static_google_analytics_admin_v1alpha_UpdateSearchAds360LinkRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.analytics.admin.v1alpha.UpdateSearchAds360LinkRequest.class,
com.google.analytics.admin.v1alpha.UpdateSearchAds360LinkRequest.Builder.class);
}
// Construct using com.google.analytics.admin.v1alpha.UpdateSearchAds360LinkRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getSearchAds360LinkFieldBuilder();
getUpdateMaskFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
searchAds360Link_ = null;
if (searchAds360LinkBuilder_ != null) {
searchAds360LinkBuilder_.dispose();
searchAds360LinkBuilder_ = null;
}
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.analytics.admin.v1alpha.AnalyticsAdminProto
.internal_static_google_analytics_admin_v1alpha_UpdateSearchAds360LinkRequest_descriptor;
}
@java.lang.Override
public com.google.analytics.admin.v1alpha.UpdateSearchAds360LinkRequest
getDefaultInstanceForType() {
return com.google.analytics.admin.v1alpha.UpdateSearchAds360LinkRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.analytics.admin.v1alpha.UpdateSearchAds360LinkRequest build() {
com.google.analytics.admin.v1alpha.UpdateSearchAds360LinkRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.analytics.admin.v1alpha.UpdateSearchAds360LinkRequest buildPartial() {
com.google.analytics.admin.v1alpha.UpdateSearchAds360LinkRequest result =
new com.google.analytics.admin.v1alpha.UpdateSearchAds360LinkRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(
com.google.analytics.admin.v1alpha.UpdateSearchAds360LinkRequest result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.searchAds360Link_ =
searchAds360LinkBuilder_ == null ? searchAds360Link_ : searchAds360LinkBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
to_bitField0_ |= 0x00000002;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.analytics.admin.v1alpha.UpdateSearchAds360LinkRequest) {
return mergeFrom((com.google.analytics.admin.v1alpha.UpdateSearchAds360LinkRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(
com.google.analytics.admin.v1alpha.UpdateSearchAds360LinkRequest other) {
if (other
== com.google.analytics.admin.v1alpha.UpdateSearchAds360LinkRequest.getDefaultInstance())
return this;
if (other.hasSearchAds360Link()) {
mergeSearchAds360Link(other.getSearchAds360Link());
}
if (other.hasUpdateMask()) {
mergeUpdateMask(other.getUpdateMask());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
input.readMessage(
getSearchAds360LinkFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private com.google.analytics.admin.v1alpha.SearchAds360Link searchAds360Link_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.analytics.admin.v1alpha.SearchAds360Link,
com.google.analytics.admin.v1alpha.SearchAds360Link.Builder,
com.google.analytics.admin.v1alpha.SearchAds360LinkOrBuilder>
searchAds360LinkBuilder_;
/**
*
*
* <pre>
* The SearchAds360Link to update
* </pre>
*
* <code>.google.analytics.admin.v1alpha.SearchAds360Link search_ads_360_link = 1;</code>
*
* @return Whether the searchAds360Link field is set.
*/
public boolean hasSearchAds360Link() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* The SearchAds360Link to update
* </pre>
*
* <code>.google.analytics.admin.v1alpha.SearchAds360Link search_ads_360_link = 1;</code>
*
* @return The searchAds360Link.
*/
public com.google.analytics.admin.v1alpha.SearchAds360Link getSearchAds360Link() {
if (searchAds360LinkBuilder_ == null) {
return searchAds360Link_ == null
? com.google.analytics.admin.v1alpha.SearchAds360Link.getDefaultInstance()
: searchAds360Link_;
} else {
return searchAds360LinkBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* The SearchAds360Link to update
* </pre>
*
* <code>.google.analytics.admin.v1alpha.SearchAds360Link search_ads_360_link = 1;</code>
*/
public Builder setSearchAds360Link(com.google.analytics.admin.v1alpha.SearchAds360Link value) {
if (searchAds360LinkBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
searchAds360Link_ = value;
} else {
searchAds360LinkBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* The SearchAds360Link to update
* </pre>
*
* <code>.google.analytics.admin.v1alpha.SearchAds360Link search_ads_360_link = 1;</code>
*/
public Builder setSearchAds360Link(
com.google.analytics.admin.v1alpha.SearchAds360Link.Builder builderForValue) {
if (searchAds360LinkBuilder_ == null) {
searchAds360Link_ = builderForValue.build();
} else {
searchAds360LinkBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* The SearchAds360Link to update
* </pre>
*
* <code>.google.analytics.admin.v1alpha.SearchAds360Link search_ads_360_link = 1;</code>
*/
public Builder mergeSearchAds360Link(
com.google.analytics.admin.v1alpha.SearchAds360Link value) {
if (searchAds360LinkBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)
&& searchAds360Link_ != null
&& searchAds360Link_
!= com.google.analytics.admin.v1alpha.SearchAds360Link.getDefaultInstance()) {
getSearchAds360LinkBuilder().mergeFrom(value);
} else {
searchAds360Link_ = value;
}
} else {
searchAds360LinkBuilder_.mergeFrom(value);
}
if (searchAds360Link_ != null) {
bitField0_ |= 0x00000001;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* The SearchAds360Link to update
* </pre>
*
* <code>.google.analytics.admin.v1alpha.SearchAds360Link search_ads_360_link = 1;</code>
*/
public Builder clearSearchAds360Link() {
bitField0_ = (bitField0_ & ~0x00000001);
searchAds360Link_ = null;
if (searchAds360LinkBuilder_ != null) {
searchAds360LinkBuilder_.dispose();
searchAds360LinkBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* The SearchAds360Link to update
* </pre>
*
* <code>.google.analytics.admin.v1alpha.SearchAds360Link search_ads_360_link = 1;</code>
*/
public com.google.analytics.admin.v1alpha.SearchAds360Link.Builder
getSearchAds360LinkBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getSearchAds360LinkFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* The SearchAds360Link to update
* </pre>
*
* <code>.google.analytics.admin.v1alpha.SearchAds360Link search_ads_360_link = 1;</code>
*/
public com.google.analytics.admin.v1alpha.SearchAds360LinkOrBuilder
getSearchAds360LinkOrBuilder() {
if (searchAds360LinkBuilder_ != null) {
return searchAds360LinkBuilder_.getMessageOrBuilder();
} else {
return searchAds360Link_ == null
? com.google.analytics.admin.v1alpha.SearchAds360Link.getDefaultInstance()
: searchAds360Link_;
}
}
/**
*
*
* <pre>
* The SearchAds360Link to update
* </pre>
*
* <code>.google.analytics.admin.v1alpha.SearchAds360Link search_ads_360_link = 1;</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.analytics.admin.v1alpha.SearchAds360Link,
com.google.analytics.admin.v1alpha.SearchAds360Link.Builder,
com.google.analytics.admin.v1alpha.SearchAds360LinkOrBuilder>
getSearchAds360LinkFieldBuilder() {
if (searchAds360LinkBuilder_ == null) {
searchAds360LinkBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.analytics.admin.v1alpha.SearchAds360Link,
com.google.analytics.admin.v1alpha.SearchAds360Link.Builder,
com.google.analytics.admin.v1alpha.SearchAds360LinkOrBuilder>(
getSearchAds360Link(), getParentForChildren(), isClean());
searchAds360Link_ = null;
}
return searchAds360LinkBuilder_;
}
private com.google.protobuf.FieldMask updateMask_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
updateMaskBuilder_;
/**
*
*
* <pre>
* Required. The list of fields to be updated. Omitted fields will not be
* updated. To replace the entire entity, use one path with the string "*" to
* match all fields.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the updateMask field is set.
*/
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. The list of fields to be updated. Omitted fields will not be
* updated. To replace the entire entity, use one path with the string "*" to
* match all fields.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The updateMask.
*/
public com.google.protobuf.FieldMask getUpdateMask() {
if (updateMaskBuilder_ == null) {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
} else {
return updateMaskBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. The list of fields to be updated. Omitted fields will not be
* updated. To replace the entire entity, use one path with the string "*" to
* match all fields.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
updateMask_ = value;
} else {
updateMaskBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The list of fields to be updated. Omitted fields will not be
* updated. To replace the entire entity, use one path with the string "*" to
* match all fields.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
if (updateMaskBuilder_ == null) {
updateMask_ = builderForValue.build();
} else {
updateMaskBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The list of fields to be updated. Omitted fields will not be
* updated. To replace the entire entity, use one path with the string "*" to
* match all fields.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
  if (updateMaskBuilder_ == null) {
    // Merge field-by-field only when a non-default value is already present;
    // otherwise adopt the incoming message wholesale (avoids allocating a
    // builder for the common "first set" case).
    if (((bitField0_ & 0x00000002) != 0)
        && updateMask_ != null
        && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
      getUpdateMaskBuilder().mergeFrom(value);
    } else {
      updateMask_ = value;
    }
  } else {
    updateMaskBuilder_.mergeFrom(value);
  }
  // Only flag the field as set when a message is actually held; the merge
  // path through the nested builder leaves updateMask_ null.
  if (updateMask_ != null) {
    bitField0_ |= 0x00000002;
    onChanged();
  }
  return this;
}
/**
*
*
* <pre>
* Required. The list of fields to be updated. Omitted fields will not be
* updated. To replace the entire entity, use one path with the string "*" to
* match all fields.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearUpdateMask() {
  // Drops the has-bit, the cached message, and any live nested builder.
  bitField0_ = (bitField0_ & ~0x00000002);
  updateMask_ = null;
  if (updateMaskBuilder_ != null) {
    updateMaskBuilder_.dispose();
    updateMaskBuilder_ = null;
  }
  onChanged();
  return this;
}
/**
*
*
* <pre>
* Required. The list of fields to be updated. Omitted fields will not be
* updated. To replace the entire entity, use one path with the string "*" to
* match all fields.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
  // Handing out a mutable sub-builder counts as setting the field.
  bitField0_ |= 0x00000002;
  onChanged();
  return getUpdateMaskFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. The list of fields to be updated. Omitted fields will not be
* updated. To replace the entire entity, use one path with the string "*" to
* match all fields.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
  // Read-only view: prefers the live builder's state, falling back to the
  // cached message or the default instance. Never allocates a builder.
  if (updateMaskBuilder_ != null) {
    return updateMaskBuilder_.getMessageOrBuilder();
  } else {
    return updateMask_ == null
        ? com.google.protobuf.FieldMask.getDefaultInstance()
        : updateMask_;
  }
}
/**
*
*
* <pre>
* Required. The list of fields to be updated. Omitted fields will not be
* updated. To replace the entire entity, use one path with the string "*" to
* match all fields.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
        com.google.protobuf.FieldMask,
        com.google.protobuf.FieldMask.Builder,
        com.google.protobuf.FieldMaskOrBuilder>
    getUpdateMaskFieldBuilder() {
  // Lazily creates the single-field builder; once created it owns the value,
  // so the plain field reference is nulled out to avoid a stale duplicate.
  if (updateMaskBuilder_ == null) {
    updateMaskBuilder_ =
        new com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>(
            getUpdateMask(), getParentForChildren(), isClean());
    updateMask_ = null;
  }
  return updateMaskBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
  // Delegates straight to the generated superclass implementation.
  return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  // Delegates straight to the generated superclass implementation.
  return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.analytics.admin.v1alpha.UpdateSearchAds360LinkRequest)
}
// @@protoc_insertion_point(class_scope:google.analytics.admin.v1alpha.UpdateSearchAds360LinkRequest)
// Singleton default instance shared by all callers; also serves as the
// prototype from which parsing and builders start.
private static final com.google.analytics.admin.v1alpha.UpdateSearchAds360LinkRequest
    DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.analytics.admin.v1alpha.UpdateSearchAds360LinkRequest();
}

public static com.google.analytics.admin.v1alpha.UpdateSearchAds360LinkRequest
    getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Parser that builds a message from a wire-format stream. On any failure it
// attaches the partially-built message to the thrown exception so callers can
// inspect what was decoded before the error.
private static final com.google.protobuf.Parser<UpdateSearchAds360LinkRequest> PARSER =
    new com.google.protobuf.AbstractParser<UpdateSearchAds360LinkRequest>() {
      @java.lang.Override
      public UpdateSearchAds360LinkRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          // Wrap plain I/O errors in the protobuf exception type expected by callers.
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
// Static and instance accessors for the shared parser and default instance.
public static com.google.protobuf.Parser<UpdateSearchAds360LinkRequest> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<UpdateSearchAds360LinkRequest> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.analytics.admin.v1alpha.UpdateSearchAds360LinkRequest
    getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
|
apache/ozone | 35,824 | hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/replication/TestRatisContainerReplicaCount.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdds.scm.container.replication;
import static org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeOperationalState.DECOMMISSIONED;
import static org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeOperationalState.DECOMMISSIONING;
import static org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeOperationalState.ENTERING_MAINTENANCE;
import static org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeOperationalState.IN_MAINTENANCE;
import static org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeOperationalState.IN_SERVICE;
import static org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationFactor.THREE;
import static org.apache.hadoop.hdds.protocol.proto.StorageContainerDatanodeProtocolProtos.ContainerReplicaProto.State.CLOSING;
import static org.apache.hadoop.hdds.protocol.proto.StorageContainerDatanodeProtocolProtos.ContainerReplicaProto.State.OPEN;
import static org.apache.hadoop.hdds.protocol.proto.StorageContainerDatanodeProtocolProtos.ContainerReplicaProto.State.QUASI_CLOSED;
import static org.apache.hadoop.hdds.protocol.proto.StorageContainerDatanodeProtocolProtos.ContainerReplicaProto.State.UNHEALTHY;
import static org.apache.hadoop.hdds.scm.container.replication.ReplicationTestUtil.createContainerInfo;
import static org.apache.hadoop.hdds.scm.container.replication.ReplicationTestUtil.createContainerReplica;
import static org.apache.hadoop.hdds.scm.container.replication.ReplicationTestUtil.createReplicas;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.apache.hadoop.hdds.client.RatisReplicationConfig;
import org.apache.hadoop.hdds.protocol.DatanodeDetails;
import org.apache.hadoop.hdds.protocol.MockDatanodeDetails;
import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
import org.apache.hadoop.hdds.protocol.proto.StorageContainerDatanodeProtocolProtos.ContainerReplicaProto.State;
import org.apache.hadoop.hdds.scm.container.ContainerID;
import org.apache.hadoop.hdds.scm.container.ContainerInfo;
import org.apache.hadoop.hdds.scm.container.ContainerReplica;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;
/**
* Class used to test the RatisContainerReplicaCount class.
*/
class TestRatisContainerReplicaCount {
@Test
void testThreeHealthyReplica() {
  // 3 healthy IN_SERVICE replicas of a CLOSED container: exactly replicated.
  ContainerInfo container = createContainer(HddsProtos.LifeCycleState.CLOSED);
  Set<ContainerReplica> replicas =
      registerNodes(IN_SERVICE, IN_SERVICE, IN_SERVICE);
  RatisContainerReplicaCount count =
      new RatisContainerReplicaCount(container, replicas, 0, 0, 3, 2);
  validate(count, true, 0, false, false);
}
@Test
void testTwoHealthyReplica() {
  // Only 2 of 3 required replicas: under replicated by one.
  ContainerInfo container = createContainer(HddsProtos.LifeCycleState.CLOSED);
  Set<ContainerReplica> replicas = registerNodes(IN_SERVICE, IN_SERVICE);
  RatisContainerReplicaCount count =
      new RatisContainerReplicaCount(container, replicas, 0, 0, 3, 2);
  validate(count, false, 1, false, false);
}
@Test
void testOneHealthyReplica() {
  // A single replica of a factor-3 container: under replicated by two.
  ContainerInfo container = createContainer(HddsProtos.LifeCycleState.CLOSED);
  Set<ContainerReplica> replicas = registerNodes(IN_SERVICE);
  RatisContainerReplicaCount count =
      new RatisContainerReplicaCount(container, replicas, 0, 0, 3, 2);
  validate(count, false, 2, false, false);
}
@Test
void testTwoHealthyAndInflightAdd() {
  // 2 replicas plus 1 in-flight add: no further replication is scheduled,
  // but the container is not yet sufficiently replicated.
  ContainerInfo container = createContainer(HddsProtos.LifeCycleState.CLOSED);
  Set<ContainerReplica> replicas = registerNodes(IN_SERVICE, IN_SERVICE);
  RatisContainerReplicaCount count =
      new RatisContainerReplicaCount(container, replicas, 1, 0, 3, 2);
  validate(count, false, 0, false, false);
}
/**
* This does not schedule a container to be removed, as the inFlight add may
* fail and then the delete would make things under-replicated. Once the add
* completes there will be 4 healthy, and it will get taken care of then.
*/
@Test
void testThreeHealthyAndInflightAdd() {
  // 3 healthy plus a pending add: no delete is scheduled now; the excess is
  // handled once the add completes and there are 4 healthy replicas.
  ContainerInfo container = createContainer(HddsProtos.LifeCycleState.CLOSED);
  Set<ContainerReplica> replicas =
      registerNodes(IN_SERVICE, IN_SERVICE, IN_SERVICE);
  RatisContainerReplicaCount count =
      new RatisContainerReplicaCount(container, replicas, 1, 0, 3, 2);
  validate(count, true, 0, false, false);
}
/**
 * As the inflight delete may fail, but as it would make the container
 * under replicated, we go ahead and schedule another replica to be added.
*/
@Test
void testThreeHealthyAndInflightDelete() {
  // 3 healthy with a pending delete: a replacement replica is scheduled
  // because the delete is assumed to succeed.
  ContainerInfo container = createContainer(HddsProtos.LifeCycleState.CLOSED);
  Set<ContainerReplica> replicas =
      registerNodes(IN_SERVICE, IN_SERVICE, IN_SERVICE);
  RatisContainerReplicaCount count =
      new RatisContainerReplicaCount(container, replicas, 0, 1, 3, 2);
  validate(count, false, 1, false, false);
}
/**
* This is NOT sufficiently replicated as the inflight add may fail and the
* inflight del could succeed, leaving only 2 healthy replicas.
*/
@Test
void testThreeHealthyAndInflightAddAndInFlightDelete() {
  // 3 healthy, 1 pending add, 1 pending delete: not sufficiently replicated,
  // since the add may fail while the delete succeeds, leaving only 2.
  ContainerInfo container = createContainer(HddsProtos.LifeCycleState.CLOSED);
  Set<ContainerReplica> replicas =
      registerNodes(IN_SERVICE, IN_SERVICE, IN_SERVICE);
  RatisContainerReplicaCount count =
      new RatisContainerReplicaCount(container, replicas, 1, 1, 3, 2);
  validate(count, false, 0, false, false);
}
@Test
void testFourHealthyReplicas() {
  // 4 healthy replicas of a factor-3 container: over replicated by one.
  ContainerInfo container = createContainer(HddsProtos.LifeCycleState.CLOSED);
  Set<ContainerReplica> replicas =
      registerNodes(IN_SERVICE, IN_SERVICE, IN_SERVICE, IN_SERVICE);
  RatisContainerReplicaCount count =
      new RatisContainerReplicaCount(container, replicas, 0, 0, 3, 2);
  validate(count, true, -1, true, false);
}
@Test
void testFourHealthyReplicasAndInFlightDelete() {
  // 4 healthy with one delete already pending: the pending op brings the
  // expected count back to 3, so nothing more is scheduled.
  ContainerInfo container = createContainer(HddsProtos.LifeCycleState.CLOSED);
  Set<ContainerReplica> replicas =
      registerNodes(IN_SERVICE, IN_SERVICE, IN_SERVICE, IN_SERVICE);
  RatisContainerReplicaCount count =
      new RatisContainerReplicaCount(container, replicas, 0, 1, 3, 2);
  validate(count, true, 0, false, false);
}
@Test
void testFourHealthyReplicasAndTwoInFlightDelete() {
  // 4 healthy with two pending deletes would drop to 2 replicas, so the
  // container counts as under replicated by one.
  ContainerInfo container = createContainer(HddsProtos.LifeCycleState.CLOSED);
  Set<ContainerReplica> replicas =
      registerNodes(IN_SERVICE, IN_SERVICE, IN_SERVICE, IN_SERVICE);
  RatisContainerReplicaCount count =
      new RatisContainerReplicaCount(container, replicas, 0, 2, 3, 2);
  validate(count, false, 1, false, false);
}
@Test
void testOneHealthyReplicaRepFactorOne() {
  // One healthy replica fully satisfies a replication factor of one.
  ContainerInfo container = createContainer(HddsProtos.LifeCycleState.CLOSED);
  Set<ContainerReplica> replicas = registerNodes(IN_SERVICE);
  RatisContainerReplicaCount count =
      new RatisContainerReplicaCount(container, replicas, 0, 0, 1, 2);
  validate(count, true, 0, false, false);
}
@Test
void testOneHealthyReplicaRepFactorOneInFlightDelete() {
  // Factor-1 container whose only replica has a pending delete: a
  // replacement must be scheduled.
  ContainerInfo container = createContainer(HddsProtos.LifeCycleState.CLOSED);
  Set<ContainerReplica> replicas = registerNodes(IN_SERVICE);
  RatisContainerReplicaCount count =
      new RatisContainerReplicaCount(container, replicas, 0, 1, 1, 2);
  validate(count, false, 1, false, false);
}
@Test
void testTwoHealthyReplicaTwoInflightAdd() {
  // 2 healthy plus 2 pending adds: nothing more needed, but not yet
  // sufficiently replicated until an add lands.
  ContainerInfo container = createContainer(HddsProtos.LifeCycleState.CLOSED);
  Set<ContainerReplica> replicas = registerNodes(IN_SERVICE, IN_SERVICE);
  RatisContainerReplicaCount count =
      new RatisContainerReplicaCount(container, replicas, 2, 0, 3, 2);
  validate(count, false, 0, false, false);
}
/**
* From here consider decommission replicas.
*/
@Test
void testThreeHealthyAndTwoDecommission() {
  // 3 IN_SERVICE replicas cover the factor; the two decommissioning copies
  // are surplus and require no action.
  ContainerInfo container = createContainer(HddsProtos.LifeCycleState.CLOSED);
  Set<ContainerReplica> replicas = registerNodes(IN_SERVICE, IN_SERVICE,
      IN_SERVICE, DECOMMISSIONING, DECOMMISSIONING);
  RatisContainerReplicaCount count =
      new RatisContainerReplicaCount(container, replicas, 0, 0, 3, 2);
  validate(count, true, 0, false, false);
}
@ParameterizedTest
@MethodSource("org.apache.hadoop.hdds.scm.node.NodeStatus#decommissionStates")
void testOneDecommissionReplica(HddsProtos.NodeOperationalState state) {
  // Two healthy replicas plus one in a decommission state: the
  // decommissioning copy is discounted, so one more replica is needed.
  ContainerInfo container = createContainer(HddsProtos.LifeCycleState.CLOSED);
  Set<ContainerReplica> replicas =
      registerNodes(IN_SERVICE, IN_SERVICE, state);
  RatisContainerReplicaCount count =
      new RatisContainerReplicaCount(container, replicas, 0, 0, 3, 2);
  validate(count, false, 1, false, true);
}
@Test
// NOTE(review): method name has a typo — "DecommissionedneInFlightAdd" was
// presumably meant to be "DecommissionedOneInFlightAdd"; left unchanged here.
void testTwoHealthyOneDecommissionedneInFlightAdd() {
  // 2 healthy, 1 decommissioned (discounted) and 1 pending add: nothing more
  // to schedule, but not yet sufficiently replicated.
  Set<ContainerReplica> replica =
      registerNodes(IN_SERVICE, IN_SERVICE, DECOMMISSIONED);
  ContainerInfo container = createContainer(HddsProtos.LifeCycleState.CLOSED);
  RatisContainerReplicaCount rcnt =
      new RatisContainerReplicaCount(container, replica, 1, 0, 3, 2);
  validate(rcnt, false, 0, false, true);
}
@ParameterizedTest
@MethodSource("org.apache.hadoop.hdds.scm.node.NodeStatus#decommissionStates")
void testAllDecommissioned(HddsProtos.NodeOperationalState state) {
  // Every replica is decommissioning/decommissioned: all three replicas
  // must be re-created on in-service nodes.
  ContainerInfo container = createContainer(HddsProtos.LifeCycleState.CLOSED);
  Set<ContainerReplica> replicas = registerNodes(state, state, state);
  RatisContainerReplicaCount count =
      new RatisContainerReplicaCount(container, replicas, 0, 0, 3, 2);
  validate(count, false, 3, false, true);
}
@Test
void testAllDecommissionedRepFactorOne() {
  // Factor-1 container whose only replica is decommissioned: one new
  // replica is required.
  ContainerInfo container = createContainer(HddsProtos.LifeCycleState.CLOSED);
  Set<ContainerReplica> replicas = registerNodes(DECOMMISSIONED);
  RatisContainerReplicaCount count =
      new RatisContainerReplicaCount(container, replicas, 0, 0, 1, 2);
  validate(count, false, 1, false, true);
}
@Test
void testAllDecommissionedRepFactorOneInFlightAdd() {
  // Same as above but the replacement add is already in flight, so no
  // additional replica is requested.
  ContainerInfo container = createContainer(HddsProtos.LifeCycleState.CLOSED);
  Set<ContainerReplica> replicas = registerNodes(DECOMMISSIONED);
  RatisContainerReplicaCount count =
      new RatisContainerReplicaCount(container, replicas, 1, 0, 1, 2);
  validate(count, false, 0, false, true);
}
@Test
// NOTE(review): the name says "Decommissioning" but the replica registered is
// DECOMMISSIONED — confirm which state the test was meant to cover.
void testOneHealthyOneDecommissioningRepFactorOne() {
  // Factor-1 container: the single IN_SERVICE replica is enough; the
  // decommissioned copy is ignored.
  Set<ContainerReplica> replica = registerNodes(DECOMMISSIONED, IN_SERVICE);
  ContainerInfo container = createContainer(HddsProtos.LifeCycleState.CLOSED);
  RatisContainerReplicaCount rcnt =
      new RatisContainerReplicaCount(container, replica, 0, 0, 1, 2);
  validate(rcnt, true, 0, false, false);
}
/**
* Maintenance tests from here.
*/
@ParameterizedTest
@MethodSource("org.apache.hadoop.hdds.scm.node.NodeStatus#maintenanceStates")
void testOneHealthyTwoMaintenanceMinRepOfTwo(
    HddsProtos.NodeOperationalState state) {
  // With maintenanceMinReplica=2, a single healthy replica is one short of
  // the minimum while two copies are in maintenance.
  ContainerInfo container = createContainer(HddsProtos.LifeCycleState.CLOSED);
  Set<ContainerReplica> replicas = registerNodes(IN_SERVICE, state, state);
  RatisContainerReplicaCount count =
      new RatisContainerReplicaCount(container, replicas, 0, 0, 3, 2);
  validate(count, false, 1, false, true);
}
@Test
void testOneHealthyThreeMaintenanceMinRepOfTwo() {
  // Only one healthy copy against a maintenance minimum of two: one extra
  // replica is needed regardless of the three maintenance copies.
  ContainerInfo container = createContainer(HddsProtos.LifeCycleState.CLOSED);
  Set<ContainerReplica> replicas = registerNodes(IN_SERVICE,
      IN_MAINTENANCE, IN_MAINTENANCE, ENTERING_MAINTENANCE);
  RatisContainerReplicaCount count =
      new RatisContainerReplicaCount(container, replicas, 0, 0, 3, 2);
  validate(count, false, 1, false, true);
}
@Test
void testOneHealthyTwoMaintenanceMinRepOfOne() {
  // With the maintenance minimum relaxed to one, the single healthy replica
  // is sufficient.
  ContainerInfo container = createContainer(HddsProtos.LifeCycleState.CLOSED);
  Set<ContainerReplica> replicas =
      registerNodes(IN_SERVICE, IN_MAINTENANCE, ENTERING_MAINTENANCE);
  RatisContainerReplicaCount count =
      new RatisContainerReplicaCount(container, replicas, 0, 0, 3, 1);
  validate(count, true, 0, false, false);
}
@Test
void testOneHealthyThreeMaintenanceMinRepOfTwoInFlightAdd() {
  // One healthy, three in maintenance, and the needed add already pending:
  // no further replica is requested.
  ContainerInfo container = createContainer(HddsProtos.LifeCycleState.CLOSED);
  Set<ContainerReplica> replicas = registerNodes(IN_SERVICE,
      IN_MAINTENANCE, ENTERING_MAINTENANCE, IN_MAINTENANCE);
  RatisContainerReplicaCount count =
      new RatisContainerReplicaCount(container, replicas, 1, 0, 3, 2);
  validate(count, false, 0, false, true);
}
@Test
void testAllMaintenance() {
  // No healthy replicas at all: two copies are needed to reach the
  // maintenance minimum of two.
  ContainerInfo container = createContainer(HddsProtos.LifeCycleState.CLOSED);
  Set<ContainerReplica> replicas =
      registerNodes(IN_MAINTENANCE, ENTERING_MAINTENANCE, IN_MAINTENANCE);
  RatisContainerReplicaCount count =
      new RatisContainerReplicaCount(container, replicas, 0, 0, 3, 2);
  validate(count, false, 2, false, true);
}
/**
* As we have exactly 3 healthy, but then an excess of maintenance copies
* we ignore the over-replication caused by the maintenance copies until they
* come back online, and then deal with them.
*/
@Test
void testThreeHealthyTwoInMaintenance() {
  // Exactly 3 healthy; the extra maintenance copies are not treated as
  // over-replication until their nodes return to service.
  ContainerInfo container = createContainer(HddsProtos.LifeCycleState.CLOSED);
  Set<ContainerReplica> replicas = registerNodes(IN_SERVICE, IN_SERVICE,
      IN_SERVICE, IN_MAINTENANCE, ENTERING_MAINTENANCE);
  RatisContainerReplicaCount count =
      new RatisContainerReplicaCount(container, replicas, 0, 0, 3, 2);
  validate(count, true, 0, false, false);
}
/**
* This is somewhat similar to testThreeHealthyTwoInMaintenance() except now
* one of the maintenance copies has become healthy and we will need to remove
* the over-replicated healthy container.
*/
@Test
void testFourHealthyOneInMaintenance() {
  // Four healthy replicas exceed the factor even with one extra copy in
  // maintenance, so one healthy replica should be removed.
  ContainerInfo container = createContainer(HddsProtos.LifeCycleState.CLOSED);
  Set<ContainerReplica> replicas = registerNodes(
      IN_SERVICE, IN_SERVICE, IN_SERVICE, IN_SERVICE, IN_MAINTENANCE);
  RatisContainerReplicaCount count =
      new RatisContainerReplicaCount(container, replicas, 0, 0, 3, 2);
  validate(count, true, -1, true, false);
  assertTrue(count.isSafelyOverReplicated());
}
@Test
void testOneMaintenanceMinRepOfTwoRepFactorOne() {
  // Factor-1 container whose only copy is in maintenance: one replica must
  // be created to keep the data readable.
  ContainerInfo container = createContainer(HddsProtos.LifeCycleState.CLOSED);
  Set<ContainerReplica> replicas = registerNodes(IN_MAINTENANCE);
  RatisContainerReplicaCount count =
      new RatisContainerReplicaCount(container, replicas, 0, 0, 1, 2);
  validate(count, false, 1, false, true);
}
@Test
void testOneMaintenanceMinRepOfTwoRepFactorOneInFlightAdd() {
  // Same scenario with the replacement add already pending: nothing more
  // to schedule.
  ContainerInfo container = createContainer(HddsProtos.LifeCycleState.CLOSED);
  Set<ContainerReplica> replicas = registerNodes(IN_MAINTENANCE);
  RatisContainerReplicaCount count =
      new RatisContainerReplicaCount(container, replicas, 1, 0, 1, 2);
  validate(count, false, 0, false, true);
}
@Test
void testOneHealthyOneMaintenanceRepFactorOne() {
  // Factor-1 container with a healthy copy alongside a maintenance copy:
  // sufficiently replicated.
  ContainerInfo container = createContainer(HddsProtos.LifeCycleState.CLOSED);
  Set<ContainerReplica> replicas = registerNodes(IN_MAINTENANCE, IN_SERVICE);
  RatisContainerReplicaCount count =
      new RatisContainerReplicaCount(container, replicas, 0, 0, 1, 2);
  validate(count, true, 0, false, false);
}
@Test
void testTwoDecomTwoMaintenanceOneInflightAdd() {
  // No in-service replicas: two decommissioning and two maintenance copies
  // with one add pending still leave the container one replica short.
  ContainerInfo container = createContainer(HddsProtos.LifeCycleState.CLOSED);
  Set<ContainerReplica> replicas = registerNodes(
      DECOMMISSIONED, DECOMMISSIONING, IN_MAINTENANCE, ENTERING_MAINTENANCE);
  RatisContainerReplicaCount count =
      new RatisContainerReplicaCount(container, replicas, 1, 0, 3, 2);
  validate(count, false, 1, false, true);
}
@Test
void testHealthyContainerIsHealthy() {
  // All replicas in service and matching the container state: healthy.
  ContainerInfo container = createContainer(HddsProtos.LifeCycleState.CLOSED);
  Set<ContainerReplica> replicas =
      registerNodes(IN_SERVICE, IN_SERVICE, IN_SERVICE);
  RatisContainerReplicaCount count =
      new RatisContainerReplicaCount(container, replicas, 0, 0, 3, 2);
  assertTrue(count.isHealthy());
}
@Test
void testIsHealthyWithDifferentReplicaStateNotHealthy() {
  // A container is unhealthy when any replica's state disagrees with the
  // container state: swap one replica for an OPEN one and expect unhealthy.
  Set<ContainerReplica> replica =
      registerNodes(IN_SERVICE, IN_SERVICE, IN_SERVICE);
  // Replace an arbitrary replica. The previous version mutated the set from
  // inside a for-each loop and relied on an immediate break to dodge a
  // ConcurrentModificationException; taking the first element via the
  // iterator makes the replacement safe and explicit.
  ContainerReplica original = replica.iterator().next();
  DatanodeDetails dn = original.getDatanodeDetails();
  ContainerReplica replace = new ContainerReplica.ContainerReplicaBuilder()
      .setContainerID(ContainerID.valueOf(1))
      .setContainerState(OPEN)
      .setDatanodeDetails(dn)
      .setOriginNodeId(dn.getID())
      .setSequenceId(1)
      .build();
  replica.remove(original);
  replica.add(replace);
  ContainerInfo container = createContainer(HddsProtos.LifeCycleState.CLOSED);
  RatisContainerReplicaCount rcnt =
      new RatisContainerReplicaCount(container, replica, 0, 0, 3, 2);
  assertFalse(rcnt.isHealthy());
}
@Test
void testSufficientReplicationWithMismatchedReplicaState() {
  // Two CLOSED replicas plus one CLOSING replica still count as sufficient
  // replication for a CLOSED container.
  ContainerInfo container = createContainerInfo(
      RatisReplicationConfig.getInstance(THREE), 1L,
      HddsProtos.LifeCycleState.CLOSED);
  Set<ContainerReplica> replicas =
      createReplicas(ContainerID.valueOf(1L), State.CLOSED, 0, 0);
  replicas.add(
      createContainerReplica(ContainerID.valueOf(1L), 0, IN_SERVICE, CLOSING));
  RatisContainerReplicaCount count = new RatisContainerReplicaCount(
      container, replicas, Collections.emptyList(), 2, false);
  validate(count, true, 0, false, false);
}
/**
 * Verifies the per-category replica counters (healthy, mismatched, matching,
 * unhealthy) and excess-redundancy reporting for a CLOSED container with
 * 2 CLOSED, 1 CLOSING and 1 UNHEALTHY replica, both with and without
 * counting UNHEALTHY replicas.
 */
@Test
void testReplicaCounts() {
  ContainerInfo container =
      createContainerInfo(RatisReplicationConfig.getInstance(
          THREE), 1L,
          HddsProtos.LifeCycleState.CLOSED);
  Set<ContainerReplica> replicas =
      createReplicas(ContainerID.valueOf(1L), State.CLOSED, 0, 0);
  replicas.add(createContainerReplica(ContainerID.valueOf(1L), 0,
      IN_SERVICE, CLOSING));
  replicas.add(createContainerReplica(ContainerID.valueOf(1L), 0,
      IN_SERVICE, UNHEALTHY));

  // First, test by not considering UNHEALTHY replicas.
  RatisContainerReplicaCount rcnt =
      new RatisContainerReplicaCount(container, replicas,
          Collections.emptyList(), 2, false);
  assertTrue(rcnt.isSufficientlyReplicated());
  assertFalse(rcnt.isOverReplicated());
  assertEquals(0, rcnt.getExcessRedundancy(true));

  // CLOSED + CLOSED + CLOSING = 3
  assertEquals(3, rcnt.getHealthyReplicaCount());
  // CLOSING = 1
  assertEquals(1, rcnt.getMisMatchedReplicaCount());
  // CLOSED + CLOSED = 2
  assertEquals(2, rcnt.getMatchingReplicaCount());
  // UNHEALTHY = 1
  assertEquals(1, rcnt.getUnhealthyReplicaCount());

  // Now, test by considering UNHEALTHY replicas: the UNHEALTHY copy pushes
  // the container into over-replication by one.
  rcnt = new RatisContainerReplicaCount(container, replicas,
      Collections.emptyList(), 2, true);
  assertTrue(rcnt.isSufficientlyReplicated());
  assertTrue(rcnt.isOverReplicated());
  assertEquals(1, rcnt.getExcessRedundancy(true));

  // CLOSED + CLOSED + CLOSING = 3
  assertEquals(3, rcnt.getHealthyReplicaCount());
  // CLOSING = 1
  assertEquals(1, rcnt.getMisMatchedReplicaCount());
  // CLOSED + CLOSED = 2
  assertEquals(2, rcnt.getMatchingReplicaCount());
  // UNHEALTHY = 1
  assertEquals(1, rcnt.getUnhealthyReplicaCount());
}
/**
 * An UNHEALTHY replica on a DECOMMISSIONED node should count as unhealthy in
 * both modes, but only contribute to the decommission count when
 * considerUnhealthy is true.
 */
@Test
void testUnhealthyReplicaOnDecommissionedNodeWithPendingDelete() {
  ContainerInfo container =
      createContainerInfo(RatisReplicationConfig.getInstance(
          THREE), 1L,
          HddsProtos.LifeCycleState.CLOSED);
  Set<ContainerReplica> replicas =
      createReplicas(ContainerID.valueOf(1L), State.CLOSED, 0, 0);
  replicas.add(createContainerReplica(ContainerID.valueOf(1L), 0,
      IN_SERVICE, CLOSING));
  ContainerReplica unhealthyReplica =
      createContainerReplica(ContainerID.valueOf(1L), 0,
          DECOMMISSIONED, UNHEALTHY);
  replicas.add(unhealthyReplica);

  // First, test by not considering UNHEALTHY replicas.
  RatisContainerReplicaCount rcnt =
      new RatisContainerReplicaCount(container, replicas,
          Collections.emptyList(), 2, false);
  assertTrue(rcnt.isSufficientlyReplicated());
  assertFalse(rcnt.isOverReplicated());
  assertEquals(0, rcnt.getExcessRedundancy(true));

  // CLOSED + CLOSED + CLOSING = 3
  assertEquals(3, rcnt.getHealthyReplicaCount());
  // CLOSING = 1
  assertEquals(1, rcnt.getMisMatchedReplicaCount());
  // CLOSED + CLOSED = 2
  assertEquals(2, rcnt.getMatchingReplicaCount());
  // UNHEALTHY decommissioned is counted, too
  assertEquals(1, rcnt.getUnhealthyReplicaCount());
  // due to considerUnhealthy=false
  assertEquals(0, rcnt.getDecommissionCount());

  // Now, test by considering UNHEALTHY replicas
  rcnt = new RatisContainerReplicaCount(container, replicas,
      Collections.emptyList(), 2, true);
  assertTrue(rcnt.isSufficientlyReplicated());
  assertFalse(rcnt.isOverReplicated());
  assertEquals(0, rcnt.getExcessRedundancy(true));

  // CLOSED + CLOSED + CLOSING = 3
  assertEquals(3, rcnt.getHealthyReplicaCount());
  // CLOSING = 1
  assertEquals(1, rcnt.getMisMatchedReplicaCount());
  // CLOSED + CLOSED = 2
  assertEquals(2, rcnt.getMatchingReplicaCount());
  // UNHEALTHY decommissioned is counted as unhealthy
  assertEquals(1, rcnt.getUnhealthyReplicaCount());
  // due to considerUnhealthy=true
  assertEquals(1, rcnt.getDecommissionCount());
}
/**
* There is a CLOSED container with 3 CLOSED replicas and 1 UNHEALTHY
* replica. There is a pending delete on the UNHEALTHY replica.
* Expectation: If considerUnhealthy in RatisContainerReplicaCount is
* false, the pending delete on the UNHEALTHY replica should be ignored.
* The container should be sufficiently replicated.
* If considerUnhealthy is true, the pending delete should be considered,
* but the container is still sufficiently replicated because we have
* enough CLOSED replicas.
*/
@Test
void testSufficientReplicationWithPendingDeleteOnUnhealthyReplica() {
  // 3 CLOSED replicas plus 1 UNHEALTHY that has a pending delete.
  ContainerInfo container =
      createContainerInfo(RatisReplicationConfig.getInstance(
          THREE), 1L,
          HddsProtos.LifeCycleState.CLOSED);
  Set<ContainerReplica> replicas =
      createReplicas(container.containerID(), State.CLOSED, 0, 0, 0);
  ContainerReplica unhealthyReplica = createContainerReplica(
      ContainerID.valueOf(1L), 0, IN_SERVICE, UNHEALTHY);
  replicas.add(unhealthyReplica);
  List<ContainerReplicaOp> ops = new ArrayList<>();
  ops.add(ContainerReplicaOp.create(ContainerReplicaOp.PendingOpType.DELETE,
      unhealthyReplica.getDatanodeDetails(), 0));

  // considerUnhealthy=false: the pending delete on the UNHEALTHY replica is
  // ignored; the 3 CLOSED replicas are sufficient.
  RatisContainerReplicaCount withoutUnhealthy =
      new RatisContainerReplicaCount(container, replicas, ops, 2, false);
  validate(withoutUnhealthy, true, 0, false, false);

  // considerUnhealthy=true: the delete is counted, but the container is
  // still sufficiently replicated thanks to the 3 CLOSED replicas.
  RatisContainerReplicaCount withUnhealthy =
      new RatisContainerReplicaCount(container, replicas, ops, 2, true);
  validate(withUnhealthy, true, 0, false, false);
}
/**
* Scenario: A CLOSED RATIS container with 2 CLOSED and 1 UNHEALTHY replicas.
* Expectation: If considerUnhealthy is false, this container is not
* sufficiently replicated and replicaDelta is 1.
*/
@Test
public void testUnderReplicationBecauseOfUnhealthyReplica() {
  // 2 CLOSED + 1 UNHEALTHY: with considerUnhealthy=false the unhealthy copy
  // is discounted, leaving the container under replicated by one.
  ContainerInfo container = createContainerInfo(
      RatisReplicationConfig.getInstance(THREE), 1L,
      HddsProtos.LifeCycleState.CLOSED);
  Set<ContainerReplica> replicas =
      createReplicas(container.containerID(), State.CLOSED, 0, 0);
  replicas.add(createContainerReplica(
      ContainerID.valueOf(1L), 0, IN_SERVICE, UNHEALTHY));
  RatisContainerReplicaCount count = new RatisContainerReplicaCount(
      container, replicas, Collections.emptyList(), 2, false);
  validate(count, false, 1, false, false);
}
@Test
void testIsHealthyWithMaintReplicaIsHealthy() {
  // Maintenance replicas do not make a container unhealthy.
  ContainerInfo container = createContainer(HddsProtos.LifeCycleState.CLOSED);
  Set<ContainerReplica> replicas = registerNodes(
      IN_SERVICE, IN_SERVICE, IN_MAINTENANCE, ENTERING_MAINTENANCE);
  RatisContainerReplicaCount count =
      new RatisContainerReplicaCount(container, replicas, 0, 0, 3, 2);
  assertTrue(count.isHealthy());
}
@Test
void testContainerWithNoReplicasIsMissing() {
  // Zero replicas: the container is unrecoverable, not merely under
  // replicated.
  ContainerInfo container = createContainer(HddsProtos.LifeCycleState.CLOSED);
  Set<ContainerReplica> replicas = new HashSet<>();
  RatisContainerReplicaCount count =
      new RatisContainerReplicaCount(container, replicas, 0, 0, 3, 2);
  assertTrue(count.isUnrecoverable());
  assertFalse(count.isSufficientlyReplicated());
}
@Test
void testOverReplicatedWithAndWithoutPending() {
  // 5 healthy with 2 pending deletes: over replicated only when the pending
  // ops are ignored.
  ContainerInfo container = createContainer(HddsProtos.LifeCycleState.CLOSED);
  Set<ContainerReplica> replicas = registerNodes(
      IN_SERVICE, IN_SERVICE, IN_SERVICE, IN_SERVICE, IN_SERVICE);
  RatisContainerReplicaCount count =
      new RatisContainerReplicaCount(container, replicas, 0, 2, 3, 2);
  assertTrue(count.isOverReplicated(false));
  assertFalse(count.isOverReplicated(true));
  assertEquals(2, count.getExcessRedundancy(false));
  assertEquals(0, count.getExcessRedundancy(true));
}
/**
* A container is safely over replicated if:
* 1. It is over replicated.
* 2. Has at least replication factor number of matching replicas.
*/
@Test
void testSafelyOverReplicated() {
  /*
  First case: 3 CLOSED, 2 UNHEALTHY, 1 pending delete.
  */
  ContainerInfo container =
      createContainerInfo(RatisReplicationConfig.getInstance(
          THREE), 1L,
          HddsProtos.LifeCycleState.CLOSED);
  Set<ContainerReplica> replicas =
      createReplicas(container.containerID(), State.CLOSED, 0, 0, 0);
  Set<ContainerReplica> unhealthyReplicas =
      createReplicas(container.containerID(), UNHEALTHY, 0, 0);
  replicas.addAll(unhealthyReplicas);
  List<ContainerReplicaOp> ops = new ArrayList<>();
  // Schedule a delete on one of the UNHEALTHY replicas.
  ops.add(ContainerReplicaOp.create(ContainerReplicaOp.PendingOpType.DELETE,
      unhealthyReplicas.iterator().next().getDatanodeDetails(), 0));
  RatisContainerReplicaCount withoutUnhealthy =
      new RatisContainerReplicaCount(container, replicas, ops, 2, false);
  validate(withoutUnhealthy, true, 0, false, false);
  // not safely over replicated (3 CLOSED - 1 pending delete)
  assertFalse(withoutUnhealthy.isSafelyOverReplicated());

  // Counting UNHEALTHY replicas the container is over replicated, and the
  // 3 CLOSED copies satisfy rule 2, so removal is safe.
  RatisContainerReplicaCount withUnhealthy =
      new RatisContainerReplicaCount(container, replicas, ops, 2, true);
  validate(withUnhealthy, true, -1, true, false);
  assertTrue(withUnhealthy.isSafelyOverReplicated());

  /*
  Second case: 2 CLOSED, 1 CLOSING, 1 UNHEALTHY
  */
  container = createContainerInfo(RatisReplicationConfig.getInstance(
      THREE), 1L,
      HddsProtos.LifeCycleState.CLOSED);
  replicas = createReplicas(container.containerID(), State.CLOSED, 0, 0);
  ContainerReplica unhealthyReplica =
      createContainerReplica(container.containerID(), 0, IN_SERVICE,
          UNHEALTHY);
  ContainerReplica misMatchedReplica =
      createContainerReplica(container.containerID(), 0, IN_SERVICE, CLOSING);
  replicas.add(unhealthyReplica);
  replicas.add(misMatchedReplica);
  withoutUnhealthy =
      new RatisContainerReplicaCount(container, replicas,
          Collections.emptyList(), 2,
          false);
  validate(withoutUnhealthy, true, 0, false, false);
  assertFalse(withoutUnhealthy.isSafelyOverReplicated());
  withUnhealthy =
      new RatisContainerReplicaCount(container, replicas,
          Collections.emptyList(), 2,
          true);
  validate(withUnhealthy, true, -1, true, false);
  // Violates rule 2: only 2 replicas match the container state.
  assertFalse(withUnhealthy.isSafelyOverReplicated());
  // now check by adding a CLOSED replica (3 matching replicas -> safe)
  replicas.add(createContainerReplica(container.containerID(), 0,
      IN_SERVICE, State.CLOSED));
  withUnhealthy = new RatisContainerReplicaCount(container, replicas,
      Collections.emptyList(), 2, true);
  validate(withUnhealthy, true, -2, true, false);
  assertTrue(withUnhealthy.isSafelyOverReplicated());
}
@Test
void testRemainingRedundancy() {
Set<ContainerReplica> replica = registerNodes(IN_SERVICE, IN_SERVICE,
IN_SERVICE, IN_SERVICE);
ContainerInfo container = createContainer(HddsProtos.LifeCycleState.CLOSED);
RatisContainerReplicaCount rcnt =
new RatisContainerReplicaCount(container, replica, 0, 1, 3, 2);
assertEquals(2, rcnt.getRemainingRedundancy());
replica = registerNodes(IN_SERVICE);
rcnt = new RatisContainerReplicaCount(container, replica, 0, 0, 3, 2);
assertEquals(0, rcnt.getRemainingRedundancy());
rcnt = new RatisContainerReplicaCount(container, replica, 0, 1, 3, 2);
assertEquals(0, rcnt.getRemainingRedundancy());
}
@Test
void testSufficientlyReplicatedWithAndWithoutPending() {
Set<ContainerReplica> replica = registerNodes(IN_SERVICE, IN_SERVICE);
ContainerInfo container = createContainer(HddsProtos.LifeCycleState.CLOSED);
RatisContainerReplicaCount rcnt =
new RatisContainerReplicaCount(container, replica, 0, 0, 3, 2);
assertFalse(rcnt.isSufficientlyReplicated(true));
assertFalse(rcnt.isSufficientlyReplicated(false));
rcnt = new RatisContainerReplicaCount(container, replica, 1, 0, 3, 2);
assertTrue(rcnt.isSufficientlyReplicated(true));
assertFalse(rcnt.isSufficientlyReplicated(false));
replica = registerNodes(IN_SERVICE, IN_SERVICE, IN_SERVICE);
rcnt =
new RatisContainerReplicaCount(container, replica, 0, 1, 3, 2);
assertFalse(rcnt.isSufficientlyReplicated(false));
assertFalse(rcnt.isSufficientlyReplicated(true));
rcnt =
new RatisContainerReplicaCount(container, replica, 1, 1, 3, 2);
assertFalse(rcnt.isSufficientlyReplicated(false));
assertTrue(rcnt.isSufficientlyReplicated(true));
}
@Test
void testQuasiClosedReplicaWithCorrectSequenceID() {
final ContainerInfo container =
createContainerInfo(RatisReplicationConfig.getInstance(THREE), 1L,
HddsProtos.LifeCycleState.CLOSED);
final Set<ContainerReplica> replicas =
createReplicas(container.containerID(), State.CLOSED, 0, 0);
final Set<ContainerReplica> quasiClosedReplica =
createReplicas(container.containerID(), QUASI_CLOSED, 0);
replicas.addAll(quasiClosedReplica);
final RatisContainerReplicaCount crc =
new RatisContainerReplicaCount(container, replicas,
Collections.emptyList(), 2, false);
validate(crc, true, 0, false, false);
assertTrue(crc.isSufficientlyReplicated(true));
assertEquals(0, crc.getUnhealthyReplicaCount());
// With additional unhealthy replica
final Set<ContainerReplica> unhealthyReplica =
createReplicas(container.containerID(), UNHEALTHY, 0);
replicas.addAll(unhealthyReplica);
final RatisContainerReplicaCount crcWithUnhealthy =
new RatisContainerReplicaCount(container, replicas,
Collections.emptyList(), 2, false);
validate(crcWithUnhealthy, true, 0, false, false);
assertTrue(crcWithUnhealthy.isSufficientlyReplicated(true));
assertEquals(1, crcWithUnhealthy.getUnhealthyReplicaCount());
}
@Test
void testQuasiClosedReplicaWithInCorrectSequenceID() {
final long sequenceID = 101;
final ContainerInfo container =
createContainerInfo(RatisReplicationConfig.getInstance(THREE), 1L,
HddsProtos.LifeCycleState.CLOSED, sequenceID);
final ContainerID containerID = container.containerID();
final ContainerReplica replicaOne = createContainerReplica(
containerID, 0, IN_SERVICE, State.CLOSED, sequenceID);
final ContainerReplica replicaTwo = createContainerReplica(
containerID, 0, IN_SERVICE, State.CLOSED, sequenceID);
final ContainerReplica quasiCloseReplica = createContainerReplica(
containerID, 0, IN_SERVICE, State.QUASI_CLOSED, sequenceID - 5);
final Set<ContainerReplica> replicas = new HashSet<>();
replicas.add(replicaOne);
replicas.add(replicaTwo);
replicas.add(quasiCloseReplica);
final RatisContainerReplicaCount crc =
new RatisContainerReplicaCount(container, replicas,
Collections.emptyList(), 2, false);
validate(crc, false, 1, false, false);
assertFalse(crc.isSufficientlyReplicated(true));
assertEquals(1, crc.getUnhealthyReplicaCount());
// With additional unhealthy replica
final Set<ContainerReplica> unhealthyReplica =
createReplicas(container.containerID(), UNHEALTHY, 0);
replicas.addAll(unhealthyReplica);
final RatisContainerReplicaCount crcWithUnhealthy =
new RatisContainerReplicaCount(container, replicas,
Collections.emptyList(), 2, false);
validate(crcWithUnhealthy, false, 1, false, false);
assertFalse(crcWithUnhealthy.isSufficientlyReplicated(true));
assertEquals(2, crcWithUnhealthy.getUnhealthyReplicaCount());
}
  /**
   * Asserts the core state of a {@link RatisContainerReplicaCount} in a
   * single call.
   *
   * <p>Note the assertion order below differs from the parameter order:
   * over-replication is checked before the replica delta.
   *
   * @param rcnt the replica count under test
   * @param sufficientlyReplicated expected {@code isSufficientlyReplicated()}
   * @param replicaDelta expected {@code additionalReplicaNeeded()}
   * @param overReplicated expected {@code isOverReplicated()}
   * @param insufficientDueToOutOfService expected
   *     {@code insufficientDueToOutOfService()}
   */
  private void validate(RatisContainerReplicaCount rcnt,
      boolean sufficientlyReplicated, int replicaDelta,
      boolean overReplicated, boolean insufficientDueToOutOfService) {
    assertEquals(sufficientlyReplicated, rcnt.isSufficientlyReplicated());
    assertEquals(overReplicated, rcnt.isOverReplicated());
    assertEquals(replicaDelta, rcnt.additionalReplicaNeeded());
    assertEquals(insufficientDueToOutOfService,
        rcnt.insufficientDueToOutOfService());
  }
private Set<ContainerReplica> registerNodes(
HddsProtos.NodeOperationalState... states) {
Set<ContainerReplica> replica = new HashSet<>();
for (HddsProtos.NodeOperationalState s : states) {
DatanodeDetails dn = MockDatanodeDetails.randomDatanodeDetails();
dn.setPersistedOpState(s);
replica.add(new ContainerReplica.ContainerReplicaBuilder()
.setContainerID(ContainerID.valueOf(1))
.setContainerState(State.CLOSED)
.setDatanodeDetails(dn)
.setOriginNodeId(dn.getID())
.setSequenceId(1)
.build());
}
return replica;
}
private ContainerInfo createContainer(HddsProtos.LifeCycleState state) {
return new ContainerInfo.Builder()
.setContainerID(1)
.setState(state)
.build();
}
}
|
googleapis/google-cloud-java | 35,575 | java-dialogflow-cx/proto-google-cloud-dialogflow-cx-v3/src/main/java/com/google/cloud/dialogflow/cx/v3/UpdateTestCaseRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/dialogflow/cx/v3/test_case.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.dialogflow.cx.v3;
/**
*
*
* <pre>
* The request message for
* [TestCases.UpdateTestCase][google.cloud.dialogflow.cx.v3.TestCases.UpdateTestCase].
* </pre>
*
* Protobuf type {@code google.cloud.dialogflow.cx.v3.UpdateTestCaseRequest}
*/
public final class UpdateTestCaseRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.dialogflow.cx.v3.UpdateTestCaseRequest)
UpdateTestCaseRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use UpdateTestCaseRequest.newBuilder() to construct.
private UpdateTestCaseRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private UpdateTestCaseRequest() {}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new UpdateTestCaseRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.dialogflow.cx.v3.TestCaseProto
.internal_static_google_cloud_dialogflow_cx_v3_UpdateTestCaseRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.dialogflow.cx.v3.TestCaseProto
.internal_static_google_cloud_dialogflow_cx_v3_UpdateTestCaseRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.dialogflow.cx.v3.UpdateTestCaseRequest.class,
com.google.cloud.dialogflow.cx.v3.UpdateTestCaseRequest.Builder.class);
}
private int bitField0_;
public static final int TEST_CASE_FIELD_NUMBER = 1;
private com.google.cloud.dialogflow.cx.v3.TestCase testCase_;
/**
*
*
* <pre>
* Required. The test case to update.
* </pre>
*
* <code>
* .google.cloud.dialogflow.cx.v3.TestCase test_case = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the testCase field is set.
*/
@java.lang.Override
public boolean hasTestCase() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The test case to update.
* </pre>
*
* <code>
* .google.cloud.dialogflow.cx.v3.TestCase test_case = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The testCase.
*/
@java.lang.Override
public com.google.cloud.dialogflow.cx.v3.TestCase getTestCase() {
return testCase_ == null
? com.google.cloud.dialogflow.cx.v3.TestCase.getDefaultInstance()
: testCase_;
}
/**
*
*
* <pre>
* Required. The test case to update.
* </pre>
*
* <code>
* .google.cloud.dialogflow.cx.v3.TestCase test_case = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.dialogflow.cx.v3.TestCaseOrBuilder getTestCaseOrBuilder() {
return testCase_ == null
? com.google.cloud.dialogflow.cx.v3.TestCase.getDefaultInstance()
: testCase_;
}
public static final int UPDATE_MASK_FIELD_NUMBER = 2;
private com.google.protobuf.FieldMask updateMask_;
/**
*
*
* <pre>
* Required. The mask to specify which fields should be updated. The
* [`creationTime`][google.cloud.dialogflow.cx.v3.TestCase.creation_time] and
* [`lastTestResult`][google.cloud.dialogflow.cx.v3.TestCase.last_test_result]
* cannot be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the updateMask field is set.
*/
@java.lang.Override
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. The mask to specify which fields should be updated. The
* [`creationTime`][google.cloud.dialogflow.cx.v3.TestCase.creation_time] and
* [`lastTestResult`][google.cloud.dialogflow.cx.v3.TestCase.last_test_result]
* cannot be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The updateMask.
*/
@java.lang.Override
public com.google.protobuf.FieldMask getUpdateMask() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
/**
*
*
* <pre>
* Required. The mask to specify which fields should be updated. The
* [`creationTime`][google.cloud.dialogflow.cx.v3.TestCase.creation_time] and
* [`lastTestResult`][google.cloud.dialogflow.cx.v3.TestCase.last_test_result]
* cannot be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getTestCase());
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeMessage(2, getUpdateMask());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getTestCase());
}
if (((bitField0_ & 0x00000002) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.dialogflow.cx.v3.UpdateTestCaseRequest)) {
return super.equals(obj);
}
com.google.cloud.dialogflow.cx.v3.UpdateTestCaseRequest other =
(com.google.cloud.dialogflow.cx.v3.UpdateTestCaseRequest) obj;
if (hasTestCase() != other.hasTestCase()) return false;
if (hasTestCase()) {
if (!getTestCase().equals(other.getTestCase())) return false;
}
if (hasUpdateMask() != other.hasUpdateMask()) return false;
if (hasUpdateMask()) {
if (!getUpdateMask().equals(other.getUpdateMask())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasTestCase()) {
hash = (37 * hash) + TEST_CASE_FIELD_NUMBER;
hash = (53 * hash) + getTestCase().hashCode();
}
if (hasUpdateMask()) {
hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
hash = (53 * hash) + getUpdateMask().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.dialogflow.cx.v3.UpdateTestCaseRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.cx.v3.UpdateTestCaseRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.cx.v3.UpdateTestCaseRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.cx.v3.UpdateTestCaseRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.cx.v3.UpdateTestCaseRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.cx.v3.UpdateTestCaseRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.cx.v3.UpdateTestCaseRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.cx.v3.UpdateTestCaseRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.dialogflow.cx.v3.UpdateTestCaseRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.cx.v3.UpdateTestCaseRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.dialogflow.cx.v3.UpdateTestCaseRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.cx.v3.UpdateTestCaseRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.dialogflow.cx.v3.UpdateTestCaseRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* The request message for
* [TestCases.UpdateTestCase][google.cloud.dialogflow.cx.v3.TestCases.UpdateTestCase].
* </pre>
*
* Protobuf type {@code google.cloud.dialogflow.cx.v3.UpdateTestCaseRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.dialogflow.cx.v3.UpdateTestCaseRequest)
com.google.cloud.dialogflow.cx.v3.UpdateTestCaseRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.dialogflow.cx.v3.TestCaseProto
.internal_static_google_cloud_dialogflow_cx_v3_UpdateTestCaseRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.dialogflow.cx.v3.TestCaseProto
.internal_static_google_cloud_dialogflow_cx_v3_UpdateTestCaseRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.dialogflow.cx.v3.UpdateTestCaseRequest.class,
com.google.cloud.dialogflow.cx.v3.UpdateTestCaseRequest.Builder.class);
}
// Construct using com.google.cloud.dialogflow.cx.v3.UpdateTestCaseRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getTestCaseFieldBuilder();
getUpdateMaskFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
testCase_ = null;
if (testCaseBuilder_ != null) {
testCaseBuilder_.dispose();
testCaseBuilder_ = null;
}
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.dialogflow.cx.v3.TestCaseProto
.internal_static_google_cloud_dialogflow_cx_v3_UpdateTestCaseRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.dialogflow.cx.v3.UpdateTestCaseRequest getDefaultInstanceForType() {
return com.google.cloud.dialogflow.cx.v3.UpdateTestCaseRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.dialogflow.cx.v3.UpdateTestCaseRequest build() {
com.google.cloud.dialogflow.cx.v3.UpdateTestCaseRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.dialogflow.cx.v3.UpdateTestCaseRequest buildPartial() {
com.google.cloud.dialogflow.cx.v3.UpdateTestCaseRequest result =
new com.google.cloud.dialogflow.cx.v3.UpdateTestCaseRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.dialogflow.cx.v3.UpdateTestCaseRequest result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.testCase_ = testCaseBuilder_ == null ? testCase_ : testCaseBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
to_bitField0_ |= 0x00000002;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.dialogflow.cx.v3.UpdateTestCaseRequest) {
return mergeFrom((com.google.cloud.dialogflow.cx.v3.UpdateTestCaseRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.dialogflow.cx.v3.UpdateTestCaseRequest other) {
if (other == com.google.cloud.dialogflow.cx.v3.UpdateTestCaseRequest.getDefaultInstance())
return this;
if (other.hasTestCase()) {
mergeTestCase(other.getTestCase());
}
if (other.hasUpdateMask()) {
mergeUpdateMask(other.getUpdateMask());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
input.readMessage(getTestCaseFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private com.google.cloud.dialogflow.cx.v3.TestCase testCase_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.dialogflow.cx.v3.TestCase,
com.google.cloud.dialogflow.cx.v3.TestCase.Builder,
com.google.cloud.dialogflow.cx.v3.TestCaseOrBuilder>
testCaseBuilder_;
/**
*
*
* <pre>
* Required. The test case to update.
* </pre>
*
* <code>
* .google.cloud.dialogflow.cx.v3.TestCase test_case = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the testCase field is set.
*/
public boolean hasTestCase() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The test case to update.
* </pre>
*
* <code>
* .google.cloud.dialogflow.cx.v3.TestCase test_case = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The testCase.
*/
public com.google.cloud.dialogflow.cx.v3.TestCase getTestCase() {
if (testCaseBuilder_ == null) {
return testCase_ == null
? com.google.cloud.dialogflow.cx.v3.TestCase.getDefaultInstance()
: testCase_;
} else {
return testCaseBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. The test case to update.
* </pre>
*
* <code>
* .google.cloud.dialogflow.cx.v3.TestCase test_case = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setTestCase(com.google.cloud.dialogflow.cx.v3.TestCase value) {
if (testCaseBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
testCase_ = value;
} else {
testCaseBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The test case to update.
* </pre>
*
* <code>
* .google.cloud.dialogflow.cx.v3.TestCase test_case = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setTestCase(com.google.cloud.dialogflow.cx.v3.TestCase.Builder builderForValue) {
if (testCaseBuilder_ == null) {
testCase_ = builderForValue.build();
} else {
testCaseBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The test case to update.
* </pre>
*
* <code>
* .google.cloud.dialogflow.cx.v3.TestCase test_case = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeTestCase(com.google.cloud.dialogflow.cx.v3.TestCase value) {
if (testCaseBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)
&& testCase_ != null
&& testCase_ != com.google.cloud.dialogflow.cx.v3.TestCase.getDefaultInstance()) {
getTestCaseBuilder().mergeFrom(value);
} else {
testCase_ = value;
}
} else {
testCaseBuilder_.mergeFrom(value);
}
if (testCase_ != null) {
bitField0_ |= 0x00000001;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. The test case to update.
* </pre>
*
* <code>
* .google.cloud.dialogflow.cx.v3.TestCase test_case = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearTestCase() {
bitField0_ = (bitField0_ & ~0x00000001);
testCase_ = null;
if (testCaseBuilder_ != null) {
testCaseBuilder_.dispose();
testCaseBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The test case to update.
* </pre>
*
* <code>
* .google.cloud.dialogflow.cx.v3.TestCase test_case = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.dialogflow.cx.v3.TestCase.Builder getTestCaseBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getTestCaseFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. The test case to update.
* </pre>
*
* <code>
* .google.cloud.dialogflow.cx.v3.TestCase test_case = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.dialogflow.cx.v3.TestCaseOrBuilder getTestCaseOrBuilder() {
if (testCaseBuilder_ != null) {
return testCaseBuilder_.getMessageOrBuilder();
} else {
return testCase_ == null
? com.google.cloud.dialogflow.cx.v3.TestCase.getDefaultInstance()
: testCase_;
}
}
/**
*
*
* <pre>
* Required. The test case to update.
* </pre>
*
* <code>
* .google.cloud.dialogflow.cx.v3.TestCase test_case = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.dialogflow.cx.v3.TestCase,
com.google.cloud.dialogflow.cx.v3.TestCase.Builder,
com.google.cloud.dialogflow.cx.v3.TestCaseOrBuilder>
getTestCaseFieldBuilder() {
if (testCaseBuilder_ == null) {
testCaseBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.dialogflow.cx.v3.TestCase,
com.google.cloud.dialogflow.cx.v3.TestCase.Builder,
com.google.cloud.dialogflow.cx.v3.TestCaseOrBuilder>(
getTestCase(), getParentForChildren(), isClean());
testCase_ = null;
}
return testCaseBuilder_;
}
private com.google.protobuf.FieldMask updateMask_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
updateMaskBuilder_;
/**
*
*
* <pre>
* Required. The mask to specify which fields should be updated. The
* [`creationTime`][google.cloud.dialogflow.cx.v3.TestCase.creation_time] and
* [`lastTestResult`][google.cloud.dialogflow.cx.v3.TestCase.last_test_result]
* cannot be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the updateMask field is set.
*/
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. The mask to specify which fields should be updated. The
* [`creationTime`][google.cloud.dialogflow.cx.v3.TestCase.creation_time] and
* [`lastTestResult`][google.cloud.dialogflow.cx.v3.TestCase.last_test_result]
* cannot be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The updateMask.
*/
public com.google.protobuf.FieldMask getUpdateMask() {
if (updateMaskBuilder_ == null) {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
} else {
return updateMaskBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. The mask to specify which fields should be updated. The
* [`creationTime`][google.cloud.dialogflow.cx.v3.TestCase.creation_time] and
* [`lastTestResult`][google.cloud.dialogflow.cx.v3.TestCase.last_test_result]
* cannot be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
updateMask_ = value;
} else {
updateMaskBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The mask to specify which fields should be updated. The
* [`creationTime`][google.cloud.dialogflow.cx.v3.TestCase.creation_time] and
* [`lastTestResult`][google.cloud.dialogflow.cx.v3.TestCase.last_test_result]
* cannot be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
if (updateMaskBuilder_ == null) {
updateMask_ = builderForValue.build();
} else {
updateMaskBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The mask to specify which fields should be updated. The
* [`creationTime`][google.cloud.dialogflow.cx.v3.TestCase.creation_time] and
* [`lastTestResult`][google.cloud.dialogflow.cx.v3.TestCase.last_test_result]
* cannot be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)
&& updateMask_ != null
&& updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
getUpdateMaskBuilder().mergeFrom(value);
} else {
updateMask_ = value;
}
} else {
updateMaskBuilder_.mergeFrom(value);
}
if (updateMask_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. The mask to specify which fields should be updated. The
* [`creationTime`][google.cloud.dialogflow.cx.v3.TestCase.creation_time] and
* [`lastTestResult`][google.cloud.dialogflow.cx.v3.TestCase.last_test_result]
* cannot be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearUpdateMask() {
bitField0_ = (bitField0_ & ~0x00000002);
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The mask to specify which fields should be updated. The
* [`creationTime`][google.cloud.dialogflow.cx.v3.TestCase.creation_time] and
* [`lastTestResult`][google.cloud.dialogflow.cx.v3.TestCase.last_test_result]
* cannot be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
      // Eagerly mark the field present: the caller may mutate it through the
      // returned builder without any further set call on this Builder.
      bitField0_ |= 0x00000002;
      onChanged();
      return getUpdateMaskFieldBuilder().getBuilder();
    }
/**
*
*
* <pre>
* Required. The mask to specify which fields should be updated. The
* [`creationTime`][google.cloud.dialogflow.cx.v3.TestCase.creation_time] and
* [`lastTestResult`][google.cloud.dialogflow.cx.v3.TestCase.last_test_result]
* cannot be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
if (updateMaskBuilder_ != null) {
return updateMaskBuilder_.getMessageOrBuilder();
} else {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
}
}
/**
*
*
* <pre>
* Required. The mask to specify which fields should be updated. The
* [`creationTime`][google.cloud.dialogflow.cx.v3.TestCase.creation_time] and
* [`lastTestResult`][google.cloud.dialogflow.cx.v3.TestCase.last_test_result]
* cannot be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    // Lazily creates the SingleFieldBuilderV3 for update_mask, seeding it from the
    // current message value. Once the builder exists it owns the field, so the
    // plain message reference is cleared to avoid two sources of truth.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>
        getUpdateMaskFieldBuilder() {
      if (updateMaskBuilder_ == null) {
        updateMaskBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.protobuf.FieldMask,
                com.google.protobuf.FieldMask.Builder,
                com.google.protobuf.FieldMaskOrBuilder>(
                getUpdateMask(), getParentForChildren(), isClean());
        updateMask_ = null;
      }
      return updateMaskBuilder_;
    }
    // Straight delegations so the generated Builder type is returned for chaining;
    // unknown fields preserve wire data for schema fields this binary doesn't know.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.dialogflow.cx.v3.UpdateTestCaseRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.dialogflow.cx.v3.UpdateTestCaseRequest)
  // The canonical all-fields-unset instance, shared process-wide.
  private static final com.google.cloud.dialogflow.cx.v3.UpdateTestCaseRequest DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.dialogflow.cx.v3.UpdateTestCaseRequest();
  }
  /** Returns the shared immutable default instance of this message type. */
  public static com.google.cloud.dialogflow.cx.v3.UpdateTestCaseRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Wire-format parser. On any failure the partially built message is attached
  // to the thrown InvalidProtocolBufferException so callers can inspect what was
  // successfully read before the error.
  private static final com.google.protobuf.Parser<UpdateTestCaseRequest> PARSER =
      new com.google.protobuf.AbstractParser<UpdateTestCaseRequest>() {
        @java.lang.Override
        public UpdateTestCaseRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O failures in the protobuf exception type callers expect.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  /** Returns the static wire-format parser for this message type. */
  public static com.google.protobuf.Parser<UpdateTestCaseRequest> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<UpdateTestCaseRequest> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.dialogflow.cx.v3.UpdateTestCaseRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 35,634 | java-aiplatform/proto-google-cloud-aiplatform-v1/src/main/java/com/google/cloud/aiplatform/v1/PredictRequestResponseLoggingConfig.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/aiplatform/v1/endpoint.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.aiplatform.v1;
/**
*
*
* <pre>
* Configuration for logging request-response to a BigQuery table.
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1.PredictRequestResponseLoggingConfig}
*/
public final class PredictRequestResponseLoggingConfig
extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1.PredictRequestResponseLoggingConfig)
PredictRequestResponseLoggingConfigOrBuilder {
private static final long serialVersionUID = 0L;
// Use PredictRequestResponseLoggingConfig.newBuilder() to construct.
  /** Builder-driven constructor invoked by {@code Builder.buildPartial()}. */
  private PredictRequestResponseLoggingConfig(
      com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  /** No-arg constructor; all fields start at their proto3 defaults. */
  private PredictRequestResponseLoggingConfig() {}
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    // Allocation hook used internally by the protobuf runtime.
    return new PredictRequestResponseLoggingConfig();
  }
  /** Returns the proto descriptor for this message type (from endpoint.proto). */
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.aiplatform.v1.EndpointProto
        .internal_static_google_cloud_aiplatform_v1_PredictRequestResponseLoggingConfig_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    // Binds the generated field accessors to this message/builder pair.
    return com.google.cloud.aiplatform.v1.EndpointProto
        .internal_static_google_cloud_aiplatform_v1_PredictRequestResponseLoggingConfig_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.aiplatform.v1.PredictRequestResponseLoggingConfig.class,
            com.google.cloud.aiplatform.v1.PredictRequestResponseLoggingConfig.Builder.class);
  }
private int bitField0_;
public static final int ENABLED_FIELD_NUMBER = 1;
private boolean enabled_ = false;
/**
*
*
* <pre>
* If logging is enabled or not.
* </pre>
*
* <code>bool enabled = 1;</code>
*
* @return The enabled.
*/
@java.lang.Override
public boolean getEnabled() {
return enabled_;
}
public static final int SAMPLING_RATE_FIELD_NUMBER = 2;
private double samplingRate_ = 0D;
/**
*
*
* <pre>
* Percentage of requests to be logged, expressed as a fraction in
* range(0,1].
* </pre>
*
* <code>double sampling_rate = 2;</code>
*
* @return The samplingRate.
*/
@java.lang.Override
public double getSamplingRate() {
return samplingRate_;
}
public static final int BIGQUERY_DESTINATION_FIELD_NUMBER = 3;
private com.google.cloud.aiplatform.v1.BigQueryDestination bigqueryDestination_;
/**
*
*
* <pre>
* BigQuery table for logging.
* If only given a project, a new dataset will be created with name
* `logging_<endpoint-display-name>_<endpoint-id>` where
* <endpoint-display-name> will be made BigQuery-dataset-name compatible (e.g.
* most special characters will become underscores). If no table name is
* given, a new table will be created with name `request_response_logging`
* </pre>
*
* <code>.google.cloud.aiplatform.v1.BigQueryDestination bigquery_destination = 3;</code>
*
* @return Whether the bigqueryDestination field is set.
*/
@java.lang.Override
public boolean hasBigqueryDestination() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* BigQuery table for logging.
* If only given a project, a new dataset will be created with name
* `logging_<endpoint-display-name>_<endpoint-id>` where
* <endpoint-display-name> will be made BigQuery-dataset-name compatible (e.g.
* most special characters will become underscores). If no table name is
* given, a new table will be created with name `request_response_logging`
* </pre>
*
* <code>.google.cloud.aiplatform.v1.BigQueryDestination bigquery_destination = 3;</code>
*
* @return The bigqueryDestination.
*/
@java.lang.Override
public com.google.cloud.aiplatform.v1.BigQueryDestination getBigqueryDestination() {
return bigqueryDestination_ == null
? com.google.cloud.aiplatform.v1.BigQueryDestination.getDefaultInstance()
: bigqueryDestination_;
}
/**
*
*
* <pre>
* BigQuery table for logging.
* If only given a project, a new dataset will be created with name
* `logging_<endpoint-display-name>_<endpoint-id>` where
* <endpoint-display-name> will be made BigQuery-dataset-name compatible (e.g.
* most special characters will become underscores). If no table name is
* given, a new table will be created with name `request_response_logging`
* </pre>
*
* <code>.google.cloud.aiplatform.v1.BigQueryDestination bigquery_destination = 3;</code>
*/
@java.lang.Override
public com.google.cloud.aiplatform.v1.BigQueryDestinationOrBuilder
getBigqueryDestinationOrBuilder() {
return bigqueryDestination_ == null
? com.google.cloud.aiplatform.v1.BigQueryDestination.getDefaultInstance()
: bigqueryDestination_;
}
  // Memoized initialization check: -1 = not yet computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // This message declares no required fields, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Proto3 semantics: scalar fields are emitted only when they differ from
    // their default value (false / 0.0). The raw-bits check also emits -0.0.
    if (enabled_ != false) {
      output.writeBool(1, enabled_);
    }
    if (java.lang.Double.doubleToRawLongBits(samplingRate_) != 0) {
      output.writeDouble(2, samplingRate_);
    }
    // The message field's presence is tracked explicitly via bitField0_.
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(3, getBigqueryDestination());
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // Memoized; -1 (the field default) marks "not yet computed". The emission
    // conditions below must mirror writeTo exactly.
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (enabled_ != false) {
      size += com.google.protobuf.CodedOutputStream.computeBoolSize(1, enabled_);
    }
    if (java.lang.Double.doubleToRawLongBits(samplingRate_) != 0) {
      size += com.google.protobuf.CodedOutputStream.computeDoubleSize(2, samplingRate_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getBigqueryDestination());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.aiplatform.v1.PredictRequestResponseLoggingConfig)) {
      return super.equals(obj);
    }
    com.google.cloud.aiplatform.v1.PredictRequestResponseLoggingConfig other =
        (com.google.cloud.aiplatform.v1.PredictRequestResponseLoggingConfig) obj;
    if (getEnabled() != other.getEnabled()) return false;
    // Compare doubles by bit pattern so NaN == NaN and +0.0 != -0.0, matching
    // protobuf's field-equality semantics rather than IEEE ==.
    if (java.lang.Double.doubleToLongBits(getSamplingRate())
        != java.lang.Double.doubleToLongBits(other.getSamplingRate())) return false;
    if (hasBigqueryDestination() != other.hasBigqueryDestination()) return false;
    if (hasBigqueryDestination()) {
      if (!getBigqueryDestination().equals(other.getBigqueryDestination())) return false;
    }
    // Unknown fields participate in equality as well.
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // Memoized; 0 (the field default) marks "not yet computed".
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    // Prime-multiplier mixing over descriptor, field numbers and field values,
    // consistent with equals() (doubles hashed via their long bit pattern).
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + ENABLED_FIELD_NUMBER;
    hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getEnabled());
    hash = (37 * hash) + SAMPLING_RATE_FIELD_NUMBER;
    hash =
        (53 * hash)
            + com.google.protobuf.Internal.hashLong(
                java.lang.Double.doubleToLongBits(getSamplingRate()));
    // Only mixed in when present, mirroring the presence check in equals().
    if (hasBigqueryDestination()) {
      hash = (37 * hash) + BIGQUERY_DESTINATION_FIELD_NUMBER;
      hash = (53 * hash) + getBigqueryDestination().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // ---------------------------------------------------------------------------
  // Static parsing entry points. All overloads delegate to PARSER; the variants
  // taking an ExtensionRegistryLite resolve extensions while parsing.
  // ---------------------------------------------------------------------------
  public static com.google.cloud.aiplatform.v1.PredictRequestResponseLoggingConfig parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.aiplatform.v1.PredictRequestResponseLoggingConfig parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.aiplatform.v1.PredictRequestResponseLoggingConfig parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.aiplatform.v1.PredictRequestResponseLoggingConfig parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.aiplatform.v1.PredictRequestResponseLoggingConfig parseFrom(
      byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.aiplatform.v1.PredictRequestResponseLoggingConfig parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  // Stream overloads read a single non-delimited message from the stream.
  public static com.google.cloud.aiplatform.v1.PredictRequestResponseLoggingConfig parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.aiplatform.v1.PredictRequestResponseLoggingConfig parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Delimited overloads expect a varint length prefix before the message bytes.
  public static com.google.cloud.aiplatform.v1.PredictRequestResponseLoggingConfig
      parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.aiplatform.v1.PredictRequestResponseLoggingConfig
      parseDelimitedFrom(
          java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.aiplatform.v1.PredictRequestResponseLoggingConfig parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.aiplatform.v1.PredictRequestResponseLoggingConfig parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.aiplatform.v1.PredictRequestResponseLoggingConfig prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Configuration for logging request-response to a BigQuery table.
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1.PredictRequestResponseLoggingConfig}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1.PredictRequestResponseLoggingConfig)
com.google.cloud.aiplatform.v1.PredictRequestResponseLoggingConfigOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.aiplatform.v1.EndpointProto
.internal_static_google_cloud_aiplatform_v1_PredictRequestResponseLoggingConfig_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.aiplatform.v1.EndpointProto
.internal_static_google_cloud_aiplatform_v1_PredictRequestResponseLoggingConfig_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.aiplatform.v1.PredictRequestResponseLoggingConfig.class,
com.google.cloud.aiplatform.v1.PredictRequestResponseLoggingConfig.Builder.class);
}
// Construct using
// com.google.cloud.aiplatform.v1.PredictRequestResponseLoggingConfig.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getBigqueryDestinationFieldBuilder();
}
}
    @java.lang.Override
    public Builder clear() {
      super.clear();
      // Reset all presence bits and every field to its proto3 default.
      bitField0_ = 0;
      enabled_ = false;
      samplingRate_ = 0D;
      bigqueryDestination_ = null;
      // Also tear down any live nested builder for the message field.
      if (bigqueryDestinationBuilder_ != null) {
        bigqueryDestinationBuilder_.dispose();
        bigqueryDestinationBuilder_ = null;
      }
      return this;
    }
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.aiplatform.v1.EndpointProto
.internal_static_google_cloud_aiplatform_v1_PredictRequestResponseLoggingConfig_descriptor;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1.PredictRequestResponseLoggingConfig
getDefaultInstanceForType() {
return com.google.cloud.aiplatform.v1.PredictRequestResponseLoggingConfig
.getDefaultInstance();
}
    @java.lang.Override
    public com.google.cloud.aiplatform.v1.PredictRequestResponseLoggingConfig build() {
      com.google.cloud.aiplatform.v1.PredictRequestResponseLoggingConfig result = buildPartial();
      // Always true here (no required fields), kept for the generated contract.
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.aiplatform.v1.PredictRequestResponseLoggingConfig buildPartial() {
      com.google.cloud.aiplatform.v1.PredictRequestResponseLoggingConfig result =
          new com.google.cloud.aiplatform.v1.PredictRequestResponseLoggingConfig(this);
      // Copy fields only if at least one builder presence bit is set.
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Copies each field whose builder presence bit is set into the new message.
    // Builder bits: 0x1 = enabled, 0x2 = sampling_rate, 0x4 = bigquery_destination;
    // the message itself only tracks the message field, as its bit 0x1.
    private void buildPartial0(
        com.google.cloud.aiplatform.v1.PredictRequestResponseLoggingConfig result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.enabled_ = enabled_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.samplingRate_ = samplingRate_;
      }
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.bigqueryDestination_ =
            bigqueryDestinationBuilder_ == null
                ? bigqueryDestination_
                : bigqueryDestinationBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      result.bitField0_ |= to_bitField0_;
    }
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      // Fast path for the exact generated type; anything else goes through the
      // reflective field-by-field merge in the superclass.
      if (other instanceof com.google.cloud.aiplatform.v1.PredictRequestResponseLoggingConfig) {
        return mergeFrom(
            (com.google.cloud.aiplatform.v1.PredictRequestResponseLoggingConfig) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    public Builder mergeFrom(
        com.google.cloud.aiplatform.v1.PredictRequestResponseLoggingConfig other) {
      // Merging the default instance is a no-op.
      if (other
          == com.google.cloud.aiplatform.v1.PredictRequestResponseLoggingConfig
              .getDefaultInstance()) return this;
      // Proto3 scalar merge: only non-default values overwrite ours.
      if (other.getEnabled() != false) {
        setEnabled(other.getEnabled());
      }
      if (other.getSamplingRate() != 0D) {
        setSamplingRate(other.getSamplingRate());
      }
      // Message fields are merged recursively when present on the other side.
      if (other.hasBigqueryDestination()) {
        mergeBigqueryDestination(other.getBigqueryDestination());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      // No required fields, so a Builder is always buildable.
      return true;
    }
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          // A tag encodes (field_number << 3) | wire_type; 0 means end of input.
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 8:
              { // field 1 (enabled), wire type 0 = varint
                enabled_ = input.readBool();
                bitField0_ |= 0x00000001;
                break;
              } // case 8
            case 17:
              { // field 2 (sampling_rate), wire type 1 = 64-bit fixed
                samplingRate_ = input.readDouble();
                bitField0_ |= 0x00000002;
                break;
              } // case 17
            case 26:
              { // field 3 (bigquery_destination), wire type 2 = length-delimited
                input.readMessage(
                    getBigqueryDestinationFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            default:
              {
                // Unknown tags are preserved in the unknown-field set.
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        // Notify parents even on failure; fields read so far were mutated.
        onChanged();
      } // finally
      return this;
    }
private int bitField0_;
private boolean enabled_;
/**
*
*
* <pre>
* If logging is enabled or not.
* </pre>
*
* <code>bool enabled = 1;</code>
*
* @return The enabled.
*/
@java.lang.Override
public boolean getEnabled() {
return enabled_;
}
/**
*
*
* <pre>
* If logging is enabled or not.
* </pre>
*
* <code>bool enabled = 1;</code>
*
* @param value The enabled to set.
* @return This builder for chaining.
*/
public Builder setEnabled(boolean value) {
enabled_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* If logging is enabled or not.
* </pre>
*
* <code>bool enabled = 1;</code>
*
* @return This builder for chaining.
*/
public Builder clearEnabled() {
bitField0_ = (bitField0_ & ~0x00000001);
enabled_ = false;
onChanged();
return this;
}
private double samplingRate_;
/**
*
*
* <pre>
* Percentage of requests to be logged, expressed as a fraction in
* range(0,1].
* </pre>
*
* <code>double sampling_rate = 2;</code>
*
* @return The samplingRate.
*/
@java.lang.Override
public double getSamplingRate() {
return samplingRate_;
}
/**
*
*
* <pre>
* Percentage of requests to be logged, expressed as a fraction in
* range(0,1].
* </pre>
*
* <code>double sampling_rate = 2;</code>
*
* @param value The samplingRate to set.
* @return This builder for chaining.
*/
public Builder setSamplingRate(double value) {
samplingRate_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Percentage of requests to be logged, expressed as a fraction in
* range(0,1].
* </pre>
*
* <code>double sampling_rate = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearSamplingRate() {
bitField0_ = (bitField0_ & ~0x00000002);
samplingRate_ = 0D;
onChanged();
return this;
}
private com.google.cloud.aiplatform.v1.BigQueryDestination bigqueryDestination_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.aiplatform.v1.BigQueryDestination,
com.google.cloud.aiplatform.v1.BigQueryDestination.Builder,
com.google.cloud.aiplatform.v1.BigQueryDestinationOrBuilder>
bigqueryDestinationBuilder_;
/**
*
*
* <pre>
* BigQuery table for logging.
* If only given a project, a new dataset will be created with name
* `logging_<endpoint-display-name>_<endpoint-id>` where
* <endpoint-display-name> will be made BigQuery-dataset-name compatible (e.g.
* most special characters will become underscores). If no table name is
* given, a new table will be created with name `request_response_logging`
* </pre>
*
* <code>.google.cloud.aiplatform.v1.BigQueryDestination bigquery_destination = 3;</code>
*
* @return Whether the bigqueryDestination field is set.
*/
public boolean hasBigqueryDestination() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
*
*
* <pre>
* BigQuery table for logging.
* If only given a project, a new dataset will be created with name
* `logging_<endpoint-display-name>_<endpoint-id>` where
* <endpoint-display-name> will be made BigQuery-dataset-name compatible (e.g.
* most special characters will become underscores). If no table name is
* given, a new table will be created with name `request_response_logging`
* </pre>
*
* <code>.google.cloud.aiplatform.v1.BigQueryDestination bigquery_destination = 3;</code>
*
* @return The bigqueryDestination.
*/
public com.google.cloud.aiplatform.v1.BigQueryDestination getBigqueryDestination() {
if (bigqueryDestinationBuilder_ == null) {
return bigqueryDestination_ == null
? com.google.cloud.aiplatform.v1.BigQueryDestination.getDefaultInstance()
: bigqueryDestination_;
} else {
return bigqueryDestinationBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* BigQuery table for logging.
* If only given a project, a new dataset will be created with name
* `logging_<endpoint-display-name>_<endpoint-id>` where
* <endpoint-display-name> will be made BigQuery-dataset-name compatible (e.g.
* most special characters will become underscores). If no table name is
* given, a new table will be created with name `request_response_logging`
* </pre>
*
* <code>.google.cloud.aiplatform.v1.BigQueryDestination bigquery_destination = 3;</code>
*/
public Builder setBigqueryDestination(
com.google.cloud.aiplatform.v1.BigQueryDestination value) {
if (bigqueryDestinationBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
bigqueryDestination_ = value;
} else {
bigqueryDestinationBuilder_.setMessage(value);
}
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* BigQuery table for logging.
* If only given a project, a new dataset will be created with name
* `logging_<endpoint-display-name>_<endpoint-id>` where
* <endpoint-display-name> will be made BigQuery-dataset-name compatible (e.g.
* most special characters will become underscores). If no table name is
* given, a new table will be created with name `request_response_logging`
* </pre>
*
* <code>.google.cloud.aiplatform.v1.BigQueryDestination bigquery_destination = 3;</code>
*/
public Builder setBigqueryDestination(
com.google.cloud.aiplatform.v1.BigQueryDestination.Builder builderForValue) {
if (bigqueryDestinationBuilder_ == null) {
bigqueryDestination_ = builderForValue.build();
} else {
bigqueryDestinationBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* BigQuery table for logging.
* If only given a project, a new dataset will be created with name
* `logging_<endpoint-display-name>_<endpoint-id>` where
* <endpoint-display-name> will be made BigQuery-dataset-name compatible (e.g.
* most special characters will become underscores). If no table name is
* given, a new table will be created with name `request_response_logging`
* </pre>
*
* <code>.google.cloud.aiplatform.v1.BigQueryDestination bigquery_destination = 3;</code>
*/
    public Builder mergeBigqueryDestination(
        com.google.cloud.aiplatform.v1.BigQueryDestination value) {
      if (bigqueryDestinationBuilder_ == null) {
        // No nested builder: merge field-by-field only when a non-default value is
        // already present; otherwise adopt the incoming message wholesale.
        if (((bitField0_ & 0x00000004) != 0)
            && bigqueryDestination_ != null
            && bigqueryDestination_
                != com.google.cloud.aiplatform.v1.BigQueryDestination.getDefaultInstance()) {
          getBigqueryDestinationBuilder().mergeFrom(value);
        } else {
          bigqueryDestination_ = value;
        }
      } else {
        // A nested builder owns the field; delegate the merge to it.
        bigqueryDestinationBuilder_.mergeFrom(value);
      }
      // Set the has-bit and notify parents only when the plain message field holds
      // a value (the nested-builder path tracks its own state).
      if (bigqueryDestination_ != null) {
        bitField0_ |= 0x00000004;
        onChanged();
      }
      return this;
    }
/**
*
*
* <pre>
* BigQuery table for logging.
* If only given a project, a new dataset will be created with name
* `logging_<endpoint-display-name>_<endpoint-id>` where
* <endpoint-display-name> will be made BigQuery-dataset-name compatible (e.g.
* most special characters will become underscores). If no table name is
* given, a new table will be created with name `request_response_logging`
* </pre>
*
* <code>.google.cloud.aiplatform.v1.BigQueryDestination bigquery_destination = 3;</code>
*/
public Builder clearBigqueryDestination() {
bitField0_ = (bitField0_ & ~0x00000004);
bigqueryDestination_ = null;
if (bigqueryDestinationBuilder_ != null) {
bigqueryDestinationBuilder_.dispose();
bigqueryDestinationBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* BigQuery table for logging.
* If only given a project, a new dataset will be created with name
* `logging_<endpoint-display-name>_<endpoint-id>` where
* <endpoint-display-name> will be made BigQuery-dataset-name compatible (e.g.
* most special characters will become underscores). If no table name is
* given, a new table will be created with name `request_response_logging`
* </pre>
*
* <code>.google.cloud.aiplatform.v1.BigQueryDestination bigquery_destination = 3;</code>
*/
public com.google.cloud.aiplatform.v1.BigQueryDestination.Builder
getBigqueryDestinationBuilder() {
bitField0_ |= 0x00000004;
onChanged();
return getBigqueryDestinationFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* BigQuery table for logging.
* If only given a project, a new dataset will be created with name
* `logging_<endpoint-display-name>_<endpoint-id>` where
* <endpoint-display-name> will be made BigQuery-dataset-name compatible (e.g.
* most special characters will become underscores). If no table name is
* given, a new table will be created with name `request_response_logging`
* </pre>
*
* <code>.google.cloud.aiplatform.v1.BigQueryDestination bigquery_destination = 3;</code>
*/
public com.google.cloud.aiplatform.v1.BigQueryDestinationOrBuilder
getBigqueryDestinationOrBuilder() {
if (bigqueryDestinationBuilder_ != null) {
return bigqueryDestinationBuilder_.getMessageOrBuilder();
} else {
return bigqueryDestination_ == null
? com.google.cloud.aiplatform.v1.BigQueryDestination.getDefaultInstance()
: bigqueryDestination_;
}
}
/**
*
*
* <pre>
* BigQuery table for logging.
* If only given a project, a new dataset will be created with name
* `logging_<endpoint-display-name>_<endpoint-id>` where
* <endpoint-display-name> will be made BigQuery-dataset-name compatible (e.g.
* most special characters will become underscores). If no table name is
* given, a new table will be created with name `request_response_logging`
* </pre>
*
* <code>.google.cloud.aiplatform.v1.BigQueryDestination bigquery_destination = 3;</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.aiplatform.v1.BigQueryDestination,
com.google.cloud.aiplatform.v1.BigQueryDestination.Builder,
com.google.cloud.aiplatform.v1.BigQueryDestinationOrBuilder>
getBigqueryDestinationFieldBuilder() {
if (bigqueryDestinationBuilder_ == null) {
bigqueryDestinationBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.aiplatform.v1.BigQueryDestination,
com.google.cloud.aiplatform.v1.BigQueryDestination.Builder,
com.google.cloud.aiplatform.v1.BigQueryDestinationOrBuilder>(
getBigqueryDestination(), getParentForChildren(), isClean());
bigqueryDestination_ = null;
}
return bigqueryDestinationBuilder_;
}
    // Replaces the unknown-field set wholesale; delegates to the generated superclass.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    // Merges (rather than replaces) unknown fields — used when combining messages.
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1.PredictRequestResponseLoggingConfig)
}
// @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1.PredictRequestResponseLoggingConfig)
  // Shared immutable default instance; protobuf messages are immutable so one
  // instance can safely serve every caller.
  private static final com.google.cloud.aiplatform.v1.PredictRequestResponseLoggingConfig
      DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1.PredictRequestResponseLoggingConfig();
  }
  public static com.google.cloud.aiplatform.v1.PredictRequestResponseLoggingConfig
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Wire-format parser. On any parse failure the partially-built message is
  // attached to the thrown InvalidProtocolBufferException so callers can inspect
  // what was decoded before the error.
  private static final com.google.protobuf.Parser<PredictRequestResponseLoggingConfig> PARSER =
      new com.google.protobuf.AbstractParser<PredictRequestResponseLoggingConfig>() {
        @java.lang.Override
        public PredictRequestResponseLoggingConfig parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            // Missing required fields: surface as an invalid-protobuf error.
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap low-level I/O failures in the protobuf exception type callers expect.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<PredictRequestResponseLoggingConfig> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<PredictRequestResponseLoggingConfig> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.aiplatform.v1.PredictRequestResponseLoggingConfig
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/jackrabbit-oak | 35,735 | oak-lucene/src/main/java/org/apache/lucene/search/IndexSearcher.java | /*
* COPIED FROM APACHE LUCENE 4.7.2
*
* Git URL: git@github.com:apache/lucene.git, tag: releases/lucene-solr/4.7.2, path: lucene/core/src/java
*
* (see https://issues.apache.org/jira/browse/OAK-10786 for details)
*/
package org.apache.lucene.search;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.NoSuchElementException;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.CompletionService;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executor;
import java.util.concurrent.ExecutorCompletionService;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.index.DirectoryReader; // javadocs
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.MultiFields;
import org.apache.lucene.index.IndexReaderContext;
import org.apache.lucene.index.ReaderUtil;
import org.apache.lucene.index.StoredFieldVisitor;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.TermContext;
import org.apache.lucene.index.Terms;
import org.apache.lucene.search.similarities.DefaultSimilarity;
import org.apache.lucene.search.similarities.Similarity;
import org.apache.lucene.store.NIOFSDirectory; // javadoc
import org.apache.lucene.util.ThreadInterruptedException;
import org.apache.lucene.index.IndexWriter; // javadocs
/** Implements search over a single IndexReader.
*
* <p>Applications usually need only call the inherited
* {@link #search(Query,int)}
* or {@link #search(Query,Filter,int)} methods. For
* performance reasons, if your index is unchanging, you
* should share a single IndexSearcher instance across
* multiple searches instead of creating a new one
* per-search. If your index has changed and you wish to
* see the changes reflected in searching, you should
* use {@link DirectoryReader#openIfChanged(DirectoryReader)}
* to obtain a new reader and
* then create a new IndexSearcher from that. Also, for
* low-latency turnaround it's best to use a near-real-time
* reader ({@link DirectoryReader#open(IndexWriter,boolean)}).
* Once you have a new {@link IndexReader}, it's relatively
* cheap to create a new IndexSearcher from it.
*
* <a name="thread-safety"></a><p><b>NOTE</b>: <code>{@link
* IndexSearcher}</code> instances are completely
* thread safe, meaning multiple threads can call any of its
* methods, concurrently. If your application requires
* external synchronization, you should <b>not</b>
* synchronize on the <code>IndexSearcher</code> instance;
* use your own (non-Lucene) objects instead.</p>
*/
public class IndexSearcher {
final IndexReader reader; // package private for testing!
// NOTE: these members might change in incompatible ways
// in the next release
protected final IndexReaderContext readerContext;
protected final List<AtomicReaderContext> leafContexts;
/** used with executor - each slice holds a set of leafs executed within one thread */
protected final LeafSlice[] leafSlices;
// These are only used for multi-threaded search
private final ExecutorService executor;
// the default Similarity
private static final Similarity defaultSimilarity = new DefaultSimilarity();
  /**
   * Expert: returns a default Similarity instance.
   * In general, this method is only called to initialize searchers and writers.
   * User code and query implementations should respect
   * {@link IndexSearcher#getSimilarity()}.
   * @lucene.internal
   */
  public static Similarity getDefaultSimilarity() {
    return defaultSimilarity;
  }
  /** The Similarity implementation used by this searcher. */
  private Similarity similarity = defaultSimilarity;
  /** Creates a searcher searching the provided index. */
  public IndexSearcher(IndexReader r) {
    this(r, null); // null executor: all searches run on the calling thread
  }
  /** Runs searches for each segment separately, using the
   * provided ExecutorService. IndexSearcher will not
   * shutdown/awaitTermination this ExecutorService on
   * close; you must do so, eventually, on your own. NOTE:
   * if you are using {@link NIOFSDirectory}, do not use
   * the shutdownNow method of ExecutorService as this uses
   * Thread.interrupt under-the-hood which can silently
   * close file descriptors (see <a
   * href="https://issues.apache.org/jira/browse/LUCENE-2239">LUCENE-2239</a>).
   *
   * @lucene.experimental */
  public IndexSearcher(IndexReader r, ExecutorService executor) {
    this(r.getContext(), executor);
  }
  /**
   * Creates a searcher searching the provided top-level {@link IndexReaderContext}.
   * <p>
   * Given a non-<code>null</code> {@link ExecutorService} this method runs
   * searches for each segment separately, using the provided ExecutorService.
   * IndexSearcher will not shutdown/awaitTermination this ExecutorService on
   * close; you must do so, eventually, on your own. NOTE: if you are using
   * {@link NIOFSDirectory}, do not use the shutdownNow method of
   * ExecutorService as this uses Thread.interrupt under-the-hood which can
   * silently close file descriptors (see <a
   * href="https://issues.apache.org/jira/browse/LUCENE-2239">LUCENE-2239</a>).
   *
   * @see IndexReaderContext
   * @see IndexReader#getContext()
   * @lucene.experimental
   */
  public IndexSearcher(IndexReaderContext context, ExecutorService executor) {
    // Only top-level contexts are legal: leaf contexts would make docBase math wrong.
    assert context.isTopLevel: "IndexSearcher's ReaderContext must be topLevel for reader" + context.reader();
    reader = context.reader();
    this.executor = executor;
    this.readerContext = context;
    leafContexts = context.leaves();
    // Slices are only needed for the multi-threaded path; skip entirely otherwise.
    this.leafSlices = executor == null ? null : slices(leafContexts);
  }
  /**
   * Creates a searcher searching the provided top-level {@link IndexReaderContext},
   * running all searches on the calling thread.
   *
   * @see IndexReaderContext
   * @see IndexReader#getContext()
   * @lucene.experimental
   */
  public IndexSearcher(IndexReaderContext context) {
    this(context, null);
  }
/**
* Expert: Creates an array of leaf slices each holding a subset of the given leaves.
* Each {@link LeafSlice} is executed in a single thread. By default there
* will be one {@link LeafSlice} per leaf ({@link AtomicReaderContext}).
*/
protected LeafSlice[] slices(List<AtomicReaderContext> leaves) {
LeafSlice[] slices = new LeafSlice[leaves.size()];
for (int i = 0; i < slices.length; i++) {
slices[i] = new LeafSlice(leaves.get(i));
}
return slices;
}
  /** Return the {@link IndexReader} this searches. */
  public IndexReader getIndexReader() {
    return reader;
  }
  /**
   * Sugar for <code>.getIndexReader().document(docID)</code>
   * @see IndexReader#document(int)
   */
  public Document doc(int docID) throws IOException {
    return reader.document(docID);
  }
  /**
   * Sugar for <code>.getIndexReader().document(docID, fieldVisitor)</code>
   * @see IndexReader#document(int, StoredFieldVisitor)
   */
  public void doc(int docID, StoredFieldVisitor fieldVisitor) throws IOException {
    reader.document(docID, fieldVisitor);
  }
  /**
   * Sugar for <code>.getIndexReader().document(docID, fieldsToLoad)</code>
   * @see IndexReader#document(int, Set)
   */
  public Document doc(int docID, Set<String> fieldsToLoad) throws IOException {
    return reader.document(docID, fieldsToLoad);
  }
  /**
   * @deprecated Use {@link #doc(int, Set)} instead.
   */
  @Deprecated
  public final Document document(int docID, Set<String> fieldsToLoad) throws IOException {
    return doc(docID, fieldsToLoad);
  }
  /** Expert: Set the Similarity implementation used by this IndexSearcher.
   * Affects scoring of all subsequent searches; not synchronized, so set it
   * before sharing the searcher across threads.
   */
  public void setSimilarity(Similarity similarity) {
    this.similarity = similarity;
  }
  /** Returns the {@link Similarity} in effect for this searcher (never null). */
  public Similarity getSimilarity() {
    return similarity;
  }
/** @lucene.internal */
protected Query wrapFilter(Query query, Filter filter) {
return (filter == null) ? query : new FilteredQuery(query, filter);
}
  /** Finds the top <code>n</code>
   * hits for <code>query</code> where all results are after a previous
   * result (<code>after</code>).
   * <p>
   * By passing the bottom result from a previous page as <code>after</code>,
   * this method can be used for efficient 'deep-paging' across potentially
   * large result sets.
   *
   * @throws BooleanQuery.TooManyClauses If a query would exceed
   *         {@link BooleanQuery#getMaxClauseCount()} clauses.
   */
  public TopDocs searchAfter(ScoreDoc after, Query query, int n) throws IOException {
    return search(createNormalizedWeight(query), after, n);
  }
  /** Finds the top <code>n</code>
   * hits for <code>query</code>, applying <code>filter</code> if non-null,
   * where all results are after a previous result (<code>after</code>).
   * <p>
   * By passing the bottom result from a previous page as <code>after</code>,
   * this method can be used for efficient 'deep-paging' across potentially
   * large result sets.
   *
   * @throws BooleanQuery.TooManyClauses If a query would exceed
   *         {@link BooleanQuery#getMaxClauseCount()} clauses.
   */
  public TopDocs searchAfter(ScoreDoc after, Query query, Filter filter, int n) throws IOException {
    return search(createNormalizedWeight(wrapFilter(query, filter)), after, n);
  }
  /** Finds the top <code>n</code>
   * hits for <code>query</code>.
   *
   * @throws BooleanQuery.TooManyClauses If a query would exceed
   *         {@link BooleanQuery#getMaxClauseCount()} clauses.
   */
  public TopDocs search(Query query, int n)
    throws IOException {
    return search(query, null, n); // delegates to the filtered variant with no filter
  }
  /** Finds the top <code>n</code>
   * hits for <code>query</code>, applying <code>filter</code> if non-null.
   *
   * @throws BooleanQuery.TooManyClauses If a query would exceed
   *         {@link BooleanQuery#getMaxClauseCount()} clauses.
   */
  public TopDocs search(Query query, Filter filter, int n)
    throws IOException {
    return search(createNormalizedWeight(wrapFilter(query, filter)), null, n);
  }
  /** Lower-level search API.
   *
   * <p>{@link Collector#collect(int)} is called for every matching
   * document.
   *
   * @param query to match documents
   * @param filter if non-null, used to permit documents to be collected.
   * @param results to receive hits
   * @throws BooleanQuery.TooManyClauses If a query would exceed
   *         {@link BooleanQuery#getMaxClauseCount()} clauses.
   */
  public void search(Query query, Filter filter, Collector results)
    throws IOException {
    // Collector-based search always runs single-threaded over all leaves.
    search(leafContexts, createNormalizedWeight(wrapFilter(query, filter)), results);
  }
  /** Lower-level search API.
   *
   * <p>{@link Collector#collect(int)} is called for every matching document.
   *
   * @throws BooleanQuery.TooManyClauses If a query would exceed
   *         {@link BooleanQuery#getMaxClauseCount()} clauses.
   */
  public void search(Query query, Collector results)
    throws IOException {
    search(leafContexts, createNormalizedWeight(query), results);
  }
  /** Search implementation with arbitrary sorting. Finds
   * the top <code>n</code> hits for <code>query</code>, applying
   * <code>filter</code> if non-null, and sorting the hits by the criteria in
   * <code>sort</code>.
   *
   * <p>NOTE: this does not compute scores by default; use
   * {@link IndexSearcher#search(Query,Filter,int,Sort,boolean,boolean)} to
   * control scoring.
   *
   * @throws BooleanQuery.TooManyClauses If a query would exceed
   *         {@link BooleanQuery#getMaxClauseCount()} clauses.
   */
  public TopFieldDocs search(Query query, Filter filter, int n,
                             Sort sort) throws IOException {
    // doDocScores=false, doMaxScore=false: scores are skipped for speed.
    return search(createNormalizedWeight(wrapFilter(query, filter)), n, sort, false, false);
  }
  /** Search implementation with arbitrary sorting, plus
   * control over whether hit scores and max score
   * should be computed. Finds
   * the top <code>n</code> hits for <code>query</code>, applying
   * <code>filter</code> if non-null, and sorting the hits by the criteria in
   * <code>sort</code>. If <code>doDocScores</code> is <code>true</code>
   * then the score of each hit will be computed and
   * returned. If <code>doMaxScore</code> is
   * <code>true</code> then the maximum score over all
   * collected hits will be computed.
   *
   * @throws BooleanQuery.TooManyClauses If a query would exceed
   *         {@link BooleanQuery#getMaxClauseCount()} clauses.
   */
  public TopFieldDocs search(Query query, Filter filter, int n,
                             Sort sort, boolean doDocScores, boolean doMaxScore) throws IOException {
    return search(createNormalizedWeight(wrapFilter(query, filter)), n, sort, doDocScores, doMaxScore);
  }
  /** Finds the top <code>n</code>
   * hits for <code>query</code>, applying <code>filter</code> if non-null,
   * where all results are after a previous result (<code>after</code>).
   * <p>
   * By passing the bottom result from a previous page as <code>after</code>,
   * this method can be used for efficient 'deep-paging' across potentially
   * large result sets.
   *
   * @throws BooleanQuery.TooManyClauses If a query would exceed
   *         {@link BooleanQuery#getMaxClauseCount()} clauses.
   */
  public TopDocs searchAfter(ScoreDoc after, Query query, Filter filter, int n, Sort sort) throws IOException {
    // Sorted paging needs the previous page's sort-field values, which only
    // FieldDoc carries; a plain ScoreDoc cannot be resumed from.
    if (after != null && !(after instanceof FieldDoc)) {
      // TODO: if we fix type safety of TopFieldDocs we can
      // remove this
      throw new IllegalArgumentException("after must be a FieldDoc; got " + after);
    }
    return search(createNormalizedWeight(wrapFilter(query, filter)), (FieldDoc) after, n, sort, true, false, false);
  }
  /**
   * Search implementation with arbitrary sorting and no filter.
   * @param query The query to search for
   * @param n Return only the top n results
   * @param sort The {@link org.apache.lucene.search.Sort} object
   * @return The top docs, sorted according to the supplied {@link org.apache.lucene.search.Sort} instance
   * @throws IOException if there is a low-level I/O error
   */
  public TopFieldDocs search(Query query, int n,
                             Sort sort) throws IOException {
    return search(createNormalizedWeight(query), n, sort, false, false);
  }
  /** Finds the top <code>n</code>
   * hits for <code>query</code> where all results are after a previous
   * result (<code>after</code>).
   * <p>
   * By passing the bottom result from a previous page as <code>after</code>,
   * this method can be used for efficient 'deep-paging' across potentially
   * large result sets.
   *
   * @throws BooleanQuery.TooManyClauses If a query would exceed
   *         {@link BooleanQuery#getMaxClauseCount()} clauses.
   */
  public TopDocs searchAfter(ScoreDoc after, Query query, int n, Sort sort) throws IOException {
    if (after != null && !(after instanceof FieldDoc)) {
      // TODO: if we fix type safety of TopFieldDocs we can
      // remove this
      throw new IllegalArgumentException("after must be a FieldDoc; got " + after);
    }
    return search(createNormalizedWeight(query), (FieldDoc) after, n, sort, true, false, false);
  }
  /** Finds the top <code>n</code>
   * hits for <code>query</code> where all results are after a previous
   * result (<code>after</code>), allowing control over
   * whether hit scores and max score should be computed.
   * <p>
   * By passing the bottom result from a previous page as <code>after</code>,
   * this method can be used for efficient 'deep-paging' across potentially
   * large result sets. If <code>doDocScores</code> is <code>true</code>
   * then the score of each hit will be computed and
   * returned. If <code>doMaxScore</code> is
   * <code>true</code> then the maximum score over all
   * collected hits will be computed.
   *
   * @throws BooleanQuery.TooManyClauses If a query would exceed
   *         {@link BooleanQuery#getMaxClauseCount()} clauses.
   */
  public TopDocs searchAfter(ScoreDoc after, Query query, Filter filter, int n, Sort sort,
                             boolean doDocScores, boolean doMaxScore) throws IOException {
    // Sorted paging requires a FieldDoc (carries the prior page's sort values).
    if (after != null && !(after instanceof FieldDoc)) {
      // TODO: if we fix type safety of TopFieldDocs we can
      // remove this
      throw new IllegalArgumentException("after must be a FieldDoc; got " + after);
    }
    return search(createNormalizedWeight(wrapFilter(query, filter)), (FieldDoc) after, n, sort, true,
                  doDocScores, doMaxScore);
  }
  /** Expert: Low-level search implementation. Finds the top <code>n</code>
   * hits for <code>query</code>, applying <code>filter</code> if non-null.
   *
   * <p>Applications should usually call {@link IndexSearcher#search(Query,int)} or
   * {@link IndexSearcher#search(Query,Filter,int)} instead.
   * @throws BooleanQuery.TooManyClauses If a query would exceed
   *         {@link BooleanQuery#getMaxClauseCount()} clauses.
   */
  protected TopDocs search(Weight weight, ScoreDoc after, int nDocs) throws IOException {
    int limit = reader.maxDoc();
    if (limit == 0) {
      // Empty index: clamp to 1 so collector creation below stays valid.
      limit = 1;
    }
    if (after != null && after.doc >= limit) {
      throw new IllegalArgumentException("after.doc exceeds the number of documents in the reader: after.doc="
          + after.doc + " limit=" + limit);
    }
    nDocs = Math.min(nDocs, limit);
    if (executor == null) {
      // Single-threaded path: search all leaves inline.
      return search(leafContexts, weight, after, nDocs);
    } else {
      // Multi-threaded path: each slice is searched on the executor; partial
      // results are merged into one shared HitQueue guarded by `lock` inside
      // SearcherCallableNoSort.
      final HitQueue hq = new HitQueue(nDocs, false);
      final Lock lock = new ReentrantLock();
      final ExecutionHelper<TopDocs> runner = new ExecutionHelper<TopDocs>(executor);
      for (int i = 0; i < leafSlices.length; i++) { // search each sub
        runner.submit(new SearcherCallableNoSort(lock, this, leafSlices[i], weight, after, nDocs, hq));
      }
      int totalHits = 0;
      float maxScore = Float.NEGATIVE_INFINITY;
      // Iterating `runner` blocks until each task completes (completion order).
      for (final TopDocs topDocs : runner) {
        if(topDocs.totalHits != 0) {
          totalHits += topDocs.totalHits;
          maxScore = Math.max(maxScore, topDocs.getMaxScore());
        }
      }
      // Drain the priority queue smallest-first, filling the array back-to-front
      // so scoreDocs ends up in descending score order.
      final ScoreDoc[] scoreDocs = new ScoreDoc[hq.size()];
      for (int i = hq.size() - 1; i >= 0; i--) // put docs in array
        scoreDocs[i] = hq.pop();
      return new TopDocs(totalHits, scoreDocs, maxScore);
    }
  }
/** Expert: Low-level search implementation. Finds the top <code>n</code>
* hits for <code>query</code>.
*
* <p>Applications should usually call {@link IndexSearcher#search(Query,int)} or
* {@link IndexSearcher#search(Query,Filter,int)} instead.
* @throws BooleanQuery.TooManyClauses If a query would exceed
* {@link BooleanQuery#getMaxClauseCount()} clauses.
*/
protected TopDocs search(List<AtomicReaderContext> leaves, Weight weight, ScoreDoc after, int nDocs) throws IOException {
// single thread
int limit = reader.maxDoc();
if (limit == 0) {
limit = 1;
}
nDocs = Math.min(nDocs, limit);
TopScoreDocCollector collector = TopScoreDocCollector.create(nDocs, after, !weight.scoresDocsOutOfOrder());
search(leaves, weight, collector);
return collector.topDocs();
}
  /** Expert: Low-level search implementation with arbitrary
   * sorting and control over whether hit scores and max
   * score should be computed. Finds
   * the top <code>n</code> hits for <code>query</code> and sorting the hits
   * by the criteria in <code>sort</code>.
   *
   * <p>Applications should usually call {@link
   * IndexSearcher#search(Query,Filter,int,Sort)} instead.
   *
   * @throws BooleanQuery.TooManyClauses If a query would exceed
   *         {@link BooleanQuery#getMaxClauseCount()} clauses.
   */
  protected TopFieldDocs search(Weight weight,
                                final int nDocs, Sort sort,
                                boolean doDocScores, boolean doMaxScore) throws IOException {
    // after=null (no paging), fillFields=true (populate FieldDoc values).
    return search(weight, null, nDocs, sort, true, doDocScores, doMaxScore);
  }
  /**
   * Just like {@link #search(Weight, int, Sort, boolean, boolean)}, but you choose
   * whether or not the fields in the returned {@link FieldDoc} instances should
   * be set by specifying fillFields.
   */
  protected TopFieldDocs search(Weight weight, FieldDoc after, int nDocs,
                                Sort sort, boolean fillFields,
                                boolean doDocScores, boolean doMaxScore)
      throws IOException {
    if (sort == null) throw new NullPointerException("Sort must not be null");
    int limit = reader.maxDoc();
    if (limit == 0) {
      // Empty index: clamp to 1 so collector creation stays valid.
      limit = 1;
    }
    nDocs = Math.min(nDocs, limit);
    if (executor == null) {
      // use all leaves here!
      return search(leafContexts, weight, after, nDocs, sort, fillFields, doDocScores, doMaxScore);
    } else {
      // Multi-threaded path: one shared TopFieldCollector merges per-slice
      // results; SearcherCallableWithSort serializes access via `lock`.
      // trackDocScores arg is false here because scores are re-injected by the
      // callables via their FakeScorer.
      final TopFieldCollector topCollector = TopFieldCollector.create(sort, nDocs,
                                                                      after,
                                                                      fillFields,
                                                                      doDocScores,
                                                                      doMaxScore,
                                                                      false);
      final Lock lock = new ReentrantLock();
      final ExecutionHelper<TopFieldDocs> runner = new ExecutionHelper<TopFieldDocs>(executor);
      for (int i = 0; i < leafSlices.length; i++) { // search each leaf slice
        runner.submit(
                      new SearcherCallableWithSort(lock, this, leafSlices[i], weight, after, nDocs, topCollector, sort, doDocScores, doMaxScore));
      }
      int totalHits = 0;
      float maxScore = Float.NEGATIVE_INFINITY;
      // Blocks until every slice's task has completed.
      for (final TopFieldDocs topFieldDocs : runner) {
        if (topFieldDocs.totalHits != 0) {
          totalHits += topFieldDocs.totalHits;
          maxScore = Math.max(maxScore, topFieldDocs.getMaxScore());
        }
      }
      final TopFieldDocs topDocs = (TopFieldDocs) topCollector.topDocs();
      // Rebuild with the aggregated totalHits; the collector's own count only
      // reflects what survived the merge.
      return new TopFieldDocs(totalHits, topDocs.scoreDocs, topDocs.fields, topDocs.getMaxScore());
    }
  }
/**
* Just like {@link #search(Weight, int, Sort, boolean, boolean)}, but you choose
* whether or not the fields in the returned {@link FieldDoc} instances should
* be set by specifying fillFields.
*/
protected TopFieldDocs search(List<AtomicReaderContext> leaves, Weight weight, FieldDoc after, int nDocs,
Sort sort, boolean fillFields, boolean doDocScores, boolean doMaxScore) throws IOException {
// single thread
int limit = reader.maxDoc();
if (limit == 0) {
limit = 1;
}
nDocs = Math.min(nDocs, limit);
TopFieldCollector collector = TopFieldCollector.create(sort, nDocs, after,
fillFields, doDocScores,
doMaxScore, !weight.scoresDocsOutOfOrder());
search(leaves, weight, collector);
return (TopFieldDocs) collector.topDocs();
}
  /**
   * Lower-level search API.
   *
   * <p>
   * {@link Collector#collect(int)} is called for every document. <br>
   *
   * <p>
   * NOTE: this method executes the searches on all given leaves exclusively.
   * To search across all the searchers leaves use {@link #leafContexts}.
   *
   * @param leaves
   *          the searchers leaves to execute the searches on
   * @param weight
   *          to match documents
   * @param collector
   *          to receive hits
   * @throws BooleanQuery.TooManyClauses If a query would exceed
   *         {@link BooleanQuery#getMaxClauseCount()} clauses.
   */
  protected void search(List<AtomicReaderContext> leaves, Weight weight, Collector collector)
      throws IOException {
    // TODO: should we make this
    // threaded...? the Collector could be sync'd?
    // always use single thread:
    for (AtomicReaderContext ctx : leaves) { // search each subreader
      try {
        collector.setNextReader(ctx);
      } catch (CollectionTerminatedException e) {
        // there is no doc of interest in this reader context
        // continue with the following leaf
        continue;
      }
      // Ask for an in-order scorer only when the collector requires it; pass
      // live docs so deleted documents are skipped at the scorer level.
      Scorer scorer = weight.scorer(ctx, !collector.acceptsDocsOutOfOrder(), true, ctx.reader().getLiveDocs());
      if (scorer != null) {
        try {
          scorer.score(collector);
        } catch (CollectionTerminatedException e) {
          // collection was terminated prematurely
          // continue with the following leaf
        }
      }
    }
  }
/** Expert: called to re-write queries into primitive queries.
* @throws BooleanQuery.TooManyClauses If a query would exceed
* {@link BooleanQuery#getMaxClauseCount()} clauses.
*/
public Query rewrite(Query original) throws IOException {
Query query = original;
for (Query rewrittenQuery = query.rewrite(reader); rewrittenQuery != query;
rewrittenQuery = query.rewrite(reader)) {
query = rewrittenQuery;
}
return query;
}
  /** Returns an Explanation that describes how <code>doc</code> scored against
   * <code>query</code>.
   *
   * <p>This is intended to be used in developing Similarity implementations,
   * and, for good performance, should not be displayed with every hit.
   * Computing an explanation is as expensive as executing the query over the
   * entire index.
   */
  public Explanation explain(Query query, int doc) throws IOException {
    return explain(createNormalizedWeight(query), doc);
  }
/** Expert: low-level implementation method
* Returns an Explanation that describes how <code>doc</code> scored against
* <code>weight</code>.
*
* <p>This is intended to be used in developing Similarity implementations,
* and, for good performance, should not be displayed with every hit.
* Computing an explanation is as expensive as executing the query over the
* entire index.
* <p>Applications should call {@link IndexSearcher#explain(Query, int)}.
* @throws BooleanQuery.TooManyClauses If a query would exceed
* {@link BooleanQuery#getMaxClauseCount()} clauses.
*/
protected Explanation explain(Weight weight, int doc) throws IOException {
int n = ReaderUtil.subIndex(doc, leafContexts);
final AtomicReaderContext ctx = leafContexts.get(n);
int deBasedDoc = doc - ctx.docBase;
return weight.explain(ctx, deBasedDoc);
}
/**
* Creates a normalized weight for a top-level {@link Query}.
* The query is rewritten by this method and {@link Query#createWeight} called,
* afterwards the {@link Weight} is normalized. The returned {@code Weight}
* can then directly be used to get a {@link Scorer}.
* @lucene.internal
*/
public Weight createNormalizedWeight(Query query) throws IOException {
query = rewrite(query);
Weight weight = query.createWeight(this);
float v = weight.getValueForNormalization();
float norm = getSimilarity().queryNorm(v);
if (Float.isInfinite(norm) || Float.isNaN(norm)) {
norm = 1.0f;
}
weight.normalize(norm, 1.0f);
return weight;
}
  /**
   * Returns this searchers the top-level {@link IndexReaderContext}.
   * @see IndexReader#getContext()
   */
  /* sugar for #getReader().getTopReaderContext() */
  public IndexReaderContext getTopReaderContext() {
    return readerContext;
  }
  /**
   * A thread subclass for searching a single searchable.
   * Searches one {@link LeafSlice} and merges its top docs into a shared
   * {@link HitQueue} under the supplied lock.
   */
  private static final class SearcherCallableNoSort implements Callable<TopDocs> {
    private final Lock lock;          // guards the shared hq across slice threads
    private final IndexSearcher searcher;
    private final Weight weight;
    private final ScoreDoc after;
    private final int nDocs;
    private final HitQueue hq;        // shared result queue, NOT thread-safe by itself
    private final LeafSlice slice;
    public SearcherCallableNoSort(Lock lock, IndexSearcher searcher, LeafSlice slice, Weight weight,
        ScoreDoc after, int nDocs, HitQueue hq) {
      this.lock = lock;
      this.searcher = searcher;
      this.weight = weight;
      this.after = after;
      this.nDocs = nDocs;
      this.hq = hq;
      this.slice = slice;
    }
    @Override
    public TopDocs call() throws IOException {
      final TopDocs docs = searcher.search(Arrays.asList(slice.leaves), weight, after, nDocs);
      final ScoreDoc[] scoreDocs = docs.scoreDocs;
      //it would be so nice if we had a thread-safe insert
      lock.lock();
      try {
        for (int j = 0; j < scoreDocs.length; j++) { // merge scoreDocs into hq
          final ScoreDoc scoreDoc = scoreDocs[j];
          // insertWithOverflow returns the rejected element; if our own doc
          // bounced, every later (lower-scoring) doc would bounce too — stop.
          if (scoreDoc == hq.insertWithOverflow(scoreDoc)) {
            break;
          }
        }
      } finally {
        lock.unlock();
      }
      return docs;
    }
  }
  /**
   * A thread subclass for searching a single searchable.
   * Searches one single-leaf {@link LeafSlice} with sorting, then replays its
   * hits into the shared {@link TopFieldCollector} under the supplied lock,
   * using a {@link FakeScorer} to feed each hit's precomputed score.
   */
  private static final class SearcherCallableWithSort implements Callable<TopFieldDocs> {
    private final Lock lock;            // guards the shared collector hq
    private final IndexSearcher searcher;
    private final Weight weight;
    private final int nDocs;
    private final TopFieldCollector hq; // shared merge collector, not thread-safe
    private final Sort sort;
    private final LeafSlice slice;
    private final FieldDoc after;
    private final boolean doDocScores;
    private final boolean doMaxScore;
    public SearcherCallableWithSort(Lock lock, IndexSearcher searcher, LeafSlice slice, Weight weight,
                                    FieldDoc after, int nDocs, TopFieldCollector hq, Sort sort,
                                    boolean doDocScores, boolean doMaxScore) {
      this.lock = lock;
      this.searcher = searcher;
      this.weight = weight;
      this.nDocs = nDocs;
      this.hq = hq;
      this.sort = sort;
      this.slice = slice;
      this.after = after;
      this.doDocScores = doDocScores;
      this.doMaxScore = doMaxScore;
    }
    // Minimal Scorer that only reports a preset doc/score pair; lets the
    // collector re-score already-collected hits without re-running the query.
    private final class FakeScorer extends Scorer {
      float score;
      int doc;
      public FakeScorer() {
        super(null);
      }
      @Override
      public int advance(int target) {
        throw new UnsupportedOperationException("FakeScorer doesn't support advance(int)");
      }
      @Override
      public int docID() {
        return doc;
      }
      @Override
      public int freq() {
        throw new UnsupportedOperationException("FakeScorer doesn't support freq()");
      }
      @Override
      public int nextDoc() {
        throw new UnsupportedOperationException("FakeScorer doesn't support nextDoc()");
      }
      @Override
      public float score() {
        return score;
      }
      @Override
      public long cost() {
        return 1;
      }
    }
    private final FakeScorer fakeScorer = new FakeScorer();
    @Override
    public TopFieldDocs call() throws IOException {
      // The merge below assumes exactly one leaf per slice (single docBase).
      assert slice.leaves.length == 1;
      final TopFieldDocs docs = searcher.search(Arrays.asList(slice.leaves),
          weight, after, nDocs, sort, true, doDocScores || sort.needsScores(), doMaxScore);
      lock.lock();
      try {
        final AtomicReaderContext ctx = slice.leaves[0];
        final int base = ctx.docBase;
        hq.setNextReader(ctx);
        hq.setScorer(fakeScorer);
        for(ScoreDoc scoreDoc : docs.scoreDocs) {
          // Feed each hit back through the shared collector in the leaf's
          // local doc space, with its already-computed score.
          fakeScorer.doc = scoreDoc.doc - base;
          fakeScorer.score = scoreDoc.score;
          hq.collect(scoreDoc.doc-base);
        }
        // Carry over maxScore from sub:
        if (doMaxScore && docs.getMaxScore() > hq.maxScore) {
          hq.maxScore = docs.getMaxScore();
        }
      } finally {
        lock.unlock();
      }
      return docs;
    }
  }
  /**
   * A helper class that wraps a {@link CompletionService} and provides an
   * iterable interface to the completed {@link Callable} instances.
   * Iteration yields results in completion order and blocks until each task
   * finishes. Not thread-safe: submit and iterate from a single thread.
   *
   * @param <T>
   *          the type of the {@link Callable} return value
   */
  private static final class ExecutionHelper<T> implements Iterator<T>, Iterable<T> {
    private final CompletionService<T> service;
    private int numTasks; // outstanding (submitted but not yet consumed) tasks
    ExecutionHelper(final Executor executor) {
      this.service = new ExecutorCompletionService<T>(executor);
    }
    @Override
    public boolean hasNext() {
      return numTasks > 0;
    }
    public void submit(Callable<T> task) {
      this.service.submit(task);
      ++numTasks;
    }
    @Override
    public T next() {
      if(!this.hasNext())
        throw new NoSuchElementException("next() is called but hasNext() returned false");
      try {
        // Blocks for the next *completed* task, in completion order.
        return service.take().get();
      } catch (InterruptedException e) {
        throw new ThreadInterruptedException(e);
      } catch (ExecutionException e) {
        throw new RuntimeException(e);
      } finally {
        // Decrement even on failure so iteration always terminates.
        --numTasks;
      }
    }
    @Override
    public void remove() {
      throw new UnsupportedOperationException();
    }
    @Override
    public Iterator<T> iterator() {
      // use the shortcut here - this is only used in a private context
      return this;
    }
  }
/**
* A class holding a subset of the {@link IndexSearcher}s leaf contexts to be
* executed within a single thread.
*
* @lucene.experimental
*/
public static class LeafSlice {
final AtomicReaderContext[] leaves;
public LeafSlice(AtomicReaderContext... leaves) {
this.leaves = leaves;
}
}
@Override
public String toString() {
return "IndexSearcher(" + reader + "; executor=" + executor + ")";
}
/**
* Returns {@link TermStatistics} for a term.
*
* This can be overridden for example, to return a term's statistics
* across a distributed collection.
* @lucene.experimental
*/
public TermStatistics termStatistics(Term term, TermContext context) throws IOException {
return new TermStatistics(term.bytes(), context.docFreq(), context.totalTermFreq());
}
/**
* Returns {@link CollectionStatistics} for a field.
*
* This can be overridden for example, to return a field's statistics
* across a distributed collection.
* @lucene.experimental
*/
public CollectionStatistics collectionStatistics(String field) throws IOException {
final int docCount;
final long sumTotalTermFreq;
final long sumDocFreq;
assert field != null;
Terms terms = MultiFields.getTerms(reader, field);
if (terms == null) {
docCount = 0;
sumTotalTermFreq = 0;
sumDocFreq = 0;
} else {
docCount = terms.getDocCount();
sumTotalTermFreq = terms.getSumTotalTermFreq();
sumDocFreq = terms.getSumDocFreq();
}
return new CollectionStatistics(field, reader.maxDoc(), docCount, sumTotalTermFreq, sumDocFreq);
}
}
|
googleapis/google-cloud-java | 35,516 | java-aiplatform/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/FunctionCall.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/aiplatform/v1beta1/tool.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.aiplatform.v1beta1;
/**
*
*
* <pre>
* A predicted [FunctionCall] returned from the model that contains a string
* representing the [FunctionDeclaration.name] and a structured JSON object
* containing the parameters and their values.
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1beta1.FunctionCall}
*/
public final class FunctionCall extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1beta1.FunctionCall)
FunctionCallOrBuilder {
private static final long serialVersionUID = 0L;
// Use FunctionCall.newBuilder() to construct.
private FunctionCall(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private FunctionCall() {
id_ = "";
name_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new FunctionCall();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.aiplatform.v1beta1.ToolProto
.internal_static_google_cloud_aiplatform_v1beta1_FunctionCall_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.aiplatform.v1beta1.ToolProto
.internal_static_google_cloud_aiplatform_v1beta1_FunctionCall_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.aiplatform.v1beta1.FunctionCall.class,
com.google.cloud.aiplatform.v1beta1.FunctionCall.Builder.class);
}
private int bitField0_;
public static final int ID_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object id_ = "";
/**
*
*
* <pre>
* Optional. The unique id of the function call. If populated, the client to
* execute the `function_call` and return the response with the matching `id`.
* </pre>
*
* <code>string id = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The id.
*/
@java.lang.Override
public java.lang.String getId() {
java.lang.Object ref = id_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
id_ = s;
return s;
}
}
/**
*
*
* <pre>
* Optional. The unique id of the function call. If populated, the client to
* execute the `function_call` and return the response with the matching `id`.
* </pre>
*
* <code>string id = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for id.
*/
@java.lang.Override
public com.google.protobuf.ByteString getIdBytes() {
java.lang.Object ref = id_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
id_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int NAME_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object name_ = "";
/**
*
*
* <pre>
* Required. The name of the function to call.
* Matches [FunctionDeclaration.name].
* </pre>
*
* <code>string name = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The name.
*/
@java.lang.Override
public java.lang.String getName() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
name_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. The name of the function to call.
* Matches [FunctionDeclaration.name].
* </pre>
*
* <code>string name = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for name.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int ARGS_FIELD_NUMBER = 2;
private com.google.protobuf.Struct args_;
/**
*
*
* <pre>
* Optional. Required. The function parameters and values in JSON object
* format. See [FunctionDeclaration.parameters] for parameter details.
* </pre>
*
* <code>.google.protobuf.Struct args = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return Whether the args field is set.
*/
@java.lang.Override
public boolean hasArgs() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Optional. Required. The function parameters and values in JSON object
* format. See [FunctionDeclaration.parameters] for parameter details.
* </pre>
*
* <code>.google.protobuf.Struct args = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The args.
*/
@java.lang.Override
public com.google.protobuf.Struct getArgs() {
return args_ == null ? com.google.protobuf.Struct.getDefaultInstance() : args_;
}
/**
*
*
* <pre>
* Optional. Required. The function parameters and values in JSON object
* format. See [FunctionDeclaration.parameters] for parameter details.
* </pre>
*
* <code>.google.protobuf.Struct args = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*/
@java.lang.Override
public com.google.protobuf.StructOrBuilder getArgsOrBuilder() {
return args_ == null ? com.google.protobuf.Struct.getDefaultInstance() : args_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
}
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(2, getArgs());
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(id_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 3, id_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
}
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getArgs());
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(id_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, id_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.aiplatform.v1beta1.FunctionCall)) {
return super.equals(obj);
}
com.google.cloud.aiplatform.v1beta1.FunctionCall other =
(com.google.cloud.aiplatform.v1beta1.FunctionCall) obj;
if (!getId().equals(other.getId())) return false;
if (!getName().equals(other.getName())) return false;
if (hasArgs() != other.hasArgs()) return false;
if (hasArgs()) {
if (!getArgs().equals(other.getArgs())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + ID_FIELD_NUMBER;
hash = (53 * hash) + getId().hashCode();
hash = (37 * hash) + NAME_FIELD_NUMBER;
hash = (53 * hash) + getName().hashCode();
if (hasArgs()) {
hash = (37 * hash) + ARGS_FIELD_NUMBER;
hash = (53 * hash) + getArgs().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.aiplatform.v1beta1.FunctionCall parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1beta1.FunctionCall parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1beta1.FunctionCall parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1beta1.FunctionCall parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1beta1.FunctionCall parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1beta1.FunctionCall parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1beta1.FunctionCall parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1beta1.FunctionCall parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1beta1.FunctionCall parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1beta1.FunctionCall parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1beta1.FunctionCall parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1beta1.FunctionCall parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.aiplatform.v1beta1.FunctionCall prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* A predicted [FunctionCall] returned from the model that contains a string
* representing the [FunctionDeclaration.name] and a structured JSON object
* containing the parameters and their values.
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1beta1.FunctionCall}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1beta1.FunctionCall)
com.google.cloud.aiplatform.v1beta1.FunctionCallOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.aiplatform.v1beta1.ToolProto
.internal_static_google_cloud_aiplatform_v1beta1_FunctionCall_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.aiplatform.v1beta1.ToolProto
.internal_static_google_cloud_aiplatform_v1beta1_FunctionCall_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.aiplatform.v1beta1.FunctionCall.class,
com.google.cloud.aiplatform.v1beta1.FunctionCall.Builder.class);
}
// Construct using com.google.cloud.aiplatform.v1beta1.FunctionCall.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getArgsFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
id_ = "";
name_ = "";
args_ = null;
if (argsBuilder_ != null) {
argsBuilder_.dispose();
argsBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.aiplatform.v1beta1.ToolProto
.internal_static_google_cloud_aiplatform_v1beta1_FunctionCall_descriptor;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.FunctionCall getDefaultInstanceForType() {
return com.google.cloud.aiplatform.v1beta1.FunctionCall.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.FunctionCall build() {
com.google.cloud.aiplatform.v1beta1.FunctionCall result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.FunctionCall buildPartial() {
com.google.cloud.aiplatform.v1beta1.FunctionCall result =
new com.google.cloud.aiplatform.v1beta1.FunctionCall(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.aiplatform.v1beta1.FunctionCall result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.id_ = id_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.name_ = name_;
}
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000004) != 0)) {
result.args_ = argsBuilder_ == null ? args_ : argsBuilder_.build();
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.aiplatform.v1beta1.FunctionCall) {
return mergeFrom((com.google.cloud.aiplatform.v1beta1.FunctionCall) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.aiplatform.v1beta1.FunctionCall other) {
if (other == com.google.cloud.aiplatform.v1beta1.FunctionCall.getDefaultInstance())
return this;
if (!other.getId().isEmpty()) {
id_ = other.id_;
bitField0_ |= 0x00000001;
onChanged();
}
if (!other.getName().isEmpty()) {
name_ = other.name_;
bitField0_ |= 0x00000002;
onChanged();
}
if (other.hasArgs()) {
mergeArgs(other.getArgs());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
name_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 10
case 18:
{
input.readMessage(getArgsFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000004;
break;
} // case 18
case 26:
{
id_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 26
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object id_ = "";
/**
*
*
* <pre>
* Optional. The unique id of the function call. If populated, the client to
* execute the `function_call` and return the response with the matching `id`.
* </pre>
*
* <code>string id = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The id.
*/
public java.lang.String getId() {
java.lang.Object ref = id_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
id_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Optional. The unique id of the function call. If populated, the client to
* execute the `function_call` and return the response with the matching `id`.
* </pre>
*
* <code>string id = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for id.
*/
public com.google.protobuf.ByteString getIdBytes() {
java.lang.Object ref = id_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
id_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Optional. The unique id of the function call. If populated, the client to
* execute the `function_call` and return the response with the matching `id`.
* </pre>
*
* <code>string id = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The id to set.
* @return This builder for chaining.
*/
public Builder setId(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
id_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. The unique id of the function call. If populated, the client to
* execute the `function_call` and return the response with the matching `id`.
* </pre>
*
* <code>string id = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearId() {
id_ = getDefaultInstance().getId();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. The unique id of the function call. If populated, the client to
* execute the `function_call` and return the response with the matching `id`.
* </pre>
*
* <code>string id = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The bytes for id to set.
* @return This builder for chaining.
*/
public Builder setIdBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
id_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private java.lang.Object name_ = "";
/**
*
*
* <pre>
* Required. The name of the function to call.
* Matches [FunctionDeclaration.name].
* </pre>
*
* <code>string name = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The name.
*/
public java.lang.String getName() {
java.lang.Object ref = name_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
name_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The name of the function to call.
* Matches [FunctionDeclaration.name].
* </pre>
*
* <code>string name = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for name.
*/
public com.google.protobuf.ByteString getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The name of the function to call.
* Matches [FunctionDeclaration.name].
* </pre>
*
* <code>string name = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The name to set.
* @return This builder for chaining.
*/
public Builder setName(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
name_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The name of the function to call.
* Matches [FunctionDeclaration.name].
* </pre>
*
* <code>string name = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return This builder for chaining.
*/
public Builder clearName() {
name_ = getDefaultInstance().getName();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The name of the function to call.
* Matches [FunctionDeclaration.name].
* </pre>
*
* <code>string name = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The bytes for name to set.
* @return This builder for chaining.
*/
public Builder setNameBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
name_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private com.google.protobuf.Struct args_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Struct,
com.google.protobuf.Struct.Builder,
com.google.protobuf.StructOrBuilder>
argsBuilder_;
/**
*
*
* <pre>
* Optional. Required. The function parameters and values in JSON object
* format. See [FunctionDeclaration.parameters] for parameter details.
* </pre>
*
* <code>.google.protobuf.Struct args = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return Whether the args field is set.
*/
public boolean hasArgs() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
*
*
* <pre>
* Optional. Required. The function parameters and values in JSON object
* format. See [FunctionDeclaration.parameters] for parameter details.
* </pre>
*
* <code>.google.protobuf.Struct args = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The args.
*/
public com.google.protobuf.Struct getArgs() {
if (argsBuilder_ == null) {
return args_ == null ? com.google.protobuf.Struct.getDefaultInstance() : args_;
} else {
return argsBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Optional. Required. The function parameters and values in JSON object
* format. See [FunctionDeclaration.parameters] for parameter details.
* </pre>
*
* <code>.google.protobuf.Struct args = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*/
public Builder setArgs(com.google.protobuf.Struct value) {
if (argsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
args_ = value;
} else {
argsBuilder_.setMessage(value);
}
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. Required. The function parameters and values in JSON object
* format. See [FunctionDeclaration.parameters] for parameter details.
* </pre>
*
* <code>.google.protobuf.Struct args = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*/
public Builder setArgs(com.google.protobuf.Struct.Builder builderForValue) {
if (argsBuilder_ == null) {
args_ = builderForValue.build();
} else {
argsBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. Required. The function parameters and values in JSON object
* format. See [FunctionDeclaration.parameters] for parameter details.
* </pre>
*
* <code>.google.protobuf.Struct args = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*/
public Builder mergeArgs(com.google.protobuf.Struct value) {
if (argsBuilder_ == null) {
if (((bitField0_ & 0x00000004) != 0)
&& args_ != null
&& args_ != com.google.protobuf.Struct.getDefaultInstance()) {
getArgsBuilder().mergeFrom(value);
} else {
args_ = value;
}
} else {
argsBuilder_.mergeFrom(value);
}
if (args_ != null) {
bitField0_ |= 0x00000004;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Optional. Required. The function parameters and values in JSON object
* format. See [FunctionDeclaration.parameters] for parameter details.
* </pre>
*
* <code>.google.protobuf.Struct args = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*/
public Builder clearArgs() {
bitField0_ = (bitField0_ & ~0x00000004);
args_ = null;
if (argsBuilder_ != null) {
argsBuilder_.dispose();
argsBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. Required. The function parameters and values in JSON object
* format. See [FunctionDeclaration.parameters] for parameter details.
* </pre>
*
* <code>.google.protobuf.Struct args = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*/
public com.google.protobuf.Struct.Builder getArgsBuilder() {
bitField0_ |= 0x00000004;
onChanged();
return getArgsFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Optional. Required. The function parameters and values in JSON object
* format. See [FunctionDeclaration.parameters] for parameter details.
* </pre>
*
* <code>.google.protobuf.Struct args = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*/
public com.google.protobuf.StructOrBuilder getArgsOrBuilder() {
if (argsBuilder_ != null) {
return argsBuilder_.getMessageOrBuilder();
} else {
return args_ == null ? com.google.protobuf.Struct.getDefaultInstance() : args_;
}
}
/**
*
*
* <pre>
* Optional. Required. The function parameters and values in JSON object
* format. See [FunctionDeclaration.parameters] for parameter details.
* </pre>
*
* <code>.google.protobuf.Struct args = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Struct,
com.google.protobuf.Struct.Builder,
com.google.protobuf.StructOrBuilder>
getArgsFieldBuilder() {
if (argsBuilder_ == null) {
argsBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Struct,
com.google.protobuf.Struct.Builder,
com.google.protobuf.StructOrBuilder>(getArgs(), getParentForChildren(), isClean());
args_ = null;
}
return argsBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1beta1.FunctionCall)
}
// @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1beta1.FunctionCall)
// Singleton default (empty) instance shared by all users of this message type.
private static final com.google.cloud.aiplatform.v1beta1.FunctionCall DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1beta1.FunctionCall();
}
public static com.google.cloud.aiplatform.v1beta1.FunctionCall getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Wire-format parser. On any parse failure the partially built message is
// attached to the thrown InvalidProtocolBufferException so callers can inspect
// what was read before the failure.
private static final com.google.protobuf.Parser<FunctionCall> PARSER =
new com.google.protobuf.AbstractParser<FunctionCall>() {
@java.lang.Override
public FunctionCall parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
// Low-level I/O failures are wrapped in the protobuf-specific exception type.
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<FunctionCall> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<FunctionCall> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.FunctionCall getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
apache/incubator-kie-drools | 35,474 | drools-test-coverage/test-compiler-integration/src/test/java/org/drools/mvel/integrationtests/phreak/RemoveRuleTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.drools.mvel.integrationtests.phreak;
import org.drools.base.base.ClassObjectType;
import org.drools.core.common.InternalFactHandle;
import org.drools.core.common.InternalWorkingMemory;
import org.drools.core.common.NodeMemories;
import org.drools.core.impl.InternalRuleBase;
import org.drools.core.reteoo.BetaMemory;
import org.drools.core.reteoo.EvalConditionNode;
import org.drools.core.reteoo.JoinNode;
import org.drools.core.reteoo.LeftInputAdapterNode;
import org.drools.core.reteoo.LeftInputAdapterNode.LiaNodeMemory;
import org.drools.core.reteoo.ObjectTypeNode;
import org.drools.core.reteoo.PathMemory;
import org.drools.core.reteoo.RuleTerminalNode;
import org.drools.core.reteoo.SegmentMemory;
import org.drools.kiesession.rulebase.InternalKnowledgeBase;
import org.drools.kiesession.rulebase.KnowledgeBaseFactory;
import org.drools.testcoverage.common.util.KieBaseTestConfiguration;
import org.drools.testcoverage.common.util.KieBaseUtil;
import org.drools.testcoverage.common.util.TestParametersUtil2;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;
import org.kie.api.KieBase;
import org.kie.api.definition.KiePackage;
import org.kie.api.runtime.rule.Match;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.stream.Stream;
import static org.assertj.core.api.Assertions.assertThat;
import static org.drools.core.phreak.PhreakBuilder.isEagerSegmentCreation;
public class RemoveRuleTest {
/**
 * Supplies every cloud KieBase configuration to the parameterized tests
 * (consumed via {@code @MethodSource("parameters")}).
 */
public static Stream<KieBaseTestConfiguration> parameters() {
return TestParametersUtil2.getKieBaseCloudConfigurations(true).stream();
}
/**
 * Adds a single rule to a session already populated with facts, verifies the
 * staged tuples are flushed on firing and the expected node memories exist,
 * then checks that removing the rule releases all node memories.
 */
@ParameterizedTest(name = "KieBase type={0}")
@MethodSource("parameters")
public void testPopulatedSingleRuleNoSharing(KieBaseTestConfiguration kieBaseTestConfiguration) throws Exception {
InternalKnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
InternalWorkingMemory wm = ((InternalWorkingMemory)kbase.newKieSession());
// Insert facts and fire BEFORE the rule exists, so the new rule's network is
// initialized from already-present facts when it is added below.
wm.insert(new A(1));
wm.insert(new B(1));
wm.insert(new C(1));
wm.insert(new C(2));
wm.insert(new X(1));
wm.insert(new E(1));
wm.fireAllRules();
kbase.addPackages( buildKnowledgePackage(kieBaseTestConfiguration, "r1", "   A() B() C(object == 2) X() E()\n") );
List list = new ArrayList();
wm.setGlobal("list", list);
ObjectTypeNode aotn = getObjectTypeNode(kbase, A.class );
LeftInputAdapterNode liaNode = (LeftInputAdapterNode) aotn.getObjectSinkPropagator().getSinks()[0];
LiaNodeMemory lm = wm.getNodeMemory(liaNode);
SegmentMemory sm = lm.getSegmentMemory();
// Adding the rule staged the existing facts as pending insertions.
assertThat(sm.getStagedLeftTuples().getInsertFirst()).isNotNull();
wm.fireAllRules();
// After firing, the beta memories for B (index 1) and E (index 4) each hold
// the single matching left/right tuples.
BetaMemory bMem = ( BetaMemory ) sm.getNodeMemories()[1];
assertThat(bMem.getLeftTupleMemory().size()).isEqualTo(1);
assertThat(bMem.getRightTupleMemory().size()).isEqualTo(1);
BetaMemory eMem = ( BetaMemory ) sm.getNodeMemories()[4];
assertThat(eMem.getLeftTupleMemory().size()).isEqualTo(1);
assertThat(eMem.getRightTupleMemory().size()).isEqualTo(1);
NodeMemories nms = wm.getNodeMemories();
assertThat(countNodeMemories(nms)).isEqualTo(6);
// Firing drained the staged tuples.
assertThat(sm.getStagedLeftTuples().getInsertFirst()).isNull();
assertThat(list.size()).isEqualTo(1);
assertThat(((Match) list.get(0)).getRule().getName()).isEqualTo("r1");
// Removing the only rule must unwire the network and free every node memory.
kbase.removeRule("org.kie", "r1");
assertThat(countNodeMemories(nms)).isEqualTo(0);
}
/**
 * Same add-then-remove scenario as above, but the rule contains a subnetwork
 * ({@code not( B() and C() )}) right after the first pattern. Verifies the rule
 * fires for both matching As and that, once removed, new facts no longer fire it.
 */
@ParameterizedTest(name = "KieBase type={0}")
@MethodSource("parameters")
public void testPopulatedSingleRuleNoSharingWithSubnetworkAtStart(KieBaseTestConfiguration kieBaseTestConfiguration) throws Exception {
InternalKnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
InternalWorkingMemory wm = ((InternalWorkingMemory)kbase.newKieSession());
// No B is inserted, so "not( B() and C() )" holds for every A.
wm.insert(new A(1));
wm.insert(new A(2));
wm.insert(new X(1));
wm.insert(new E(1));
wm.insert(new C(2));
wm.fireAllRules();
kbase.addPackages( buildKnowledgePackage(kieBaseTestConfiguration, "r1", "   A() not( B() and C() ) X() E()\n") );
List list = new ArrayList();
wm.setGlobal("list", list);
wm.fireAllRules();
// One activation per A fact.
assertThat(list.size()).isEqualTo(2);
assertThat(((Match) list.get(0)).getRule().getName()).isEqualTo("r1");
assertThat(((Match) list.get(1)).getRule().getName()).isEqualTo("r1");
kbase.removeRule("org.kie", "r1");
// With the rule removed, a new A must not produce any activation.
wm.insert(new A(1));
wm.fireAllRules();
assertThat(list.size()).isEqualTo(2);
}
/**
 * Counts how many node memories of the working memory are actually initialized.
 * Uses {@code peekNodeMemory} so that the counting itself does not lazily
 * create memories and skew the result.
 *
 * <p>Fix: removed a leftover debug {@code System.out.println} that spammed the
 * test output for every initialized memory.
 *
 * @param nms the node memories of the session under test
 * @return the number of non-null (initialized) node memories
 */
private int countNodeMemories(NodeMemories nms) {
    int count = 0;
    for ( int i = 0; i < nms.length(); i++ ) {
        // peek (not get) so we observe without initializing.
        if ( nms.peekNodeMemory(i) != null ) {
            count++;
        }
    }
    return count;
}
/**
 * r1 and r2 share the network prefix A-B and split at the C join. Verifies the
 * segment memories created by the split, and that removing r2 merges the
 * segments back and leaves r1 fully functional.
 */
@ParameterizedTest(name = "KieBase type={0}")
@MethodSource("parameters")
public void testPopulatedRuleMidwayShare(KieBaseTestConfiguration kieBaseTestConfiguration) throws Exception {
InternalKnowledgeBase kbase1 = buildKnowledgeBase(kieBaseTestConfiguration, "r1", "   A() B() C(1;) X() E()\n");
InternalWorkingMemory wm = ((InternalWorkingMemory)kbase1.newKieSession());
List list = new ArrayList();
wm.setGlobal("list", list);
wm.insert(new A(1));
wm.insert(new A(2));
wm.insert(new A(3));
wm.insert(new B(1));
wm.insert(new C(1));
wm.insert(new C(2));
wm.insert(new X(1));
wm.insert(new E(1));
wm.fireAllRules();
assertThat(countNodeMemories(wm.getNodeMemories())).isEqualTo(6);
// r2 shares A() B() with r1 and splits at C(2;).
kbase1.addPackages( buildKnowledgePackage(kieBaseTestConfiguration, "r2", "   a : A() B() C(2;) X() E()\n") );
wm.fireAllRules();
// Navigate the shared prefix: LIA -> B join -> the two C joins (one per rule).
ObjectTypeNode aotn = getObjectTypeNode(kbase1, A.class );
LeftInputAdapterNode liaNode = (LeftInputAdapterNode) aotn.getObjectSinkPropagator().getSinks()[0];
JoinNode bNode = (JoinNode) liaNode.getSinkPropagator().getFirstLeftTupleSink();
JoinNode c1Node = (JoinNode) bNode.getSinkPropagator().getFirstLeftTupleSink();
JoinNode c2Node = (JoinNode) bNode.getSinkPropagator().getLastLeftTupleSink();
LiaNodeMemory lm = wm.getNodeMemory(liaNode);
SegmentMemory sm = lm.getSegmentMemory();
// Each C join starts its own child segment; 3 As joined with 1 B reach both.
BetaMemory c1Mem = (BetaMemory) wm.getNodeMemory(c1Node);
assertThat(c1Mem.getSegmentMemory()).isSameAs(sm.getFirst());
assertThat(c1Mem.getLeftTupleMemory().size()).isEqualTo(3);
assertThat(c1Mem.getRightTupleMemory().size()).isEqualTo(1);
BetaMemory c2Mem = (BetaMemory) wm.getNodeMemory(c2Node);
SegmentMemory c2Smem = sm.getFirst().getNext();
assertThat(c2Mem.getSegmentMemory()).isSameAs(c2Smem);
assertThat(c2Mem.getLeftTupleMemory().size()).isEqualTo(3);
assertThat(c2Mem.getRightTupleMemory().size()).isEqualTo(1);
assertThat(list.size()).isEqualTo(6);
kbase1.removeRule("org.kie", "r2");
assertThat(countNodeMemories(wm.getNodeMemories())).isEqualTo(6);
assertThat(sm.getFirst()).isNull();
assertThat(c1Mem.getSegmentMemory()).isSameAs(sm); // c1SMem repoints back to original Smem
wm.insert(new A(1));
wm.fireAllRules();
assertThat(((Match) list.get(6)).getRule().getName()).isEqualTo("r1");
assertThat(list.size()).isEqualTo(7); // only one more match is added, as the second rule was removed
}
/**
 * Variant of the midway-share test where the shared prefix also contains two
 * eval nodes; the rules split at the C join after the evals. Verifies the
 * split segments and the merge-back after removing r2.
 */
@ParameterizedTest(name = "KieBase type={0}")
@MethodSource("parameters")
public void testPopulatedRuleWithEvals(KieBaseTestConfiguration kieBaseTestConfiguration) throws Exception {
InternalKnowledgeBase kbase1 = buildKnowledgeBase(kieBaseTestConfiguration, "r1", "   a:A() B() eval(1==1) eval(1==1) C(1;) \n");
InternalWorkingMemory wm = ((InternalWorkingMemory)kbase1.newKieSession());
List list = new ArrayList();
wm.setGlobal("list", list);
wm.insert(new A(1));
wm.insert(new A(2));
wm.insert(new A(3));
wm.insert(new B(1));
wm.insert(new C(1));
wm.insert(new C(2));
wm.insert(new X(1));
wm.insert(new E(1));
wm.fireAllRules();
assertThat(countNodeMemories(wm.getNodeMemories())).isEqualTo(6);
// r2 shares A, B and both evals with r1, splitting at C(2;).
kbase1.addPackages( buildKnowledgePackage(kieBaseTestConfiguration, "r2", "   a:A() B() eval(1==1) eval(1==1) C(2;) \n") );
wm.fireAllRules();
// Navigate LIA -> B join -> eval -> eval -> the two C joins.
ObjectTypeNode aotn = getObjectTypeNode(kbase1, A.class );
LeftInputAdapterNode liaNode = (LeftInputAdapterNode) aotn.getObjectSinkPropagator().getSinks()[0];
JoinNode bNode = (JoinNode) liaNode.getSinkPropagator().getFirstLeftTupleSink();
EvalConditionNode e1 = (EvalConditionNode) bNode.getSinkPropagator().getFirstLeftTupleSink();
EvalConditionNode e2 = (EvalConditionNode) e1.getSinkPropagator().getFirstLeftTupleSink();
JoinNode c1Node = (JoinNode) e2.getSinkPropagator().getFirstLeftTupleSink();
JoinNode c2Node = (JoinNode) e2.getSinkPropagator().getLastLeftTupleSink();
LiaNodeMemory lm = wm.getNodeMemory(liaNode);
SegmentMemory sm = lm.getSegmentMemory();
BetaMemory c1Mem = (BetaMemory) wm.getNodeMemory(c1Node);
assertThat(c1Mem.getSegmentMemory()).isSameAs(sm.getFirst());
assertThat(c1Mem.getLeftTupleMemory().size()).isEqualTo(3);
assertThat(c1Mem.getRightTupleMemory().size()).isEqualTo(1);
BetaMemory c2Mem = (BetaMemory) wm.getNodeMemory(c2Node);
SegmentMemory c2Smem = sm.getFirst().getNext();
assertThat(c2Mem.getSegmentMemory()).isSameAs(c2Smem);
assertThat(c2Mem.getLeftTupleMemory().size()).isEqualTo(3);
assertThat(c2Mem.getRightTupleMemory().size()).isEqualTo(1);
assertThat(list.size()).isEqualTo(6);
kbase1.removeRule("org.kie", "r2");
assertThat(countNodeMemories(wm.getNodeMemories())).isEqualTo(6);
assertThat(sm.getFirst()).isNull();
assertThat(c1Mem.getSegmentMemory()).isSameAs(sm); // c1SMem repoints back to original Smem
wm.insert(new A(1));
wm.fireAllRules();
assertThat(((Match) list.get(6)).getRule().getName()).isEqualTo("r1");
assertThat(list.size()).isEqualTo(7); // only one more match is added, as the second rule was removed
}
/**
 * r1 and r2 share only the LIA node for A() and split immediately at the B
 * join. Verifies the split segment topology and that removing r2 collapses
 * everything back into r1's single segment.
 */
@ParameterizedTest(name = "KieBase type={0}")
@MethodSource("parameters")
public void testPopulatedSharedLiaNode(KieBaseTestConfiguration kieBaseTestConfiguration) throws Exception {
InternalKnowledgeBase kbase1 = buildKnowledgeBase(kieBaseTestConfiguration, "r1", "   A() B(1;) C() X() E()\n");
InternalWorkingMemory wm = ((InternalWorkingMemory)kbase1.newKieSession());
List list = new ArrayList();
wm.setGlobal("list", list);
wm.insert(new A(1));
wm.insert(new A(2));
wm.insert(new A(3));
wm.insert(new B(1));
wm.insert(new B(2));
wm.insert(new C(1));
wm.insert(new X(1));
wm.insert(new E(1));
wm.fireAllRules();
// One activation per A fact.
assertThat(list.size()).isEqualTo(3);
assertThat(countNodeMemories(wm.getNodeMemories())).isEqualTo(6);
// r2 shares only the LIA node; it splits at B(2;).
kbase1.addPackages( buildKnowledgePackage(kieBaseTestConfiguration, "r2", "   a : A() B(2;) C() X() E()\n") );
wm.fireAllRules();
assertThat(countNodeMemories(wm.getNodeMemories())).isEqualTo(11);
ObjectTypeNode aotn = getObjectTypeNode(kbase1, A.class );
LeftInputAdapterNode liaNode = (LeftInputAdapterNode) aotn.getObjectSinkPropagator().getSinks()[0];
JoinNode b1Node = (JoinNode) liaNode.getSinkPropagator().getFirstLeftTupleSink();
JoinNode b2Node = (JoinNode) liaNode.getSinkPropagator().getLastLeftTupleSink();
JoinNode c1Node = (JoinNode) b1Node.getSinkPropagator().getLastLeftTupleSink();
LiaNodeMemory lm = wm.getNodeMemory(liaNode);
SegmentMemory sm = lm.getSegmentMemory();
// Each B join heads its own child segment of the LIA segment.
BetaMemory b1Mem = (BetaMemory) wm.getNodeMemory(b1Node);
assertThat(b1Mem.getSegmentMemory()).isSameAs(sm.getFirst());
assertThat(b1Mem.getLeftTupleMemory().size()).isEqualTo(3);
assertThat(b1Mem.getRightTupleMemory().size()).isEqualTo(1);
BetaMemory b2Mem = (BetaMemory) wm.getNodeMemory(b2Node);
SegmentMemory b2Smem = sm.getFirst().getNext();
assertThat(b2Mem.getSegmentMemory()).isSameAs(b2Smem);
assertThat(b2Mem.getLeftTupleMemory().size()).isEqualTo(3);
assertThat(b2Mem.getRightTupleMemory().size()).isEqualTo(1);
assertThat(list.size()).isEqualTo(6);
// The C join of r1 stays in the same segment as its B join; the two rules'
// segments are distinct.
BetaMemory c1Mem = (BetaMemory) wm.getNodeMemory(c1Node);
assertThat(c1Mem.getSegmentMemory()).isSameAs(b1Mem.getSegmentMemory());
assertThat(b2Mem.getSegmentMemory()).isNotSameAs(b1Mem.getSegmentMemory());
// Firing again must be a no-op.
wm.fireAllRules();
assertThat(list.size()).isEqualTo(6);
assertThat(countNodeMemories(wm.getNodeMemories())).isEqualTo(11);
kbase1.removeRule("org.kie", "r2");
assertThat(countNodeMemories(wm.getNodeMemories())).isEqualTo(6);
// r1's nodes are merged back into the original LIA segment.
assertThat(b1Mem.getSegmentMemory()).isSameAs(sm);
assertThat(c1Mem.getSegmentMemory()).isSameAs(sm);
assertThat(sm.getFirst()).isNull();
assertThat(b1Mem.getLeftTupleMemory().size()).isEqualTo(3);
assertThat(b1Mem.getRightTupleMemory().size()).isEqualTo(1);
//SegmentMemory b2Smem =  sm.getFirst().remove();
assertThat(b2Mem.getSegmentMemory()).isSameAs(b2Smem);
wm.insert(new A(1));
wm.fireAllRules();
assertThat(((Match) list.get(6)).getRule().getName()).isEqualTo("r1");
assertThat(list.size()).isEqualTo(7); // only one more match is added, as the second rule was removed
}
/**
 * Two trivial rules (just {@code A()}) share the alpha network with no beta
 * nodes at all. After removing both rules, the fact handles must hold no left
 * tuples and updates must no longer produce activations.
 */
@ParameterizedTest(name = "KieBase type={0}")
@MethodSource("parameters")
public void testPopulatedSharedLiaNodeNoBeta(KieBaseTestConfiguration kieBaseTestConfiguration) throws Exception {
InternalKnowledgeBase kbase1 = buildKnowledgeBase(kieBaseTestConfiguration, "r1", "   A()\n");
InternalWorkingMemory wm = ((InternalWorkingMemory)kbase1.newKieSession());
List list = new ArrayList();
wm.setGlobal("list", list);
A a1 = new A(1);
InternalFactHandle fh1 = (InternalFactHandle) wm.insert(a1);
A a2 = new A(2);
InternalFactHandle fh2 = (InternalFactHandle) wm.insert(a2);
wm.fireAllRules();
assertThat(list.size()).isEqualTo(2);
kbase1.addPackages( buildKnowledgePackage(kieBaseTestConfiguration, "r2", "   a : A()\n") );
wm.fireAllRules();
assertThat(list.size()).isEqualTo(4);
kbase1.removeRule("org.kie", "r1");
kbase1.removeRule("org.kie", "r2");
list.clear();
// Rule removal must have detached every left tuple from the handles.
assertThat(fh1.getFirstLeftTuple()).isNull();
assertThat(fh2.getFirstLeftTuple()).isNull();
// Updating the facts must not fire anything now that no rules exist.
wm.update( fh1,a1 );
wm.update( fh2,a2 );
wm.fireAllRules();
assertThat(list.size()).isEqualTo(0);
}
/**
 * Verifies that a rule made of a single alpha pattern (which gets an
 * AlphaTerminalNode) does not share segments with a longer rule's LIA path:
 * with eager segment creation each path has exactly one segment prototype.
 */
@ParameterizedTest(name = "KieBase type={0}")
@MethodSource("parameters")
public void testAlphaTerminalNodesDontShareWithLian(KieBaseTestConfiguration kieBaseTestConfiguration) throws Exception {
InternalKnowledgeBase kbase1 = buildKnowledgeBase(kieBaseTestConfiguration, "r1", "   A(1;)\n");
kbase1.addPackages( buildKnowledgePackage(kieBaseTestConfiguration, "r2", "   A(1;) B(1;) C(1;)\n") );
InternalWorkingMemory wm = ((InternalWorkingMemory)kbase1.newKieSession());
RuleTerminalNode rtn1 = getRtn("org.kie.r1", kbase1);
RuleTerminalNode rtn2 = getRtn("org.kie.r2", kbase1);
PathMemory pmem1 = wm.getNodeMemory(rtn1);
PathMemory pmem2 = wm.getNodeMemory(rtn2);
// Segment prototypes only exist up front with eager segment creation.
if (isEagerSegmentCreation()) {
assertThat(pmem1.getPathEndNode().getSegmentPrototypes().length).isEqualTo(1);
assertThat(pmem2.getPathEndNode().getSegmentPrototypes().length).isEqualTo(1);
}
}
/**
 * r1 and r2 are textually identical, so they share the entire join network up
 * to the terminal nodes. Verifies the shared first segment plus the per-rule
 * child segments, and that removing r2 merges r1 back into a single segment.
 *
 * <p>Fix: removed a leftover debug {@code System.out.println("---")} and a
 * redundant re-fetch of {@code pmem1} just before the rule removal.
 */
@ParameterizedTest(name = "KieBase type={0}")
@MethodSource("parameters")
public void testPopulatedSharedToRtn(KieBaseTestConfiguration kieBaseTestConfiguration) throws Exception {
InternalKnowledgeBase kbase1 = buildKnowledgeBase(kieBaseTestConfiguration, "r1", "   A() B() C() X() E()\n");
InternalWorkingMemory wm = ((InternalWorkingMemory)kbase1.newKieSession());
List list = new ArrayList();
wm.setGlobal("list", list);
wm.insert(new A(1));
wm.insert(new A(2));
wm.insert(new B(1));
wm.insert(new C(1));
wm.insert(new X(1));
wm.insert(new E(1));
wm.fireAllRules();
assertThat(list.size()).isEqualTo(2);
assertThat(countNodeMemories(wm.getNodeMemories())).isEqualTo(6);
// r2 is identical to r1, so the whole join chain is shared; only the RTNs differ.
kbase1.addPackages( buildKnowledgePackage(kieBaseTestConfiguration, "r2", "   A() B() C() X() E()\n") );
wm.fireAllRules();
assertThat(countNodeMemories(wm.getNodeMemories())).isEqualTo(7);
assertThat(list.size()).isEqualTo(4);
RuleTerminalNode rtn1 = getRtn("org.kie.r1", kbase1);
RuleTerminalNode rtn2 = getRtn("org.kie.r2", kbase1);
PathMemory pmem1 = wm.getNodeMemory(rtn1);
PathMemory pmem2 = wm.getNodeMemory(rtn2);
SegmentMemory[] smems1 = pmem1.getSegmentMemories();
SegmentMemory[] smems2 = pmem2.getSegmentMemories();
// Both paths: shared first segment + a per-rule terminal segment.
assertThat(smems1.length).isEqualTo(2);
assertThat(smems2.length).isEqualTo(2);
assertThat(smems2[0]).isSameAs(smems1[0]);
assertThat(smems2[1]).isNotSameAs(smems1[1]);
SegmentMemory sm = smems1[0];
assertThat(sm.getFirst()).isEqualTo(smems1[1]);
// Both terminal nodes hang off the very same E join node.
JoinNode eNode1 = ( JoinNode ) rtn1.getLeftTupleSource();
JoinNode eNode2 = ( JoinNode ) rtn2.getLeftTupleSource();
assertThat(eNode2).isSameAs(eNode1);
kbase1.removeRule("org.kie", "r2");
assertThat(countNodeMemories(wm.getNodeMemories())).isEqualTo(6);
assertThat(sm.getFirst()).isNull();
// r1's path collapses back to a single segment after the removal.
pmem1 = wm.getNodeMemory(rtn1);
smems1 = pmem1.getSegmentMemories();
assertThat(smems1.length).isEqualTo(1);
assertThat(smems1[0]).isSameAs(sm);
wm.insert(new A(1));
wm.fireAllRules();
assertThat(((Match) list.get(4)).getRule().getName()).isEqualTo("r1");
assertThat(list.size()).isEqualTo(5); // only one more match is added, as the second rule was removed
}
/**
 * Three rules share progressively shorter prefixes; removing the FIRST rule
 * (r1) must leave r2/r3 fully functional: after updating all facts only r2/r3
 * activations are produced.
 */
@ParameterizedTest(name = "KieBase type={0}")
@MethodSource("parameters")
public void testPopulatedMultipleSharesRemoveFirst(KieBaseTestConfiguration kieBaseTestConfiguration) throws Exception {
InternalKnowledgeBase kbase1 = buildKnowledgeBase(kieBaseTestConfiguration, "r1", "   A(1;)  A(2;) B(1;) B(2;) C(1;) X() E()\n");
InternalWorkingMemory wm = ((InternalWorkingMemory)kbase1.newKieSession());
List list = new ArrayList();
wm.setGlobal("list", list);
// Handles are kept so all facts can be updated (re-propagated) later.
InternalFactHandle fh1 = ( InternalFactHandle ) wm.insert(new A(1));
InternalFactHandle fh2 = ( InternalFactHandle ) wm.insert(new A(2));
InternalFactHandle fh3 = ( InternalFactHandle ) wm.insert(new A(2));
InternalFactHandle fh4 = ( InternalFactHandle ) wm.insert(new A(3));
InternalFactHandle fh5 = (InternalFactHandle) wm.insert(new B(1));
InternalFactHandle fh6 = (InternalFactHandle) wm.insert(new B(2));
InternalFactHandle fh7 = (InternalFactHandle) wm.insert(new C(1));
InternalFactHandle fh8 = (InternalFactHandle) wm.insert(new C(2));
InternalFactHandle fh9 = (InternalFactHandle) wm.insert(new X(1));
InternalFactHandle fh10 = (InternalFactHandle) wm.insert(new E(1));
wm.fireAllRules();
assertThat(list.size()).isEqualTo(2);
kbase1.addPackages( buildKnowledgePackage(kieBaseTestConfiguration, "r2", "   A(1;)  A(2;) B(1;) B(2;) C(2;) X() E()\n") );
kbase1.addPackages( buildKnowledgePackage(kieBaseTestConfiguration, "r3", "   A(1;)  A(3;) B(1;) B(2;) C(2;) X() E()\n") );
wm.fireAllRules();
assertThat(list.size()).isEqualTo(5);
kbase1.removeRule("org.kie", "r1");
list.clear();
// Re-propagate every fact; only the two remaining rules may fire.
update10Facts(wm, fh1, fh2, fh3, fh4, fh5, fh6, fh7, fh8, fh9, fh10);
wm.fireAllRules();
assertThat(list.size()).isEqualTo(3);
}
/**
 * Same three-rule sharing scenario as {@code testPopulatedMultipleSharesRemoveFirst},
 * but removing the MIDDLE rule (r2); r1 and r3 must keep firing correctly.
 */
@ParameterizedTest(name = "KieBase type={0}")
@MethodSource("parameters")
public void testPopulatedMultipleSharesRemoveMid(KieBaseTestConfiguration kieBaseTestConfiguration) throws Exception {
InternalKnowledgeBase kbase1 = buildKnowledgeBase(kieBaseTestConfiguration, "r1", "   A(1;)  A(2;) B(1;) B(2;) C(1;) X() E()\n");
InternalWorkingMemory wm = ((InternalWorkingMemory)kbase1.newKieSession());
List list = new ArrayList();
wm.setGlobal("list", list);
InternalFactHandle fh1 = ( InternalFactHandle ) wm.insert(new A(1));
InternalFactHandle fh2 = ( InternalFactHandle ) wm.insert(new A(2));
InternalFactHandle fh3 = ( InternalFactHandle ) wm.insert(new A(2));
InternalFactHandle fh4 = ( InternalFactHandle ) wm.insert(new A(3));
InternalFactHandle fh5 = (InternalFactHandle) wm.insert(new B(1));
InternalFactHandle fh6 = (InternalFactHandle) wm.insert(new B(2));
InternalFactHandle fh7 = (InternalFactHandle) wm.insert(new C(1));
InternalFactHandle fh8 = (InternalFactHandle) wm.insert(new C(2));
InternalFactHandle fh9 = (InternalFactHandle) wm.insert(new X(1));
InternalFactHandle fh10 = (InternalFactHandle) wm.insert(new E(1));
wm.fireAllRules();
assertThat(list.size()).isEqualTo(2);
kbase1.addPackages( buildKnowledgePackage(kieBaseTestConfiguration, "r2", "   A(1;)  A(2;) B(1;) B(2;) C(2;) X() E()\n") );
kbase1.addPackages( buildKnowledgePackage(kieBaseTestConfiguration, "r3", "   A(1;)  A(3;) B(1;) B(2;) C(2;) X() E()\n") );
wm.fireAllRules();
assertThat(list.size()).isEqualTo(5);
kbase1.removeRule("org.kie", "r2");
list.clear();
// Re-propagate every fact; only r1 and r3 may fire now.
update10Facts(wm, fh1, fh2, fh3, fh4, fh5, fh6, fh7, fh8, fh9, fh10);
wm.fireAllRules();
assertThat(list.size()).isEqualTo(3);
}
/**
 * Incrementally adds r2 and r3 to a populated session, asserting how r1's path
 * is split into more segments (via the link/dirty masks) at each step, then
 * removes the LAST rule (r3) and checks the segment layout merges back.
 */
@ParameterizedTest(name = "KieBase type={0}")
@MethodSource("parameters")
public void testPopulatedMultipleSharesRemoveLast(KieBaseTestConfiguration kieBaseTestConfiguration) throws Exception {
InternalKnowledgeBase kbase1 = buildKnowledgeBase(kieBaseTestConfiguration, "r1", "   A(1;)  A(2;) B(1;) B(2;) C(1;) X() E()\n");
InternalWorkingMemory wm = ((InternalWorkingMemory)kbase1.newKieSession());
List list = new ArrayList();
wm.setGlobal("list", list);
InternalFactHandle fh1 = ( InternalFactHandle ) wm.insert(new A(1));
InternalFactHandle fh2 = ( InternalFactHandle ) wm.insert(new A(2));
InternalFactHandle fh3 = ( InternalFactHandle ) wm.insert(new A(2));
InternalFactHandle fh4 = ( InternalFactHandle ) wm.insert(new A(3));
InternalFactHandle fh5 = (InternalFactHandle) wm.insert(new B(1));
InternalFactHandle fh6 = (InternalFactHandle) wm.insert(new B(2));
InternalFactHandle fh7 = (InternalFactHandle) wm.insert(new C(1));
InternalFactHandle fh8 = (InternalFactHandle) wm.insert(new C(2));
InternalFactHandle fh9 = (InternalFactHandle) wm.insert(new X(1));
InternalFactHandle fh10 = (InternalFactHandle) wm.insert(new E(1));
wm.fireAllRules();
assertThat(list.size()).isEqualTo(2);
// With only r1, the whole path is one fully linked segment (7 nodes -> mask 127).
RuleTerminalNode rtn1 = getRtn("org.kie.r1", kbase1);
PathMemory pmem1 = wm.getNodeMemory(rtn1);
assertThat(pmem1.getSegmentMemories().length).isEqualTo(1);
assertSegmentMemory(pmem1, 0, 127, 127, 127);
kbase1.addPackages( buildKnowledgePackage(kieBaseTestConfiguration, "r2", "   A(1;)  A(2;) B(1;) B(2;) C(2;) X() E()\n") );
list.clear();
update10Facts(wm, fh1, fh2, fh3, fh4, fh5, fh6, fh7, fh8, fh9, fh10);
wm.fireAllRules();
assertThat(list.size()).isEqualTo(4);
// NOTE(review): rtn2 is looked up with "org.kie.r1", not "org.kie.r2" — this
// re-fetches r1's terminal after the network split; confirm that is intended.
RuleTerminalNode rtn2 = getRtn("org.kie.r1", kbase1);
PathMemory pmem2 = wm.getNodeMemory(rtn2);
// r1's path is now split in two: shared prefix (4 nodes -> 15) + tail (3 -> 7).
assertThat(pmem1.getSegmentMemories().length).isEqualTo(2);
assertSegmentMemory(pmem1, 0, 15, 15, 15);
assertSegmentMemory(pmem1, 1, 7, 7, 7);
assertThat(pmem2.getSegmentMemories().length).isEqualTo(2);
assertSegmentMemory(pmem2, 0, 15, 15, 15);
assertSegmentMemory(pmem2, 1, 7, 7, 7);
kbase1.addPackages( buildKnowledgePackage(kieBaseTestConfiguration, "r3", "   A(1;)  A(3;) B(1;) B(2;) C(2;) X() E()\n") );
list.clear();
update10Facts(wm, fh1, fh2, fh3, fh4, fh5, fh6, fh7, fh8, fh9, fh10);
wm.fireAllRules();
assertThat(list.size()).isEqualTo(5);
// r3 splits the shared prefix again: r1/r2 now have 3 segments (1 + 3 + 3 nodes).
RuleTerminalNode rtn3 = getRtn("org.kie.r3", kbase1);
PathMemory pmem3 = wm.getNodeMemory(rtn3);
assertThat(pmem1.getSegmentMemories().length).isEqualTo(3);
assertSegmentMemory(pmem1, 0, 1, 1, 1);
assertSegmentMemory(pmem1, 1, 7, 7, 7);
assertSegmentMemory(pmem1, 2, 7, 7, 7);
assertThat(pmem2.getSegmentMemories().length).isEqualTo(3);
assertSegmentMemory(pmem2, 0, 1, 1, 1);
assertSegmentMemory(pmem2, 1, 7, 7, 7);
assertSegmentMemory(pmem2, 2, 7, 7, 7);
assertThat(pmem3.getSegmentMemories().length).isEqualTo(2);
assertSegmentMemory(pmem3, 0, 1, 1, 1);
assertSegmentMemory(pmem3, 1, 63, 63, 63);
kbase1.removeRule("org.kie", "r3");
list.clear();
update10Facts(wm, fh1, fh2, fh3, fh4, fh5, fh6, fh7, fh8, fh9, fh10);
wm.fireAllRules();
assertThat(list.size()).isEqualTo(4);
// Removing r3 merges the first two segments back to the two-rule layout.
assertThat(pmem1.getSegmentMemories().length).isEqualTo(2);
assertSegmentMemory(pmem1, 0, 15, 15, 15);
assertSegmentMemory(pmem1, 1, 7, 7, 7);
assertThat(pmem2.getSegmentMemories().length).isEqualTo(2);
assertSegmentMemory(pmem2, 0, 15, 15, 15);
assertSegmentMemory(pmem2, 1, 7, 7, 7);
}
/**
 * Asserts the linked/dirty/all-linked node masks of one segment of the given
 * path memory. Mask layout is only deterministic with eager segment creation,
 * so the check is skipped otherwise.
 */
private void assertSegmentMemory(PathMemory pmem, int segmentPos, int linkedMask, int dirtyMask, int allMask) {
    if (!isEagerSegmentCreation()) {
        return;
    }
    SegmentMemory segment = pmem.getSegmentMemories()[segmentPos];
    assertThat(segment.getLinkedNodeMask()).isEqualTo(linkedMask);
    assertThat(segment.getDirtyNodeMask()).isEqualTo(dirtyMask);
    assertThat(segment.getAllLinkedMaskTest()).isEqualTo(allMask);
}
/**
 * Re-propagates the given facts by updating each handle with its own object,
 * forcing them back through the (possibly changed) network.
 *
 * <p>Generalized from ten fixed parameters to varargs: existing call sites
 * passing exactly ten handles compile unchanged, and the repetition is gone.
 *
 * @param wm      the working memory holding the facts
 * @param handles the fact handles to update (in order)
 */
private static void update10Facts(InternalWorkingMemory wm, InternalFactHandle... handles) {
    for (InternalFactHandle fh : handles) {
        wm.update(fh, fh.getObject());
    }
}
/**
 * After the segments have been initialized by inserting facts and firing,
 * removing r2 must leave r1's path memory with a single segment.
 */
@ParameterizedTest(name = "KieBase type={0}")
@MethodSource("parameters")
public void testPathMemorySizeAfterSegmentMerge(KieBaseTestConfiguration kieBaseTestConfiguration) throws Exception {
// The two A(1;) are not actually shared, as r2 creates an AlphaTerminalNode
InternalKnowledgeBase kbase1 = buildKnowledgeBase(kieBaseTestConfiguration, "r1", "   A(1;) B(1;)\n");
kbase1.addPackages( buildKnowledgePackage(kieBaseTestConfiguration, "r2", "   A(1;)\n") );
InternalWorkingMemory wm = ((InternalWorkingMemory)kbase1.newKieSession());
List list = new ArrayList();
wm.setGlobal("list", list);
// trigger segment initialization
wm.insert(new A(1));
wm.insert(new B(1));
wm.fireAllRules();
RuleTerminalNode rtn1 = getRtn( "org.kie.r1", kbase1 );
RuleTerminalNode rtn2 = getRtn( "org.kie.r2", kbase1 );
assertThat(wm.getNodeMemory(rtn1).getSegmentMemories().length).isEqualTo(1);
assertThat(wm.getNodeMemory(rtn2).getSegmentMemories().length).isEqualTo(1);
kbase1.removeRule("org.kie", "r2");
// r1's path must still be exactly one segment after the removal.
assertThat(wm.getNodeMemory(rtn1).getSegmentMemories().length).isEqualTo(1);
}
/**
 * Same as {@code testPathMemorySizeAfterSegmentMerge} but WITHOUT inserting
 * any facts first, so the segments are never initialized before r2 is removed.
 */
@ParameterizedTest(name = "KieBase type={0}")
@MethodSource("parameters")
public void testPathMemorySizeAfterSegmentMergeNonInitialized(KieBaseTestConfiguration kieBaseTestConfiguration) throws Exception {
InternalKnowledgeBase kbase1 = buildKnowledgeBase(kieBaseTestConfiguration, "r1", "   A(1;) B(1;)\n");
kbase1.addPackages( buildKnowledgePackage(kieBaseTestConfiguration, "r2", "   A(1;)\n") );
InternalWorkingMemory wm = ((InternalWorkingMemory)kbase1.newKieSession());
RuleTerminalNode rtn1 = getRtn( "org.kie.r1", kbase1 );
RuleTerminalNode rtn2 = getRtn( "org.kie.r2", kbase1 );
assertThat(wm.getNodeMemory(rtn1).getSegmentMemories().length).isEqualTo(1);
assertThat(wm.getNodeMemory(rtn2).getSegmentMemories().length).isEqualTo(1);
kbase1.removeRule("org.kie", "r2");
assertThat(wm.getNodeMemory(rtn1).getSegmentMemories().length).isEqualTo(1);
}
/**
 * Four rules share a long prefix; only the segment reached by the inserted E
 * facts (position 2) is created. Adding r5 then splits the path BEFORE that
 * created segment and the test checks the segment position and link masks
 * shift accordingly.
 */
@ParameterizedTest(name = "KieBase type={0}")
@MethodSource("parameters")
public void testSplitTwoBeforeCreatedSegment(KieBaseTestConfiguration kieBaseTestConfiguration) throws Exception {
InternalKnowledgeBase kbase1 = buildKnowledgeBase(kieBaseTestConfiguration, "r1", "   A(1;)  A(2;) B(1;) B(2;) C(1;) C(2;) X(1;) X(2;) E(1;) E(2;)\n");
kbase1.addPackages( buildKnowledgePackage(kieBaseTestConfiguration, "r2", "   A(1;)  A(2;) B(1;) B(2;) C(1;) C(2;) X(1;) X(2;) E(1;) E(2;)\n") );
kbase1.addPackages( buildKnowledgePackage(kieBaseTestConfiguration, "r3", "   A(1;)  A(2;) B(1;) B(2;) C(1;) C(2;) X(1;) X(2;)\n") );
kbase1.addPackages( buildKnowledgePackage(kieBaseTestConfiguration, "r4", "   A(1;)  A(2;) B(1;) B(2;) C(1;) C(2;) \n") );
InternalWorkingMemory wm = ((InternalWorkingMemory)kbase1.newKieSession());
List list = new ArrayList();
wm.setGlobal("list", list);
// Only E facts are inserted, so only the segment containing the E joins
// gets created/linked.
wm.insert(new E(1));
wm.insert(new E(2));
wm.flushPropagations();
RuleTerminalNode rtn1 = getRtn( "org.kie.r1", kbase1 );
RuleTerminalNode rtn2 = getRtn( "org.kie.r2", kbase1 );
RuleTerminalNode rtn3 = getRtn( "org.kie.r3", kbase1 );
RuleTerminalNode rtn4 = getRtn( "org.kie.r4", kbase1 );
PathMemory pm1 = wm.getNodeMemory(rtn1);
SegmentMemory[] smems = pm1.getSegmentMemories();
assertThat(smems.length).isEqualTo(4);
// Segments 0, 1 and 3 were never touched, so they remain uncreated.
assertThat(smems[0]).isNull();
assertThat(smems[1]).isNull();
assertThat(smems[3]).isNull();
SegmentMemory sm = smems[2];
assertThat(sm.getPos()).isEqualTo(2);
assertThat(sm.getSegmentPosMaskBit()).isEqualTo(4);
assertThat(pm1.getLinkedSegmentMask()).isEqualTo(4);
// r5 splits the path before the created segment, pushing it to position 3.
kbase1.addPackages( buildKnowledgePackage(kieBaseTestConfiguration, "r5", "   A(1;)  A(2;) B(1;) B(2;) \n") );
smems = pm1.getSegmentMemories();
assertThat(smems.length).isEqualTo(5);
assertThat(smems[0]).isNull();
assertThat(smems[1]).isNull();
assertThat(smems[2]).isNull();
sm = smems[3];
assertThat(sm.getPos()).isEqualTo(3);
assertThat(sm.getSegmentPosMaskBit()).isEqualTo(8);
assertThat(pm1.getLinkedSegmentMask()).isEqualTo(8);
RuleTerminalNode rtn5 = getRtn( "org.kie.r5", kbase1 );
PathMemory pm5 = wm.getNodeMemory(rtn5);
// With eager segment creation, r5's shorter path has two segment prototypes.
if (isEagerSegmentCreation()) {
assertThat(pm5.getPathEndNode().getSegmentPrototypes().length).isEqualTo(2);
}
}
/**
 * Looks up the terminal node registered for the given fully qualified rule
 * name. Assumes exactly one terminal node per rule (true for all rules built
 * by this test).
 */
private RuleTerminalNode getRtn(String ruleName, InternalKnowledgeBase kbase) {
return ( RuleTerminalNode ) kbase.getReteooBuilder().getTerminalNodes(ruleName)[0];
}
/**
 * Builds a complete DRL source: package/import/global header plus a single
 * rule whose LHS is the given pattern fragment and whose RHS appends the
 * match to the {@code list} global.
 *
 * <p>Improvements: replaced the chain of {@code str +=} concatenations with a
 * StringBuilder, and reuses {@link #addRule(String, String)} which produces
 * exactly the same rule section, removing the duplication.
 *
 * @param ruleName the rule name to declare
 * @param rule     the LHS pattern fragment (must end with a newline)
 * @return the full DRL text
 */
private String buildKnowledgePackageDrl(String ruleName, String rule) {
    StringBuilder str = new StringBuilder();
    str.append("package org.kie \n");
    str.append("import ").append(A.class.getCanonicalName()).append("\n");
    str.append("import ").append(B.class.getCanonicalName()).append("\n");
    str.append("import ").append(C.class.getCanonicalName()).append("\n");
    str.append("import ").append(X.class.getCanonicalName()).append("\n");
    str.append("import ").append(E.class.getCanonicalName()).append("\n");
    str.append("global java.util.List list \n");
    // addRule emits the identical "rule ... when ... then ... end" section.
    str.append(addRule(ruleName, rule));
    return str.toString();
}
/**
 * Emits a single rule definition (no package/import/global header) for the
 * given rule name and LHS fragment; the RHS appends the match to the
 * {@code list} global.
 */
private String addRule(String ruleName, String rule) {
    StringBuilder drl = new StringBuilder();
    drl.append("rule " + ruleName + " when \n");
    drl.append(rule);
    drl.append("then \n");
    drl.append("  list.add( kcontext.getMatch() );\n");
    drl.append("end \n");
    return drl.toString();
}
/**
 * Builds a KieBase containing a single rule with the given name and LHS
 * fragment, returned as the internal interface used by the assertions.
 */
private InternalKnowledgeBase buildKnowledgeBase(KieBaseTestConfiguration kieBaseTestConfiguration, String ruleName, String rule) {
    String drl = buildKnowledgePackageDrl(ruleName, rule);
    KieBase kieBase = KieBaseUtil.getKieBaseFromKieModuleFromDrl("test", kieBaseTestConfiguration, drl);
    return (InternalKnowledgeBase) kieBase;
}
/**
 * Builds a throwaway KieBase for a single rule and returns just its packages,
 * suitable for {@code kbase.addPackages(...)} on an existing KieBase.
 */
private Collection<KiePackage> buildKnowledgePackage(KieBaseTestConfiguration kieBaseTestConfiguration, String ruleName, String rule) {
    String drl = buildKnowledgePackageDrl(ruleName, rule);
    return KieBaseUtil.getKieBaseFromKieModuleFromDrl("tmp", kieBaseTestConfiguration, drl).getKiePackages();
}
/**
 * Finds the ObjectTypeNode in the Rete network whose declared class is exactly
 * the given class, or {@code null} if none is declared for it.
 */
public ObjectTypeNode getObjectTypeNode(KieBase kbase, Class<?> nodeClass) {
    for (ObjectTypeNode candidate : ((InternalRuleBase) kbase).getRete().getObjectTypeNodes()) {
        ClassObjectType objectType = (ClassObjectType) candidate.getObjectType();
        if (objectType.getClassType() == nodeClass) {
            return candidate;
        }
    }
    return null;
}
}
|
apache/solr | 35,135 | solr/core/src/test/org/apache/solr/pkg/TestPackages.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.pkg;
import static org.apache.solr.common.cloud.ZkStateReader.SOLR_PKGS_PATH;
import static org.apache.solr.common.params.CommonParams.JAVABIN;
import static org.apache.solr.common.params.CommonParams.WT;
import static org.apache.solr.core.TestSolrConfigHandler.getFileContent;
import static org.apache.solr.filestore.TestDistribFileStore.checkAllNodesForFile;
import static org.apache.solr.filestore.TestDistribFileStore.readFile;
import static org.apache.solr.filestore.TestDistribFileStore.uploadKey;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.Callable;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;
import org.apache.commons.codec.digest.DigestUtils;
import org.apache.lucene.analysis.core.WhitespaceTokenizerFactory;
import org.apache.lucene.analysis.pattern.PatternReplaceCharFilterFactory;
import org.apache.lucene.util.ResourceLoader;
import org.apache.lucene.util.ResourceLoaderAware;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.SolrRequest;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.impl.BaseHttpSolrClient;
import org.apache.solr.client.solrj.impl.HttpClientUtil;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.request.CollectionAdminRequest;
import org.apache.solr.client.solrj.request.GenericSolrRequest;
import org.apache.solr.client.solrj.request.RequestWriter;
import org.apache.solr.client.solrj.request.UpdateRequest;
import org.apache.solr.client.solrj.request.V2Request;
import org.apache.solr.client.solrj.request.beans.PackagePayload;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.client.solrj.util.ClientUtils;
import org.apache.solr.cloud.MiniSolrCloudCluster;
import org.apache.solr.cloud.SolrCloudTestCase;
import org.apache.solr.common.NavigableObject;
import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.common.annotation.JsonProperty;
import org.apache.solr.common.params.MapSolrParams;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.common.util.ReflectMapWriter;
import org.apache.solr.common.util.Utils;
import org.apache.solr.core.SolrCore;
import org.apache.solr.embedded.JettySolrRunner;
import org.apache.solr.filestore.ClusterFileStore;
import org.apache.solr.filestore.TestDistribFileStore;
import org.apache.solr.handler.RequestHandlerBase;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.response.SolrQueryResponse;
import org.apache.solr.schema.FieldType;
import org.apache.solr.schema.IndexSchema;
import org.apache.solr.search.QParser;
import org.apache.solr.search.QParserPlugin;
import org.apache.solr.security.AuthorizationContext;
import org.apache.solr.util.LogLevel;
import org.apache.solr.util.plugin.SolrCoreAware;
import org.apache.zookeeper.data.Stat;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
/**
 * Integration tests for Solr's package management: uploading signed jars to the distributed
 * file store, registering package versions through the {@code /cluster/package} API, wiring
 * package-backed plugins into collection configs, and verifying version upgrades, rollbacks
 * and refreshes across every node in the cluster.
 */
@LogLevel("org.apache.solr.pkg.PackageLoader=DEBUG;org.apache.solr.pkg.PackageAPI=DEBUG")
public class TestPackages extends SolrCloudTestCase {
  @Before
  @Override
  public void setUp() throws Exception {
    super.setUp();
    // The package feature is off by default; every test in this class requires it.
    System.setProperty("solr.packages.enabled", "true");
    configureCluster(4)
        .withJettyConfig(jetty -> jetty.enableV2(true))
        .addConfig("conf", configset("conf3"))
        .addConfig("conf1", configset("schema-package"))
        .configure();
  }
  @After
  @Override
  public void tearDown() throws Exception {
    if (cluster != null) {
      cluster.shutdown();
    }
    // Restore the default so other test classes are unaffected.
    System.clearProperty("solr.packages.enabled");
    super.tearDown();
  }
  /** Payload bean for the config API's {@code create-*} commands: a plugin name plus its class. */
  public static class ConfigPlugin implements ReflectMapWriter {
    @JsonProperty public String name;
    @JsonProperty("class")
    public String klass;
  }
  // Verifies that registering a new version of a package triggers a core reload, so that
  // package-referenced components in solrconfig (here: the filterCache) report the new version.
  @Test
  public void testCoreReloadingPlugin() throws Exception {
    String FILE1 = "/mypkg/runtimelibs.jar";
    String COLLECTION_NAME = "testCoreReloadingPluginColl";
    byte[] derFile = readFile("cryptokeys/pub_key512.der");
    uploadKey(derFile, ClusterFileStore.KEYS_DIR + "/pub_key512.der", cluster);
    postFileAndWait(
        cluster,
        "runtimecode/runtimelibs.jar.bin",
        FILE1,
        "L3q/qIGs4NaF6JiO0ZkMUFa88j0OmYc+I6O7BOdNuMct/xoZ4h73aZHZGc0+nmI1f/U3bOlMPINlSOM6LK3JpQ==");
    PackagePayload.AddVersion add = new PackagePayload.AddVersion();
    add.version = "1.0";
    add.pkg = "mypkg";
    add.files = Arrays.asList(new String[] {FILE1});
    V2Request req =
        new V2Request.Builder("/cluster/package")
            .forceV2(true)
            .withMethod(SolrRequest.METHOD.POST)
            .withPayload(Collections.singletonMap("add", add))
            .build();
    req.process(cluster.getSolrClient());
    TestDistribFileStore.assertResponseValues(
        10,
        () ->
            new V2Request.Builder("/cluster/package")
                .withMethod(SolrRequest.METHOD.GET)
                .build()
                .process(cluster.getSolrClient()),
        Map.of(
            ":result:packages:mypkg[0]:version",
            "1.0",
            ":result:packages:mypkg[0]:files[0]",
            FILE1));
    CollectionAdminRequest.createCollection(COLLECTION_NAME, "conf", 2, 2)
        .process(cluster.getSolrClient());
    cluster.waitForActiveCollection(COLLECTION_NAME, 2, 4);
    verifyComponent(
        cluster.getSolrClient(), COLLECTION_NAME, "query", "filterCache", add.pkg, add.version);
    // Registering version 2.0 should cause the cores to reload and pick up the new version.
    add.version = "2.0";
    req.process(cluster.getSolrClient());
    TestDistribFileStore.assertResponseValues(
        10,
        () ->
            new V2Request.Builder("/cluster/package")
                .withMethod(SolrRequest.METHOD.GET)
                .build()
                .process(cluster.getSolrClient()),
        Map.of(
            ":result:packages:mypkg[1]:version",
            "2.0",
            ":result:packages:mypkg[1]:files[0]",
            FILE1));
    new UpdateRequest().commit(cluster.getSolrClient(), COLLECTION_NAME);
    verifyComponent(
        cluster.getSolrClient(), COLLECTION_NAME, "query", "filterCache", "mypkg", "2.0");
  }
  // End-to-end test: registers a package containing runtime jars, wires five plugin types into a
  // collection config, then upgrades, deletes, pins and refreshes package versions and verifies
  // that each component reports the expected package version throughout.
  @Test
  public void testPluginLoading() throws Exception {
    String FILE1 = "/mypkg/runtimelibs.jar";
    String FILE2 = "/mypkg/runtimelibs_v2.jar";
    String FILE3 = "/mypkg/runtimelibs_v3.jar";
    String URP1 = "/mypkg/testurpv1.jar";
    String URP2 = "/mypkg/testurpv2.jar";
    String EXPR1 = "/mypkg/expressible.jar";
    String COLLECTION_NAME = "testPluginLoadingColl";
    byte[] derFile = readFile("cryptokeys/pub_key512.der");
    uploadKey(derFile, ClusterFileStore.KEYS_DIR + "/pub_key512.der", cluster);
    postFileAndWait(
        cluster,
        "runtimecode/runtimelibs.jar.bin",
        FILE1,
        "L3q/qIGs4NaF6JiO0ZkMUFa88j0OmYc+I6O7BOdNuMct/xoZ4h73aZHZGc0+nmI1f/U3bOlMPINlSOM6LK3JpQ==");
    postFileAndWait(
        cluster,
        "runtimecode/testurp_v1.jar.bin",
        URP1,
        "h6UmMzuPqu4hQFGLBMJh/6kDSEXpJlgLsQDXx0KuxXWkV5giilRP57K3towiJRh2J+rqihqIghNCi3YgzgUnWQ==");
    postFileAndWait(
        cluster,
        "runtimecode/expressible.jar.bin",
        EXPR1,
        "ZOT11arAiPmPZYOHzqodiNnxO9pRyRozWZEBX8XGjU1/HJptFnZK+DI7eXnUtbNaMcbXE2Ze8hh4M/eGyhY8BQ==");
    PackagePayload.AddVersion add = new PackagePayload.AddVersion();
    add.version = "1.0";
    add.pkg = "mypkg";
    add.files = Arrays.asList(new String[] {FILE1, URP1, EXPR1});
    V2Request req =
        new V2Request.Builder("/cluster/package")
            .forceV2(true)
            .withMethod(SolrRequest.METHOD.POST)
            .withPayload(Collections.singletonMap("add", add))
            .build();
    req.process(cluster.getSolrClient());
    CollectionAdminRequest.createCollection(COLLECTION_NAME, "conf", 2, 2)
        .process(cluster.getSolrClient());
    cluster.waitForActiveCollection(COLLECTION_NAME, 2, 4);
    TestDistribFileStore.assertResponseValues(
        10,
        () ->
            new V2Request.Builder("/cluster/package")
                .withMethod(SolrRequest.METHOD.GET)
                .build()
                .process(cluster.getSolrClient()),
        Map.of(
            ":result:packages:mypkg[0]:version",
            "1.0",
            ":result:packages:mypkg[0]:files[0]",
            FILE1));
    // Register one plugin of each supported type, all backed by classes from the package.
    Map<String, ConfigPlugin> plugins = new LinkedHashMap<>();
    ConfigPlugin p = new ConfigPlugin();
    p.klass = "mypkg:org.apache.solr.core.RuntimeLibReqHandler";
    p.name = "/runtime";
    plugins.put("create-requesthandler", p);
    p = new ConfigPlugin();
    p.klass = "mypkg:org.apache.solr.core.RuntimeLibSearchComponent";
    p.name = "get";
    plugins.put("create-searchcomponent", p);
    p = new ConfigPlugin();
    p.klass = "mypkg:org.apache.solr.core.RuntimeLibResponseWriter";
    p.name = "json1";
    plugins.put("create-queryResponseWriter", p);
    p = new ConfigPlugin();
    p.klass = "mypkg:org.apache.solr.update.TestVersionedURP";
    p.name = "myurp";
    plugins.put("create-updateProcessor", p);
    p = new ConfigPlugin();
    p.klass = "mypkg:org.apache.solr.client.solrj.io.stream.metrics.MinCopyMetric";
    p.name = "mincopy";
    plugins.put("create-expressible", p);
    V2Request v2r =
        new V2Request.Builder("/c/" + COLLECTION_NAME + "/config")
            .withMethod(SolrRequest.METHOD.POST)
            .withPayload(plugins)
            .forceV2(true)
            .build();
    cluster.getSolrClient().request(v2r);
    verifyComponent(
        cluster.getSolrClient(), COLLECTION_NAME, "queryResponseWriter", "json1", "mypkg", "1.0");
    verifyComponent(
        cluster.getSolrClient(), COLLECTION_NAME, "searchComponent", "get", "mypkg", "1.0");
    verifyComponent(
        cluster.getSolrClient(), COLLECTION_NAME, "requestHandler", "/runtime", "mypkg", "1.0");
    verifyComponent(
        cluster.getSolrClient(), COLLECTION_NAME, "updateProcessor", "myurp", "mypkg", "1.0");
    verifyComponent(
        cluster.getSolrClient(), COLLECTION_NAME, "expressible", "mincopy", "mypkg", "1.0");
    TestDistribFileStore.assertResponseValues(
        10,
        cluster.getSolrClient(),
        new GenericSolrRequest(
                SolrRequest.METHOD.GET,
                "/stream",
                SolrRequest.SolrRequestType.ADMIN,
                new MapSolrParams(
                    Map.of("collection", COLLECTION_NAME, WT, JAVABIN, "action", "plugins")))
            .setRequiresCollection(true),
        Map.of(":plugins:mincopy", "org.apache.solr.client.solrj.io.stream.metrics.MinCopyMetric"));
    // Index a doc through the package-provided update processor and verify it stamped the doc.
    UpdateRequest ur = new UpdateRequest();
    ur.add(new SolrInputDocument("id", "1"));
    ur.setParam("processor", "myurp");
    ur.process(cluster.getSolrClient(), COLLECTION_NAME);
    cluster.getSolrClient().commit(COLLECTION_NAME, true, true);
    QueryResponse result = cluster.getSolrClient().query(COLLECTION_NAME, new SolrQuery("id:1"));
    assertEquals("Version 1", result.getResults().get(0).getFieldValue("TestVersionedURP.Ver_s"));
    executeReq(
        "/" + COLLECTION_NAME + "/runtime?wt=javabin",
        cluster.getRandomJetty(random()),
        Utils.JAVABINCONSUMER,
        Map.of("class", "org.apache.solr.core.RuntimeLibReqHandler"));
    executeReq(
        "/" + COLLECTION_NAME + "/get?wt=json",
        cluster.getRandomJetty(random()),
        Utils.JSONCONSUMER,
        Map.of("Version", "1"));
    executeReq(
        "/" + COLLECTION_NAME + "/runtime?wt=json1",
        cluster.getRandomJetty(random()),
        Utils.JSONCONSUMER,
        Map.of("wt", "org.apache.solr.core.RuntimeLibResponseWriter"));
    // now upload the second jar
    postFileAndWait(
        cluster,
        "runtimecode/runtimelibs_v2.jar.bin",
        FILE2,
        "j+Rflxi64tXdqosIhbusqi6GTwZq8znunC/dzwcWW0/dHlFGKDurOaE1Nz9FSPJuXbHkVLj638yZ0Lp1ssnoYA==");
    postFileAndWait(
        cluster,
        "runtimecode/testurp_v2.jar.bin",
        URP2,
        "P/ptFXRvQMd4oKPvadSpd+A9ffwY3gcex5GVFVRy3df0/OF8XT5my8rQz7FZva+2ORbWxdXS8NKwNrbPVHLGXw==");
    // add the version using package API
    add.version = "1.1";
    add.files = Arrays.asList(new String[] {FILE2, URP2, EXPR1});
    req.process(cluster.getSolrClient());
    verifyComponent(
        cluster.getSolrClient(), COLLECTION_NAME, "queryResponseWriter", "json1", "mypkg", "1.1");
    verifyComponent(
        cluster.getSolrClient(), COLLECTION_NAME, "searchComponent", "get", "mypkg", "1.1");
    verifyComponent(
        cluster.getSolrClient(), COLLECTION_NAME, "requestHandler", "/runtime", "mypkg", "1.1");
    verifyComponent(
        cluster.getSolrClient(), COLLECTION_NAME, "updateProcessor", "myurp", "mypkg", "1.1");
    executeReq(
        "/" + COLLECTION_NAME + "/get?wt=json",
        cluster.getRandomJetty(random()),
        Utils.JSONCONSUMER,
        Map.of("Version", "2"));
    // now upload the third jar
    postFileAndWait(
        cluster,
        "runtimecode/runtimelibs_v3.jar.bin",
        FILE3,
        "a400n4T7FT+2gM0SC6+MfSOExjud8MkhTSFylhvwNjtWwUgKdPFn434Wv7Qc4QEqDVLhQoL3WqYtQmLPti0G4Q==");
    add.version = "2.1";
    add.files = Arrays.asList(new String[] {FILE3, URP2, EXPR1});
    req.process(cluster.getSolrClient());
    // now let's verify that the classes are updated
    verifyComponent(
        cluster.getSolrClient(), COLLECTION_NAME, "queryResponseWriter", "json1", "mypkg", "2.1");
    verifyComponent(
        cluster.getSolrClient(), COLLECTION_NAME, "searchComponent", "get", "mypkg", "2.1");
    verifyComponent(
        cluster.getSolrClient(), COLLECTION_NAME, "requestHandler", "/runtime", "mypkg", "2.1");
    executeReq(
        "/" + COLLECTION_NAME + "/runtime?wt=json",
        cluster.getRandomJetty(random()),
        Utils.JSONCONSUMER,
        Map.of("Version", "2"));
    // insert a doc with urp
    ur = new UpdateRequest();
    ur.add(new SolrInputDocument("id", "2"));
    ur.setParam("processor", "myurp");
    ur.process(cluster.getSolrClient(), COLLECTION_NAME);
    cluster.getSolrClient().commit(COLLECTION_NAME, true, true);
    result = cluster.getSolrClient().query(COLLECTION_NAME, new SolrQuery("id:2"));
    assertEquals("Version 2", result.getResults().get(0).getFieldValue("TestVersionedURP.Ver_s"));
    // Deleting an old (unused) version must not affect the components currently in use.
    PackagePayload.DelVersion delVersion = new PackagePayload.DelVersion();
    delVersion.pkg = "mypkg";
    delVersion.version = "1.0";
    V2Request delete =
        new V2Request.Builder("/cluster/package")
            .withMethod(SolrRequest.METHOD.POST)
            .forceV2(true)
            .withPayload(Collections.singletonMap("delete", delVersion))
            .build();
    delete.process(cluster.getSolrClient());
    verifyComponent(
        cluster.getSolrClient(), COLLECTION_NAME, "queryResponseWriter", "json1", "mypkg", "2.1");
    verifyComponent(
        cluster.getSolrClient(), COLLECTION_NAME, "searchComponent", "get", "mypkg", "2.1");
    verifyComponent(
        cluster.getSolrClient(), COLLECTION_NAME, "requestHandler", "/runtime", "mypkg", "2.1");
    // now remove the highest version. So, it will roll back to the next highest one
    delVersion.version = "2.1";
    delete.process(cluster.getSolrClient());
    verifyComponent(
        cluster.getSolrClient(), COLLECTION_NAME, "queryResponseWriter", "json1", "mypkg", "1.1");
    verifyComponent(
        cluster.getSolrClient(), COLLECTION_NAME, "searchComponent", "get", "mypkg", "1.1");
    verifyComponent(
        cluster.getSolrClient(), COLLECTION_NAME, "requestHandler", "/runtime", "mypkg", "1.1");
    // Pin this collection to version 1.1 via the PKG_VERSIONS config param.
    ModifiableSolrParams params = new ModifiableSolrParams();
    params.add("collection", COLLECTION_NAME);
    new GenericSolrRequest(
        SolrRequest.METHOD.POST, "/config/params", SolrRequest.SolrRequestType.ADMIN, params) {
      @Override
      public RequestWriter.ContentWriter getContentWriter(String expectedType) {
        return new RequestWriter.StringPayloadContentWriter(
            "{set:{PKG_VERSIONS:{mypkg : '1.1'}}}", ClientUtils.TEXT_JSON);
      }
    }.setRequiresCollection(true).process(cluster.getSolrClient());
    add.version = "2.1";
    add.files = Arrays.asList(new String[] {FILE3, URP2, EXPR1});
    req.process(cluster.getSolrClient());
    // the collections mypkg is set to use version 1.1
    // so no upgrade
    verifyComponent(
        cluster.getSolrClient(), COLLECTION_NAME, "queryResponseWriter", "json1", "mypkg", "1.1");
    verifyComponent(
        cluster.getSolrClient(), COLLECTION_NAME, "searchComponent", "get", "mypkg", "1.1");
    verifyComponent(
        cluster.getSolrClient(), COLLECTION_NAME, "requestHandler", "/runtime", "mypkg", "1.1");
    new GenericSolrRequest(
        SolrRequest.METHOD.POST, "/config/params", SolrRequest.SolrRequestType.ADMIN, params) {
      @Override
      public RequestWriter.ContentWriter getContentWriter(String expectedType) {
        return new RequestWriter.StringPayloadContentWriter(
            "{set:{PKG_VERSIONS:{mypkg : '2.1'}}}", ClientUtils.TEXT_JSON);
      }
    }.setRequiresCollection(true).process(cluster.getSolrClient());
    // now, let's force every collection using 'mypkg' to refresh
    // so that it uses version 2.1
    new V2Request.Builder("/cluster/package")
        .withMethod(SolrRequest.METHOD.POST)
        .withPayload("{refresh : mypkg}")
        .forceV2(true)
        .build()
        .process(cluster.getSolrClient());
    verifyComponent(
        cluster.getSolrClient(), COLLECTION_NAME, "queryResponseWriter", "json1", "mypkg", "2.1");
    verifyComponent(
        cluster.getSolrClient(), COLLECTION_NAME, "searchComponent", "get", "mypkg", "2.1");
    verifyComponent(
        cluster.getSolrClient(), COLLECTION_NAME, "requestHandler", "/runtime", "mypkg", "2.1");
    // Plugins declared with classes local to this test (C, C2) must also be loadable
    // through the package classloader, and their inform() callbacks must fire.
    plugins.clear();
    p = new ConfigPlugin();
    p.name = "/rt_2";
    p.klass = "mypkg:" + C.class.getName();
    plugins.put("create-requesthandler", p);
    p = new ConfigPlugin();
    p.name = "qp1";
    p.klass = "mypkg:" + C2.class.getName();
    plugins.put("create-queryparser", p);
    v2r =
        new V2Request.Builder("/c/" + COLLECTION_NAME + "/config")
            .withMethod(SolrRequest.METHOD.POST)
            .withPayload(plugins)
            .forceV2(true)
            .build();
    cluster.getSolrClient().request(v2r);
    assertTrue(C.informCalled);
    assertTrue(C2.informCalled);
    // we create a new node. This node does not have the packages. But it should download it from
    // another node
    JettySolrRunner jetty = cluster.startJettySolrRunner();
    // create a new replica for this collection. it should end up on the new node, which should
    // fetch the package files from its peers
    CollectionAdminRequest.addReplicaToShard(COLLECTION_NAME, "shard1")
        .setNrtReplicas(1)
        .setNode(jetty.getNodeName())
        .process(cluster.getSolrClient());
    cluster.waitForActiveCollection(COLLECTION_NAME, 2, 5);
    checkAllNodesForFile(
        cluster, FILE3, Map.of(":files:" + FILE3 + ":name", "runtimelibs_v3.jar"), false);
  }
  /**
   * Polls {@code uri} on the given node (up to 10 attempts) until the parsed response contains
   * all of the {@code expected} path/value pairs.
   */
  @SuppressWarnings("unchecked")
  private void executeReq(
      String uri,
      JettySolrRunner jetty,
      Utils.InputStreamConsumer<?> parser,
      Map<String, Object> expected)
      throws Exception {
    try (HttpSolrClient client = (HttpSolrClient) jetty.newClient()) {
      TestDistribFileStore.assertResponseValues(
          10,
          () ->
              NavigableObject.wrap(
                  HttpClientUtil.executeGET(
                      client.getHttpClient(), jetty.getBaseUrl() + uri, parser)),
          expected);
    }
  }
  /**
   * Polls the {@code /config/<componentType>} endpoint until the named component reports the
   * expected package name and version in its {@code _packageinfo_} metadata.
   */
  private void verifyComponent(
      SolrClient client,
      String COLLECTION_NAME,
      String componentType,
      String componentName,
      String pkg,
      String version)
      throws Exception {
    SolrParams params =
        new MapSolrParams(
            Map.of(
                "collection",
                COLLECTION_NAME,
                WT,
                JAVABIN,
                "componentName",
                componentName,
                "meta",
                "true"));
    TestDistribFileStore.assertResponseValues(
        10,
        client,
        new GenericSolrRequest(
                SolrRequest.METHOD.GET,
                "/config/" + componentType,
                SolrRequest.SolrRequestType.ADMIN,
                params)
            .setRequiresCollection(true),
        Map.of(
            ":config:" + componentType + ":" + componentName + ":_packageinfo_:package", pkg,
            ":config:" + componentType + ":" + componentName + ":_packageinfo_:version", version));
  }
  // Exercises validation in the package add/delete API: missing files, missing signatures and
  // unknown versions must fail with descriptive errors; successful operations are then verified
  // both directly in ZK (/packages.json) and through every node's package read API.
  @Test
  @SuppressWarnings("unchecked")
  public void testAPI() throws Exception {
    String errPath = "/error/details[0]/errorMessages[0]";
    String FILE1 = "/mypkg/v.0.12/jar_a.jar";
    String FILE2 = "/mypkg/v.0.12/jar_b.jar";
    String FILE3 = "/mypkg/v.0.13/jar_a.jar";
    PackagePayload.AddVersion add = new PackagePayload.AddVersion();
    add.version = "0.12";
    add.pkg = "test_pkg";
    add.files = List.of(FILE1, FILE2);
    V2Request req =
        new V2Request.Builder("/cluster/package")
            .forceV2(true)
            .withMethod(SolrRequest.METHOD.POST)
            .withPayload(Collections.singletonMap("add", add))
            .build();
    // the files are not yet there. The command should fail with error saying "No such file"
    expectError(req, cluster.getSolrClient(), errPath, "No such file:");
    // post the jar file. No signature is sent
    postFileAndWait(cluster, "runtimecode/runtimelibs.jar.bin", FILE1, null);
    add.files = Collections.singletonList(FILE1);
    expectError(req, cluster.getSolrClient(), errPath, FILE1 + " has no signature");
    // now we upload the keys
    byte[] derFile = readFile("cryptokeys/pub_key512.der");
    uploadKey(derFile, ClusterFileStore.KEYS_DIR + "/pub_key512.der", cluster);
    // and upload the same file with a different name, but it has proper signature
    postFileAndWait(
        cluster,
        "runtimecode/runtimelibs.jar.bin",
        FILE2,
        "L3q/qIGs4NaF6JiO0ZkMUFa88j0OmYc+I6O7BOdNuMct/xoZ4h73aZHZGc0+nmI1f/U3bOlMPINlSOM6LK3JpQ==");
    // with correct signature
    // after uploading the file, let's delete the keys to see if we get proper error message
    add.files = Collections.singletonList(FILE2);
    /*expectError(req, cluster.getSolrClient(), errPath,
    "ZooKeeper does not have any public keys");*/
    // Now lets' put the keys back
    // this time we have a file with proper signature, public keys are in ZK
    // so the add {} command should succeed
    req.process(cluster.getSolrClient());
    // Now verify the data in ZK
    TestDistribFileStore.assertResponseValues(
        1,
        () ->
            NavigableObject.wrap(
                Utils.fromJSON(
                    cluster.getZkClient().getData(SOLR_PKGS_PATH, null, new Stat(), true))),
        Map.of(":packages:test_pkg[0]:version", "0.12", ":packages:test_pkg[0]:files[0]", FILE2));
    // post a new jar with a proper signature
    postFileAndWait(
        cluster,
        "runtimecode/runtimelibs_v2.jar.bin",
        FILE3,
        "j+Rflxi64tXdqosIhbusqi6GTwZq8znunC/dzwcWW0/dHlFGKDurOaE1Nz9FSPJuXbHkVLj638yZ0Lp1ssnoYA==");
    // this time we are adding the second version of the package (0.13)
    add.version = "0.13";
    add.pkg = "test_pkg";
    add.files = Collections.singletonList(FILE3);
    // this request should succeed
    req.process(cluster.getSolrClient());
    // now verify the data (/packages.json) in ZK
    TestDistribFileStore.assertResponseValues(
        1,
        () ->
            NavigableObject.wrap(
                Utils.fromJSON(
                    cluster.getZkClient().getData(SOLR_PKGS_PATH, null, new Stat(), true))),
        Map.of(":packages:test_pkg[1]:version", "0.13", ":packages:test_pkg[1]:files[0]", FILE3));
    // Now we will just delete one version
    PackagePayload.DelVersion delVersion = new PackagePayload.DelVersion();
    delVersion.version = "0.1"; // this version does not exist
    delVersion.pkg = "test_pkg";
    req =
        new V2Request.Builder("/cluster/package")
            .forceV2(true)
            .withMethod(SolrRequest.METHOD.POST)
            .withPayload(Collections.singletonMap("delete", delVersion))
            .build();
    // we are expecting an error
    expectError(req, cluster.getSolrClient(), errPath, "No such version:");
    delVersion.version = "0.12"; // correct version. Should succeed
    req.process(cluster.getSolrClient());
    // Verify with ZK that the data is correct
    TestDistribFileStore.assertResponseValues(
        1,
        () ->
            NavigableObject.wrap(
                Utils.fromJSON(
                    cluster.getZkClient().getData(SOLR_PKGS_PATH, null, new Stat(), true))),
        Map.of(":packages:test_pkg[0]:version", "0.13", ":packages:test_pkg[0]:files[0]", FILE3));
    // So far we have been verifying the details with ZK directly
    // use the package read API to verify with each node that it has the correct data
    for (JettySolrRunner jetty : cluster.getJettySolrRunners()) {
      String path = jetty.getBaseURLV2().toString() + "/cluster/package?wt=javabin";
      TestDistribFileStore.assertResponseValues(
          10,
          new Callable<NavigableObject>() {
            @Override
            public NavigableObject call() throws Exception {
              try (HttpSolrClient solrClient = (HttpSolrClient) jetty.newClient()) {
                return (NavigableObject)
                    HttpClientUtil.executeGET(
                        solrClient.getHttpClient(), path, Utils.JAVABINCONSUMER);
              }
            }
          },
          Map.of(
              ":result:packages:test_pkg[0]:version",
              "0.13",
              ":result:packages:test_pkg[0]:files[0]",
              FILE3));
    }
  }
  /** Test request handler that records whether its {@link SolrCoreAware#inform} callback ran. */
  public static class C extends RequestHandlerBase implements SolrCoreAware {
    static boolean informCalled = false;
    @Override
    public void inform(SolrCore core) {
      informCalled = true;
    }
    @Override
    public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) {}
    @Override
    public String getDescription() {
      return "test";
    }
    @Override
    public Name getPermissionName(AuthorizationContext request) {
      return Name.ALL;
    }
  }
  /** Test query parser plugin that records whether its {@link ResourceLoaderAware#inform} ran. */
  public static class C2 extends QParserPlugin implements ResourceLoaderAware {
    static boolean informCalled = false;
    @Override
    public void inform(ResourceLoader loader) {
      informCalled = true;
    }
    @Override
    public QParser createParser(
        String qstr, SolrParams localParams, SolrParams params, SolrQueryRequest req) {
      return null;
    }
  }
  // Verifies that schema plugins are loaded from packages and that registering a new package
  // version (even with identical files) reloads the schema with classes from a new classloader.
  @Test
  public void testSchemaPlugins() throws Exception {
    String COLLECTION_NAME = "testSchemaLoadingColl";
    System.setProperty("managed.schema.mutable", "true");
    IndexSchema[] schemas = new IndexSchema[2]; // tracks schemas for a selected core
    String FILE1 = "/schemapkg/schema-plugins.jar";
    byte[] derFile = readFile("cryptokeys/pub_key512.der");
    uploadKey(derFile, ClusterFileStore.KEYS_DIR + "/pub_key512.der", cluster);
    postFileAndWait(
        cluster,
        "runtimecode/schema-plugins.jar.bin",
        FILE1,
        "U+AdO/jgY3DtMpeFRGoTQk72iA5g/qjPvdQYPGBaXB5+ggcTZk4FoIWiueB0bwGJ8Mg3V/elxOqEbD2JR8R0tA==");
    String FILE2 = "/schemapkg/payload-component.jar";
    postFileAndWait(
        cluster,
        "runtimecode/payload-component.jar.bin",
        FILE2,
        "gI6vYUDmSXSXmpNEeK1cwqrp4qTeVQgizGQkd8A4Prx2K8k7c5QlXbcs4lxFAAbbdXz9F4esBqTCiLMjVDHJ5Q==");
    // upload package v1.0
    PackagePayload.AddVersion add = new PackagePayload.AddVersion();
    add.version = "1.0";
    add.pkg = "schemapkg";
    add.files = Arrays.asList(FILE1, FILE2);
    V2Request req =
        new V2Request.Builder("/cluster/package")
            .forceV2(true)
            .withMethod(SolrRequest.METHOD.POST)
            .withPayload(Collections.singletonMap("add", add))
            .build();
    req.process(cluster.getSolrClient());
    TestDistribFileStore.assertResponseValues(
        10,
        () ->
            new V2Request.Builder("/cluster/package")
                .withMethod(SolrRequest.METHOD.GET)
                .build()
                .process(cluster.getSolrClient()),
        Map.of(
            ":result:packages:schemapkg[0]:version",
            "1.0",
            ":result:packages:schemapkg[0]:files[0]",
            FILE1));
    CollectionAdminRequest.createCollection(COLLECTION_NAME, "conf1", 2, 2)
        .process(cluster.getSolrClient());
    cluster.waitForActiveCollection(COLLECTION_NAME, 2, 4);
    // make note of the schema instance for one of the cores
    SolrCore.Provider coreProvider =
        cluster.getJettySolrRunners().stream()
            .flatMap(
                jetty ->
                    jetty.getCoreContainer().getAllCoreNames().stream()
                        .map(name -> new SolrCore.Provider(jetty.getCoreContainer(), name, null)))
            .findFirst()
            .orElseThrow();
    coreProvider.withCore(core -> schemas[0] = core.getLatestSchema());
    // upload package v2.0
    add = new PackagePayload.AddVersion();
    add.version = "2.0";
    add.pkg = "schemapkg";
    add.files = Arrays.asList(FILE1, FILE2);
    req =
        new V2Request.Builder("/cluster/package")
            .forceV2(true)
            .withMethod(SolrRequest.METHOD.POST)
            .withPayload(Collections.singletonMap("add", add))
            .build();
    req.process(cluster.getSolrClient());
    TestDistribFileStore.assertResponseValues(
        10,
        () ->
            new V2Request.Builder("/cluster/package")
                .withMethod(SolrRequest.METHOD.GET)
                .build()
                .process(cluster.getSolrClient()),
        Map.of(
            ":result:packages:schemapkg[1]:version",
            "2.0",
            ":result:packages:schemapkg[1]:files[0]",
            FILE1));
    // even though package version 2.0 uses exactly the same files
    // as version 1.0, the core schema should still reload, and
    // the core should be associated with a different schema instance
    TestDistribFileStore.assertResponseValues(
        10,
        () -> {
          coreProvider.withCore(core -> schemas[1] = core.getLatestSchema());
          return params("schemaReloaded", (schemas[0] != schemas[1]) ? "yes" : "no");
        },
        Map.of("schemaReloaded", "yes"));
    // after the reload, the custom field type class now comes from package v2.0
    String fieldTypeName = "myNewTextFieldWithAnalyzerClass";
    FieldType fieldTypeV1 = schemas[0].getFieldTypeByName(fieldTypeName);
    assertEquals("my.pkg.MyTextField", fieldTypeV1.getClass().getCanonicalName());
    FieldType fieldTypeV2 = schemas[1].getFieldTypeByName(fieldTypeName);
    assertEquals("my.pkg.MyTextField", fieldTypeV2.getClass().getCanonicalName());
    assertNotEquals(
        "my.pkg.MyTextField classes should be from different classloaders",
        fieldTypeV1.getClass(),
        fieldTypeV2.getClass());
  }
  /**
   * Uploads the classpath resource {@code fname} to the file store at {@code path} (with an
   * optional signature {@code sig}; may be null) and waits until every node reports the file
   * with the expected sha512.
   */
  public static void postFileAndWait(
      MiniSolrCloudCluster cluster, String fname, String path, String sig) throws Exception {
    ByteBuffer fileContent = getFileContent(fname);
    @SuppressWarnings("ByteBufferBackingArray") // this is the result of a call to wrap()
    String sha512 = DigestUtils.sha512Hex(fileContent.array());
    TestDistribFileStore.postFile(
        cluster.getSolrClient(), fileContent, path, sig); // sig may be null (unsigned upload)
    TestDistribFileStore.checkAllNodesForFile(
        cluster, path, Map.of(":files:" + path + ":sha512", sha512), false);
  }
  /**
   * Executes {@code req} and asserts that it fails remotely with a message containing
   * {@code expectErrorMsg} at {@code errPath} in the error response.
   */
  private void expectError(V2Request req, SolrClient client, String errPath, String expectErrorMsg)
      throws IOException, SolrServerException {
    try {
      req.process(client);
      fail("should have failed with message : " + expectErrorMsg);
    } catch (BaseHttpSolrClient.RemoteExecutionException e) {
      String msg = Objects.requireNonNullElse(e.getMetaData()._getStr(errPath), "");
      assertTrue(
          "should have failed with message: " + expectErrorMsg + "actual message : " + msg,
          msg.contains(expectErrorMsg));
    }
  }
  /** Trivial subclass used when generating test jars for package-based analysis chains. */
  public static class BasePatternReplaceCharFilterFactory extends PatternReplaceCharFilterFactory {
    public BasePatternReplaceCharFilterFactory(Map<String, String> args) {
      super(args);
    }
  }
  /** Trivial subclass used when generating test jars for package-based analysis chains. */
  public static class BaseWhitespaceTokenizerFactory extends WhitespaceTokenizerFactory {
    public BaseWhitespaceTokenizerFactory(Map<String, String> args) {
      super(args);
    }
  }
  /*
  //copy the jav files to a package and then run the main method
  public static void main(String[] args) throws Exception {
  persistZip("/tmp/x.jar", MyPatternReplaceCharFilterFactory.class, MyTextField.class, MyWhitespaceTokenizerFactory.class);
  }*/
  /** Writes a jar containing the given classes' bytecode to {@code loc} and returns its bytes. */
  public static ByteBuffer persistZip(String loc, Class<?>... classes) throws IOException {
    ByteBuffer jar = generateZip(classes);
    try (FileOutputStream fos = new FileOutputStream(loc)) {
      fos.write(jar.array(), jar.arrayOffset(), jar.limit());
      fos.flush();
    }
    return jar;
  }
  /** Builds an in-memory zip (jar) containing the .class files of the given classes. */
  public static ByteBuffer generateZip(Class<?>... classes) throws IOException {
    Utils.BAOS bos = new Utils.BAOS();
    try (ZipOutputStream zipOut = new ZipOutputStream(bos)) {
      zipOut.setLevel(ZipOutputStream.DEFLATED);
      for (Class<?> c : classes) {
        String path = c.getName().replace('.', '/').concat(".class");
        ZipEntry entry = new ZipEntry(path);
        ByteBuffer b = Utils.toByteArray(c.getClassLoader().getResourceAsStream(path));
        zipOut.putNextEntry(entry);
        zipOut.write(b.array(), b.arrayOffset(), b.limit());
        zipOut.closeEntry();
      }
    }
    return bos.getByteBuffer();
  }
}
|
googleapis/google-cloud-java | 35,456 | java-private-catalog/proto-google-cloud-private-catalog-v1beta1/src/main/java/com/google/cloud/privatecatalog/v1beta1/SearchCatalogsRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/privatecatalog/v1beta1/private_catalog.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.privatecatalog.v1beta1;
/**
*
*
* <pre>
* Request message for [PrivateCatalog.SearchCatalogs][google.cloud.privatecatalog.v1beta1.PrivateCatalog.SearchCatalogs].
* </pre>
*
* Protobuf type {@code google.cloud.privatecatalog.v1beta1.SearchCatalogsRequest}
*/
public final class SearchCatalogsRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.privatecatalog.v1beta1.SearchCatalogsRequest)
SearchCatalogsRequestOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use SearchCatalogsRequest.newBuilder() to construct.
  private SearchCatalogsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // No-arg constructor initializes every string field to the empty string (proto3 default).
  private SearchCatalogsRequest() {
    resource_ = "";
    query_ = "";
    pageToken_ = "";
  }
  // Used by the protobuf runtime to create fresh instances during parsing;
  // the parameter exists only to disambiguate this overload.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new SearchCatalogsRequest();
  }
  // Returns the message descriptor generated from private_catalog.proto.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.privatecatalog.v1beta1.PrivateCatalogProto
        .internal_static_google_cloud_privatecatalog_v1beta1_SearchCatalogsRequest_descriptor;
  }
  // Wires the generated field accessors to this message class and its Builder.
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.privatecatalog.v1beta1.PrivateCatalogProto
        .internal_static_google_cloud_privatecatalog_v1beta1_SearchCatalogsRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.privatecatalog.v1beta1.SearchCatalogsRequest.class,
            com.google.cloud.privatecatalog.v1beta1.SearchCatalogsRequest.Builder.class);
  }
  public static final int RESOURCE_FIELD_NUMBER = 1;

  // Holds either a String or a ByteString: the protobuf runtime may parse the field as a
  // ByteString, which is lazily converted to a String (and cached) on first getResource() call.
  @SuppressWarnings("serial")
  private volatile java.lang.Object resource_ = "";
  /**
   *
   *
   * <pre>
   * Required. The name of the resource context. It can be in following formats:
   *
   * * `projects/{project}`
   * * `folders/{folder}`
   * * `organizations/{organization}`
   * </pre>
   *
   * <code>string resource = 1 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The resource.
   */
  @java.lang.Override
  public java.lang.String getResource() {
    java.lang.Object ref = resource_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      // Cache the decoded String so subsequent calls skip the UTF-8 conversion.
      resource_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Required. The name of the resource context. It can be in following formats:
   *
   * * `projects/{project}`
   * * `folders/{folder}`
   * * `organizations/{organization}`
   * </pre>
   *
   * <code>string resource = 1 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The bytes for resource.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getResourceBytes() {
    java.lang.Object ref = resource_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      // Cache the encoded ByteString so subsequent calls skip the UTF-8 conversion.
      resource_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
public static final int QUERY_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object query_ = "";
/**
*
*
* <pre>
* The query to filter the catalogs. The supported queries are:
*
* * Get a single catalog: `name=catalogs/{catalog}`
* </pre>
*
* <code>string query = 2;</code>
*
* @return The query.
*/
@java.lang.Override
public java.lang.String getQuery() {
java.lang.Object ref = query_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
query_ = s;
return s;
}
}
/**
*
*
* <pre>
* The query to filter the catalogs. The supported queries are:
*
* * Get a single catalog: `name=catalogs/{catalog}`
* </pre>
*
* <code>string query = 2;</code>
*
* @return The bytes for query.
*/
@java.lang.Override
public com.google.protobuf.ByteString getQueryBytes() {
java.lang.Object ref = query_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
query_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int PAGE_SIZE_FIELD_NUMBER = 3;
private int pageSize_ = 0;
/**
*
*
* <pre>
* The maximum number of entries that are requested.
* </pre>
*
* <code>int32 page_size = 3;</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
public static final int PAGE_TOKEN_FIELD_NUMBER = 4;
@SuppressWarnings("serial")
private volatile java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* A pagination token returned from a previous call to SearchCatalogs that
* indicates where this listing should continue from.
* </pre>
*
* <code>string page_token = 4;</code>
*
* @return The pageToken.
*/
@java.lang.Override
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* A pagination token returned from a previous call to SearchCatalogs that
* indicates where this listing should continue from.
* </pre>
*
* <code>string page_token = 4;</code>
*
* @return The bytes for pageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resource_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, resource_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(query_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, query_);
}
if (pageSize_ != 0) {
output.writeInt32(3, pageSize_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 4, pageToken_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resource_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, resource_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(query_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, query_);
}
if (pageSize_ != 0) {
size += com.google.protobuf.CodedOutputStream.computeInt32Size(3, pageSize_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, pageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.privatecatalog.v1beta1.SearchCatalogsRequest)) {
return super.equals(obj);
}
com.google.cloud.privatecatalog.v1beta1.SearchCatalogsRequest other =
(com.google.cloud.privatecatalog.v1beta1.SearchCatalogsRequest) obj;
if (!getResource().equals(other.getResource())) return false;
if (!getQuery().equals(other.getQuery())) return false;
if (getPageSize() != other.getPageSize()) return false;
if (!getPageToken().equals(other.getPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + RESOURCE_FIELD_NUMBER;
hash = (53 * hash) + getResource().hashCode();
hash = (37 * hash) + QUERY_FIELD_NUMBER;
hash = (53 * hash) + getQuery().hashCode();
hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
hash = (53 * hash) + getPageSize();
hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.privatecatalog.v1beta1.SearchCatalogsRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.privatecatalog.v1beta1.SearchCatalogsRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.privatecatalog.v1beta1.SearchCatalogsRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.privatecatalog.v1beta1.SearchCatalogsRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.privatecatalog.v1beta1.SearchCatalogsRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.privatecatalog.v1beta1.SearchCatalogsRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.privatecatalog.v1beta1.SearchCatalogsRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.privatecatalog.v1beta1.SearchCatalogsRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.privatecatalog.v1beta1.SearchCatalogsRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.privatecatalog.v1beta1.SearchCatalogsRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.privatecatalog.v1beta1.SearchCatalogsRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.privatecatalog.v1beta1.SearchCatalogsRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.privatecatalog.v1beta1.SearchCatalogsRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request message for [PrivateCatalog.SearchCatalogs][google.cloud.privatecatalog.v1beta1.PrivateCatalog.SearchCatalogs].
* </pre>
*
* Protobuf type {@code google.cloud.privatecatalog.v1beta1.SearchCatalogsRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.privatecatalog.v1beta1.SearchCatalogsRequest)
com.google.cloud.privatecatalog.v1beta1.SearchCatalogsRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.privatecatalog.v1beta1.PrivateCatalogProto
.internal_static_google_cloud_privatecatalog_v1beta1_SearchCatalogsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.privatecatalog.v1beta1.PrivateCatalogProto
.internal_static_google_cloud_privatecatalog_v1beta1_SearchCatalogsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.privatecatalog.v1beta1.SearchCatalogsRequest.class,
com.google.cloud.privatecatalog.v1beta1.SearchCatalogsRequest.Builder.class);
}
// Construct using com.google.cloud.privatecatalog.v1beta1.SearchCatalogsRequest.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
resource_ = "";
query_ = "";
pageSize_ = 0;
pageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.privatecatalog.v1beta1.PrivateCatalogProto
.internal_static_google_cloud_privatecatalog_v1beta1_SearchCatalogsRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.privatecatalog.v1beta1.SearchCatalogsRequest
getDefaultInstanceForType() {
return com.google.cloud.privatecatalog.v1beta1.SearchCatalogsRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.privatecatalog.v1beta1.SearchCatalogsRequest build() {
com.google.cloud.privatecatalog.v1beta1.SearchCatalogsRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.privatecatalog.v1beta1.SearchCatalogsRequest buildPartial() {
com.google.cloud.privatecatalog.v1beta1.SearchCatalogsRequest result =
new com.google.cloud.privatecatalog.v1beta1.SearchCatalogsRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(
com.google.cloud.privatecatalog.v1beta1.SearchCatalogsRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.resource_ = resource_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.query_ = query_;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.pageSize_ = pageSize_;
}
if (((from_bitField0_ & 0x00000008) != 0)) {
result.pageToken_ = pageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.privatecatalog.v1beta1.SearchCatalogsRequest) {
return mergeFrom((com.google.cloud.privatecatalog.v1beta1.SearchCatalogsRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.privatecatalog.v1beta1.SearchCatalogsRequest other) {
if (other
== com.google.cloud.privatecatalog.v1beta1.SearchCatalogsRequest.getDefaultInstance())
return this;
if (!other.getResource().isEmpty()) {
resource_ = other.resource_;
bitField0_ |= 0x00000001;
onChanged();
}
if (!other.getQuery().isEmpty()) {
query_ = other.query_;
bitField0_ |= 0x00000002;
onChanged();
}
if (other.getPageSize() != 0) {
setPageSize(other.getPageSize());
}
if (!other.getPageToken().isEmpty()) {
pageToken_ = other.pageToken_;
bitField0_ |= 0x00000008;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
resource_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
query_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
case 24:
{
pageSize_ = input.readInt32();
bitField0_ |= 0x00000004;
break;
} // case 24
case 34:
{
pageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000008;
break;
} // case 34
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object resource_ = "";
/**
*
*
* <pre>
* Required. The name of the resource context. It can be in following formats:
*
* * `projects/{project}`
* * `folders/{folder}`
* * `organizations/{organization}`
* </pre>
*
* <code>string resource = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The resource.
*/
public java.lang.String getResource() {
java.lang.Object ref = resource_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
resource_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The name of the resource context. It can be in following formats:
*
* * `projects/{project}`
* * `folders/{folder}`
* * `organizations/{organization}`
* </pre>
*
* <code>string resource = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for resource.
*/
public com.google.protobuf.ByteString getResourceBytes() {
java.lang.Object ref = resource_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
resource_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The name of the resource context. It can be in following formats:
*
* * `projects/{project}`
* * `folders/{folder}`
* * `organizations/{organization}`
* </pre>
*
* <code>string resource = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The resource to set.
* @return This builder for chaining.
*/
public Builder setResource(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
resource_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The name of the resource context. It can be in following formats:
*
* * `projects/{project}`
* * `folders/{folder}`
* * `organizations/{organization}`
* </pre>
*
* <code>string resource = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return This builder for chaining.
*/
public Builder clearResource() {
resource_ = getDefaultInstance().getResource();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The name of the resource context. It can be in following formats:
*
* * `projects/{project}`
* * `folders/{folder}`
* * `organizations/{organization}`
* </pre>
*
* <code>string resource = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The bytes for resource to set.
* @return This builder for chaining.
*/
public Builder setResourceBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
resource_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private java.lang.Object query_ = "";
/**
*
*
* <pre>
* The query to filter the catalogs. The supported queries are:
*
* * Get a single catalog: `name=catalogs/{catalog}`
* </pre>
*
* <code>string query = 2;</code>
*
* @return The query.
*/
public java.lang.String getQuery() {
java.lang.Object ref = query_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
query_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* The query to filter the catalogs. The supported queries are:
*
* * Get a single catalog: `name=catalogs/{catalog}`
* </pre>
*
* <code>string query = 2;</code>
*
* @return The bytes for query.
*/
public com.google.protobuf.ByteString getQueryBytes() {
java.lang.Object ref = query_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
query_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* The query to filter the catalogs. The supported queries are:
*
* * Get a single catalog: `name=catalogs/{catalog}`
* </pre>
*
* <code>string query = 2;</code>
*
* @param value The query to set.
* @return This builder for chaining.
*/
public Builder setQuery(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
query_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* The query to filter the catalogs. The supported queries are:
*
* * Get a single catalog: `name=catalogs/{catalog}`
* </pre>
*
* <code>string query = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearQuery() {
query_ = getDefaultInstance().getQuery();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* The query to filter the catalogs. The supported queries are:
*
* * Get a single catalog: `name=catalogs/{catalog}`
* </pre>
*
* <code>string query = 2;</code>
*
* @param value The bytes for query to set.
* @return This builder for chaining.
*/
public Builder setQueryBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
query_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private int pageSize_;
/**
*
*
* <pre>
* The maximum number of entries that are requested.
* </pre>
*
* <code>int32 page_size = 3;</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
/**
*
*
* <pre>
* The maximum number of entries that are requested.
* </pre>
*
* <code>int32 page_size = 3;</code>
*
* @param value The pageSize to set.
* @return This builder for chaining.
*/
public Builder setPageSize(int value) {
pageSize_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* The maximum number of entries that are requested.
* </pre>
*
* <code>int32 page_size = 3;</code>
*
* @return This builder for chaining.
*/
public Builder clearPageSize() {
bitField0_ = (bitField0_ & ~0x00000004);
pageSize_ = 0;
onChanged();
return this;
}
private java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* A pagination token returned from a previous call to SearchCatalogs that
* indicates where this listing should continue from.
* </pre>
*
* <code>string page_token = 4;</code>
*
* @return The pageToken.
*/
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* A pagination token returned from a previous call to SearchCatalogs that
* indicates where this listing should continue from.
* </pre>
*
* <code>string page_token = 4;</code>
*
* @return The bytes for pageToken.
*/
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* A pagination token returned from a previous call to SearchCatalogs that
* indicates where this listing should continue from.
* </pre>
*
* <code>string page_token = 4;</code>
*
* @param value The pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
pageToken_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
*
*
* <pre>
* A pagination token returned from a previous call to SearchCatalogs that
* indicates where this listing should continue from.
* </pre>
*
* <code>string page_token = 4;</code>
*
* @return This builder for chaining.
*/
public Builder clearPageToken() {
pageToken_ = getDefaultInstance().getPageToken();
bitField0_ = (bitField0_ & ~0x00000008);
onChanged();
return this;
}
/**
*
*
* <pre>
* A pagination token returned from a previous call to SearchCatalogs that
* indicates where this listing should continue from.
* </pre>
*
* <code>string page_token = 4;</code>
*
* @param value The bytes for pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
pageToken_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.privatecatalog.v1beta1.SearchCatalogsRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.privatecatalog.v1beta1.SearchCatalogsRequest)
private static final com.google.cloud.privatecatalog.v1beta1.SearchCatalogsRequest
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.privatecatalog.v1beta1.SearchCatalogsRequest();
}
public static com.google.cloud.privatecatalog.v1beta1.SearchCatalogsRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<SearchCatalogsRequest> PARSER =
new com.google.protobuf.AbstractParser<SearchCatalogsRequest>() {
@java.lang.Override
public SearchCatalogsRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<SearchCatalogsRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<SearchCatalogsRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.privatecatalog.v1beta1.SearchCatalogsRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
apache/jackrabbit-oak | 35,457 | oak-lucene/src/main/java/org/apache/lucene/search/similarities/TFIDFSimilarity.java | /*
* COPIED FROM APACHE LUCENE 4.7.2
*
* Git URL: git@github.com:apache/lucene.git, tag: releases/lucene-solr/4.7.2, path: lucene/core/src/java
*
* (see https://issues.apache.org/jira/browse/OAK-10786 for details)
*/
package org.apache.lucene.search.similarities;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.index.FieldInvertState;
import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.search.CollectionStatistics;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.PhraseQuery;
import org.apache.lucene.search.TermStatistics;
import org.apache.lucene.util.BytesRef;
/**
* Implementation of {@link Similarity} with the Vector Space Model.
* <p>
* Expert: Scoring API.
* <p>TFIDFSimilarity defines the components of Lucene scoring.
* Overriding computation of these components is a convenient
* way to alter Lucene scoring.
*
* <p>Suggested reading:
* <a href="http://nlp.stanford.edu/IR-book/html/htmledition/queries-as-vectors-1.html">
* Introduction To Information Retrieval, Chapter 6</a>.
*
* <p>The following describes how Lucene scoring evolves from
* underlying information retrieval models to (efficient) implementation.
* We first brief on <i>VSM Score</i>,
* then derive from it <i>Lucene's Conceptual Scoring Formula</i>,
* from which, finally, evolves <i>Lucene's Practical Scoring Function</i>
* (the latter is connected directly with Lucene classes and methods).
*
* <p>Lucene combines
* <a href="http://en.wikipedia.org/wiki/Standard_Boolean_model">
* Boolean model (BM) of Information Retrieval</a>
* with
* <a href="http://en.wikipedia.org/wiki/Vector_Space_Model">
* Vector Space Model (VSM) of Information Retrieval</a> -
* documents "approved" by BM are scored by VSM.
*
* <p>In VSM, documents and queries are represented as
* weighted vectors in a multi-dimensional space,
* where each distinct index term is a dimension,
* and weights are
* <a href="http://en.wikipedia.org/wiki/Tfidf">Tf-idf</a> values.
*
* <p>VSM does not require weights to be <i>Tf-idf</i> values,
* but <i>Tf-idf</i> values are believed to produce search results of high quality,
* and so Lucene is using <i>Tf-idf</i>.
* <i>Tf</i> and <i>Idf</i> are described in more detail below,
* but for now, for completion, let's just say that
* for given term <i>t</i> and document (or query) <i>x</i>,
* <i>Tf(t,x)</i> varies with the number of occurrences of term <i>t</i> in <i>x</i>
* (when one increases so does the other) and
* <i>idf(t)</i> similarly varies with the inverse of the
* number of index documents containing term <i>t</i>.
*
* <p><i>VSM score</i> of document <i>d</i> for query <i>q</i> is the
* <a href="http://en.wikipedia.org/wiki/Cosine_similarity">
* Cosine Similarity</a>
* of the weighted query vectors <i>V(q)</i> and <i>V(d)</i>:
*
* <br> <br>
* <table cellpadding="2" cellspacing="2" border="0" align="center" style="width:auto">
* <tr><td>
* <table cellpadding="1" cellspacing="0" border="1" align="center">
* <tr><td>
* <table cellpadding="2" cellspacing="2" border="0" align="center">
* <tr>
* <td valign="middle" align="right" rowspan="1">
* cosine-similarity(q,d) =
* </td>
* <td valign="middle" align="center">
* <table>
* <tr><td align="center" style="text-align: center"><small>V(q) · V(d)</small></td></tr>
* <tr><td align="center" style="text-align: center">–––––––––</td></tr>
* <tr><td align="center" style="text-align: center"><small>|V(q)| |V(d)|</small></td></tr>
* </table>
* </td>
* </tr>
* </table>
* </td></tr>
* </table>
* </td></tr>
* <tr><td>
* <center><font size=-1><u>VSM Score</u></font></center>
* </td></tr>
* </table>
* <br> <br>
*
*
* Where <i>V(q)</i> · <i>V(d)</i> is the
* <a href="http://en.wikipedia.org/wiki/Dot_product">dot product</a>
* of the weighted vectors,
* and <i>|V(q)|</i> and <i>|V(d)|</i> are their
* <a href="http://en.wikipedia.org/wiki/Euclidean_norm#Euclidean_norm">Euclidean norms</a>.
*
* <p>Note: the above equation can be viewed as the dot product of
* the normalized weighted vectors, in the sense that dividing
* <i>V(q)</i> by its euclidean norm is normalizing it to a unit vector.
*
* <p>Lucene refines <i>VSM score</i> for both search quality and usability:
* <ul>
* <li>Normalizing <i>V(d)</i> to the unit vector is known to be problematic in that
* it removes all document length information.
* For some documents removing this info is probably ok,
* e.g. a document made by duplicating a certain paragraph <i>10</i> times,
* especially if that paragraph is made of distinct terms.
* But for a document which contains no duplicated paragraphs,
* this might be wrong.
* To avoid this problem, a different document length normalization
* factor is used, which normalizes to a vector equal to or larger
* than the unit vector: <i>doc-len-norm(d)</i>.
* </li>
*
* <li>At indexing, users can specify that certain documents are more
* important than others, by assigning a document boost.
* For this, the score of each document is also multiplied by its boost value
* <i>doc-boost(d)</i>.
* </li>
*
* <li>Lucene is field based, hence each query term applies to a single
* field, document length normalization is by the length of the certain field,
* and in addition to document boost there are also document fields boosts.
* </li>
*
* <li>The same field can be added to a document during indexing several times,
* and so the boost of that field is the multiplication of the boosts of
* the separate additions (or parts) of that field within the document.
* </li>
*
* <li>At search time users can specify boosts to each query, sub-query, and
* each query term, hence the contribution of a query term to the score of
* a document is multiplied by the boost of that query term <i>query-boost(q)</i>.
* </li>
*
* <li>A document may match a multi term query without containing all
* the terms of that query (this is correct for some of the queries),
* and users can further reward documents matching more query terms
* through a coordination factor, which is usually larger when
* more terms are matched: <i>coord-factor(q,d)</i>.
* </li>
* </ul>
*
* <p>Under the simplifying assumption of a single field in the index,
* we get <i>Lucene's Conceptual scoring formula</i>:
*
* <br> <br>
* <table cellpadding="2" cellspacing="2" border="0" align="center" style="width:auto">
* <tr><td>
* <table cellpadding="1" cellspacing="0" border="1" align="center">
* <tr><td>
* <table cellpadding="2" cellspacing="2" border="0" align="center">
* <tr>
* <td valign="middle" align="right" rowspan="1">
* score(q,d) =
* <font color="#FF9933">coord-factor(q,d)</font> ·
* <font color="#CCCC00">query-boost(q)</font> ·
* </td>
* <td valign="middle" align="center">
* <table>
* <tr><td align="center" style="text-align: center"><small><font color="#993399">V(q) · V(d)</font></small></td></tr>
* <tr><td align="center" style="text-align: center">–––––––––</td></tr>
* <tr><td align="center" style="text-align: center"><small><font color="#FF33CC">|V(q)|</font></small></td></tr>
* </table>
* </td>
* <td valign="middle" align="right" rowspan="1">
* · <font color="#3399FF">doc-len-norm(d)</font>
* · <font color="#3399FF">doc-boost(d)</font>
* </td>
* </tr>
* </table>
* </td></tr>
* </table>
* </td></tr>
* <tr><td>
* <center><font size=-1><u>Lucene Conceptual Scoring Formula</u></font></center>
* </td></tr>
* </table>
* <br> <br>
*
* <p>The conceptual formula is a simplification in the sense that (1) terms and documents
* are fielded and (2) boosts are usually per query term rather than per query.
*
* <p>We now describe how Lucene implements this conceptual scoring formula, and
* derive from it <i>Lucene's Practical Scoring Function</i>.
*
* <p>For efficient score computation some scoring components
* are computed and aggregated in advance:
*
* <ul>
* <li><i>Query-boost</i> for the query (actually for each query term)
* is known when search starts.
* </li>
*
* <li>Query Euclidean norm <i>|V(q)|</i> can be computed when search starts,
* as it is independent of the document being scored.
* From search optimization perspective, it is a valid question
* why bother to normalize the query at all, because all
* scored documents will be multiplied by the same <i>|V(q)|</i>,
* and hence documents ranks (their order by score) will not
* be affected by this normalization.
* There are two good reasons to keep this normalization:
* <ul>
* <li>Recall that
* <a href="http://en.wikipedia.org/wiki/Cosine_similarity">
* Cosine Similarity</a> can be used find how similar
* two documents are. One can use Lucene for e.g.
* clustering, and use a document as a query to compute
* its similarity to other documents.
* In this use case it is important that the score of document <i>d3</i>
* for query <i>d1</i> is comparable to the score of document <i>d3</i>
* for query <i>d2</i>. In other words, scores of a document for two
* distinct queries should be comparable.
* There are other applications that may require this.
* And this is exactly what normalizing the query vector <i>V(q)</i>
* provides: comparability (to a certain extent) of two or more queries.
* </li>
*
* <li>Applying query normalization on the scores helps to keep the
* scores around the unit vector, hence preventing loss of score data
* because of floating point precision limitations.
* </li>
* </ul>
* </li>
*
* <li>Document length norm <i>doc-len-norm(d)</i> and document
* boost <i>doc-boost(d)</i> are known at indexing time.
* They are computed in advance and their multiplication
* is saved as a single value in the index: <i>norm(d)</i>.
* (In the equations below, <i>norm(t in d)</i> means <i>norm(field(t) in doc d)</i>
* where <i>field(t)</i> is the field associated with term <i>t</i>.)
* </li>
* </ul>
*
* <p><i>Lucene's Practical Scoring Function</i> is derived from the above.
* The color codes demonstrate how it relates
* to those of the <i>conceptual</i> formula:
*
* <P>
* <table cellpadding="2" cellspacing="2" border="0" align="center" style="width:auto">
* <tr><td>
* <table cellpadding="" cellspacing="2" border="2" align="center">
* <tr><td>
* <table cellpadding="2" cellspacing="2" border="0" align="center">
* <tr>
* <td valign="middle" align="right" rowspan="1">
* score(q,d) =
* <A HREF="#formula_coord"><font color="#FF9933">coord(q,d)</font></A> ·
* <A HREF="#formula_queryNorm"><font color="#FF33CC">queryNorm(q)</font></A> ·
* </td>
* <td valign="bottom" align="center" rowspan="1" style="text-align: center">
* <big><big><big>∑</big></big></big>
* </td>
* <td valign="middle" align="right" rowspan="1">
* <big><big>(</big></big>
* <A HREF="#formula_tf"><font color="#993399">tf(t in d)</font></A> ·
* <A HREF="#formula_idf"><font color="#993399">idf(t)</font></A><sup>2</sup> ·
* <A HREF="#formula_termBoost"><font color="#CCCC00">t.getBoost()</font></A> ·
* <A HREF="#formula_norm"><font color="#3399FF">norm(t,d)</font></A>
* <big><big>)</big></big>
* </td>
* </tr>
* <tr valigh="top">
* <td></td>
* <td align="center" style="text-align: center"><small>t in q</small></td>
* <td></td>
* </tr>
* </table>
* </td></tr>
* </table>
* </td></tr>
* <tr><td>
* <center><font size=-1><u>Lucene Practical Scoring Function</u></font></center>
* </td></tr>
* </table>
*
* <p> where
* <ol>
* <li>
* <A NAME="formula_tf"></A>
* <b><i>tf(t in d)</i></b>
* correlates to the term's <i>frequency</i>,
* defined as the number of times term <i>t</i> appears in the currently scored document <i>d</i>.
* Documents that have more occurrences of a given term receive a higher score.
* Note that <i>tf(t in q)</i> is assumed to be <i>1</i> and therefore it does not appear in this equation,
* However if a query contains twice the same term, there will be
* two term-queries with that same term and hence the computation would still be correct (although
* not very efficient).
* The default computation for <i>tf(t in d)</i> in
* {@link org.apache.lucene.search.similarities.DefaultSimilarity#tf(float) DefaultSimilarity} is:
*
* <br> <br>
* <table cellpadding="2" cellspacing="2" border="0" align="center" style="width:auto">
* <tr>
* <td valign="middle" align="right" rowspan="1">
* {@link org.apache.lucene.search.similarities.DefaultSimilarity#tf(float) tf(t in d)} =
* </td>
* <td valign="top" align="center" rowspan="1">
* frequency<sup><big>½</big></sup>
* </td>
* </tr>
* </table>
* <br> <br>
* </li>
*
* <li>
* <A NAME="formula_idf"></A>
* <b><i>idf(t)</i></b> stands for Inverse Document Frequency. This value
* correlates to the inverse of <i>docFreq</i>
* (the number of documents in which the term <i>t</i> appears).
* This means rarer terms give higher contribution to the total score.
* <i>idf(t)</i> appears for <i>t</i> in both the query and the document,
* hence it is squared in the equation.
* The default computation for <i>idf(t)</i> in
* {@link org.apache.lucene.search.similarities.DefaultSimilarity#idf(long, long) DefaultSimilarity} is:
*
* <br> <br>
* <table cellpadding="2" cellspacing="2" border="0" align="center" style="width:auto">
* <tr>
* <td valign="middle" align="right">
* {@link org.apache.lucene.search.similarities.DefaultSimilarity#idf(long, long) idf(t)} =
* </td>
* <td valign="middle" align="center">
* 1 + log <big>(</big>
* </td>
* <td valign="middle" align="center">
* <table>
* <tr><td align="center" style="text-align: center"><small>numDocs</small></td></tr>
* <tr><td align="center" style="text-align: center">–––––––––</td></tr>
* <tr><td align="center" style="text-align: center"><small>docFreq+1</small></td></tr>
* </table>
* </td>
* <td valign="middle" align="center">
* <big>)</big>
* </td>
* </tr>
* </table>
* <br> <br>
* </li>
*
* <li>
* <A NAME="formula_coord"></A>
* <b><i>coord(q,d)</i></b>
* is a score factor based on how many of the query terms are found in the specified document.
* Typically, a document that contains more of the query's terms will receive a higher score
* than another document with fewer query terms.
* This is a search time factor computed in
* {@link #coord(int, int) coord(q,d)}
* by the Similarity in effect at search time.
* <br> <br>
* </li>
*
* <li><b>
* <A NAME="formula_queryNorm"></A>
* <i>queryNorm(q)</i>
* </b>
* is a normalizing factor used to make scores between queries comparable.
* This factor does not affect document ranking (since all ranked documents are multiplied by the same factor),
* but rather just attempts to make scores from different queries (or even different indexes) comparable.
* This is a search time factor computed by the Similarity in effect at search time.
*
* The default computation in
* {@link org.apache.lucene.search.similarities.DefaultSimilarity#queryNorm(float) DefaultSimilarity}
* produces a <a href="http://en.wikipedia.org/wiki/Euclidean_norm#Euclidean_norm">Euclidean norm</a>:
* <br> <br>
* <table cellpadding="1" cellspacing="0" border="0" align="center" style="width:auto">
* <tr>
* <td valign="middle" align="right" rowspan="1">
* queryNorm(q) =
* {@link org.apache.lucene.search.similarities.DefaultSimilarity#queryNorm(float) queryNorm(sumOfSquaredWeights)}
* =
* </td>
* <td valign="middle" align="center" rowspan="1">
* <table>
* <tr><td align="center" style="text-align: center"><big>1</big></td></tr>
* <tr><td align="center" style="text-align: center"><big>
* ––––––––––––––
* </big></td></tr>
* <tr><td align="center" style="text-align: center">sumOfSquaredWeights<sup><big>½</big></sup></td></tr>
* </table>
* </td>
* </tr>
* </table>
* <br> <br>
*
* The sum of squared weights (of the query terms) is
* computed by the query {@link org.apache.lucene.search.Weight} object.
* For example, a {@link org.apache.lucene.search.BooleanQuery}
* computes this value as:
*
* <br> <br>
* <table cellpadding="1" cellspacing="0" border="0" align="center" style="width:auto">
* <tr>
* <td valign="middle" align="right" rowspan="1">
* {@link org.apache.lucene.search.Weight#getValueForNormalization() sumOfSquaredWeights} =
* {@link org.apache.lucene.search.Query#getBoost() q.getBoost()} <sup><big>2</big></sup>
* ·
* </td>
* <td valign="bottom" align="center" rowspan="1" style="text-align: center">
* <big><big><big>∑</big></big></big>
* </td>
* <td valign="middle" align="right" rowspan="1">
* <big><big>(</big></big>
* <A HREF="#formula_idf">idf(t)</A> ·
* <A HREF="#formula_termBoost">t.getBoost()</A>
* <big><big>) <sup>2</sup> </big></big>
* </td>
* </tr>
* <tr valigh="top">
* <td></td>
* <td align="center" style="text-align: center"><small>t in q</small></td>
* <td></td>
* </tr>
* </table>
* <br> <br>
*
* </li>
*
* <li>
* <A NAME="formula_termBoost"></A>
* <b><i>t.getBoost()</i></b>
* is a search time boost of term <i>t</i> in the query <i>q</i> as
* specified in the query text
* (see <A HREF="{@docRoot}/../queryparser/org/apache/lucene/queryparser/classic/package-summary.html#Boosting_a_Term">query syntax</A>),
* or as set by application calls to
* {@link org.apache.lucene.search.Query#setBoost(float) setBoost()}.
* Notice that there is really no direct API for accessing a boost of one term in a multi term query,
* but rather multi terms are represented in a query as multi
* {@link org.apache.lucene.search.TermQuery TermQuery} objects,
* and so the boost of a term in the query is accessible by calling the sub-query
* {@link org.apache.lucene.search.Query#getBoost() getBoost()}.
* <br> <br>
* </li>
*
* <li>
* <A NAME="formula_norm"></A>
* <b><i>norm(t,d)</i></b> encapsulates a few (indexing time) boost and length factors:
*
* <ul>
* <li><b>Field boost</b> - set by calling
* {@link org.apache.lucene.document.Field#setBoost(float) field.setBoost()}
* before adding the field to a document.
* </li>
* <li><b>lengthNorm</b> - computed
* when the document is added to the index in accordance with the number of tokens
* of this field in the document, so that shorter fields contribute more to the score.
* LengthNorm is computed by the Similarity class in effect at indexing.
* </li>
* </ul>
* The {@link #computeNorm} method is responsible for
* combining all of these factors into a single float.
*
* <p>
* When a document is added to the index, all the above factors are multiplied.
* If the document has multiple fields with the same name, all their boosts are multiplied together:
*
* <br> <br>
* <table cellpadding="1" cellspacing="0" border="0" align="center" style="width:auto">
* <tr>
* <td valign="middle" align="right" rowspan="1">
* norm(t,d) =
* lengthNorm
* ·
* </td>
* <td valign="bottom" align="center" rowspan="1" style="text-align: center">
* <big><big><big>∏</big></big></big>
* </td>
* <td valign="middle" align="right" rowspan="1">
* {@link org.apache.lucene.index.IndexableField#boost() f.boost}()
* </td>
* </tr>
* <tr valigh="top">
* <td></td>
* <td align="center" style="text-align: center"><small>field <i><b>f</b></i> in <i>d</i> named as <i><b>t</b></i></small></td>
* <td></td>
* </tr>
* </table>
* Note that search time is too late to modify this <i>norm</i> part of scoring,
* e.g. by using a different {@link Similarity} for search.
* </li>
* </ol>
*
* @see org.apache.lucene.index.IndexWriterConfig#setSimilarity(Similarity)
* @see IndexSearcher#setSimilarity(Similarity)
*/
public abstract class TFIDFSimilarity extends Similarity {

  /**
   * Sole constructor. (For invocation by subclass
   * constructors, typically implicit.)
   */
  public TFIDFSimilarity() {}

  /** Computes a score factor based on the fraction of all query terms that a
   * document contains. This value is multiplied into scores.
   *
   * <p>The presence of a large portion of the query terms indicates a better
   * match with the query, so implementations of this method usually return
   * larger values when the ratio between these parameters is large and smaller
   * values when the ratio between them is small.
   *
   * @param overlap the number of query terms matched in the document
   * @param maxOverlap the total number of terms in the query
   * @return a score factor based on term overlap with the query
   */
  @Override
  public abstract float coord(int overlap, int maxOverlap);

  /** Computes the normalization value for a query given the sum of the squared
   * weights of each of the query terms. This value is multiplied into the
   * weight of each query term. While the classic query normalization factor is
   * computed as 1/sqrt(sumOfSquaredWeights), other implementations might
   * completely ignore sumOfSquaredWeights (ie return 1).
   *
   * <p>This does not affect ranking, but the default implementation does make scores
   * from different queries more comparable than they would be by eliminating the
   * magnitude of the Query vector as a factor in the score.
   *
   * @param sumOfSquaredWeights the sum of the squares of query term weights
   * @return a normalization factor for query weights
   */
  @Override
  public abstract float queryNorm(float sumOfSquaredWeights);

  /** Computes a score factor based on a term or phrase's frequency in a
   * document. This value is multiplied by the {@link #idf(long, long)}
   * factor for each term in the query and these products are then summed to
   * form the initial score for a document.
   *
   * <p>Terms and phrases repeated in a document indicate the topic of the
   * document, so implementations of this method usually return larger values
   * when <code>freq</code> is large, and smaller values when <code>freq</code>
   * is small.
   *
   * @param freq the frequency of a term within a document
   * @return a score factor based on a term's within-document frequency
   */
  public abstract float tf(float freq);

  /**
   * Computes a score factor for a simple term and returns an explanation
   * for that score factor.
   *
   * <p>
   * The default implementation uses:
   *
   * <pre class="prettyprint">
   * idf(docFreq, searcher.maxDoc());
   * </pre>
   *
   * Note that {@link CollectionStatistics#maxDoc()} is used instead of
   * {@link org.apache.lucene.index.IndexReader#numDocs() IndexReader#numDocs()} because also
   * {@link TermStatistics#docFreq()} is used, and when the latter
   * is inaccurate, so is {@link CollectionStatistics#maxDoc()}, and in the same direction.
   * In addition, {@link CollectionStatistics#maxDoc()} is more efficient to compute
   *
   * @param collectionStats collection-level statistics
   * @param termStats term-level statistics for the term
   * @return an Explain object that includes both an idf score factor
             and an explanation for the term.
   */
  public Explanation idfExplain(CollectionStatistics collectionStats, TermStatistics termStats) {
    final long df = termStats.docFreq();
    final long max = collectionStats.maxDoc();
    final float idf = idf(df, max);
    return new Explanation(idf, "idf(docFreq=" + df + ", maxDocs=" + max + ")");
  }

  /**
   * Computes a score factor for a phrase.
   *
   * <p>
   * The default implementation sums the idf factor for
   * each term in the phrase.
   *
   * @param collectionStats collection-level statistics
   * @param termStats term-level statistics for the terms in the phrase
   * @return an Explain object that includes both an idf
   *         score factor for the phrase and an explanation
   *         for each term.
   */
  public Explanation idfExplain(CollectionStatistics collectionStats, TermStatistics termStats[]) {
    final long max = collectionStats.maxDoc();
    float idf = 0.0f;
    final Explanation exp = new Explanation();
    exp.setDescription("idf(), sum of:");
    // Accumulate the per-term idf values; each one also becomes a detail of the explanation.
    for (final TermStatistics stat : termStats ) {
      final long df = stat.docFreq();
      final float termIdf = idf(df, max);
      exp.addDetail(new Explanation(termIdf, "idf(docFreq=" + df + ", maxDocs=" + max + ")"));
      idf += termIdf;
    }
    exp.setValue(idf);
    return exp;
  }

  /** Computes a score factor based on a term's document frequency (the number
   * of documents which contain the term). This value is multiplied by the
   * {@link #tf(float)} factor for each term in the query and these products are
   * then summed to form the initial score for a document.
   *
   * <p>Terms that occur in fewer documents are better indicators of topic, so
   * implementations of this method usually return larger values for rare terms,
   * and smaller values for common terms.
   *
   * @param docFreq the number of documents which contain the term
   * @param numDocs the total number of documents in the collection
   * @return a score factor based on the term's document frequency
   */
  public abstract float idf(long docFreq, long numDocs);

  /**
   * Compute an index-time normalization value for this field instance.
   * <p>
   * This value will be stored in a single byte lossy representation by
   * {@link #encodeNormValue(float)}.
   *
   * @param state statistics of the current field (such as length, boost, etc)
   * @return an index-time normalization value
   */
  public abstract float lengthNorm(FieldInvertState state);

  @Override
  public final long computeNorm(FieldInvertState state) {
    // Index-time hook: compute the length norm and store it in its (lossy) encoded form.
    float normValue = lengthNorm(state);
    return encodeNormValue(normValue);
  }

  /**
   * Decodes a normalization factor stored in an index.
   *
   * @see #encodeNormValue(float)
   */
  public abstract float decodeNormValue(long norm);

  /** Encodes a normalization factor for storage in an index. */
  public abstract long encodeNormValue(float f);

  /** Computes the amount of a sloppy phrase match, based on an edit distance.
   * This value is summed for each sloppy phrase match in a document to form
   * the frequency to be used in scoring instead of the exact term count.
   *
   * <p>A phrase match with a small edit distance to a document passage more
   * closely matches the document, so implementations of this method usually
   * return larger values when the edit distance is small and smaller values
   * when it is large.
   *
   * @see PhraseQuery#setSlop(int)
   * @param distance the edit distance of this sloppy phrase match
   * @return the frequency increment for this match
   */
  public abstract float sloppyFreq(int distance);

  /**
   * Calculate a scoring factor based on the data in the payload. Implementations
   * are responsible for interpreting what is in the payload. Lucene makes no assumptions about
   * what is in the byte array.
   *
   * @param doc The docId currently being scored.
   * @param start The start position of the payload
   * @param end The end position of the payload
   * @param payload The payload byte array to be scored
   * @return An implementation dependent float to be used as a scoring factor
   */
  public abstract float scorePayload(int doc, int start, int end, BytesRef payload);

  @Override
  public final SimWeight computeWeight(float queryBoost, CollectionStatistics collectionStats, TermStatistics... termStats) {
    // Single term -> simple idf explanation; multiple terms (phrase) -> summed idf explanation.
    final Explanation idf = termStats.length == 1
    ? idfExplain(collectionStats, termStats[0])
    : idfExplain(collectionStats, termStats);
    return new IDFStats(collectionStats.field(), idf, queryBoost);
  }

  @Override
  public final SimScorer simScorer(SimWeight stats, AtomicReaderContext context) throws IOException {
    // Bind the query-level stats to this segment's norms (may be null if the field has no norms).
    IDFStats idfstats = (IDFStats) stats;
    return new TFIDFSimScorer(idfstats, context.reader().getNormValues(idfstats.field));
  }

  /** Per-segment scorer: combines the precomputed query weight with tf and the field's norms. */
  private final class TFIDFSimScorer extends SimScorer {
    private final IDFStats stats;
    private final float weightValue;
    private final NumericDocValues norms;

    TFIDFSimScorer(IDFStats stats, NumericDocValues norms) throws IOException {
      this.stats = stats;
      this.weightValue = stats.value;
      this.norms = norms;
    }

    @Override
    public float score(int doc, float freq) {
      final float raw = tf(freq) * weightValue; // compute tf(f)*weight
      return norms == null ? raw : raw * decodeNormValue(norms.get(doc));  // normalize for field
    }

    @Override
    public float computeSlopFactor(int distance) {
      return sloppyFreq(distance);
    }

    @Override
    public float computePayloadFactor(int doc, int start, int end, BytesRef payload) {
      return scorePayload(doc, start, end, payload);
    }

    @Override
    public Explanation explain(int doc, Explanation freq) {
      return explainScore(doc, freq, stats, norms);
    }
  }

  /** Collection statistics for the TF-IDF model. The only statistic of interest
   * to this model is idf. */
  private static class IDFStats extends SimWeight {
    private final String field;
    /** The idf and its explanation */
    private final Explanation idf;
    // Combined query normalization factor (queryNorm * topLevelBoost), set by normalize().
    private float queryNorm;
    // idf * queryBoost, later multiplied by queryNorm in normalize().
    private float queryWeight;
    private final float queryBoost;
    // Final per-term weight folded into every document score (queryWeight * idf).
    private float value;

    public IDFStats(String field, Explanation idf, float queryBoost) {
      // TODO: Validate?
      this.field = field;
      this.idf = idf;
      this.queryBoost = queryBoost;
      this.queryWeight = idf.getValue() * queryBoost; // compute query weight
    }

    @Override
    public float getValueForNormalization() {
      // TODO: (sorta LUCENE-1907) make non-static class and expose this squaring via a nice method to subclasses?
      return queryWeight * queryWeight;  // sum of squared weights
    }

    @Override
    public void normalize(float queryNorm, float topLevelBoost) {
      this.queryNorm = queryNorm * topLevelBoost;
      queryWeight *= this.queryNorm;              // normalize query weight
      value = queryWeight * idf.getValue();         // idf for document
    }
  }

  /**
   * Builds the full Explanation tree for one document's score:
   * queryWeight (boost * idf * queryNorm) times fieldWeight (tf * idf * fieldNorm).
   */
  private Explanation explainScore(int doc, Explanation freq, IDFStats stats, NumericDocValues norms) {
    Explanation result = new Explanation();
    result.setDescription("score(doc="+doc+",freq="+freq+"), product of:");

    // explain query weight
    Explanation queryExpl = new Explanation();
    queryExpl.setDescription("queryWeight, product of:");

    Explanation boostExpl = new Explanation(stats.queryBoost, "boost");
    if (stats.queryBoost != 1.0f)
      queryExpl.addDetail(boostExpl);
    queryExpl.addDetail(stats.idf);

    Explanation queryNormExpl = new Explanation(stats.queryNorm,"queryNorm");
    queryExpl.addDetail(queryNormExpl);

    queryExpl.setValue(boostExpl.getValue() *
                       stats.idf.getValue() *
                       queryNormExpl.getValue());

    result.addDetail(queryExpl);

    // explain field weight
    Explanation fieldExpl = new Explanation();
    fieldExpl.setDescription("fieldWeight in "+doc+
                             ", product of:");

    Explanation tfExplanation = new Explanation();
    tfExplanation.setValue(tf(freq.getValue()));
    tfExplanation.setDescription("tf(freq="+freq.getValue()+"), with freq of:");
    tfExplanation.addDetail(freq);
    fieldExpl.addDetail(tfExplanation);
    fieldExpl.addDetail(stats.idf);

    Explanation fieldNormExpl = new Explanation();
    // Fields without norms contribute a neutral factor of 1.
    float fieldNorm = norms != null ? decodeNormValue(norms.get(doc)) : 1.0f;
    fieldNormExpl.setValue(fieldNorm);
    fieldNormExpl.setDescription("fieldNorm(doc="+doc+")");
    fieldExpl.addDetail(fieldNormExpl);

    fieldExpl.setValue(tfExplanation.getValue() *
                       stats.idf.getValue() *
                       fieldNormExpl.getValue());

    result.addDetail(fieldExpl);

    // combine them
    result.setValue(queryExpl.getValue() * fieldExpl.getValue());

    // When queryWeight is exactly 1 the outer product adds no information, so return
    // the field-weight explanation directly.
    if (queryExpl.getValue() == 1.0f)
      return fieldExpl;

    return result;
  }
}
|
apache/pinot | 35,162 | pinot-core/src/test/java/org/apache/pinot/core/operator/transform/transformer/datetime/DateTimeConverterTest.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.pinot.core.operator.transform.transformer.datetime;
import java.util.ArrayList;
import java.util.List;
import org.testng.Assert;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
public class DateTimeConverterTest {
@SuppressWarnings("unchecked")
@Test(dataProvider = "testDateTimeConversion")
public void testDateTimeConversion(String inputFormat, String outputFormat, String outputGranularity, Object input,
Object expected) {
BaseDateTimeTransformer converter =
DateTimeTransformerFactory.getDateTimeTransformer(inputFormat, outputFormat, outputGranularity);
int length;
Object output;
if (expected instanceof long[]) {
length = ((long[]) expected).length;
output = new long[length];
} else {
length = ((String[]) expected).length;
output = new String[length];
}
converter.transform(input, output, length);
Assert.assertEquals(output, expected);
}
@Test(dataProvider = "testDateTimeConversion")
public void testDateTimeConversionWithBucketingTimeZone(String inputFormat, String outputFormat,
String outputGranularity, Object input, Object expected) {
BaseDateTimeTransformer converter =
DateTimeTransformerFactory.getDateTimeTransformer(inputFormat, outputFormat, outputGranularity);
int length;
Object output;
if (expected instanceof long[]) {
length = ((long[]) expected).length;
output = new long[length];
} else {
length = ((String[]) expected).length;
output = new String[length];
}
converter.transform(input, output, length);
Assert.assertEquals(output, expected);
}
@Test(dataProvider = "testConversionWithBucketTimeZone")
public void testConversionWithBucketTimeZone(String inputFormat, String outputFormat,
String outputGranularity, String bucketTimeZone, Object input, Object expected) {
BaseDateTimeTransformer converter =
DateTimeTransformerFactory.getDateTimeTransformer(inputFormat, outputFormat, outputGranularity, bucketTimeZone);
int length;
Object output;
if (expected instanceof long[]) {
length = ((long[]) expected).length;
output = new long[length];
} else {
length = ((String[]) expected).length;
output = new String[length];
}
converter.transform(input, output, length);
// cast to array, otherwise assert produces garbage error messages
if (expected instanceof long[]) {
Assert.assertEquals((long[]) output, (long[]) expected);
} else {
Assert.assertEquals((String[]) output, (String[]) expected);
}
}
  /**
   * Data provider for {@code testConversionWithBucketTimeZone}. Each entry is
   * {inputFormat, outputFormat, outputGranularity, bucketTimeZone, input, expected}, where
   * {@code input} and {@code expected} are parallel {@code long[]} or {@code String[]} arrays.
   * All cases bucket in the "CET" time zone, so expected values differ from the corresponding
   * default-bucketed cases in the {@code testDateTimeConversion} provider.
   * Inline timestamp comments give the human-readable instant for each epoch value.
   */
  @DataProvider(name = "testConversionWithBucketTimeZone")
  public Object[][] testConversionWithBucketTimeZone() {
    List<Object[]> entries = new ArrayList<>();
    /*************** Epoch to Epoch ***************/
    {
      // Test bucketing to 15 minutes
      long[] input = {
          1505898000000L /* 20170920T02:00:00 */, 1505898300000L /* 20170920T02:05:00 */, 1505898960000L
          /* 20170920T02:16:00 */
      };
      long[] expected = {
          1505898000000L /* 20170920T02:00:00 */, 1505898000000L /* 20170920T02:00:00 */, 1505898900000L
          /* 20170920T02:15:00 */
      };
      entries.add(new Object[]{"1:MILLISECONDS:EPOCH", "1:MILLISECONDS:EPOCH", "15:MINUTES", "CET", input, expected});
    }
    {
      // Test input which should create no change
      long[] input = {
          1505898000000L /* 20170920T02:00:00 */, 1505898300000L /* 20170920T02:05:00 */, 1505898960000L
          /* 20170920T02:16:00 */
      };
      long[] expected = {
          1505898000000L /* 20170920T02:00:00 */, 1505898300000L /* 20170920T02:05:00 */, 1505898960000L
          /* 20170920T02:16:00 */
      };
      entries.add(
          new Object[]{"1:MILLISECONDS:EPOCH", "1:MILLISECONDS:EPOCH", "1:MILLISECONDS", "CET", input, expected});
    }
    {
      // Test conversion from millis to hours
      long[] input = {
          1505898000000L /* 20170920T02:00:00 */, 1505898300000L /* 20170920T02:05:00 */, 1505902560000L
          /* 20170920T03:16:00 */
      };
      long[] expected =
          {418305L /* 20170920T02:00:00 */, 418305L /* 20170920T02:00:00 */, 418306L /* 20170920T03:00:00 */};
      entries.add(new Object[]{"1:MILLISECONDS:EPOCH", "1:HOURS:EPOCH", "1:HOURS", "CET", input, expected});
    }
    {
      // Test conversion from 5 minutes to hours
      long[] input =
          {5019660L /* 20170920T02:00:00 */, 5019661L /* 20170920T02:05:00 */, 5019675L /* 20170920T03:15:00 */};
      long[] expected =
          {418305L /* 20170920T02:00:00 */, 418305L /* 20170920T02:00:00 */, 418306L /* 20170920T03:00:00 */};
      entries.add(new Object[]{"5:MINUTES:EPOCH", "1:HOURS:EPOCH", "1:HOURS", "CET", input, expected});
    }
    {
      // Test conversion from 5 minutes to millis and bucketing to hours
      long[] input =
          {5019660L /* 20170920T02:00:00 */, 5019661L /* 20170920T02:05:00 */, 5019675L /* 20170920T03:15:00 */};
      long[] expected = {
          1505898000000L /* 20170920T02:00:00 */, 1505898000000L /* 20170920T02:00:00 */, 1505901600000L
          /* 20170920T03:00:00 */
      };
      entries.add(new Object[]{"5:MINUTES:EPOCH", "1:MILLISECONDS:EPOCH", "1:HOURS", "CET", input, expected});
    }
    {
      // Test conversion to non-java time unit WEEKS
      long[] input = {
          1505898000000L /* 20170920T02:00:00 */, 1505199600000L /* 20170912T00:00:00 */, 1504257300000L
          /* 20170901T00:20:00 */
      };
      long[] expected = {2489L, 2488L, 2487L};
      entries.add(new Object[]{"1:MILLISECONDS:EPOCH", "1:WEEKS:EPOCH", "1:MILLISECONDS", "CET", input, expected});
    }
    /*************** Epoch to SDF ***************/
    {
      // Test conversion from millis since epoch to simple date format (UTC)
      long[] input = {
          1505890800000L /* 20170920T00:00:00 */, 1505962800000L /* 20170920T20:00:00 */, 1505985360000L
          /* 20170921T02:16:00 */
      };
      String[] expected = {"20170919", "20170920", "20170920"};
      entries
          .add(
              new Object[]{
                  "1:MILLISECONDS:EPOCH", "1:DAYS:SIMPLE_DATE_FORMAT:yyyyMMdd", "1:DAYS", "CET", input,
                  expected
              });
    }
    {
      // Test conversion from millis since epoch to simple date format (Pacific timezone)
      long[] input = {
          1505898000000L /* 20170920T02:00:00 */, 1505952000000L /* 20170920T17:00:00 */, 1505962800000L
          /* 20170920T20:00:00 */
      };
      String[] expected = {"20170919", "20170920", "20170920"};
      entries.add(new Object[]{
          "1:MILLISECONDS:EPOCH", "1:DAYS:SIMPLE_DATE_FORMAT:yyyyMMdd tz(America/Los_Angeles)", "1:DAYS", "CET",
          input,
          expected
      });
    }
    {
      // Test conversion from millis since epoch to simple date format (IST)
      long[] input = {
          1505898000000L /* 20170920T02:00:00 */, 1505941200000L /* 20170920T14:00:00 */, 1505962800000L
          /* 20170921T03:00:00 */
      };
      String[] expected = {"20170920", "20170920", "20170921"};
      entries.add(new Object[]{
          "1:MILLISECONDS:EPOCH", "1:DAYS:SIMPLE_DATE_FORMAT:yyyyMMdd tz(IST)", "1:DAYS", "CET", input, expected
      });
    }
    {
      // Test conversion from millis since epoch to simple date format (Pacific timezone)
      long[] input = {
          1505898000000L /* 20170920T02:00:00 */, 1505952000000L /* 20170920T17:00:00 */, 1505962800000L
          /* 20170920T20:00:00 */
      };
      String[] expected = {"2017092002", "2017092017", "2017092020"};
      entries.add(new Object[]{
          "1:MILLISECONDS:EPOCH", "1:DAYS:SIMPLE_DATE_FORMAT:yyyyMMddHH tz(America/Los_Angeles)", "1:HOURS", "CET",
          input,
          expected
      });
    }
    {
      // Test conversion from millis since epoch to simple date format (East Coast timezone)
      long[] input = {
          1505898000000L /* 20170920T02:00:00 */, 1505941200000L /* 20170920T14:00:00 */, 1505970000000L
          /* 20170920T22:00:00 */
      };
      String[] expected = {"2017092005", "2017092017", "2017092101"};
      entries.add(new Object[]{
          "1:MILLISECONDS:EPOCH", "1:DAYS:SIMPLE_DATE_FORMAT:yyyyMMddHH tz(America/New_York)", "1:HOURS", "CET",
          input,
          expected
      });
    }
    // additional granularity tests
    {
      // Test conversion from millis since epoch to simple date format (America/Denver timezone with 15 second
      // granularity)
      long[] input = {
          1523560598000L /* 20180412T19:16:38 */, 1523560589000L /* 20180412T19:16:29 */, 1523560632000L
          /* 20180412T19:17:12 */
      };
      String[] expected = {"2018-04-12 13:16:30.000", "2018-04-12 13:16:15.000", "2018-04-12 13:17:00.000"};
      entries.add(new Object[]{
          "1:MILLISECONDS:EPOCH", "1:MILLISECONDS:SIMPLE_DATE_FORMAT:yyyy-MM-dd HH:mm:ss.SSS tz(America/Denver)",
          "15:SECONDS", "CET", input, expected
      });
    }
    {
      // Test conversion from millis since epoch to simple date format (America/Denver timezone with 3 minute
      // granularity)
      long[] input = {
          1523560598000L /* 20180412T19:16:38 */, 1523560708000L /* 20180412T19:18:28 */, 1523561708000L
          /* 20180412T19:35:08 */
      };
      String[] expected = {"2018-04-12 13:15:00.000", "2018-04-12 13:18:00.000", "2018-04-12 13:33:00.000"};
      entries.add(new Object[]{
          "1:MILLISECONDS:EPOCH", "1:MILLISECONDS:SIMPLE_DATE_FORMAT:yyyy-MM-dd HH:mm:ss.SSS tz(America/Denver)",
          "3:MINUTES", "CET", input, expected
      });
    }
    {
      // Test conversion from millis since epoch to simple date format (America/Denver timezone with 12 hour
      // granularity)
      long[] input = {
          1523560598000L /* 20180412T19:16:38 */, 1523460502000L /* 20180411T15:28:22 */, 1523430205000L
          /* 20180411T07:03:25 */
      };
      String[] expected = {"2018-04-12 04:00:00.000", "2018-04-11 04:00:00.000", "2018-04-10 16:00:00.000"};
      entries.add(new Object[]{
          "1:MILLISECONDS:EPOCH", "1:MILLISECONDS:SIMPLE_DATE_FORMAT:yyyy-MM-dd HH:mm:ss.SSS tz(America/Denver)",
          "12:HOURS", "CET", input, expected
      });
    }
    {
      // Test conversion from millis since epoch to simple date format (America/Denver timezone with 5 day
      // granularity
      long[] input = {
          1523560598000L /* 20180412T19:16:38 */, 1524160502000L /* 20180419T17:55:02 */, 1522230205000L
          /* 20180328T09:43:25 */
      };
      String[] expected = {"2018-04-10 16:00:00.000", "2018-04-15 16:00:00.000", "2018-03-25 16:00:00.000"};
      entries.add(new Object[]{
          "1:MILLISECONDS:EPOCH", "1:MILLISECONDS:SIMPLE_DATE_FORMAT:yyyy-MM-dd HH:mm:ss.SSS tz(America/Denver)",
          "5:DAYS", "CET", input, expected
      });
    }
    {
      // Test conversion from millis since epoch to simple date format (America/Los_Angeles timezone with 1 day
      // granularity)
      long[] input = {1524045600000L /* 20180418T10:00:00 */, 1524013200000L /* 20180418T01:00:00 */};
      String[] expected = {"2018-04-17 15:00:00.000", "2018-04-17 15:00:00.000"};
      entries.add(new Object[]{
          "1:MILLISECONDS:EPOCH", "1:MILLISECONDS:SIMPLE_DATE_FORMAT:yyyy-MM-dd HH:mm:ss.SSS tz(America/Los_Angeles)",
          "1:DAYS", "CET", input, expected
      });
    }
    /*************** SDF to Epoch ***************/
    {
      // Test conversion from simple date format to millis since epoch
      String[] input =
          {"20170920" /* 20170920T00:00:00 */, "20170601" /* 20170601T00:00:00 */, "20170921" /* 20170921T00:00:00 */};
      long[] expected = {
          1505858400000L /* 20170920T00:00:00 */, 1496268000000L /* 20170601T00:00:00 */, 1505944800000L
          /* 20170921T00:00:00 */
      };
      entries.add(new Object[]{
          "1:DAYS:SIMPLE_DATE_FORMAT:yyyyMMdd", "1:MILLISECONDS:EPOCH", "1:DAYS", "CET", input, expected
      });
    }
    {
      // Test conversion from simple date format (East Coast timezone) to millis since epoch
      // Converted to
      String[] input =
          {"20170920" /* 20170920T00:00:00 */, "20170601" /* 20170601T00:00:00 */, "20170921" /* 20170921T00:00:00 */};
      long[] expected = {
          1505858400000L /* 20170920T00:00:00 */, 1496268000000L /* 20170601T00:00:00 */, 1505944800000L
          /* 20170921T00:00:00 */
      };
      entries.add(new Object[]{
          "1:DAYS:SIMPLE_DATE_FORMAT:yyyyMMdd tz(America/New_York)", "1:MILLISECONDS:EPOCH", "1:DAYS", "CET", input,
          expected
      });
    }
    {
      // Test conversion from simple date format (East Coast timezone) to millis since epoch
      // Converted to
      String[] input = {
          "2017092013" /* 20170920T00:00:00 */, "2017092001" /* 20170601T00:00:00 */, "2017092000"
          /* 20170921T00:00:00 */
      };
      long[] expected = {
          1505926800000L /* 20170920T13:00:00 Eastern */, 1505883600000L /* 20170920T01:00:00 Eastern */, 1505880000000L
          /* 20170920T00:00:00 Eastern */
      };
      entries.add(new Object[]{
          "1:DAYS:SIMPLE_DATE_FORMAT:yyyyMMddHH tz(America/New_York)", "1:MILLISECONDS:EPOCH", "1:HOURS", "CET", input,
          expected
      });
    }
    {
      // Test conversion from simple date format with special characters to millis since epoch
      String[] input = {"2017092013 America/New_York", "2017092004 Asia/Kolkata", "2017092000 America/Los_Angeles"};
      long[] expected = {
          1505926800000L /* 20170920T10:00:00 UTC */, 1505858400000L /* 20170919T22:00:00 UTC */, 1505890800000L
          /* 20170920T00:00:00 UTC */
      };
      entries.add(
          new Object[]{
              "1:DAYS:SIMPLE_DATE_FORMAT:yyyyMMddHH ZZZ", "1:MILLISECONDS:EPOCH", "1:HOURS", "CET", input,
              expected
          });
    }
    {
      // Test conversion from simple date format with special characters to millis since epoch
      String[] input = {"8/7/2017 1 AM", "12/27/2016 11 PM", "8/7/2017 12 AM", "8/7/2017 12 PM"};
      long[] expected = {1502067600000L, 1482879600000L, 1502064000000L, 1502107200000L};
      entries.add(
          new Object[]{
              "1:HOURS:SIMPLE_DATE_FORMAT:M/d/yyyy h a", "1:MILLISECONDS:EPOCH", "1:HOURS", "CET", input,
              expected
          });
    }
    {
      // Test conversion from simple date format with special characters to millis since epoch, with bucketing
      String[] input =
          {"8/7/2017 1:00:00 AM", "12/27/2016 11:20:00 PM", "8/7/2017 12:45:50 AM", "8/7/2017 12:00:01 PM"};
      long[] expected = {1502067600000L, 1482879600000L, 1502064000000L, 1502107200000L};
      entries.add(new Object[]{
          "1:SECONDS:SIMPLE_DATE_FORMAT:M/d/yyyy h:mm:ss a", "1:MILLISECONDS:EPOCH", "1:HOURS", "CET", input, expected
      });
    }
    {
      // Test conversion from simple date format with special characters to millis since epoch, without bucketing
      String[] input =
          {"8/7/2017 1:00:00 AM", "12/27/2016 11:20:00 PM", "8/7/2017 12:45:50 AM", "8/7/2017 12:00:01 PM"};
      long[] expected = {1502067600000L, 1482880800000L, 1502066750000L, 1502107201000L};
      entries.add(new Object[]{
          "1:DAYS:SIMPLE_DATE_FORMAT:M/d/yyyy h:mm:ss a", "1:MILLISECONDS:EPOCH", "1:MILLISECONDS", "CET", input,
          expected
      });
    }
    /*************** SDF to SDF ***************/
    {
      // Test conversion from simple date format to another simple date format
      String[] input = {"8/7/2017 1:00:00 AM", "12/27/2016 11:20:00 PM", "8/7/2017 12:45:50 AM"};
      String[] expected = {"20170807", "20161227", "20170807"};
      entries.add(new Object[]{
          "1:DAYS:SIMPLE_DATE_FORMAT:M/d/yyyy h:mm:ss a", "1:DAYS:SIMPLE_DATE_FORMAT:yyyyMMdd", "1:MILLISECONDS",
          "CET", input,
          expected
      });
    }
    {
      // Test conversion from simple date format with timezone to another simple date format
      String[] input = {"20170920 America/Chicago", "20170919 America/Los_Angeles", "20170921 Asia/Kolkata"};
      String[] expected = {"20170920", "20170919", "20170920"};
      entries.add(new Object[]{
          "1:DAYS:SIMPLE_DATE_FORMAT:yyyyMMdd ZZZ", "1:DAYS:SIMPLE_DATE_FORMAT:yyyyMMdd", "1:MILLISECONDS", "CET",
          input,
          expected
      });
    }
    {
      // Test conversion from simple date format with timezone to another simple date format with timezone
      String[] input = {"20170920 America/New_York", "20170919 America/Los_Angeles", "20170921 Asia/Kolkata"};
      String[] expected = {"20170919", "20170919", "20170920"};
      entries.add(new Object[]{
          "1:DAYS:SIMPLE_DATE_FORMAT:yyyyMMdd ZZZ", "1:DAYS:SIMPLE_DATE_FORMAT:yyyyMMdd tz(America/Chicago)",
          "1:MILLISECONDS", "CET", input, expected
      });
    }
    // additional granularity tests
    {
      // Test conversion from simple date format to another simple date format (America/Denver timezone with 15
      // second granularity)
      String[] input = {"20180412T19:16:38", "20180412T19:16:29", "20180412T19:17:12"};
      String[] expected = {"2018-04-12 13:16:30.000", "2018-04-12 13:16:15.000", "2018-04-12 13:17:00.000"};
      entries.add(new Object[]{
          "1:HOURS:SIMPLE_DATE_FORMAT:yyyyMMdd'T'HH:mm:ss",
          "1:MILLISECONDS:SIMPLE_DATE_FORMAT:yyyy-MM-dd HH:mm:ss.SSS tz(America/Denver)", "15:SECONDS", "CET", input,
          expected
      });
    }
    {
      // Test conversion from simple date format to another simple date format (America/Denver timezone with 5 day
      // granularity)
      String[] input = {"20180412T19:16:38", "20180419T17:55:02", "20180328T09:43:25"};
      String[] expected = {"2018-04-10 16:00:00.000", "2018-04-15 16:00:00.000", "2018-03-25 16:00:00.000"};
      entries.add(new Object[]{
          "1:HOURS:SIMPLE_DATE_FORMAT:yyyyMMdd'T'HH:mm:ss",
          "1:MILLISECONDS:SIMPLE_DATE_FORMAT:yyyy-MM-dd HH:mm:ss.SSS tz(America/Denver)", "5:DAYS", "CET", input,
          expected
      });
    }
    {
      // Test conversion from simple date format to another simple date format (America/Los_Angeles timezone with 1
      // day granularity)
      String[] input = {"20180418T10:00:00", "20180418T01:00:00"};
      String[] expected = {"2018-04-17 15:00:00.000", "2018-04-17 15:00:00.000"};
      entries.add(new Object[]{
          "1:HOURS:SIMPLE_DATE_FORMAT:yyyyMMdd'T'HH:mm:ss",
          "1:MILLISECONDS:SIMPLE_DATE_FORMAT:yyyy-MM-dd HH:mm:ss.SSS tz(America/Los_Angeles)", "1:DAYS", "CET",
          input, expected
      });
    }
    return entries.toArray(new Object[entries.size()][]);
  }
  /**
   * Data provider for {@code testDateTimeConversion} and
   * {@code testDateTimeConversionWithBucketingTimeZone}. Each entry is
   * {inputFormat, outputFormat, outputGranularity, input, expected}, where {@code input} and
   * {@code expected} are parallel {@code long[]} or {@code String[]} arrays. Unlike the
   * {@code testConversionWithBucketTimeZone} provider, no bucket time zone column is supplied.
   * Inline timestamp comments give the human-readable instant for each epoch value.
   */
  @DataProvider(name = "testDateTimeConversion")
  public Object[][] testDateTimeConversion() {
    List<Object[]> entries = new ArrayList<>();
    /*************** Epoch to Epoch ***************/
    {
      // Test bucketing to 15 minutes
      long[] input = {
          1505898000000L /* 20170920T02:00:00 */, 1505898300000L /* 20170920T02:05:00 */, 1505898960000L
          /* 20170920T02:16:00 */
      };
      long[] expected = {
          1505898000000L /* 20170920T02:00:00 */, 1505898000000L /* 20170920T02:00:00 */, 1505898900000L
          /* 20170920T02:15:00 */
      };
      entries.add(new Object[]{"1:MILLISECONDS:EPOCH", "1:MILLISECONDS:EPOCH", "15:MINUTES", input, expected});
    }
    {
      // Test input which should create no change
      long[] input = {
          1505898000000L /* 20170920T02:00:00 */, 1505898300000L /* 20170920T02:05:00 */, 1505898960000L
          /* 20170920T02:16:00 */
      };
      long[] expected = {
          1505898000000L /* 20170920T02:00:00 */, 1505898300000L /* 20170920T02:05:00 */, 1505898960000L
          /* 20170920T02:16:00 */
      };
      entries.add(new Object[]{"1:MILLISECONDS:EPOCH", "1:MILLISECONDS:EPOCH", "1:MILLISECONDS", input, expected});
    }
    {
      // Test conversion from millis to hours
      long[] input = {
          1505898000000L /* 20170920T02:00:00 */, 1505898300000L /* 20170920T02:05:00 */, 1505902560000L
          /* 20170920T03:16:00 */
      };
      long[] expected =
          {418305L /* 20170920T02:00:00 */, 418305L /* 20170920T02:00:00 */, 418306L /* 20170920T03:00:00 */};
      entries.add(new Object[]{"1:MILLISECONDS:EPOCH", "1:HOURS:EPOCH", "1:HOURS", input, expected});
    }
    {
      // Test conversion from 5 minutes to hours
      long[] input =
          {5019660L /* 20170920T02:00:00 */, 5019661L /* 20170920T02:05:00 */, 5019675L /* 20170920T03:15:00 */};
      long[] expected =
          {418305L /* 20170920T02:00:00 */, 418305L /* 20170920T02:00:00 */, 418306L /* 20170920T03:00:00 */};
      entries.add(new Object[]{"5:MINUTES:EPOCH", "1:HOURS:EPOCH", "1:HOURS", input, expected});
    }
    {
      // Test conversion from 5 minutes to millis and bucketing to hours
      long[] input =
          {5019660L /* 20170920T02:00:00 */, 5019661L /* 20170920T02:05:00 */, 5019675L /* 20170920T03:15:00 */};
      long[] expected = {
          1505898000000L /* 20170920T02:00:00 */, 1505898000000L /* 20170920T02:00:00 */, 1505901600000L
          /* 20170920T03:00:00 */
      };
      entries.add(new Object[]{"5:MINUTES:EPOCH", "1:MILLISECONDS:EPOCH", "1:HOURS", input, expected});
    }
    {
      // Test conversion to non-java time unit WEEKS
      long[] input = {
          1505898000000L /* 20170920T02:00:00 */, 1505199600000L /* 20170912T00:00:00 */, 1504257300000L
          /* 20170901T00:20:00 */
      };
      long[] expected = {2489L, 2488L, 2487L};
      entries.add(new Object[]{"1:MILLISECONDS:EPOCH", "1:WEEKS:EPOCH", "1:MILLISECONDS", input, expected});
    }
    /*************** Epoch to SDF ***************/
    {
      // Test conversion from millis since epoch to simple date format (UTC)
      long[] input = {
          1505890800000L /* 20170920T00:00:00 */, 1505962800000L /* 20170920T20:00:00 */, 1505985360000L
          /* 20170921T02:16:00 */
      };
      String[] expected = {"20170920", "20170921", "20170921"};
      entries
          .add(new Object[]{"1:MILLISECONDS:EPOCH", "1:DAYS:SIMPLE_DATE_FORMAT:yyyyMMdd", "1:DAYS", input, expected});
    }
    {
      // Test conversion from millis since epoch to simple date format (Pacific timezone)
      long[] input = {
          1505898000000L /* 20170920T02:00:00 */, 1505952000000L /* 20170920T17:00:00 */, 1505962800000L
          /* 20170920T20:00:00 */
      };
      String[] expected = {"20170920", "20170920", "20170920"};
      entries.add(new Object[]{
          "1:MILLISECONDS:EPOCH", "1:DAYS:SIMPLE_DATE_FORMAT:yyyyMMdd tz(America/Los_Angeles)", "1:DAYS", input,
          expected
      });
    }
    {
      // Test conversion from millis since epoch to simple date format (IST)
      long[] input = {
          1505898000000L /* 20170920T02:00:00 */, 1505941200000L /* 20170920T14:00:00 */, 1505962800000L
          /* 20170920T20:00:00 */
      };
      String[] expected = {"20170920", "20170921", "20170921"};
      entries.add(new Object[]{
          "1:MILLISECONDS:EPOCH", "1:DAYS:SIMPLE_DATE_FORMAT:yyyyMMdd tz(IST)", "1:DAYS", input, expected
      });
    }
    {
      // Test conversion from millis since epoch to simple date format (Pacific timezone)
      long[] input = {
          1505898000000L /* 20170920T02:00:00 */, 1505952000000L /* 20170920T17:00:00 */, 1505962800000L
          /* 20170920T20:00:00 */
      };
      String[] expected = {"2017092002", "2017092017", "2017092020"};
      entries.add(new Object[]{
          "1:MILLISECONDS:EPOCH", "1:DAYS:SIMPLE_DATE_FORMAT:yyyyMMddHH tz(America/Los_Angeles)", "1:HOURS", input,
          expected
      });
    }
    {
      // Test conversion from millis since epoch to simple date format (East Coast timezone)
      long[] input = {
          1505898000000L /* 20170920T02:00:00 */, 1505941200000L /* 20170920T14:00:00 */, 1505970000000L
          /* 20170920T22:00:00 */
      };
      String[] expected = {"2017092005", "2017092017", "2017092101"};
      entries.add(new Object[]{
          "1:MILLISECONDS:EPOCH", "1:DAYS:SIMPLE_DATE_FORMAT:yyyyMMddHH tz(America/New_York)", "1:HOURS", input,
          expected
      });
    }
    // additional granularity tests
    {
      // Test conversion from millis since epoch to simple date format (America/Denver timezone with 15 second
      // granularity)
      long[] input = {
          1523560598000L /* 20180412T19:16:38 */, 1523560589000L /* 20180412T19:16:29 */, 1523560632000L
          /* 20180412T19:17:12 */
      };
      String[] expected = {"2018-04-12 13:16:30.000", "2018-04-12 13:16:15.000", "2018-04-12 13:17:00.000"};
      entries.add(new Object[]{
          "1:MILLISECONDS:EPOCH", "1:MILLISECONDS:SIMPLE_DATE_FORMAT:yyyy-MM-dd HH:mm:ss.SSS tz(America/Denver)",
          "15:SECONDS", input, expected
      });
    }
    {
      // Test conversion from millis since epoch to simple date format (America/Denver timezone with 3 minute
      // granularity)
      long[] input = {
          1523560598000L /* 20180412T19:16:38 */, 1523560708000L /* 20180412T19:18:28 */, 1523561708000L
          /* 20180412T19:35:08 */
      };
      String[] expected = {"2018-04-12 13:15:00.000", "2018-04-12 13:18:00.000", "2018-04-12 13:33:00.000"};
      entries.add(new Object[]{
          "1:MILLISECONDS:EPOCH", "1:MILLISECONDS:SIMPLE_DATE_FORMAT:yyyy-MM-dd HH:mm:ss.SSS tz(America/Denver)",
          "3:MINUTES", input, expected
      });
    }
    {
      // Test conversion from millis since epoch to simple date format (America/Denver timezone with 12 hour
      // granularity)
      long[] input = {
          1523560598000L /* 20180412T19:16:38 */, 1523460502000L /* 20180411T15:28:22 */, 1523430205000L
          /* 20180411T07:03:25 */
      };
      String[] expected = {"2018-04-12 12:00:00.000", "2018-04-11 00:00:00.000", "2018-04-11 00:00:00.000"};
      entries.add(new Object[]{
          "1:MILLISECONDS:EPOCH", "1:MILLISECONDS:SIMPLE_DATE_FORMAT:yyyy-MM-dd HH:mm:ss.SSS tz(America/Denver)",
          "12:HOURS", input, expected
      });
    }
    {
      // Test conversion from millis since epoch to simple date format (America/Denver timezone with 5 day granularity)
      long[] input = {
          1523560598000L /* 20180412T19:16:38 */, 1524160502000L /* 20180419T17:55:02 */, 1522230205000L
          /* 20180328T09:43:25 */
      };
      String[] expected = {"2018-04-11 00:00:00.000", "2018-04-16 00:00:00.000", "2018-03-26 00:00:00.000"};
      entries.add(new Object[]{
          "1:MILLISECONDS:EPOCH", "1:MILLISECONDS:SIMPLE_DATE_FORMAT:yyyy-MM-dd HH:mm:ss.SSS tz(America/Denver)",
          "5:DAYS", input, expected
      });
    }
    {
      // Test conversion from millis since epoch to simple date format (America/Los_Angeles timezone with 1 day
      // granularity)
      long[] input = {1524045600000L /* 20180418T10:00:00 */, 1524013200000L /* 20180418T01:00:00 */};
      String[] expected = {"2018-04-18 00:00:00.000", "2018-04-17 00:00:00.000"};
      entries.add(new Object[]{
          "1:MILLISECONDS:EPOCH", "1:MILLISECONDS:SIMPLE_DATE_FORMAT:yyyy-MM-dd HH:mm:ss.SSS tz(America/Los_Angeles)",
          "1:DAYS", input, expected
      });
    }
    /*************** SDF to Epoch ***************/
    {
      // Test conversion from simple date format to millis since epoch
      String[] input =
          {"20170920" /* 20170920T00:00:00 */, "20170601" /* 20170601T00:00:00 */, "20170921" /* 20170921T00:00:00 */};
      long[] expected = {
          1505865600000L /* 20170920T00:00:00 */, 1496275200000L /* 20170601T00:00:00 */, 1505952000000L
          /* 20170921T00:00:00 */
      };
      entries
          .add(new Object[]{"1:DAYS:SIMPLE_DATE_FORMAT:yyyyMMdd", "1:MILLISECONDS:EPOCH", "1:DAYS", input, expected});
    }
    {
      // Test conversion from simple date format (East Coast timezone) to millis since epoch
      // Converted to
      String[] input =
          {"20170920" /* 20170920T00:00:00 */, "20170601" /* 20170601T00:00:00 */, "20170921" /* 20170921T00:00:00 */};
      long[] expected = {
          1505865600000L /* 20170920T00:00:00 */, 1496275200000L /* 20170601T00:00:00 */, 1505952000000L
          /* 20170921T00:00:00 */
      };
      entries.add(new Object[]{
          "1:DAYS:SIMPLE_DATE_FORMAT:yyyyMMdd tz(America/New_York)", "1:MILLISECONDS:EPOCH", "1:DAYS", input, expected
      });
    }
    {
      // Test conversion from simple date format (East Coast timezone) to millis since epoch
      // Converted to
      String[] input = {
          "2017092013" /* 20170920T00:00:00 */, "2017092001" /* 20170601T00:00:00 */, "2017092000"
          /* 20170921T00:00:00 */
      };
      long[] expected = {
          1505926800000L /* 20170920T13:00:00 Eastern */, 1505883600000L /* 20170920T01:00:00 Eastern */, 1505880000000L
          /* 20170920T00:00:00 Eastern */
      };
      entries.add(new Object[]{
          "1:DAYS:SIMPLE_DATE_FORMAT:yyyyMMddHH tz(America/New_York)", "1:MILLISECONDS:EPOCH", "1:HOURS", input,
          expected
      });
    }
    {
      // Test conversion from simple date format with special characters to millis since epoch
      String[] input = {"2017092013 America/New_York", "2017092004 Asia/Kolkata", "2017092000 America/Los_Angeles"};
      long[] expected = {
          1505926800000L /* 20170920T10:00:00 UTC */, 1505858400000L /* 20170919T22:00:00 UTC */, 1505890800000L
          /* 20170920T00:00:00 UTC */
      };
      entries.add(
          new Object[]{"1:DAYS:SIMPLE_DATE_FORMAT:yyyyMMddHH ZZZ", "1:MILLISECONDS:EPOCH", "1:HOURS", input, expected});
    }
    {
      // Test conversion from simple date format with special characters to millis since epoch
      String[] input = {"8/7/2017 1 AM", "12/27/2016 11 PM", "8/7/2017 12 AM", "8/7/2017 12 PM"};
      long[] expected = {1502067600000L, 1482879600000L, 1502064000000L, 1502107200000L};
      entries.add(
          new Object[]{"1:HOURS:SIMPLE_DATE_FORMAT:M/d/yyyy h a", "1:MILLISECONDS:EPOCH", "1:HOURS", input, expected});
    }
    {
      // Test conversion from simple date format with special characters to millis since epoch, with bucketing
      String[] input =
          {"8/7/2017 1:00:00 AM", "12/27/2016 11:20:00 PM", "8/7/2017 12:45:50 AM", "8/7/2017 12:00:01 PM"};
      long[] expected = {1502067600000L, 1482879600000L, 1502064000000L, 1502107200000L};
      entries.add(new Object[]{
          "1:SECONDS:SIMPLE_DATE_FORMAT:M/d/yyyy h:mm:ss a", "1:MILLISECONDS:EPOCH", "1:HOURS", input, expected
      });
    }
    {
      // Test conversion from simple date format with special characters to millis since epoch, without bucketing
      String[] input =
          {"8/7/2017 1:00:00 AM", "12/27/2016 11:20:00 PM", "8/7/2017 12:45:50 AM", "8/7/2017 12:00:01 PM"};
      long[] expected = {1502067600000L, 1482880800000L, 1502066750000L, 1502107201000L};
      entries.add(new Object[]{
          "1:DAYS:SIMPLE_DATE_FORMAT:M/d/yyyy h:mm:ss a", "1:MILLISECONDS:EPOCH", "1:MILLISECONDS", input, expected
      });
    }
    /*************** SDF to SDF ***************/
    {
      // Test conversion from simple date format to another simple date format
      String[] input = {"8/7/2017 1:00:00 AM", "12/27/2016 11:20:00 PM", "8/7/2017 12:45:50 AM"};
      String[] expected = {"20170807", "20161227", "20170807"};
      entries.add(new Object[]{
          "1:DAYS:SIMPLE_DATE_FORMAT:M/d/yyyy h:mm:ss a", "1:DAYS:SIMPLE_DATE_FORMAT:yyyyMMdd", "1:MILLISECONDS", input,
          expected
      });
    }
    {
      // Test conversion from simple date format with timezone to another simple date format
      String[] input = {"20170920 America/Chicago", "20170919 America/Los_Angeles", "20170921 Asia/Kolkata"};
      String[] expected = {"20170920", "20170919", "20170920"};
      entries.add(new Object[]{
          "1:DAYS:SIMPLE_DATE_FORMAT:yyyyMMdd ZZZ", "1:DAYS:SIMPLE_DATE_FORMAT:yyyyMMdd", "1:MILLISECONDS", input,
          expected
      });
    }
    {
      // Test conversion from simple date format with timezone to another simple date format with timezone
      String[] input = {"20170920 America/New_York", "20170919 America/Los_Angeles", "20170921 Asia/Kolkata"};
      String[] expected = {"20170919", "20170919", "20170920"};
      entries.add(new Object[]{
          "1:DAYS:SIMPLE_DATE_FORMAT:yyyyMMdd ZZZ", "1:DAYS:SIMPLE_DATE_FORMAT:yyyyMMdd tz(America/Chicago)",
          "1:MILLISECONDS", input, expected
      });
    }
    // additional granularity tests
    {
      // Test conversion from simple date format to another simple date format (America/Denver timezone with 15
      // second granularity)
      String[] input = {"20180412T19:16:38", "20180412T19:16:29", "20180412T19:17:12"};
      String[] expected = {"2018-04-12 13:16:30.000", "2018-04-12 13:16:15.000", "2018-04-12 13:17:00.000"};
      entries.add(new Object[]{
          "1:HOURS:SIMPLE_DATE_FORMAT:yyyyMMdd'T'HH:mm:ss",
          "1:MILLISECONDS:SIMPLE_DATE_FORMAT:yyyy-MM-dd HH:mm:ss.SSS tz(America/Denver)", "15:SECONDS", input, expected
      });
    }
    {
      // Test conversion from simple date format to another simple date format (America/Denver timezone with 5 day
      // granularity)
      String[] input = {"20180412T19:16:38", "20180419T17:55:02", "20180328T09:43:25"};
      String[] expected = {"2018-04-11 00:00:00.000", "2018-04-16 00:00:00.000", "2018-03-26 00:00:00.000"};
      entries.add(new Object[]{
          "1:HOURS:SIMPLE_DATE_FORMAT:yyyyMMdd'T'HH:mm:ss",
          "1:MILLISECONDS:SIMPLE_DATE_FORMAT:yyyy-MM-dd HH:mm:ss.SSS tz(America/Denver)", "5:DAYS", input, expected
      });
    }
    {
      // Test conversion from simple date format to another simple date format (America/Los_Angeles timezone with 1
      // day granularity)
      String[] input = {"20180418T10:00:00", "20180418T01:00:00"};
      String[] expected = {"2018-04-18 00:00:00.000", "2018-04-17 00:00:00.000"};
      entries.add(new Object[]{
          "1:HOURS:SIMPLE_DATE_FORMAT:yyyyMMdd'T'HH:mm:ss",
          "1:MILLISECONDS:SIMPLE_DATE_FORMAT:yyyy-MM-dd HH:mm:ss.SSS tz(America/Los_Angeles)", "1:DAYS", input, expected
      });
    }
    return entries.toArray(new Object[entries.size()][]);
  }
}
|
googleapis/google-api-java-client-services | 35,622 | clients/google-api-services-dfareporting/v4/2.0.0/com/google/api/services/dfareporting/model/Ad.java | /*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.dfareporting.model;
/**
* Contains properties of a Campaign Manager ad.
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the Campaign Manager 360 API. For a detailed explanation
* see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public final class Ad extends com.google.api.client.json.GenericJson {
/**
* Account ID of this ad. This is a read-only field that can be left blank.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key @com.google.api.client.json.JsonString
private java.lang.Long accountId;
/**
* Whether this ad is active. When true, archived must be false.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean active;
/**
* Advertiser ID of this ad. This is a required field on insertion.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key @com.google.api.client.json.JsonString
private java.lang.Long advertiserId;
/**
* Dimension value for the ID of the advertiser. This is a read-only, auto-generated field.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private DimensionValue advertiserIdDimensionValue;
/**
* Whether this ad is archived. When true, active must be false.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean archived;
/**
* Audience segment ID that is being targeted for this ad. Applicable when type is
* AD_SERVING_STANDARD_AD.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key @com.google.api.client.json.JsonString
private java.lang.Long audienceSegmentId;
/**
* Campaign ID of this ad. This is a required field on insertion.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key @com.google.api.client.json.JsonString
private java.lang.Long campaignId;
/**
* Dimension value for the ID of the campaign. This is a read-only, auto-generated field.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private DimensionValue campaignIdDimensionValue;
/**
* Click-through URL for this ad. This is a required field on insertion. Applicable when type is
* AD_SERVING_CLICK_TRACKER.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private ClickThroughUrl clickThroughUrl;
/**
* Click-through URL suffix properties for this ad. Applies to the URL in the ad or (if overriding
* ad properties) the URL in the creative.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private ClickThroughUrlSuffixProperties clickThroughUrlSuffixProperties;
/**
* Comments for this ad.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String comments;
/**
* Compatibility of this ad. Applicable when type is AD_SERVING_DEFAULT_AD. DISPLAY and
* DISPLAY_INTERSTITIAL refer to either rendering on desktop or on mobile devices or in mobile
* apps for regular or interstitial ads, respectively. APP and APP_INTERSTITIAL are only used for
* existing default ads. New mobile placements must be assigned DISPLAY or DISPLAY_INTERSTITIAL
* and default ads created for those placements will be limited to those compatibility types.
* IN_STREAM_VIDEO refers to rendering in-stream video ads developed with the VAST standard.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String compatibility;
/**
* Information about the creation of this ad. This is a read-only field.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private LastModifiedInfo createInfo;
/**
* Creative group assignments for this ad. Applicable when type is AD_SERVING_CLICK_TRACKER. Only
* one assignment per creative group number is allowed for a maximum of two assignments.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<CreativeGroupAssignment> creativeGroupAssignments;
  /**
   * Creative rotation for this ad. Applicable when type is AD_SERVING_DEFAULT_AD,
   * AD_SERVING_STANDARD_AD, or AD_SERVING_TRACKING. When type is AD_SERVING_DEFAULT_AD, this field
   * should have exactly one creativeAssignment.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private CreativeRotation creativeRotation;
/**
* Time and day targeting information for this ad. This field must be left blank if the ad is
* using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private DayPartTargeting dayPartTargeting;
/**
* Default click-through event tag properties for this ad.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private DefaultClickThroughEventTagProperties defaultClickThroughEventTagProperties;
/**
* Delivery schedule information for this ad. Applicable when type is AD_SERVING_STANDARD_AD or
* AD_SERVING_TRACKING. This field along with subfields priority and impressionRatio are required
* on insertion when type is AD_SERVING_STANDARD_AD.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private DeliverySchedule deliverySchedule;
/**
* Whether this ad is a dynamic click tracker. Applicable when type is AD_SERVING_CLICK_TRACKER.
* This is a required field on insert, and is read-only after insert.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean dynamicClickTracker;
  /**
   * End time for this ad — presumably the date/time after which the ad stops serving. NOTE(review):
   * the generated discovery document carries no description for this field; confirm the exact
   * semantics (inclusive/exclusive, time zone handling) against the Campaign Manager 360 API
   * reference.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private com.google.api.client.util.DateTime endTime;
/**
* Event tag overrides for this ad.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<EventTagOverride> eventTagOverrides;
/**
* Geographical targeting information for this ad. This field must be left blank if the ad is
* using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private GeoTargeting geoTargeting;
/**
* ID of this ad. This is a read-only, auto-generated field.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key @com.google.api.client.json.JsonString
private java.lang.Long id;
/**
* Dimension value for the ID of this ad. This is a read-only, auto-generated field.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private DimensionValue idDimensionValue;
/**
* Key-value targeting information for this ad. This field must be left blank if the ad is using a
* targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private KeyValueTargetingExpression keyValueTargetingExpression;
/**
* Identifies what kind of resource this is. Value: the fixed string "dfareporting#ad".
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String kind;
/**
* Language targeting information for this ad. This field must be left blank if the ad is using a
* targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private LanguageTargeting languageTargeting;
/**
* Information about the most recent modification of this ad. This is a read-only field.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private LastModifiedInfo lastModifiedInfo;
/**
* Name of this ad. This is a required field and must be less than 256 characters long.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String name;
/**
* Placement assignments for this ad.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<PlacementAssignment> placementAssignments;
/**
* Remarketing list targeting expression for this ad. This field must be left blank if the ad is
* using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private ListTargetingExpression remarketingListExpression;
/**
* Size of this ad. Applicable when type is AD_SERVING_DEFAULT_AD.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private Size size;
/**
* Whether this ad is ssl compliant. This is a read-only field that is auto-generated when the ad
* is inserted or updated.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean sslCompliant;
/**
* Whether this ad requires ssl. This is a read-only field that is auto-generated when the ad is
* inserted or updated.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean sslRequired;
  /**
   * Start time for this ad — presumably the date/time at which the ad begins serving. NOTE(review):
   * the generated discovery document carries no description for this field; confirm the exact
   * semantics (inclusive/exclusive, time zone handling) against the Campaign Manager 360 API
   * reference.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private com.google.api.client.util.DateTime startTime;
/**
* Subaccount ID of this ad. This is a read-only field that can be left blank.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key @com.google.api.client.json.JsonString
private java.lang.Long subaccountId;
/**
* Targeting template ID, used to apply preconfigured targeting information to this ad. This
* cannot be set while any of dayPartTargeting, geoTargeting, keyValueTargetingExpression,
* languageTargeting, remarketingListExpression, or technologyTargeting are set. Applicable when
* type is AD_SERVING_STANDARD_AD.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key @com.google.api.client.json.JsonString
private java.lang.Long targetingTemplateId;
/**
* Technology platform targeting information for this ad. This field must be left blank if the ad
* is using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private TechnologyTargeting technologyTargeting;
/**
* Type of ad. This is a required field on insertion. Note that default ads (
* AD_SERVING_DEFAULT_AD) cannot be created directly (see Creative resource).
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String type;
/**
* Account ID of this ad. This is a read-only field that can be left blank.
* @return value or {@code null} for none
*/
public java.lang.Long getAccountId() {
return accountId;
}
/**
* Account ID of this ad. This is a read-only field that can be left blank.
* @param accountId accountId or {@code null} for none
*/
public Ad setAccountId(java.lang.Long accountId) {
this.accountId = accountId;
return this;
}
/**
* Whether this ad is active. When true, archived must be false.
* @return value or {@code null} for none
*/
public java.lang.Boolean getActive() {
return active;
}
/**
* Whether this ad is active. When true, archived must be false.
* @param active active or {@code null} for none
*/
public Ad setActive(java.lang.Boolean active) {
this.active = active;
return this;
}
/**
* Advertiser ID of this ad. This is a required field on insertion.
* @return value or {@code null} for none
*/
public java.lang.Long getAdvertiserId() {
return advertiserId;
}
/**
* Advertiser ID of this ad. This is a required field on insertion.
* @param advertiserId advertiserId or {@code null} for none
*/
public Ad setAdvertiserId(java.lang.Long advertiserId) {
this.advertiserId = advertiserId;
return this;
}
/**
* Dimension value for the ID of the advertiser. This is a read-only, auto-generated field.
* @return value or {@code null} for none
*/
public DimensionValue getAdvertiserIdDimensionValue() {
return advertiserIdDimensionValue;
}
/**
* Dimension value for the ID of the advertiser. This is a read-only, auto-generated field.
* @param advertiserIdDimensionValue advertiserIdDimensionValue or {@code null} for none
*/
public Ad setAdvertiserIdDimensionValue(DimensionValue advertiserIdDimensionValue) {
this.advertiserIdDimensionValue = advertiserIdDimensionValue;
return this;
}
/**
* Whether this ad is archived. When true, active must be false.
* @return value or {@code null} for none
*/
public java.lang.Boolean getArchived() {
return archived;
}
/**
* Whether this ad is archived. When true, active must be false.
* @param archived archived or {@code null} for none
*/
public Ad setArchived(java.lang.Boolean archived) {
this.archived = archived;
return this;
}
/**
* Audience segment ID that is being targeted for this ad. Applicable when type is
* AD_SERVING_STANDARD_AD.
* @return value or {@code null} for none
*/
public java.lang.Long getAudienceSegmentId() {
return audienceSegmentId;
}
/**
* Audience segment ID that is being targeted for this ad. Applicable when type is
* AD_SERVING_STANDARD_AD.
* @param audienceSegmentId audienceSegmentId or {@code null} for none
*/
public Ad setAudienceSegmentId(java.lang.Long audienceSegmentId) {
this.audienceSegmentId = audienceSegmentId;
return this;
}
/**
* Campaign ID of this ad. This is a required field on insertion.
* @return value or {@code null} for none
*/
public java.lang.Long getCampaignId() {
return campaignId;
}
/**
* Campaign ID of this ad. This is a required field on insertion.
* @param campaignId campaignId or {@code null} for none
*/
public Ad setCampaignId(java.lang.Long campaignId) {
this.campaignId = campaignId;
return this;
}
/**
* Dimension value for the ID of the campaign. This is a read-only, auto-generated field.
* @return value or {@code null} for none
*/
public DimensionValue getCampaignIdDimensionValue() {
return campaignIdDimensionValue;
}
/**
* Dimension value for the ID of the campaign. This is a read-only, auto-generated field.
* @param campaignIdDimensionValue campaignIdDimensionValue or {@code null} for none
*/
public Ad setCampaignIdDimensionValue(DimensionValue campaignIdDimensionValue) {
this.campaignIdDimensionValue = campaignIdDimensionValue;
return this;
}
/**
* Click-through URL for this ad. This is a required field on insertion. Applicable when type is
* AD_SERVING_CLICK_TRACKER.
* @return value or {@code null} for none
*/
public ClickThroughUrl getClickThroughUrl() {
return clickThroughUrl;
}
/**
* Click-through URL for this ad. This is a required field on insertion. Applicable when type is
* AD_SERVING_CLICK_TRACKER.
* @param clickThroughUrl clickThroughUrl or {@code null} for none
*/
public Ad setClickThroughUrl(ClickThroughUrl clickThroughUrl) {
this.clickThroughUrl = clickThroughUrl;
return this;
}
/**
* Click-through URL suffix properties for this ad. Applies to the URL in the ad or (if overriding
* ad properties) the URL in the creative.
* @return value or {@code null} for none
*/
public ClickThroughUrlSuffixProperties getClickThroughUrlSuffixProperties() {
return clickThroughUrlSuffixProperties;
}
/**
* Click-through URL suffix properties for this ad. Applies to the URL in the ad or (if overriding
* ad properties) the URL in the creative.
* @param clickThroughUrlSuffixProperties clickThroughUrlSuffixProperties or {@code null} for none
*/
public Ad setClickThroughUrlSuffixProperties(ClickThroughUrlSuffixProperties clickThroughUrlSuffixProperties) {
this.clickThroughUrlSuffixProperties = clickThroughUrlSuffixProperties;
return this;
}
/**
* Comments for this ad.
* @return value or {@code null} for none
*/
public java.lang.String getComments() {
return comments;
}
/**
* Comments for this ad.
* @param comments comments or {@code null} for none
*/
public Ad setComments(java.lang.String comments) {
this.comments = comments;
return this;
}
/**
* Compatibility of this ad. Applicable when type is AD_SERVING_DEFAULT_AD. DISPLAY and
* DISPLAY_INTERSTITIAL refer to either rendering on desktop or on mobile devices or in mobile
* apps for regular or interstitial ads, respectively. APP and APP_INTERSTITIAL are only used for
* existing default ads. New mobile placements must be assigned DISPLAY or DISPLAY_INTERSTITIAL
* and default ads created for those placements will be limited to those compatibility types.
* IN_STREAM_VIDEO refers to rendering in-stream video ads developed with the VAST standard.
* @return value or {@code null} for none
*/
public java.lang.String getCompatibility() {
return compatibility;
}
/**
* Compatibility of this ad. Applicable when type is AD_SERVING_DEFAULT_AD. DISPLAY and
* DISPLAY_INTERSTITIAL refer to either rendering on desktop or on mobile devices or in mobile
* apps for regular or interstitial ads, respectively. APP and APP_INTERSTITIAL are only used for
* existing default ads. New mobile placements must be assigned DISPLAY or DISPLAY_INTERSTITIAL
* and default ads created for those placements will be limited to those compatibility types.
* IN_STREAM_VIDEO refers to rendering in-stream video ads developed with the VAST standard.
* @param compatibility compatibility or {@code null} for none
*/
public Ad setCompatibility(java.lang.String compatibility) {
this.compatibility = compatibility;
return this;
}
/**
* Information about the creation of this ad. This is a read-only field.
* @return value or {@code null} for none
*/
public LastModifiedInfo getCreateInfo() {
return createInfo;
}
/**
* Information about the creation of this ad. This is a read-only field.
* @param createInfo createInfo or {@code null} for none
*/
public Ad setCreateInfo(LastModifiedInfo createInfo) {
this.createInfo = createInfo;
return this;
}
/**
* Creative group assignments for this ad. Applicable when type is AD_SERVING_CLICK_TRACKER. Only
* one assignment per creative group number is allowed for a maximum of two assignments.
* @return value or {@code null} for none
*/
public java.util.List<CreativeGroupAssignment> getCreativeGroupAssignments() {
return creativeGroupAssignments;
}
/**
* Creative group assignments for this ad. Applicable when type is AD_SERVING_CLICK_TRACKER. Only
* one assignment per creative group number is allowed for a maximum of two assignments.
* @param creativeGroupAssignments creativeGroupAssignments or {@code null} for none
*/
public Ad setCreativeGroupAssignments(java.util.List<CreativeGroupAssignment> creativeGroupAssignments) {
this.creativeGroupAssignments = creativeGroupAssignments;
return this;
}
/**
* Creative rotation for this ad. Applicable when type is AD_SERVING_DEFAULT_AD,
* AD_SERVING_STANDARD_AD, or AD_SERVING_TRACKING. When type is AD_SERVING_DEFAULT_AD, this field
* should have exactly one creativeAssignment .
* @return value or {@code null} for none
*/
public CreativeRotation getCreativeRotation() {
return creativeRotation;
}
/**
* Creative rotation for this ad. Applicable when type is AD_SERVING_DEFAULT_AD,
* AD_SERVING_STANDARD_AD, or AD_SERVING_TRACKING. When type is AD_SERVING_DEFAULT_AD, this field
* should have exactly one creativeAssignment .
* @param creativeRotation creativeRotation or {@code null} for none
*/
public Ad setCreativeRotation(CreativeRotation creativeRotation) {
this.creativeRotation = creativeRotation;
return this;
}
/**
* Time and day targeting information for this ad. This field must be left blank if the ad is
* using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* @return value or {@code null} for none
*/
public DayPartTargeting getDayPartTargeting() {
return dayPartTargeting;
}
/**
* Time and day targeting information for this ad. This field must be left blank if the ad is
* using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* @param dayPartTargeting dayPartTargeting or {@code null} for none
*/
public Ad setDayPartTargeting(DayPartTargeting dayPartTargeting) {
this.dayPartTargeting = dayPartTargeting;
return this;
}
/**
* Default click-through event tag properties for this ad.
* @return value or {@code null} for none
*/
public DefaultClickThroughEventTagProperties getDefaultClickThroughEventTagProperties() {
return defaultClickThroughEventTagProperties;
}
/**
* Default click-through event tag properties for this ad.
* @param defaultClickThroughEventTagProperties defaultClickThroughEventTagProperties or {@code null} for none
*/
public Ad setDefaultClickThroughEventTagProperties(DefaultClickThroughEventTagProperties defaultClickThroughEventTagProperties) {
this.defaultClickThroughEventTagProperties = defaultClickThroughEventTagProperties;
return this;
}
/**
* Delivery schedule information for this ad. Applicable when type is AD_SERVING_STANDARD_AD or
* AD_SERVING_TRACKING. This field along with subfields priority and impressionRatio are required
* on insertion when type is AD_SERVING_STANDARD_AD.
* @return value or {@code null} for none
*/
public DeliverySchedule getDeliverySchedule() {
return deliverySchedule;
}
/**
* Delivery schedule information for this ad. Applicable when type is AD_SERVING_STANDARD_AD or
* AD_SERVING_TRACKING. This field along with subfields priority and impressionRatio are required
* on insertion when type is AD_SERVING_STANDARD_AD.
* @param deliverySchedule deliverySchedule or {@code null} for none
*/
public Ad setDeliverySchedule(DeliverySchedule deliverySchedule) {
this.deliverySchedule = deliverySchedule;
return this;
}
/**
* Whether this ad is a dynamic click tracker. Applicable when type is AD_SERVING_CLICK_TRACKER.
* This is a required field on insert, and is read-only after insert.
* @return value or {@code null} for none
*/
public java.lang.Boolean getDynamicClickTracker() {
return dynamicClickTracker;
}
/**
* Whether this ad is a dynamic click tracker. Applicable when type is AD_SERVING_CLICK_TRACKER.
* This is a required field on insert, and is read-only after insert.
* @param dynamicClickTracker dynamicClickTracker or {@code null} for none
*/
public Ad setDynamicClickTracker(java.lang.Boolean dynamicClickTracker) {
this.dynamicClickTracker = dynamicClickTracker;
return this;
}
  /**
   * End time for this ad (assumed from the field name — no description is present in the source
   * discovery document; verify against the Campaign Manager 360 API reference).
   * @return value or {@code null} for none
   */
  public com.google.api.client.util.DateTime getEndTime() {
    return endTime;
  }
  /**
   * End time for this ad (assumed from the field name — no description is present in the source
   * discovery document; verify against the Campaign Manager 360 API reference).
   * @param endTime endTime or {@code null} for none
   */
  public Ad setEndTime(com.google.api.client.util.DateTime endTime) {
    this.endTime = endTime;
    return this;
  }
/**
* Event tag overrides for this ad.
* @return value or {@code null} for none
*/
public java.util.List<EventTagOverride> getEventTagOverrides() {
return eventTagOverrides;
}
/**
* Event tag overrides for this ad.
* @param eventTagOverrides eventTagOverrides or {@code null} for none
*/
public Ad setEventTagOverrides(java.util.List<EventTagOverride> eventTagOverrides) {
this.eventTagOverrides = eventTagOverrides;
return this;
}
/**
* Geographical targeting information for this ad. This field must be left blank if the ad is
* using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* @return value or {@code null} for none
*/
public GeoTargeting getGeoTargeting() {
return geoTargeting;
}
/**
* Geographical targeting information for this ad. This field must be left blank if the ad is
* using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* @param geoTargeting geoTargeting or {@code null} for none
*/
public Ad setGeoTargeting(GeoTargeting geoTargeting) {
this.geoTargeting = geoTargeting;
return this;
}
/**
* ID of this ad. This is a read-only, auto-generated field.
* @return value or {@code null} for none
*/
public java.lang.Long getId() {
return id;
}
/**
* ID of this ad. This is a read-only, auto-generated field.
* @param id id or {@code null} for none
*/
public Ad setId(java.lang.Long id) {
this.id = id;
return this;
}
/**
* Dimension value for the ID of this ad. This is a read-only, auto-generated field.
* @return value or {@code null} for none
*/
public DimensionValue getIdDimensionValue() {
return idDimensionValue;
}
/**
* Dimension value for the ID of this ad. This is a read-only, auto-generated field.
* @param idDimensionValue idDimensionValue or {@code null} for none
*/
public Ad setIdDimensionValue(DimensionValue idDimensionValue) {
this.idDimensionValue = idDimensionValue;
return this;
}
/**
* Key-value targeting information for this ad. This field must be left blank if the ad is using a
* targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* @return value or {@code null} for none
*/
public KeyValueTargetingExpression getKeyValueTargetingExpression() {
return keyValueTargetingExpression;
}
/**
* Key-value targeting information for this ad. This field must be left blank if the ad is using a
* targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* @param keyValueTargetingExpression keyValueTargetingExpression or {@code null} for none
*/
public Ad setKeyValueTargetingExpression(KeyValueTargetingExpression keyValueTargetingExpression) {
this.keyValueTargetingExpression = keyValueTargetingExpression;
return this;
}
/**
* Identifies what kind of resource this is. Value: the fixed string "dfareporting#ad".
* @return value or {@code null} for none
*/
public java.lang.String getKind() {
return kind;
}
/**
* Identifies what kind of resource this is. Value: the fixed string "dfareporting#ad".
* @param kind kind or {@code null} for none
*/
public Ad setKind(java.lang.String kind) {
this.kind = kind;
return this;
}
/**
* Language targeting information for this ad. This field must be left blank if the ad is using a
* targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* @return value or {@code null} for none
*/
public LanguageTargeting getLanguageTargeting() {
return languageTargeting;
}
/**
* Language targeting information for this ad. This field must be left blank if the ad is using a
* targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* @param languageTargeting languageTargeting or {@code null} for none
*/
public Ad setLanguageTargeting(LanguageTargeting languageTargeting) {
this.languageTargeting = languageTargeting;
return this;
}
/**
* Information about the most recent modification of this ad. This is a read-only field.
* @return value or {@code null} for none
*/
public LastModifiedInfo getLastModifiedInfo() {
return lastModifiedInfo;
}
/**
* Information about the most recent modification of this ad. This is a read-only field.
* @param lastModifiedInfo lastModifiedInfo or {@code null} for none
*/
public Ad setLastModifiedInfo(LastModifiedInfo lastModifiedInfo) {
this.lastModifiedInfo = lastModifiedInfo;
return this;
}
/**
* Name of this ad. This is a required field and must be less than 256 characters long.
* @return value or {@code null} for none
*/
public java.lang.String getName() {
return name;
}
/**
* Name of this ad. This is a required field and must be less than 256 characters long.
* @param name name or {@code null} for none
*/
public Ad setName(java.lang.String name) {
this.name = name;
return this;
}
/**
* Placement assignments for this ad.
* @return value or {@code null} for none
*/
public java.util.List<PlacementAssignment> getPlacementAssignments() {
return placementAssignments;
}
/**
* Placement assignments for this ad.
* @param placementAssignments placementAssignments or {@code null} for none
*/
public Ad setPlacementAssignments(java.util.List<PlacementAssignment> placementAssignments) {
this.placementAssignments = placementAssignments;
return this;
}
/**
* Remarketing list targeting expression for this ad. This field must be left blank if the ad is
* using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* @return value or {@code null} for none
*/
public ListTargetingExpression getRemarketingListExpression() {
return remarketingListExpression;
}
/**
* Remarketing list targeting expression for this ad. This field must be left blank if the ad is
* using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* @param remarketingListExpression remarketingListExpression or {@code null} for none
*/
public Ad setRemarketingListExpression(ListTargetingExpression remarketingListExpression) {
this.remarketingListExpression = remarketingListExpression;
return this;
}
/**
* Size of this ad. Applicable when type is AD_SERVING_DEFAULT_AD.
* @return value or {@code null} for none
*/
public Size getSize() {
return size;
}
/**
* Size of this ad. Applicable when type is AD_SERVING_DEFAULT_AD.
* @param size size or {@code null} for none
*/
public Ad setSize(Size size) {
this.size = size;
return this;
}
/**
* Whether this ad is ssl compliant. This is a read-only field that is auto-generated when the ad
* is inserted or updated.
* @return value or {@code null} for none
*/
public java.lang.Boolean getSslCompliant() {
return sslCompliant;
}
/**
* Whether this ad is ssl compliant. This is a read-only field that is auto-generated when the ad
* is inserted or updated.
* @param sslCompliant sslCompliant or {@code null} for none
*/
public Ad setSslCompliant(java.lang.Boolean sslCompliant) {
this.sslCompliant = sslCompliant;
return this;
}
/**
* Whether this ad requires ssl. This is a read-only field that is auto-generated when the ad is
* inserted or updated.
* @return value or {@code null} for none
*/
public java.lang.Boolean getSslRequired() {
return sslRequired;
}
/**
* Whether this ad requires ssl. This is a read-only field that is auto-generated when the ad is
* inserted or updated.
* @param sslRequired sslRequired or {@code null} for none
*/
public Ad setSslRequired(java.lang.Boolean sslRequired) {
this.sslRequired = sslRequired;
return this;
}
  /**
   * Start time for this ad (assumed from the field name — no description is present in the source
   * discovery document; verify against the Campaign Manager 360 API reference).
   * @return value or {@code null} for none
   */
  public com.google.api.client.util.DateTime getStartTime() {
    return startTime;
  }
  /**
   * Start time for this ad (assumed from the field name — no description is present in the source
   * discovery document; verify against the Campaign Manager 360 API reference).
   * @param startTime startTime or {@code null} for none
   */
  public Ad setStartTime(com.google.api.client.util.DateTime startTime) {
    this.startTime = startTime;
    return this;
  }
/**
* Subaccount ID of this ad. This is a read-only field that can be left blank.
* @return value or {@code null} for none
*/
public java.lang.Long getSubaccountId() {
return subaccountId;
}
/**
* Subaccount ID of this ad. This is a read-only field that can be left blank.
* @param subaccountId subaccountId or {@code null} for none
*/
public Ad setSubaccountId(java.lang.Long subaccountId) {
this.subaccountId = subaccountId;
return this;
}
/**
* Targeting template ID, used to apply preconfigured targeting information to this ad. This
* cannot be set while any of dayPartTargeting, geoTargeting, keyValueTargetingExpression,
* languageTargeting, remarketingListExpression, or technologyTargeting are set. Applicable when
* type is AD_SERVING_STANDARD_AD.
* @return value or {@code null} for none
*/
public java.lang.Long getTargetingTemplateId() {
return targetingTemplateId;
}
/**
* Targeting template ID, used to apply preconfigured targeting information to this ad. This
* cannot be set while any of dayPartTargeting, geoTargeting, keyValueTargetingExpression,
* languageTargeting, remarketingListExpression, or technologyTargeting are set. Applicable when
* type is AD_SERVING_STANDARD_AD.
* @param targetingTemplateId targetingTemplateId or {@code null} for none
*/
public Ad setTargetingTemplateId(java.lang.Long targetingTemplateId) {
this.targetingTemplateId = targetingTemplateId;
return this;
}
/**
* Technology platform targeting information for this ad. This field must be left blank if the ad
* is using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* @return value or {@code null} for none
*/
public TechnologyTargeting getTechnologyTargeting() {
return technologyTargeting;
}
/**
* Technology platform targeting information for this ad. This field must be left blank if the ad
* is using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* @param technologyTargeting technologyTargeting or {@code null} for none
*/
public Ad setTechnologyTargeting(TechnologyTargeting technologyTargeting) {
this.technologyTargeting = technologyTargeting;
return this;
}
/**
* Type of ad. This is a required field on insertion. Note that default ads (
* AD_SERVING_DEFAULT_AD) cannot be created directly (see Creative resource).
* @return value or {@code null} for none
*/
public java.lang.String getType() {
return type;
}
/**
* Type of ad. This is a required field on insertion. Note that default ads (
* AD_SERVING_DEFAULT_AD) cannot be created directly (see Creative resource).
* @param type type or {@code null} for none
*/
public Ad setType(java.lang.String type) {
this.type = type;
return this;
}
  /**
   * Stores the given JSON field name/value pair on this instance, including fields not declared on
   * this class.
   *
   * <p>Overridden only to narrow the return type to {@code Ad} so that calls can be chained;
   * delegates to {@link com.google.api.client.json.GenericJson#set(String, Object)}.
   */
  @Override
  public Ad set(String fieldName, Object value) {
    return (Ad) super.set(fieldName, value);
  }
  /**
   * Returns a copy of this instance.
   *
   * <p>Overridden only to narrow the return type to {@code Ad}; delegates to
   * {@link com.google.api.client.json.GenericJson#clone()}.
   */
  @Override
  public Ad clone() {
    return (Ad) super.clone();
  }
}
/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.dfareporting.model;
/**
* Contains properties of a Campaign Manager ad.
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the Campaign Manager 360 API. For a detailed explanation
* see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public final class Ad extends com.google.api.client.json.GenericJson {
  /**
   * Account ID of this ad. This is a read-only field that can be left blank.
   * Sent over the wire as a JSON string (the field is annotated {@code @JsonString}), as are all
   * other 64-bit ID fields in this class.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key @com.google.api.client.json.JsonString
  private java.lang.Long accountId;
  /**
   * Whether this ad is active. When true, archived must be false.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Boolean active;
  /**
   * Advertiser ID of this ad. This is a required field on insertion.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key @com.google.api.client.json.JsonString
  private java.lang.Long advertiserId;
  /**
   * Dimension value for the ID of the advertiser. This is a read-only, auto-generated field.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private DimensionValue advertiserIdDimensionValue;
  /**
   * Whether this ad is archived. When true, active must be false.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Boolean archived;
  /**
   * Audience segment ID that is being targeted for this ad. Applicable when type is
   * AD_SERVING_STANDARD_AD.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key @com.google.api.client.json.JsonString
  private java.lang.Long audienceSegmentId;
  /**
   * Campaign ID of this ad. This is a required field on insertion.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key @com.google.api.client.json.JsonString
  private java.lang.Long campaignId;
  /**
   * Dimension value for the ID of the campaign. This is a read-only, auto-generated field.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private DimensionValue campaignIdDimensionValue;
  /**
   * Click-through URL for this ad. This is a required field on insertion. Applicable when type is
   * AD_SERVING_CLICK_TRACKER.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private ClickThroughUrl clickThroughUrl;
  /**
   * Click-through URL suffix properties for this ad. Applies to the URL in the ad or (if overriding
   * ad properties) the URL in the creative.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private ClickThroughUrlSuffixProperties clickThroughUrlSuffixProperties;
  /**
   * Comments for this ad.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String comments;
  /**
   * Compatibility of this ad. Applicable when type is AD_SERVING_DEFAULT_AD. DISPLAY and
   * DISPLAY_INTERSTITIAL refer to either rendering on desktop or on mobile devices or in mobile
   * apps for regular or interstitial ads, respectively. APP and APP_INTERSTITIAL are only used for
   * existing default ads. New mobile placements must be assigned DISPLAY or DISPLAY_INTERSTITIAL
   * and default ads created for those placements will be limited to those compatibility types.
   * IN_STREAM_VIDEO refers to rendering in-stream video ads developed with the VAST standard.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String compatibility;
  /**
   * Information about the creation of this ad. This is a read-only field.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private LastModifiedInfo createInfo;
  /**
   * Creative group assignments for this ad. Applicable when type is AD_SERVING_CLICK_TRACKER. Only
   * one assignment per creative group number is allowed for a maximum of two assignments.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<CreativeGroupAssignment> creativeGroupAssignments;
  /**
   * Creative rotation for this ad. Applicable when type is AD_SERVING_DEFAULT_AD,
   * AD_SERVING_STANDARD_AD, or AD_SERVING_TRACKING. When type is AD_SERVING_DEFAULT_AD, this field
   * should have exactly one creativeAssignment .
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private CreativeRotation creativeRotation;
  /**
   * Time and day targeting information for this ad. This field must be left blank if the ad is
   * using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private DayPartTargeting dayPartTargeting;
  /**
   * Default click-through event tag properties for this ad.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private DefaultClickThroughEventTagProperties defaultClickThroughEventTagProperties;
  /**
   * Delivery schedule information for this ad. Applicable when type is AD_SERVING_STANDARD_AD or
   * AD_SERVING_TRACKING. This field along with subfields priority and impressionRatio are required
   * on insertion when type is AD_SERVING_STANDARD_AD.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private DeliverySchedule deliverySchedule;
  /**
   * Whether this ad is a dynamic click tracker. Applicable when type is AD_SERVING_CLICK_TRACKER.
   * This is a required field on insert, and is read-only after insert.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Boolean dynamicClickTracker;
  /**
   * End time of this ad. NOTE(review): the discovery document supplies no description for this
   * field; presumably the end of the ad's serving window — confirm against the Campaign Manager
   * 360 API reference.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private com.google.api.client.util.DateTime endTime;
  /**
   * Event tag overrides for this ad.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<EventTagOverride> eventTagOverrides;
  /**
   * Geographical targeting information for this ad. This field must be left blank if the ad is
   * using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private GeoTargeting geoTargeting;
  /**
   * ID of this ad. This is a read-only, auto-generated field.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key @com.google.api.client.json.JsonString
  private java.lang.Long id;
  /**
   * Dimension value for the ID of this ad. This is a read-only, auto-generated field.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private DimensionValue idDimensionValue;
  /**
   * Key-value targeting information for this ad. This field must be left blank if the ad is using a
   * targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private KeyValueTargetingExpression keyValueTargetingExpression;
  /**
   * Identifies what kind of resource this is. Value: the fixed string "dfareporting#ad".
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String kind;
  /**
   * Language targeting information for this ad. This field must be left blank if the ad is using a
   * targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private LanguageTargeting languageTargeting;
  /**
   * Information about the most recent modification of this ad. This is a read-only field.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private LastModifiedInfo lastModifiedInfo;
  /**
   * Name of this ad. This is a required field and must be less than 256 characters long.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String name;
  /**
   * Placement assignments for this ad.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<PlacementAssignment> placementAssignments;
  /**
   * Remarketing list targeting expression for this ad. This field must be left blank if the ad is
   * using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private ListTargetingExpression remarketingListExpression;
  /**
   * Size of this ad. Applicable when type is AD_SERVING_DEFAULT_AD.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private Size size;
  /**
   * Whether this ad is ssl compliant. This is a read-only field that is auto-generated when the ad
   * is inserted or updated.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Boolean sslCompliant;
  /**
   * Whether this ad requires ssl. This is a read-only field that is auto-generated when the ad is
   * inserted or updated.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Boolean sslRequired;
  /**
   * Start time of this ad. NOTE(review): the discovery document supplies no description for this
   * field; presumably the start of the ad's serving window — confirm against the Campaign Manager
   * 360 API reference.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private com.google.api.client.util.DateTime startTime;
  /**
   * Subaccount ID of this ad. This is a read-only field that can be left blank.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key @com.google.api.client.json.JsonString
  private java.lang.Long subaccountId;
  /**
   * Targeting template ID, used to apply preconfigured targeting information to this ad. This
   * cannot be set while any of dayPartTargeting, geoTargeting, keyValueTargetingExpression,
   * languageTargeting, remarketingListExpression, or technologyTargeting are set. Applicable when
   * type is AD_SERVING_STANDARD_AD.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key @com.google.api.client.json.JsonString
  private java.lang.Long targetingTemplateId;
  /**
   * Technology platform targeting information for this ad. This field must be left blank if the ad
   * is using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private TechnologyTargeting technologyTargeting;
  /**
   * Type of ad. This is a required field on insertion. Note that default ads (
   * AD_SERVING_DEFAULT_AD) cannot be created directly (see Creative resource).
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String type;
  /**
   * Account ID of this ad. This is a read-only field that can be left blank.
   * @return value or {@code null} for none
   */
  public java.lang.Long getAccountId() {
    return accountId;
  }
  /**
   * Account ID of this ad. This is a read-only field that can be left blank.
   * @param accountId accountId or {@code null} for none
   */
  public Ad setAccountId(java.lang.Long accountId) {
    this.accountId = accountId;
    return this;
  }
  /**
   * Whether this ad is active. When true, archived must be false.
   * @return value or {@code null} for none
   */
  public java.lang.Boolean getActive() {
    return active;
  }
  /**
   * Whether this ad is active. When true, archived must be false.
   * @param active active or {@code null} for none
   */
  public Ad setActive(java.lang.Boolean active) {
    this.active = active;
    return this;
  }
  /**
   * Advertiser ID of this ad. This is a required field on insertion.
   * @return value or {@code null} for none
   */
  public java.lang.Long getAdvertiserId() {
    return advertiserId;
  }
  /**
   * Advertiser ID of this ad. This is a required field on insertion.
   * @param advertiserId advertiserId or {@code null} for none
   */
  public Ad setAdvertiserId(java.lang.Long advertiserId) {
    this.advertiserId = advertiserId;
    return this;
  }
  /**
   * Dimension value for the ID of the advertiser. This is a read-only, auto-generated field.
   * @return value or {@code null} for none
   */
  public DimensionValue getAdvertiserIdDimensionValue() {
    return advertiserIdDimensionValue;
  }
  /**
   * Dimension value for the ID of the advertiser. This is a read-only, auto-generated field.
   * @param advertiserIdDimensionValue advertiserIdDimensionValue or {@code null} for none
   */
  public Ad setAdvertiserIdDimensionValue(DimensionValue advertiserIdDimensionValue) {
    this.advertiserIdDimensionValue = advertiserIdDimensionValue;
    return this;
  }
  /**
   * Whether this ad is archived. When true, active must be false.
   * @return value or {@code null} for none
   */
  public java.lang.Boolean getArchived() {
    return archived;
  }
  /**
   * Whether this ad is archived. When true, active must be false.
   * @param archived archived or {@code null} for none
   */
  public Ad setArchived(java.lang.Boolean archived) {
    this.archived = archived;
    return this;
  }
  /**
   * Audience segment ID that is being targeted for this ad. Applicable when type is
   * AD_SERVING_STANDARD_AD.
   * @return value or {@code null} for none
   */
  public java.lang.Long getAudienceSegmentId() {
    return audienceSegmentId;
  }
  /**
   * Audience segment ID that is being targeted for this ad. Applicable when type is
   * AD_SERVING_STANDARD_AD.
   * @param audienceSegmentId audienceSegmentId or {@code null} for none
   */
  public Ad setAudienceSegmentId(java.lang.Long audienceSegmentId) {
    this.audienceSegmentId = audienceSegmentId;
    return this;
  }
  /**
   * Campaign ID of this ad. This is a required field on insertion.
   * @return value or {@code null} for none
   */
  public java.lang.Long getCampaignId() {
    return campaignId;
  }
  /**
   * Campaign ID of this ad. This is a required field on insertion.
   * @param campaignId campaignId or {@code null} for none
   */
  public Ad setCampaignId(java.lang.Long campaignId) {
    this.campaignId = campaignId;
    return this;
  }
  /**
   * Dimension value for the ID of the campaign. This is a read-only, auto-generated field.
   * @return value or {@code null} for none
   */
  public DimensionValue getCampaignIdDimensionValue() {
    return campaignIdDimensionValue;
  }
  /**
   * Dimension value for the ID of the campaign. This is a read-only, auto-generated field.
   * @param campaignIdDimensionValue campaignIdDimensionValue or {@code null} for none
   */
  public Ad setCampaignIdDimensionValue(DimensionValue campaignIdDimensionValue) {
    this.campaignIdDimensionValue = campaignIdDimensionValue;
    return this;
  }
  /**
   * Click-through URL for this ad. This is a required field on insertion. Applicable when type is
   * AD_SERVING_CLICK_TRACKER.
   * @return value or {@code null} for none
   */
  public ClickThroughUrl getClickThroughUrl() {
    return clickThroughUrl;
  }
  /**
   * Click-through URL for this ad. This is a required field on insertion. Applicable when type is
   * AD_SERVING_CLICK_TRACKER.
   * @param clickThroughUrl clickThroughUrl or {@code null} for none
   */
  public Ad setClickThroughUrl(ClickThroughUrl clickThroughUrl) {
    this.clickThroughUrl = clickThroughUrl;
    return this;
  }
  /**
   * Click-through URL suffix properties for this ad. Applies to the URL in the ad or (if overriding
   * ad properties) the URL in the creative.
   * @return value or {@code null} for none
   */
  public ClickThroughUrlSuffixProperties getClickThroughUrlSuffixProperties() {
    return clickThroughUrlSuffixProperties;
  }
  /**
   * Click-through URL suffix properties for this ad. Applies to the URL in the ad or (if overriding
   * ad properties) the URL in the creative.
   * @param clickThroughUrlSuffixProperties clickThroughUrlSuffixProperties or {@code null} for none
   */
  public Ad setClickThroughUrlSuffixProperties(ClickThroughUrlSuffixProperties clickThroughUrlSuffixProperties) {
    this.clickThroughUrlSuffixProperties = clickThroughUrlSuffixProperties;
    return this;
  }
  /**
   * Comments for this ad.
   * @return value or {@code null} for none
   */
  public java.lang.String getComments() {
    return comments;
  }
  /**
   * Comments for this ad.
   * @param comments comments or {@code null} for none
   */
  public Ad setComments(java.lang.String comments) {
    this.comments = comments;
    return this;
  }
  /**
   * Compatibility of this ad. Applicable when type is AD_SERVING_DEFAULT_AD. DISPLAY and
   * DISPLAY_INTERSTITIAL refer to either rendering on desktop or on mobile devices or in mobile
   * apps for regular or interstitial ads, respectively. APP and APP_INTERSTITIAL are only used for
   * existing default ads. New mobile placements must be assigned DISPLAY or DISPLAY_INTERSTITIAL
   * and default ads created for those placements will be limited to those compatibility types.
   * IN_STREAM_VIDEO refers to rendering in-stream video ads developed with the VAST standard.
   * @return value or {@code null} for none
   */
  public java.lang.String getCompatibility() {
    return compatibility;
  }
  /**
   * Compatibility of this ad. Applicable when type is AD_SERVING_DEFAULT_AD. DISPLAY and
   * DISPLAY_INTERSTITIAL refer to either rendering on desktop or on mobile devices or in mobile
   * apps for regular or interstitial ads, respectively. APP and APP_INTERSTITIAL are only used for
   * existing default ads. New mobile placements must be assigned DISPLAY or DISPLAY_INTERSTITIAL
   * and default ads created for those placements will be limited to those compatibility types.
   * IN_STREAM_VIDEO refers to rendering in-stream video ads developed with the VAST standard.
   * @param compatibility compatibility or {@code null} for none
   */
  public Ad setCompatibility(java.lang.String compatibility) {
    this.compatibility = compatibility;
    return this;
  }
  /**
   * Information about the creation of this ad. This is a read-only field.
   * @return value or {@code null} for none
   */
  public LastModifiedInfo getCreateInfo() {
    return createInfo;
  }
  /**
   * Information about the creation of this ad. This is a read-only field.
   * @param createInfo createInfo or {@code null} for none
   */
  public Ad setCreateInfo(LastModifiedInfo createInfo) {
    this.createInfo = createInfo;
    return this;
  }
  /**
   * Creative group assignments for this ad. Applicable when type is AD_SERVING_CLICK_TRACKER. Only
   * one assignment per creative group number is allowed for a maximum of two assignments.
   * @return value or {@code null} for none
   */
  public java.util.List<CreativeGroupAssignment> getCreativeGroupAssignments() {
    return creativeGroupAssignments;
  }
  /**
   * Creative group assignments for this ad. Applicable when type is AD_SERVING_CLICK_TRACKER. Only
   * one assignment per creative group number is allowed for a maximum of two assignments.
   * @param creativeGroupAssignments creativeGroupAssignments or {@code null} for none
   */
  public Ad setCreativeGroupAssignments(java.util.List<CreativeGroupAssignment> creativeGroupAssignments) {
    this.creativeGroupAssignments = creativeGroupAssignments;
    return this;
  }
  /**
   * Creative rotation for this ad. Applicable when type is AD_SERVING_DEFAULT_AD,
   * AD_SERVING_STANDARD_AD, or AD_SERVING_TRACKING. When type is AD_SERVING_DEFAULT_AD, this field
   * should have exactly one creativeAssignment .
   * @return value or {@code null} for none
   */
  public CreativeRotation getCreativeRotation() {
    return creativeRotation;
  }
  /**
   * Creative rotation for this ad. Applicable when type is AD_SERVING_DEFAULT_AD,
   * AD_SERVING_STANDARD_AD, or AD_SERVING_TRACKING. When type is AD_SERVING_DEFAULT_AD, this field
   * should have exactly one creativeAssignment .
   * @param creativeRotation creativeRotation or {@code null} for none
   */
  public Ad setCreativeRotation(CreativeRotation creativeRotation) {
    this.creativeRotation = creativeRotation;
    return this;
  }
  /**
   * Time and day targeting information for this ad. This field must be left blank if the ad is
   * using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
   * @return value or {@code null} for none
   */
  public DayPartTargeting getDayPartTargeting() {
    return dayPartTargeting;
  }
  /**
   * Time and day targeting information for this ad. This field must be left blank if the ad is
   * using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
   * @param dayPartTargeting dayPartTargeting or {@code null} for none
   */
  public Ad setDayPartTargeting(DayPartTargeting dayPartTargeting) {
    this.dayPartTargeting = dayPartTargeting;
    return this;
  }
  /**
   * Default click-through event tag properties for this ad.
   * @return value or {@code null} for none
   */
  public DefaultClickThroughEventTagProperties getDefaultClickThroughEventTagProperties() {
    return defaultClickThroughEventTagProperties;
  }
  /**
   * Default click-through event tag properties for this ad.
   * @param defaultClickThroughEventTagProperties defaultClickThroughEventTagProperties or {@code null} for none
   */
  public Ad setDefaultClickThroughEventTagProperties(DefaultClickThroughEventTagProperties defaultClickThroughEventTagProperties) {
    this.defaultClickThroughEventTagProperties = defaultClickThroughEventTagProperties;
    return this;
  }
  /**
   * Delivery schedule information for this ad. Applicable when type is AD_SERVING_STANDARD_AD or
   * AD_SERVING_TRACKING. This field along with subfields priority and impressionRatio are required
   * on insertion when type is AD_SERVING_STANDARD_AD.
   * @return value or {@code null} for none
   */
  public DeliverySchedule getDeliverySchedule() {
    return deliverySchedule;
  }
  /**
   * Delivery schedule information for this ad. Applicable when type is AD_SERVING_STANDARD_AD or
   * AD_SERVING_TRACKING. This field along with subfields priority and impressionRatio are required
   * on insertion when type is AD_SERVING_STANDARD_AD.
   * @param deliverySchedule deliverySchedule or {@code null} for none
   */
  public Ad setDeliverySchedule(DeliverySchedule deliverySchedule) {
    this.deliverySchedule = deliverySchedule;
    return this;
  }
  /**
   * Whether this ad is a dynamic click tracker. Applicable when type is AD_SERVING_CLICK_TRACKER.
   * This is a required field on insert, and is read-only after insert.
   * @return value or {@code null} for none
   */
  public java.lang.Boolean getDynamicClickTracker() {
    return dynamicClickTracker;
  }
  /**
   * Whether this ad is a dynamic click tracker. Applicable when type is AD_SERVING_CLICK_TRACKER.
   * This is a required field on insert, and is read-only after insert.
   * @param dynamicClickTracker dynamicClickTracker or {@code null} for none
   */
  public Ad setDynamicClickTracker(java.lang.Boolean dynamicClickTracker) {
    this.dynamicClickTracker = dynamicClickTracker;
    return this;
  }
  /**
   * End time of this ad. NOTE(review): no description in the generated source — confirm semantics
   * against the Campaign Manager 360 API reference.
   * @return value or {@code null} for none
   */
  public com.google.api.client.util.DateTime getEndTime() {
    return endTime;
  }
  /**
   * End time of this ad. NOTE(review): no description in the generated source — confirm semantics
   * against the Campaign Manager 360 API reference.
   * @param endTime endTime or {@code null} for none
   */
  public Ad setEndTime(com.google.api.client.util.DateTime endTime) {
    this.endTime = endTime;
    return this;
  }
  /**
   * Event tag overrides for this ad.
   * @return value or {@code null} for none
   */
  public java.util.List<EventTagOverride> getEventTagOverrides() {
    return eventTagOverrides;
  }
  /**
   * Event tag overrides for this ad.
   * @param eventTagOverrides eventTagOverrides or {@code null} for none
   */
  public Ad setEventTagOverrides(java.util.List<EventTagOverride> eventTagOverrides) {
    this.eventTagOverrides = eventTagOverrides;
    return this;
  }
  /**
   * Geographical targeting information for this ad. This field must be left blank if the ad is
   * using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
   * @return value or {@code null} for none
   */
  public GeoTargeting getGeoTargeting() {
    return geoTargeting;
  }
  /**
   * Geographical targeting information for this ad. This field must be left blank if the ad is
   * using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
   * @param geoTargeting geoTargeting or {@code null} for none
   */
  public Ad setGeoTargeting(GeoTargeting geoTargeting) {
    this.geoTargeting = geoTargeting;
    return this;
  }
  /**
   * ID of this ad. This is a read-only, auto-generated field.
   * @return value or {@code null} for none
   */
  public java.lang.Long getId() {
    return id;
  }
  /**
   * ID of this ad. This is a read-only, auto-generated field.
   * @param id id or {@code null} for none
   */
  public Ad setId(java.lang.Long id) {
    this.id = id;
    return this;
  }
  /**
   * Dimension value for the ID of this ad. This is a read-only, auto-generated field.
   * @return value or {@code null} for none
   */
  public DimensionValue getIdDimensionValue() {
    return idDimensionValue;
  }
  /**
   * Dimension value for the ID of this ad. This is a read-only, auto-generated field.
   * @param idDimensionValue idDimensionValue or {@code null} for none
   */
  public Ad setIdDimensionValue(DimensionValue idDimensionValue) {
    this.idDimensionValue = idDimensionValue;
    return this;
  }
  /**
   * Key-value targeting information for this ad. This field must be left blank if the ad is using a
   * targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
   * @return value or {@code null} for none
   */
  public KeyValueTargetingExpression getKeyValueTargetingExpression() {
    return keyValueTargetingExpression;
  }
  /**
   * Key-value targeting information for this ad. This field must be left blank if the ad is using a
   * targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
   * @param keyValueTargetingExpression keyValueTargetingExpression or {@code null} for none
   */
  public Ad setKeyValueTargetingExpression(KeyValueTargetingExpression keyValueTargetingExpression) {
    this.keyValueTargetingExpression = keyValueTargetingExpression;
    return this;
  }
  /**
   * Identifies what kind of resource this is. Value: the fixed string "dfareporting#ad".
   * @return value or {@code null} for none
   */
  public java.lang.String getKind() {
    return kind;
  }
  /**
   * Identifies what kind of resource this is. Value: the fixed string "dfareporting#ad".
   * @param kind kind or {@code null} for none
   */
  public Ad setKind(java.lang.String kind) {
    this.kind = kind;
    return this;
  }
  /**
   * Language targeting information for this ad. This field must be left blank if the ad is using a
   * targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
   * @return value or {@code null} for none
   */
  public LanguageTargeting getLanguageTargeting() {
    return languageTargeting;
  }
  /**
   * Language targeting information for this ad. This field must be left blank if the ad is using a
   * targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
   * @param languageTargeting languageTargeting or {@code null} for none
   */
  public Ad setLanguageTargeting(LanguageTargeting languageTargeting) {
    this.languageTargeting = languageTargeting;
    return this;
  }
  /**
   * Information about the most recent modification of this ad. This is a read-only field.
   * @return value or {@code null} for none
   */
  public LastModifiedInfo getLastModifiedInfo() {
    return lastModifiedInfo;
  }
  /**
   * Information about the most recent modification of this ad. This is a read-only field.
   * @param lastModifiedInfo lastModifiedInfo or {@code null} for none
   */
  public Ad setLastModifiedInfo(LastModifiedInfo lastModifiedInfo) {
    this.lastModifiedInfo = lastModifiedInfo;
    return this;
  }
  /**
   * Name of this ad. This is a required field and must be less than 256 characters long.
   * @return value or {@code null} for none
   */
  public java.lang.String getName() {
    return name;
  }
  /**
   * Name of this ad. This is a required field and must be less than 256 characters long.
   * @param name name or {@code null} for none
   */
  public Ad setName(java.lang.String name) {
    this.name = name;
    return this;
  }
  /**
   * Placement assignments for this ad.
   * @return value or {@code null} for none
   */
  public java.util.List<PlacementAssignment> getPlacementAssignments() {
    return placementAssignments;
  }
  /**
   * Placement assignments for this ad.
   * @param placementAssignments placementAssignments or {@code null} for none
   */
  public Ad setPlacementAssignments(java.util.List<PlacementAssignment> placementAssignments) {
    this.placementAssignments = placementAssignments;
    return this;
  }
  /**
   * Remarketing list targeting expression for this ad. This field must be left blank if the ad is
   * using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
   * @return value or {@code null} for none
   */
  public ListTargetingExpression getRemarketingListExpression() {
    return remarketingListExpression;
  }
  /**
   * Remarketing list targeting expression for this ad. This field must be left blank if the ad is
   * using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
   * @param remarketingListExpression remarketingListExpression or {@code null} for none
   */
  public Ad setRemarketingListExpression(ListTargetingExpression remarketingListExpression) {
    this.remarketingListExpression = remarketingListExpression;
    return this;
  }
  /**
   * Size of this ad. Applicable when type is AD_SERVING_DEFAULT_AD.
   * @return value or {@code null} for none
   */
  public Size getSize() {
    return size;
  }
  /**
   * Size of this ad. Applicable when type is AD_SERVING_DEFAULT_AD.
   * @param size size or {@code null} for none
   */
  public Ad setSize(Size size) {
    this.size = size;
    return this;
  }
  /**
   * Whether this ad is ssl compliant. This is a read-only field that is auto-generated when the ad
   * is inserted or updated.
   * @return value or {@code null} for none
   */
  public java.lang.Boolean getSslCompliant() {
    return sslCompliant;
  }
  /**
   * Whether this ad is ssl compliant. This is a read-only field that is auto-generated when the ad
   * is inserted or updated.
   * @param sslCompliant sslCompliant or {@code null} for none
   */
  public Ad setSslCompliant(java.lang.Boolean sslCompliant) {
    this.sslCompliant = sslCompliant;
    return this;
  }
  /**
   * Whether this ad requires ssl. This is a read-only field that is auto-generated when the ad is
   * inserted or updated.
   * @return value or {@code null} for none
   */
  public java.lang.Boolean getSslRequired() {
    return sslRequired;
  }
  /**
   * Whether this ad requires ssl. This is a read-only field that is auto-generated when the ad is
   * inserted or updated.
   * @param sslRequired sslRequired or {@code null} for none
   */
  public Ad setSslRequired(java.lang.Boolean sslRequired) {
    this.sslRequired = sslRequired;
    return this;
  }
  /**
   * Start time of this ad. NOTE(review): no description in the generated source — confirm
   * semantics against the Campaign Manager 360 API reference.
   * @return value or {@code null} for none
   */
  public com.google.api.client.util.DateTime getStartTime() {
    return startTime;
  }
  /**
   * Start time of this ad. NOTE(review): no description in the generated source — confirm
   * semantics against the Campaign Manager 360 API reference.
   * @param startTime startTime or {@code null} for none
   */
  public Ad setStartTime(com.google.api.client.util.DateTime startTime) {
    this.startTime = startTime;
    return this;
  }
  /**
   * Subaccount ID of this ad. This is a read-only field that can be left blank.
   * @return value or {@code null} for none
   */
  public java.lang.Long getSubaccountId() {
    return subaccountId;
  }
  /**
   * Subaccount ID of this ad. This is a read-only field that can be left blank.
   * @param subaccountId subaccountId or {@code null} for none
   */
  public Ad setSubaccountId(java.lang.Long subaccountId) {
    this.subaccountId = subaccountId;
    return this;
  }
  /**
   * Targeting template ID, used to apply preconfigured targeting information to this ad. This
   * cannot be set while any of dayPartTargeting, geoTargeting, keyValueTargetingExpression,
   * languageTargeting, remarketingListExpression, or technologyTargeting are set. Applicable when
   * type is AD_SERVING_STANDARD_AD.
   * @return value or {@code null} for none
   */
  public java.lang.Long getTargetingTemplateId() {
    return targetingTemplateId;
  }
  /**
   * Targeting template ID, used to apply preconfigured targeting information to this ad. This
   * cannot be set while any of dayPartTargeting, geoTargeting, keyValueTargetingExpression,
   * languageTargeting, remarketingListExpression, or technologyTargeting are set. Applicable when
   * type is AD_SERVING_STANDARD_AD.
   * @param targetingTemplateId targetingTemplateId or {@code null} for none
   */
  public Ad setTargetingTemplateId(java.lang.Long targetingTemplateId) {
    this.targetingTemplateId = targetingTemplateId;
    return this;
  }
  /**
   * Technology platform targeting information for this ad. This field must be left blank if the ad
   * is using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
   * @return value or {@code null} for none
   */
  public TechnologyTargeting getTechnologyTargeting() {
    return technologyTargeting;
  }
  /**
   * Technology platform targeting information for this ad. This field must be left blank if the ad
   * is using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
   * @param technologyTargeting technologyTargeting or {@code null} for none
   */
  public Ad setTechnologyTargeting(TechnologyTargeting technologyTargeting) {
    this.technologyTargeting = technologyTargeting;
    return this;
  }
  /**
   * Type of ad. This is a required field on insertion. Note that default ads (
   * AD_SERVING_DEFAULT_AD) cannot be created directly (see Creative resource).
   * @return value or {@code null} for none
   */
  public java.lang.String getType() {
    return type;
  }
  /**
   * Type of ad. This is a required field on insertion. Note that default ads (
   * AD_SERVING_DEFAULT_AD) cannot be created directly (see Creative resource).
   * @param type type or {@code null} for none
   */
  public Ad setType(java.lang.String type) {
    this.type = type;
    return this;
  }
  @Override
  public Ad set(String fieldName, Object value) {
    return (Ad) super.set(fieldName, value);
  }
  @Override
  public Ad clone() {
    return (Ad) super.clone();
  }
}
/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.dfareporting.model;
/**
* Contains properties of a Campaign Manager ad.
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the Campaign Manager 360 API. For a detailed explanation
* see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public final class Ad extends com.google.api.client.json.GenericJson {
/**
* Account ID of this ad. This is a read-only field that can be left blank.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key @com.google.api.client.json.JsonString
private java.lang.Long accountId;
/**
* Whether this ad is active. When true, archived must be false.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean active;
/**
* Advertiser ID of this ad. This is a required field on insertion.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key @com.google.api.client.json.JsonString
private java.lang.Long advertiserId;
/**
* Dimension value for the ID of the advertiser. This is a read-only, auto-generated field.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private DimensionValue advertiserIdDimensionValue;
/**
* Whether this ad is archived. When true, active must be false.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean archived;
/**
* Audience segment ID that is being targeted for this ad. Applicable when type is
* AD_SERVING_STANDARD_AD.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key @com.google.api.client.json.JsonString
private java.lang.Long audienceSegmentId;
/**
* Campaign ID of this ad. This is a required field on insertion.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key @com.google.api.client.json.JsonString
private java.lang.Long campaignId;
/**
* Dimension value for the ID of the campaign. This is a read-only, auto-generated field.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private DimensionValue campaignIdDimensionValue;
/**
* Click-through URL for this ad. This is a required field on insertion. Applicable when type is
* AD_SERVING_CLICK_TRACKER.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private ClickThroughUrl clickThroughUrl;
/**
* Click-through URL suffix properties for this ad. Applies to the URL in the ad or (if overriding
* ad properties) the URL in the creative.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private ClickThroughUrlSuffixProperties clickThroughUrlSuffixProperties;
/**
* Comments for this ad.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String comments;
/**
* Compatibility of this ad. Applicable when type is AD_SERVING_DEFAULT_AD. DISPLAY and
* DISPLAY_INTERSTITIAL refer to either rendering on desktop or on mobile devices or in mobile
* apps for regular or interstitial ads, respectively. APP and APP_INTERSTITIAL are only used for
* existing default ads. New mobile placements must be assigned DISPLAY or DISPLAY_INTERSTITIAL
* and default ads created for those placements will be limited to those compatibility types.
* IN_STREAM_VIDEO refers to rendering in-stream video ads developed with the VAST standard.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String compatibility;
/**
* Information about the creation of this ad. This is a read-only field.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private LastModifiedInfo createInfo;
/**
* Creative group assignments for this ad. Applicable when type is AD_SERVING_CLICK_TRACKER. Only
* one assignment per creative group number is allowed for a maximum of two assignments.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<CreativeGroupAssignment> creativeGroupAssignments;
/**
* Creative rotation for this ad. Applicable when type is AD_SERVING_DEFAULT_AD,
* AD_SERVING_STANDARD_AD, or AD_SERVING_TRACKING. When type is AD_SERVING_DEFAULT_AD, this field
* should have exactly one creativeAssignment .
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private CreativeRotation creativeRotation;
/**
* Time and day targeting information for this ad. This field must be left blank if the ad is
* using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private DayPartTargeting dayPartTargeting;
/**
* Default click-through event tag properties for this ad.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private DefaultClickThroughEventTagProperties defaultClickThroughEventTagProperties;
/**
* Delivery schedule information for this ad. Applicable when type is AD_SERVING_STANDARD_AD or
* AD_SERVING_TRACKING. This field along with subfields priority and impressionRatio are required
* on insertion when type is AD_SERVING_STANDARD_AD.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private DeliverySchedule deliverySchedule;
/**
* Whether this ad is a dynamic click tracker. Applicable when type is AD_SERVING_CLICK_TRACKER.
* This is a required field on insert, and is read-only after insert.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean dynamicClickTracker;
/**
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private com.google.api.client.util.DateTime endTime;
/**
* Event tag overrides for this ad.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<EventTagOverride> eventTagOverrides;
/**
* Geographical targeting information for this ad. This field must be left blank if the ad is
* using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private GeoTargeting geoTargeting;
/**
* ID of this ad. This is a read-only, auto-generated field.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key @com.google.api.client.json.JsonString
private java.lang.Long id;
/**
* Dimension value for the ID of this ad. This is a read-only, auto-generated field.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private DimensionValue idDimensionValue;
/**
* Key-value targeting information for this ad. This field must be left blank if the ad is using a
* targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private KeyValueTargetingExpression keyValueTargetingExpression;
/**
* Identifies what kind of resource this is. Value: the fixed string "dfareporting#ad".
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String kind;
/**
* Language targeting information for this ad. This field must be left blank if the ad is using a
* targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private LanguageTargeting languageTargeting;
/**
* Information about the most recent modification of this ad. This is a read-only field.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private LastModifiedInfo lastModifiedInfo;
/**
* Name of this ad. This is a required field and must be less than 256 characters long.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String name;
/**
* Placement assignments for this ad.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<PlacementAssignment> placementAssignments;
/**
* Remarketing list targeting expression for this ad. This field must be left blank if the ad is
* using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private ListTargetingExpression remarketingListExpression;
/**
* Size of this ad. Applicable when type is AD_SERVING_DEFAULT_AD.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private Size size;
/**
* Whether this ad is ssl compliant. This is a read-only field that is auto-generated when the ad
* is inserted or updated.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean sslCompliant;
/**
* Whether this ad requires ssl. This is a read-only field that is auto-generated when the ad is
* inserted or updated.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean sslRequired;
/**
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private com.google.api.client.util.DateTime startTime;
/**
* Subaccount ID of this ad. This is a read-only field that can be left blank.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key @com.google.api.client.json.JsonString
private java.lang.Long subaccountId;
/**
* Targeting template ID, used to apply preconfigured targeting information to this ad. This
* cannot be set while any of dayPartTargeting, geoTargeting, keyValueTargetingExpression,
* languageTargeting, remarketingListExpression, or technologyTargeting are set. Applicable when
* type is AD_SERVING_STANDARD_AD.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key @com.google.api.client.json.JsonString
private java.lang.Long targetingTemplateId;
/**
* Technology platform targeting information for this ad. This field must be left blank if the ad
* is using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private TechnologyTargeting technologyTargeting;
/**
* Type of ad. This is a required field on insertion. Note that default ads (
* AD_SERVING_DEFAULT_AD) cannot be created directly (see Creative resource).
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String type;
/**
* Account ID of this ad. This is a read-only field that can be left blank.
* @return value or {@code null} for none
*/
public java.lang.Long getAccountId() {
return accountId;
}
/**
* Account ID of this ad. This is a read-only field that can be left blank.
* @param accountId accountId or {@code null} for none
*/
public Ad setAccountId(java.lang.Long accountId) {
this.accountId = accountId;
return this;
}
/**
* Whether this ad is active. When true, archived must be false.
* @return value or {@code null} for none
*/
public java.lang.Boolean getActive() {
return active;
}
/**
* Whether this ad is active. When true, archived must be false.
* @param active active or {@code null} for none
*/
public Ad setActive(java.lang.Boolean active) {
this.active = active;
return this;
}
/**
* Advertiser ID of this ad. This is a required field on insertion.
* @return value or {@code null} for none
*/
public java.lang.Long getAdvertiserId() {
return advertiserId;
}
/**
* Advertiser ID of this ad. This is a required field on insertion.
* @param advertiserId advertiserId or {@code null} for none
*/
public Ad setAdvertiserId(java.lang.Long advertiserId) {
this.advertiserId = advertiserId;
return this;
}
/**
* Dimension value for the ID of the advertiser. This is a read-only, auto-generated field.
* @return value or {@code null} for none
*/
public DimensionValue getAdvertiserIdDimensionValue() {
return advertiserIdDimensionValue;
}
/**
* Dimension value for the ID of the advertiser. This is a read-only, auto-generated field.
* @param advertiserIdDimensionValue advertiserIdDimensionValue or {@code null} for none
*/
public Ad setAdvertiserIdDimensionValue(DimensionValue advertiserIdDimensionValue) {
this.advertiserIdDimensionValue = advertiserIdDimensionValue;
return this;
}
/**
* Whether this ad is archived. When true, active must be false.
* @return value or {@code null} for none
*/
public java.lang.Boolean getArchived() {
return archived;
}
/**
* Whether this ad is archived. When true, active must be false.
* @param archived archived or {@code null} for none
*/
public Ad setArchived(java.lang.Boolean archived) {
this.archived = archived;
return this;
}
/**
* Audience segment ID that is being targeted for this ad. Applicable when type is
* AD_SERVING_STANDARD_AD.
* @return value or {@code null} for none
*/
public java.lang.Long getAudienceSegmentId() {
return audienceSegmentId;
}
/**
* Audience segment ID that is being targeted for this ad. Applicable when type is
* AD_SERVING_STANDARD_AD.
* @param audienceSegmentId audienceSegmentId or {@code null} for none
*/
public Ad setAudienceSegmentId(java.lang.Long audienceSegmentId) {
this.audienceSegmentId = audienceSegmentId;
return this;
}
/**
* Campaign ID of this ad. This is a required field on insertion.
* @return value or {@code null} for none
*/
public java.lang.Long getCampaignId() {
return campaignId;
}
/**
* Campaign ID of this ad. This is a required field on insertion.
* @param campaignId campaignId or {@code null} for none
*/
public Ad setCampaignId(java.lang.Long campaignId) {
this.campaignId = campaignId;
return this;
}
/**
* Dimension value for the ID of the campaign. This is a read-only, auto-generated field.
* @return value or {@code null} for none
*/
public DimensionValue getCampaignIdDimensionValue() {
return campaignIdDimensionValue;
}
/**
* Dimension value for the ID of the campaign. This is a read-only, auto-generated field.
* @param campaignIdDimensionValue campaignIdDimensionValue or {@code null} for none
*/
public Ad setCampaignIdDimensionValue(DimensionValue campaignIdDimensionValue) {
this.campaignIdDimensionValue = campaignIdDimensionValue;
return this;
}
/**
* Click-through URL for this ad. This is a required field on insertion. Applicable when type is
* AD_SERVING_CLICK_TRACKER.
* @return value or {@code null} for none
*/
public ClickThroughUrl getClickThroughUrl() {
return clickThroughUrl;
}
/**
* Click-through URL for this ad. This is a required field on insertion. Applicable when type is
* AD_SERVING_CLICK_TRACKER.
* @param clickThroughUrl clickThroughUrl or {@code null} for none
*/
public Ad setClickThroughUrl(ClickThroughUrl clickThroughUrl) {
this.clickThroughUrl = clickThroughUrl;
return this;
}
/**
* Click-through URL suffix properties for this ad. Applies to the URL in the ad or (if overriding
* ad properties) the URL in the creative.
* @return value or {@code null} for none
*/
public ClickThroughUrlSuffixProperties getClickThroughUrlSuffixProperties() {
return clickThroughUrlSuffixProperties;
}
/**
* Click-through URL suffix properties for this ad. Applies to the URL in the ad or (if overriding
* ad properties) the URL in the creative.
* @param clickThroughUrlSuffixProperties clickThroughUrlSuffixProperties or {@code null} for none
*/
public Ad setClickThroughUrlSuffixProperties(ClickThroughUrlSuffixProperties clickThroughUrlSuffixProperties) {
this.clickThroughUrlSuffixProperties = clickThroughUrlSuffixProperties;
return this;
}
/**
* Comments for this ad.
* @return value or {@code null} for none
*/
public java.lang.String getComments() {
return comments;
}
/**
* Comments for this ad.
* @param comments comments or {@code null} for none
*/
public Ad setComments(java.lang.String comments) {
this.comments = comments;
return this;
}
/**
* Compatibility of this ad. Applicable when type is AD_SERVING_DEFAULT_AD. DISPLAY and
* DISPLAY_INTERSTITIAL refer to either rendering on desktop or on mobile devices or in mobile
* apps for regular or interstitial ads, respectively. APP and APP_INTERSTITIAL are only used for
* existing default ads. New mobile placements must be assigned DISPLAY or DISPLAY_INTERSTITIAL
* and default ads created for those placements will be limited to those compatibility types.
* IN_STREAM_VIDEO refers to rendering in-stream video ads developed with the VAST standard.
* @return value or {@code null} for none
*/
public java.lang.String getCompatibility() {
return compatibility;
}
/**
* Compatibility of this ad. Applicable when type is AD_SERVING_DEFAULT_AD. DISPLAY and
* DISPLAY_INTERSTITIAL refer to either rendering on desktop or on mobile devices or in mobile
* apps for regular or interstitial ads, respectively. APP and APP_INTERSTITIAL are only used for
* existing default ads. New mobile placements must be assigned DISPLAY or DISPLAY_INTERSTITIAL
* and default ads created for those placements will be limited to those compatibility types.
* IN_STREAM_VIDEO refers to rendering in-stream video ads developed with the VAST standard.
* @param compatibility compatibility or {@code null} for none
*/
public Ad setCompatibility(java.lang.String compatibility) {
this.compatibility = compatibility;
return this;
}
/**
* Information about the creation of this ad. This is a read-only field.
* @return value or {@code null} for none
*/
public LastModifiedInfo getCreateInfo() {
return createInfo;
}
/**
* Information about the creation of this ad. This is a read-only field.
* @param createInfo createInfo or {@code null} for none
*/
public Ad setCreateInfo(LastModifiedInfo createInfo) {
this.createInfo = createInfo;
return this;
}
/**
* Creative group assignments for this ad. Applicable when type is AD_SERVING_CLICK_TRACKER. Only
* one assignment per creative group number is allowed for a maximum of two assignments.
* @return value or {@code null} for none
*/
public java.util.List<CreativeGroupAssignment> getCreativeGroupAssignments() {
return creativeGroupAssignments;
}
/**
* Creative group assignments for this ad. Applicable when type is AD_SERVING_CLICK_TRACKER. Only
* one assignment per creative group number is allowed for a maximum of two assignments.
* @param creativeGroupAssignments creativeGroupAssignments or {@code null} for none
*/
public Ad setCreativeGroupAssignments(java.util.List<CreativeGroupAssignment> creativeGroupAssignments) {
this.creativeGroupAssignments = creativeGroupAssignments;
return this;
}
/**
* Creative rotation for this ad. Applicable when type is AD_SERVING_DEFAULT_AD,
* AD_SERVING_STANDARD_AD, or AD_SERVING_TRACKING. When type is AD_SERVING_DEFAULT_AD, this field
* should have exactly one creativeAssignment .
* @return value or {@code null} for none
*/
public CreativeRotation getCreativeRotation() {
return creativeRotation;
}
/**
* Creative rotation for this ad. Applicable when type is AD_SERVING_DEFAULT_AD,
* AD_SERVING_STANDARD_AD, or AD_SERVING_TRACKING. When type is AD_SERVING_DEFAULT_AD, this field
* should have exactly one creativeAssignment .
* @param creativeRotation creativeRotation or {@code null} for none
*/
public Ad setCreativeRotation(CreativeRotation creativeRotation) {
this.creativeRotation = creativeRotation;
return this;
}
/**
* Time and day targeting information for this ad. This field must be left blank if the ad is
* using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* @return value or {@code null} for none
*/
public DayPartTargeting getDayPartTargeting() {
return dayPartTargeting;
}
/**
* Time and day targeting information for this ad. This field must be left blank if the ad is
* using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* @param dayPartTargeting dayPartTargeting or {@code null} for none
*/
public Ad setDayPartTargeting(DayPartTargeting dayPartTargeting) {
this.dayPartTargeting = dayPartTargeting;
return this;
}
/**
* Default click-through event tag properties for this ad.
* @return value or {@code null} for none
*/
public DefaultClickThroughEventTagProperties getDefaultClickThroughEventTagProperties() {
return defaultClickThroughEventTagProperties;
}
/**
* Default click-through event tag properties for this ad.
* @param defaultClickThroughEventTagProperties defaultClickThroughEventTagProperties or {@code null} for none
*/
public Ad setDefaultClickThroughEventTagProperties(DefaultClickThroughEventTagProperties defaultClickThroughEventTagProperties) {
this.defaultClickThroughEventTagProperties = defaultClickThroughEventTagProperties;
return this;
}
/**
* Delivery schedule information for this ad. Applicable when type is AD_SERVING_STANDARD_AD or
* AD_SERVING_TRACKING. This field along with subfields priority and impressionRatio are required
* on insertion when type is AD_SERVING_STANDARD_AD.
* @return value or {@code null} for none
*/
public DeliverySchedule getDeliverySchedule() {
return deliverySchedule;
}
/**
* Delivery schedule information for this ad. Applicable when type is AD_SERVING_STANDARD_AD or
* AD_SERVING_TRACKING. This field along with subfields priority and impressionRatio are required
* on insertion when type is AD_SERVING_STANDARD_AD.
* @param deliverySchedule deliverySchedule or {@code null} for none
*/
public Ad setDeliverySchedule(DeliverySchedule deliverySchedule) {
this.deliverySchedule = deliverySchedule;
return this;
}
/**
* Whether this ad is a dynamic click tracker. Applicable when type is AD_SERVING_CLICK_TRACKER.
* This is a required field on insert, and is read-only after insert.
* @return value or {@code null} for none
*/
public java.lang.Boolean getDynamicClickTracker() {
return dynamicClickTracker;
}
/**
* Whether this ad is a dynamic click tracker. Applicable when type is AD_SERVING_CLICK_TRACKER.
* This is a required field on insert, and is read-only after insert.
* @param dynamicClickTracker dynamicClickTracker or {@code null} for none
*/
public Ad setDynamicClickTracker(java.lang.Boolean dynamicClickTracker) {
this.dynamicClickTracker = dynamicClickTracker;
return this;
}
/**
* @return value or {@code null} for none
*/
public com.google.api.client.util.DateTime getEndTime() {
return endTime;
}
/**
* @param endTime endTime or {@code null} for none
*/
public Ad setEndTime(com.google.api.client.util.DateTime endTime) {
this.endTime = endTime;
return this;
}
/**
* Event tag overrides for this ad.
* @return value or {@code null} for none
*/
public java.util.List<EventTagOverride> getEventTagOverrides() {
return eventTagOverrides;
}
/**
* Event tag overrides for this ad.
* @param eventTagOverrides eventTagOverrides or {@code null} for none
*/
public Ad setEventTagOverrides(java.util.List<EventTagOverride> eventTagOverrides) {
this.eventTagOverrides = eventTagOverrides;
return this;
}
/**
* Geographical targeting information for this ad. This field must be left blank if the ad is
* using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* @return value or {@code null} for none
*/
public GeoTargeting getGeoTargeting() {
return geoTargeting;
}
/**
* Geographical targeting information for this ad. This field must be left blank if the ad is
* using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* @param geoTargeting geoTargeting or {@code null} for none
*/
public Ad setGeoTargeting(GeoTargeting geoTargeting) {
this.geoTargeting = geoTargeting;
return this;
}
/**
* ID of this ad. This is a read-only, auto-generated field.
* @return value or {@code null} for none
*/
public java.lang.Long getId() {
return id;
}
/**
* ID of this ad. This is a read-only, auto-generated field.
* @param id id or {@code null} for none
*/
public Ad setId(java.lang.Long id) {
this.id = id;
return this;
}
/**
* Dimension value for the ID of this ad. This is a read-only, auto-generated field.
* @return value or {@code null} for none
*/
public DimensionValue getIdDimensionValue() {
return idDimensionValue;
}
/**
* Dimension value for the ID of this ad. This is a read-only, auto-generated field.
* @param idDimensionValue idDimensionValue or {@code null} for none
*/
public Ad setIdDimensionValue(DimensionValue idDimensionValue) {
this.idDimensionValue = idDimensionValue;
return this;
}
/**
* Key-value targeting information for this ad. This field must be left blank if the ad is using a
* targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* @return value or {@code null} for none
*/
public KeyValueTargetingExpression getKeyValueTargetingExpression() {
return keyValueTargetingExpression;
}
/**
* Key-value targeting information for this ad. This field must be left blank if the ad is using a
* targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* @param keyValueTargetingExpression keyValueTargetingExpression or {@code null} for none
*/
public Ad setKeyValueTargetingExpression(KeyValueTargetingExpression keyValueTargetingExpression) {
this.keyValueTargetingExpression = keyValueTargetingExpression;
return this;
}
/**
* Identifies what kind of resource this is. Value: the fixed string "dfareporting#ad".
* @return value or {@code null} for none
*/
public java.lang.String getKind() {
return kind;
}
/**
* Identifies what kind of resource this is. Value: the fixed string "dfareporting#ad".
* @param kind kind or {@code null} for none
*/
public Ad setKind(java.lang.String kind) {
this.kind = kind;
return this;
}
/**
* Language targeting information for this ad. This field must be left blank if the ad is using a
* targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* @return value or {@code null} for none
*/
public LanguageTargeting getLanguageTargeting() {
return languageTargeting;
}
/**
* Language targeting information for this ad. This field must be left blank if the ad is using a
* targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* @param languageTargeting languageTargeting or {@code null} for none
*/
public Ad setLanguageTargeting(LanguageTargeting languageTargeting) {
this.languageTargeting = languageTargeting;
return this;
}
/**
* Information about the most recent modification of this ad. This is a read-only field.
* @return value or {@code null} for none
*/
public LastModifiedInfo getLastModifiedInfo() {
return lastModifiedInfo;
}
/**
* Information about the most recent modification of this ad. This is a read-only field.
* @param lastModifiedInfo lastModifiedInfo or {@code null} for none
*/
public Ad setLastModifiedInfo(LastModifiedInfo lastModifiedInfo) {
this.lastModifiedInfo = lastModifiedInfo;
return this;
}
/**
* Name of this ad. This is a required field and must be less than 256 characters long.
* @return value or {@code null} for none
*/
public java.lang.String getName() {
return name;
}
/**
* Name of this ad. This is a required field and must be less than 256 characters long.
* @param name name or {@code null} for none
*/
public Ad setName(java.lang.String name) {
this.name = name;
return this;
}
/**
* Placement assignments for this ad.
* @return value or {@code null} for none
*/
public java.util.List<PlacementAssignment> getPlacementAssignments() {
return placementAssignments;
}
/**
* Placement assignments for this ad.
* @param placementAssignments placementAssignments or {@code null} for none
*/
public Ad setPlacementAssignments(java.util.List<PlacementAssignment> placementAssignments) {
this.placementAssignments = placementAssignments;
return this;
}
/**
* Remarketing list targeting expression for this ad. This field must be left blank if the ad is
* using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* @return value or {@code null} for none
*/
public ListTargetingExpression getRemarketingListExpression() {
return remarketingListExpression;
}
/**
* Remarketing list targeting expression for this ad. This field must be left blank if the ad is
* using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* @param remarketingListExpression remarketingListExpression or {@code null} for none
*/
public Ad setRemarketingListExpression(ListTargetingExpression remarketingListExpression) {
this.remarketingListExpression = remarketingListExpression;
return this;
}
/**
* Size of this ad. Applicable when type is AD_SERVING_DEFAULT_AD.
* @return value or {@code null} for none
*/
public Size getSize() {
return size;
}
/**
* Size of this ad. Applicable when type is AD_SERVING_DEFAULT_AD.
* @param size size or {@code null} for none
*/
public Ad setSize(Size size) {
this.size = size;
return this;
}
/**
* Whether this ad is ssl compliant. This is a read-only field that is auto-generated when the ad
* is inserted or updated.
* @return value or {@code null} for none
*/
public java.lang.Boolean getSslCompliant() {
return sslCompliant;
}
/**
* Whether this ad is ssl compliant. This is a read-only field that is auto-generated when the ad
* is inserted or updated.
* @param sslCompliant sslCompliant or {@code null} for none
*/
public Ad setSslCompliant(java.lang.Boolean sslCompliant) {
this.sslCompliant = sslCompliant;
return this;
}
/**
* Whether this ad requires ssl. This is a read-only field that is auto-generated when the ad is
* inserted or updated.
* @return value or {@code null} for none
*/
public java.lang.Boolean getSslRequired() {
return sslRequired;
}
/**
* Whether this ad requires ssl. This is a read-only field that is auto-generated when the ad is
* inserted or updated.
* @param sslRequired sslRequired or {@code null} for none
*/
public Ad setSslRequired(java.lang.Boolean sslRequired) {
this.sslRequired = sslRequired;
return this;
}
/**
* @return value or {@code null} for none
*/
public com.google.api.client.util.DateTime getStartTime() {
return startTime;
}
/**
* @param startTime startTime or {@code null} for none
*/
public Ad setStartTime(com.google.api.client.util.DateTime startTime) {
this.startTime = startTime;
return this;
}
/**
* Subaccount ID of this ad. This is a read-only field that can be left blank.
* @return value or {@code null} for none
*/
public java.lang.Long getSubaccountId() {
return subaccountId;
}
/**
* Subaccount ID of this ad. This is a read-only field that can be left blank.
* @param subaccountId subaccountId or {@code null} for none
*/
public Ad setSubaccountId(java.lang.Long subaccountId) {
this.subaccountId = subaccountId;
return this;
}
/**
* Targeting template ID, used to apply preconfigured targeting information to this ad. This
* cannot be set while any of dayPartTargeting, geoTargeting, keyValueTargetingExpression,
* languageTargeting, remarketingListExpression, or technologyTargeting are set. Applicable when
* type is AD_SERVING_STANDARD_AD.
* @return value or {@code null} for none
*/
public java.lang.Long getTargetingTemplateId() {
return targetingTemplateId;
}
/**
* Targeting template ID, used to apply preconfigured targeting information to this ad. This
* cannot be set while any of dayPartTargeting, geoTargeting, keyValueTargetingExpression,
* languageTargeting, remarketingListExpression, or technologyTargeting are set. Applicable when
* type is AD_SERVING_STANDARD_AD.
* @param targetingTemplateId targetingTemplateId or {@code null} for none
*/
public Ad setTargetingTemplateId(java.lang.Long targetingTemplateId) {
this.targetingTemplateId = targetingTemplateId;
return this;
}
/**
* Technology platform targeting information for this ad. This field must be left blank if the ad
* is using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* @return value or {@code null} for none
*/
public TechnologyTargeting getTechnologyTargeting() {
return technologyTargeting;
}
/**
* Technology platform targeting information for this ad. This field must be left blank if the ad
* is using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* @param technologyTargeting technologyTargeting or {@code null} for none
*/
public Ad setTechnologyTargeting(TechnologyTargeting technologyTargeting) {
this.technologyTargeting = technologyTargeting;
return this;
}
/**
* Type of ad. This is a required field on insertion. Note that default ads (
* AD_SERVING_DEFAULT_AD) cannot be created directly (see Creative resource).
* @return value or {@code null} for none
*/
public java.lang.String getType() {
return type;
}
/**
* Type of ad. This is a required field on insertion. Note that default ads (
* AD_SERVING_DEFAULT_AD) cannot be created directly (see Creative resource).
* @param type type or {@code null} for none
*/
public Ad setType(java.lang.String type) {
this.type = type;
return this;
}
@Override
public Ad set(String fieldName, Object value) {
return (Ad) super.set(fieldName, value);
}
  @Override
  public Ad clone() {
    // Covariant override of the superclass clone; the cast is safe because
    // super.clone() returns an instance of this runtime class.
    return (Ad) super.clone();
  }
}
|
googleapis/google-cloud-java | 35,490 | java-compute/proto-google-cloud-compute-v1/src/main/java/com/google/cloud/compute/v1/GetIamPolicyReservationRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/compute/v1/compute.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.compute.v1;
/**
*
*
* <pre>
* A request message for Reservations.GetIamPolicy. See the method description for details.
* </pre>
*
* Protobuf type {@code google.cloud.compute.v1.GetIamPolicyReservationRequest}
*/
public final class GetIamPolicyReservationRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.compute.v1.GetIamPolicyReservationRequest)
GetIamPolicyReservationRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use GetIamPolicyReservationRequest.newBuilder() to construct.
private GetIamPolicyReservationRequest(
com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private GetIamPolicyReservationRequest() {
project_ = "";
resource_ = "";
zone_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new GetIamPolicyReservationRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_GetIamPolicyReservationRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_GetIamPolicyReservationRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.compute.v1.GetIamPolicyReservationRequest.class,
com.google.cloud.compute.v1.GetIamPolicyReservationRequest.Builder.class);
}
private int bitField0_;
public static final int OPTIONS_REQUESTED_POLICY_VERSION_FIELD_NUMBER = 499220029;
private int optionsRequestedPolicyVersion_ = 0;
/**
*
*
* <pre>
* Requested IAM Policy version.
* </pre>
*
* <code>optional int32 options_requested_policy_version = 499220029;</code>
*
* @return Whether the optionsRequestedPolicyVersion field is set.
*/
@java.lang.Override
public boolean hasOptionsRequestedPolicyVersion() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Requested IAM Policy version.
* </pre>
*
* <code>optional int32 options_requested_policy_version = 499220029;</code>
*
* @return The optionsRequestedPolicyVersion.
*/
@java.lang.Override
public int getOptionsRequestedPolicyVersion() {
return optionsRequestedPolicyVersion_;
}
public static final int PROJECT_FIELD_NUMBER = 227560217;
@SuppressWarnings("serial")
private volatile java.lang.Object project_ = "";
/**
*
*
* <pre>
* Project ID for this request.
* </pre>
*
* <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The project.
*/
@java.lang.Override
public java.lang.String getProject() {
java.lang.Object ref = project_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
project_ = s;
return s;
}
}
/**
*
*
* <pre>
* Project ID for this request.
* </pre>
*
* <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for project.
*/
@java.lang.Override
public com.google.protobuf.ByteString getProjectBytes() {
java.lang.Object ref = project_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
project_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int RESOURCE_FIELD_NUMBER = 195806222;
@SuppressWarnings("serial")
private volatile java.lang.Object resource_ = "";
/**
*
*
* <pre>
* Name or id of the resource for this request.
* </pre>
*
* <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The resource.
*/
@java.lang.Override
public java.lang.String getResource() {
java.lang.Object ref = resource_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
resource_ = s;
return s;
}
}
/**
*
*
* <pre>
* Name or id of the resource for this request.
* </pre>
*
* <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for resource.
*/
@java.lang.Override
public com.google.protobuf.ByteString getResourceBytes() {
java.lang.Object ref = resource_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
resource_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int ZONE_FIELD_NUMBER = 3744684;
@SuppressWarnings("serial")
private volatile java.lang.Object zone_ = "";
/**
*
*
* <pre>
* The name of the zone for this request.
* </pre>
*
* <code>string zone = 3744684 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The zone.
*/
@java.lang.Override
public java.lang.String getZone() {
java.lang.Object ref = zone_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
zone_ = s;
return s;
}
}
/**
*
*
* <pre>
* The name of the zone for this request.
* </pre>
*
* <code>string zone = 3744684 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for zone.
*/
@java.lang.Override
public com.google.protobuf.ByteString getZoneBytes() {
java.lang.Object ref = zone_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
zone_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
  // Tri-state initialization cache: -1 = not yet computed, 0 = not initialized,
  // 1 = initialized. All fields of this message are optional scalars/strings,
  // so the result is always true and is memoized on first call.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Fields are written in ascending field-number order:
    // zone (3744684) < resource (195806222) < project (227560217)
    // < options_requested_policy_version (499220029).
    // Proto3 string fields are emitted only when non-empty; the optional int32
    // is emitted only when its presence bit in bitField0_ is set.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(zone_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3744684, zone_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resource_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 195806222, resource_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(project_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 227560217, project_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeInt32(499220029, optionsRequestedPolicyVersion_);
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // Result is memoized in memoizedSize (inherited field, -1 = not computed).
    // The per-field conditions below must mirror writeTo() exactly so that the
    // computed size matches the bytes actually emitted.
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(zone_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3744684, zone_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resource_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(195806222, resource_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(project_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(227560217, project_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      size +=
          com.google.protobuf.CodedOutputStream.computeInt32Size(
              499220029, optionsRequestedPolicyVersion_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.compute.v1.GetIamPolicyReservationRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.compute.v1.GetIamPolicyReservationRequest other =
        (com.google.cloud.compute.v1.GetIamPolicyReservationRequest) obj;
    // The optional int32 is compared presence-first: both messages must agree on
    // whether the field is set, and only then on its value.
    if (hasOptionsRequestedPolicyVersion() != other.hasOptionsRequestedPolicyVersion())
      return false;
    if (hasOptionsRequestedPolicyVersion()) {
      if (getOptionsRequestedPolicyVersion() != other.getOptionsRequestedPolicyVersion())
        return false;
    }
    if (!getProject().equals(other.getProject())) return false;
    if (!getResource().equals(other.getResource())) return false;
    // Unknown fields participate in equality as well.
    if (!getZone().equals(other.getZone())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // Memoized (0 means "not yet computed"); safe because the message is immutable.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    // Standard protobuf-generated mixing: each set field contributes its field
    // number (x37) and value hash (x53), keeping hashCode consistent with equals.
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasOptionsRequestedPolicyVersion()) {
      hash = (37 * hash) + OPTIONS_REQUESTED_POLICY_VERSION_FIELD_NUMBER;
      hash = (53 * hash) + getOptionsRequestedPolicyVersion();
    }
    hash = (37 * hash) + PROJECT_FIELD_NUMBER;
    hash = (53 * hash) + getProject().hashCode();
    hash = (37 * hash) + RESOURCE_FIELD_NUMBER;
    hash = (53 * hash) + getResource().hashCode();
    hash = (37 * hash) + ZONE_FIELD_NUMBER;
    hash = (53 * hash) + getZone().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
public static com.google.cloud.compute.v1.GetIamPolicyReservationRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.compute.v1.GetIamPolicyReservationRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.compute.v1.GetIamPolicyReservationRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.compute.v1.GetIamPolicyReservationRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.compute.v1.GetIamPolicyReservationRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.compute.v1.GetIamPolicyReservationRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.compute.v1.GetIamPolicyReservationRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.compute.v1.GetIamPolicyReservationRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.compute.v1.GetIamPolicyReservationRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.compute.v1.GetIamPolicyReservationRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.compute.v1.GetIamPolicyReservationRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.compute.v1.GetIamPolicyReservationRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.compute.v1.GetIamPolicyReservationRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* A request message for Reservations.GetIamPolicy. See the method description for details.
* </pre>
*
* Protobuf type {@code google.cloud.compute.v1.GetIamPolicyReservationRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.compute.v1.GetIamPolicyReservationRequest)
com.google.cloud.compute.v1.GetIamPolicyReservationRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_GetIamPolicyReservationRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_GetIamPolicyReservationRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.compute.v1.GetIamPolicyReservationRequest.class,
com.google.cloud.compute.v1.GetIamPolicyReservationRequest.Builder.class);
}
// Construct using com.google.cloud.compute.v1.GetIamPolicyReservationRequest.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
optionsRequestedPolicyVersion_ = 0;
project_ = "";
resource_ = "";
zone_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_GetIamPolicyReservationRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.compute.v1.GetIamPolicyReservationRequest getDefaultInstanceForType() {
return com.google.cloud.compute.v1.GetIamPolicyReservationRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.compute.v1.GetIamPolicyReservationRequest build() {
com.google.cloud.compute.v1.GetIamPolicyReservationRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.compute.v1.GetIamPolicyReservationRequest buildPartial() {
com.google.cloud.compute.v1.GetIamPolicyReservationRequest result =
new com.google.cloud.compute.v1.GetIamPolicyReservationRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
    // Copies each builder field into the result message when the corresponding
    // builder-side bit in bitField0_ is set. Only the optional int32 field
    // (bit 0x1) carries a presence bit over to the message's own bitField0_;
    // the proto3 string fields have no message-side presence bit.
    private void buildPartial0(com.google.cloud.compute.v1.GetIamPolicyReservationRequest result) {
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.optionsRequestedPolicyVersion_ = optionsRequestedPolicyVersion_;
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.project_ = project_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.resource_ = resource_;
      }
      if (((from_bitField0_ & 0x00000008) != 0)) {
        result.zone_ = zone_;
      }
      result.bitField0_ |= to_bitField0_;
    }
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      // Fast path for same-type merges; anything else goes through the generic
      // descriptor-based merge in the superclass.
      if (other instanceof com.google.cloud.compute.v1.GetIamPolicyReservationRequest) {
        return mergeFrom((com.google.cloud.compute.v1.GetIamPolicyReservationRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Standard protobuf merge semantics: a field from `other` overwrites this
    // builder's value only when it is "set" — i.e. the optional int32 has its
    // presence bit, and the proto3 strings are non-empty in `other`.
    public Builder mergeFrom(com.google.cloud.compute.v1.GetIamPolicyReservationRequest other) {
      if (other == com.google.cloud.compute.v1.GetIamPolicyReservationRequest.getDefaultInstance())
        return this;
      if (other.hasOptionsRequestedPolicyVersion()) {
        setOptionsRequestedPolicyVersion(other.getOptionsRequestedPolicyVersion());
      }
      if (!other.getProject().isEmpty()) {
        project_ = other.project_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (!other.getResource().isEmpty()) {
        resource_ = other.resource_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      if (!other.getZone().isEmpty()) {
        zone_ = other.zone_;
        bitField0_ |= 0x00000008;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
@java.lang.Override
public final boolean isInitialized() {
return true;
}
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      // Wire-format decode loop. Each case constant is the full tag:
      // (field_number << 3) | wire_type. E.g. 29957474 = 3744684<<3 | 2 (zone,
      // length-delimited), 1566449778 = 195806222<<3 | 2 (resource),
      // 1820481738 = 227560217<<3 | 2 (project), and -301207064 is
      // 499220029<<3 | 0 (varint) wrapped around as a signed 32-bit int.
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 29957474:
              {
                zone_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000008;
                break;
              } // case 29957474
            case 1566449778:
              {
                resource_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000004;
                break;
              } // case 1566449778
            case 1820481738:
              {
                project_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 1820481738
            case -301207064:
              {
                optionsRequestedPolicyVersion_ = input.readInt32();
                bitField0_ |= 0x00000001;
                break;
              } // case -301207064
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        // Notify parent builders even when parsing fails partway through.
        onChanged();
      } // finally
      return this;
    }
private int bitField0_;
private int optionsRequestedPolicyVersion_;
/**
*
*
* <pre>
* Requested IAM Policy version.
* </pre>
*
* <code>optional int32 options_requested_policy_version = 499220029;</code>
*
* @return Whether the optionsRequestedPolicyVersion field is set.
*/
@java.lang.Override
public boolean hasOptionsRequestedPolicyVersion() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Requested IAM Policy version.
* </pre>
*
* <code>optional int32 options_requested_policy_version = 499220029;</code>
*
* @return The optionsRequestedPolicyVersion.
*/
@java.lang.Override
public int getOptionsRequestedPolicyVersion() {
return optionsRequestedPolicyVersion_;
}
/**
*
*
* <pre>
* Requested IAM Policy version.
* </pre>
*
* <code>optional int32 options_requested_policy_version = 499220029;</code>
*
* @param value The optionsRequestedPolicyVersion to set.
* @return This builder for chaining.
*/
public Builder setOptionsRequestedPolicyVersion(int value) {
optionsRequestedPolicyVersion_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Requested IAM Policy version.
* </pre>
*
* <code>optional int32 options_requested_policy_version = 499220029;</code>
*
* @return This builder for chaining.
*/
public Builder clearOptionsRequestedPolicyVersion() {
bitField0_ = (bitField0_ & ~0x00000001);
optionsRequestedPolicyVersion_ = 0;
onChanged();
return this;
}
private java.lang.Object project_ = "";
/**
*
*
* <pre>
* Project ID for this request.
* </pre>
*
* <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The project.
*/
public java.lang.String getProject() {
java.lang.Object ref = project_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
project_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Project ID for this request.
* </pre>
*
* <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for project.
*/
public com.google.protobuf.ByteString getProjectBytes() {
java.lang.Object ref = project_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
project_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Project ID for this request.
* </pre>
*
* <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The project to set.
* @return This builder for chaining.
*/
public Builder setProject(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
project_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Project ID for this request.
* </pre>
*
* <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return This builder for chaining.
*/
public Builder clearProject() {
project_ = getDefaultInstance().getProject();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* Project ID for this request.
* </pre>
*
* <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The bytes for project to set.
* @return This builder for chaining.
*/
public Builder setProjectBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
project_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private java.lang.Object resource_ = "";
/**
*
*
* <pre>
* Name or id of the resource for this request.
* </pre>
*
* <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The resource.
*/
public java.lang.String getResource() {
java.lang.Object ref = resource_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
resource_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Name or id of the resource for this request.
* </pre>
*
* <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for resource.
*/
public com.google.protobuf.ByteString getResourceBytes() {
java.lang.Object ref = resource_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
resource_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Name or id of the resource for this request.
* </pre>
*
* <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The resource to set.
* @return This builder for chaining.
*/
public Builder setResource(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
resource_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Name or id of the resource for this request.
* </pre>
*
* <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return This builder for chaining.
*/
public Builder clearResource() {
resource_ = getDefaultInstance().getResource();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
*
*
* <pre>
* Name or id of the resource for this request.
* </pre>
*
* <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The bytes for resource to set.
* @return This builder for chaining.
*/
public Builder setResourceBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
resource_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
private java.lang.Object zone_ = "";
/**
*
*
* <pre>
* The name of the zone for this request.
* </pre>
*
* <code>string zone = 3744684 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The zone.
*/
public java.lang.String getZone() {
java.lang.Object ref = zone_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
zone_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* The name of the zone for this request.
* </pre>
*
* <code>string zone = 3744684 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for zone.
*/
public com.google.protobuf.ByteString getZoneBytes() {
java.lang.Object ref = zone_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
zone_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* The name of the zone for this request.
* </pre>
*
* <code>string zone = 3744684 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The zone to set.
* @return This builder for chaining.
*/
public Builder setZone(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
zone_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
*
*
* <pre>
* The name of the zone for this request.
* </pre>
*
* <code>string zone = 3744684 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return This builder for chaining.
*/
public Builder clearZone() {
zone_ = getDefaultInstance().getZone();
bitField0_ = (bitField0_ & ~0x00000008);
onChanged();
return this;
}
/**
*
*
* <pre>
* The name of the zone for this request.
* </pre>
*
* <code>string zone = 3744684 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The bytes for zone to set.
* @return This builder for chaining.
*/
public Builder setZoneBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
zone_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.compute.v1.GetIamPolicyReservationRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.compute.v1.GetIamPolicyReservationRequest)
private static final com.google.cloud.compute.v1.GetIamPolicyReservationRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.compute.v1.GetIamPolicyReservationRequest();
}
public static com.google.cloud.compute.v1.GetIamPolicyReservationRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
  // Parser that delegates to the Builder's stream-based mergeFrom. On any
  // failure it attaches the partially-built message to the thrown
  // InvalidProtocolBufferException so callers can inspect what was decoded.
  private static final com.google.protobuf.Parser<GetIamPolicyReservationRequest> PARSER =
      new com.google.protobuf.AbstractParser<GetIamPolicyReservationRequest>() {
        @java.lang.Override
        public GetIamPolicyReservationRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
public static com.google.protobuf.Parser<GetIamPolicyReservationRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<GetIamPolicyReservationRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.compute.v1.GetIamPolicyReservationRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/compute/v1/compute.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.compute.v1;
/**
*
*
* <pre>
* A request message for StoragePools.GetIamPolicy. See the method description for details.
* </pre>
*
* Protobuf type {@code google.cloud.compute.v1.GetIamPolicyStoragePoolRequest}
*/
// NOTE(review): protoc-generated message class — code must stay byte-identical to generator
// output; only explanatory comments are added here. Regenerating from compute.proto will
// overwrite any hand edits.
public final class GetIamPolicyStoragePoolRequest extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.compute.v1.GetIamPolicyStoragePoolRequest)
    GetIamPolicyStoragePoolRequestOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use GetIamPolicyStoragePoolRequest.newBuilder() to construct.
  private GetIamPolicyStoragePoolRequest(
      com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private GetIamPolicyStoragePoolRequest() {
    project_ = "";
    resource_ = "";
    zone_ = "";
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new GetIamPolicyStoragePoolRequest();
  }
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.compute.v1.Compute
        .internal_static_google_cloud_compute_v1_GetIamPolicyStoragePoolRequest_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.compute.v1.Compute
        .internal_static_google_cloud_compute_v1_GetIamPolicyStoragePoolRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.compute.v1.GetIamPolicyStoragePoolRequest.class,
            com.google.cloud.compute.v1.GetIamPolicyStoragePoolRequest.Builder.class);
  }
  // Presence bitmask for optional fields: bit 0 (0x00000001) records whether
  // options_requested_policy_version was explicitly set.
  private int bitField0_;
  public static final int OPTIONS_REQUESTED_POLICY_VERSION_FIELD_NUMBER = 499220029;
  private int optionsRequestedPolicyVersion_ = 0;
  /**
   *
   *
   * <pre>
   * Requested IAM Policy version.
   * </pre>
   *
   * <code>optional int32 options_requested_policy_version = 499220029;</code>
   *
   * @return Whether the optionsRequestedPolicyVersion field is set.
   */
  @java.lang.Override
  public boolean hasOptionsRequestedPolicyVersion() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   *
   *
   * <pre>
   * Requested IAM Policy version.
   * </pre>
   *
   * <code>optional int32 options_requested_policy_version = 499220029;</code>
   *
   * @return The optionsRequestedPolicyVersion.
   */
  @java.lang.Override
  public int getOptionsRequestedPolicyVersion() {
    return optionsRequestedPolicyVersion_;
  }
  public static final int PROJECT_FIELD_NUMBER = 227560217;
  // String fields are stored as Object so they can lazily hold either a String or the raw
  // ByteString from the wire; the getters below convert and cache the decoded form.
  @SuppressWarnings("serial")
  private volatile java.lang.Object project_ = "";
  /**
   *
   *
   * <pre>
   * Project ID for this request.
   * </pre>
   *
   * <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The project.
   */
  @java.lang.Override
  public java.lang.String getProject() {
    java.lang.Object ref = project_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      project_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Project ID for this request.
   * </pre>
   *
   * <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The bytes for project.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getProjectBytes() {
    java.lang.Object ref = project_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      project_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int RESOURCE_FIELD_NUMBER = 195806222;
  @SuppressWarnings("serial")
  private volatile java.lang.Object resource_ = "";
  /**
   *
   *
   * <pre>
   * Name or id of the resource for this request.
   * </pre>
   *
   * <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The resource.
   */
  @java.lang.Override
  public java.lang.String getResource() {
    java.lang.Object ref = resource_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      resource_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Name or id of the resource for this request.
   * </pre>
   *
   * <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The bytes for resource.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getResourceBytes() {
    java.lang.Object ref = resource_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      resource_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int ZONE_FIELD_NUMBER = 3744684;
  @SuppressWarnings("serial")
  private volatile java.lang.Object zone_ = "";
  /**
   *
   *
   * <pre>
   * The name of the zone for this request.
   * </pre>
   *
   * <code>string zone = 3744684 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The zone.
   */
  @java.lang.Override
  public java.lang.String getZone() {
    java.lang.Object ref = zone_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      zone_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * The name of the zone for this request.
   * </pre>
   *
   * <code>string zone = 3744684 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The bytes for zone.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getZoneBytes() {
    java.lang.Object ref = zone_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      zone_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Memoized tri-state: -1 = not computed, 0 = not initialized, 1 = initialized.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  // Fields are written in ascending field-number order: zone (3744684), resource (195806222),
  // project (227560217), options_requested_policy_version (499220029).
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(zone_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3744684, zone_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resource_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 195806222, resource_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(project_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 227560217, project_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeInt32(499220029, optionsRequestedPolicyVersion_);
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // memoizedSize caches the result; -1 (inherited default) means "not yet computed".
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(zone_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3744684, zone_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resource_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(195806222, resource_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(project_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(227560217, project_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      size +=
          com.google.protobuf.CodedOutputStream.computeInt32Size(
              499220029, optionsRequestedPolicyVersion_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.compute.v1.GetIamPolicyStoragePoolRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.compute.v1.GetIamPolicyStoragePoolRequest other =
        (com.google.cloud.compute.v1.GetIamPolicyStoragePoolRequest) obj;
    // The optional field compares presence first, then value only when both are present.
    if (hasOptionsRequestedPolicyVersion() != other.hasOptionsRequestedPolicyVersion())
      return false;
    if (hasOptionsRequestedPolicyVersion()) {
      if (getOptionsRequestedPolicyVersion() != other.getOptionsRequestedPolicyVersion())
        return false;
    }
    if (!getProject().equals(other.getProject())) return false;
    if (!getResource().equals(other.getResource())) return false;
    if (!getZone().equals(other.getZone())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasOptionsRequestedPolicyVersion()) {
      hash = (37 * hash) + OPTIONS_REQUESTED_POLICY_VERSION_FIELD_NUMBER;
      hash = (53 * hash) + getOptionsRequestedPolicyVersion();
    }
    hash = (37 * hash) + PROJECT_FIELD_NUMBER;
    hash = (53 * hash) + getProject().hashCode();
    hash = (37 * hash) + RESOURCE_FIELD_NUMBER;
    hash = (53 * hash) + getResource().hashCode();
    hash = (37 * hash) + ZONE_FIELD_NUMBER;
    hash = (53 * hash) + getZone().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  public static com.google.cloud.compute.v1.GetIamPolicyStoragePoolRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.compute.v1.GetIamPolicyStoragePoolRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.compute.v1.GetIamPolicyStoragePoolRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.compute.v1.GetIamPolicyStoragePoolRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.compute.v1.GetIamPolicyStoragePoolRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.compute.v1.GetIamPolicyStoragePoolRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.compute.v1.GetIamPolicyStoragePoolRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.compute.v1.GetIamPolicyStoragePoolRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.compute.v1.GetIamPolicyStoragePoolRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.compute.v1.GetIamPolicyStoragePoolRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.compute.v1.GetIamPolicyStoragePoolRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.compute.v1.GetIamPolicyStoragePoolRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(
      com.google.cloud.compute.v1.GetIamPolicyStoragePoolRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * A request message for StoragePools.GetIamPolicy. See the method description for details.
   * </pre>
   *
   * Protobuf type {@code google.cloud.compute.v1.GetIamPolicyStoragePoolRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.compute.v1.GetIamPolicyStoragePoolRequest)
      com.google.cloud.compute.v1.GetIamPolicyStoragePoolRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.compute.v1.Compute
          .internal_static_google_cloud_compute_v1_GetIamPolicyStoragePoolRequest_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.compute.v1.Compute
          .internal_static_google_cloud_compute_v1_GetIamPolicyStoragePoolRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.compute.v1.GetIamPolicyStoragePoolRequest.class,
              com.google.cloud.compute.v1.GetIamPolicyStoragePoolRequest.Builder.class);
    }
    // Construct using com.google.cloud.compute.v1.GetIamPolicyStoragePoolRequest.newBuilder()
    private Builder() {}
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      optionsRequestedPolicyVersion_ = 0;
      project_ = "";
      resource_ = "";
      zone_ = "";
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.compute.v1.Compute
          .internal_static_google_cloud_compute_v1_GetIamPolicyStoragePoolRequest_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.compute.v1.GetIamPolicyStoragePoolRequest getDefaultInstanceForType() {
      return com.google.cloud.compute.v1.GetIamPolicyStoragePoolRequest.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.compute.v1.GetIamPolicyStoragePoolRequest build() {
      com.google.cloud.compute.v1.GetIamPolicyStoragePoolRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.compute.v1.GetIamPolicyStoragePoolRequest buildPartial() {
      com.google.cloud.compute.v1.GetIamPolicyStoragePoolRequest result =
          new com.google.cloud.compute.v1.GetIamPolicyStoragePoolRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Copies only the fields whose builder presence bits are set; message-side presence
    // (result.bitField0_) is maintained only for the optional int32 field (bit 0).
    private void buildPartial0(com.google.cloud.compute.v1.GetIamPolicyStoragePoolRequest result) {
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.optionsRequestedPolicyVersion_ = optionsRequestedPolicyVersion_;
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.project_ = project_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.resource_ = resource_;
      }
      if (((from_bitField0_ & 0x00000008) != 0)) {
        result.zone_ = zone_;
      }
      result.bitField0_ |= to_bitField0_;
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.compute.v1.GetIamPolicyStoragePoolRequest) {
        return mergeFrom((com.google.cloud.compute.v1.GetIamPolicyStoragePoolRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    public Builder mergeFrom(com.google.cloud.compute.v1.GetIamPolicyStoragePoolRequest other) {
      if (other == com.google.cloud.compute.v1.GetIamPolicyStoragePoolRequest.getDefaultInstance())
        return this;
      if (other.hasOptionsRequestedPolicyVersion()) {
        setOptionsRequestedPolicyVersion(other.getOptionsRequestedPolicyVersion());
      }
      if (!other.getProject().isEmpty()) {
        project_ = other.project_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (!other.getResource().isEmpty()) {
        resource_ = other.resource_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      if (!other.getZone().isEmpty()) {
        zone_ = other.zone_;
        bitField0_ |= 0x00000008;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Each case label is the full wire tag: (field_number << 3) | wire_type.
    // e.g. zone: 3744684<<3|2 = 29957474; options_requested_policy_version:
    // 499220029<<3|0 = 3993760232, which wraps to -301207064 as a signed int.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 29957474:
              {
                zone_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000008;
                break;
              } // case 29957474
            case 1566449778:
              {
                resource_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000004;
                break;
              } // case 1566449778
            case 1820481738:
              {
                project_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 1820481738
            case -301207064:
              {
                optionsRequestedPolicyVersion_ = input.readInt32();
                bitField0_ |= 0x00000001;
                break;
              } // case -301207064
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Builder presence bits: 0x1 options_requested_policy_version, 0x2 project,
    // 0x4 resource, 0x8 zone.
    private int bitField0_;
    private int optionsRequestedPolicyVersion_;
    /**
     *
     *
     * <pre>
     * Requested IAM Policy version.
     * </pre>
     *
     * <code>optional int32 options_requested_policy_version = 499220029;</code>
     *
     * @return Whether the optionsRequestedPolicyVersion field is set.
     */
    @java.lang.Override
    public boolean hasOptionsRequestedPolicyVersion() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     *
     *
     * <pre>
     * Requested IAM Policy version.
     * </pre>
     *
     * <code>optional int32 options_requested_policy_version = 499220029;</code>
     *
     * @return The optionsRequestedPolicyVersion.
     */
    @java.lang.Override
    public int getOptionsRequestedPolicyVersion() {
      return optionsRequestedPolicyVersion_;
    }
    /**
     *
     *
     * <pre>
     * Requested IAM Policy version.
     * </pre>
     *
     * <code>optional int32 options_requested_policy_version = 499220029;</code>
     *
     * @param value The optionsRequestedPolicyVersion to set.
     * @return This builder for chaining.
     */
    public Builder setOptionsRequestedPolicyVersion(int value) {
      optionsRequestedPolicyVersion_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Requested IAM Policy version.
     * </pre>
     *
     * <code>optional int32 options_requested_policy_version = 499220029;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearOptionsRequestedPolicyVersion() {
      bitField0_ = (bitField0_ & ~0x00000001);
      optionsRequestedPolicyVersion_ = 0;
      onChanged();
      return this;
    }
    private java.lang.Object project_ = "";
    /**
     *
     *
     * <pre>
     * Project ID for this request.
     * </pre>
     *
     * <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return The project.
     */
    public java.lang.String getProject() {
      java.lang.Object ref = project_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        project_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Project ID for this request.
     * </pre>
     *
     * <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return The bytes for project.
     */
    public com.google.protobuf.ByteString getProjectBytes() {
      java.lang.Object ref = project_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        project_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Project ID for this request.
     * </pre>
     *
     * <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @param value The project to set.
     * @return This builder for chaining.
     */
    public Builder setProject(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      project_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Project ID for this request.
     * </pre>
     *
     * <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearProject() {
      project_ = getDefaultInstance().getProject();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Project ID for this request.
     * </pre>
     *
     * <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @param value The bytes for project to set.
     * @return This builder for chaining.
     */
    public Builder setProjectBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      project_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    private java.lang.Object resource_ = "";
    /**
     *
     *
     * <pre>
     * Name or id of the resource for this request.
     * </pre>
     *
     * <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return The resource.
     */
    public java.lang.String getResource() {
      java.lang.Object ref = resource_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        resource_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Name or id of the resource for this request.
     * </pre>
     *
     * <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return The bytes for resource.
     */
    public com.google.protobuf.ByteString getResourceBytes() {
      java.lang.Object ref = resource_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        resource_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Name or id of the resource for this request.
     * </pre>
     *
     * <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @param value The resource to set.
     * @return This builder for chaining.
     */
    public Builder setResource(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      resource_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Name or id of the resource for this request.
     * </pre>
     *
     * <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearResource() {
      resource_ = getDefaultInstance().getResource();
      bitField0_ = (bitField0_ & ~0x00000004);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Name or id of the resource for this request.
     * </pre>
     *
     * <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @param value The bytes for resource to set.
     * @return This builder for chaining.
     */
    public Builder setResourceBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      resource_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    private java.lang.Object zone_ = "";
    /**
     *
     *
     * <pre>
     * The name of the zone for this request.
     * </pre>
     *
     * <code>string zone = 3744684 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return The zone.
     */
    public java.lang.String getZone() {
      java.lang.Object ref = zone_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        zone_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * The name of the zone for this request.
     * </pre>
     *
     * <code>string zone = 3744684 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return The bytes for zone.
     */
    public com.google.protobuf.ByteString getZoneBytes() {
      java.lang.Object ref = zone_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        zone_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * The name of the zone for this request.
     * </pre>
     *
     * <code>string zone = 3744684 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @param value The zone to set.
     * @return This builder for chaining.
     */
    public Builder setZone(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      zone_ = value;
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The name of the zone for this request.
     * </pre>
     *
     * <code>string zone = 3744684 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearZone() {
      zone_ = getDefaultInstance().getZone();
      bitField0_ = (bitField0_ & ~0x00000008);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The name of the zone for this request.
     * </pre>
     *
     * <code>string zone = 3744684 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @param value The bytes for zone to set.
     * @return This builder for chaining.
     */
    public Builder setZoneBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      zone_ = value;
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.cloud.compute.v1.GetIamPolicyStoragePoolRequest)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.compute.v1.GetIamPolicyStoragePoolRequest)
  private static final com.google.cloud.compute.v1.GetIamPolicyStoragePoolRequest DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.compute.v1.GetIamPolicyStoragePoolRequest();
  }
  public static com.google.cloud.compute.v1.GetIamPolicyStoragePoolRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Shared stateless parser; on failure it attaches the partially-built message to the
  // thrown InvalidProtocolBufferException via setUnfinishedMessage.
  private static final com.google.protobuf.Parser<GetIamPolicyStoragePoolRequest> PARSER =
      new com.google.protobuf.AbstractParser<GetIamPolicyStoragePoolRequest>() {
        @java.lang.Override
        public GetIamPolicyStoragePoolRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<GetIamPolicyStoragePoolRequest> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<GetIamPolicyStoragePoolRequest> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.compute.v1.GetIamPolicyStoragePoolRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleads/google-ads-java | 35,825 | google-ads-stubs-v19/src/main/java/com/google/ads/googleads/v19/services/SearchAudience.java | // Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v19/services/content_creator_insights_service.proto
// Protobuf Java Version: 3.25.7
package com.google.ads.googleads.v19.services;
/**
* <pre>
* A collection of audience attributes that describe an audience of viewers.
* This is used to search for topics trending for the defined audience.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v19.services.SearchAudience}
*/
public final class SearchAudience extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v19.services.SearchAudience)
SearchAudienceOrBuilder {
private static final long serialVersionUID = 0L;
// Use SearchAudience.newBuilder() to construct.
private SearchAudience(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private SearchAudience() {
audienceAttributes_ = java.util.Collections.emptyList();
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new SearchAudience();
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v19.services.ContentCreatorInsightsServiceProto.internal_static_google_ads_googleads_v19_services_SearchAudience_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v19.services.ContentCreatorInsightsServiceProto.internal_static_google_ads_googleads_v19_services_SearchAudience_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v19.services.SearchAudience.class, com.google.ads.googleads.v19.services.SearchAudience.Builder.class);
}
  public static final int AUDIENCE_ATTRIBUTES_FIELD_NUMBER = 1;
  @SuppressWarnings("serial")
  // Backing list for the repeated audience_attributes field (proto field 1).
  // Wrapped in Collections.unmodifiableList when built from the Builder's plain list.
  private java.util.List<com.google.ads.googleads.v19.common.AudienceInsightsAttribute> audienceAttributes_;
  /**
   * <pre>
   * Required. Audience attributes that describe an audience of viewers. This is
   * used to search for topics trending for the defined audience.
   * </pre>
   *
   * <code>repeated .google.ads.googleads.v19.common.AudienceInsightsAttribute audience_attributes = 1 [(.google.api.field_behavior) = REQUIRED];</code>
   */
  @java.lang.Override
  public java.util.List<com.google.ads.googleads.v19.common.AudienceInsightsAttribute> getAudienceAttributesList() {
    return audienceAttributes_;
  }
  /**
   * <pre>
   * Required. Audience attributes that describe an audience of viewers. This is
   * used to search for topics trending for the defined audience.
   * </pre>
   *
   * <code>repeated .google.ads.googleads.v19.common.AudienceInsightsAttribute audience_attributes = 1 [(.google.api.field_behavior) = REQUIRED];</code>
   */
  @java.lang.Override
  public java.util.List<? extends com.google.ads.googleads.v19.common.AudienceInsightsAttributeOrBuilder>
      getAudienceAttributesOrBuilderList() {
    return audienceAttributes_;
  }
  /**
   * <pre>
   * Required. Audience attributes that describe an audience of viewers. This is
   * used to search for topics trending for the defined audience.
   * </pre>
   *
   * <code>repeated .google.ads.googleads.v19.common.AudienceInsightsAttribute audience_attributes = 1 [(.google.api.field_behavior) = REQUIRED];</code>
   */
  @java.lang.Override
  public int getAudienceAttributesCount() {
    return audienceAttributes_.size();
  }
  /**
   * <pre>
   * Required. Audience attributes that describe an audience of viewers. This is
   * used to search for topics trending for the defined audience.
   * </pre>
   *
   * <code>repeated .google.ads.googleads.v19.common.AudienceInsightsAttribute audience_attributes = 1 [(.google.api.field_behavior) = REQUIRED];</code>
   */
  @java.lang.Override
  public com.google.ads.googleads.v19.common.AudienceInsightsAttribute getAudienceAttributes(int index) {
    return audienceAttributes_.get(index);
  }
  /**
   * <pre>
   * Required. Audience attributes that describe an audience of viewers. This is
   * used to search for topics trending for the defined audience.
   * </pre>
   *
   * <code>repeated .google.ads.googleads.v19.common.AudienceInsightsAttribute audience_attributes = 1 [(.google.api.field_behavior) = REQUIRED];</code>
   */
  @java.lang.Override
  public com.google.ads.googleads.v19.common.AudienceInsightsAttributeOrBuilder getAudienceAttributesOrBuilder(
      int index) {
    return audienceAttributes_.get(index);
  }
  // Tri-state cache for isInitialized(): -1 = not computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // This message has no proto2 required fields, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output)
      throws java.io.IOException {
    // Serialize each element of repeated field 1 (audience_attributes),
    // then any unknown fields preserved from parsing.
    for (int i = 0; i < audienceAttributes_.size(); i++) {
      output.writeMessage(1, audienceAttributes_.get(i));
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // memoizedSize uses -1 as the "not yet computed" sentinel.
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    for (int i = 0; i < audienceAttributes_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream
        .computeMessageSize(1, audienceAttributes_.get(i));
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.ads.googleads.v19.services.SearchAudience)) {
      return super.equals(obj);
    }
    // Value equality: the repeated field and the unknown-field set must match.
    com.google.ads.googleads.v19.services.SearchAudience other = (com.google.ads.googleads.v19.services.SearchAudience) obj;
    if (!getAudienceAttributesList()
        .equals(other.getAudienceAttributesList())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // 0 doubles as the "not yet computed" sentinel for the memoized hash.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    // Only fold in the repeated field when it is non-empty, so an empty
    // message hashes identically regardless of how it was constructed.
    if (getAudienceAttributesCount() > 0) {
      hash = (37 * hash) + AUDIENCE_ATTRIBUTES_FIELD_NUMBER;
      hash = (53 * hash) + getAudienceAttributesList().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points. Byte/ByteString/ByteBuffer overloads
  // delegate to PARSER directly; stream overloads go through the
  // GeneratedMessageV3 helpers, which translate IOExceptions appropriately.
  public static com.google.ads.googleads.v19.services.SearchAudience parseFrom(
      java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v19.services.SearchAudience parseFrom(
      java.nio.ByteBuffer data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v19.services.SearchAudience parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v19.services.SearchAudience parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v19.services.SearchAudience parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v19.services.SearchAudience parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v19.services.SearchAudience parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v19.services.SearchAudience parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  // Delimited variants read a leading varint length prefix before the message.
  public static com.google.ads.googleads.v19.services.SearchAudience parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v19.services.SearchAudience parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v19.services.SearchAudience parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v19.services.SearchAudience parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  // Builder factory plumbing generated for every message type.
  @java.lang.Override
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.ads.googleads.v19.services.SearchAudience prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // Avoid a needless mergeFrom when converting the default instance.
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * <pre>
   * A collection of audience attributes that describe an audience of viewers.
   * This is used to search for topics trending for the defined audience.
   * </pre>
   *
   * Protobuf type {@code google.ads.googleads.v19.services.SearchAudience}
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:google.ads.googleads.v19.services.SearchAudience)
      com.google.ads.googleads.v19.services.SearchAudienceOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return com.google.ads.googleads.v19.services.ContentCreatorInsightsServiceProto.internal_static_google_ads_googleads_v19_services_SearchAudience_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.ads.googleads.v19.services.ContentCreatorInsightsServiceProto.internal_static_google_ads_googleads_v19_services_SearchAudience_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.ads.googleads.v19.services.SearchAudience.class, com.google.ads.googleads.v19.services.SearchAudience.Builder.class);
    }
    // Construct using com.google.ads.googleads.v19.services.SearchAudience.newBuilder()
    private Builder() {
    }
    private Builder(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    @java.lang.Override
    public Builder clear() {
      // Resets the repeated field whichever representation (plain list or
      // field builder) is currently in use.
      super.clear();
      bitField0_ = 0;
      if (audienceAttributesBuilder_ == null) {
        audienceAttributes_ = java.util.Collections.emptyList();
      } else {
        audienceAttributes_ = null;
        audienceAttributesBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return com.google.ads.googleads.v19.services.ContentCreatorInsightsServiceProto.internal_static_google_ads_googleads_v19_services_SearchAudience_descriptor;
    }
    @java.lang.Override
    public com.google.ads.googleads.v19.services.SearchAudience getDefaultInstanceForType() {
      return com.google.ads.googleads.v19.services.SearchAudience.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.ads.googleads.v19.services.SearchAudience build() {
      com.google.ads.googleads.v19.services.SearchAudience result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.ads.googleads.v19.services.SearchAudience buildPartial() {
      com.google.ads.googleads.v19.services.SearchAudience result = new com.google.ads.googleads.v19.services.SearchAudience(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) { buildPartial0(result); }
      onBuilt();
      return result;
    }
    // Transfers the repeated field into the result. When built from the plain
    // list, the list is frozen (unmodifiable) and the mutability bit cleared so
    // later builder mutations copy-on-write rather than alias the message.
    private void buildPartialRepeatedFields(com.google.ads.googleads.v19.services.SearchAudience result) {
      if (audienceAttributesBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)) {
          audienceAttributes_ = java.util.Collections.unmodifiableList(audienceAttributes_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.audienceAttributes_ = audienceAttributes_;
      } else {
        result.audienceAttributes_ = audienceAttributesBuilder_.build();
      }
    }
    // No singular fields to copy for this message; kept for generator symmetry.
    private void buildPartial0(com.google.ads.googleads.v19.services.SearchAudience result) {
      int from_bitField0_ = bitField0_;
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(
        com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(
        com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.ads.googleads.v19.services.SearchAudience) {
        return mergeFrom((com.google.ads.googleads.v19.services.SearchAudience)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Merges all elements of other's repeated field into this builder,
    // adopting other's (immutable) list directly when ours is empty.
    public Builder mergeFrom(com.google.ads.googleads.v19.services.SearchAudience other) {
      if (other == com.google.ads.googleads.v19.services.SearchAudience.getDefaultInstance()) return this;
      if (audienceAttributesBuilder_ == null) {
        if (!other.audienceAttributes_.isEmpty()) {
          if (audienceAttributes_.isEmpty()) {
            audienceAttributes_ = other.audienceAttributes_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureAudienceAttributesIsMutable();
            audienceAttributes_.addAll(other.audienceAttributes_);
          }
          onChanged();
        }
      } else {
        if (!other.audienceAttributes_.isEmpty()) {
          if (audienceAttributesBuilder_.isEmpty()) {
            // Field builder is empty: dispose it and fall back to sharing the
            // other message's list (re-creating the builder only if forced).
            audienceAttributesBuilder_.dispose();
            audienceAttributesBuilder_ = null;
            audienceAttributes_ = other.audienceAttributes_;
            bitField0_ = (bitField0_ & ~0x00000001);
            audienceAttributesBuilder_ = 
              com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                 getAudienceAttributesFieldBuilder() : null;
          } else {
            audienceAttributesBuilder_.addAllMessages(other.audienceAttributes_);
          }
        }
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            // Tag 10 = field 1 (audience_attributes), length-delimited.
            case 10: {
              com.google.ads.googleads.v19.common.AudienceInsightsAttribute m =
                  input.readMessage(
                      com.google.ads.googleads.v19.common.AudienceInsightsAttribute.parser(),
                      extensionRegistry);
              if (audienceAttributesBuilder_ == null) {
                ensureAudienceAttributesIsMutable();
                audienceAttributes_.add(m);
              } else {
                audienceAttributesBuilder_.addMessage(m);
              }
              break;
            } // case 10
            default: {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Bit 0x00000001 tracks whether audienceAttributes_ is a locally-owned
    // mutable list (vs. a shared/immutable one).
    private int bitField0_;
    private java.util.List<com.google.ads.googleads.v19.common.AudienceInsightsAttribute> audienceAttributes_ =
      java.util.Collections.emptyList();
    // Copy-on-write: switch to a private ArrayList before the first local mutation.
    private void ensureAudienceAttributesIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        audienceAttributes_ = new java.util.ArrayList<com.google.ads.googleads.v19.common.AudienceInsightsAttribute>(audienceAttributes_);
        bitField0_ |= 0x00000001;
       }
    }
    private com.google.protobuf.RepeatedFieldBuilderV3<
        com.google.ads.googleads.v19.common.AudienceInsightsAttribute, com.google.ads.googleads.v19.common.AudienceInsightsAttribute.Builder, com.google.ads.googleads.v19.common.AudienceInsightsAttributeOrBuilder> audienceAttributesBuilder_;
    /**
     * <pre>
     * Required. Audience attributes that describe an audience of viewers. This is
     * used to search for topics trending for the defined audience.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v19.common.AudienceInsightsAttribute audience_attributes = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     */
    public java.util.List<com.google.ads.googleads.v19.common.AudienceInsightsAttribute> getAudienceAttributesList() {
      if (audienceAttributesBuilder_ == null) {
        return java.util.Collections.unmodifiableList(audienceAttributes_);
      } else {
        return audienceAttributesBuilder_.getMessageList();
      }
    }
    /**
     * <pre>
     * Required. Audience attributes that describe an audience of viewers. This is
     * used to search for topics trending for the defined audience.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v19.common.AudienceInsightsAttribute audience_attributes = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     */
    public int getAudienceAttributesCount() {
      if (audienceAttributesBuilder_ == null) {
        return audienceAttributes_.size();
      } else {
        return audienceAttributesBuilder_.getCount();
      }
    }
    /**
     * <pre>
     * Required. Audience attributes that describe an audience of viewers. This is
     * used to search for topics trending for the defined audience.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v19.common.AudienceInsightsAttribute audience_attributes = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     */
    public com.google.ads.googleads.v19.common.AudienceInsightsAttribute getAudienceAttributes(int index) {
      if (audienceAttributesBuilder_ == null) {
        return audienceAttributes_.get(index);
      } else {
        return audienceAttributesBuilder_.getMessage(index);
      }
    }
    /**
     * <pre>
     * Required. Audience attributes that describe an audience of viewers. This is
     * used to search for topics trending for the defined audience.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v19.common.AudienceInsightsAttribute audience_attributes = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     */
    public Builder setAudienceAttributes(
        int index, com.google.ads.googleads.v19.common.AudienceInsightsAttribute value) {
      if (audienceAttributesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureAudienceAttributesIsMutable();
        audienceAttributes_.set(index, value);
        onChanged();
      } else {
        audienceAttributesBuilder_.setMessage(index, value);
      }
      return this;
    }
    /**
     * <pre>
     * Required. Audience attributes that describe an audience of viewers. This is
     * used to search for topics trending for the defined audience.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v19.common.AudienceInsightsAttribute audience_attributes = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     */
    public Builder setAudienceAttributes(
        int index, com.google.ads.googleads.v19.common.AudienceInsightsAttribute.Builder builderForValue) {
      if (audienceAttributesBuilder_ == null) {
        ensureAudienceAttributesIsMutable();
        audienceAttributes_.set(index, builderForValue.build());
        onChanged();
      } else {
        audienceAttributesBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     * <pre>
     * Required. Audience attributes that describe an audience of viewers. This is
     * used to search for topics trending for the defined audience.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v19.common.AudienceInsightsAttribute audience_attributes = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     */
    public Builder addAudienceAttributes(com.google.ads.googleads.v19.common.AudienceInsightsAttribute value) {
      if (audienceAttributesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureAudienceAttributesIsMutable();
        audienceAttributes_.add(value);
        onChanged();
      } else {
        audienceAttributesBuilder_.addMessage(value);
      }
      return this;
    }
    /**
     * <pre>
     * Required. Audience attributes that describe an audience of viewers. This is
     * used to search for topics trending for the defined audience.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v19.common.AudienceInsightsAttribute audience_attributes = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     */
    public Builder addAudienceAttributes(
        int index, com.google.ads.googleads.v19.common.AudienceInsightsAttribute value) {
      if (audienceAttributesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureAudienceAttributesIsMutable();
        audienceAttributes_.add(index, value);
        onChanged();
      } else {
        audienceAttributesBuilder_.addMessage(index, value);
      }
      return this;
    }
    /**
     * <pre>
     * Required. Audience attributes that describe an audience of viewers. This is
     * used to search for topics trending for the defined audience.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v19.common.AudienceInsightsAttribute audience_attributes = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     */
    public Builder addAudienceAttributes(
        com.google.ads.googleads.v19.common.AudienceInsightsAttribute.Builder builderForValue) {
      if (audienceAttributesBuilder_ == null) {
        ensureAudienceAttributesIsMutable();
        audienceAttributes_.add(builderForValue.build());
        onChanged();
      } else {
        audienceAttributesBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }
    /**
     * <pre>
     * Required. Audience attributes that describe an audience of viewers. This is
     * used to search for topics trending for the defined audience.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v19.common.AudienceInsightsAttribute audience_attributes = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     */
    public Builder addAudienceAttributes(
        int index, com.google.ads.googleads.v19.common.AudienceInsightsAttribute.Builder builderForValue) {
      if (audienceAttributesBuilder_ == null) {
        ensureAudienceAttributesIsMutable();
        audienceAttributes_.add(index, builderForValue.build());
        onChanged();
      } else {
        audienceAttributesBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     * <pre>
     * Required. Audience attributes that describe an audience of viewers. This is
     * used to search for topics trending for the defined audience.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v19.common.AudienceInsightsAttribute audience_attributes = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     */
    public Builder addAllAudienceAttributes(
        java.lang.Iterable<? extends com.google.ads.googleads.v19.common.AudienceInsightsAttribute> values) {
      if (audienceAttributesBuilder_ == null) {
        ensureAudienceAttributesIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(
            values, audienceAttributes_);
        onChanged();
      } else {
        audienceAttributesBuilder_.addAllMessages(values);
      }
      return this;
    }
    /**
     * <pre>
     * Required. Audience attributes that describe an audience of viewers. This is
     * used to search for topics trending for the defined audience.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v19.common.AudienceInsightsAttribute audience_attributes = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     */
    public Builder clearAudienceAttributes() {
      if (audienceAttributesBuilder_ == null) {
        audienceAttributes_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        audienceAttributesBuilder_.clear();
      }
      return this;
    }
    /**
     * <pre>
     * Required. Audience attributes that describe an audience of viewers. This is
     * used to search for topics trending for the defined audience.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v19.common.AudienceInsightsAttribute audience_attributes = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     */
    public Builder removeAudienceAttributes(int index) {
      if (audienceAttributesBuilder_ == null) {
        ensureAudienceAttributesIsMutable();
        audienceAttributes_.remove(index);
        onChanged();
      } else {
        audienceAttributesBuilder_.remove(index);
      }
      return this;
    }
    /**
     * <pre>
     * Required. Audience attributes that describe an audience of viewers. This is
     * used to search for topics trending for the defined audience.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v19.common.AudienceInsightsAttribute audience_attributes = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     */
    public com.google.ads.googleads.v19.common.AudienceInsightsAttribute.Builder getAudienceAttributesBuilder(
        int index) {
      return getAudienceAttributesFieldBuilder().getBuilder(index);
    }
    /**
     * <pre>
     * Required. Audience attributes that describe an audience of viewers. This is
     * used to search for topics trending for the defined audience.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v19.common.AudienceInsightsAttribute audience_attributes = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     */
    public com.google.ads.googleads.v19.common.AudienceInsightsAttributeOrBuilder getAudienceAttributesOrBuilder(
        int index) {
      if (audienceAttributesBuilder_ == null) {
        return audienceAttributes_.get(index);  } else {
        return audienceAttributesBuilder_.getMessageOrBuilder(index);
      }
    }
    /**
     * <pre>
     * Required. Audience attributes that describe an audience of viewers. This is
     * used to search for topics trending for the defined audience.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v19.common.AudienceInsightsAttribute audience_attributes = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     */
    public java.util.List<? extends com.google.ads.googleads.v19.common.AudienceInsightsAttributeOrBuilder> 
         getAudienceAttributesOrBuilderList() {
      if (audienceAttributesBuilder_ != null) {
        return audienceAttributesBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(audienceAttributes_);
      }
    }
    /**
     * <pre>
     * Required. Audience attributes that describe an audience of viewers. This is
     * used to search for topics trending for the defined audience.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v19.common.AudienceInsightsAttribute audience_attributes = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     */
    public com.google.ads.googleads.v19.common.AudienceInsightsAttribute.Builder addAudienceAttributesBuilder() {
      return getAudienceAttributesFieldBuilder().addBuilder(
          com.google.ads.googleads.v19.common.AudienceInsightsAttribute.getDefaultInstance());
    }
    /**
     * <pre>
     * Required. Audience attributes that describe an audience of viewers. This is
     * used to search for topics trending for the defined audience.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v19.common.AudienceInsightsAttribute audience_attributes = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     */
    public com.google.ads.googleads.v19.common.AudienceInsightsAttribute.Builder addAudienceAttributesBuilder(
        int index) {
      return getAudienceAttributesFieldBuilder().addBuilder(
          index, com.google.ads.googleads.v19.common.AudienceInsightsAttribute.getDefaultInstance());
    }
    /**
     * <pre>
     * Required. Audience attributes that describe an audience of viewers. This is
     * used to search for topics trending for the defined audience.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v19.common.AudienceInsightsAttribute audience_attributes = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     */
    public java.util.List<com.google.ads.googleads.v19.common.AudienceInsightsAttribute.Builder> 
         getAudienceAttributesBuilderList() {
      return getAudienceAttributesFieldBuilder().getBuilderList();
    }
    // Lazily creates the RepeatedFieldBuilderV3; once created, the plain
    // audienceAttributes_ list is nulled and the builder owns the elements.
    private com.google.protobuf.RepeatedFieldBuilderV3<
        com.google.ads.googleads.v19.common.AudienceInsightsAttribute, com.google.ads.googleads.v19.common.AudienceInsightsAttribute.Builder, com.google.ads.googleads.v19.common.AudienceInsightsAttributeOrBuilder> 
        getAudienceAttributesFieldBuilder() {
      if (audienceAttributesBuilder_ == null) {
        audienceAttributesBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.ads.googleads.v19.common.AudienceInsightsAttribute, com.google.ads.googleads.v19.common.AudienceInsightsAttribute.Builder, com.google.ads.googleads.v19.common.AudienceInsightsAttributeOrBuilder>(
                audienceAttributes_,
                ((bitField0_ & 0x00000001) != 0),
                getParentForChildren(),
                isClean());
        audienceAttributes_ = null;
      }
      return audienceAttributesBuilder_;
    }
    @java.lang.Override
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.ads.googleads.v19.services.SearchAudience)
  }
  // @@protoc_insertion_point(class_scope:google.ads.googleads.v19.services.SearchAudience)
  // Singleton default instance: all fields unset / empty.
  private static final com.google.ads.googleads.v19.services.SearchAudience DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.ads.googleads.v19.services.SearchAudience();
  }
  public static com.google.ads.googleads.v19.services.SearchAudience getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  private static final com.google.protobuf.Parser<SearchAudience>
      PARSER = new com.google.protobuf.AbstractParser<SearchAudience>() {
    @java.lang.Override
    public SearchAudience parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      Builder builder = newBuilder();
      try {
        builder.mergeFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        // Attach whatever was parsed so far so callers can inspect partial data.
        throw e.setUnfinishedMessage(builder.buildPartial());
      } catch (com.google.protobuf.UninitializedMessageException e) {
        throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(e)
            .setUnfinishedMessage(builder.buildPartial());
      }
      return builder.buildPartial();
    }
  };

  public static com.google.protobuf.Parser<SearchAudience> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<SearchAudience> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.ads.googleads.v19.services.SearchAudience getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/ads/googleads/v20/services/content_creator_insights_service.proto
// Protobuf Java Version: 3.25.7
package com.google.ads.googleads.v20.services;
/**
* <pre>
* A collection of audience attributes that describe an audience of viewers.
* This is used to search for topics trending for the defined audience.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v20.services.SearchAudience}
*/
public final class SearchAudience extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v20.services.SearchAudience)
SearchAudienceOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use SearchAudience.newBuilder() to construct.
  private SearchAudience(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private SearchAudience() {
    audienceAttributes_ = java.util.Collections.emptyList();
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(
      UnusedPrivateParameter unused) {
    // Instantiation hook used by the protobuf runtime; the parameter only
    // disambiguates this overload and is never read.
    return new SearchAudience();
  }
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return com.google.ads.googleads.v20.services.ContentCreatorInsightsServiceProto.internal_static_google_ads_googleads_v20_services_SearchAudience_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    // Wires the message's descriptor table to reflective accessors for this
    // class and its Builder; used by the protobuf reflection API.
    return com.google.ads.googleads.v20.services.ContentCreatorInsightsServiceProto.internal_static_google_ads_googleads_v20_services_SearchAudience_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.ads.googleads.v20.services.SearchAudience.class, com.google.ads.googleads.v20.services.SearchAudience.Builder.class);
  }
  public static final int AUDIENCE_ATTRIBUTES_FIELD_NUMBER = 1;
  @SuppressWarnings("serial")
  // Backing list for the repeated audience_attributes field (proto field 1).
  private java.util.List<com.google.ads.googleads.v20.common.AudienceInsightsAttribute> audienceAttributes_;
  /**
   * <pre>
   * Required. Audience attributes that describe an audience of viewers. This is
   * used to search for topics trending for the defined audience.
   * </pre>
   *
   * <code>repeated .google.ads.googleads.v20.common.AudienceInsightsAttribute audience_attributes = 1 [(.google.api.field_behavior) = REQUIRED];</code>
   */
  @java.lang.Override
  public java.util.List<com.google.ads.googleads.v20.common.AudienceInsightsAttribute> getAudienceAttributesList() {
    return audienceAttributes_;
  }
  /**
   * <pre>
   * Required. Audience attributes that describe an audience of viewers. This is
   * used to search for topics trending for the defined audience.
   * </pre>
   *
   * <code>repeated .google.ads.googleads.v20.common.AudienceInsightsAttribute audience_attributes = 1 [(.google.api.field_behavior) = REQUIRED];</code>
   */
  @java.lang.Override
  public java.util.List<? extends com.google.ads.googleads.v20.common.AudienceInsightsAttributeOrBuilder>
      getAudienceAttributesOrBuilderList() {
    return audienceAttributes_;
  }
  /**
   * <pre>
   * Required. Audience attributes that describe an audience of viewers. This is
   * used to search for topics trending for the defined audience.
   * </pre>
   *
   * <code>repeated .google.ads.googleads.v20.common.AudienceInsightsAttribute audience_attributes = 1 [(.google.api.field_behavior) = REQUIRED];</code>
   */
  @java.lang.Override
  public int getAudienceAttributesCount() {
    return audienceAttributes_.size();
  }
  /**
   * <pre>
   * Required. Audience attributes that describe an audience of viewers. This is
   * used to search for topics trending for the defined audience.
   * </pre>
   *
   * <code>repeated .google.ads.googleads.v20.common.AudienceInsightsAttribute audience_attributes = 1 [(.google.api.field_behavior) = REQUIRED];</code>
   */
  @java.lang.Override
  public com.google.ads.googleads.v20.common.AudienceInsightsAttribute getAudienceAttributes(int index) {
    return audienceAttributes_.get(index);
  }
  /**
   * <pre>
   * Required. Audience attributes that describe an audience of viewers. This is
   * used to search for topics trending for the defined audience.
   * </pre>
   *
   * <code>repeated .google.ads.googleads.v20.common.AudienceInsightsAttribute audience_attributes = 1 [(.google.api.field_behavior) = REQUIRED];</code>
   */
  @java.lang.Override
  public com.google.ads.googleads.v20.common.AudienceInsightsAttributeOrBuilder getAudienceAttributesOrBuilder(
      int index) {
    return audienceAttributes_.get(index);
  }
  // Memoized result of isInitialized(): -1 = not yet computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No required proto2-style fields to validate, so this message is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  // Wire serialization: emits each audience attribute under tag 1, then any unknown fields.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    for (int i = 0; i < audienceAttributes_.size(); i++) {
      output.writeMessage(1, audienceAttributes_.get(i));
    }
    getUnknownFields().writeTo(output);
  }
  // Serialized size in bytes; memoized in `memoizedSize` (inherited field, -1 = uncomputed).
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    for (int i = 0; i < audienceAttributes_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream
        .computeMessageSize(1, audienceAttributes_.get(i));
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Value equality over the repeated field and unknown fields, per the protobuf equals contract.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
     return true;
    }
    if (!(obj instanceof com.google.ads.googleads.v20.services.SearchAudience)) {
      return super.equals(obj);
    }
    com.google.ads.googleads.v20.services.SearchAudience other = (com.google.ads.googleads.v20.services.SearchAudience) obj;

    if (!getAudienceAttributesList()
        .equals(other.getAudienceAttributesList())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Hash code consistent with equals(); memoized in `memoizedHashCode` (0 = uncomputed).
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getAudienceAttributesCount() > 0) {
      hash = (37 * hash) + AUDIENCE_ATTRIBUTES_FIELD_NUMBER;
      hash = (53 * hash) + getAudienceAttributesList().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // --- Standard generated static parse entry points. All delegate to PARSER; the
  // stream-based variants route through GeneratedMessageV3 helpers so IOExceptions
  // propagate unchanged instead of being wrapped. ---
  public static com.google.ads.googleads.v20.services.SearchAudience parseFrom(
      java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v20.services.SearchAudience parseFrom(
      java.nio.ByteBuffer data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v20.services.SearchAudience parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v20.services.SearchAudience parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v20.services.SearchAudience parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v20.services.SearchAudience parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v20.services.SearchAudience parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v20.services.SearchAudience parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  // Delimited variants read a varint length prefix before the message payload.
  public static com.google.ads.googleads.v20.services.SearchAudience parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v20.services.SearchAudience parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v20.services.SearchAudience parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v20.services.SearchAudience parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }

  // --- Builder factories. toBuilder() on the default instance returns a fresh Builder
  // to avoid an unnecessary mergeFrom of empty state. ---
  @java.lang.Override
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.ads.googleads.v20.services.SearchAudience prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  // Generated mutable builder for SearchAudience. Bit 0x00000001 of bitField0_ tracks
  // whether `audienceAttributes_` is a private mutable copy (set) or a shared/immutable
  // list (clear). Once getAudienceAttributesFieldBuilder() is called, the
  // RepeatedFieldBuilderV3 takes over ownership and `audienceAttributes_` is nulled.
  /**
   * <pre>
   * A collection of audience attributes that describe an audience of viewers.
   * This is used to search for topics trending for the defined audience.
   * </pre>
   *
   * Protobuf type {@code google.ads.googleads.v20.services.SearchAudience}
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:google.ads.googleads.v20.services.SearchAudience)
      com.google.ads.googleads.v20.services.SearchAudienceOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return com.google.ads.googleads.v20.services.ContentCreatorInsightsServiceProto.internal_static_google_ads_googleads_v20_services_SearchAudience_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.ads.googleads.v20.services.ContentCreatorInsightsServiceProto.internal_static_google_ads_googleads_v20_services_SearchAudience_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.ads.googleads.v20.services.SearchAudience.class, com.google.ads.googleads.v20.services.SearchAudience.Builder.class);
    }

    // Construct using com.google.ads.googleads.v20.services.SearchAudience.newBuilder()
    private Builder() {

    }

    private Builder(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);

    }
    // Resets the builder to the default (empty) state, clearing either the plain list
    // or the delegated field builder, whichever is active.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (audienceAttributesBuilder_ == null) {
        audienceAttributes_ = java.util.Collections.emptyList();
      } else {
        audienceAttributes_ = null;
        audienceAttributesBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return com.google.ads.googleads.v20.services.ContentCreatorInsightsServiceProto.internal_static_google_ads_googleads_v20_services_SearchAudience_descriptor;
    }

    @java.lang.Override
    public com.google.ads.googleads.v20.services.SearchAudience getDefaultInstanceForType() {
      return com.google.ads.googleads.v20.services.SearchAudience.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.ads.googleads.v20.services.SearchAudience build() {
      com.google.ads.googleads.v20.services.SearchAudience result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.ads.googleads.v20.services.SearchAudience buildPartial() {
      com.google.ads.googleads.v20.services.SearchAudience result = new com.google.ads.googleads.v20.services.SearchAudience(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) { buildPartial0(result); }
      onBuilt();
      return result;
    }

    // Freezes the repeated field into the result: wraps the local list as unmodifiable
    // (clearing the mutable bit so later edits copy-on-write), or builds from the delegate.
    private void buildPartialRepeatedFields(com.google.ads.googleads.v20.services.SearchAudience result) {
      if (audienceAttributesBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)) {
          audienceAttributes_ = java.util.Collections.unmodifiableList(audienceAttributes_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.audienceAttributes_ = audienceAttributes_;
      } else {
        result.audienceAttributes_ = audienceAttributesBuilder_.build();
      }
    }

    // No singular fields on this message; kept by the generator for structural uniformity.
    private void buildPartial0(com.google.ads.googleads.v20.services.SearchAudience result) {
      int from_bitField0_ = bitField0_;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(
        com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(
        com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.ads.googleads.v20.services.SearchAudience) {
        return mergeFrom((com.google.ads.googleads.v20.services.SearchAudience)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Appends the other message's attributes to this builder. When the local list is
    // still empty, the other message's immutable list is adopted by reference (copied
    // lazily on first mutation); otherwise the elements are added to the mutable copy.
    public Builder mergeFrom(com.google.ads.googleads.v20.services.SearchAudience other) {
      if (other == com.google.ads.googleads.v20.services.SearchAudience.getDefaultInstance()) return this;
      if (audienceAttributesBuilder_ == null) {
        if (!other.audienceAttributes_.isEmpty()) {
          if (audienceAttributes_.isEmpty()) {
            audienceAttributes_ = other.audienceAttributes_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureAudienceAttributesIsMutable();
            audienceAttributes_.addAll(other.audienceAttributes_);
          }
          onChanged();
        }
      } else {
        if (!other.audienceAttributes_.isEmpty()) {
          if (audienceAttributesBuilder_.isEmpty()) {
            audienceAttributesBuilder_.dispose();
            audienceAttributesBuilder_ = null;
            audienceAttributes_ = other.audienceAttributes_;
            bitField0_ = (bitField0_ & ~0x00000001);
            audienceAttributesBuilder_ =
              com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                 getAudienceAttributesFieldBuilder() : null;
          } else {
            audienceAttributesBuilder_.addAllMessages(other.audienceAttributes_);
          }
        }
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    // Streaming parse loop: tag 10 = (field 1, wire type LENGTH_DELIMITED), i.e. one
    // AudienceInsightsAttribute element; tag 0 = end of stream; anything else goes to
    // the unknown-field set (or terminates on an end-group tag).
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10: {
              com.google.ads.googleads.v20.common.AudienceInsightsAttribute m =
                  input.readMessage(
                      com.google.ads.googleads.v20.common.AudienceInsightsAttribute.parser(),
                      extensionRegistry);
              if (audienceAttributesBuilder_ == null) {
                ensureAudienceAttributesIsMutable();
                audienceAttributes_.add(m);
              } else {
                audienceAttributesBuilder_.addMessage(m);
              }
              break;
            } // case 10
            default: {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    private int bitField0_;

    private java.util.List<com.google.ads.googleads.v20.common.AudienceInsightsAttribute> audienceAttributes_ =
      java.util.Collections.emptyList();
    // Copy-on-write guard: replaces the shared/immutable list with a private ArrayList
    // the first time a mutation is requested, and marks bit 0 as "mutable".
    private void ensureAudienceAttributesIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        audienceAttributes_ = new java.util.ArrayList<com.google.ads.googleads.v20.common.AudienceInsightsAttribute>(audienceAttributes_);
        bitField0_ |= 0x00000001;
       }
    }

    private com.google.protobuf.RepeatedFieldBuilderV3<
        com.google.ads.googleads.v20.common.AudienceInsightsAttribute, com.google.ads.googleads.v20.common.AudienceInsightsAttribute.Builder, com.google.ads.googleads.v20.common.AudienceInsightsAttributeOrBuilder> audienceAttributesBuilder_;

    /**
     * <pre>
     * Required. Audience attributes that describe an audience of viewers. This is
     * used to search for topics trending for the defined audience.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v20.common.AudienceInsightsAttribute audience_attributes = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     */
    public java.util.List<com.google.ads.googleads.v20.common.AudienceInsightsAttribute> getAudienceAttributesList() {
      if (audienceAttributesBuilder_ == null) {
        return java.util.Collections.unmodifiableList(audienceAttributes_);
      } else {
        return audienceAttributesBuilder_.getMessageList();
      }
    }
    /**
     * <pre>
     * Required. Audience attributes that describe an audience of viewers. This is
     * used to search for topics trending for the defined audience.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v20.common.AudienceInsightsAttribute audience_attributes = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     */
    public int getAudienceAttributesCount() {
      if (audienceAttributesBuilder_ == null) {
        return audienceAttributes_.size();
      } else {
        return audienceAttributesBuilder_.getCount();
      }
    }
    /**
     * <pre>
     * Required. Audience attributes that describe an audience of viewers. This is
     * used to search for topics trending for the defined audience.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v20.common.AudienceInsightsAttribute audience_attributes = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     */
    public com.google.ads.googleads.v20.common.AudienceInsightsAttribute getAudienceAttributes(int index) {
      if (audienceAttributesBuilder_ == null) {
        return audienceAttributes_.get(index);
      } else {
        return audienceAttributesBuilder_.getMessage(index);
      }
    }
    /**
     * <pre>
     * Required. Audience attributes that describe an audience of viewers. This is
     * used to search for topics trending for the defined audience.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v20.common.AudienceInsightsAttribute audience_attributes = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     */
    public Builder setAudienceAttributes(
        int index, com.google.ads.googleads.v20.common.AudienceInsightsAttribute value) {
      if (audienceAttributesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureAudienceAttributesIsMutable();
        audienceAttributes_.set(index, value);
        onChanged();
      } else {
        audienceAttributesBuilder_.setMessage(index, value);
      }
      return this;
    }
    /**
     * <pre>
     * Required. Audience attributes that describe an audience of viewers. This is
     * used to search for topics trending for the defined audience.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v20.common.AudienceInsightsAttribute audience_attributes = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     */
    public Builder setAudienceAttributes(
        int index, com.google.ads.googleads.v20.common.AudienceInsightsAttribute.Builder builderForValue) {
      if (audienceAttributesBuilder_ == null) {
        ensureAudienceAttributesIsMutable();
        audienceAttributes_.set(index, builderForValue.build());
        onChanged();
      } else {
        audienceAttributesBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     * <pre>
     * Required. Audience attributes that describe an audience of viewers. This is
     * used to search for topics trending for the defined audience.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v20.common.AudienceInsightsAttribute audience_attributes = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     */
    public Builder addAudienceAttributes(com.google.ads.googleads.v20.common.AudienceInsightsAttribute value) {
      if (audienceAttributesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureAudienceAttributesIsMutable();
        audienceAttributes_.add(value);
        onChanged();
      } else {
        audienceAttributesBuilder_.addMessage(value);
      }
      return this;
    }
    /**
     * <pre>
     * Required. Audience attributes that describe an audience of viewers. This is
     * used to search for topics trending for the defined audience.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v20.common.AudienceInsightsAttribute audience_attributes = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     */
    public Builder addAudienceAttributes(
        int index, com.google.ads.googleads.v20.common.AudienceInsightsAttribute value) {
      if (audienceAttributesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureAudienceAttributesIsMutable();
        audienceAttributes_.add(index, value);
        onChanged();
      } else {
        audienceAttributesBuilder_.addMessage(index, value);
      }
      return this;
    }
    /**
     * <pre>
     * Required. Audience attributes that describe an audience of viewers. This is
     * used to search for topics trending for the defined audience.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v20.common.AudienceInsightsAttribute audience_attributes = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     */
    public Builder addAudienceAttributes(
        com.google.ads.googleads.v20.common.AudienceInsightsAttribute.Builder builderForValue) {
      if (audienceAttributesBuilder_ == null) {
        ensureAudienceAttributesIsMutable();
        audienceAttributes_.add(builderForValue.build());
        onChanged();
      } else {
        audienceAttributesBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }
    /**
     * <pre>
     * Required. Audience attributes that describe an audience of viewers. This is
     * used to search for topics trending for the defined audience.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v20.common.AudienceInsightsAttribute audience_attributes = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     */
    public Builder addAudienceAttributes(
        int index, com.google.ads.googleads.v20.common.AudienceInsightsAttribute.Builder builderForValue) {
      if (audienceAttributesBuilder_ == null) {
        ensureAudienceAttributesIsMutable();
        audienceAttributes_.add(index, builderForValue.build());
        onChanged();
      } else {
        audienceAttributesBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     * <pre>
     * Required. Audience attributes that describe an audience of viewers. This is
     * used to search for topics trending for the defined audience.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v20.common.AudienceInsightsAttribute audience_attributes = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     */
    public Builder addAllAudienceAttributes(
        java.lang.Iterable<? extends com.google.ads.googleads.v20.common.AudienceInsightsAttribute> values) {
      if (audienceAttributesBuilder_ == null) {
        ensureAudienceAttributesIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(
            values, audienceAttributes_);
        onChanged();
      } else {
        audienceAttributesBuilder_.addAllMessages(values);
      }
      return this;
    }
    /**
     * <pre>
     * Required. Audience attributes that describe an audience of viewers. This is
     * used to search for topics trending for the defined audience.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v20.common.AudienceInsightsAttribute audience_attributes = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     */
    public Builder clearAudienceAttributes() {
      if (audienceAttributesBuilder_ == null) {
        audienceAttributes_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        audienceAttributesBuilder_.clear();
      }
      return this;
    }
    /**
     * <pre>
     * Required. Audience attributes that describe an audience of viewers. This is
     * used to search for topics trending for the defined audience.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v20.common.AudienceInsightsAttribute audience_attributes = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     */
    public Builder removeAudienceAttributes(int index) {
      if (audienceAttributesBuilder_ == null) {
        ensureAudienceAttributesIsMutable();
        audienceAttributes_.remove(index);
        onChanged();
      } else {
        audienceAttributesBuilder_.remove(index);
      }
      return this;
    }
    /**
     * <pre>
     * Required. Audience attributes that describe an audience of viewers. This is
     * used to search for topics trending for the defined audience.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v20.common.AudienceInsightsAttribute audience_attributes = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     */
    public com.google.ads.googleads.v20.common.AudienceInsightsAttribute.Builder getAudienceAttributesBuilder(
        int index) {
      return getAudienceAttributesFieldBuilder().getBuilder(index);
    }
    /**
     * <pre>
     * Required. Audience attributes that describe an audience of viewers. This is
     * used to search for topics trending for the defined audience.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v20.common.AudienceInsightsAttribute audience_attributes = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     */
    public com.google.ads.googleads.v20.common.AudienceInsightsAttributeOrBuilder getAudienceAttributesOrBuilder(
        int index) {
      if (audienceAttributesBuilder_ == null) {
        return audienceAttributes_.get(index);  } else {
        return audienceAttributesBuilder_.getMessageOrBuilder(index);
      }
    }
    /**
     * <pre>
     * Required. Audience attributes that describe an audience of viewers. This is
     * used to search for topics trending for the defined audience.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v20.common.AudienceInsightsAttribute audience_attributes = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     */
    public java.util.List<? extends com.google.ads.googleads.v20.common.AudienceInsightsAttributeOrBuilder>
         getAudienceAttributesOrBuilderList() {
      if (audienceAttributesBuilder_ != null) {
        return audienceAttributesBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(audienceAttributes_);
      }
    }
    /**
     * <pre>
     * Required. Audience attributes that describe an audience of viewers. This is
     * used to search for topics trending for the defined audience.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v20.common.AudienceInsightsAttribute audience_attributes = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     */
    public com.google.ads.googleads.v20.common.AudienceInsightsAttribute.Builder addAudienceAttributesBuilder() {
      return getAudienceAttributesFieldBuilder().addBuilder(
          com.google.ads.googleads.v20.common.AudienceInsightsAttribute.getDefaultInstance());
    }
    /**
     * <pre>
     * Required. Audience attributes that describe an audience of viewers. This is
     * used to search for topics trending for the defined audience.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v20.common.AudienceInsightsAttribute audience_attributes = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     */
    public com.google.ads.googleads.v20.common.AudienceInsightsAttribute.Builder addAudienceAttributesBuilder(
        int index) {
      return getAudienceAttributesFieldBuilder().addBuilder(
          index, com.google.ads.googleads.v20.common.AudienceInsightsAttribute.getDefaultInstance());
    }
    /**
     * <pre>
     * Required. Audience attributes that describe an audience of viewers. This is
     * used to search for topics trending for the defined audience.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v20.common.AudienceInsightsAttribute audience_attributes = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     */
    public java.util.List<com.google.ads.googleads.v20.common.AudienceInsightsAttribute.Builder>
         getAudienceAttributesBuilderList() {
      return getAudienceAttributesFieldBuilder().getBuilderList();
    }
    // Lazily creates the delegated RepeatedFieldBuilderV3; after creation, the plain
    // list reference is released and all access goes through the builder.
    private com.google.protobuf.RepeatedFieldBuilderV3<
        com.google.ads.googleads.v20.common.AudienceInsightsAttribute, com.google.ads.googleads.v20.common.AudienceInsightsAttribute.Builder, com.google.ads.googleads.v20.common.AudienceInsightsAttributeOrBuilder>
        getAudienceAttributesFieldBuilder() {
      if (audienceAttributesBuilder_ == null) {
        audienceAttributesBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.ads.googleads.v20.common.AudienceInsightsAttribute, com.google.ads.googleads.v20.common.AudienceInsightsAttribute.Builder, com.google.ads.googleads.v20.common.AudienceInsightsAttributeOrBuilder>(
                audienceAttributes_,
                ((bitField0_ & 0x00000001) != 0),
                getParentForChildren(),
                isClean());
        audienceAttributes_ = null;
      }
      return audienceAttributesBuilder_;
    }
    @java.lang.Override
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }


    // @@protoc_insertion_point(builder_scope:google.ads.googleads.v20.services.SearchAudience)
  }
  // @@protoc_insertion_point(class_scope:google.ads.googleads.v20.services.SearchAudience)
  // Singleton empty instance shared by getDefaultInstance()/getDefaultInstanceForType().
  private static final com.google.ads.googleads.v20.services.SearchAudience DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.ads.googleads.v20.services.SearchAudience();
  }

  public static com.google.ads.googleads.v20.services.SearchAudience getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Parser implementation: delegates to the Builder's mergeFrom and normalizes all
  // failure modes into InvalidProtocolBufferException carrying the partial message.
  private static final com.google.protobuf.Parser<SearchAudience>
      PARSER = new com.google.protobuf.AbstractParser<SearchAudience>() {
    @java.lang.Override
    public SearchAudience parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      Builder builder = newBuilder();
      try {
        builder.mergeFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(builder.buildPartial());
      } catch (com.google.protobuf.UninitializedMessageException e) {
        throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(e)
            .setUnfinishedMessage(builder.buildPartial());
      }
      return builder.buildPartial();
    }
  };

  public static com.google.protobuf.Parser<SearchAudience> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<SearchAudience> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.ads.googleads.v20.services.SearchAudience getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleads/google-ads-java | 35,825 | google-ads-stubs-v21/src/main/java/com/google/ads/googleads/v21/services/SearchAudience.java | // Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v21/services/content_creator_insights_service.proto
// Protobuf Java Version: 3.25.7
package com.google.ads.googleads.v21.services;
/**
* <pre>
* A collection of audience attributes that describe an audience of viewers.
* This is used to search for topics trending for the defined audience.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v21.services.SearchAudience}
*/
public final class SearchAudience extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v21.services.SearchAudience)
SearchAudienceOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use SearchAudience.newBuilder() to construct.
  private SearchAudience(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Default constructor used only for DEFAULT_INSTANCE; the repeated field starts empty.
  private SearchAudience() {
    audienceAttributes_ = java.util.Collections.emptyList();
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(
      UnusedPrivateParameter unused) {
    return new SearchAudience();
  }
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return com.google.ads.googleads.v21.services.ContentCreatorInsightsServiceProto.internal_static_google_ads_googleads_v21_services_SearchAudience_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.ads.googleads.v21.services.ContentCreatorInsightsServiceProto.internal_static_google_ads_googleads_v21_services_SearchAudience_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.ads.googleads.v21.services.SearchAudience.class, com.google.ads.googleads.v21.services.SearchAudience.Builder.class);
  }

  public static final int AUDIENCE_ATTRIBUTES_FIELD_NUMBER = 1;
  // Backing storage for repeated field #1; immutable once the message is built.
  @SuppressWarnings("serial")
  private java.util.List<com.google.ads.googleads.v21.common.AudienceInsightsAttribute> audienceAttributes_;
  // --- Generated read accessors for `audience_attributes` (v21 copy of the v20 class above). ---
  /**
   * <pre>
   * Required. Audience attributes that describe an audience of viewers. This is
   * used to search for topics trending for the defined audience.
   * </pre>
   *
   * <code>repeated .google.ads.googleads.v21.common.AudienceInsightsAttribute audience_attributes = 1 [(.google.api.field_behavior) = REQUIRED];</code>
   */
  @java.lang.Override
  public java.util.List<com.google.ads.googleads.v21.common.AudienceInsightsAttribute> getAudienceAttributesList() {
    return audienceAttributes_;
  }
  /**
   * <pre>
   * Required. Audience attributes that describe an audience of viewers. This is
   * used to search for topics trending for the defined audience.
   * </pre>
   *
   * <code>repeated .google.ads.googleads.v21.common.AudienceInsightsAttribute audience_attributes = 1 [(.google.api.field_behavior) = REQUIRED];</code>
   */
  @java.lang.Override
  public java.util.List<? extends com.google.ads.googleads.v21.common.AudienceInsightsAttributeOrBuilder>
      getAudienceAttributesOrBuilderList() {
    return audienceAttributes_;
  }
  /**
   * <pre>
   * Required. Audience attributes that describe an audience of viewers. This is
   * used to search for topics trending for the defined audience.
   * </pre>
   *
   * <code>repeated .google.ads.googleads.v21.common.AudienceInsightsAttribute audience_attributes = 1 [(.google.api.field_behavior) = REQUIRED];</code>
   */
  @java.lang.Override
  public int getAudienceAttributesCount() {
    return audienceAttributes_.size();
  }
  /**
   * <pre>
   * Required. Audience attributes that describe an audience of viewers. This is
   * used to search for topics trending for the defined audience.
   * </pre>
   *
   * <code>repeated .google.ads.googleads.v21.common.AudienceInsightsAttribute audience_attributes = 1 [(.google.api.field_behavior) = REQUIRED];</code>
   */
  @java.lang.Override
  public com.google.ads.googleads.v21.common.AudienceInsightsAttribute getAudienceAttributes(int index) {
    return audienceAttributes_.get(index);
  }
  /**
   * <pre>
   * Required. Audience attributes that describe an audience of viewers. This is
   * used to search for topics trending for the defined audience.
   * </pre>
   *
   * <code>repeated .google.ads.googleads.v21.common.AudienceInsightsAttribute audience_attributes = 1 [(.google.api.field_behavior) = REQUIRED];</code>
   */
  @java.lang.Override
  public com.google.ads.googleads.v21.common.AudienceInsightsAttributeOrBuilder getAudienceAttributesOrBuilder(
      int index) {
    return audienceAttributes_.get(index);
  }
  // Tri-state init cache: -1 = not yet computed, 0 = known not initialized,
  // 1 = known initialized. Benign data race: recomputation is idempotent.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No required proto2 fields in this message, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes each audience_attributes element under field number 1, then
  // any unknown fields preserved from parsing.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    for (int i = 0; i < audienceAttributes_.size(); i++) {
      output.writeMessage(1, audienceAttributes_.get(i));
    }
    getUnknownFields().writeTo(output);
  }
  // Computes and memoizes the wire size; memoizedSize == -1 means "not cached".
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    for (int i = 0; i < audienceAttributes_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream
        .computeMessageSize(1, audienceAttributes_.get(i));
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Value equality over the audience_attributes list plus unknown fields;
  // falls back to super.equals for dynamic-message comparisons.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
     return true;
    }
    if (!(obj instanceof com.google.ads.googleads.v21.services.SearchAudience)) {
      return super.equals(obj);
    }
    com.google.ads.googleads.v21.services.SearchAudience other = (com.google.ads.googleads.v21.services.SearchAudience) obj;
    if (!getAudienceAttributesList()
        .equals(other.getAudienceAttributesList())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Hash is memoized (0 means "not computed"); mixes the descriptor, each set
  // field tagged by its field number, and the unknown fields.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getAudienceAttributesCount() > 0) {
      hash = (37 * hash) + AUDIENCE_ATTRIBUTES_FIELD_NUMBER;
      hash = (53 * hash) + getAudienceAttributesList().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // --- Standard generated parse entry points. All delegate to PARSER; the
  // stream variants go through GeneratedMessageV3 helpers so IOExceptions
  // surface as-is instead of being wrapped. ---
  public static com.google.ads.googleads.v21.services.SearchAudience parseFrom(
      java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v21.services.SearchAudience parseFrom(
      java.nio.ByteBuffer data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v21.services.SearchAudience parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v21.services.SearchAudience parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v21.services.SearchAudience parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v21.services.SearchAudience parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v21.services.SearchAudience parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v21.services.SearchAudience parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  // Delimited variants read a varint length prefix before the message body.
  public static com.google.ads.googleads.v21.services.SearchAudience parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v21.services.SearchAudience parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v21.services.SearchAudience parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v21.services.SearchAudience parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  // --- Builder factories. toBuilder() on the default instance avoids an
  // unnecessary mergeFrom of an all-default message. ---
  @java.lang.Override
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.ads.googleads.v21.services.SearchAudience prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * <pre>
   * A collection of audience attributes that describe an audience of viewers.
   * This is used to search for topics trending for the defined audience.
   * </pre>
   *
   * Protobuf type {@code google.ads.googleads.v21.services.SearchAudience}
   */
  // Mutable builder. The repeated field is kept either as an inline
  // ArrayList (audienceAttributes_) or, once nested builders are requested,
  // inside a RepeatedFieldBuilderV3 (audienceAttributesBuilder_) — exactly
  // one of the two representations is active at any time.
  public static final class Builder extends
      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:google.ads.googleads.v21.services.SearchAudience)
      com.google.ads.googleads.v21.services.SearchAudienceOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return com.google.ads.googleads.v21.services.ContentCreatorInsightsServiceProto.internal_static_google_ads_googleads_v21_services_SearchAudience_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.ads.googleads.v21.services.ContentCreatorInsightsServiceProto.internal_static_google_ads_googleads_v21_services_SearchAudience_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.ads.googleads.v21.services.SearchAudience.class, com.google.ads.googleads.v21.services.SearchAudience.Builder.class);
    }
    // Construct using com.google.ads.googleads.v21.services.SearchAudience.newBuilder()
    private Builder() {
    }
    private Builder(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    // Resets the builder to the default state, clearing whichever of the two
    // repeated-field representations is active and the "is mutable" bit.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (audienceAttributesBuilder_ == null) {
        audienceAttributes_ = java.util.Collections.emptyList();
      } else {
        audienceAttributes_ = null;
        audienceAttributesBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return com.google.ads.googleads.v21.services.ContentCreatorInsightsServiceProto.internal_static_google_ads_googleads_v21_services_SearchAudience_descriptor;
    }
    @java.lang.Override
    public com.google.ads.googleads.v21.services.SearchAudience getDefaultInstanceForType() {
      return com.google.ads.googleads.v21.services.SearchAudience.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.ads.googleads.v21.services.SearchAudience build() {
      com.google.ads.googleads.v21.services.SearchAudience result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.ads.googleads.v21.services.SearchAudience buildPartial() {
      com.google.ads.googleads.v21.services.SearchAudience result = new com.google.ads.googleads.v21.services.SearchAudience(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) { buildPartial0(result); }
      onBuilt();
      return result;
    }
    // Freezes the repeated field into the result: the inline list is wrapped
    // unmodifiable (and marked no-longer-mutable here) so the built message
    // can share it without copying.
    private void buildPartialRepeatedFields(com.google.ads.googleads.v21.services.SearchAudience result) {
      if (audienceAttributesBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)) {
          audienceAttributes_ = java.util.Collections.unmodifiableList(audienceAttributes_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.audienceAttributes_ = audienceAttributes_;
      } else {
        result.audienceAttributes_ = audienceAttributesBuilder_.build();
      }
    }
    // No singular fields in this message; generated as an empty hook.
    // NOTE(review): from_bitField0_ is intentionally unused boilerplate.
    private void buildPartial0(com.google.ads.googleads.v21.services.SearchAudience result) {
      int from_bitField0_ = bitField0_;
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(
        com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(
        com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.ads.googleads.v21.services.SearchAudience) {
        return mergeFrom((com.google.ads.googleads.v21.services.SearchAudience)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Merges another SearchAudience: if our list is still empty we adopt the
    // other message's (immutable) list by reference; otherwise we copy-append.
    // When in builder mode, an empty builder is disposed and rebuilt so the
    // adopted list can be shared the same way.
    public Builder mergeFrom(com.google.ads.googleads.v21.services.SearchAudience other) {
      if (other == com.google.ads.googleads.v21.services.SearchAudience.getDefaultInstance()) return this;
      if (audienceAttributesBuilder_ == null) {
        if (!other.audienceAttributes_.isEmpty()) {
          if (audienceAttributes_.isEmpty()) {
            audienceAttributes_ = other.audienceAttributes_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureAudienceAttributesIsMutable();
            audienceAttributes_.addAll(other.audienceAttributes_);
          }
          onChanged();
        }
      } else {
        if (!other.audienceAttributes_.isEmpty()) {
          if (audienceAttributesBuilder_.isEmpty()) {
            audienceAttributesBuilder_.dispose();
            audienceAttributesBuilder_ = null;
            audienceAttributes_ = other.audienceAttributes_;
            bitField0_ = (bitField0_ & ~0x00000001);
            audienceAttributesBuilder_ =
              com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                 getAudienceAttributesFieldBuilder() : null;
          } else {
            audienceAttributesBuilder_.addAllMessages(other.audienceAttributes_);
          }
        }
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Streaming parse loop: tag 10 = field 1 (audience_attributes,
    // length-delimited); tag 0 or an end-group tag terminates the loop.
    // onChanged() fires in finally so partial parses still notify parents.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10: {
              com.google.ads.googleads.v21.common.AudienceInsightsAttribute m =
                  input.readMessage(
                      com.google.ads.googleads.v21.common.AudienceInsightsAttribute.parser(),
                      extensionRegistry);
              if (audienceAttributesBuilder_ == null) {
                ensureAudienceAttributesIsMutable();
                audienceAttributes_.add(m);
              } else {
                audienceAttributesBuilder_.addMessage(m);
              }
              break;
            } // case 10
            default: {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Bit 0x00000001 tracks whether audienceAttributes_ is a private mutable
    // copy (set) or a possibly-shared immutable list (clear).
    private int bitField0_;
    private java.util.List<com.google.ads.googleads.v21.common.AudienceInsightsAttribute> audienceAttributes_ =
      java.util.Collections.emptyList();
    private void ensureAudienceAttributesIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        audienceAttributes_ = new java.util.ArrayList<com.google.ads.googleads.v21.common.AudienceInsightsAttribute>(audienceAttributes_);
        bitField0_ |= 0x00000001;
       }
    }
    private com.google.protobuf.RepeatedFieldBuilderV3<
        com.google.ads.googleads.v21.common.AudienceInsightsAttribute, com.google.ads.googleads.v21.common.AudienceInsightsAttribute.Builder, com.google.ads.googleads.v21.common.AudienceInsightsAttributeOrBuilder> audienceAttributesBuilder_;
    /**
     * <pre>
     * Required. Audience attributes that describe an audience of viewers. This is
     * used to search for topics trending for the defined audience.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v21.common.AudienceInsightsAttribute audience_attributes = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     */
    public java.util.List<com.google.ads.googleads.v21.common.AudienceInsightsAttribute> getAudienceAttributesList() {
      if (audienceAttributesBuilder_ == null) {
        return java.util.Collections.unmodifiableList(audienceAttributes_);
      } else {
        return audienceAttributesBuilder_.getMessageList();
      }
    }
    /**
     * <pre>
     * Required. Audience attributes that describe an audience of viewers. This is
     * used to search for topics trending for the defined audience.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v21.common.AudienceInsightsAttribute audience_attributes = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     */
    public int getAudienceAttributesCount() {
      if (audienceAttributesBuilder_ == null) {
        return audienceAttributes_.size();
      } else {
        return audienceAttributesBuilder_.getCount();
      }
    }
    /**
     * <pre>
     * Required. Audience attributes that describe an audience of viewers. This is
     * used to search for topics trending for the defined audience.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v21.common.AudienceInsightsAttribute audience_attributes = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     */
    public com.google.ads.googleads.v21.common.AudienceInsightsAttribute getAudienceAttributes(int index) {
      if (audienceAttributesBuilder_ == null) {
        return audienceAttributes_.get(index);
      } else {
        return audienceAttributesBuilder_.getMessage(index);
      }
    }
    /**
     * <pre>
     * Required. Audience attributes that describe an audience of viewers. This is
     * used to search for topics trending for the defined audience.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v21.common.AudienceInsightsAttribute audience_attributes = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     */
    public Builder setAudienceAttributes(
        int index, com.google.ads.googleads.v21.common.AudienceInsightsAttribute value) {
      if (audienceAttributesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureAudienceAttributesIsMutable();
        audienceAttributes_.set(index, value);
        onChanged();
      } else {
        audienceAttributesBuilder_.setMessage(index, value);
      }
      return this;
    }
    /**
     * <pre>
     * Required. Audience attributes that describe an audience of viewers. This is
     * used to search for topics trending for the defined audience.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v21.common.AudienceInsightsAttribute audience_attributes = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     */
    public Builder setAudienceAttributes(
        int index, com.google.ads.googleads.v21.common.AudienceInsightsAttribute.Builder builderForValue) {
      if (audienceAttributesBuilder_ == null) {
        ensureAudienceAttributesIsMutable();
        audienceAttributes_.set(index, builderForValue.build());
        onChanged();
      } else {
        audienceAttributesBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     * <pre>
     * Required. Audience attributes that describe an audience of viewers. This is
     * used to search for topics trending for the defined audience.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v21.common.AudienceInsightsAttribute audience_attributes = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     */
    public Builder addAudienceAttributes(com.google.ads.googleads.v21.common.AudienceInsightsAttribute value) {
      if (audienceAttributesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureAudienceAttributesIsMutable();
        audienceAttributes_.add(value);
        onChanged();
      } else {
        audienceAttributesBuilder_.addMessage(value);
      }
      return this;
    }
    /**
     * <pre>
     * Required. Audience attributes that describe an audience of viewers. This is
     * used to search for topics trending for the defined audience.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v21.common.AudienceInsightsAttribute audience_attributes = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     */
    public Builder addAudienceAttributes(
        int index, com.google.ads.googleads.v21.common.AudienceInsightsAttribute value) {
      if (audienceAttributesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureAudienceAttributesIsMutable();
        audienceAttributes_.add(index, value);
        onChanged();
      } else {
        audienceAttributesBuilder_.addMessage(index, value);
      }
      return this;
    }
    /**
     * <pre>
     * Required. Audience attributes that describe an audience of viewers. This is
     * used to search for topics trending for the defined audience.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v21.common.AudienceInsightsAttribute audience_attributes = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     */
    public Builder addAudienceAttributes(
        com.google.ads.googleads.v21.common.AudienceInsightsAttribute.Builder builderForValue) {
      if (audienceAttributesBuilder_ == null) {
        ensureAudienceAttributesIsMutable();
        audienceAttributes_.add(builderForValue.build());
        onChanged();
      } else {
        audienceAttributesBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }
    /**
     * <pre>
     * Required. Audience attributes that describe an audience of viewers. This is
     * used to search for topics trending for the defined audience.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v21.common.AudienceInsightsAttribute audience_attributes = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     */
    public Builder addAudienceAttributes(
        int index, com.google.ads.googleads.v21.common.AudienceInsightsAttribute.Builder builderForValue) {
      if (audienceAttributesBuilder_ == null) {
        ensureAudienceAttributesIsMutable();
        audienceAttributes_.add(index, builderForValue.build());
        onChanged();
      } else {
        audienceAttributesBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     * <pre>
     * Required. Audience attributes that describe an audience of viewers. This is
     * used to search for topics trending for the defined audience.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v21.common.AudienceInsightsAttribute audience_attributes = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     */
    public Builder addAllAudienceAttributes(
        java.lang.Iterable<? extends com.google.ads.googleads.v21.common.AudienceInsightsAttribute> values) {
      if (audienceAttributesBuilder_ == null) {
        ensureAudienceAttributesIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(
            values, audienceAttributes_);
        onChanged();
      } else {
        audienceAttributesBuilder_.addAllMessages(values);
      }
      return this;
    }
    /**
     * <pre>
     * Required. Audience attributes that describe an audience of viewers. This is
     * used to search for topics trending for the defined audience.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v21.common.AudienceInsightsAttribute audience_attributes = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     */
    public Builder clearAudienceAttributes() {
      if (audienceAttributesBuilder_ == null) {
        audienceAttributes_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        audienceAttributesBuilder_.clear();
      }
      return this;
    }
    /**
     * <pre>
     * Required. Audience attributes that describe an audience of viewers. This is
     * used to search for topics trending for the defined audience.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v21.common.AudienceInsightsAttribute audience_attributes = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     */
    public Builder removeAudienceAttributes(int index) {
      if (audienceAttributesBuilder_ == null) {
        ensureAudienceAttributesIsMutable();
        audienceAttributes_.remove(index);
        onChanged();
      } else {
        audienceAttributesBuilder_.remove(index);
      }
      return this;
    }
    /**
     * <pre>
     * Required. Audience attributes that describe an audience of viewers. This is
     * used to search for topics trending for the defined audience.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v21.common.AudienceInsightsAttribute audience_attributes = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     */
    public com.google.ads.googleads.v21.common.AudienceInsightsAttribute.Builder getAudienceAttributesBuilder(
        int index) {
      return getAudienceAttributesFieldBuilder().getBuilder(index);
    }
    /**
     * <pre>
     * Required. Audience attributes that describe an audience of viewers. This is
     * used to search for topics trending for the defined audience.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v21.common.AudienceInsightsAttribute audience_attributes = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     */
    public com.google.ads.googleads.v21.common.AudienceInsightsAttributeOrBuilder getAudienceAttributesOrBuilder(
        int index) {
      if (audienceAttributesBuilder_ == null) {
        return audienceAttributes_.get(index); } else {
        return audienceAttributesBuilder_.getMessageOrBuilder(index);
      }
    }
    /**
     * <pre>
     * Required. Audience attributes that describe an audience of viewers. This is
     * used to search for topics trending for the defined audience.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v21.common.AudienceInsightsAttribute audience_attributes = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     */
    public java.util.List<? extends com.google.ads.googleads.v21.common.AudienceInsightsAttributeOrBuilder>
         getAudienceAttributesOrBuilderList() {
      if (audienceAttributesBuilder_ != null) {
        return audienceAttributesBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(audienceAttributes_);
      }
    }
    /**
     * <pre>
     * Required. Audience attributes that describe an audience of viewers. This is
     * used to search for topics trending for the defined audience.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v21.common.AudienceInsightsAttribute audience_attributes = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     */
    public com.google.ads.googleads.v21.common.AudienceInsightsAttribute.Builder addAudienceAttributesBuilder() {
      return getAudienceAttributesFieldBuilder().addBuilder(
          com.google.ads.googleads.v21.common.AudienceInsightsAttribute.getDefaultInstance());
    }
    /**
     * <pre>
     * Required. Audience attributes that describe an audience of viewers. This is
     * used to search for topics trending for the defined audience.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v21.common.AudienceInsightsAttribute audience_attributes = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     */
    public com.google.ads.googleads.v21.common.AudienceInsightsAttribute.Builder addAudienceAttributesBuilder(
        int index) {
      return getAudienceAttributesFieldBuilder().addBuilder(
          index, com.google.ads.googleads.v21.common.AudienceInsightsAttribute.getDefaultInstance());
    }
    /**
     * <pre>
     * Required. Audience attributes that describe an audience of viewers. This is
     * used to search for topics trending for the defined audience.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v21.common.AudienceInsightsAttribute audience_attributes = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     */
    public java.util.List<com.google.ads.googleads.v21.common.AudienceInsightsAttribute.Builder>
         getAudienceAttributesBuilderList() {
      return getAudienceAttributesFieldBuilder().getBuilderList();
    }
    // Lazily switches to the builder representation; the inline list is handed
    // to RepeatedFieldBuilderV3 and nulled so there is a single owner.
    private com.google.protobuf.RepeatedFieldBuilderV3<
        com.google.ads.googleads.v21.common.AudienceInsightsAttribute, com.google.ads.googleads.v21.common.AudienceInsightsAttribute.Builder, com.google.ads.googleads.v21.common.AudienceInsightsAttributeOrBuilder>
        getAudienceAttributesFieldBuilder() {
      if (audienceAttributesBuilder_ == null) {
        audienceAttributesBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.ads.googleads.v21.common.AudienceInsightsAttribute, com.google.ads.googleads.v21.common.AudienceInsightsAttribute.Builder, com.google.ads.googleads.v21.common.AudienceInsightsAttributeOrBuilder>(
                audienceAttributes_,
                ((bitField0_ & 0x00000001) != 0),
                getParentForChildren(),
                isClean());
        audienceAttributes_ = null;
      }
      return audienceAttributesBuilder_;
    }
    @java.lang.Override
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.ads.googleads.v21.services.SearchAudience)
  }
  // @@protoc_insertion_point(class_scope:google.ads.googleads.v21.services.SearchAudience)
  // Singleton default instance, created eagerly at class-load time.
  private static final com.google.ads.googleads.v21.services.SearchAudience DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.ads.googleads.v21.services.SearchAudience();
  }
  public static com.google.ads.googleads.v21.services.SearchAudience getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser that routes every parse through a fresh Builder; on failure the
  // partially-built message is attached to the thrown exception.
  private static final com.google.protobuf.Parser<SearchAudience>
      PARSER = new com.google.protobuf.AbstractParser<SearchAudience>() {
    @java.lang.Override
    public SearchAudience parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      Builder builder = newBuilder();
      try {
        builder.mergeFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(builder.buildPartial());
      } catch (com.google.protobuf.UninitializedMessageException e) {
        throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(e)
            .setUnfinishedMessage(builder.buildPartial());
      }
      return builder.buildPartial();
    }
  };
  public static com.google.protobuf.Parser<SearchAudience> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<SearchAudience> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.ads.googleads.v21.services.SearchAudience getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
// ==== dataset-row boundary (garbled concatenation metadata removed): next entry is
// googleapis/google-cloud-java : java-maps-mapsplatformdatasets/proto-google-maps-mapsplatformdatasets-v1/
// src/main/java/com/google/maps/mapsplatformdatasets/v1/FetchDatasetErrorsResponse.java ====
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/maps/mapsplatformdatasets/v1/maps_platform_datasets.proto
// Protobuf Java Version: 3.25.8
package com.google.maps.mapsplatformdatasets.v1;
/**
*
*
* <pre>
* Response object of FetchDatasetErrors.
* </pre>
*
* Protobuf type {@code google.maps.mapsplatformdatasets.v1.FetchDatasetErrorsResponse}
*/
public final class FetchDatasetErrorsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.maps.mapsplatformdatasets.v1.FetchDatasetErrorsResponse)
FetchDatasetErrorsResponseOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use FetchDatasetErrorsResponse.newBuilder() to construct.
  private FetchDatasetErrorsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Default constructor used only for DEFAULT_INSTANCE; initializes fields to
  // their proto3 defaults (empty token, empty error list).
  private FetchDatasetErrorsResponse() {
    nextPageToken_ = "";
    errors_ = java.util.Collections.emptyList();
  }
  // Reflection hook used by the runtime to allocate instances without a ctor call.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new FetchDatasetErrorsResponse();
  }
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.maps.mapsplatformdatasets.v1.MapsPlatformDatasetsProto
        .internal_static_google_maps_mapsplatformdatasets_v1_FetchDatasetErrorsResponse_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.maps.mapsplatformdatasets.v1.MapsPlatformDatasetsProto
        .internal_static_google_maps_mapsplatformdatasets_v1_FetchDatasetErrorsResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.maps.mapsplatformdatasets.v1.FetchDatasetErrorsResponse.class,
            com.google.maps.mapsplatformdatasets.v1.FetchDatasetErrorsResponse.Builder.class);
  }
  public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
  // Stored as Object so it can lazily flip between ByteString (as parsed) and
  // String (once decoded); the decoded form is cached back into the field.
  @SuppressWarnings("serial")
  private volatile java.lang.Object nextPageToken_ = "";
  /**
   *
   *
   * <pre>
   * A token that can be sent as `page_token` to retrieve the next page.
   *
   * If this field is omitted, there are no subsequent pages.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The nextPageToken.
   */
  @java.lang.Override
  public java.lang.String getNextPageToken() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      nextPageToken_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * A token that can be sent as `page_token` to retrieve the next page.
   *
   * If this field is omitted, there are no subsequent pages.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The bytes for nextPageToken.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getNextPageTokenBytes() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      nextPageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int ERRORS_FIELD_NUMBER = 3;
  // Immutable once the message is built, so accessors can expose it directly.
  @SuppressWarnings("serial")
  private java.util.List<com.google.rpc.Status> errors_;
  /**
   *
   *
   * <pre>
   * The errors associated with a dataset.
   * </pre>
   *
   * <code>repeated .google.rpc.Status errors = 3;</code>
   */
  @java.lang.Override
  public java.util.List<com.google.rpc.Status> getErrorsList() {
    return errors_;
  }
  /**
   *
   *
   * <pre>
   * The errors associated with a dataset.
   * </pre>
   *
   * <code>repeated .google.rpc.Status errors = 3;</code>
   */
  @java.lang.Override
  public java.util.List<? extends com.google.rpc.StatusOrBuilder> getErrorsOrBuilderList() {
    return errors_;
  }
  /**
   *
   *
   * <pre>
   * The errors associated with a dataset.
   * </pre>
   *
   * <code>repeated .google.rpc.Status errors = 3;</code>
   */
  @java.lang.Override
  public int getErrorsCount() {
    return errors_.size();
  }
  /**
   *
   *
   * <pre>
   * The errors associated with a dataset.
   * </pre>
   *
   * <code>repeated .google.rpc.Status errors = 3;</code>
   */
  @java.lang.Override
  public com.google.rpc.Status getErrors(int index) {
    return errors_.get(index);
  }
  /**
   *
   *
   * <pre>
   * The errors associated with a dataset.
   * </pre>
   *
   * <code>repeated .google.rpc.Status errors = 3;</code>
   */
  @java.lang.Override
  public com.google.rpc.StatusOrBuilder getErrorsOrBuilder(int index) {
    return errors_.get(index);
  }
  // Tri-state init cache: -1 unknown, 0 false, 1 true (no required fields here).
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  // Writes next_page_token (field 2) only when non-empty (proto3 default
  // elision), then each errors element (field 3), then unknown fields.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
    }
    for (int i = 0; i < errors_.size(); i++) {
      output.writeMessage(3, errors_.get(i));
    }
    getUnknownFields().writeTo(output);
  }
  // Computes and memoizes the wire size; mirrors writeTo field-for-field.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
    }
    for (int i = 0; i < errors_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, errors_.get(i));
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Field-wise equality over next_page_token, errors, and unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.maps.mapsplatformdatasets.v1.FetchDatasetErrorsResponse)) {
      return super.equals(obj);
    }
    com.google.maps.mapsplatformdatasets.v1.FetchDatasetErrorsResponse other =
        (com.google.maps.mapsplatformdatasets.v1.FetchDatasetErrorsResponse) obj;
    if (!getNextPageToken().equals(other.getNextPageToken())) return false;
    if (!getErrorsList().equals(other.getErrorsList())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  // Memoized hash consistent with equals; folds in the descriptor, each set
  // field tagged by its field number, and the unknown field set.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getNextPageToken().hashCode();
    if (getErrorsCount() > 0) {
      hash = (37 * hash) + ERRORS_FIELD_NUMBER;
      hash = (53 * hash) + getErrorsList().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // ---------------------------------------------------------------------------
  // Standard protoc-generated parseFrom overloads; all delegate to PARSER.
  // ---------------------------------------------------------------------------
  public static com.google.maps.mapsplatformdatasets.v1.FetchDatasetErrorsResponse parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.maps.mapsplatformdatasets.v1.FetchDatasetErrorsResponse parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.maps.mapsplatformdatasets.v1.FetchDatasetErrorsResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.maps.mapsplatformdatasets.v1.FetchDatasetErrorsResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.maps.mapsplatformdatasets.v1.FetchDatasetErrorsResponse parseFrom(
      byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.maps.mapsplatformdatasets.v1.FetchDatasetErrorsResponse parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.maps.mapsplatformdatasets.v1.FetchDatasetErrorsResponse parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.maps.mapsplatformdatasets.v1.FetchDatasetErrorsResponse parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // Delimited variants read a varint length prefix before the message bytes.
  public static com.google.maps.mapsplatformdatasets.v1.FetchDatasetErrorsResponse
      parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.maps.mapsplatformdatasets.v1.FetchDatasetErrorsResponse
      parseDelimitedFrom(
          java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.maps.mapsplatformdatasets.v1.FetchDatasetErrorsResponse parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.maps.mapsplatformdatasets.v1.FetchDatasetErrorsResponse parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // ---------------------------------------------------------------------------
  // Builder factory methods.
  // ---------------------------------------------------------------------------
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.maps.mapsplatformdatasets.v1.FetchDatasetErrorsResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    // The default instance yields a fresh empty Builder; otherwise seed the
    // Builder with this message's current field values.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Response object of FetchDatasetErrors.
   * </pre>
   *
   * Protobuf type {@code google.maps.mapsplatformdatasets.v1.FetchDatasetErrorsResponse}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.maps.mapsplatformdatasets.v1.FetchDatasetErrorsResponse)
      com.google.maps.mapsplatformdatasets.v1.FetchDatasetErrorsResponseOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.maps.mapsplatformdatasets.v1.MapsPlatformDatasetsProto
          .internal_static_google_maps_mapsplatformdatasets_v1_FetchDatasetErrorsResponse_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.maps.mapsplatformdatasets.v1.MapsPlatformDatasetsProto
          .internal_static_google_maps_mapsplatformdatasets_v1_FetchDatasetErrorsResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.maps.mapsplatformdatasets.v1.FetchDatasetErrorsResponse.class,
              com.google.maps.mapsplatformdatasets.v1.FetchDatasetErrorsResponse.Builder.class);
    }

    // Construct using
    // com.google.maps.mapsplatformdatasets.v1.FetchDatasetErrorsResponse.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    // Resets both fields. bitField0_ bit 0 tracks next_page_token; bit 1
    // (0x00000002) tracks whether the errors list is a private mutable copy.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      nextPageToken_ = "";
      if (errorsBuilder_ == null) {
        errors_ = java.util.Collections.emptyList();
      } else {
        errors_ = null;
        errorsBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000002);
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.maps.mapsplatformdatasets.v1.MapsPlatformDatasetsProto
          .internal_static_google_maps_mapsplatformdatasets_v1_FetchDatasetErrorsResponse_descriptor;
    }

    @java.lang.Override
    public com.google.maps.mapsplatformdatasets.v1.FetchDatasetErrorsResponse
        getDefaultInstanceForType() {
      return com.google.maps.mapsplatformdatasets.v1.FetchDatasetErrorsResponse
          .getDefaultInstance();
    }

    @java.lang.Override
    public com.google.maps.mapsplatformdatasets.v1.FetchDatasetErrorsResponse build() {
      com.google.maps.mapsplatformdatasets.v1.FetchDatasetErrorsResponse result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.maps.mapsplatformdatasets.v1.FetchDatasetErrorsResponse buildPartial() {
      com.google.maps.mapsplatformdatasets.v1.FetchDatasetErrorsResponse result =
          new com.google.maps.mapsplatformdatasets.v1.FetchDatasetErrorsResponse(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Transfers the repeated `errors` field into the result, freezing the
    // Builder's mutable list as unmodifiable when no field builder is in use.
    private void buildPartialRepeatedFields(
        com.google.maps.mapsplatformdatasets.v1.FetchDatasetErrorsResponse result) {
      if (errorsBuilder_ == null) {
        if (((bitField0_ & 0x00000002) != 0)) {
          errors_ = java.util.Collections.unmodifiableList(errors_);
          bitField0_ = (bitField0_ & ~0x00000002);
        }
        result.errors_ = errors_;
      } else {
        result.errors_ = errorsBuilder_.build();
      }
    }

    // Transfers the singular fields that have been explicitly set.
    private void buildPartial0(
        com.google.maps.mapsplatformdatasets.v1.FetchDatasetErrorsResponse result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.nextPageToken_ = nextPageToken_;
      }
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.maps.mapsplatformdatasets.v1.FetchDatasetErrorsResponse) {
        return mergeFrom(
            (com.google.maps.mapsplatformdatasets.v1.FetchDatasetErrorsResponse) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Typed merge: non-empty token overwrites; errors entries are appended.
    // When our list is still empty we share `other`'s (immutable) list and
    // defer copying until a mutation occurs.
    public Builder mergeFrom(
        com.google.maps.mapsplatformdatasets.v1.FetchDatasetErrorsResponse other) {
      if (other
          == com.google.maps.mapsplatformdatasets.v1.FetchDatasetErrorsResponse
              .getDefaultInstance()) return this;
      if (!other.getNextPageToken().isEmpty()) {
        nextPageToken_ = other.nextPageToken_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (errorsBuilder_ == null) {
        if (!other.errors_.isEmpty()) {
          if (errors_.isEmpty()) {
            errors_ = other.errors_;
            bitField0_ = (bitField0_ & ~0x00000002);
          } else {
            ensureErrorsIsMutable();
            errors_.addAll(other.errors_);
          }
          onChanged();
        }
      } else {
        if (!other.errors_.isEmpty()) {
          if (errorsBuilder_.isEmpty()) {
            errorsBuilder_.dispose();
            errorsBuilder_ = null;
            errors_ = other.errors_;
            bitField0_ = (bitField0_ & ~0x00000002);
            errorsBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getErrorsFieldBuilder()
                    : null;
          } else {
            errorsBuilder_.addAllMessages(other.errors_);
          }
        }
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    // Streaming merge from the wire format. Tag 18 = field 2 (string,
    // length-delimited); tag 26 = field 3 (message, length-delimited).
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 18:
              {
                nextPageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 18
            case 26:
              {
                com.google.rpc.Status m =
                    input.readMessage(com.google.rpc.Status.parser(), extensionRegistry);
                if (errorsBuilder_ == null) {
                  ensureErrorsIsMutable();
                  errors_.add(m);
                } else {
                  errorsBuilder_.addMessage(m);
                }
                break;
              } // case 26
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    private int bitField0_;

    private java.lang.Object nextPageToken_ = "";

    /**
     *
     *
     * <pre>
     * A token that can be sent as `page_token` to retrieve the next page.
     *
     * If this field is omitted, there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The nextPageToken.
     */
    public java.lang.String getNextPageToken() {
      java.lang.Object ref = nextPageToken_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        nextPageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * A token that can be sent as `page_token` to retrieve the next page.
     *
     * If this field is omitted, there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The bytes for nextPageToken.
     */
    public com.google.protobuf.ByteString getNextPageTokenBytes() {
      java.lang.Object ref = nextPageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        nextPageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * A token that can be sent as `page_token` to retrieve the next page.
     *
     * If this field is omitted, there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      nextPageToken_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * A token that can be sent as `page_token` to retrieve the next page.
     *
     * If this field is omitted, there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearNextPageToken() {
      nextPageToken_ = getDefaultInstance().getNextPageToken();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * A token that can be sent as `page_token` to retrieve the next page.
     *
     * If this field is omitted, there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The bytes for nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      nextPageToken_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    private java.util.List<com.google.rpc.Status> errors_ = java.util.Collections.emptyList();

    // Copy-on-write guard: promotes errors_ to a private ArrayList before the
    // first mutation (bit 0x00000002 records that the copy has been made).
    private void ensureErrorsIsMutable() {
      if (!((bitField0_ & 0x00000002) != 0)) {
        errors_ = new java.util.ArrayList<com.google.rpc.Status>(errors_);
        bitField0_ |= 0x00000002;
      }
    }

    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder>
        errorsBuilder_;

    /**
     *
     *
     * <pre>
     * The errors associated with a dataset.
     * </pre>
     *
     * <code>repeated .google.rpc.Status errors = 3;</code>
     */
    public java.util.List<com.google.rpc.Status> getErrorsList() {
      if (errorsBuilder_ == null) {
        return java.util.Collections.unmodifiableList(errors_);
      } else {
        return errorsBuilder_.getMessageList();
      }
    }

    /**
     *
     *
     * <pre>
     * The errors associated with a dataset.
     * </pre>
     *
     * <code>repeated .google.rpc.Status errors = 3;</code>
     */
    public int getErrorsCount() {
      if (errorsBuilder_ == null) {
        return errors_.size();
      } else {
        return errorsBuilder_.getCount();
      }
    }

    /**
     *
     *
     * <pre>
     * The errors associated with a dataset.
     * </pre>
     *
     * <code>repeated .google.rpc.Status errors = 3;</code>
     */
    public com.google.rpc.Status getErrors(int index) {
      if (errorsBuilder_ == null) {
        return errors_.get(index);
      } else {
        return errorsBuilder_.getMessage(index);
      }
    }

    /**
     *
     *
     * <pre>
     * The errors associated with a dataset.
     * </pre>
     *
     * <code>repeated .google.rpc.Status errors = 3;</code>
     */
    public Builder setErrors(int index, com.google.rpc.Status value) {
      if (errorsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureErrorsIsMutable();
        errors_.set(index, value);
        onChanged();
      } else {
        errorsBuilder_.setMessage(index, value);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The errors associated with a dataset.
     * </pre>
     *
     * <code>repeated .google.rpc.Status errors = 3;</code>
     */
    public Builder setErrors(int index, com.google.rpc.Status.Builder builderForValue) {
      if (errorsBuilder_ == null) {
        ensureErrorsIsMutable();
        errors_.set(index, builderForValue.build());
        onChanged();
      } else {
        errorsBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The errors associated with a dataset.
     * </pre>
     *
     * <code>repeated .google.rpc.Status errors = 3;</code>
     */
    public Builder addErrors(com.google.rpc.Status value) {
      if (errorsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureErrorsIsMutable();
        errors_.add(value);
        onChanged();
      } else {
        errorsBuilder_.addMessage(value);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The errors associated with a dataset.
     * </pre>
     *
     * <code>repeated .google.rpc.Status errors = 3;</code>
     */
    public Builder addErrors(int index, com.google.rpc.Status value) {
      if (errorsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureErrorsIsMutable();
        errors_.add(index, value);
        onChanged();
      } else {
        errorsBuilder_.addMessage(index, value);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The errors associated with a dataset.
     * </pre>
     *
     * <code>repeated .google.rpc.Status errors = 3;</code>
     */
    public Builder addErrors(com.google.rpc.Status.Builder builderForValue) {
      if (errorsBuilder_ == null) {
        ensureErrorsIsMutable();
        errors_.add(builderForValue.build());
        onChanged();
      } else {
        errorsBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The errors associated with a dataset.
     * </pre>
     *
     * <code>repeated .google.rpc.Status errors = 3;</code>
     */
    public Builder addErrors(int index, com.google.rpc.Status.Builder builderForValue) {
      if (errorsBuilder_ == null) {
        ensureErrorsIsMutable();
        errors_.add(index, builderForValue.build());
        onChanged();
      } else {
        errorsBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The errors associated with a dataset.
     * </pre>
     *
     * <code>repeated .google.rpc.Status errors = 3;</code>
     */
    public Builder addAllErrors(java.lang.Iterable<? extends com.google.rpc.Status> values) {
      if (errorsBuilder_ == null) {
        ensureErrorsIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, errors_);
        onChanged();
      } else {
        errorsBuilder_.addAllMessages(values);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The errors associated with a dataset.
     * </pre>
     *
     * <code>repeated .google.rpc.Status errors = 3;</code>
     */
    public Builder clearErrors() {
      if (errorsBuilder_ == null) {
        errors_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000002);
        onChanged();
      } else {
        errorsBuilder_.clear();
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The errors associated with a dataset.
     * </pre>
     *
     * <code>repeated .google.rpc.Status errors = 3;</code>
     */
    public Builder removeErrors(int index) {
      if (errorsBuilder_ == null) {
        ensureErrorsIsMutable();
        errors_.remove(index);
        onChanged();
      } else {
        errorsBuilder_.remove(index);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The errors associated with a dataset.
     * </pre>
     *
     * <code>repeated .google.rpc.Status errors = 3;</code>
     */
    public com.google.rpc.Status.Builder getErrorsBuilder(int index) {
      return getErrorsFieldBuilder().getBuilder(index);
    }

    /**
     *
     *
     * <pre>
     * The errors associated with a dataset.
     * </pre>
     *
     * <code>repeated .google.rpc.Status errors = 3;</code>
     */
    public com.google.rpc.StatusOrBuilder getErrorsOrBuilder(int index) {
      if (errorsBuilder_ == null) {
        return errors_.get(index);
      } else {
        return errorsBuilder_.getMessageOrBuilder(index);
      }
    }

    /**
     *
     *
     * <pre>
     * The errors associated with a dataset.
     * </pre>
     *
     * <code>repeated .google.rpc.Status errors = 3;</code>
     */
    public java.util.List<? extends com.google.rpc.StatusOrBuilder> getErrorsOrBuilderList() {
      if (errorsBuilder_ != null) {
        return errorsBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(errors_);
      }
    }

    /**
     *
     *
     * <pre>
     * The errors associated with a dataset.
     * </pre>
     *
     * <code>repeated .google.rpc.Status errors = 3;</code>
     */
    public com.google.rpc.Status.Builder addErrorsBuilder() {
      return getErrorsFieldBuilder().addBuilder(com.google.rpc.Status.getDefaultInstance());
    }

    /**
     *
     *
     * <pre>
     * The errors associated with a dataset.
     * </pre>
     *
     * <code>repeated .google.rpc.Status errors = 3;</code>
     */
    public com.google.rpc.Status.Builder addErrorsBuilder(int index) {
      return getErrorsFieldBuilder().addBuilder(index, com.google.rpc.Status.getDefaultInstance());
    }

    /**
     *
     *
     * <pre>
     * The errors associated with a dataset.
     * </pre>
     *
     * <code>repeated .google.rpc.Status errors = 3;</code>
     */
    public java.util.List<com.google.rpc.Status.Builder> getErrorsBuilderList() {
      return getErrorsFieldBuilder().getBuilderList();
    }

    // Lazily creates the field builder; once created, errors_ is nulled and
    // all repeated-field access goes through errorsBuilder_.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder>
        getErrorsFieldBuilder() {
      if (errorsBuilder_ == null) {
        errorsBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.rpc.Status,
                com.google.rpc.Status.Builder,
                com.google.rpc.StatusOrBuilder>(
                errors_, ((bitField0_ & 0x00000002) != 0), getParentForChildren(), isClean());
        errors_ = null;
      }
      return errorsBuilder_;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.maps.mapsplatformdatasets.v1.FetchDatasetErrorsResponse)
  }
  // @@protoc_insertion_point(class_scope:google.maps.mapsplatformdatasets.v1.FetchDatasetErrorsResponse)
  // Singleton default (all-fields-empty) instance shared by all callers.
  private static final com.google.maps.mapsplatformdatasets.v1.FetchDatasetErrorsResponse
      DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.maps.mapsplatformdatasets.v1.FetchDatasetErrorsResponse();
  }

  public static com.google.maps.mapsplatformdatasets.v1.FetchDatasetErrorsResponse
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Wire-format parser; delegates to the Builder's streaming mergeFrom and
  // attaches any partially-parsed message to thrown parse exceptions.
  private static final com.google.protobuf.Parser<FetchDatasetErrorsResponse> PARSER =
      new com.google.protobuf.AbstractParser<FetchDatasetErrorsResponse>() {
        @java.lang.Override
        public FetchDatasetErrorsResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<FetchDatasetErrorsResponse> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<FetchDatasetErrorsResponse> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.maps.mapsplatformdatasets.v1.FetchDatasetErrorsResponse
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/dataflow/v1beta3/metrics.proto
// Protobuf Java Version: 3.25.8
package com.google.dataflow.v1beta3;
/**
*
*
* <pre>
* Information about the execution of a job.
* </pre>
*
* Protobuf type {@code google.dataflow.v1beta3.JobExecutionDetails}
*/
public final class JobExecutionDetails extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.dataflow.v1beta3.JobExecutionDetails)
JobExecutionDetailsOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use JobExecutionDetails.newBuilder() to construct.
  private JobExecutionDetails(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // Default-instance constructor: empty repeated `stages`, empty page token.
  private JobExecutionDetails() {
    stages_ = java.util.Collections.emptyList();
    nextPageToken_ = "";
  }

  // Reflective instantiation hook used by the protobuf runtime.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new JobExecutionDetails();
  }
  // Descriptor for this message type, defined in metrics.proto.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.dataflow.v1beta3.MetricsProto
        .internal_static_google_dataflow_v1beta3_JobExecutionDetails_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.dataflow.v1beta3.MetricsProto
        .internal_static_google_dataflow_v1beta3_JobExecutionDetails_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.dataflow.v1beta3.JobExecutionDetails.class,
            com.google.dataflow.v1beta3.JobExecutionDetails.Builder.class);
  }
  public static final int STAGES_FIELD_NUMBER = 1;

  // Backing list for the repeated `stages` field; effectively immutable once
  // the message is built.
  @SuppressWarnings("serial")
  private java.util.List<com.google.dataflow.v1beta3.StageSummary> stages_;

  /**
   *
   *
   * <pre>
   * The stages of the job execution.
   * </pre>
   *
   * <code>repeated .google.dataflow.v1beta3.StageSummary stages = 1;</code>
   */
  @java.lang.Override
  public java.util.List<com.google.dataflow.v1beta3.StageSummary> getStagesList() {
    return stages_;
  }

  /**
   *
   *
   * <pre>
   * The stages of the job execution.
   * </pre>
   *
   * <code>repeated .google.dataflow.v1beta3.StageSummary stages = 1;</code>
   */
  @java.lang.Override
  public java.util.List<? extends com.google.dataflow.v1beta3.StageSummaryOrBuilder>
      getStagesOrBuilderList() {
    return stages_;
  }

  /**
   *
   *
   * <pre>
   * The stages of the job execution.
   * </pre>
   *
   * <code>repeated .google.dataflow.v1beta3.StageSummary stages = 1;</code>
   */
  @java.lang.Override
  public int getStagesCount() {
    return stages_.size();
  }

  /**
   *
   *
   * <pre>
   * The stages of the job execution.
   * </pre>
   *
   * <code>repeated .google.dataflow.v1beta3.StageSummary stages = 1;</code>
   */
  @java.lang.Override
  public com.google.dataflow.v1beta3.StageSummary getStages(int index) {
    return stages_.get(index);
  }

  /**
   *
   *
   * <pre>
   * The stages of the job execution.
   * </pre>
   *
   * <code>repeated .google.dataflow.v1beta3.StageSummary stages = 1;</code>
   */
  @java.lang.Override
  public com.google.dataflow.v1beta3.StageSummaryOrBuilder getStagesOrBuilder(int index) {
    return stages_.get(index);
  }
  public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;

  // Holds either a String or a ByteString; converted lazily in each direction
  // and memoized (standard protoc string-field representation).
  @SuppressWarnings("serial")
  private volatile java.lang.Object nextPageToken_ = "";

  /**
   *
   *
   * <pre>
   * If present, this response does not contain all requested tasks. To obtain
   * the next page of results, repeat the request with page_token set to this
   * value.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The nextPageToken.
   */
  @java.lang.Override
  public java.lang.String getNextPageToken() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      nextPageToken_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * If present, this response does not contain all requested tasks. To obtain
   * the next page of results, repeat the request with page_token set to this
   * value.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The bytes for nextPageToken.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getNextPageTokenBytes() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      nextPageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Memoized initialization state: -1 = not computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;

  // No required fields in this message, so it is always initialized.
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }

  // Serializes each `stages` entry (field 1), then next_page_token (field 2)
  // when non-empty, then any retained unknown fields.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    for (int i = 0; i < stages_.size(); i++) {
      output.writeMessage(1, stages_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
    }
    getUnknownFields().writeTo(output);
  }

  // Computes and caches the wire size; must mirror writeTo field-for-field.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    for (int i = 0; i < stages_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, stages_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Field-wise equality over stages, next_page_token, and unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.dataflow.v1beta3.JobExecutionDetails)) {
      return super.equals(obj);
    }
    com.google.dataflow.v1beta3.JobExecutionDetails other =
        (com.google.dataflow.v1beta3.JobExecutionDetails) obj;
    if (!getStagesList().equals(other.getStagesList())) return false;
    if (!getNextPageToken().equals(other.getNextPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  // Memoized hash consistent with equals, tagged by field numbers.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getStagesCount() > 0) {
      hash = (37 * hash) + STAGES_FIELD_NUMBER;
      hash = (53 * hash) + getStagesList().hashCode();
    }
    hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getNextPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // ---------------------------------------------------------------------------
  // Standard protoc-generated parseFrom overloads; all delegate to PARSER.
  // ---------------------------------------------------------------------------
  public static com.google.dataflow.v1beta3.JobExecutionDetails parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.dataflow.v1beta3.JobExecutionDetails parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.dataflow.v1beta3.JobExecutionDetails parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.dataflow.v1beta3.JobExecutionDetails parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.dataflow.v1beta3.JobExecutionDetails parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.dataflow.v1beta3.JobExecutionDetails parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.dataflow.v1beta3.JobExecutionDetails parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.dataflow.v1beta3.JobExecutionDetails parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // Delimited variant reads a varint length prefix before the message bytes.
  public static com.google.dataflow.v1beta3.JobExecutionDetails parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
public static com.google.dataflow.v1beta3.JobExecutionDetails parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.dataflow.v1beta3.JobExecutionDetails parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.dataflow.v1beta3.JobExecutionDetails parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.dataflow.v1beta3.JobExecutionDetails parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}

// Creates a builder seeded from the shared default (empty) instance.
public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}

// Creates a builder pre-populated with all fields of {@code prototype}.
public static Builder newBuilder(com.google.dataflow.v1beta3.JobExecutionDetails prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}

@java.lang.Override
public Builder toBuilder() {
  // Avoid a pointless mergeFrom when this is the empty default instance.
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}

@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
 *
 *
 * <pre>
 * Information about the execution of a job.
 * </pre>
 *
 * Protobuf type {@code google.dataflow.v1beta3.JobExecutionDetails}
 */
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
    implements
    // @@protoc_insertion_point(builder_implements:google.dataflow.v1beta3.JobExecutionDetails)
    com.google.dataflow.v1beta3.JobExecutionDetailsOrBuilder {
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.dataflow.v1beta3.MetricsProto
        .internal_static_google_dataflow_v1beta3_JobExecutionDetails_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.dataflow.v1beta3.MetricsProto
        .internal_static_google_dataflow_v1beta3_JobExecutionDetails_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.dataflow.v1beta3.JobExecutionDetails.class,
            com.google.dataflow.v1beta3.JobExecutionDetails.Builder.class);
  }

  // Construct using com.google.dataflow.v1beta3.JobExecutionDetails.newBuilder()
  private Builder() {}

  private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    super(parent);
  }

  // Resets every field to its default and clears the field-presence bits.
  @java.lang.Override
  public Builder clear() {
    super.clear();
    bitField0_ = 0;
    if (stagesBuilder_ == null) {
      stages_ = java.util.Collections.emptyList();
    } else {
      stages_ = null;
      stagesBuilder_.clear();
    }
    bitField0_ = (bitField0_ & ~0x00000001);
    nextPageToken_ = "";
    return this;
  }

  @java.lang.Override
  public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
    return com.google.dataflow.v1beta3.MetricsProto
        .internal_static_google_dataflow_v1beta3_JobExecutionDetails_descriptor;
  }

  @java.lang.Override
  public com.google.dataflow.v1beta3.JobExecutionDetails getDefaultInstanceForType() {
    return com.google.dataflow.v1beta3.JobExecutionDetails.getDefaultInstance();
  }

  @java.lang.Override
  public com.google.dataflow.v1beta3.JobExecutionDetails build() {
    com.google.dataflow.v1beta3.JobExecutionDetails result = buildPartial();
    if (!result.isInitialized()) {
      throw newUninitializedMessageException(result);
    }
    return result;
  }

  @java.lang.Override
  public com.google.dataflow.v1beta3.JobExecutionDetails buildPartial() {
    com.google.dataflow.v1beta3.JobExecutionDetails result =
        new com.google.dataflow.v1beta3.JobExecutionDetails(this);
    buildPartialRepeatedFields(result);
    if (bitField0_ != 0) {
      buildPartial0(result);
    }
    onBuilt();
    return result;
  }

  // Hands the stages list over to the built message. When the builder owns a
  // mutable list (bit 0x1 set), it is frozen via unmodifiableList first so the
  // immutable message never exposes a mutable collection.
  private void buildPartialRepeatedFields(
      com.google.dataflow.v1beta3.JobExecutionDetails result) {
    if (stagesBuilder_ == null) {
      if (((bitField0_ & 0x00000001) != 0)) {
        stages_ = java.util.Collections.unmodifiableList(stages_);
        bitField0_ = (bitField0_ & ~0x00000001);
      }
      result.stages_ = stages_;
    } else {
      result.stages_ = stagesBuilder_.build();
    }
  }

  // Copies scalar fields whose presence bit is set (0x2 = next_page_token).
  private void buildPartial0(com.google.dataflow.v1beta3.JobExecutionDetails result) {
    int from_bitField0_ = bitField0_;
    if (((from_bitField0_ & 0x00000002) != 0)) {
      result.nextPageToken_ = nextPageToken_;
    }
  }

  @java.lang.Override
  public Builder clone() {
    return super.clone();
  }

  @java.lang.Override
  public Builder setField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.setField(field, value);
  }

  @java.lang.Override
  public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
    return super.clearField(field);
  }

  @java.lang.Override
  public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
    return super.clearOneof(oneof);
  }

  @java.lang.Override
  public Builder setRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
    return super.setRepeatedField(field, index, value);
  }

  @java.lang.Override
  public Builder addRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.addRepeatedField(field, value);
  }

  // Dynamic-dispatch merge: routes to the typed overload when possible.
  @java.lang.Override
  public Builder mergeFrom(com.google.protobuf.Message other) {
    if (other instanceof com.google.dataflow.v1beta3.JobExecutionDetails) {
      return mergeFrom((com.google.dataflow.v1beta3.JobExecutionDetails) other);
    } else {
      super.mergeFrom(other);
      return this;
    }
  }

  // Typed merge: appends other's stages and, when other's next_page_token is
  // non-empty, overwrites ours. Unknown fields are merged as well.
  public Builder mergeFrom(com.google.dataflow.v1beta3.JobExecutionDetails other) {
    if (other == com.google.dataflow.v1beta3.JobExecutionDetails.getDefaultInstance())
      return this;
    if (stagesBuilder_ == null) {
      if (!other.stages_.isEmpty()) {
        if (stages_.isEmpty()) {
          // Adopt other's (immutable) list directly; clear the "owned" bit.
          stages_ = other.stages_;
          bitField0_ = (bitField0_ & ~0x00000001);
        } else {
          ensureStagesIsMutable();
          stages_.addAll(other.stages_);
        }
        onChanged();
      }
    } else {
      if (!other.stages_.isEmpty()) {
        if (stagesBuilder_.isEmpty()) {
          // Drop the empty field builder and adopt other's list; re-create the
          // builder eagerly only when alwaysUseFieldBuilders is on.
          stagesBuilder_.dispose();
          stagesBuilder_ = null;
          stages_ = other.stages_;
          bitField0_ = (bitField0_ & ~0x00000001);
          stagesBuilder_ =
              com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                  ? getStagesFieldBuilder()
                  : null;
        } else {
          stagesBuilder_.addAllMessages(other.stages_);
        }
      }
    }
    if (!other.getNextPageToken().isEmpty()) {
      nextPageToken_ = other.nextPageToken_;
      bitField0_ |= 0x00000002;
      onChanged();
    }
    this.mergeUnknownFields(other.getUnknownFields());
    onChanged();
    return this;
  }

  @java.lang.Override
  public final boolean isInitialized() {
    // No required fields in this proto3 message.
    return true;
  }

  // Wire-format merge: reads tag/value pairs until EOF (tag 0) or an end-group
  // tag; tag 10 = stages (field 1, length-delimited), tag 18 = next_page_token.
  @java.lang.Override
  public Builder mergeFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 10:
            {
              com.google.dataflow.v1beta3.StageSummary m =
                  input.readMessage(
                      com.google.dataflow.v1beta3.StageSummary.parser(), extensionRegistry);
              if (stagesBuilder_ == null) {
                ensureStagesIsMutable();
                stages_.add(m);
              } else {
                stagesBuilder_.addMessage(m);
              }
              break;
            } // case 10
          case 18:
            {
              nextPageToken_ = input.readStringRequireUtf8();
              bitField0_ |= 0x00000002;
              break;
            } // case 18
          default:
            {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
        } // switch (tag)
      } // while (!done)
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.unwrapIOException();
    } finally {
      onChanged();
    } // finally
    return this;
  }

  // bit 0x1: builder owns a mutable stages_ list; bit 0x2: next_page_token set.
  private int bitField0_;

  private java.util.List<com.google.dataflow.v1beta3.StageSummary> stages_ =
      java.util.Collections.emptyList();

  // Copy-on-write: replaces a shared/immutable list with a private ArrayList
  // before the first mutation, then marks it as owned via bit 0x1.
  private void ensureStagesIsMutable() {
    if (!((bitField0_ & 0x00000001) != 0)) {
      stages_ = new java.util.ArrayList<com.google.dataflow.v1beta3.StageSummary>(stages_);
      bitField0_ |= 0x00000001;
    }
  }

  // Lazily-created field builder; once it exists it owns the list and
  // stages_ is nulled out (see getStagesFieldBuilder()).
  private com.google.protobuf.RepeatedFieldBuilderV3<
          com.google.dataflow.v1beta3.StageSummary,
          com.google.dataflow.v1beta3.StageSummary.Builder,
          com.google.dataflow.v1beta3.StageSummaryOrBuilder>
      stagesBuilder_;

  /**
   *
   *
   * <pre>
   * The stages of the job execution.
   * </pre>
   *
   * <code>repeated .google.dataflow.v1beta3.StageSummary stages = 1;</code>
   */
  public java.util.List<com.google.dataflow.v1beta3.StageSummary> getStagesList() {
    if (stagesBuilder_ == null) {
      return java.util.Collections.unmodifiableList(stages_);
    } else {
      return stagesBuilder_.getMessageList();
    }
  }

  /**
   *
   *
   * <pre>
   * The stages of the job execution.
   * </pre>
   *
   * <code>repeated .google.dataflow.v1beta3.StageSummary stages = 1;</code>
   */
  public int getStagesCount() {
    if (stagesBuilder_ == null) {
      return stages_.size();
    } else {
      return stagesBuilder_.getCount();
    }
  }

  /**
   *
   *
   * <pre>
   * The stages of the job execution.
   * </pre>
   *
   * <code>repeated .google.dataflow.v1beta3.StageSummary stages = 1;</code>
   */
  public com.google.dataflow.v1beta3.StageSummary getStages(int index) {
    if (stagesBuilder_ == null) {
      return stages_.get(index);
    } else {
      return stagesBuilder_.getMessage(index);
    }
  }

  /**
   *
   *
   * <pre>
   * The stages of the job execution.
   * </pre>
   *
   * <code>repeated .google.dataflow.v1beta3.StageSummary stages = 1;</code>
   */
  public Builder setStages(int index, com.google.dataflow.v1beta3.StageSummary value) {
    if (stagesBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureStagesIsMutable();
      stages_.set(index, value);
      onChanged();
    } else {
      stagesBuilder_.setMessage(index, value);
    }
    return this;
  }

  /**
   *
   *
   * <pre>
   * The stages of the job execution.
   * </pre>
   *
   * <code>repeated .google.dataflow.v1beta3.StageSummary stages = 1;</code>
   */
  public Builder setStages(
      int index, com.google.dataflow.v1beta3.StageSummary.Builder builderForValue) {
    if (stagesBuilder_ == null) {
      ensureStagesIsMutable();
      stages_.set(index, builderForValue.build());
      onChanged();
    } else {
      stagesBuilder_.setMessage(index, builderForValue.build());
    }
    return this;
  }

  /**
   *
   *
   * <pre>
   * The stages of the job execution.
   * </pre>
   *
   * <code>repeated .google.dataflow.v1beta3.StageSummary stages = 1;</code>
   */
  public Builder addStages(com.google.dataflow.v1beta3.StageSummary value) {
    if (stagesBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureStagesIsMutable();
      stages_.add(value);
      onChanged();
    } else {
      stagesBuilder_.addMessage(value);
    }
    return this;
  }

  /**
   *
   *
   * <pre>
   * The stages of the job execution.
   * </pre>
   *
   * <code>repeated .google.dataflow.v1beta3.StageSummary stages = 1;</code>
   */
  public Builder addStages(int index, com.google.dataflow.v1beta3.StageSummary value) {
    if (stagesBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureStagesIsMutable();
      stages_.add(index, value);
      onChanged();
    } else {
      stagesBuilder_.addMessage(index, value);
    }
    return this;
  }

  /**
   *
   *
   * <pre>
   * The stages of the job execution.
   * </pre>
   *
   * <code>repeated .google.dataflow.v1beta3.StageSummary stages = 1;</code>
   */
  public Builder addStages(com.google.dataflow.v1beta3.StageSummary.Builder builderForValue) {
    if (stagesBuilder_ == null) {
      ensureStagesIsMutable();
      stages_.add(builderForValue.build());
      onChanged();
    } else {
      stagesBuilder_.addMessage(builderForValue.build());
    }
    return this;
  }

  /**
   *
   *
   * <pre>
   * The stages of the job execution.
   * </pre>
   *
   * <code>repeated .google.dataflow.v1beta3.StageSummary stages = 1;</code>
   */
  public Builder addStages(
      int index, com.google.dataflow.v1beta3.StageSummary.Builder builderForValue) {
    if (stagesBuilder_ == null) {
      ensureStagesIsMutable();
      stages_.add(index, builderForValue.build());
      onChanged();
    } else {
      stagesBuilder_.addMessage(index, builderForValue.build());
    }
    return this;
  }

  /**
   *
   *
   * <pre>
   * The stages of the job execution.
   * </pre>
   *
   * <code>repeated .google.dataflow.v1beta3.StageSummary stages = 1;</code>
   */
  public Builder addAllStages(
      java.lang.Iterable<? extends com.google.dataflow.v1beta3.StageSummary> values) {
    if (stagesBuilder_ == null) {
      ensureStagesIsMutable();
      com.google.protobuf.AbstractMessageLite.Builder.addAll(values, stages_);
      onChanged();
    } else {
      stagesBuilder_.addAllMessages(values);
    }
    return this;
  }

  /**
   *
   *
   * <pre>
   * The stages of the job execution.
   * </pre>
   *
   * <code>repeated .google.dataflow.v1beta3.StageSummary stages = 1;</code>
   */
  public Builder clearStages() {
    if (stagesBuilder_ == null) {
      stages_ = java.util.Collections.emptyList();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
    } else {
      stagesBuilder_.clear();
    }
    return this;
  }

  /**
   *
   *
   * <pre>
   * The stages of the job execution.
   * </pre>
   *
   * <code>repeated .google.dataflow.v1beta3.StageSummary stages = 1;</code>
   */
  public Builder removeStages(int index) {
    if (stagesBuilder_ == null) {
      ensureStagesIsMutable();
      stages_.remove(index);
      onChanged();
    } else {
      stagesBuilder_.remove(index);
    }
    return this;
  }

  /**
   *
   *
   * <pre>
   * The stages of the job execution.
   * </pre>
   *
   * <code>repeated .google.dataflow.v1beta3.StageSummary stages = 1;</code>
   */
  public com.google.dataflow.v1beta3.StageSummary.Builder getStagesBuilder(int index) {
    return getStagesFieldBuilder().getBuilder(index);
  }

  /**
   *
   *
   * <pre>
   * The stages of the job execution.
   * </pre>
   *
   * <code>repeated .google.dataflow.v1beta3.StageSummary stages = 1;</code>
   */
  public com.google.dataflow.v1beta3.StageSummaryOrBuilder getStagesOrBuilder(int index) {
    if (stagesBuilder_ == null) {
      return stages_.get(index);
    } else {
      return stagesBuilder_.getMessageOrBuilder(index);
    }
  }

  /**
   *
   *
   * <pre>
   * The stages of the job execution.
   * </pre>
   *
   * <code>repeated .google.dataflow.v1beta3.StageSummary stages = 1;</code>
   */
  public java.util.List<? extends com.google.dataflow.v1beta3.StageSummaryOrBuilder>
      getStagesOrBuilderList() {
    if (stagesBuilder_ != null) {
      return stagesBuilder_.getMessageOrBuilderList();
    } else {
      return java.util.Collections.unmodifiableList(stages_);
    }
  }

  /**
   *
   *
   * <pre>
   * The stages of the job execution.
   * </pre>
   *
   * <code>repeated .google.dataflow.v1beta3.StageSummary stages = 1;</code>
   */
  public com.google.dataflow.v1beta3.StageSummary.Builder addStagesBuilder() {
    return getStagesFieldBuilder()
        .addBuilder(com.google.dataflow.v1beta3.StageSummary.getDefaultInstance());
  }

  /**
   *
   *
   * <pre>
   * The stages of the job execution.
   * </pre>
   *
   * <code>repeated .google.dataflow.v1beta3.StageSummary stages = 1;</code>
   */
  public com.google.dataflow.v1beta3.StageSummary.Builder addStagesBuilder(int index) {
    return getStagesFieldBuilder()
        .addBuilder(index, com.google.dataflow.v1beta3.StageSummary.getDefaultInstance());
  }

  /**
   *
   *
   * <pre>
   * The stages of the job execution.
   * </pre>
   *
   * <code>repeated .google.dataflow.v1beta3.StageSummary stages = 1;</code>
   */
  public java.util.List<com.google.dataflow.v1beta3.StageSummary.Builder> getStagesBuilderList() {
    return getStagesFieldBuilder().getBuilderList();
  }

  // Lazily creates the field builder; ownership of the list transfers to it,
  // so stages_ is nulled afterwards and all access goes through the builder.
  private com.google.protobuf.RepeatedFieldBuilderV3<
          com.google.dataflow.v1beta3.StageSummary,
          com.google.dataflow.v1beta3.StageSummary.Builder,
          com.google.dataflow.v1beta3.StageSummaryOrBuilder>
      getStagesFieldBuilder() {
    if (stagesBuilder_ == null) {
      stagesBuilder_ =
          new com.google.protobuf.RepeatedFieldBuilderV3<
              com.google.dataflow.v1beta3.StageSummary,
              com.google.dataflow.v1beta3.StageSummary.Builder,
              com.google.dataflow.v1beta3.StageSummaryOrBuilder>(
              stages_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
      stages_ = null;
    }
    return stagesBuilder_;
  }

  // Stored as either String or ByteString; converted lazily on access.
  private java.lang.Object nextPageToken_ = "";

  /**
   *
   *
   * <pre>
   * If present, this response does not contain all requested tasks. To obtain
   * the next page of results, repeat the request with page_token set to this
   * value.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The nextPageToken.
   */
  public java.lang.String getNextPageToken() {
    java.lang.Object ref = nextPageToken_;
    if (!(ref instanceof java.lang.String)) {
      // Decode the cached ByteString once and keep the String form.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      nextPageToken_ = s;
      return s;
    } else {
      return (java.lang.String) ref;
    }
  }

  /**
   *
   *
   * <pre>
   * If present, this response does not contain all requested tasks. To obtain
   * the next page of results, repeat the request with page_token set to this
   * value.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The bytes for nextPageToken.
   */
  public com.google.protobuf.ByteString getNextPageTokenBytes() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof String) {
      // Encode the cached String once and keep the ByteString form.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      nextPageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  /**
   *
   *
   * <pre>
   * If present, this response does not contain all requested tasks. To obtain
   * the next page of results, repeat the request with page_token set to this
   * value.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @param value The nextPageToken to set.
   * @return This builder for chaining.
   */
  public Builder setNextPageToken(java.lang.String value) {
    if (value == null) {
      throw new NullPointerException();
    }
    nextPageToken_ = value;
    bitField0_ |= 0x00000002;
    onChanged();
    return this;
  }

  /**
   *
   *
   * <pre>
   * If present, this response does not contain all requested tasks. To obtain
   * the next page of results, repeat the request with page_token set to this
   * value.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return This builder for chaining.
   */
  public Builder clearNextPageToken() {
    nextPageToken_ = getDefaultInstance().getNextPageToken();
    bitField0_ = (bitField0_ & ~0x00000002);
    onChanged();
    return this;
  }

  /**
   *
   *
   * <pre>
   * If present, this response does not contain all requested tasks. To obtain
   * the next page of results, repeat the request with page_token set to this
   * value.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @param value The bytes for nextPageToken to set.
   * @return This builder for chaining.
   */
  public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
    if (value == null) {
      throw new NullPointerException();
    }
    checkByteStringIsUtf8(value);
    nextPageToken_ = value;
    bitField0_ |= 0x00000002;
    onChanged();
    return this;
  }

  @java.lang.Override
  public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.setUnknownFields(unknownFields);
  }

  @java.lang.Override
  public final Builder mergeUnknownFields(
      final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.mergeUnknownFields(unknownFields);
  }

  // @@protoc_insertion_point(builder_scope:google.dataflow.v1beta3.JobExecutionDetails)
}
// @@protoc_insertion_point(class_scope:google.dataflow.v1beta3.JobExecutionDetails)
// Shared immutable singleton representing the empty message.
private static final com.google.dataflow.v1beta3.JobExecutionDetails DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.dataflow.v1beta3.JobExecutionDetails();
}

public static com.google.dataflow.v1beta3.JobExecutionDetails getDefaultInstance() {
  return DEFAULT_INSTANCE;
}

// Parser used by all parseFrom overloads; returns a partially-built message
// attached to the exception when parsing fails midway.
private static final com.google.protobuf.Parser<JobExecutionDetails> PARSER =
    new com.google.protobuf.AbstractParser<JobExecutionDetails>() {
      @java.lang.Override
      public JobExecutionDetails parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          // Wrap plain I/O failures so callers see a single exception type.
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

public static com.google.protobuf.Parser<JobExecutionDetails> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<JobExecutionDetails> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.dataflow.v1beta3.JobExecutionDetails getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
|
googleapis/google-api-java-client-services | 35,622 | clients/google-api-services-dfareporting/v3.3/1.30.1/com/google/api/services/dfareporting/model/Ad.java | /*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.dfareporting.model;
/**
* Contains properties of a Campaign Manager ad.
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the Campaign Manager 360 API. For a detailed explanation
* see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public final class Ad extends com.google.api.client.json.GenericJson {
/**
* Account ID of this ad. This is a read-only field that can be left blank.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key @com.google.api.client.json.JsonString
private java.lang.Long accountId;
/**
* Whether this ad is active. When true, archived must be false.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean active;
/**
* Advertiser ID of this ad. This is a required field on insertion.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key @com.google.api.client.json.JsonString
private java.lang.Long advertiserId;
/**
* Dimension value for the ID of the advertiser. This is a read-only, auto-generated field.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private DimensionValue advertiserIdDimensionValue;
/**
* Whether this ad is archived. When true, active must be false.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean archived;
/**
* Audience segment ID that is being targeted for this ad. Applicable when type is
* AD_SERVING_STANDARD_AD.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key @com.google.api.client.json.JsonString
private java.lang.Long audienceSegmentId;
/**
* Campaign ID of this ad. This is a required field on insertion.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key @com.google.api.client.json.JsonString
private java.lang.Long campaignId;
/**
* Dimension value for the ID of the campaign. This is a read-only, auto-generated field.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private DimensionValue campaignIdDimensionValue;
/**
* Click-through URL for this ad. This is a required field on insertion. Applicable when type is
* AD_SERVING_CLICK_TRACKER.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private ClickThroughUrl clickThroughUrl;
/**
* Click-through URL suffix properties for this ad. Applies to the URL in the ad or (if overriding
* ad properties) the URL in the creative.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private ClickThroughUrlSuffixProperties clickThroughUrlSuffixProperties;
/**
* Comments for this ad.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String comments;
/**
* Compatibility of this ad. Applicable when type is AD_SERVING_DEFAULT_AD. DISPLAY and
* DISPLAY_INTERSTITIAL refer to either rendering on desktop or on mobile devices or in mobile
* apps for regular or interstitial ads, respectively. APP and APP_INTERSTITIAL are only used for
* existing default ads. New mobile placements must be assigned DISPLAY or DISPLAY_INTERSTITIAL
* and default ads created for those placements will be limited to those compatibility types.
* IN_STREAM_VIDEO refers to rendering in-stream video ads developed with the VAST standard.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String compatibility;
/**
* Information about the creation of this ad. This is a read-only field.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private LastModifiedInfo createInfo;
/**
* Creative group assignments for this ad. Applicable when type is AD_SERVING_CLICK_TRACKER. Only
* one assignment per creative group number is allowed for a maximum of two assignments.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<CreativeGroupAssignment> creativeGroupAssignments;
/**
* Creative rotation for this ad. Applicable when type is AD_SERVING_DEFAULT_AD,
* AD_SERVING_STANDARD_AD, or AD_SERVING_TRACKING. When type is AD_SERVING_DEFAULT_AD, this field
* should have exactly one creativeAssignment .
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private CreativeRotation creativeRotation;
/**
* Time and day targeting information for this ad. This field must be left blank if the ad is
* using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private DayPartTargeting dayPartTargeting;
/**
* Default click-through event tag properties for this ad.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private DefaultClickThroughEventTagProperties defaultClickThroughEventTagProperties;
/**
* Delivery schedule information for this ad. Applicable when type is AD_SERVING_STANDARD_AD or
* AD_SERVING_TRACKING. This field along with subfields priority and impressionRatio are required
* on insertion when type is AD_SERVING_STANDARD_AD.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private DeliverySchedule deliverySchedule;
/**
* Whether this ad is a dynamic click tracker. Applicable when type is AD_SERVING_CLICK_TRACKER.
* This is a required field on insert, and is read-only after insert.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean dynamicClickTracker;
/**
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private com.google.api.client.util.DateTime endTime;
/**
* Event tag overrides for this ad.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<EventTagOverride> eventTagOverrides;
/**
* Geographical targeting information for this ad. This field must be left blank if the ad is
* using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private GeoTargeting geoTargeting;
/**
* ID of this ad. This is a read-only, auto-generated field.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key @com.google.api.client.json.JsonString
private java.lang.Long id;
/**
* Dimension value for the ID of this ad. This is a read-only, auto-generated field.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private DimensionValue idDimensionValue;
/**
* Key-value targeting information for this ad. This field must be left blank if the ad is using a
* targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private KeyValueTargetingExpression keyValueTargetingExpression;
/**
* Identifies what kind of resource this is. Value: the fixed string "dfareporting#ad".
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String kind;
/**
* Language targeting information for this ad. This field must be left blank if the ad is using a
* targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private LanguageTargeting languageTargeting;
/**
* Information about the most recent modification of this ad. This is a read-only field.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private LastModifiedInfo lastModifiedInfo;
/**
* Name of this ad. This is a required field and must be less than 256 characters long.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String name;
/**
* Placement assignments for this ad.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<PlacementAssignment> placementAssignments;
/**
* Remarketing list targeting expression for this ad. This field must be left blank if the ad is
* using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private ListTargetingExpression remarketingListExpression;
/**
* Size of this ad. Applicable when type is AD_SERVING_DEFAULT_AD.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private Size size;
/**
* Whether this ad is ssl compliant. This is a read-only field that is auto-generated when the ad
* is inserted or updated.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean sslCompliant;
/**
* Whether this ad requires ssl. This is a read-only field that is auto-generated when the ad is
* inserted or updated.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean sslRequired;
/**
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private com.google.api.client.util.DateTime startTime;
/**
* Subaccount ID of this ad. This is a read-only field that can be left blank.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key @com.google.api.client.json.JsonString
private java.lang.Long subaccountId;
/**
* Targeting template ID, used to apply preconfigured targeting information to this ad. This
* cannot be set while any of dayPartTargeting, geoTargeting, keyValueTargetingExpression,
* languageTargeting, remarketingListExpression, or technologyTargeting are set. Applicable when
* type is AD_SERVING_STANDARD_AD.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key @com.google.api.client.json.JsonString
private java.lang.Long targetingTemplateId;
/**
* Technology platform targeting information for this ad. This field must be left blank if the ad
* is using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private TechnologyTargeting technologyTargeting;
/**
* Type of ad. This is a required field on insertion. Note that default ads (
* AD_SERVING_DEFAULT_AD) cannot be created directly (see Creative resource).
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String type;
/**
* Account ID of this ad. This is a read-only field that can be left blank.
* @return value or {@code null} for none
*/
public java.lang.Long getAccountId() {
return accountId;
}
/**
* Account ID of this ad. This is a read-only field that can be left blank.
* @param accountId accountId or {@code null} for none
*/
public Ad setAccountId(java.lang.Long accountId) {
this.accountId = accountId;
return this;
}
/**
* Whether this ad is active. When true, archived must be false.
* @return value or {@code null} for none
*/
public java.lang.Boolean getActive() {
return active;
}
/**
* Whether this ad is active. When true, archived must be false.
* @param active active or {@code null} for none
*/
public Ad setActive(java.lang.Boolean active) {
this.active = active;
return this;
}
/**
* Advertiser ID of this ad. This is a required field on insertion.
* @return value or {@code null} for none
*/
public java.lang.Long getAdvertiserId() {
return advertiserId;
}
/**
* Advertiser ID of this ad. This is a required field on insertion.
* @param advertiserId advertiserId or {@code null} for none
*/
public Ad setAdvertiserId(java.lang.Long advertiserId) {
this.advertiserId = advertiserId;
return this;
}
/**
* Dimension value for the ID of the advertiser. This is a read-only, auto-generated field.
* @return value or {@code null} for none
*/
public DimensionValue getAdvertiserIdDimensionValue() {
return advertiserIdDimensionValue;
}
/**
* Dimension value for the ID of the advertiser. This is a read-only, auto-generated field.
* @param advertiserIdDimensionValue advertiserIdDimensionValue or {@code null} for none
*/
public Ad setAdvertiserIdDimensionValue(DimensionValue advertiserIdDimensionValue) {
this.advertiserIdDimensionValue = advertiserIdDimensionValue;
return this;
}
/**
* Whether this ad is archived. When true, active must be false.
* @return value or {@code null} for none
*/
public java.lang.Boolean getArchived() {
return archived;
}
/**
* Whether this ad is archived. When true, active must be false.
* @param archived archived or {@code null} for none
*/
public Ad setArchived(java.lang.Boolean archived) {
this.archived = archived;
return this;
}
/**
* Audience segment ID that is being targeted for this ad. Applicable when type is
* AD_SERVING_STANDARD_AD.
* @return value or {@code null} for none
*/
public java.lang.Long getAudienceSegmentId() {
return audienceSegmentId;
}
/**
* Audience segment ID that is being targeted for this ad. Applicable when type is
* AD_SERVING_STANDARD_AD.
* @param audienceSegmentId audienceSegmentId or {@code null} for none
*/
public Ad setAudienceSegmentId(java.lang.Long audienceSegmentId) {
this.audienceSegmentId = audienceSegmentId;
return this;
}
/**
* Campaign ID of this ad. This is a required field on insertion.
* @return value or {@code null} for none
*/
public java.lang.Long getCampaignId() {
return campaignId;
}
/**
* Campaign ID of this ad. This is a required field on insertion.
* @param campaignId campaignId or {@code null} for none
*/
public Ad setCampaignId(java.lang.Long campaignId) {
this.campaignId = campaignId;
return this;
}
/**
* Dimension value for the ID of the campaign. This is a read-only, auto-generated field.
* @return value or {@code null} for none
*/
public DimensionValue getCampaignIdDimensionValue() {
return campaignIdDimensionValue;
}
/**
* Dimension value for the ID of the campaign. This is a read-only, auto-generated field.
* @param campaignIdDimensionValue campaignIdDimensionValue or {@code null} for none
*/
public Ad setCampaignIdDimensionValue(DimensionValue campaignIdDimensionValue) {
this.campaignIdDimensionValue = campaignIdDimensionValue;
return this;
}
/**
* Click-through URL for this ad. This is a required field on insertion. Applicable when type is
* AD_SERVING_CLICK_TRACKER.
* @return value or {@code null} for none
*/
public ClickThroughUrl getClickThroughUrl() {
return clickThroughUrl;
}
/**
* Click-through URL for this ad. This is a required field on insertion. Applicable when type is
* AD_SERVING_CLICK_TRACKER.
* @param clickThroughUrl clickThroughUrl or {@code null} for none
*/
public Ad setClickThroughUrl(ClickThroughUrl clickThroughUrl) {
this.clickThroughUrl = clickThroughUrl;
return this;
}
/**
* Click-through URL suffix properties for this ad. Applies to the URL in the ad or (if overriding
* ad properties) the URL in the creative.
* @return value or {@code null} for none
*/
public ClickThroughUrlSuffixProperties getClickThroughUrlSuffixProperties() {
return clickThroughUrlSuffixProperties;
}
/**
* Click-through URL suffix properties for this ad. Applies to the URL in the ad or (if overriding
* ad properties) the URL in the creative.
* @param clickThroughUrlSuffixProperties clickThroughUrlSuffixProperties or {@code null} for none
*/
public Ad setClickThroughUrlSuffixProperties(ClickThroughUrlSuffixProperties clickThroughUrlSuffixProperties) {
this.clickThroughUrlSuffixProperties = clickThroughUrlSuffixProperties;
return this;
}
/**
* Comments for this ad.
* @return value or {@code null} for none
*/
public java.lang.String getComments() {
return comments;
}
/**
* Comments for this ad.
* @param comments comments or {@code null} for none
*/
public Ad setComments(java.lang.String comments) {
this.comments = comments;
return this;
}
/**
* Compatibility of this ad. Applicable when type is AD_SERVING_DEFAULT_AD. DISPLAY and
* DISPLAY_INTERSTITIAL refer to either rendering on desktop or on mobile devices or in mobile
* apps for regular or interstitial ads, respectively. APP and APP_INTERSTITIAL are only used for
* existing default ads. New mobile placements must be assigned DISPLAY or DISPLAY_INTERSTITIAL
* and default ads created for those placements will be limited to those compatibility types.
* IN_STREAM_VIDEO refers to rendering in-stream video ads developed with the VAST standard.
* @return value or {@code null} for none
*/
public java.lang.String getCompatibility() {
return compatibility;
}
/**
* Compatibility of this ad. Applicable when type is AD_SERVING_DEFAULT_AD. DISPLAY and
* DISPLAY_INTERSTITIAL refer to either rendering on desktop or on mobile devices or in mobile
* apps for regular or interstitial ads, respectively. APP and APP_INTERSTITIAL are only used for
* existing default ads. New mobile placements must be assigned DISPLAY or DISPLAY_INTERSTITIAL
* and default ads created for those placements will be limited to those compatibility types.
* IN_STREAM_VIDEO refers to rendering in-stream video ads developed with the VAST standard.
* @param compatibility compatibility or {@code null} for none
*/
public Ad setCompatibility(java.lang.String compatibility) {
this.compatibility = compatibility;
return this;
}
/**
* Information about the creation of this ad. This is a read-only field.
* @return value or {@code null} for none
*/
public LastModifiedInfo getCreateInfo() {
return createInfo;
}
/**
* Information about the creation of this ad. This is a read-only field.
* @param createInfo createInfo or {@code null} for none
*/
public Ad setCreateInfo(LastModifiedInfo createInfo) {
this.createInfo = createInfo;
return this;
}
/**
* Creative group assignments for this ad. Applicable when type is AD_SERVING_CLICK_TRACKER. Only
* one assignment per creative group number is allowed for a maximum of two assignments.
* @return value or {@code null} for none
*/
public java.util.List<CreativeGroupAssignment> getCreativeGroupAssignments() {
return creativeGroupAssignments;
}
/**
* Creative group assignments for this ad. Applicable when type is AD_SERVING_CLICK_TRACKER. Only
* one assignment per creative group number is allowed for a maximum of two assignments.
* @param creativeGroupAssignments creativeGroupAssignments or {@code null} for none
*/
public Ad setCreativeGroupAssignments(java.util.List<CreativeGroupAssignment> creativeGroupAssignments) {
this.creativeGroupAssignments = creativeGroupAssignments;
return this;
}
/**
* Creative rotation for this ad. Applicable when type is AD_SERVING_DEFAULT_AD,
* AD_SERVING_STANDARD_AD, or AD_SERVING_TRACKING. When type is AD_SERVING_DEFAULT_AD, this field
* should have exactly one creativeAssignment .
* @return value or {@code null} for none
*/
public CreativeRotation getCreativeRotation() {
return creativeRotation;
}
/**
* Creative rotation for this ad. Applicable when type is AD_SERVING_DEFAULT_AD,
* AD_SERVING_STANDARD_AD, or AD_SERVING_TRACKING. When type is AD_SERVING_DEFAULT_AD, this field
* should have exactly one creativeAssignment .
* @param creativeRotation creativeRotation or {@code null} for none
*/
public Ad setCreativeRotation(CreativeRotation creativeRotation) {
this.creativeRotation = creativeRotation;
return this;
}
/**
* Time and day targeting information for this ad. This field must be left blank if the ad is
* using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* @return value or {@code null} for none
*/
public DayPartTargeting getDayPartTargeting() {
return dayPartTargeting;
}
/**
* Time and day targeting information for this ad. This field must be left blank if the ad is
* using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* @param dayPartTargeting dayPartTargeting or {@code null} for none
*/
public Ad setDayPartTargeting(DayPartTargeting dayPartTargeting) {
this.dayPartTargeting = dayPartTargeting;
return this;
}
/**
* Default click-through event tag properties for this ad.
* @return value or {@code null} for none
*/
public DefaultClickThroughEventTagProperties getDefaultClickThroughEventTagProperties() {
return defaultClickThroughEventTagProperties;
}
/**
* Default click-through event tag properties for this ad.
* @param defaultClickThroughEventTagProperties defaultClickThroughEventTagProperties or {@code null} for none
*/
public Ad setDefaultClickThroughEventTagProperties(DefaultClickThroughEventTagProperties defaultClickThroughEventTagProperties) {
this.defaultClickThroughEventTagProperties = defaultClickThroughEventTagProperties;
return this;
}
/**
* Delivery schedule information for this ad. Applicable when type is AD_SERVING_STANDARD_AD or
* AD_SERVING_TRACKING. This field along with subfields priority and impressionRatio are required
* on insertion when type is AD_SERVING_STANDARD_AD.
* @return value or {@code null} for none
*/
public DeliverySchedule getDeliverySchedule() {
return deliverySchedule;
}
/**
* Delivery schedule information for this ad. Applicable when type is AD_SERVING_STANDARD_AD or
* AD_SERVING_TRACKING. This field along with subfields priority and impressionRatio are required
* on insertion when type is AD_SERVING_STANDARD_AD.
* @param deliverySchedule deliverySchedule or {@code null} for none
*/
public Ad setDeliverySchedule(DeliverySchedule deliverySchedule) {
this.deliverySchedule = deliverySchedule;
return this;
}
/**
* Whether this ad is a dynamic click tracker. Applicable when type is AD_SERVING_CLICK_TRACKER.
* This is a required field on insert, and is read-only after insert.
* @return value or {@code null} for none
*/
public java.lang.Boolean getDynamicClickTracker() {
return dynamicClickTracker;
}
/**
* Whether this ad is a dynamic click tracker. Applicable when type is AD_SERVING_CLICK_TRACKER.
* This is a required field on insert, and is read-only after insert.
* @param dynamicClickTracker dynamicClickTracker or {@code null} for none
*/
public Ad setDynamicClickTracker(java.lang.Boolean dynamicClickTracker) {
this.dynamicClickTracker = dynamicClickTracker;
return this;
}
/**
* @return value or {@code null} for none
*/
public com.google.api.client.util.DateTime getEndTime() {
return endTime;
}
/**
* @param endTime endTime or {@code null} for none
*/
public Ad setEndTime(com.google.api.client.util.DateTime endTime) {
this.endTime = endTime;
return this;
}
/**
* Event tag overrides for this ad.
* @return value or {@code null} for none
*/
public java.util.List<EventTagOverride> getEventTagOverrides() {
return eventTagOverrides;
}
/**
* Event tag overrides for this ad.
* @param eventTagOverrides eventTagOverrides or {@code null} for none
*/
public Ad setEventTagOverrides(java.util.List<EventTagOverride> eventTagOverrides) {
this.eventTagOverrides = eventTagOverrides;
return this;
}
/**
* Geographical targeting information for this ad. This field must be left blank if the ad is
* using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* @return value or {@code null} for none
*/
public GeoTargeting getGeoTargeting() {
return geoTargeting;
}
/**
* Geographical targeting information for this ad. This field must be left blank if the ad is
* using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* @param geoTargeting geoTargeting or {@code null} for none
*/
public Ad setGeoTargeting(GeoTargeting geoTargeting) {
this.geoTargeting = geoTargeting;
return this;
}
/**
* ID of this ad. This is a read-only, auto-generated field.
* @return value or {@code null} for none
*/
public java.lang.Long getId() {
return id;
}
/**
* ID of this ad. This is a read-only, auto-generated field.
* @param id id or {@code null} for none
*/
public Ad setId(java.lang.Long id) {
this.id = id;
return this;
}
/**
* Dimension value for the ID of this ad. This is a read-only, auto-generated field.
* @return value or {@code null} for none
*/
public DimensionValue getIdDimensionValue() {
return idDimensionValue;
}
/**
* Dimension value for the ID of this ad. This is a read-only, auto-generated field.
* @param idDimensionValue idDimensionValue or {@code null} for none
*/
public Ad setIdDimensionValue(DimensionValue idDimensionValue) {
this.idDimensionValue = idDimensionValue;
return this;
}
/**
* Key-value targeting information for this ad. This field must be left blank if the ad is using a
* targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* @return value or {@code null} for none
*/
public KeyValueTargetingExpression getKeyValueTargetingExpression() {
return keyValueTargetingExpression;
}
/**
* Key-value targeting information for this ad. This field must be left blank if the ad is using a
* targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* @param keyValueTargetingExpression keyValueTargetingExpression or {@code null} for none
*/
public Ad setKeyValueTargetingExpression(KeyValueTargetingExpression keyValueTargetingExpression) {
this.keyValueTargetingExpression = keyValueTargetingExpression;
return this;
}
/**
* Identifies what kind of resource this is. Value: the fixed string "dfareporting#ad".
* @return value or {@code null} for none
*/
public java.lang.String getKind() {
return kind;
}
/**
* Identifies what kind of resource this is. Value: the fixed string "dfareporting#ad".
* @param kind kind or {@code null} for none
*/
public Ad setKind(java.lang.String kind) {
this.kind = kind;
return this;
}
/**
* Language targeting information for this ad. This field must be left blank if the ad is using a
* targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* @return value or {@code null} for none
*/
public LanguageTargeting getLanguageTargeting() {
return languageTargeting;
}
/**
* Language targeting information for this ad. This field must be left blank if the ad is using a
* targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* @param languageTargeting languageTargeting or {@code null} for none
*/
public Ad setLanguageTargeting(LanguageTargeting languageTargeting) {
this.languageTargeting = languageTargeting;
return this;
}
/**
* Information about the most recent modification of this ad. This is a read-only field.
* @return value or {@code null} for none
*/
public LastModifiedInfo getLastModifiedInfo() {
return lastModifiedInfo;
}
/**
* Information about the most recent modification of this ad. This is a read-only field.
* @param lastModifiedInfo lastModifiedInfo or {@code null} for none
*/
public Ad setLastModifiedInfo(LastModifiedInfo lastModifiedInfo) {
this.lastModifiedInfo = lastModifiedInfo;
return this;
}
/**
* Name of this ad. This is a required field and must be less than 256 characters long.
* @return value or {@code null} for none
*/
public java.lang.String getName() {
return name;
}
/**
* Name of this ad. This is a required field and must be less than 256 characters long.
* @param name name or {@code null} for none
*/
public Ad setName(java.lang.String name) {
this.name = name;
return this;
}
/**
* Placement assignments for this ad.
* @return value or {@code null} for none
*/
public java.util.List<PlacementAssignment> getPlacementAssignments() {
return placementAssignments;
}
/**
* Placement assignments for this ad.
* @param placementAssignments placementAssignments or {@code null} for none
*/
public Ad setPlacementAssignments(java.util.List<PlacementAssignment> placementAssignments) {
this.placementAssignments = placementAssignments;
return this;
}
/**
* Remarketing list targeting expression for this ad. This field must be left blank if the ad is
* using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* @return value or {@code null} for none
*/
public ListTargetingExpression getRemarketingListExpression() {
return remarketingListExpression;
}
/**
* Remarketing list targeting expression for this ad. This field must be left blank if the ad is
* using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* @param remarketingListExpression remarketingListExpression or {@code null} for none
*/
public Ad setRemarketingListExpression(ListTargetingExpression remarketingListExpression) {
this.remarketingListExpression = remarketingListExpression;
return this;
}
/**
* Size of this ad. Applicable when type is AD_SERVING_DEFAULT_AD.
* @return value or {@code null} for none
*/
public Size getSize() {
return size;
}
/**
* Size of this ad. Applicable when type is AD_SERVING_DEFAULT_AD.
* @param size size or {@code null} for none
*/
public Ad setSize(Size size) {
this.size = size;
return this;
}
/**
* Whether this ad is ssl compliant. This is a read-only field that is auto-generated when the ad
* is inserted or updated.
* @return value or {@code null} for none
*/
public java.lang.Boolean getSslCompliant() {
return sslCompliant;
}
/**
* Whether this ad is ssl compliant. This is a read-only field that is auto-generated when the ad
* is inserted or updated.
* @param sslCompliant sslCompliant or {@code null} for none
*/
public Ad setSslCompliant(java.lang.Boolean sslCompliant) {
this.sslCompliant = sslCompliant;
return this;
}
/**
* Whether this ad requires ssl. This is a read-only field that is auto-generated when the ad is
* inserted or updated.
* @return value or {@code null} for none
*/
public java.lang.Boolean getSslRequired() {
return sslRequired;
}
/**
* Whether this ad requires ssl. This is a read-only field that is auto-generated when the ad is
* inserted or updated.
* @param sslRequired sslRequired or {@code null} for none
*/
public Ad setSslRequired(java.lang.Boolean sslRequired) {
this.sslRequired = sslRequired;
return this;
}
/**
* @return value or {@code null} for none
*/
public com.google.api.client.util.DateTime getStartTime() {
return startTime;
}
/**
* @param startTime startTime or {@code null} for none
*/
public Ad setStartTime(com.google.api.client.util.DateTime startTime) {
this.startTime = startTime;
return this;
}
/**
* Subaccount ID of this ad. This is a read-only field that can be left blank.
* @return value or {@code null} for none
*/
public java.lang.Long getSubaccountId() {
return subaccountId;
}
/**
* Subaccount ID of this ad. This is a read-only field that can be left blank.
* @param subaccountId subaccountId or {@code null} for none
*/
public Ad setSubaccountId(java.lang.Long subaccountId) {
this.subaccountId = subaccountId;
return this;
}
/**
* Targeting template ID, used to apply preconfigured targeting information to this ad. This
* cannot be set while any of dayPartTargeting, geoTargeting, keyValueTargetingExpression,
* languageTargeting, remarketingListExpression, or technologyTargeting are set. Applicable when
* type is AD_SERVING_STANDARD_AD.
* @return value or {@code null} for none
*/
public java.lang.Long getTargetingTemplateId() {
return targetingTemplateId;
}
/**
* Targeting template ID, used to apply preconfigured targeting information to this ad. This
* cannot be set while any of dayPartTargeting, geoTargeting, keyValueTargetingExpression,
* languageTargeting, remarketingListExpression, or technologyTargeting are set. Applicable when
* type is AD_SERVING_STANDARD_AD.
* @param targetingTemplateId targetingTemplateId or {@code null} for none
*/
public Ad setTargetingTemplateId(java.lang.Long targetingTemplateId) {
this.targetingTemplateId = targetingTemplateId;
return this;
}
/**
* Technology platform targeting information for this ad. This field must be left blank if the ad
* is using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* @return value or {@code null} for none
*/
public TechnologyTargeting getTechnologyTargeting() {
return technologyTargeting;
}
/**
* Technology platform targeting information for this ad. This field must be left blank if the ad
* is using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* @param technologyTargeting technologyTargeting or {@code null} for none
*/
public Ad setTechnologyTargeting(TechnologyTargeting technologyTargeting) {
this.technologyTargeting = technologyTargeting;
return this;
}
/**
* Type of ad. This is a required field on insertion. Note that default ads (
* AD_SERVING_DEFAULT_AD) cannot be created directly (see Creative resource).
* @return value or {@code null} for none
*/
public java.lang.String getType() {
return type;
}
/**
* Type of ad. This is a required field on insertion. Note that default ads (
* AD_SERVING_DEFAULT_AD) cannot be created directly (see Creative resource).
* @param type type or {@code null} for none
*/
public Ad setType(java.lang.String type) {
this.type = type;
return this;
}
@Override
public Ad set(String fieldName, Object value) {
return (Ad) super.set(fieldName, value);
}
@Override
public Ad clone() {
return (Ad) super.clone();
}
}
/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.dfareporting.model;
/**
* Contains properties of a Campaign Manager ad.
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the Campaign Manager 360 API. For a detailed explanation
* see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public final class Ad extends com.google.api.client.json.GenericJson {
/**
* Account ID of this ad. This is a read-only field that can be left blank.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key @com.google.api.client.json.JsonString
private java.lang.Long accountId;
/**
* Whether this ad is active. When true, archived must be false.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean active;
/**
* Advertiser ID of this ad. This is a required field on insertion.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key @com.google.api.client.json.JsonString
private java.lang.Long advertiserId;
/**
* Dimension value for the ID of the advertiser. This is a read-only, auto-generated field.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private DimensionValue advertiserIdDimensionValue;
/**
* Whether this ad is archived. When true, active must be false.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean archived;
/**
* Audience segment ID that is being targeted for this ad. Applicable when type is
* AD_SERVING_STANDARD_AD.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key @com.google.api.client.json.JsonString
private java.lang.Long audienceSegmentId;
/**
* Campaign ID of this ad. This is a required field on insertion.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key @com.google.api.client.json.JsonString
private java.lang.Long campaignId;
/**
* Dimension value for the ID of the campaign. This is a read-only, auto-generated field.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private DimensionValue campaignIdDimensionValue;
/**
* Click-through URL for this ad. This is a required field on insertion. Applicable when type is
* AD_SERVING_CLICK_TRACKER.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private ClickThroughUrl clickThroughUrl;
/**
* Click-through URL suffix properties for this ad. Applies to the URL in the ad or (if overriding
* ad properties) the URL in the creative.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private ClickThroughUrlSuffixProperties clickThroughUrlSuffixProperties;
/**
* Comments for this ad.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String comments;
/**
* Compatibility of this ad. Applicable when type is AD_SERVING_DEFAULT_AD. DISPLAY and
* DISPLAY_INTERSTITIAL refer to either rendering on desktop or on mobile devices or in mobile
* apps for regular or interstitial ads, respectively. APP and APP_INTERSTITIAL are only used for
* existing default ads. New mobile placements must be assigned DISPLAY or DISPLAY_INTERSTITIAL
* and default ads created for those placements will be limited to those compatibility types.
* IN_STREAM_VIDEO refers to rendering in-stream video ads developed with the VAST standard.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String compatibility;
/**
* Information about the creation of this ad. This is a read-only field.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private LastModifiedInfo createInfo;
/**
* Creative group assignments for this ad. Applicable when type is AD_SERVING_CLICK_TRACKER. Only
* one assignment per creative group number is allowed for a maximum of two assignments.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<CreativeGroupAssignment> creativeGroupAssignments;
/**
* Creative rotation for this ad. Applicable when type is AD_SERVING_DEFAULT_AD,
* AD_SERVING_STANDARD_AD, or AD_SERVING_TRACKING. When type is AD_SERVING_DEFAULT_AD, this field
* should have exactly one creativeAssignment .
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private CreativeRotation creativeRotation;
/**
* Time and day targeting information for this ad. This field must be left blank if the ad is
* using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private DayPartTargeting dayPartTargeting;
/**
* Default click-through event tag properties for this ad.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private DefaultClickThroughEventTagProperties defaultClickThroughEventTagProperties;
/**
* Delivery schedule information for this ad. Applicable when type is AD_SERVING_STANDARD_AD or
* AD_SERVING_TRACKING. This field along with subfields priority and impressionRatio are required
* on insertion when type is AD_SERVING_STANDARD_AD.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private DeliverySchedule deliverySchedule;
/**
* Whether this ad is a dynamic click tracker. Applicable when type is AD_SERVING_CLICK_TRACKER.
* This is a required field on insert, and is read-only after insert.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean dynamicClickTracker;
/**
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
  private com.google.api.client.util.DateTime endTime;
  /**
   * Event tag overrides for this ad.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<EventTagOverride> eventTagOverrides;
  /**
   * Geographical targeting information for this ad. This field must be left blank if the ad is
   * using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private GeoTargeting geoTargeting;
  /**
   * ID of this ad. This is a read-only, auto-generated field.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key @com.google.api.client.json.JsonString
  private java.lang.Long id;
  /**
   * Dimension value for the ID of this ad. This is a read-only, auto-generated field.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private DimensionValue idDimensionValue;
  /**
   * Key-value targeting information for this ad. This field must be left blank if the ad is using a
   * targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private KeyValueTargetingExpression keyValueTargetingExpression;
  /**
   * Identifies what kind of resource this is. Value: the fixed string "dfareporting#ad".
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String kind;
  /**
   * Language targeting information for this ad. This field must be left blank if the ad is using a
   * targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private LanguageTargeting languageTargeting;
  /**
   * Information about the most recent modification of this ad. This is a read-only field.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private LastModifiedInfo lastModifiedInfo;
  /**
   * Name of this ad. This is a required field and must be less than 256 characters long.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String name;
  /**
   * Placement assignments for this ad.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<PlacementAssignment> placementAssignments;
  /**
   * Remarketing list targeting expression for this ad. This field must be left blank if the ad is
   * using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private ListTargetingExpression remarketingListExpression;
  /**
   * Size of this ad. Applicable when type is AD_SERVING_DEFAULT_AD.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private Size size;
  /**
   * Whether this ad is ssl compliant. This is a read-only field that is auto-generated when the ad
   * is inserted or updated.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Boolean sslCompliant;
  /**
   * Whether this ad requires ssl. This is a read-only field that is auto-generated when the ad is
   * inserted or updated.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Boolean sslRequired;
  /**
   * NOTE(review): the discovery document supplies no description for this field; presumably the
   * date/time this ad starts serving — confirm against the Campaign Manager 360 Ad resource docs.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private com.google.api.client.util.DateTime startTime;
  /**
   * Subaccount ID of this ad. This is a read-only field that can be left blank.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key @com.google.api.client.json.JsonString
  private java.lang.Long subaccountId;
  /**
   * Targeting template ID, used to apply preconfigured targeting information to this ad. This
   * cannot be set while any of dayPartTargeting, geoTargeting, keyValueTargetingExpression,
   * languageTargeting, remarketingListExpression, or technologyTargeting are set. Applicable when
   * type is AD_SERVING_STANDARD_AD.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key @com.google.api.client.json.JsonString
  private java.lang.Long targetingTemplateId;
  /**
   * Technology platform targeting information for this ad. This field must be left blank if the ad
   * is using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private TechnologyTargeting technologyTargeting;
  /**
   * Type of ad. This is a required field on insertion. Note that default ads (
   * AD_SERVING_DEFAULT_AD) cannot be created directly (see Creative resource).
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String type;
/**
* Account ID of this ad. This is a read-only field that can be left blank.
* @return value or {@code null} for none
*/
public java.lang.Long getAccountId() {
return accountId;
}
/**
* Account ID of this ad. This is a read-only field that can be left blank.
* @param accountId accountId or {@code null} for none
*/
public Ad setAccountId(java.lang.Long accountId) {
this.accountId = accountId;
return this;
}
/**
* Whether this ad is active. When true, archived must be false.
* @return value or {@code null} for none
*/
public java.lang.Boolean getActive() {
return active;
}
/**
* Whether this ad is active. When true, archived must be false.
* @param active active or {@code null} for none
*/
public Ad setActive(java.lang.Boolean active) {
this.active = active;
return this;
}
/**
* Advertiser ID of this ad. This is a required field on insertion.
* @return value or {@code null} for none
*/
public java.lang.Long getAdvertiserId() {
return advertiserId;
}
/**
* Advertiser ID of this ad. This is a required field on insertion.
* @param advertiserId advertiserId or {@code null} for none
*/
public Ad setAdvertiserId(java.lang.Long advertiserId) {
this.advertiserId = advertiserId;
return this;
}
/**
* Dimension value for the ID of the advertiser. This is a read-only, auto-generated field.
* @return value or {@code null} for none
*/
public DimensionValue getAdvertiserIdDimensionValue() {
return advertiserIdDimensionValue;
}
/**
* Dimension value for the ID of the advertiser. This is a read-only, auto-generated field.
* @param advertiserIdDimensionValue advertiserIdDimensionValue or {@code null} for none
*/
public Ad setAdvertiserIdDimensionValue(DimensionValue advertiserIdDimensionValue) {
this.advertiserIdDimensionValue = advertiserIdDimensionValue;
return this;
}
/**
* Whether this ad is archived. When true, active must be false.
* @return value or {@code null} for none
*/
public java.lang.Boolean getArchived() {
return archived;
}
/**
* Whether this ad is archived. When true, active must be false.
* @param archived archived or {@code null} for none
*/
public Ad setArchived(java.lang.Boolean archived) {
this.archived = archived;
return this;
}
/**
* Audience segment ID that is being targeted for this ad. Applicable when type is
* AD_SERVING_STANDARD_AD.
* @return value or {@code null} for none
*/
public java.lang.Long getAudienceSegmentId() {
return audienceSegmentId;
}
/**
* Audience segment ID that is being targeted for this ad. Applicable when type is
* AD_SERVING_STANDARD_AD.
* @param audienceSegmentId audienceSegmentId or {@code null} for none
*/
public Ad setAudienceSegmentId(java.lang.Long audienceSegmentId) {
this.audienceSegmentId = audienceSegmentId;
return this;
}
/**
* Campaign ID of this ad. This is a required field on insertion.
* @return value or {@code null} for none
*/
public java.lang.Long getCampaignId() {
return campaignId;
}
/**
* Campaign ID of this ad. This is a required field on insertion.
* @param campaignId campaignId or {@code null} for none
*/
public Ad setCampaignId(java.lang.Long campaignId) {
this.campaignId = campaignId;
return this;
}
/**
* Dimension value for the ID of the campaign. This is a read-only, auto-generated field.
* @return value or {@code null} for none
*/
public DimensionValue getCampaignIdDimensionValue() {
return campaignIdDimensionValue;
}
/**
* Dimension value for the ID of the campaign. This is a read-only, auto-generated field.
* @param campaignIdDimensionValue campaignIdDimensionValue or {@code null} for none
*/
public Ad setCampaignIdDimensionValue(DimensionValue campaignIdDimensionValue) {
this.campaignIdDimensionValue = campaignIdDimensionValue;
return this;
}
/**
* Click-through URL for this ad. This is a required field on insertion. Applicable when type is
* AD_SERVING_CLICK_TRACKER.
* @return value or {@code null} for none
*/
public ClickThroughUrl getClickThroughUrl() {
return clickThroughUrl;
}
/**
* Click-through URL for this ad. This is a required field on insertion. Applicable when type is
* AD_SERVING_CLICK_TRACKER.
* @param clickThroughUrl clickThroughUrl or {@code null} for none
*/
public Ad setClickThroughUrl(ClickThroughUrl clickThroughUrl) {
this.clickThroughUrl = clickThroughUrl;
return this;
}
/**
* Click-through URL suffix properties for this ad. Applies to the URL in the ad or (if overriding
* ad properties) the URL in the creative.
* @return value or {@code null} for none
*/
public ClickThroughUrlSuffixProperties getClickThroughUrlSuffixProperties() {
return clickThroughUrlSuffixProperties;
}
/**
* Click-through URL suffix properties for this ad. Applies to the URL in the ad or (if overriding
* ad properties) the URL in the creative.
* @param clickThroughUrlSuffixProperties clickThroughUrlSuffixProperties or {@code null} for none
*/
public Ad setClickThroughUrlSuffixProperties(ClickThroughUrlSuffixProperties clickThroughUrlSuffixProperties) {
this.clickThroughUrlSuffixProperties = clickThroughUrlSuffixProperties;
return this;
}
/**
* Comments for this ad.
* @return value or {@code null} for none
*/
public java.lang.String getComments() {
return comments;
}
/**
* Comments for this ad.
* @param comments comments or {@code null} for none
*/
public Ad setComments(java.lang.String comments) {
this.comments = comments;
return this;
}
/**
* Compatibility of this ad. Applicable when type is AD_SERVING_DEFAULT_AD. DISPLAY and
* DISPLAY_INTERSTITIAL refer to either rendering on desktop or on mobile devices or in mobile
* apps for regular or interstitial ads, respectively. APP and APP_INTERSTITIAL are only used for
* existing default ads. New mobile placements must be assigned DISPLAY or DISPLAY_INTERSTITIAL
* and default ads created for those placements will be limited to those compatibility types.
* IN_STREAM_VIDEO refers to rendering in-stream video ads developed with the VAST standard.
* @return value or {@code null} for none
*/
public java.lang.String getCompatibility() {
return compatibility;
}
/**
* Compatibility of this ad. Applicable when type is AD_SERVING_DEFAULT_AD. DISPLAY and
* DISPLAY_INTERSTITIAL refer to either rendering on desktop or on mobile devices or in mobile
* apps for regular or interstitial ads, respectively. APP and APP_INTERSTITIAL are only used for
* existing default ads. New mobile placements must be assigned DISPLAY or DISPLAY_INTERSTITIAL
* and default ads created for those placements will be limited to those compatibility types.
* IN_STREAM_VIDEO refers to rendering in-stream video ads developed with the VAST standard.
* @param compatibility compatibility or {@code null} for none
*/
public Ad setCompatibility(java.lang.String compatibility) {
this.compatibility = compatibility;
return this;
}
/**
* Information about the creation of this ad. This is a read-only field.
* @return value or {@code null} for none
*/
public LastModifiedInfo getCreateInfo() {
return createInfo;
}
/**
* Information about the creation of this ad. This is a read-only field.
* @param createInfo createInfo or {@code null} for none
*/
public Ad setCreateInfo(LastModifiedInfo createInfo) {
this.createInfo = createInfo;
return this;
}
/**
* Creative group assignments for this ad. Applicable when type is AD_SERVING_CLICK_TRACKER. Only
* one assignment per creative group number is allowed for a maximum of two assignments.
* @return value or {@code null} for none
*/
public java.util.List<CreativeGroupAssignment> getCreativeGroupAssignments() {
return creativeGroupAssignments;
}
/**
* Creative group assignments for this ad. Applicable when type is AD_SERVING_CLICK_TRACKER. Only
* one assignment per creative group number is allowed for a maximum of two assignments.
* @param creativeGroupAssignments creativeGroupAssignments or {@code null} for none
*/
public Ad setCreativeGroupAssignments(java.util.List<CreativeGroupAssignment> creativeGroupAssignments) {
this.creativeGroupAssignments = creativeGroupAssignments;
return this;
}
/**
* Creative rotation for this ad. Applicable when type is AD_SERVING_DEFAULT_AD,
* AD_SERVING_STANDARD_AD, or AD_SERVING_TRACKING. When type is AD_SERVING_DEFAULT_AD, this field
* should have exactly one creativeAssignment .
* @return value or {@code null} for none
*/
public CreativeRotation getCreativeRotation() {
return creativeRotation;
}
/**
* Creative rotation for this ad. Applicable when type is AD_SERVING_DEFAULT_AD,
* AD_SERVING_STANDARD_AD, or AD_SERVING_TRACKING. When type is AD_SERVING_DEFAULT_AD, this field
* should have exactly one creativeAssignment .
* @param creativeRotation creativeRotation or {@code null} for none
*/
public Ad setCreativeRotation(CreativeRotation creativeRotation) {
this.creativeRotation = creativeRotation;
return this;
}
/**
* Time and day targeting information for this ad. This field must be left blank if the ad is
* using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* @return value or {@code null} for none
*/
public DayPartTargeting getDayPartTargeting() {
return dayPartTargeting;
}
/**
* Time and day targeting information for this ad. This field must be left blank if the ad is
* using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* @param dayPartTargeting dayPartTargeting or {@code null} for none
*/
public Ad setDayPartTargeting(DayPartTargeting dayPartTargeting) {
this.dayPartTargeting = dayPartTargeting;
return this;
}
/**
* Default click-through event tag properties for this ad.
* @return value or {@code null} for none
*/
public DefaultClickThroughEventTagProperties getDefaultClickThroughEventTagProperties() {
return defaultClickThroughEventTagProperties;
}
/**
* Default click-through event tag properties for this ad.
* @param defaultClickThroughEventTagProperties defaultClickThroughEventTagProperties or {@code null} for none
*/
public Ad setDefaultClickThroughEventTagProperties(DefaultClickThroughEventTagProperties defaultClickThroughEventTagProperties) {
this.defaultClickThroughEventTagProperties = defaultClickThroughEventTagProperties;
return this;
}
/**
* Delivery schedule information for this ad. Applicable when type is AD_SERVING_STANDARD_AD or
* AD_SERVING_TRACKING. This field along with subfields priority and impressionRatio are required
* on insertion when type is AD_SERVING_STANDARD_AD.
* @return value or {@code null} for none
*/
public DeliverySchedule getDeliverySchedule() {
return deliverySchedule;
}
/**
* Delivery schedule information for this ad. Applicable when type is AD_SERVING_STANDARD_AD or
* AD_SERVING_TRACKING. This field along with subfields priority and impressionRatio are required
* on insertion when type is AD_SERVING_STANDARD_AD.
* @param deliverySchedule deliverySchedule or {@code null} for none
*/
public Ad setDeliverySchedule(DeliverySchedule deliverySchedule) {
this.deliverySchedule = deliverySchedule;
return this;
}
/**
* Whether this ad is a dynamic click tracker. Applicable when type is AD_SERVING_CLICK_TRACKER.
* This is a required field on insert, and is read-only after insert.
* @return value or {@code null} for none
*/
public java.lang.Boolean getDynamicClickTracker() {
return dynamicClickTracker;
}
/**
* Whether this ad is a dynamic click tracker. Applicable when type is AD_SERVING_CLICK_TRACKER.
* This is a required field on insert, and is read-only after insert.
* @param dynamicClickTracker dynamicClickTracker or {@code null} for none
*/
public Ad setDynamicClickTracker(java.lang.Boolean dynamicClickTracker) {
this.dynamicClickTracker = dynamicClickTracker;
return this;
}
/**
* @return value or {@code null} for none
*/
public com.google.api.client.util.DateTime getEndTime() {
return endTime;
}
/**
* @param endTime endTime or {@code null} for none
*/
public Ad setEndTime(com.google.api.client.util.DateTime endTime) {
this.endTime = endTime;
return this;
}
/**
* Event tag overrides for this ad.
* @return value or {@code null} for none
*/
public java.util.List<EventTagOverride> getEventTagOverrides() {
return eventTagOverrides;
}
/**
* Event tag overrides for this ad.
* @param eventTagOverrides eventTagOverrides or {@code null} for none
*/
public Ad setEventTagOverrides(java.util.List<EventTagOverride> eventTagOverrides) {
this.eventTagOverrides = eventTagOverrides;
return this;
}
/**
* Geographical targeting information for this ad. This field must be left blank if the ad is
* using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* @return value or {@code null} for none
*/
public GeoTargeting getGeoTargeting() {
return geoTargeting;
}
/**
* Geographical targeting information for this ad. This field must be left blank if the ad is
* using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* @param geoTargeting geoTargeting or {@code null} for none
*/
public Ad setGeoTargeting(GeoTargeting geoTargeting) {
this.geoTargeting = geoTargeting;
return this;
}
/**
* ID of this ad. This is a read-only, auto-generated field.
* @return value or {@code null} for none
*/
public java.lang.Long getId() {
return id;
}
/**
* ID of this ad. This is a read-only, auto-generated field.
* @param id id or {@code null} for none
*/
public Ad setId(java.lang.Long id) {
this.id = id;
return this;
}
/**
* Dimension value for the ID of this ad. This is a read-only, auto-generated field.
* @return value or {@code null} for none
*/
public DimensionValue getIdDimensionValue() {
return idDimensionValue;
}
/**
* Dimension value for the ID of this ad. This is a read-only, auto-generated field.
* @param idDimensionValue idDimensionValue or {@code null} for none
*/
public Ad setIdDimensionValue(DimensionValue idDimensionValue) {
this.idDimensionValue = idDimensionValue;
return this;
}
/**
* Key-value targeting information for this ad. This field must be left blank if the ad is using a
* targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* @return value or {@code null} for none
*/
public KeyValueTargetingExpression getKeyValueTargetingExpression() {
return keyValueTargetingExpression;
}
/**
* Key-value targeting information for this ad. This field must be left blank if the ad is using a
* targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* @param keyValueTargetingExpression keyValueTargetingExpression or {@code null} for none
*/
public Ad setKeyValueTargetingExpression(KeyValueTargetingExpression keyValueTargetingExpression) {
this.keyValueTargetingExpression = keyValueTargetingExpression;
return this;
}
/**
* Identifies what kind of resource this is. Value: the fixed string "dfareporting#ad".
* @return value or {@code null} for none
*/
public java.lang.String getKind() {
return kind;
}
/**
* Identifies what kind of resource this is. Value: the fixed string "dfareporting#ad".
* @param kind kind or {@code null} for none
*/
public Ad setKind(java.lang.String kind) {
this.kind = kind;
return this;
}
/**
* Language targeting information for this ad. This field must be left blank if the ad is using a
* targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* @return value or {@code null} for none
*/
public LanguageTargeting getLanguageTargeting() {
return languageTargeting;
}
/**
* Language targeting information for this ad. This field must be left blank if the ad is using a
* targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* @param languageTargeting languageTargeting or {@code null} for none
*/
public Ad setLanguageTargeting(LanguageTargeting languageTargeting) {
this.languageTargeting = languageTargeting;
return this;
}
/**
* Information about the most recent modification of this ad. This is a read-only field.
* @return value or {@code null} for none
*/
public LastModifiedInfo getLastModifiedInfo() {
return lastModifiedInfo;
}
/**
* Information about the most recent modification of this ad. This is a read-only field.
* @param lastModifiedInfo lastModifiedInfo or {@code null} for none
*/
public Ad setLastModifiedInfo(LastModifiedInfo lastModifiedInfo) {
this.lastModifiedInfo = lastModifiedInfo;
return this;
}
/**
* Name of this ad. This is a required field and must be less than 256 characters long.
* @return value or {@code null} for none
*/
public java.lang.String getName() {
return name;
}
/**
* Name of this ad. This is a required field and must be less than 256 characters long.
* @param name name or {@code null} for none
*/
public Ad setName(java.lang.String name) {
this.name = name;
return this;
}
/**
* Placement assignments for this ad.
* @return value or {@code null} for none
*/
public java.util.List<PlacementAssignment> getPlacementAssignments() {
return placementAssignments;
}
/**
* Placement assignments for this ad.
* @param placementAssignments placementAssignments or {@code null} for none
*/
public Ad setPlacementAssignments(java.util.List<PlacementAssignment> placementAssignments) {
this.placementAssignments = placementAssignments;
return this;
}
/**
* Remarketing list targeting expression for this ad. This field must be left blank if the ad is
* using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* @return value or {@code null} for none
*/
public ListTargetingExpression getRemarketingListExpression() {
return remarketingListExpression;
}
/**
* Remarketing list targeting expression for this ad. This field must be left blank if the ad is
* using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* @param remarketingListExpression remarketingListExpression or {@code null} for none
*/
public Ad setRemarketingListExpression(ListTargetingExpression remarketingListExpression) {
this.remarketingListExpression = remarketingListExpression;
return this;
}
/**
* Size of this ad. Applicable when type is AD_SERVING_DEFAULT_AD.
* @return value or {@code null} for none
*/
public Size getSize() {
return size;
}
/**
* Size of this ad. Applicable when type is AD_SERVING_DEFAULT_AD.
* @param size size or {@code null} for none
*/
public Ad setSize(Size size) {
this.size = size;
return this;
}
/**
* Whether this ad is ssl compliant. This is a read-only field that is auto-generated when the ad
* is inserted or updated.
* @return value or {@code null} for none
*/
public java.lang.Boolean getSslCompliant() {
return sslCompliant;
}
/**
* Whether this ad is ssl compliant. This is a read-only field that is auto-generated when the ad
* is inserted or updated.
* @param sslCompliant sslCompliant or {@code null} for none
*/
public Ad setSslCompliant(java.lang.Boolean sslCompliant) {
this.sslCompliant = sslCompliant;
return this;
}
/**
* Whether this ad requires ssl. This is a read-only field that is auto-generated when the ad is
* inserted or updated.
* @return value or {@code null} for none
*/
public java.lang.Boolean getSslRequired() {
return sslRequired;
}
/**
* Whether this ad requires ssl. This is a read-only field that is auto-generated when the ad is
* inserted or updated.
* @param sslRequired sslRequired or {@code null} for none
*/
public Ad setSslRequired(java.lang.Boolean sslRequired) {
this.sslRequired = sslRequired;
return this;
}
/**
* @return value or {@code null} for none
*/
public com.google.api.client.util.DateTime getStartTime() {
return startTime;
}
/**
* @param startTime startTime or {@code null} for none
*/
public Ad setStartTime(com.google.api.client.util.DateTime startTime) {
this.startTime = startTime;
return this;
}
/**
* Subaccount ID of this ad. This is a read-only field that can be left blank.
* @return value or {@code null} for none
*/
public java.lang.Long getSubaccountId() {
return subaccountId;
}
/**
* Subaccount ID of this ad. This is a read-only field that can be left blank.
* @param subaccountId subaccountId or {@code null} for none
*/
public Ad setSubaccountId(java.lang.Long subaccountId) {
this.subaccountId = subaccountId;
return this;
}
/**
* Targeting template ID, used to apply preconfigured targeting information to this ad. This
* cannot be set while any of dayPartTargeting, geoTargeting, keyValueTargetingExpression,
* languageTargeting, remarketingListExpression, or technologyTargeting are set. Applicable when
* type is AD_SERVING_STANDARD_AD.
* @return value or {@code null} for none
*/
public java.lang.Long getTargetingTemplateId() {
return targetingTemplateId;
}
/**
* Targeting template ID, used to apply preconfigured targeting information to this ad. This
* cannot be set while any of dayPartTargeting, geoTargeting, keyValueTargetingExpression,
* languageTargeting, remarketingListExpression, or technologyTargeting are set. Applicable when
* type is AD_SERVING_STANDARD_AD.
* @param targetingTemplateId targetingTemplateId or {@code null} for none
*/
public Ad setTargetingTemplateId(java.lang.Long targetingTemplateId) {
this.targetingTemplateId = targetingTemplateId;
return this;
}
/**
* Technology platform targeting information for this ad. This field must be left blank if the ad
* is using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* @return value or {@code null} for none
*/
public TechnologyTargeting getTechnologyTargeting() {
return technologyTargeting;
}
/**
* Technology platform targeting information for this ad. This field must be left blank if the ad
* is using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* @param technologyTargeting technologyTargeting or {@code null} for none
*/
public Ad setTechnologyTargeting(TechnologyTargeting technologyTargeting) {
this.technologyTargeting = technologyTargeting;
return this;
}
/**
* Type of ad. This is a required field on insertion. Note that default ads (
* AD_SERVING_DEFAULT_AD) cannot be created directly (see Creative resource).
* @return value or {@code null} for none
*/
public java.lang.String getType() {
return type;
}
/**
* Type of ad. This is a required field on insertion. Note that default ads (
* AD_SERVING_DEFAULT_AD) cannot be created directly (see Creative resource).
* @param type type or {@code null} for none
*/
public Ad setType(java.lang.String type) {
this.type = type;
return this;
}
@Override
public Ad set(String fieldName, Object value) {
return (Ad) super.set(fieldName, value);
}
@Override
public Ad clone() {
return (Ad) super.clone();
}
}
/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.dfareporting.model;
/**
* Contains properties of a Campaign Manager ad.
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the Campaign Manager 360 API. For a detailed explanation
* see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public final class Ad extends com.google.api.client.json.GenericJson {
/**
* Account ID of this ad. This is a read-only field that can be left blank.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key @com.google.api.client.json.JsonString
private java.lang.Long accountId;
/**
* Whether this ad is active. When true, archived must be false.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean active;
/**
* Advertiser ID of this ad. This is a required field on insertion.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key @com.google.api.client.json.JsonString
private java.lang.Long advertiserId;
/**
* Dimension value for the ID of the advertiser. This is a read-only, auto-generated field.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private DimensionValue advertiserIdDimensionValue;
/**
* Whether this ad is archived. When true, active must be false.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean archived;
/**
* Audience segment ID that is being targeted for this ad. Applicable when type is
* AD_SERVING_STANDARD_AD.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key @com.google.api.client.json.JsonString
private java.lang.Long audienceSegmentId;
/**
* Campaign ID of this ad. This is a required field on insertion.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key @com.google.api.client.json.JsonString
private java.lang.Long campaignId;
/**
* Dimension value for the ID of the campaign. This is a read-only, auto-generated field.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private DimensionValue campaignIdDimensionValue;
/**
* Click-through URL for this ad. This is a required field on insertion. Applicable when type is
* AD_SERVING_CLICK_TRACKER.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private ClickThroughUrl clickThroughUrl;
/**
* Click-through URL suffix properties for this ad. Applies to the URL in the ad or (if overriding
* ad properties) the URL in the creative.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private ClickThroughUrlSuffixProperties clickThroughUrlSuffixProperties;
/**
* Comments for this ad.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String comments;
/**
* Compatibility of this ad. Applicable when type is AD_SERVING_DEFAULT_AD. DISPLAY and
* DISPLAY_INTERSTITIAL refer to either rendering on desktop or on mobile devices or in mobile
* apps for regular or interstitial ads, respectively. APP and APP_INTERSTITIAL are only used for
* existing default ads. New mobile placements must be assigned DISPLAY or DISPLAY_INTERSTITIAL
* and default ads created for those placements will be limited to those compatibility types.
* IN_STREAM_VIDEO refers to rendering in-stream video ads developed with the VAST standard.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String compatibility;
/**
* Information about the creation of this ad. This is a read-only field.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private LastModifiedInfo createInfo;
/**
* Creative group assignments for this ad. Applicable when type is AD_SERVING_CLICK_TRACKER. Only
* one assignment per creative group number is allowed for a maximum of two assignments.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<CreativeGroupAssignment> creativeGroupAssignments;
/**
* Creative rotation for this ad. Applicable when type is AD_SERVING_DEFAULT_AD,
* AD_SERVING_STANDARD_AD, or AD_SERVING_TRACKING. When type is AD_SERVING_DEFAULT_AD, this field
* should have exactly one creativeAssignment .
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private CreativeRotation creativeRotation;
/**
* Time and day targeting information for this ad. This field must be left blank if the ad is
* using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private DayPartTargeting dayPartTargeting;
/**
* Default click-through event tag properties for this ad.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private DefaultClickThroughEventTagProperties defaultClickThroughEventTagProperties;
/**
* Delivery schedule information for this ad. Applicable when type is AD_SERVING_STANDARD_AD or
* AD_SERVING_TRACKING. This field along with subfields priority and impressionRatio are required
* on insertion when type is AD_SERVING_STANDARD_AD.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private DeliverySchedule deliverySchedule;
/**
* Whether this ad is a dynamic click tracker. Applicable when type is AD_SERVING_CLICK_TRACKER.
* This is a required field on insert, and is read-only after insert.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean dynamicClickTracker;
/**
* NOTE(review): the generator emitted no description for this field; presumably the date-time at
* which this ad stops serving — confirm against the Campaign Manager 360 API reference.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private com.google.api.client.util.DateTime endTime;
/**
* Event tag overrides for this ad.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<EventTagOverride> eventTagOverrides;
/**
* Geographical targeting information for this ad. This field must be left blank if the ad is
* using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private GeoTargeting geoTargeting;
/**
* ID of this ad. This is a read-only, auto-generated field.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key @com.google.api.client.json.JsonString
private java.lang.Long id;
/**
* Dimension value for the ID of this ad. This is a read-only, auto-generated field.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private DimensionValue idDimensionValue;
/**
* Key-value targeting information for this ad. This field must be left blank if the ad is using a
* targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private KeyValueTargetingExpression keyValueTargetingExpression;
/**
* Identifies what kind of resource this is. Value: the fixed string "dfareporting#ad".
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String kind;
/**
* Language targeting information for this ad. This field must be left blank if the ad is using a
* targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private LanguageTargeting languageTargeting;
/**
* Information about the most recent modification of this ad. This is a read-only field.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private LastModifiedInfo lastModifiedInfo;
/**
* Name of this ad. This is a required field and must be less than 256 characters long.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String name;
/**
* Placement assignments for this ad.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<PlacementAssignment> placementAssignments;
/**
* Remarketing list targeting expression for this ad. This field must be left blank if the ad is
* using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private ListTargetingExpression remarketingListExpression;
/**
* Size of this ad. Applicable when type is AD_SERVING_DEFAULT_AD.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private Size size;
/**
* Whether this ad is ssl compliant. This is a read-only field that is auto-generated when the ad
* is inserted or updated.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean sslCompliant;
/**
* Whether this ad requires ssl. This is a read-only field that is auto-generated when the ad is
* inserted or updated.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean sslRequired;
/**
* NOTE(review): the generator emitted no description for this field; presumably the date-time at
* which this ad begins serving — confirm against the Campaign Manager 360 API reference.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private com.google.api.client.util.DateTime startTime;
/**
* Subaccount ID of this ad. This is a read-only field that can be left blank.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key @com.google.api.client.json.JsonString
private java.lang.Long subaccountId;
/**
* Targeting template ID, used to apply preconfigured targeting information to this ad. This
* cannot be set while any of dayPartTargeting, geoTargeting, keyValueTargetingExpression,
* languageTargeting, remarketingListExpression, or technologyTargeting are set. Applicable when
* type is AD_SERVING_STANDARD_AD.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key @com.google.api.client.json.JsonString
private java.lang.Long targetingTemplateId;
/**
* Technology platform targeting information for this ad. This field must be left blank if the ad
* is using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private TechnologyTargeting technologyTargeting;
/**
* Type of ad. This is a required field on insertion. Note that default ads (
* AD_SERVING_DEFAULT_AD) cannot be created directly (see Creative resource).
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String type;
/**
* Account ID of this ad. This is a read-only field that can be left blank.
* @return value or {@code null} for none
*/
public java.lang.Long getAccountId() {
return accountId;
}
/**
* Account ID of this ad. This is a read-only field that can be left blank.
* @param accountId accountId or {@code null} for none
*/
public Ad setAccountId(java.lang.Long accountId) {
this.accountId = accountId;
return this;
}
/**
* Whether this ad is active. When true, archived must be false.
* @return value or {@code null} for none
*/
public java.lang.Boolean getActive() {
return active;
}
/**
* Whether this ad is active. When true, archived must be false.
* @param active active or {@code null} for none
*/
public Ad setActive(java.lang.Boolean active) {
this.active = active;
return this;
}
/**
* Advertiser ID of this ad. This is a required field on insertion.
* @return value or {@code null} for none
*/
public java.lang.Long getAdvertiserId() {
return advertiserId;
}
/**
* Advertiser ID of this ad. This is a required field on insertion.
* @param advertiserId advertiserId or {@code null} for none
*/
public Ad setAdvertiserId(java.lang.Long advertiserId) {
this.advertiserId = advertiserId;
return this;
}
/**
* Dimension value for the ID of the advertiser. This is a read-only, auto-generated field.
* @return value or {@code null} for none
*/
public DimensionValue getAdvertiserIdDimensionValue() {
return advertiserIdDimensionValue;
}
/**
* Dimension value for the ID of the advertiser. This is a read-only, auto-generated field.
* @param advertiserIdDimensionValue advertiserIdDimensionValue or {@code null} for none
*/
public Ad setAdvertiserIdDimensionValue(DimensionValue advertiserIdDimensionValue) {
this.advertiserIdDimensionValue = advertiserIdDimensionValue;
return this;
}
/**
* Whether this ad is archived. When true, active must be false.
* @return value or {@code null} for none
*/
public java.lang.Boolean getArchived() {
return archived;
}
/**
* Whether this ad is archived. When true, active must be false.
* @param archived archived or {@code null} for none
*/
public Ad setArchived(java.lang.Boolean archived) {
this.archived = archived;
return this;
}
/**
* Audience segment ID that is being targeted for this ad. Applicable when type is
* AD_SERVING_STANDARD_AD.
* @return value or {@code null} for none
*/
public java.lang.Long getAudienceSegmentId() {
return audienceSegmentId;
}
/**
* Audience segment ID that is being targeted for this ad. Applicable when type is
* AD_SERVING_STANDARD_AD.
* @param audienceSegmentId audienceSegmentId or {@code null} for none
*/
public Ad setAudienceSegmentId(java.lang.Long audienceSegmentId) {
this.audienceSegmentId = audienceSegmentId;
return this;
}
/**
* Campaign ID of this ad. This is a required field on insertion.
* @return value or {@code null} for none
*/
public java.lang.Long getCampaignId() {
return campaignId;
}
/**
* Campaign ID of this ad. This is a required field on insertion.
* @param campaignId campaignId or {@code null} for none
*/
public Ad setCampaignId(java.lang.Long campaignId) {
this.campaignId = campaignId;
return this;
}
/**
* Dimension value for the ID of the campaign. This is a read-only, auto-generated field.
* @return value or {@code null} for none
*/
public DimensionValue getCampaignIdDimensionValue() {
return campaignIdDimensionValue;
}
/**
* Dimension value for the ID of the campaign. This is a read-only, auto-generated field.
* @param campaignIdDimensionValue campaignIdDimensionValue or {@code null} for none
*/
public Ad setCampaignIdDimensionValue(DimensionValue campaignIdDimensionValue) {
this.campaignIdDimensionValue = campaignIdDimensionValue;
return this;
}
/**
* Click-through URL for this ad. This is a required field on insertion. Applicable when type is
* AD_SERVING_CLICK_TRACKER.
* @return value or {@code null} for none
*/
public ClickThroughUrl getClickThroughUrl() {
return clickThroughUrl;
}
/**
* Click-through URL for this ad. This is a required field on insertion. Applicable when type is
* AD_SERVING_CLICK_TRACKER.
* @param clickThroughUrl clickThroughUrl or {@code null} for none
*/
public Ad setClickThroughUrl(ClickThroughUrl clickThroughUrl) {
this.clickThroughUrl = clickThroughUrl;
return this;
}
/**
* Click-through URL suffix properties for this ad. Applies to the URL in the ad or (if overriding
* ad properties) the URL in the creative.
* @return value or {@code null} for none
*/
public ClickThroughUrlSuffixProperties getClickThroughUrlSuffixProperties() {
return clickThroughUrlSuffixProperties;
}
/**
* Click-through URL suffix properties for this ad. Applies to the URL in the ad or (if overriding
* ad properties) the URL in the creative.
* @param clickThroughUrlSuffixProperties clickThroughUrlSuffixProperties or {@code null} for none
*/
public Ad setClickThroughUrlSuffixProperties(ClickThroughUrlSuffixProperties clickThroughUrlSuffixProperties) {
this.clickThroughUrlSuffixProperties = clickThroughUrlSuffixProperties;
return this;
}
/**
* Comments for this ad.
* @return value or {@code null} for none
*/
public java.lang.String getComments() {
return comments;
}
/**
* Comments for this ad.
* @param comments comments or {@code null} for none
*/
public Ad setComments(java.lang.String comments) {
this.comments = comments;
return this;
}
/**
* Compatibility of this ad. Applicable when type is AD_SERVING_DEFAULT_AD. DISPLAY and
* DISPLAY_INTERSTITIAL refer to either rendering on desktop or on mobile devices or in mobile
* apps for regular or interstitial ads, respectively. APP and APP_INTERSTITIAL are only used for
* existing default ads. New mobile placements must be assigned DISPLAY or DISPLAY_INTERSTITIAL
* and default ads created for those placements will be limited to those compatibility types.
* IN_STREAM_VIDEO refers to rendering in-stream video ads developed with the VAST standard.
* @return value or {@code null} for none
*/
public java.lang.String getCompatibility() {
return compatibility;
}
/**
* Compatibility of this ad. Applicable when type is AD_SERVING_DEFAULT_AD. DISPLAY and
* DISPLAY_INTERSTITIAL refer to either rendering on desktop or on mobile devices or in mobile
* apps for regular or interstitial ads, respectively. APP and APP_INTERSTITIAL are only used for
* existing default ads. New mobile placements must be assigned DISPLAY or DISPLAY_INTERSTITIAL
* and default ads created for those placements will be limited to those compatibility types.
* IN_STREAM_VIDEO refers to rendering in-stream video ads developed with the VAST standard.
* @param compatibility compatibility or {@code null} for none
*/
public Ad setCompatibility(java.lang.String compatibility) {
this.compatibility = compatibility;
return this;
}
/**
* Information about the creation of this ad. This is a read-only field.
* @return value or {@code null} for none
*/
public LastModifiedInfo getCreateInfo() {
return createInfo;
}
/**
* Information about the creation of this ad. This is a read-only field.
* @param createInfo createInfo or {@code null} for none
*/
public Ad setCreateInfo(LastModifiedInfo createInfo) {
this.createInfo = createInfo;
return this;
}
/**
* Creative group assignments for this ad. Applicable when type is AD_SERVING_CLICK_TRACKER. Only
* one assignment per creative group number is allowed for a maximum of two assignments.
* @return value or {@code null} for none
*/
public java.util.List<CreativeGroupAssignment> getCreativeGroupAssignments() {
return creativeGroupAssignments;
}
/**
* Creative group assignments for this ad. Applicable when type is AD_SERVING_CLICK_TRACKER. Only
* one assignment per creative group number is allowed for a maximum of two assignments.
* @param creativeGroupAssignments creativeGroupAssignments or {@code null} for none
*/
public Ad setCreativeGroupAssignments(java.util.List<CreativeGroupAssignment> creativeGroupAssignments) {
this.creativeGroupAssignments = creativeGroupAssignments;
return this;
}
/**
* Creative rotation for this ad. Applicable when type is AD_SERVING_DEFAULT_AD,
* AD_SERVING_STANDARD_AD, or AD_SERVING_TRACKING. When type is AD_SERVING_DEFAULT_AD, this field
* should have exactly one creativeAssignment .
* @return value or {@code null} for none
*/
public CreativeRotation getCreativeRotation() {
return creativeRotation;
}
/**
* Creative rotation for this ad. Applicable when type is AD_SERVING_DEFAULT_AD,
* AD_SERVING_STANDARD_AD, or AD_SERVING_TRACKING. When type is AD_SERVING_DEFAULT_AD, this field
* should have exactly one creativeAssignment .
* @param creativeRotation creativeRotation or {@code null} for none
*/
public Ad setCreativeRotation(CreativeRotation creativeRotation) {
this.creativeRotation = creativeRotation;
return this;
}
/**
* Time and day targeting information for this ad. This field must be left blank if the ad is
* using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* @return value or {@code null} for none
*/
public DayPartTargeting getDayPartTargeting() {
return dayPartTargeting;
}
/**
* Time and day targeting information for this ad. This field must be left blank if the ad is
* using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* @param dayPartTargeting dayPartTargeting or {@code null} for none
*/
public Ad setDayPartTargeting(DayPartTargeting dayPartTargeting) {
this.dayPartTargeting = dayPartTargeting;
return this;
}
/**
* Default click-through event tag properties for this ad.
* @return value or {@code null} for none
*/
public DefaultClickThroughEventTagProperties getDefaultClickThroughEventTagProperties() {
return defaultClickThroughEventTagProperties;
}
/**
* Default click-through event tag properties for this ad.
* @param defaultClickThroughEventTagProperties defaultClickThroughEventTagProperties or {@code null} for none
*/
public Ad setDefaultClickThroughEventTagProperties(DefaultClickThroughEventTagProperties defaultClickThroughEventTagProperties) {
this.defaultClickThroughEventTagProperties = defaultClickThroughEventTagProperties;
return this;
}
/**
* Delivery schedule information for this ad. Applicable when type is AD_SERVING_STANDARD_AD or
* AD_SERVING_TRACKING. This field along with subfields priority and impressionRatio are required
* on insertion when type is AD_SERVING_STANDARD_AD.
* @return value or {@code null} for none
*/
public DeliverySchedule getDeliverySchedule() {
return deliverySchedule;
}
/**
* Delivery schedule information for this ad. Applicable when type is AD_SERVING_STANDARD_AD or
* AD_SERVING_TRACKING. This field along with subfields priority and impressionRatio are required
* on insertion when type is AD_SERVING_STANDARD_AD.
* @param deliverySchedule deliverySchedule or {@code null} for none
*/
public Ad setDeliverySchedule(DeliverySchedule deliverySchedule) {
this.deliverySchedule = deliverySchedule;
return this;
}
/**
* Whether this ad is a dynamic click tracker. Applicable when type is AD_SERVING_CLICK_TRACKER.
* This is a required field on insert, and is read-only after insert.
* @return value or {@code null} for none
*/
public java.lang.Boolean getDynamicClickTracker() {
return dynamicClickTracker;
}
/**
* Whether this ad is a dynamic click tracker. Applicable when type is AD_SERVING_CLICK_TRACKER.
* This is a required field on insert, and is read-only after insert.
* @param dynamicClickTracker dynamicClickTracker or {@code null} for none
*/
public Ad setDynamicClickTracker(java.lang.Boolean dynamicClickTracker) {
this.dynamicClickTracker = dynamicClickTracker;
return this;
}
/**
* NOTE(review): the generator emitted no description here; presumably the date-time at which this
* ad stops serving — confirm against the Campaign Manager 360 API reference.
* @return value or {@code null} for none
*/
public com.google.api.client.util.DateTime getEndTime() {
return endTime;
}
/**
* NOTE(review): the generator emitted no description here; presumably the date-time at which this
* ad stops serving — confirm against the Campaign Manager 360 API reference.
* @param endTime endTime or {@code null} for none
*/
public Ad setEndTime(com.google.api.client.util.DateTime endTime) {
this.endTime = endTime;
return this;
}
/**
* Event tag overrides for this ad.
* @return value or {@code null} for none
*/
public java.util.List<EventTagOverride> getEventTagOverrides() {
return eventTagOverrides;
}
/**
* Event tag overrides for this ad.
* @param eventTagOverrides eventTagOverrides or {@code null} for none
*/
public Ad setEventTagOverrides(java.util.List<EventTagOverride> eventTagOverrides) {
this.eventTagOverrides = eventTagOverrides;
return this;
}
/**
* Geographical targeting information for this ad. This field must be left blank if the ad is
* using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* @return value or {@code null} for none
*/
public GeoTargeting getGeoTargeting() {
return geoTargeting;
}
/**
* Geographical targeting information for this ad. This field must be left blank if the ad is
* using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* @param geoTargeting geoTargeting or {@code null} for none
*/
public Ad setGeoTargeting(GeoTargeting geoTargeting) {
this.geoTargeting = geoTargeting;
return this;
}
/**
* ID of this ad. This is a read-only, auto-generated field.
* @return value or {@code null} for none
*/
public java.lang.Long getId() {
return id;
}
/**
* ID of this ad. This is a read-only, auto-generated field.
* @param id id or {@code null} for none
*/
public Ad setId(java.lang.Long id) {
this.id = id;
return this;
}
/**
* Dimension value for the ID of this ad. This is a read-only, auto-generated field.
* @return value or {@code null} for none
*/
public DimensionValue getIdDimensionValue() {
return idDimensionValue;
}
/**
* Dimension value for the ID of this ad. This is a read-only, auto-generated field.
* @param idDimensionValue idDimensionValue or {@code null} for none
*/
public Ad setIdDimensionValue(DimensionValue idDimensionValue) {
this.idDimensionValue = idDimensionValue;
return this;
}
/**
* Key-value targeting information for this ad. This field must be left blank if the ad is using a
* targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* @return value or {@code null} for none
*/
public KeyValueTargetingExpression getKeyValueTargetingExpression() {
return keyValueTargetingExpression;
}
/**
* Key-value targeting information for this ad. This field must be left blank if the ad is using a
* targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* @param keyValueTargetingExpression keyValueTargetingExpression or {@code null} for none
*/
public Ad setKeyValueTargetingExpression(KeyValueTargetingExpression keyValueTargetingExpression) {
this.keyValueTargetingExpression = keyValueTargetingExpression;
return this;
}
/**
* Identifies what kind of resource this is. Value: the fixed string "dfareporting#ad".
* @return value or {@code null} for none
*/
public java.lang.String getKind() {
return kind;
}
/**
* Identifies what kind of resource this is. Value: the fixed string "dfareporting#ad".
* @param kind kind or {@code null} for none
*/
public Ad setKind(java.lang.String kind) {
this.kind = kind;
return this;
}
/**
* Language targeting information for this ad. This field must be left blank if the ad is using a
* targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* @return value or {@code null} for none
*/
public LanguageTargeting getLanguageTargeting() {
return languageTargeting;
}
/**
* Language targeting information for this ad. This field must be left blank if the ad is using a
* targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* @param languageTargeting languageTargeting or {@code null} for none
*/
public Ad setLanguageTargeting(LanguageTargeting languageTargeting) {
this.languageTargeting = languageTargeting;
return this;
}
/**
* Information about the most recent modification of this ad. This is a read-only field.
* @return value or {@code null} for none
*/
public LastModifiedInfo getLastModifiedInfo() {
return lastModifiedInfo;
}
/**
* Information about the most recent modification of this ad. This is a read-only field.
* @param lastModifiedInfo lastModifiedInfo or {@code null} for none
*/
public Ad setLastModifiedInfo(LastModifiedInfo lastModifiedInfo) {
this.lastModifiedInfo = lastModifiedInfo;
return this;
}
/**
* Name of this ad. This is a required field and must be less than 256 characters long.
* @return value or {@code null} for none
*/
public java.lang.String getName() {
return name;
}
/**
* Name of this ad. This is a required field and must be less than 256 characters long.
* @param name name or {@code null} for none
*/
public Ad setName(java.lang.String name) {
this.name = name;
return this;
}
/**
* Placement assignments for this ad.
* @return value or {@code null} for none
*/
public java.util.List<PlacementAssignment> getPlacementAssignments() {
return placementAssignments;
}
/**
* Placement assignments for this ad.
* @param placementAssignments placementAssignments or {@code null} for none
*/
public Ad setPlacementAssignments(java.util.List<PlacementAssignment> placementAssignments) {
this.placementAssignments = placementAssignments;
return this;
}
/**
* Remarketing list targeting expression for this ad. This field must be left blank if the ad is
* using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* @return value or {@code null} for none
*/
public ListTargetingExpression getRemarketingListExpression() {
return remarketingListExpression;
}
/**
* Remarketing list targeting expression for this ad. This field must be left blank if the ad is
* using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* @param remarketingListExpression remarketingListExpression or {@code null} for none
*/
public Ad setRemarketingListExpression(ListTargetingExpression remarketingListExpression) {
this.remarketingListExpression = remarketingListExpression;
return this;
}
/**
* Size of this ad. Applicable when type is AD_SERVING_DEFAULT_AD.
* @return value or {@code null} for none
*/
public Size getSize() {
return size;
}
/**
* Size of this ad. Applicable when type is AD_SERVING_DEFAULT_AD.
* @param size size or {@code null} for none
*/
public Ad setSize(Size size) {
this.size = size;
return this;
}
/**
* Whether this ad is ssl compliant. This is a read-only field that is auto-generated when the ad
* is inserted or updated.
* @return value or {@code null} for none
*/
public java.lang.Boolean getSslCompliant() {
return sslCompliant;
}
/**
* Whether this ad is ssl compliant. This is a read-only field that is auto-generated when the ad
* is inserted or updated.
* @param sslCompliant sslCompliant or {@code null} for none
*/
public Ad setSslCompliant(java.lang.Boolean sslCompliant) {
this.sslCompliant = sslCompliant;
return this;
}
/**
* Whether this ad requires ssl. This is a read-only field that is auto-generated when the ad is
* inserted or updated.
* @return value or {@code null} for none
*/
public java.lang.Boolean getSslRequired() {
return sslRequired;
}
/**
* Whether this ad requires ssl. This is a read-only field that is auto-generated when the ad is
* inserted or updated.
* @param sslRequired sslRequired or {@code null} for none
*/
public Ad setSslRequired(java.lang.Boolean sslRequired) {
this.sslRequired = sslRequired;
return this;
}
/**
* NOTE(review): the generator emitted no description here; presumably the date-time at which this
* ad begins serving — confirm against the Campaign Manager 360 API reference.
* @return value or {@code null} for none
*/
public com.google.api.client.util.DateTime getStartTime() {
return startTime;
}
/**
* NOTE(review): the generator emitted no description here; presumably the date-time at which this
* ad begins serving — confirm against the Campaign Manager 360 API reference.
* @param startTime startTime or {@code null} for none
*/
public Ad setStartTime(com.google.api.client.util.DateTime startTime) {
this.startTime = startTime;
return this;
}
/**
* Subaccount ID of this ad. This is a read-only field that can be left blank.
* @return value or {@code null} for none
*/
public java.lang.Long getSubaccountId() {
return subaccountId;
}
/**
* Subaccount ID of this ad. This is a read-only field that can be left blank.
* @param subaccountId subaccountId or {@code null} for none
*/
public Ad setSubaccountId(java.lang.Long subaccountId) {
this.subaccountId = subaccountId;
return this;
}
/**
* Targeting template ID, used to apply preconfigured targeting information to this ad. This
* cannot be set while any of dayPartTargeting, geoTargeting, keyValueTargetingExpression,
* languageTargeting, remarketingListExpression, or technologyTargeting are set. Applicable when
* type is AD_SERVING_STANDARD_AD.
* @return value or {@code null} for none
*/
public java.lang.Long getTargetingTemplateId() {
return targetingTemplateId;
}
/**
* Targeting template ID, used to apply preconfigured targeting information to this ad. This
* cannot be set while any of dayPartTargeting, geoTargeting, keyValueTargetingExpression,
* languageTargeting, remarketingListExpression, or technologyTargeting are set. Applicable when
* type is AD_SERVING_STANDARD_AD.
* @param targetingTemplateId targetingTemplateId or {@code null} for none
*/
public Ad setTargetingTemplateId(java.lang.Long targetingTemplateId) {
this.targetingTemplateId = targetingTemplateId;
return this;
}
/**
* Technology platform targeting information for this ad. This field must be left blank if the ad
* is using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* @return value or {@code null} for none
*/
public TechnologyTargeting getTechnologyTargeting() {
return technologyTargeting;
}
/**
* Technology platform targeting information for this ad. This field must be left blank if the ad
* is using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* @param technologyTargeting technologyTargeting or {@code null} for none
*/
public Ad setTechnologyTargeting(TechnologyTargeting technologyTargeting) {
this.technologyTargeting = technologyTargeting;
return this;
}
/**
* Type of ad. This is a required field on insertion. Note that default ads (
* AD_SERVING_DEFAULT_AD) cannot be created directly (see Creative resource).
* @return value or {@code null} for none
*/
public java.lang.String getType() {
return type;
}
/**
* Type of ad. This is a required field on insertion. Note that default ads (
* AD_SERVING_DEFAULT_AD) cannot be created directly (see Creative resource).
* @param type type or {@code null} for none
*/
public Ad setType(java.lang.String type) {
this.type = type;
return this;
}
/**
* Sets an arbitrary JSON field by name, narrowing the inherited return type to {@link Ad} so
* fluent call chains keep the concrete type.
*/
@Override
public Ad set(String fieldName, Object value) {
return (Ad) super.set(fieldName, value);
}
/**
* Returns a copy of this model, narrowing the inherited return type to {@link Ad}.
*/
@Override
public Ad clone() {
return (Ad) super.clone();
}
}
|
googleapis/google-api-java-client-services | 35,622 | clients/google-api-services-dfareporting/v3.4/1.31.0/com/google/api/services/dfareporting/model/Ad.java | /*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.dfareporting.model;
/**
* Contains properties of a Campaign Manager ad.
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the Campaign Manager 360 API. For a detailed explanation
* see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public final class Ad extends com.google.api.client.json.GenericJson {
/**
* Account ID of this ad. This is a read-only field that can be left blank.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key @com.google.api.client.json.JsonString
private java.lang.Long accountId;
/**
* Whether this ad is active. When true, archived must be false.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean active;
/**
* Advertiser ID of this ad. This is a required field on insertion.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key @com.google.api.client.json.JsonString
private java.lang.Long advertiserId;
/**
* Dimension value for the ID of the advertiser. This is a read-only, auto-generated field.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private DimensionValue advertiserIdDimensionValue;
/**
* Whether this ad is archived. When true, active must be false.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean archived;
/**
* Audience segment ID that is being targeted for this ad. Applicable when type is
* AD_SERVING_STANDARD_AD.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key @com.google.api.client.json.JsonString
private java.lang.Long audienceSegmentId;
/**
* Campaign ID of this ad. This is a required field on insertion.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key @com.google.api.client.json.JsonString
private java.lang.Long campaignId;
/**
* Dimension value for the ID of the campaign. This is a read-only, auto-generated field.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private DimensionValue campaignIdDimensionValue;
/**
* Click-through URL for this ad. This is a required field on insertion. Applicable when type is
* AD_SERVING_CLICK_TRACKER.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private ClickThroughUrl clickThroughUrl;
/**
* Click-through URL suffix properties for this ad. Applies to the URL in the ad or (if overriding
* ad properties) the URL in the creative.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private ClickThroughUrlSuffixProperties clickThroughUrlSuffixProperties;
/**
* Comments for this ad.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String comments;
/**
* Compatibility of this ad. Applicable when type is AD_SERVING_DEFAULT_AD. DISPLAY and
* DISPLAY_INTERSTITIAL refer to either rendering on desktop or on mobile devices or in mobile
* apps for regular or interstitial ads, respectively. APP and APP_INTERSTITIAL are only used for
* existing default ads. New mobile placements must be assigned DISPLAY or DISPLAY_INTERSTITIAL
* and default ads created for those placements will be limited to those compatibility types.
* IN_STREAM_VIDEO refers to rendering in-stream video ads developed with the VAST standard.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String compatibility;
/**
* Information about the creation of this ad. This is a read-only field.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private LastModifiedInfo createInfo;
/**
* Creative group assignments for this ad. Applicable when type is AD_SERVING_CLICK_TRACKER. Only
* one assignment per creative group number is allowed for a maximum of two assignments.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<CreativeGroupAssignment> creativeGroupAssignments;
/**
* Creative rotation for this ad. Applicable when type is AD_SERVING_DEFAULT_AD,
* AD_SERVING_STANDARD_AD, or AD_SERVING_TRACKING. When type is AD_SERVING_DEFAULT_AD, this field
* should have exactly one creativeAssignment .
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private CreativeRotation creativeRotation;
/**
* Time and day targeting information for this ad. This field must be left blank if the ad is
* using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private DayPartTargeting dayPartTargeting;
/**
* Default click-through event tag properties for this ad.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private DefaultClickThroughEventTagProperties defaultClickThroughEventTagProperties;
/**
* Delivery schedule information for this ad. Applicable when type is AD_SERVING_STANDARD_AD or
* AD_SERVING_TRACKING. This field along with subfields priority and impressionRatio are required
* on insertion when type is AD_SERVING_STANDARD_AD.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private DeliverySchedule deliverySchedule;
/**
* Whether this ad is a dynamic click tracker. Applicable when type is AD_SERVING_CLICK_TRACKER.
* This is a required field on insert, and is read-only after insert.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean dynamicClickTracker;
  /**
   * Date and time indicating when this ad stops being served.
   * NOTE(review): the generated discovery doc omits a description for this field;
   * semantics inferred from the field name — confirm against the Campaign Manager
   * 360 API reference for the Ads resource.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private com.google.api.client.util.DateTime endTime;
/**
* Event tag overrides for this ad.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<EventTagOverride> eventTagOverrides;
/**
* Geographical targeting information for this ad. This field must be left blank if the ad is
* using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private GeoTargeting geoTargeting;
/**
* ID of this ad. This is a read-only, auto-generated field.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key @com.google.api.client.json.JsonString
private java.lang.Long id;
/**
* Dimension value for the ID of this ad. This is a read-only, auto-generated field.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private DimensionValue idDimensionValue;
/**
* Key-value targeting information for this ad. This field must be left blank if the ad is using a
* targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private KeyValueTargetingExpression keyValueTargetingExpression;
/**
* Identifies what kind of resource this is. Value: the fixed string "dfareporting#ad".
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String kind;
/**
* Language targeting information for this ad. This field must be left blank if the ad is using a
* targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private LanguageTargeting languageTargeting;
/**
* Information about the most recent modification of this ad. This is a read-only field.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private LastModifiedInfo lastModifiedInfo;
/**
* Name of this ad. This is a required field and must be less than 256 characters long.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String name;
/**
* Placement assignments for this ad.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<PlacementAssignment> placementAssignments;
/**
* Remarketing list targeting expression for this ad. This field must be left blank if the ad is
* using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private ListTargetingExpression remarketingListExpression;
/**
* Size of this ad. Applicable when type is AD_SERVING_DEFAULT_AD.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private Size size;
/**
* Whether this ad is ssl compliant. This is a read-only field that is auto-generated when the ad
* is inserted or updated.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean sslCompliant;
/**
* Whether this ad requires ssl. This is a read-only field that is auto-generated when the ad is
* inserted or updated.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean sslRequired;
  /**
   * Date and time indicating when this ad starts being served.
   * NOTE(review): the generated discovery doc omits a description for this field;
   * semantics inferred from the field name — confirm against the Campaign Manager
   * 360 API reference for the Ads resource.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private com.google.api.client.util.DateTime startTime;
/**
* Subaccount ID of this ad. This is a read-only field that can be left blank.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key @com.google.api.client.json.JsonString
private java.lang.Long subaccountId;
/**
* Targeting template ID, used to apply preconfigured targeting information to this ad. This
* cannot be set while any of dayPartTargeting, geoTargeting, keyValueTargetingExpression,
* languageTargeting, remarketingListExpression, or technologyTargeting are set. Applicable when
* type is AD_SERVING_STANDARD_AD.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key @com.google.api.client.json.JsonString
private java.lang.Long targetingTemplateId;
/**
* Technology platform targeting information for this ad. This field must be left blank if the ad
* is using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private TechnologyTargeting technologyTargeting;
/**
* Type of ad. This is a required field on insertion. Note that default ads (
* AD_SERVING_DEFAULT_AD) cannot be created directly (see Creative resource).
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String type;
  /**
   * Returns the account ID of this ad. The server populates this field; it is
   * read-only and may be left blank on insert.
   * @return value or {@code null} for none
   */
  public java.lang.Long getAccountId() {
    return accountId;
  }
  /**
   * Sets the account ID of this ad. The server treats this field as read-only,
   * so it can normally be left unset ({@code null}).
   * @param accountId accountId or {@code null} for none
   * @return this {@code Ad}, for call chaining
   */
  public Ad setAccountId(java.lang.Long accountId) {
    this.accountId = accountId;
    return this;
  }
/**
* Whether this ad is active. When true, archived must be false.
* @return value or {@code null} for none
*/
public java.lang.Boolean getActive() {
return active;
}
/**
* Whether this ad is active. When true, archived must be false.
* @param active active or {@code null} for none
*/
public Ad setActive(java.lang.Boolean active) {
this.active = active;
return this;
}
/**
* Advertiser ID of this ad. This is a required field on insertion.
* @return value or {@code null} for none
*/
public java.lang.Long getAdvertiserId() {
return advertiserId;
}
/**
* Advertiser ID of this ad. This is a required field on insertion.
* @param advertiserId advertiserId or {@code null} for none
*/
public Ad setAdvertiserId(java.lang.Long advertiserId) {
this.advertiserId = advertiserId;
return this;
}
/**
* Dimension value for the ID of the advertiser. This is a read-only, auto-generated field.
* @return value or {@code null} for none
*/
public DimensionValue getAdvertiserIdDimensionValue() {
return advertiserIdDimensionValue;
}
/**
* Dimension value for the ID of the advertiser. This is a read-only, auto-generated field.
* @param advertiserIdDimensionValue advertiserIdDimensionValue or {@code null} for none
*/
public Ad setAdvertiserIdDimensionValue(DimensionValue advertiserIdDimensionValue) {
this.advertiserIdDimensionValue = advertiserIdDimensionValue;
return this;
}
/**
* Whether this ad is archived. When true, active must be false.
* @return value or {@code null} for none
*/
public java.lang.Boolean getArchived() {
return archived;
}
/**
* Whether this ad is archived. When true, active must be false.
* @param archived archived or {@code null} for none
*/
public Ad setArchived(java.lang.Boolean archived) {
this.archived = archived;
return this;
}
/**
* Audience segment ID that is being targeted for this ad. Applicable when type is
* AD_SERVING_STANDARD_AD.
* @return value or {@code null} for none
*/
public java.lang.Long getAudienceSegmentId() {
return audienceSegmentId;
}
/**
* Audience segment ID that is being targeted for this ad. Applicable when type is
* AD_SERVING_STANDARD_AD.
* @param audienceSegmentId audienceSegmentId or {@code null} for none
*/
public Ad setAudienceSegmentId(java.lang.Long audienceSegmentId) {
this.audienceSegmentId = audienceSegmentId;
return this;
}
/**
* Campaign ID of this ad. This is a required field on insertion.
* @return value or {@code null} for none
*/
public java.lang.Long getCampaignId() {
return campaignId;
}
/**
* Campaign ID of this ad. This is a required field on insertion.
* @param campaignId campaignId or {@code null} for none
*/
public Ad setCampaignId(java.lang.Long campaignId) {
this.campaignId = campaignId;
return this;
}
/**
* Dimension value for the ID of the campaign. This is a read-only, auto-generated field.
* @return value or {@code null} for none
*/
public DimensionValue getCampaignIdDimensionValue() {
return campaignIdDimensionValue;
}
/**
* Dimension value for the ID of the campaign. This is a read-only, auto-generated field.
* @param campaignIdDimensionValue campaignIdDimensionValue or {@code null} for none
*/
public Ad setCampaignIdDimensionValue(DimensionValue campaignIdDimensionValue) {
this.campaignIdDimensionValue = campaignIdDimensionValue;
return this;
}
/**
* Click-through URL for this ad. This is a required field on insertion. Applicable when type is
* AD_SERVING_CLICK_TRACKER.
* @return value or {@code null} for none
*/
public ClickThroughUrl getClickThroughUrl() {
return clickThroughUrl;
}
/**
* Click-through URL for this ad. This is a required field on insertion. Applicable when type is
* AD_SERVING_CLICK_TRACKER.
* @param clickThroughUrl clickThroughUrl or {@code null} for none
*/
public Ad setClickThroughUrl(ClickThroughUrl clickThroughUrl) {
this.clickThroughUrl = clickThroughUrl;
return this;
}
/**
* Click-through URL suffix properties for this ad. Applies to the URL in the ad or (if overriding
* ad properties) the URL in the creative.
* @return value or {@code null} for none
*/
public ClickThroughUrlSuffixProperties getClickThroughUrlSuffixProperties() {
return clickThroughUrlSuffixProperties;
}
/**
* Click-through URL suffix properties for this ad. Applies to the URL in the ad or (if overriding
* ad properties) the URL in the creative.
* @param clickThroughUrlSuffixProperties clickThroughUrlSuffixProperties or {@code null} for none
*/
public Ad setClickThroughUrlSuffixProperties(ClickThroughUrlSuffixProperties clickThroughUrlSuffixProperties) {
this.clickThroughUrlSuffixProperties = clickThroughUrlSuffixProperties;
return this;
}
/**
* Comments for this ad.
* @return value or {@code null} for none
*/
public java.lang.String getComments() {
return comments;
}
/**
* Comments for this ad.
* @param comments comments or {@code null} for none
*/
public Ad setComments(java.lang.String comments) {
this.comments = comments;
return this;
}
/**
* Compatibility of this ad. Applicable when type is AD_SERVING_DEFAULT_AD. DISPLAY and
* DISPLAY_INTERSTITIAL refer to either rendering on desktop or on mobile devices or in mobile
* apps for regular or interstitial ads, respectively. APP and APP_INTERSTITIAL are only used for
* existing default ads. New mobile placements must be assigned DISPLAY or DISPLAY_INTERSTITIAL
* and default ads created for those placements will be limited to those compatibility types.
* IN_STREAM_VIDEO refers to rendering in-stream video ads developed with the VAST standard.
* @return value or {@code null} for none
*/
public java.lang.String getCompatibility() {
return compatibility;
}
/**
* Compatibility of this ad. Applicable when type is AD_SERVING_DEFAULT_AD. DISPLAY and
* DISPLAY_INTERSTITIAL refer to either rendering on desktop or on mobile devices or in mobile
* apps for regular or interstitial ads, respectively. APP and APP_INTERSTITIAL are only used for
* existing default ads. New mobile placements must be assigned DISPLAY or DISPLAY_INTERSTITIAL
* and default ads created for those placements will be limited to those compatibility types.
* IN_STREAM_VIDEO refers to rendering in-stream video ads developed with the VAST standard.
* @param compatibility compatibility or {@code null} for none
*/
public Ad setCompatibility(java.lang.String compatibility) {
this.compatibility = compatibility;
return this;
}
/**
* Information about the creation of this ad. This is a read-only field.
* @return value or {@code null} for none
*/
public LastModifiedInfo getCreateInfo() {
return createInfo;
}
/**
* Information about the creation of this ad. This is a read-only field.
* @param createInfo createInfo or {@code null} for none
*/
public Ad setCreateInfo(LastModifiedInfo createInfo) {
this.createInfo = createInfo;
return this;
}
/**
* Creative group assignments for this ad. Applicable when type is AD_SERVING_CLICK_TRACKER. Only
* one assignment per creative group number is allowed for a maximum of two assignments.
* @return value or {@code null} for none
*/
public java.util.List<CreativeGroupAssignment> getCreativeGroupAssignments() {
return creativeGroupAssignments;
}
/**
* Creative group assignments for this ad. Applicable when type is AD_SERVING_CLICK_TRACKER. Only
* one assignment per creative group number is allowed for a maximum of two assignments.
* @param creativeGroupAssignments creativeGroupAssignments or {@code null} for none
*/
public Ad setCreativeGroupAssignments(java.util.List<CreativeGroupAssignment> creativeGroupAssignments) {
this.creativeGroupAssignments = creativeGroupAssignments;
return this;
}
/**
* Creative rotation for this ad. Applicable when type is AD_SERVING_DEFAULT_AD,
* AD_SERVING_STANDARD_AD, or AD_SERVING_TRACKING. When type is AD_SERVING_DEFAULT_AD, this field
* should have exactly one creativeAssignment .
* @return value or {@code null} for none
*/
public CreativeRotation getCreativeRotation() {
return creativeRotation;
}
/**
* Creative rotation for this ad. Applicable when type is AD_SERVING_DEFAULT_AD,
* AD_SERVING_STANDARD_AD, or AD_SERVING_TRACKING. When type is AD_SERVING_DEFAULT_AD, this field
* should have exactly one creativeAssignment .
* @param creativeRotation creativeRotation or {@code null} for none
*/
public Ad setCreativeRotation(CreativeRotation creativeRotation) {
this.creativeRotation = creativeRotation;
return this;
}
/**
* Time and day targeting information for this ad. This field must be left blank if the ad is
* using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* @return value or {@code null} for none
*/
public DayPartTargeting getDayPartTargeting() {
return dayPartTargeting;
}
/**
* Time and day targeting information for this ad. This field must be left blank if the ad is
* using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* @param dayPartTargeting dayPartTargeting or {@code null} for none
*/
public Ad setDayPartTargeting(DayPartTargeting dayPartTargeting) {
this.dayPartTargeting = dayPartTargeting;
return this;
}
/**
* Default click-through event tag properties for this ad.
* @return value or {@code null} for none
*/
public DefaultClickThroughEventTagProperties getDefaultClickThroughEventTagProperties() {
return defaultClickThroughEventTagProperties;
}
/**
* Default click-through event tag properties for this ad.
* @param defaultClickThroughEventTagProperties defaultClickThroughEventTagProperties or {@code null} for none
*/
public Ad setDefaultClickThroughEventTagProperties(DefaultClickThroughEventTagProperties defaultClickThroughEventTagProperties) {
this.defaultClickThroughEventTagProperties = defaultClickThroughEventTagProperties;
return this;
}
/**
* Delivery schedule information for this ad. Applicable when type is AD_SERVING_STANDARD_AD or
* AD_SERVING_TRACKING. This field along with subfields priority and impressionRatio are required
* on insertion when type is AD_SERVING_STANDARD_AD.
* @return value or {@code null} for none
*/
public DeliverySchedule getDeliverySchedule() {
return deliverySchedule;
}
/**
* Delivery schedule information for this ad. Applicable when type is AD_SERVING_STANDARD_AD or
* AD_SERVING_TRACKING. This field along with subfields priority and impressionRatio are required
* on insertion when type is AD_SERVING_STANDARD_AD.
* @param deliverySchedule deliverySchedule or {@code null} for none
*/
public Ad setDeliverySchedule(DeliverySchedule deliverySchedule) {
this.deliverySchedule = deliverySchedule;
return this;
}
/**
* Whether this ad is a dynamic click tracker. Applicable when type is AD_SERVING_CLICK_TRACKER.
* This is a required field on insert, and is read-only after insert.
* @return value or {@code null} for none
*/
public java.lang.Boolean getDynamicClickTracker() {
return dynamicClickTracker;
}
/**
* Whether this ad is a dynamic click tracker. Applicable when type is AD_SERVING_CLICK_TRACKER.
* This is a required field on insert, and is read-only after insert.
* @param dynamicClickTracker dynamicClickTracker or {@code null} for none
*/
public Ad setDynamicClickTracker(java.lang.Boolean dynamicClickTracker) {
this.dynamicClickTracker = dynamicClickTracker;
return this;
}
  /**
   * Returns the end time of this ad (presumably when it stops serving —
   * description missing from the generated discovery doc; confirm against the
   * Campaign Manager 360 API reference).
   * @return value or {@code null} for none
   */
  public com.google.api.client.util.DateTime getEndTime() {
    return endTime;
  }
  /**
   * Sets the end time of this ad (presumably when it stops serving —
   * description missing from the generated discovery doc; confirm against the
   * Campaign Manager 360 API reference).
   * @param endTime endTime or {@code null} for none
   * @return this {@code Ad}, for call chaining
   */
  public Ad setEndTime(com.google.api.client.util.DateTime endTime) {
    this.endTime = endTime;
    return this;
  }
/**
* Event tag overrides for this ad.
* @return value or {@code null} for none
*/
public java.util.List<EventTagOverride> getEventTagOverrides() {
return eventTagOverrides;
}
/**
* Event tag overrides for this ad.
* @param eventTagOverrides eventTagOverrides or {@code null} for none
*/
public Ad setEventTagOverrides(java.util.List<EventTagOverride> eventTagOverrides) {
this.eventTagOverrides = eventTagOverrides;
return this;
}
/**
* Geographical targeting information for this ad. This field must be left blank if the ad is
* using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* @return value or {@code null} for none
*/
public GeoTargeting getGeoTargeting() {
return geoTargeting;
}
/**
* Geographical targeting information for this ad. This field must be left blank if the ad is
* using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* @param geoTargeting geoTargeting or {@code null} for none
*/
public Ad setGeoTargeting(GeoTargeting geoTargeting) {
this.geoTargeting = geoTargeting;
return this;
}
/**
* ID of this ad. This is a read-only, auto-generated field.
* @return value or {@code null} for none
*/
public java.lang.Long getId() {
return id;
}
/**
* ID of this ad. This is a read-only, auto-generated field.
* @param id id or {@code null} for none
*/
public Ad setId(java.lang.Long id) {
this.id = id;
return this;
}
/**
* Dimension value for the ID of this ad. This is a read-only, auto-generated field.
* @return value or {@code null} for none
*/
public DimensionValue getIdDimensionValue() {
return idDimensionValue;
}
/**
* Dimension value for the ID of this ad. This is a read-only, auto-generated field.
* @param idDimensionValue idDimensionValue or {@code null} for none
*/
public Ad setIdDimensionValue(DimensionValue idDimensionValue) {
this.idDimensionValue = idDimensionValue;
return this;
}
/**
* Key-value targeting information for this ad. This field must be left blank if the ad is using a
* targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* @return value or {@code null} for none
*/
public KeyValueTargetingExpression getKeyValueTargetingExpression() {
return keyValueTargetingExpression;
}
/**
* Key-value targeting information for this ad. This field must be left blank if the ad is using a
* targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* @param keyValueTargetingExpression keyValueTargetingExpression or {@code null} for none
*/
public Ad setKeyValueTargetingExpression(KeyValueTargetingExpression keyValueTargetingExpression) {
this.keyValueTargetingExpression = keyValueTargetingExpression;
return this;
}
/**
* Identifies what kind of resource this is. Value: the fixed string "dfareporting#ad".
* @return value or {@code null} for none
*/
public java.lang.String getKind() {
return kind;
}
/**
* Identifies what kind of resource this is. Value: the fixed string "dfareporting#ad".
* @param kind kind or {@code null} for none
*/
public Ad setKind(java.lang.String kind) {
this.kind = kind;
return this;
}
/**
* Language targeting information for this ad. This field must be left blank if the ad is using a
* targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* @return value or {@code null} for none
*/
public LanguageTargeting getLanguageTargeting() {
return languageTargeting;
}
/**
* Language targeting information for this ad. This field must be left blank if the ad is using a
* targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* @param languageTargeting languageTargeting or {@code null} for none
*/
public Ad setLanguageTargeting(LanguageTargeting languageTargeting) {
this.languageTargeting = languageTargeting;
return this;
}
/**
* Information about the most recent modification of this ad. This is a read-only field.
* @return value or {@code null} for none
*/
public LastModifiedInfo getLastModifiedInfo() {
return lastModifiedInfo;
}
/**
* Information about the most recent modification of this ad. This is a read-only field.
* @param lastModifiedInfo lastModifiedInfo or {@code null} for none
*/
public Ad setLastModifiedInfo(LastModifiedInfo lastModifiedInfo) {
this.lastModifiedInfo = lastModifiedInfo;
return this;
}
/**
* Name of this ad. This is a required field and must be less than 256 characters long.
* @return value or {@code null} for none
*/
public java.lang.String getName() {
return name;
}
/**
* Name of this ad. This is a required field and must be less than 256 characters long.
* @param name name or {@code null} for none
*/
public Ad setName(java.lang.String name) {
this.name = name;
return this;
}
/**
* Placement assignments for this ad.
* @return value or {@code null} for none
*/
public java.util.List<PlacementAssignment> getPlacementAssignments() {
return placementAssignments;
}
/**
* Placement assignments for this ad.
* @param placementAssignments placementAssignments or {@code null} for none
*/
public Ad setPlacementAssignments(java.util.List<PlacementAssignment> placementAssignments) {
this.placementAssignments = placementAssignments;
return this;
}
/**
* Remarketing list targeting expression for this ad. This field must be left blank if the ad is
* using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* @return value or {@code null} for none
*/
public ListTargetingExpression getRemarketingListExpression() {
return remarketingListExpression;
}
/**
* Remarketing list targeting expression for this ad. This field must be left blank if the ad is
* using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* @param remarketingListExpression remarketingListExpression or {@code null} for none
*/
public Ad setRemarketingListExpression(ListTargetingExpression remarketingListExpression) {
this.remarketingListExpression = remarketingListExpression;
return this;
}
/**
* Size of this ad. Applicable when type is AD_SERVING_DEFAULT_AD.
* @return value or {@code null} for none
*/
public Size getSize() {
return size;
}
/**
* Size of this ad. Applicable when type is AD_SERVING_DEFAULT_AD.
* @param size size or {@code null} for none
*/
public Ad setSize(Size size) {
this.size = size;
return this;
}
/**
* Whether this ad is ssl compliant. This is a read-only field that is auto-generated when the ad
* is inserted or updated.
* @return value or {@code null} for none
*/
public java.lang.Boolean getSslCompliant() {
return sslCompliant;
}
/**
* Whether this ad is ssl compliant. This is a read-only field that is auto-generated when the ad
* is inserted or updated.
* @param sslCompliant sslCompliant or {@code null} for none
*/
public Ad setSslCompliant(java.lang.Boolean sslCompliant) {
this.sslCompliant = sslCompliant;
return this;
}
/**
* Whether this ad requires ssl. This is a read-only field that is auto-generated when the ad is
* inserted or updated.
* @return value or {@code null} for none
*/
public java.lang.Boolean getSslRequired() {
return sslRequired;
}
/**
* Whether this ad requires ssl. This is a read-only field that is auto-generated when the ad is
* inserted or updated.
* @param sslRequired sslRequired or {@code null} for none
*/
public Ad setSslRequired(java.lang.Boolean sslRequired) {
this.sslRequired = sslRequired;
return this;
}
  /**
   * Returns the start time of this ad (presumably when it begins serving —
   * description missing from the generated discovery doc; confirm against the
   * Campaign Manager 360 API reference).
   * @return value or {@code null} for none
   */
  public com.google.api.client.util.DateTime getStartTime() {
    return startTime;
  }
  /**
   * Sets the start time of this ad (presumably when it begins serving —
   * description missing from the generated discovery doc; confirm against the
   * Campaign Manager 360 API reference).
   * @param startTime startTime or {@code null} for none
   * @return this {@code Ad}, for call chaining
   */
  public Ad setStartTime(com.google.api.client.util.DateTime startTime) {
    this.startTime = startTime;
    return this;
  }
/**
* Subaccount ID of this ad. This is a read-only field that can be left blank.
* @return value or {@code null} for none
*/
public java.lang.Long getSubaccountId() {
return subaccountId;
}
/**
* Subaccount ID of this ad. This is a read-only field that can be left blank.
* @param subaccountId subaccountId or {@code null} for none
*/
public Ad setSubaccountId(java.lang.Long subaccountId) {
this.subaccountId = subaccountId;
return this;
}
/**
* Targeting template ID, used to apply preconfigured targeting information to this ad. This
* cannot be set while any of dayPartTargeting, geoTargeting, keyValueTargetingExpression,
* languageTargeting, remarketingListExpression, or technologyTargeting are set. Applicable when
* type is AD_SERVING_STANDARD_AD.
* @return value or {@code null} for none
*/
public java.lang.Long getTargetingTemplateId() {
return targetingTemplateId;
}
/**
* Targeting template ID, used to apply preconfigured targeting information to this ad. This
* cannot be set while any of dayPartTargeting, geoTargeting, keyValueTargetingExpression,
* languageTargeting, remarketingListExpression, or technologyTargeting are set. Applicable when
* type is AD_SERVING_STANDARD_AD.
* @param targetingTemplateId targetingTemplateId or {@code null} for none
*/
public Ad setTargetingTemplateId(java.lang.Long targetingTemplateId) {
this.targetingTemplateId = targetingTemplateId;
return this;
}
/**
* Technology platform targeting information for this ad. This field must be left blank if the ad
* is using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* @return value or {@code null} for none
*/
public TechnologyTargeting getTechnologyTargeting() {
return technologyTargeting;
}
/**
* Technology platform targeting information for this ad. This field must be left blank if the ad
* is using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* @param technologyTargeting technologyTargeting or {@code null} for none
*/
public Ad setTechnologyTargeting(TechnologyTargeting technologyTargeting) {
this.technologyTargeting = technologyTargeting;
return this;
}
/**
* Type of ad. This is a required field on insertion. Note that default ads (
* AD_SERVING_DEFAULT_AD) cannot be created directly (see Creative resource).
* @return value or {@code null} for none
*/
public java.lang.String getType() {
return type;
}
/**
* Type of ad. This is a required field on insertion. Note that default ads (
* AD_SERVING_DEFAULT_AD) cannot be created directly (see Creative resource).
* @param type type or {@code null} for none
*/
public Ad setType(java.lang.String type) {
this.type = type;
return this;
}
  /**
   * Sets an arbitrary JSON field on this ad by name, delegating to the generic
   * superclass implementation and narrowing the return type so fluent call
   * chains keep the {@code Ad} type.
   * @param fieldName name of the JSON field to set
   * @param value value to assign to the field; may be {@code null}
   * @return this {@code Ad}, for call chaining
   */
  @Override
  public Ad set(String fieldName, Object value) {
    return (Ad) super.set(fieldName, value);
  }
  /**
   * Returns a copy of this ad, delegating to the superclass {@code clone()} and
   * narrowing the return type to {@code Ad}. (Copy depth is determined by the
   * {@code GenericJson} superclass — not visible here; consult its docs.)
   * @return a copy of this {@code Ad}
   */
  @Override
  public Ad clone() {
    return (Ad) super.clone();
  }
}
|
googleapis/google-api-java-client-services | 35,622 | clients/google-api-services-dfareporting/v3.5/1.31.0/com/google/api/services/dfareporting/model/Ad.java | /*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.dfareporting.model;
/**
* Contains properties of a Campaign Manager ad.
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the Campaign Manager 360 API. For a detailed explanation
* see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public final class Ad extends com.google.api.client.json.GenericJson {
/**
* Account ID of this ad. This is a read-only field that can be left blank.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key @com.google.api.client.json.JsonString
private java.lang.Long accountId;
/**
* Whether this ad is active. When true, archived must be false.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean active;
/**
* Advertiser ID of this ad. This is a required field on insertion.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key @com.google.api.client.json.JsonString
private java.lang.Long advertiserId;
/**
* Dimension value for the ID of the advertiser. This is a read-only, auto-generated field.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private DimensionValue advertiserIdDimensionValue;
/**
* Whether this ad is archived. When true, active must be false.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean archived;
/**
* Audience segment ID that is being targeted for this ad. Applicable when type is
* AD_SERVING_STANDARD_AD.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key @com.google.api.client.json.JsonString
private java.lang.Long audienceSegmentId;
/**
* Campaign ID of this ad. This is a required field on insertion.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key @com.google.api.client.json.JsonString
  private java.lang.Long campaignId;
  /**
   * Dimension value for the ID of the campaign. This is a read-only, auto-generated field.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private DimensionValue campaignIdDimensionValue;
  /**
   * Click-through URL for this ad. This is a required field on insertion. Applicable when type is
   * AD_SERVING_CLICK_TRACKER.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private ClickThroughUrl clickThroughUrl;
  /**
   * Click-through URL suffix properties for this ad. Applies to the URL in the ad or (if overriding
   * ad properties) the URL in the creative.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private ClickThroughUrlSuffixProperties clickThroughUrlSuffixProperties;
  /**
   * Comments for this ad.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String comments;
  /**
   * Compatibility of this ad. Applicable when type is AD_SERVING_DEFAULT_AD. DISPLAY and
   * DISPLAY_INTERSTITIAL refer to either rendering on desktop or on mobile devices or in mobile
   * apps for regular or interstitial ads, respectively. APP and APP_INTERSTITIAL are only used for
   * existing default ads. New mobile placements must be assigned DISPLAY or DISPLAY_INTERSTITIAL
   * and default ads created for those placements will be limited to those compatibility types.
   * IN_STREAM_VIDEO refers to rendering in-stream video ads developed with the VAST standard.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String compatibility;
  /**
   * Information about the creation of this ad. This is a read-only field.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private LastModifiedInfo createInfo;
  /**
   * Creative group assignments for this ad. Applicable when type is AD_SERVING_CLICK_TRACKER. Only
   * one assignment per creative group number is allowed for a maximum of two assignments.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<CreativeGroupAssignment> creativeGroupAssignments;
  /**
   * Creative rotation for this ad. Applicable when type is AD_SERVING_DEFAULT_AD,
   * AD_SERVING_STANDARD_AD, or AD_SERVING_TRACKING. When type is AD_SERVING_DEFAULT_AD, this field
   * should have exactly one creativeAssignment .
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private CreativeRotation creativeRotation;
  /**
   * Time and day targeting information for this ad. This field must be left blank if the ad is
   * using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private DayPartTargeting dayPartTargeting;
  /**
   * Default click-through event tag properties for this ad.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private DefaultClickThroughEventTagProperties defaultClickThroughEventTagProperties;
  /**
   * Delivery schedule information for this ad. Applicable when type is AD_SERVING_STANDARD_AD or
   * AD_SERVING_TRACKING. This field along with subfields priority and impressionRatio are required
   * on insertion when type is AD_SERVING_STANDARD_AD.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private DeliverySchedule deliverySchedule;
  /**
   * Whether this ad is a dynamic click tracker. Applicable when type is AD_SERVING_CLICK_TRACKER.
   * This is a required field on insert, and is read-only after insert.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Boolean dynamicClickTracker;
  /**
   * End time of this ad. (No description is provided by the discovery document; presumably the
   * date/time after which the ad stops serving — confirm against the dfareporting API reference.)
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private com.google.api.client.util.DateTime endTime;
  /**
   * Event tag overrides for this ad.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<EventTagOverride> eventTagOverrides;
  /**
   * Geographical targeting information for this ad. This field must be left blank if the ad is
   * using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private GeoTargeting geoTargeting;
  /**
   * ID of this ad. This is a read-only, auto-generated field.
   * The value may be {@code null}.
   */
  // @JsonString: 64-bit IDs are carried as JSON strings so JavaScript clients do not lose precision.
  @com.google.api.client.util.Key @com.google.api.client.json.JsonString
  private java.lang.Long id;
  /**
   * Dimension value for the ID of this ad. This is a read-only, auto-generated field.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private DimensionValue idDimensionValue;
  /**
   * Key-value targeting information for this ad. This field must be left blank if the ad is using a
   * targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private KeyValueTargetingExpression keyValueTargetingExpression;
  /**
   * Identifies what kind of resource this is. Value: the fixed string "dfareporting#ad".
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String kind;
  /**
   * Language targeting information for this ad. This field must be left blank if the ad is using a
   * targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private LanguageTargeting languageTargeting;
  /**
   * Information about the most recent modification of this ad. This is a read-only field.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private LastModifiedInfo lastModifiedInfo;
  /**
   * Name of this ad. This is a required field and must be less than 256 characters long.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String name;
  /**
   * Placement assignments for this ad.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<PlacementAssignment> placementAssignments;
  /**
   * Remarketing list targeting expression for this ad. This field must be left blank if the ad is
   * using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private ListTargetingExpression remarketingListExpression;
  /**
   * Size of this ad. Applicable when type is AD_SERVING_DEFAULT_AD.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private Size size;
  /**
   * Whether this ad is ssl compliant. This is a read-only field that is auto-generated when the ad
   * is inserted or updated.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Boolean sslCompliant;
  /**
   * Whether this ad requires ssl. This is a read-only field that is auto-generated when the ad is
   * inserted or updated.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Boolean sslRequired;
  /**
   * Start time of this ad. (No description is provided by the discovery document; presumably the
   * date/time at which the ad begins serving — confirm against the dfareporting API reference.)
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private com.google.api.client.util.DateTime startTime;
  /**
   * Subaccount ID of this ad. This is a read-only field that can be left blank.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key @com.google.api.client.json.JsonString
  private java.lang.Long subaccountId;
  /**
   * Targeting template ID, used to apply preconfigured targeting information to this ad. This
   * cannot be set while any of dayPartTargeting, geoTargeting, keyValueTargetingExpression,
   * languageTargeting, remarketingListExpression, or technologyTargeting are set. Applicable when
   * type is AD_SERVING_STANDARD_AD.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key @com.google.api.client.json.JsonString
  private java.lang.Long targetingTemplateId;
  /**
   * Technology platform targeting information for this ad. This field must be left blank if the ad
   * is using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private TechnologyTargeting technologyTargeting;
  /**
   * Type of ad. This is a required field on insertion. Note that default ads (
   * AD_SERVING_DEFAULT_AD) cannot be created directly (see Creative resource).
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String type;
/**
* Account ID of this ad. This is a read-only field that can be left blank.
* @return value or {@code null} for none
*/
public java.lang.Long getAccountId() {
return accountId;
}
/**
* Account ID of this ad. This is a read-only field that can be left blank.
* @param accountId accountId or {@code null} for none
*/
public Ad setAccountId(java.lang.Long accountId) {
this.accountId = accountId;
return this;
}
/**
* Whether this ad is active. When true, archived must be false.
* @return value or {@code null} for none
*/
public java.lang.Boolean getActive() {
return active;
}
/**
* Whether this ad is active. When true, archived must be false.
* @param active active or {@code null} for none
*/
public Ad setActive(java.lang.Boolean active) {
this.active = active;
return this;
}
/**
* Advertiser ID of this ad. This is a required field on insertion.
* @return value or {@code null} for none
*/
public java.lang.Long getAdvertiserId() {
return advertiserId;
}
/**
* Advertiser ID of this ad. This is a required field on insertion.
* @param advertiserId advertiserId or {@code null} for none
*/
public Ad setAdvertiserId(java.lang.Long advertiserId) {
this.advertiserId = advertiserId;
return this;
}
/**
* Dimension value for the ID of the advertiser. This is a read-only, auto-generated field.
* @return value or {@code null} for none
*/
public DimensionValue getAdvertiserIdDimensionValue() {
return advertiserIdDimensionValue;
}
/**
* Dimension value for the ID of the advertiser. This is a read-only, auto-generated field.
* @param advertiserIdDimensionValue advertiserIdDimensionValue or {@code null} for none
*/
public Ad setAdvertiserIdDimensionValue(DimensionValue advertiserIdDimensionValue) {
this.advertiserIdDimensionValue = advertiserIdDimensionValue;
return this;
}
/**
* Whether this ad is archived. When true, active must be false.
* @return value or {@code null} for none
*/
public java.lang.Boolean getArchived() {
return archived;
}
/**
* Whether this ad is archived. When true, active must be false.
* @param archived archived or {@code null} for none
*/
public Ad setArchived(java.lang.Boolean archived) {
this.archived = archived;
return this;
}
/**
* Audience segment ID that is being targeted for this ad. Applicable when type is
* AD_SERVING_STANDARD_AD.
* @return value or {@code null} for none
*/
public java.lang.Long getAudienceSegmentId() {
return audienceSegmentId;
}
/**
* Audience segment ID that is being targeted for this ad. Applicable when type is
* AD_SERVING_STANDARD_AD.
* @param audienceSegmentId audienceSegmentId or {@code null} for none
*/
public Ad setAudienceSegmentId(java.lang.Long audienceSegmentId) {
this.audienceSegmentId = audienceSegmentId;
return this;
}
/**
* Campaign ID of this ad. This is a required field on insertion.
* @return value or {@code null} for none
*/
public java.lang.Long getCampaignId() {
return campaignId;
}
/**
* Campaign ID of this ad. This is a required field on insertion.
* @param campaignId campaignId or {@code null} for none
*/
public Ad setCampaignId(java.lang.Long campaignId) {
this.campaignId = campaignId;
return this;
}
/**
* Dimension value for the ID of the campaign. This is a read-only, auto-generated field.
* @return value or {@code null} for none
*/
public DimensionValue getCampaignIdDimensionValue() {
return campaignIdDimensionValue;
}
/**
* Dimension value for the ID of the campaign. This is a read-only, auto-generated field.
* @param campaignIdDimensionValue campaignIdDimensionValue or {@code null} for none
*/
public Ad setCampaignIdDimensionValue(DimensionValue campaignIdDimensionValue) {
this.campaignIdDimensionValue = campaignIdDimensionValue;
return this;
}
/**
* Click-through URL for this ad. This is a required field on insertion. Applicable when type is
* AD_SERVING_CLICK_TRACKER.
* @return value or {@code null} for none
*/
public ClickThroughUrl getClickThroughUrl() {
return clickThroughUrl;
}
/**
* Click-through URL for this ad. This is a required field on insertion. Applicable when type is
* AD_SERVING_CLICK_TRACKER.
* @param clickThroughUrl clickThroughUrl or {@code null} for none
*/
public Ad setClickThroughUrl(ClickThroughUrl clickThroughUrl) {
this.clickThroughUrl = clickThroughUrl;
return this;
}
/**
* Click-through URL suffix properties for this ad. Applies to the URL in the ad or (if overriding
* ad properties) the URL in the creative.
* @return value or {@code null} for none
*/
public ClickThroughUrlSuffixProperties getClickThroughUrlSuffixProperties() {
return clickThroughUrlSuffixProperties;
}
/**
* Click-through URL suffix properties for this ad. Applies to the URL in the ad or (if overriding
* ad properties) the URL in the creative.
* @param clickThroughUrlSuffixProperties clickThroughUrlSuffixProperties or {@code null} for none
*/
public Ad setClickThroughUrlSuffixProperties(ClickThroughUrlSuffixProperties clickThroughUrlSuffixProperties) {
this.clickThroughUrlSuffixProperties = clickThroughUrlSuffixProperties;
return this;
}
/**
* Comments for this ad.
* @return value or {@code null} for none
*/
public java.lang.String getComments() {
return comments;
}
/**
* Comments for this ad.
* @param comments comments or {@code null} for none
*/
public Ad setComments(java.lang.String comments) {
this.comments = comments;
return this;
}
/**
* Compatibility of this ad. Applicable when type is AD_SERVING_DEFAULT_AD. DISPLAY and
* DISPLAY_INTERSTITIAL refer to either rendering on desktop or on mobile devices or in mobile
* apps for regular or interstitial ads, respectively. APP and APP_INTERSTITIAL are only used for
* existing default ads. New mobile placements must be assigned DISPLAY or DISPLAY_INTERSTITIAL
* and default ads created for those placements will be limited to those compatibility types.
* IN_STREAM_VIDEO refers to rendering in-stream video ads developed with the VAST standard.
* @return value or {@code null} for none
*/
public java.lang.String getCompatibility() {
return compatibility;
}
/**
* Compatibility of this ad. Applicable when type is AD_SERVING_DEFAULT_AD. DISPLAY and
* DISPLAY_INTERSTITIAL refer to either rendering on desktop or on mobile devices or in mobile
* apps for regular or interstitial ads, respectively. APP and APP_INTERSTITIAL are only used for
* existing default ads. New mobile placements must be assigned DISPLAY or DISPLAY_INTERSTITIAL
* and default ads created for those placements will be limited to those compatibility types.
* IN_STREAM_VIDEO refers to rendering in-stream video ads developed with the VAST standard.
* @param compatibility compatibility or {@code null} for none
*/
public Ad setCompatibility(java.lang.String compatibility) {
this.compatibility = compatibility;
return this;
}
/**
* Information about the creation of this ad. This is a read-only field.
* @return value or {@code null} for none
*/
public LastModifiedInfo getCreateInfo() {
return createInfo;
}
/**
* Information about the creation of this ad. This is a read-only field.
* @param createInfo createInfo or {@code null} for none
*/
public Ad setCreateInfo(LastModifiedInfo createInfo) {
this.createInfo = createInfo;
return this;
}
/**
* Creative group assignments for this ad. Applicable when type is AD_SERVING_CLICK_TRACKER. Only
* one assignment per creative group number is allowed for a maximum of two assignments.
* @return value or {@code null} for none
*/
public java.util.List<CreativeGroupAssignment> getCreativeGroupAssignments() {
return creativeGroupAssignments;
}
/**
* Creative group assignments for this ad. Applicable when type is AD_SERVING_CLICK_TRACKER. Only
* one assignment per creative group number is allowed for a maximum of two assignments.
* @param creativeGroupAssignments creativeGroupAssignments or {@code null} for none
*/
public Ad setCreativeGroupAssignments(java.util.List<CreativeGroupAssignment> creativeGroupAssignments) {
this.creativeGroupAssignments = creativeGroupAssignments;
return this;
}
/**
* Creative rotation for this ad. Applicable when type is AD_SERVING_DEFAULT_AD,
* AD_SERVING_STANDARD_AD, or AD_SERVING_TRACKING. When type is AD_SERVING_DEFAULT_AD, this field
* should have exactly one creativeAssignment .
* @return value or {@code null} for none
*/
public CreativeRotation getCreativeRotation() {
return creativeRotation;
}
/**
* Creative rotation for this ad. Applicable when type is AD_SERVING_DEFAULT_AD,
* AD_SERVING_STANDARD_AD, or AD_SERVING_TRACKING. When type is AD_SERVING_DEFAULT_AD, this field
* should have exactly one creativeAssignment .
* @param creativeRotation creativeRotation or {@code null} for none
*/
public Ad setCreativeRotation(CreativeRotation creativeRotation) {
this.creativeRotation = creativeRotation;
return this;
}
/**
* Time and day targeting information for this ad. This field must be left blank if the ad is
* using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* @return value or {@code null} for none
*/
public DayPartTargeting getDayPartTargeting() {
return dayPartTargeting;
}
/**
* Time and day targeting information for this ad. This field must be left blank if the ad is
* using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* @param dayPartTargeting dayPartTargeting or {@code null} for none
*/
public Ad setDayPartTargeting(DayPartTargeting dayPartTargeting) {
this.dayPartTargeting = dayPartTargeting;
return this;
}
/**
* Default click-through event tag properties for this ad.
* @return value or {@code null} for none
*/
public DefaultClickThroughEventTagProperties getDefaultClickThroughEventTagProperties() {
return defaultClickThroughEventTagProperties;
}
/**
* Default click-through event tag properties for this ad.
* @param defaultClickThroughEventTagProperties defaultClickThroughEventTagProperties or {@code null} for none
*/
public Ad setDefaultClickThroughEventTagProperties(DefaultClickThroughEventTagProperties defaultClickThroughEventTagProperties) {
this.defaultClickThroughEventTagProperties = defaultClickThroughEventTagProperties;
return this;
}
/**
* Delivery schedule information for this ad. Applicable when type is AD_SERVING_STANDARD_AD or
* AD_SERVING_TRACKING. This field along with subfields priority and impressionRatio are required
* on insertion when type is AD_SERVING_STANDARD_AD.
* @return value or {@code null} for none
*/
public DeliverySchedule getDeliverySchedule() {
return deliverySchedule;
}
/**
* Delivery schedule information for this ad. Applicable when type is AD_SERVING_STANDARD_AD or
* AD_SERVING_TRACKING. This field along with subfields priority and impressionRatio are required
* on insertion when type is AD_SERVING_STANDARD_AD.
* @param deliverySchedule deliverySchedule or {@code null} for none
*/
public Ad setDeliverySchedule(DeliverySchedule deliverySchedule) {
this.deliverySchedule = deliverySchedule;
return this;
}
/**
* Whether this ad is a dynamic click tracker. Applicable when type is AD_SERVING_CLICK_TRACKER.
* This is a required field on insert, and is read-only after insert.
* @return value or {@code null} for none
*/
public java.lang.Boolean getDynamicClickTracker() {
return dynamicClickTracker;
}
/**
* Whether this ad is a dynamic click tracker. Applicable when type is AD_SERVING_CLICK_TRACKER.
* This is a required field on insert, and is read-only after insert.
* @param dynamicClickTracker dynamicClickTracker or {@code null} for none
*/
public Ad setDynamicClickTracker(java.lang.Boolean dynamicClickTracker) {
this.dynamicClickTracker = dynamicClickTracker;
return this;
}
/**
* @return value or {@code null} for none
*/
public com.google.api.client.util.DateTime getEndTime() {
return endTime;
}
/**
* @param endTime endTime or {@code null} for none
*/
public Ad setEndTime(com.google.api.client.util.DateTime endTime) {
this.endTime = endTime;
return this;
}
/**
* Event tag overrides for this ad.
* @return value or {@code null} for none
*/
public java.util.List<EventTagOverride> getEventTagOverrides() {
return eventTagOverrides;
}
/**
* Event tag overrides for this ad.
* @param eventTagOverrides eventTagOverrides or {@code null} for none
*/
public Ad setEventTagOverrides(java.util.List<EventTagOverride> eventTagOverrides) {
this.eventTagOverrides = eventTagOverrides;
return this;
}
/**
* Geographical targeting information for this ad. This field must be left blank if the ad is
* using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* @return value or {@code null} for none
*/
public GeoTargeting getGeoTargeting() {
return geoTargeting;
}
/**
* Geographical targeting information for this ad. This field must be left blank if the ad is
* using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* @param geoTargeting geoTargeting or {@code null} for none
*/
public Ad setGeoTargeting(GeoTargeting geoTargeting) {
this.geoTargeting = geoTargeting;
return this;
}
/**
* ID of this ad. This is a read-only, auto-generated field.
* @return value or {@code null} for none
*/
public java.lang.Long getId() {
return id;
}
/**
* ID of this ad. This is a read-only, auto-generated field.
* @param id id or {@code null} for none
*/
public Ad setId(java.lang.Long id) {
this.id = id;
return this;
}
/**
* Dimension value for the ID of this ad. This is a read-only, auto-generated field.
* @return value or {@code null} for none
*/
public DimensionValue getIdDimensionValue() {
return idDimensionValue;
}
/**
* Dimension value for the ID of this ad. This is a read-only, auto-generated field.
* @param idDimensionValue idDimensionValue or {@code null} for none
*/
public Ad setIdDimensionValue(DimensionValue idDimensionValue) {
this.idDimensionValue = idDimensionValue;
return this;
}
/**
* Key-value targeting information for this ad. This field must be left blank if the ad is using a
* targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* @return value or {@code null} for none
*/
public KeyValueTargetingExpression getKeyValueTargetingExpression() {
return keyValueTargetingExpression;
}
/**
* Key-value targeting information for this ad. This field must be left blank if the ad is using a
* targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* @param keyValueTargetingExpression keyValueTargetingExpression or {@code null} for none
*/
public Ad setKeyValueTargetingExpression(KeyValueTargetingExpression keyValueTargetingExpression) {
this.keyValueTargetingExpression = keyValueTargetingExpression;
return this;
}
/**
* Identifies what kind of resource this is. Value: the fixed string "dfareporting#ad".
* @return value or {@code null} for none
*/
public java.lang.String getKind() {
return kind;
}
/**
* Identifies what kind of resource this is. Value: the fixed string "dfareporting#ad".
* @param kind kind or {@code null} for none
*/
public Ad setKind(java.lang.String kind) {
this.kind = kind;
return this;
}
/**
* Language targeting information for this ad. This field must be left blank if the ad is using a
* targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* @return value or {@code null} for none
*/
public LanguageTargeting getLanguageTargeting() {
return languageTargeting;
}
/**
* Language targeting information for this ad. This field must be left blank if the ad is using a
* targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* @param languageTargeting languageTargeting or {@code null} for none
*/
public Ad setLanguageTargeting(LanguageTargeting languageTargeting) {
this.languageTargeting = languageTargeting;
return this;
}
/**
* Information about the most recent modification of this ad. This is a read-only field.
* @return value or {@code null} for none
*/
public LastModifiedInfo getLastModifiedInfo() {
return lastModifiedInfo;
}
/**
* Information about the most recent modification of this ad. This is a read-only field.
* @param lastModifiedInfo lastModifiedInfo or {@code null} for none
*/
public Ad setLastModifiedInfo(LastModifiedInfo lastModifiedInfo) {
this.lastModifiedInfo = lastModifiedInfo;
return this;
}
/**
* Name of this ad. This is a required field and must be less than 256 characters long.
* @return value or {@code null} for none
*/
public java.lang.String getName() {
return name;
}
/**
* Name of this ad. This is a required field and must be less than 256 characters long.
* @param name name or {@code null} for none
*/
public Ad setName(java.lang.String name) {
this.name = name;
return this;
}
/**
* Placement assignments for this ad.
* @return value or {@code null} for none
*/
public java.util.List<PlacementAssignment> getPlacementAssignments() {
return placementAssignments;
}
/**
* Placement assignments for this ad.
* @param placementAssignments placementAssignments or {@code null} for none
*/
public Ad setPlacementAssignments(java.util.List<PlacementAssignment> placementAssignments) {
this.placementAssignments = placementAssignments;
return this;
}
/**
* Remarketing list targeting expression for this ad. This field must be left blank if the ad is
* using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* @return value or {@code null} for none
*/
public ListTargetingExpression getRemarketingListExpression() {
return remarketingListExpression;
}
/**
* Remarketing list targeting expression for this ad. This field must be left blank if the ad is
* using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* @param remarketingListExpression remarketingListExpression or {@code null} for none
*/
public Ad setRemarketingListExpression(ListTargetingExpression remarketingListExpression) {
this.remarketingListExpression = remarketingListExpression;
return this;
}
/**
* Size of this ad. Applicable when type is AD_SERVING_DEFAULT_AD.
* @return value or {@code null} for none
*/
public Size getSize() {
return size;
}
/**
* Size of this ad. Applicable when type is AD_SERVING_DEFAULT_AD.
* @param size size or {@code null} for none
*/
public Ad setSize(Size size) {
this.size = size;
return this;
}
/**
* Whether this ad is ssl compliant. This is a read-only field that is auto-generated when the ad
* is inserted or updated.
* @return value or {@code null} for none
*/
public java.lang.Boolean getSslCompliant() {
return sslCompliant;
}
/**
* Whether this ad is ssl compliant. This is a read-only field that is auto-generated when the ad
* is inserted or updated.
* @param sslCompliant sslCompliant or {@code null} for none
*/
public Ad setSslCompliant(java.lang.Boolean sslCompliant) {
this.sslCompliant = sslCompliant;
return this;
}
/**
* Whether this ad requires ssl. This is a read-only field that is auto-generated when the ad is
* inserted or updated.
* @return value or {@code null} for none
*/
public java.lang.Boolean getSslRequired() {
return sslRequired;
}
/**
* Whether this ad requires ssl. This is a read-only field that is auto-generated when the ad is
* inserted or updated.
* @param sslRequired sslRequired or {@code null} for none
*/
public Ad setSslRequired(java.lang.Boolean sslRequired) {
this.sslRequired = sslRequired;
return this;
}
/**
* @return value or {@code null} for none
*/
public com.google.api.client.util.DateTime getStartTime() {
return startTime;
}
/**
* @param startTime startTime or {@code null} for none
*/
public Ad setStartTime(com.google.api.client.util.DateTime startTime) {
this.startTime = startTime;
return this;
}
/**
* Subaccount ID of this ad. This is a read-only field that can be left blank.
* @return value or {@code null} for none
*/
public java.lang.Long getSubaccountId() {
return subaccountId;
}
/**
* Subaccount ID of this ad. This is a read-only field that can be left blank.
* @param subaccountId subaccountId or {@code null} for none
*/
public Ad setSubaccountId(java.lang.Long subaccountId) {
this.subaccountId = subaccountId;
return this;
}
/**
* Targeting template ID, used to apply preconfigured targeting information to this ad. This
* cannot be set while any of dayPartTargeting, geoTargeting, keyValueTargetingExpression,
* languageTargeting, remarketingListExpression, or technologyTargeting are set. Applicable when
* type is AD_SERVING_STANDARD_AD.
* @return value or {@code null} for none
*/
public java.lang.Long getTargetingTemplateId() {
return targetingTemplateId;
}
/**
* Targeting template ID, used to apply preconfigured targeting information to this ad. This
* cannot be set while any of dayPartTargeting, geoTargeting, keyValueTargetingExpression,
* languageTargeting, remarketingListExpression, or technologyTargeting are set. Applicable when
* type is AD_SERVING_STANDARD_AD.
* @param targetingTemplateId targetingTemplateId or {@code null} for none
*/
public Ad setTargetingTemplateId(java.lang.Long targetingTemplateId) {
this.targetingTemplateId = targetingTemplateId;
return this;
}
/**
* Technology platform targeting information for this ad. This field must be left blank if the ad
* is using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* @return value or {@code null} for none
*/
public TechnologyTargeting getTechnologyTargeting() {
return technologyTargeting;
}
/**
* Technology platform targeting information for this ad. This field must be left blank if the ad
* is using a targeting template. Applicable when type is AD_SERVING_STANDARD_AD.
* @param technologyTargeting technologyTargeting or {@code null} for none
*/
public Ad setTechnologyTargeting(TechnologyTargeting technologyTargeting) {
this.technologyTargeting = technologyTargeting;
return this;
}
/**
* Type of ad. This is a required field on insertion. Note that default ads (
* AD_SERVING_DEFAULT_AD) cannot be created directly (see Creative resource).
* @return value or {@code null} for none
*/
public java.lang.String getType() {
return type;
}
/**
* Type of ad. This is a required field on insertion. Note that default ads (
* AD_SERVING_DEFAULT_AD) cannot be created directly (see Creative resource).
* @param type type or {@code null} for none
*/
public Ad setType(java.lang.String type) {
this.type = type;
return this;
}
  // Covariant override: keeps the dynamic field-setter fluent API returning Ad.
  @Override
  public Ad set(String fieldName, Object value) {
    return (Ad) super.set(fieldName, value);
  }
  // Covariant override of the superclass clone, narrowed to Ad.
  @Override
  public Ad clone() {
    return (Ad) super.clone();
  }
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/api/client.proto
// Protobuf Java Version: 3.25.8
package com.google.api;
/**
*
*
* <pre>
* Settings for Go client libraries.
* </pre>
*
* Protobuf type {@code google.api.GoSettings}
*/
public final class GoSettings extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.api.GoSettings)
GoSettingsOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use GoSettings.newBuilder() to construct.
  private GoSettings(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // No-arg constructor used by the runtime; fields keep proto3 defaults.
  private GoSettings() {}
  // Invoked by the protobuf runtime to allocate a fresh instance (e.g. during parsing).
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new GoSettings();
  }
  // Descriptor for this message type, taken from the generated file descriptor.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.api.ClientProto.internal_static_google_api_GoSettings_descriptor;
  }
  // Exposes the sole map field (number 2: renamed_services) to the reflection machinery.
  @SuppressWarnings({"rawtypes"})
  @java.lang.Override
  protected com.google.protobuf.MapFieldReflectionAccessor internalGetMapFieldReflection(
      int number) {
    switch (number) {
      case 2:
        return internalGetRenamedServices();
      default:
        throw new RuntimeException("Invalid map field number: " + number);
    }
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.api.ClientProto.internal_static_google_api_GoSettings_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.api.GoSettings.class, com.google.api.GoSettings.Builder.class);
  }
  // Presence bits for optional message fields (bit 0x1 = common).
  private int bitField0_;
  public static final int COMMON_FIELD_NUMBER = 1;
  private com.google.api.CommonLanguageSettings common_;
  /**
   *
   *
   * <pre>
   * Some settings.
   * </pre>
   *
   * <code>.google.api.CommonLanguageSettings common = 1;</code>
   *
   * @return Whether the common field is set.
   */
  @java.lang.Override
  public boolean hasCommon() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   *
   *
   * <pre>
   * Some settings.
   * </pre>
   *
   * <code>.google.api.CommonLanguageSettings common = 1;</code>
   *
   * @return The common.
   */
  @java.lang.Override
  public com.google.api.CommonLanguageSettings getCommon() {
    // Never returns null: falls back to the default instance when the field is unset.
    return common_ == null ? com.google.api.CommonLanguageSettings.getDefaultInstance() : common_;
  }
  /**
   *
   *
   * <pre>
   * Some settings.
   * </pre>
   *
   * <code>.google.api.CommonLanguageSettings common = 1;</code>
   */
  @java.lang.Override
  public com.google.api.CommonLanguageSettingsOrBuilder getCommonOrBuilder() {
    return common_ == null ? com.google.api.CommonLanguageSettings.getDefaultInstance() : common_;
  }
  public static final int RENAMED_SERVICES_FIELD_NUMBER = 2;
  // Nested holder class defers building the default map entry until first use.
  private static final class RenamedServicesDefaultEntryHolder {
    static final com.google.protobuf.MapEntry<java.lang.String, java.lang.String> defaultEntry =
        com.google.protobuf.MapEntry.<java.lang.String, java.lang.String>newDefaultInstance(
            com.google.api.ClientProto
                .internal_static_google_api_GoSettings_RenamedServicesEntry_descriptor,
            com.google.protobuf.WireFormat.FieldType.STRING,
            "",
            com.google.protobuf.WireFormat.FieldType.STRING,
            "");
  }
  @SuppressWarnings("serial")
  private com.google.protobuf.MapField<java.lang.String, java.lang.String> renamedServices_;
  // Returns the backing map field, substituting an empty map while the field is unset.
  private com.google.protobuf.MapField<java.lang.String, java.lang.String>
      internalGetRenamedServices() {
    if (renamedServices_ == null) {
      return com.google.protobuf.MapField.emptyMapField(
          RenamedServicesDefaultEntryHolder.defaultEntry);
    }
    return renamedServices_;
  }
  public int getRenamedServicesCount() {
    return internalGetRenamedServices().getMap().size();
  }
  /**
   *
   *
   * <pre>
   * Map of service names to renamed services. Keys are the package relative
   * service names and values are the name to be used for the service client
   * and call options.
   *
   * publishing:
   *   go_settings:
   *     renamed_services:
   *       Publisher: TopicAdmin
   * </pre>
   *
   * <code>map&lt;string, string&gt; renamed_services = 2;</code>
   */
  @java.lang.Override
  public boolean containsRenamedServices(java.lang.String key) {
    if (key == null) {
      throw new NullPointerException("map key");
    }
    return internalGetRenamedServices().getMap().containsKey(key);
  }
  /** Use {@link #getRenamedServicesMap()} instead. */
  @java.lang.Override
  @java.lang.Deprecated
  public java.util.Map<java.lang.String, java.lang.String> getRenamedServices() {
    return getRenamedServicesMap();
  }
  /**
   *
   *
   * <pre>
   * Map of service names to renamed services. Keys are the package relative
   * service names and values are the name to be used for the service client
   * and call options.
   *
   * publishing:
   *   go_settings:
   *     renamed_services:
   *       Publisher: TopicAdmin
   * </pre>
   *
   * <code>map&lt;string, string&gt; renamed_services = 2;</code>
   */
  @java.lang.Override
  public java.util.Map<java.lang.String, java.lang.String> getRenamedServicesMap() {
    return internalGetRenamedServices().getMap();
  }
/**
*
*
* <pre>
* Map of service names to renamed services. Keys are the package relative
* service names and values are the name to be used for the service client
* and call options.
*
* publishing:
* go_settings:
* renamed_services:
* Publisher: TopicAdmin
* </pre>
*
* <code>map<string, string> renamed_services = 2;</code>
*/
@java.lang.Override
public /* nullable */ java.lang.String getRenamedServicesOrDefault(
java.lang.String key,
/* nullable */
java.lang.String defaultValue) {
if (key == null) {
throw new NullPointerException("map key");
}
java.util.Map<java.lang.String, java.lang.String> map = internalGetRenamedServices().getMap();
return map.containsKey(key) ? map.get(key) : defaultValue;
}
/**
*
*
* <pre>
* Map of service names to renamed services. Keys are the package relative
* service names and values are the name to be used for the service client
* and call options.
*
* publishing:
* go_settings:
* renamed_services:
* Publisher: TopicAdmin
* </pre>
*
* <code>map<string, string> renamed_services = 2;</code>
*/
@java.lang.Override
public java.lang.String getRenamedServicesOrThrow(java.lang.String key) {
if (key == null) {
throw new NullPointerException("map key");
}
java.util.Map<java.lang.String, java.lang.String> map = internalGetRenamedServices().getMap();
if (!map.containsKey(key)) {
throw new java.lang.IllegalArgumentException();
}
return map.get(key);
}
// Memoized initialization check: -1 = not computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
  // GoSettings has no required fields, so this always resolves to true and
  // caches the result.
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  memoizedIsInitialized = 1;
  return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  // Serialization order follows ascending field number: common (1), then the
  // renamed_services map entries (2), then any unknown fields.
  if (((bitField0_ & 0x00000001) != 0)) {
    output.writeMessage(1, getCommon());
  }
  com.google.protobuf.GeneratedMessageV3.serializeStringMapTo(
      output, internalGetRenamedServices(), RenamedServicesDefaultEntryHolder.defaultEntry, 2);
  getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
  // Size is memoized; -1 marks "not yet computed".
  int size = memoizedSize;
  if (size != -1) return size;
  size = 0;
  if (((bitField0_ & 0x00000001) != 0)) {
    size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getCommon());
  }
  // Each map entry is sized as a synthetic MapEntry message with field tag 2.
  for (java.util.Map.Entry<java.lang.String, java.lang.String> entry :
      internalGetRenamedServices().getMap().entrySet()) {
    com.google.protobuf.MapEntry<java.lang.String, java.lang.String> renamedServices__ =
        RenamedServicesDefaultEntryHolder.defaultEntry
            .newBuilderForType()
            .setKey(entry.getKey())
            .setValue(entry.getValue())
            .build();
    size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, renamedServices__);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  // Structural equality: presence and value of `common`, map contents, and
  // unknown fields must all match.
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.api.GoSettings)) {
    return super.equals(obj);
  }
  com.google.api.GoSettings other = (com.google.api.GoSettings) obj;
  if (hasCommon() != other.hasCommon()) return false;
  if (hasCommon()) {
    if (!getCommon().equals(other.getCommon())) return false;
  }
  if (!internalGetRenamedServices().equals(other.internalGetRenamedServices())) return false;
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}
@java.lang.Override
public int hashCode() {
  // Memoized; 0 means "not computed yet". Consistent with equals(): the hash
  // mixes the descriptor, each set field tagged by its field number, and the
  // unknown-field set.
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  if (hasCommon()) {
    hash = (37 * hash) + COMMON_FIELD_NUMBER;
    hash = (53 * hash) + getCommon().hashCode();
  }
  if (!internalGetRenamedServices().getMap().isEmpty()) {
    hash = (37 * hash) + RENAMED_SERVICES_FIELD_NUMBER;
    hash = (53 * hash) + internalGetRenamedServices().hashCode();
  }
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
// Standard generated parse entry points. The byte-oriented overloads throw
// InvalidProtocolBufferException on malformed input; the stream overloads
// surface IOException from the underlying stream as well.
public static com.google.api.GoSettings parseFrom(java.nio.ByteBuffer data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.api.GoSettings parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.api.GoSettings parseFrom(com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.api.GoSettings parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.api.GoSettings parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.api.GoSettings parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.api.GoSettings parseFrom(java.io.InputStream input)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.api.GoSettings parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
// Delimited variants read a varint length prefix before the message payload.
public static com.google.api.GoSettings parseDelimitedFrom(java.io.InputStream input)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.api.GoSettings parseDelimitedFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}
public static com.google.api.GoSettings parseFrom(com.google.protobuf.CodedInputStream input)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.api.GoSettings parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}
// Builders are obtained by cloning the default instance's builder so field
// defaults are consistently initialized.
public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.api.GoSettings prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
  // The default instance yields a fresh empty builder; any other instance is
  // merged into a new builder so it can be modified.
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
 *
 *
 * <pre>
 * Settings for Go client libraries.
 * </pre>
 *
 * Protobuf type {@code google.api.GoSettings}
 */
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
    implements
    // @@protoc_insertion_point(builder_implements:google.api.GoSettings)
    com.google.api.GoSettingsOrBuilder {
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.api.ClientProto.internal_static_google_api_GoSettings_descriptor;
  }
  // Reflection hook for read-only access to map fields (field 2 only).
  @SuppressWarnings({"rawtypes"})
  protected com.google.protobuf.MapFieldReflectionAccessor internalGetMapFieldReflection(
      int number) {
    switch (number) {
      case 2:
        return internalGetRenamedServices();
      default:
        throw new RuntimeException("Invalid map field number: " + number);
    }
  }
  // Reflection hook for mutable access to map fields (field 2 only).
  @SuppressWarnings({"rawtypes"})
  protected com.google.protobuf.MapFieldReflectionAccessor internalGetMutableMapFieldReflection(
      int number) {
    switch (number) {
      case 2:
        return internalGetMutableRenamedServices();
      default:
        throw new RuntimeException("Invalid map field number: " + number);
    }
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.api.ClientProto.internal_static_google_api_GoSettings_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.api.GoSettings.class, com.google.api.GoSettings.Builder.class);
  }
  // Construct using com.google.api.GoSettings.newBuilder()
  private Builder() {
    maybeForceBuilderInitialization();
  }
  private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    super(parent);
    maybeForceBuilderInitialization();
  }
  private void maybeForceBuilderInitialization() {
    // Eagerly create nested builders only when the runtime requires it
    // (e.g. for builder-parent change notifications).
    if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
      getCommonFieldBuilder();
    }
  }
  @java.lang.Override
  public Builder clear() {
    // Resets all fields and presence bits to their defaults.
    super.clear();
    bitField0_ = 0;
    common_ = null;
    if (commonBuilder_ != null) {
      commonBuilder_.dispose();
      commonBuilder_ = null;
    }
    internalGetMutableRenamedServices().clear();
    return this;
  }
  @java.lang.Override
  public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
    return com.google.api.ClientProto.internal_static_google_api_GoSettings_descriptor;
  }
  @java.lang.Override
  public com.google.api.GoSettings getDefaultInstanceForType() {
    return com.google.api.GoSettings.getDefaultInstance();
  }
  @java.lang.Override
  public com.google.api.GoSettings build() {
    com.google.api.GoSettings result = buildPartial();
    if (!result.isInitialized()) {
      throw newUninitializedMessageException(result);
    }
    return result;
  }
  @java.lang.Override
  public com.google.api.GoSettings buildPartial() {
    com.google.api.GoSettings result = new com.google.api.GoSettings(this);
    if (bitField0_ != 0) {
      buildPartial0(result);
    }
    onBuilt();
    return result;
  }
  private void buildPartial0(com.google.api.GoSettings result) {
    // Copies set fields from the builder into the message, translating builder
    // presence bits into message presence bits.
    int from_bitField0_ = bitField0_;
    int to_bitField0_ = 0;
    if (((from_bitField0_ & 0x00000001) != 0)) {
      result.common_ = commonBuilder_ == null ? common_ : commonBuilder_.build();
      to_bitField0_ |= 0x00000001;
    }
    if (((from_bitField0_ & 0x00000002) != 0)) {
      result.renamedServices_ = internalGetRenamedServices();
      result.renamedServices_.makeImmutable();
    }
    result.bitField0_ |= to_bitField0_;
  }
  @java.lang.Override
  public Builder clone() {
    return super.clone();
  }
  @java.lang.Override
  public Builder setField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.setField(field, value);
  }
  @java.lang.Override
  public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
    return super.clearField(field);
  }
  @java.lang.Override
  public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
    return super.clearOneof(oneof);
  }
  @java.lang.Override
  public Builder setRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
    return super.setRepeatedField(field, index, value);
  }
  @java.lang.Override
  public Builder addRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.addRepeatedField(field, value);
  }
  @java.lang.Override
  public Builder mergeFrom(com.google.protobuf.Message other) {
    if (other instanceof com.google.api.GoSettings) {
      return mergeFrom((com.google.api.GoSettings) other);
    } else {
      super.mergeFrom(other);
      return this;
    }
  }
  public Builder mergeFrom(com.google.api.GoSettings other) {
    // Field-wise merge: set sub-messages are merged recursively, map entries
    // from `other` overwrite same-key entries here.
    if (other == com.google.api.GoSettings.getDefaultInstance()) return this;
    if (other.hasCommon()) {
      mergeCommon(other.getCommon());
    }
    internalGetMutableRenamedServices().mergeFrom(other.internalGetRenamedServices());
    bitField0_ |= 0x00000002;
    this.mergeUnknownFields(other.getUnknownFields());
    onChanged();
    return this;
  }
  @java.lang.Override
  public final boolean isInitialized() {
    return true;
  }
  @java.lang.Override
  public Builder mergeFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    // Wire-format parse loop: dispatches on the tag (field number << 3 | wire
    // type); tag 0 or an end-group tag terminates the loop.
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 10:
            {
              input.readMessage(getCommonFieldBuilder().getBuilder(), extensionRegistry);
              bitField0_ |= 0x00000001;
              break;
            } // case 10
          case 18:
            {
              com.google.protobuf.MapEntry<java.lang.String, java.lang.String> renamedServices__ =
                  input.readMessage(
                      RenamedServicesDefaultEntryHolder.defaultEntry.getParserForType(),
                      extensionRegistry);
              internalGetMutableRenamedServices()
                  .getMutableMap()
                  .put(renamedServices__.getKey(), renamedServices__.getValue());
              bitField0_ |= 0x00000002;
              break;
            } // case 18
          default:
            {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
        } // switch (tag)
      } // while (!done)
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.unwrapIOException();
    } finally {
      onChanged();
    } // finally
    return this;
  }
  private int bitField0_;
  private com.google.api.CommonLanguageSettings common_;
  // Lazily created single-field builder; once created, it owns the field value
  // and common_ is nulled out (see getCommonFieldBuilder()).
  private com.google.protobuf.SingleFieldBuilderV3<
          com.google.api.CommonLanguageSettings,
          com.google.api.CommonLanguageSettings.Builder,
          com.google.api.CommonLanguageSettingsOrBuilder>
      commonBuilder_;
  /**
   *
   *
   * <pre>
   * Some settings.
   * </pre>
   *
   * <code>.google.api.CommonLanguageSettings common = 1;</code>
   *
   * @return Whether the common field is set.
   */
  public boolean hasCommon() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   *
   *
   * <pre>
   * Some settings.
   * </pre>
   *
   * <code>.google.api.CommonLanguageSettings common = 1;</code>
   *
   * @return The common.
   */
  public com.google.api.CommonLanguageSettings getCommon() {
    if (commonBuilder_ == null) {
      return common_ == null
          ? com.google.api.CommonLanguageSettings.getDefaultInstance()
          : common_;
    } else {
      return commonBuilder_.getMessage();
    }
  }
  /**
   *
   *
   * <pre>
   * Some settings.
   * </pre>
   *
   * <code>.google.api.CommonLanguageSettings common = 1;</code>
   */
  public Builder setCommon(com.google.api.CommonLanguageSettings value) {
    if (commonBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      common_ = value;
    } else {
      commonBuilder_.setMessage(value);
    }
    bitField0_ |= 0x00000001;
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Some settings.
   * </pre>
   *
   * <code>.google.api.CommonLanguageSettings common = 1;</code>
   */
  public Builder setCommon(com.google.api.CommonLanguageSettings.Builder builderForValue) {
    if (commonBuilder_ == null) {
      common_ = builderForValue.build();
    } else {
      commonBuilder_.setMessage(builderForValue.build());
    }
    bitField0_ |= 0x00000001;
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Some settings.
   * </pre>
   *
   * <code>.google.api.CommonLanguageSettings common = 1;</code>
   */
  public Builder mergeCommon(com.google.api.CommonLanguageSettings value) {
    // Merges into an existing value when one is set; otherwise replaces it.
    if (commonBuilder_ == null) {
      if (((bitField0_ & 0x00000001) != 0)
          && common_ != null
          && common_ != com.google.api.CommonLanguageSettings.getDefaultInstance()) {
        getCommonBuilder().mergeFrom(value);
      } else {
        common_ = value;
      }
    } else {
      commonBuilder_.mergeFrom(value);
    }
    if (common_ != null) {
      bitField0_ |= 0x00000001;
      onChanged();
    }
    return this;
  }
  /**
   *
   *
   * <pre>
   * Some settings.
   * </pre>
   *
   * <code>.google.api.CommonLanguageSettings common = 1;</code>
   */
  public Builder clearCommon() {
    bitField0_ = (bitField0_ & ~0x00000001);
    common_ = null;
    if (commonBuilder_ != null) {
      commonBuilder_.dispose();
      commonBuilder_ = null;
    }
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Some settings.
   * </pre>
   *
   * <code>.google.api.CommonLanguageSettings common = 1;</code>
   */
  public com.google.api.CommonLanguageSettings.Builder getCommonBuilder() {
    // Marks the field as set because handing out the builder implies mutation.
    bitField0_ |= 0x00000001;
    onChanged();
    return getCommonFieldBuilder().getBuilder();
  }
  /**
   *
   *
   * <pre>
   * Some settings.
   * </pre>
   *
   * <code>.google.api.CommonLanguageSettings common = 1;</code>
   */
  public com.google.api.CommonLanguageSettingsOrBuilder getCommonOrBuilder() {
    if (commonBuilder_ != null) {
      return commonBuilder_.getMessageOrBuilder();
    } else {
      return common_ == null
          ? com.google.api.CommonLanguageSettings.getDefaultInstance()
          : common_;
    }
  }
  /**
   *
   *
   * <pre>
   * Some settings.
   * </pre>
   *
   * <code>.google.api.CommonLanguageSettings common = 1;</code>
   */
  private com.google.protobuf.SingleFieldBuilderV3<
          com.google.api.CommonLanguageSettings,
          com.google.api.CommonLanguageSettings.Builder,
          com.google.api.CommonLanguageSettingsOrBuilder>
      getCommonFieldBuilder() {
    if (commonBuilder_ == null) {
      commonBuilder_ =
          new com.google.protobuf.SingleFieldBuilderV3<
              com.google.api.CommonLanguageSettings,
              com.google.api.CommonLanguageSettings.Builder,
              com.google.api.CommonLanguageSettingsOrBuilder>(
              getCommon(), getParentForChildren(), isClean());
      // Ownership of the value moves to the field builder.
      common_ = null;
    }
    return commonBuilder_;
  }
  private com.google.protobuf.MapField<java.lang.String, java.lang.String> renamedServices_;
  private com.google.protobuf.MapField<java.lang.String, java.lang.String>
      internalGetRenamedServices() {
    if (renamedServices_ == null) {
      return com.google.protobuf.MapField.emptyMapField(
          RenamedServicesDefaultEntryHolder.defaultEntry);
    }
    return renamedServices_;
  }
  private com.google.protobuf.MapField<java.lang.String, java.lang.String>
      internalGetMutableRenamedServices() {
    // Lazily creates the map, copies it if it was frozen by a previous build,
    // and marks the field present.
    if (renamedServices_ == null) {
      renamedServices_ =
          com.google.protobuf.MapField.newMapField(
              RenamedServicesDefaultEntryHolder.defaultEntry);
    }
    if (!renamedServices_.isMutable()) {
      renamedServices_ = renamedServices_.copy();
    }
    bitField0_ |= 0x00000002;
    onChanged();
    return renamedServices_;
  }
  public int getRenamedServicesCount() {
    return internalGetRenamedServices().getMap().size();
  }
  /**
   *
   *
   * <pre>
   * Map of service names to renamed services. Keys are the package relative
   * service names and values are the name to be used for the service client
   * and call options.
   *
   * publishing:
   *   go_settings:
   *     renamed_services:
   *       Publisher: TopicAdmin
   * </pre>
   *
   * <code>map&lt;string, string&gt; renamed_services = 2;</code>
   */
  @java.lang.Override
  public boolean containsRenamedServices(java.lang.String key) {
    if (key == null) {
      throw new NullPointerException("map key");
    }
    return internalGetRenamedServices().getMap().containsKey(key);
  }
  /** Use {@link #getRenamedServicesMap()} instead. */
  @java.lang.Override
  @java.lang.Deprecated
  public java.util.Map<java.lang.String, java.lang.String> getRenamedServices() {
    return getRenamedServicesMap();
  }
  /**
   *
   *
   * <pre>
   * Map of service names to renamed services. Keys are the package relative
   * service names and values are the name to be used for the service client
   * and call options.
   *
   * publishing:
   *   go_settings:
   *     renamed_services:
   *       Publisher: TopicAdmin
   * </pre>
   *
   * <code>map&lt;string, string&gt; renamed_services = 2;</code>
   */
  @java.lang.Override
  public java.util.Map<java.lang.String, java.lang.String> getRenamedServicesMap() {
    return internalGetRenamedServices().getMap();
  }
  /**
   *
   *
   * <pre>
   * Map of service names to renamed services. Keys are the package relative
   * service names and values are the name to be used for the service client
   * and call options.
   *
   * publishing:
   *   go_settings:
   *     renamed_services:
   *       Publisher: TopicAdmin
   * </pre>
   *
   * <code>map&lt;string, string&gt; renamed_services = 2;</code>
   */
  @java.lang.Override
  public /* nullable */ java.lang.String getRenamedServicesOrDefault(
      java.lang.String key,
      /* nullable */
      java.lang.String defaultValue) {
    if (key == null) {
      throw new NullPointerException("map key");
    }
    java.util.Map<java.lang.String, java.lang.String> map = internalGetRenamedServices().getMap();
    return map.containsKey(key) ? map.get(key) : defaultValue;
  }
  /**
   *
   *
   * <pre>
   * Map of service names to renamed services. Keys are the package relative
   * service names and values are the name to be used for the service client
   * and call options.
   *
   * publishing:
   *   go_settings:
   *     renamed_services:
   *       Publisher: TopicAdmin
   * </pre>
   *
   * <code>map&lt;string, string&gt; renamed_services = 2;</code>
   */
  @java.lang.Override
  public java.lang.String getRenamedServicesOrThrow(java.lang.String key) {
    if (key == null) {
      throw new NullPointerException("map key");
    }
    java.util.Map<java.lang.String, java.lang.String> map = internalGetRenamedServices().getMap();
    if (!map.containsKey(key)) {
      throw new java.lang.IllegalArgumentException();
    }
    return map.get(key);
  }
  // Clears the map and its presence bit.
  public Builder clearRenamedServices() {
    bitField0_ = (bitField0_ & ~0x00000002);
    internalGetMutableRenamedServices().getMutableMap().clear();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Map of service names to renamed services. Keys are the package relative
   * service names and values are the name to be used for the service client
   * and call options.
   *
   * publishing:
   *   go_settings:
   *     renamed_services:
   *       Publisher: TopicAdmin
   * </pre>
   *
   * <code>map&lt;string, string&gt; renamed_services = 2;</code>
   */
  public Builder removeRenamedServices(java.lang.String key) {
    if (key == null) {
      throw new NullPointerException("map key");
    }
    internalGetMutableRenamedServices().getMutableMap().remove(key);
    return this;
  }
  /** Use alternate mutation accessors instead. */
  @java.lang.Deprecated
  public java.util.Map<java.lang.String, java.lang.String> getMutableRenamedServices() {
    bitField0_ |= 0x00000002;
    return internalGetMutableRenamedServices().getMutableMap();
  }
  /**
   *
   *
   * <pre>
   * Map of service names to renamed services. Keys are the package relative
   * service names and values are the name to be used for the service client
   * and call options.
   *
   * publishing:
   *   go_settings:
   *     renamed_services:
   *       Publisher: TopicAdmin
   * </pre>
   *
   * <code>map&lt;string, string&gt; renamed_services = 2;</code>
   */
  public Builder putRenamedServices(java.lang.String key, java.lang.String value) {
    if (key == null) {
      throw new NullPointerException("map key");
    }
    if (value == null) {
      throw new NullPointerException("map value");
    }
    internalGetMutableRenamedServices().getMutableMap().put(key, value);
    bitField0_ |= 0x00000002;
    return this;
  }
  /**
   *
   *
   * <pre>
   * Map of service names to renamed services. Keys are the package relative
   * service names and values are the name to be used for the service client
   * and call options.
   *
   * publishing:
   *   go_settings:
   *     renamed_services:
   *       Publisher: TopicAdmin
   * </pre>
   *
   * <code>map&lt;string, string&gt; renamed_services = 2;</code>
   */
  public Builder putAllRenamedServices(java.util.Map<java.lang.String, java.lang.String> values) {
    internalGetMutableRenamedServices().getMutableMap().putAll(values);
    bitField0_ |= 0x00000002;
    return this;
  }
  @java.lang.Override
  public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.setUnknownFields(unknownFields);
  }
  @java.lang.Override
  public final Builder mergeUnknownFields(
      final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.mergeUnknownFields(unknownFields);
  }
  // @@protoc_insertion_point(builder_scope:google.api.GoSettings)
}
// @@protoc_insertion_point(class_scope:google.api.GoSettings)
// Shared immutable default instance; returned by getDefaultInstance() and by
// unset message-typed accessors elsewhere.
private static final com.google.api.GoSettings DEFAULT_INSTANCE;
static {
  DEFAULT_INSTANCE = new com.google.api.GoSettings();
}
public static com.google.api.GoSettings getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Parser implementation that delegates to the Builder's wire-format loop and
// attaches the partially-parsed message to any thrown parse exception.
private static final com.google.protobuf.Parser<GoSettings> PARSER =
    new com.google.protobuf.AbstractParser<GoSettings>() {
      @java.lang.Override
      public GoSettings parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
public static com.google.protobuf.Parser<GoSettings> parser() {
  return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<GoSettings> getParserForType() {
  return PARSER;
}
@java.lang.Override
public com.google.api.GoSettings getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 35,532 | java-eventarc/proto-google-cloud-eventarc-v1/src/main/java/com/google/cloud/eventarc/v1/UpdateChannelRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/eventarc/v1/eventarc.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.eventarc.v1;
/**
*
*
* <pre>
* The request message for the UpdateChannel method.
* </pre>
*
* Protobuf type {@code google.cloud.eventarc.v1.UpdateChannelRequest}
*/
public final class UpdateChannelRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.eventarc.v1.UpdateChannelRequest)
UpdateChannelRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use UpdateChannelRequest.newBuilder() to construct.
private UpdateChannelRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}
// All singular fields default to unset/false; nothing to initialize eagerly.
private UpdateChannelRequest() {}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  // Reflective instantiation hook used by the protobuf runtime.
  return new UpdateChannelRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.eventarc.v1.EventarcProto
      .internal_static_google_cloud_eventarc_v1_UpdateChannelRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  // Binds the UpdateChannelRequest descriptor to the generated field accessors.
  return com.google.cloud.eventarc.v1.EventarcProto
      .internal_static_google_cloud_eventarc_v1_UpdateChannelRequest_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.eventarc.v1.UpdateChannelRequest.class,
          com.google.cloud.eventarc.v1.UpdateChannelRequest.Builder.class);
}
// Presence bits: 0x1 tracks `channel`, 0x2 tracks `update_mask`.
private int bitField0_;
public static final int CHANNEL_FIELD_NUMBER = 1;
// Message field; null together with a cleared presence bit means "unset".
private com.google.cloud.eventarc.v1.Channel channel_;
/**
 *
 *
 * <pre>
 * The channel to be updated.
 * </pre>
 *
 * <code>.google.cloud.eventarc.v1.Channel channel = 1;</code>
 *
 * @return Whether the channel field is set.
 */
@java.lang.Override
public boolean hasChannel() {
  return ((bitField0_ & 0x00000001) != 0);
}
/**
 *
 *
 * <pre>
 * The channel to be updated.
 * </pre>
 *
 * <code>.google.cloud.eventarc.v1.Channel channel = 1;</code>
 *
 * @return The channel.
 */
@java.lang.Override
public com.google.cloud.eventarc.v1.Channel getChannel() {
  // Returns the default instance (never null) when the field is unset.
  return channel_ == null ? com.google.cloud.eventarc.v1.Channel.getDefaultInstance() : channel_;
}
/**
 *
 *
 * <pre>
 * The channel to be updated.
 * </pre>
 *
 * <code>.google.cloud.eventarc.v1.Channel channel = 1;</code>
 */
@java.lang.Override
public com.google.cloud.eventarc.v1.ChannelOrBuilder getChannelOrBuilder() {
  return channel_ == null ? com.google.cloud.eventarc.v1.Channel.getDefaultInstance() : channel_;
}
public static final int UPDATE_MASK_FIELD_NUMBER = 2;
// Message field; null together with a cleared presence bit means "unset".
private com.google.protobuf.FieldMask updateMask_;
/**
 *
 *
 * <pre>
 * The fields to be updated; only fields explicitly provided are updated.
 * If no field mask is provided, all provided fields in the request are
 * updated. To update all fields, provide a field mask of "*".
 * </pre>
 *
 * <code>.google.protobuf.FieldMask update_mask = 2;</code>
 *
 * @return Whether the updateMask field is set.
 */
@java.lang.Override
public boolean hasUpdateMask() {
  return ((bitField0_ & 0x00000002) != 0);
}
/**
 *
 *
 * <pre>
 * The fields to be updated; only fields explicitly provided are updated.
 * If no field mask is provided, all provided fields in the request are
 * updated. To update all fields, provide a field mask of "*".
 * </pre>
 *
 * <code>.google.protobuf.FieldMask update_mask = 2;</code>
 *
 * @return The updateMask.
 */
@java.lang.Override
public com.google.protobuf.FieldMask getUpdateMask() {
  // Returns the default instance (never null) when the field is unset.
  return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
/**
 *
 *
 * <pre>
 * The fields to be updated; only fields explicitly provided are updated.
 * If no field mask is provided, all provided fields in the request are
 * updated. To update all fields, provide a field mask of "*".
 * </pre>
 *
 * <code>.google.protobuf.FieldMask update_mask = 2;</code>
 */
@java.lang.Override
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
  return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
public static final int VALIDATE_ONLY_FIELD_NUMBER = 3;
// Scalar bool field; proto3 default is false and carries no presence bit.
private boolean validateOnly_ = false;
/**
 *
 *
 * <pre>
 * Optional. If set, validate the request and preview the review, but do not
 * post it.
 * </pre>
 *
 * <code>bool validate_only = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
 *
 * @return The validateOnly.
 */
@java.lang.Override
public boolean getValidateOnly() {
  return validateOnly_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getChannel());
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeMessage(2, getUpdateMask());
}
if (validateOnly_ != false) {
output.writeBool(3, validateOnly_);
}
getUnknownFields().writeTo(output);
}
  @java.lang.Override
  public int getSerializedSize() {
    // memoizedSize == -1 means "not computed yet"; any other value is a
    // cached result (safe to cache because the message is immutable).
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    // Mirror of writeTo(): only count fields that would actually be emitted.
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getChannel());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask());
    }
    if (validateOnly_ != false) {
      size += com.google.protobuf.CodedOutputStream.computeBoolSize(3, validateOnly_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.eventarc.v1.UpdateChannelRequest)) {
return super.equals(obj);
}
com.google.cloud.eventarc.v1.UpdateChannelRequest other =
(com.google.cloud.eventarc.v1.UpdateChannelRequest) obj;
if (hasChannel() != other.hasChannel()) return false;
if (hasChannel()) {
if (!getChannel().equals(other.getChannel())) return false;
}
if (hasUpdateMask() != other.hasUpdateMask()) return false;
if (hasUpdateMask()) {
if (!getUpdateMask().equals(other.getUpdateMask())) return false;
}
if (getValidateOnly() != other.getValidateOnly()) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasChannel()) {
hash = (37 * hash) + CHANNEL_FIELD_NUMBER;
hash = (53 * hash) + getChannel().hashCode();
}
if (hasUpdateMask()) {
hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
hash = (53 * hash) + getUpdateMask().hashCode();
}
hash = (37 * hash) + VALIDATE_ONLY_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getValidateOnly());
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
  // Standard generated parse entry points. Byte-based inputs delegate to
  // PARSER directly; stream-based inputs go through the GeneratedMessageV3
  // helpers, which translate protocol errors into IOExceptions consistently.
  public static com.google.cloud.eventarc.v1.UpdateChannelRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.eventarc.v1.UpdateChannelRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.eventarc.v1.UpdateChannelRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.eventarc.v1.UpdateChannelRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.eventarc.v1.UpdateChannelRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.eventarc.v1.UpdateChannelRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.eventarc.v1.UpdateChannelRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.eventarc.v1.UpdateChannelRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Delimited variants read a leading varint length prefix before the message
  // body, allowing several messages to be read back-to-back from one stream.
  public static com.google.cloud.eventarc.v1.UpdateChannelRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.eventarc.v1.UpdateChannelRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.eventarc.v1.UpdateChannelRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.eventarc.v1.UpdateChannelRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  // Builders are always derived from the shared default instance so field
  // defaults come from a single canonical object.
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.cloud.eventarc.v1.UpdateChannelRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // The default instance yields a fresh empty builder; any other instance
    // is copied into the new builder via mergeFrom.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * The request message for the UpdateChannel method.
   * </pre>
   *
   * Protobuf type {@code google.cloud.eventarc.v1.UpdateChannelRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.eventarc.v1.UpdateChannelRequest)
      com.google.cloud.eventarc.v1.UpdateChannelRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.eventarc.v1.EventarcProto
          .internal_static_google_cloud_eventarc_v1_UpdateChannelRequest_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.eventarc.v1.EventarcProto
          .internal_static_google_cloud_eventarc_v1_UpdateChannelRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.eventarc.v1.UpdateChannelRequest.class,
              com.google.cloud.eventarc.v1.UpdateChannelRequest.Builder.class);
    }
    // Construct using com.google.cloud.eventarc.v1.UpdateChannelRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      // Eagerly create nested builders only when the runtime requires it
      // (i.e. when message reflection will be used on this builder).
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getChannelFieldBuilder();
        getUpdateMaskFieldBuilder();
      }
    }
    @java.lang.Override
    public Builder clear() {
      // Reset all presence bits and release any nested builders.
      super.clear();
      bitField0_ = 0;
      channel_ = null;
      if (channelBuilder_ != null) {
        channelBuilder_.dispose();
        channelBuilder_ = null;
      }
      updateMask_ = null;
      if (updateMaskBuilder_ != null) {
        updateMaskBuilder_.dispose();
        updateMaskBuilder_ = null;
      }
      validateOnly_ = false;
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.eventarc.v1.EventarcProto
          .internal_static_google_cloud_eventarc_v1_UpdateChannelRequest_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.eventarc.v1.UpdateChannelRequest getDefaultInstanceForType() {
      return com.google.cloud.eventarc.v1.UpdateChannelRequest.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.eventarc.v1.UpdateChannelRequest build() {
      com.google.cloud.eventarc.v1.UpdateChannelRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.eventarc.v1.UpdateChannelRequest buildPartial() {
      com.google.cloud.eventarc.v1.UpdateChannelRequest result =
          new com.google.cloud.eventarc.v1.UpdateChannelRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    private void buildPartial0(com.google.cloud.eventarc.v1.UpdateChannelRequest result) {
      // Copy each set field into the immutable message, translating the
      // builder's presence bits into the message's presence bits. Note that
      // validate_only (bit 0x4 here) has no presence bit on the message side.
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.channel_ = channelBuilder_ == null ? channel_ : channelBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
        to_bitField0_ |= 0x00000002;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.validateOnly_ = validateOnly_;
      }
      result.bitField0_ |= to_bitField0_;
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      // Fast path for same-type merges; otherwise fall back to reflection.
      if (other instanceof com.google.cloud.eventarc.v1.UpdateChannelRequest) {
        return mergeFrom((com.google.cloud.eventarc.v1.UpdateChannelRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    public Builder mergeFrom(com.google.cloud.eventarc.v1.UpdateChannelRequest other) {
      // Standard proto merge semantics: set fields in `other` overwrite or
      // (for messages) recursively merge into this builder's fields.
      if (other == com.google.cloud.eventarc.v1.UpdateChannelRequest.getDefaultInstance())
        return this;
      if (other.hasChannel()) {
        mergeChannel(other.getChannel());
      }
      if (other.hasUpdateMask()) {
        mergeUpdateMask(other.getUpdateMask());
      }
      if (other.getValidateOnly() != false) {
        setValidateOnly(other.getValidateOnly());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          // Tag values are (field_number << 3) | wire_type:
          // 10 = channel (1, length-delimited), 18 = update_mask (2,
          // length-delimited), 24 = validate_only (3, varint).
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                input.readMessage(getChannelFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 24:
              {
                validateOnly_ = input.readBool();
                bitField0_ |= 0x00000004;
                break;
              } // case 24
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Presence bits: 0x1 = channel, 0x2 = update_mask, 0x4 = validate_only.
    private int bitField0_;
    private com.google.cloud.eventarc.v1.Channel channel_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.eventarc.v1.Channel,
            com.google.cloud.eventarc.v1.Channel.Builder,
            com.google.cloud.eventarc.v1.ChannelOrBuilder>
        channelBuilder_;
    /**
     *
     *
     * <pre>
     * The channel to be updated.
     * </pre>
     *
     * <code>.google.cloud.eventarc.v1.Channel channel = 1;</code>
     *
     * @return Whether the channel field is set.
     */
    public boolean hasChannel() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     *
     *
     * <pre>
     * The channel to be updated.
     * </pre>
     *
     * <code>.google.cloud.eventarc.v1.Channel channel = 1;</code>
     *
     * @return The channel.
     */
    public com.google.cloud.eventarc.v1.Channel getChannel() {
      if (channelBuilder_ == null) {
        return channel_ == null
            ? com.google.cloud.eventarc.v1.Channel.getDefaultInstance()
            : channel_;
      } else {
        return channelBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * The channel to be updated.
     * </pre>
     *
     * <code>.google.cloud.eventarc.v1.Channel channel = 1;</code>
     */
    public Builder setChannel(com.google.cloud.eventarc.v1.Channel value) {
      if (channelBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        channel_ = value;
      } else {
        channelBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The channel to be updated.
     * </pre>
     *
     * <code>.google.cloud.eventarc.v1.Channel channel = 1;</code>
     */
    public Builder setChannel(com.google.cloud.eventarc.v1.Channel.Builder builderForValue) {
      if (channelBuilder_ == null) {
        channel_ = builderForValue.build();
      } else {
        channelBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The channel to be updated.
     * </pre>
     *
     * <code>.google.cloud.eventarc.v1.Channel channel = 1;</code>
     */
    public Builder mergeChannel(com.google.cloud.eventarc.v1.Channel value) {
      // Merge recursively if a non-default channel is already present;
      // otherwise simply adopt the incoming value.
      if (channelBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)
            && channel_ != null
            && channel_ != com.google.cloud.eventarc.v1.Channel.getDefaultInstance()) {
          getChannelBuilder().mergeFrom(value);
        } else {
          channel_ = value;
        }
      } else {
        channelBuilder_.mergeFrom(value);
      }
      if (channel_ != null) {
        bitField0_ |= 0x00000001;
        onChanged();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The channel to be updated.
     * </pre>
     *
     * <code>.google.cloud.eventarc.v1.Channel channel = 1;</code>
     */
    public Builder clearChannel() {
      bitField0_ = (bitField0_ & ~0x00000001);
      channel_ = null;
      if (channelBuilder_ != null) {
        channelBuilder_.dispose();
        channelBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The channel to be updated.
     * </pre>
     *
     * <code>.google.cloud.eventarc.v1.Channel channel = 1;</code>
     */
    public com.google.cloud.eventarc.v1.Channel.Builder getChannelBuilder() {
      bitField0_ |= 0x00000001;
      onChanged();
      return getChannelFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * The channel to be updated.
     * </pre>
     *
     * <code>.google.cloud.eventarc.v1.Channel channel = 1;</code>
     */
    public com.google.cloud.eventarc.v1.ChannelOrBuilder getChannelOrBuilder() {
      if (channelBuilder_ != null) {
        return channelBuilder_.getMessageOrBuilder();
      } else {
        return channel_ == null
            ? com.google.cloud.eventarc.v1.Channel.getDefaultInstance()
            : channel_;
      }
    }
    /**
     *
     *
     * <pre>
     * The channel to be updated.
     * </pre>
     *
     * <code>.google.cloud.eventarc.v1.Channel channel = 1;</code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.eventarc.v1.Channel,
            com.google.cloud.eventarc.v1.Channel.Builder,
            com.google.cloud.eventarc.v1.ChannelOrBuilder>
        getChannelFieldBuilder() {
      // Lazily created; once the nested builder exists it owns the value and
      // channel_ is nulled out to avoid two sources of truth.
      if (channelBuilder_ == null) {
        channelBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.eventarc.v1.Channel,
                com.google.cloud.eventarc.v1.Channel.Builder,
                com.google.cloud.eventarc.v1.ChannelOrBuilder>(
                getChannel(), getParentForChildren(), isClean());
        channel_ = null;
      }
      return channelBuilder_;
    }
    private com.google.protobuf.FieldMask updateMask_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>
        updateMaskBuilder_;
    /**
     *
     *
     * <pre>
     * The fields to be updated; only fields explicitly provided are updated.
     * If no field mask is provided, all provided fields in the request are
     * updated. To update all fields, provide a field mask of "*".
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     *
     * @return Whether the updateMask field is set.
     */
    public boolean hasUpdateMask() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     *
     *
     * <pre>
     * The fields to be updated; only fields explicitly provided are updated.
     * If no field mask is provided, all provided fields in the request are
     * updated. To update all fields, provide a field mask of "*".
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     *
     * @return The updateMask.
     */
    public com.google.protobuf.FieldMask getUpdateMask() {
      if (updateMaskBuilder_ == null) {
        return updateMask_ == null
            ? com.google.protobuf.FieldMask.getDefaultInstance()
            : updateMask_;
      } else {
        return updateMaskBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * The fields to be updated; only fields explicitly provided are updated.
     * If no field mask is provided, all provided fields in the request are
     * updated. To update all fields, provide a field mask of "*".
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     */
    public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
      if (updateMaskBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        updateMask_ = value;
      } else {
        updateMaskBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The fields to be updated; only fields explicitly provided are updated.
     * If no field mask is provided, all provided fields in the request are
     * updated. To update all fields, provide a field mask of "*".
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     */
    public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
      if (updateMaskBuilder_ == null) {
        updateMask_ = builderForValue.build();
      } else {
        updateMaskBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The fields to be updated; only fields explicitly provided are updated.
     * If no field mask is provided, all provided fields in the request are
     * updated. To update all fields, provide a field mask of "*".
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     */
    public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
      // Merge recursively if a non-default mask is already present;
      // otherwise simply adopt the incoming value.
      if (updateMaskBuilder_ == null) {
        if (((bitField0_ & 0x00000002) != 0)
            && updateMask_ != null
            && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
          getUpdateMaskBuilder().mergeFrom(value);
        } else {
          updateMask_ = value;
        }
      } else {
        updateMaskBuilder_.mergeFrom(value);
      }
      if (updateMask_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The fields to be updated; only fields explicitly provided are updated.
     * If no field mask is provided, all provided fields in the request are
     * updated. To update all fields, provide a field mask of "*".
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     */
    public Builder clearUpdateMask() {
      bitField0_ = (bitField0_ & ~0x00000002);
      updateMask_ = null;
      if (updateMaskBuilder_ != null) {
        updateMaskBuilder_.dispose();
        updateMaskBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The fields to be updated; only fields explicitly provided are updated.
     * If no field mask is provided, all provided fields in the request are
     * updated. To update all fields, provide a field mask of "*".
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     */
    public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
      bitField0_ |= 0x00000002;
      onChanged();
      return getUpdateMaskFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * The fields to be updated; only fields explicitly provided are updated.
     * If no field mask is provided, all provided fields in the request are
     * updated. To update all fields, provide a field mask of "*".
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     */
    public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
      if (updateMaskBuilder_ != null) {
        return updateMaskBuilder_.getMessageOrBuilder();
      } else {
        return updateMask_ == null
            ? com.google.protobuf.FieldMask.getDefaultInstance()
            : updateMask_;
      }
    }
    /**
     *
     *
     * <pre>
     * The fields to be updated; only fields explicitly provided are updated.
     * If no field mask is provided, all provided fields in the request are
     * updated. To update all fields, provide a field mask of "*".
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>
        getUpdateMaskFieldBuilder() {
      // Lazily created; once the nested builder exists it owns the value and
      // updateMask_ is nulled out to avoid two sources of truth.
      if (updateMaskBuilder_ == null) {
        updateMaskBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.protobuf.FieldMask,
                com.google.protobuf.FieldMask.Builder,
                com.google.protobuf.FieldMaskOrBuilder>(
                getUpdateMask(), getParentForChildren(), isClean());
        updateMask_ = null;
      }
      return updateMaskBuilder_;
    }
    private boolean validateOnly_;
    /**
     *
     *
     * <pre>
     * Optional. If set, validate the request and preview the review, but do not
     * post it.
     * </pre>
     *
     * <code>bool validate_only = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The validateOnly.
     */
    @java.lang.Override
    public boolean getValidateOnly() {
      return validateOnly_;
    }
    /**
     *
     *
     * <pre>
     * Optional. If set, validate the request and preview the review, but do not
     * post it.
     * </pre>
     *
     * <code>bool validate_only = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The validateOnly to set.
     * @return This builder for chaining.
     */
    public Builder setValidateOnly(boolean value) {
      validateOnly_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. If set, validate the request and preview the review, but do not
     * post it.
     * </pre>
     *
     * <code>bool validate_only = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearValidateOnly() {
      bitField0_ = (bitField0_ & ~0x00000004);
      validateOnly_ = false;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.cloud.eventarc.v1.UpdateChannelRequest)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.eventarc.v1.UpdateChannelRequest)
  // Singleton default instance: shared by getDefaultInstance(), newBuilder()
  // and the runtime whenever an unset message field must be read.
  private static final com.google.cloud.eventarc.v1.UpdateChannelRequest DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.eventarc.v1.UpdateChannelRequest();
  }
  public static com.google.cloud.eventarc.v1.UpdateChannelRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser delegates to Builder.mergeFrom; on failure it attaches the
  // partially-built message so callers can inspect what was parsed.
  private static final com.google.protobuf.Parser<UpdateChannelRequest> PARSER =
      new com.google.protobuf.AbstractParser<UpdateChannelRequest>() {
        @java.lang.Override
        public UpdateChannelRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<UpdateChannelRequest> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<UpdateChannelRequest> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.eventarc.v1.UpdateChannelRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
// ==== end of concatenated generated file: UpdateChannelRequest.java (google.cloud.eventarc.v1) ====
// ==== begin concatenated file: google/differential-privacy (35,899 bytes) java/tests/com/google/privacy/differentialprivacy/PreAggSelectPartitionTest.java ====
//
// Copyright 2020 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
package com.google.privacy.differentialprivacy;
import static com.google.common.truth.Truth.assertThat;
import static com.google.common.truth.Truth.assertWithMessage;
import static java.lang.Math.abs;
import static java.lang.Math.max;
import static org.junit.Assert.assertThrows;
import com.google.auto.value.AutoValue;
import com.google.privacy.differentialprivacy.proto.SummaryOuterClass.PreAggSelectPartitionSummary;
import com.google.protobuf.InvalidProtocolBufferException;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Random;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.runners.Enclosed;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import org.junit.runners.Parameterized;
@RunWith(Enclosed.class)
public class PreAggSelectPartitionTest {
  // Shared privacy parameters used across the tests below. "HIGH" values make
  // partition selection (nearly) deterministic-keep; "LOW" values make it
  // (nearly) deterministic-drop, which lets probabilistic behavior be asserted.
  private static final double EPSILON = Math.log(2);
  private static final double LOW_EPSILON = Math.log(1.5);
  private static final double HIGH_EPSILON = 50;
  private static final double HIGH_DELTA = 1 - 1e-15;
  private static final double DELTA = 0.1;
  private static final double LOW_DELTA = 1e-200;
  private static final int ONE_PARTITION_CONTRIBUTED = 1;
@RunWith(JUnit4.class)
public static final class NonParameterizedTests {
    private PreAggSelectPartition preAggSelectPartition;
    @Before
    public void setUp() {
      // Fresh instance per test: PreAggSelectPartition is single-use — the
      // result and serialization methods may each be consumed at most once.
      preAggSelectPartition = getPreAggSelectPartitionBuilderWithFields().build();
    }
    @Test
    public void increment_calledAfterShouldKeepPartition_throwsException() {
      // Once the result has been consumed, further mutation must be rejected.
      preAggSelectPartition.shouldKeepPartition();
      assertThrows(IllegalStateException.class, () -> preAggSelectPartition.increment());
    }
    @Test
    public void increment_calledAfterSerialize_throwsException() {
      // Serialization also finalizes the aggregation state.
      preAggSelectPartition.getSerializableSummary();
      assertThrows(IllegalStateException.class, () -> preAggSelectPartition.increment());
    }
    @Test
    public void incrementBy_allowsNegativeValues() {
      PreAggSelectPartition largeDeltaPreAggSelectPartition =
          PreAggSelectPartition.builder()
              .epsilon(HIGH_EPSILON)
              .delta(HIGH_DELTA)
              .maxPartitionsContributed(1)
              .build();
      // We can't access the value of the count, so instead we test that adding and subtracting the
      // same large count results in a deterministic false when shouldKeepPartition is called.
      // If negative values are ignored, shouldKeepPartition would return true.
      largeDeltaPreAggSelectPartition.incrementBy(100);
      largeDeltaPreAggSelectPartition.incrementBy(-100);
      assertThat(largeDeltaPreAggSelectPartition.shouldKeepPartition()).isFalse();
    }
    // An attempt to compute the result several times should throw an exception.
    @Test
    public void computeResult_multipleCalls_throwsException() {
      preAggSelectPartition.shouldKeepPartition();
      assertThrows(IllegalStateException.class, preAggSelectPartition::shouldKeepPartition);
    }
    @Test
    public void getSerializableSummary_copiesIdsCountCorrectly() {
      // Three distinct increments must appear as an ids count of 3 in the proto.
      preAggSelectPartition.increment();
      preAggSelectPartition.increment();
      preAggSelectPartition.increment();
      PreAggSelectPartitionSummary summary = getSummary(preAggSelectPartition);
      assertThat(summary.getIdsCount()).isEqualTo(3);
    }
    @Test
    public void incrementByAndGetSerializableSummary_copiesIdsCountCorrectly() {
      // incrementBy contributions are additive.
      preAggSelectPartition.incrementBy(5);
      preAggSelectPartition.incrementBy(5);
      PreAggSelectPartitionSummary summary = getSummary(preAggSelectPartition);
      assertThat(summary.getIdsCount()).isEqualTo(10);
    }
    @Test
    public void getSerializableSummary_copiesZeroIdsCountCorrectly() {
      // No increments at all: the serialized count must be zero, not absent.
      PreAggSelectPartitionSummary summary = getSummary(preAggSelectPartition);
      assertThat(summary.getIdsCount()).isEqualTo(0);
    }
    @Test
    public void getSerializableSummary_copiesPreThresholdCorrectly() {
      PreAggSelectPartitionSummary summary =
          getSummary(getPreAggSelectPartitionBuilderWithFields().preThreshold(2).build());
      assertThat(summary.getPreThreshold()).isEqualTo(2);
    }
    @Test
    public void getSerializableSummary_copiesDefaultPreThresholdCorrectly() {
      // preThreshold defaults to 1 when not set on the builder.
      PreAggSelectPartitionSummary summary = getSummary(preAggSelectPartition);
      assertThat(summary.getPreThreshold()).isEqualTo(1);
    }
    @Test
    public void getSerializableSummary_calledAfterComputeResult_throwsException() {
      preAggSelectPartition.shouldKeepPartition();
      assertThrows(
          IllegalStateException.class, () -> preAggSelectPartition.getSerializableSummary());
    }
    @Test
    public void getSerializableSummary_multipleCalls_returnsSameSummary() {
      // Serialization must be deterministic and repeatable (unlike the
      // one-shot shouldKeepPartition result).
      preAggSelectPartition =
          PreAggSelectPartition.builder()
              .epsilon(EPSILON)
              .delta(DELTA)
              .maxPartitionsContributed(1)
              .build();
      preAggSelectPartition.increment();
      byte[] summary1 = preAggSelectPartition.getSerializableSummary();
      byte[] summary2 = preAggSelectPartition.getSerializableSummary();
      assertThat(summary1).isEqualTo(summary2);
    }
    @Test
    public void computeResult_calledAfterSerialize_throwsException() {
      preAggSelectPartition.getSerializableSummary();
      assertThrows(IllegalStateException.class, () -> preAggSelectPartition.shouldKeepPartition());
    }
    @Test
    public void getSerializableSummary_copiesEpsilonCorrectly() {
      // Privacy parameters must round-trip through the serialized summary so
      // that merge-compatibility checks can be enforced on deserialization.
      preAggSelectPartition = getPreAggSelectPartitionBuilderWithFields().epsilon(EPSILON).build();
      PreAggSelectPartitionSummary summary = getSummary(preAggSelectPartition);
      assertThat(summary.getEpsilon()).isEqualTo(EPSILON);
    }
    @Test
    public void getSerializableSummary_copiesDeltaCorrectly() {
      preAggSelectPartition = getPreAggSelectPartitionBuilderWithFields().delta(DELTA).build();
      PreAggSelectPartitionSummary summary = getSummary(preAggSelectPartition);
      assertThat(summary.getDelta()).isEqualTo(DELTA);
    }
    @Test
    public void getSerializableSummary_copiesMaxPartitionsContributedCorrectly() {
      int maxPartitionsContributed = 150;
      preAggSelectPartition =
          getPreAggSelectPartitionBuilderWithFields()
              .maxPartitionsContributed(maxPartitionsContributed)
              .build();
      PreAggSelectPartitionSummary summary = getSummary(preAggSelectPartition);
      assertThat(summary.getMaxPartitionsContributed()).isEqualTo(maxPartitionsContributed);
    }
    @Test
    public void merge_basicExample_sumsIdsCounts() {
      // Merging a serialized summary must add its ids count to the target.
      PreAggSelectPartition targetPreAggSelectPartition =
          getPreAggSelectPartitionBuilderWithFields().delta(0.3).build();
      targetPreAggSelectPartition.increment();
      PreAggSelectPartition sourcePreAggSelectPartition =
          getPreAggSelectPartitionBuilderWithFields().delta(0.3).build();
      sourcePreAggSelectPartition.increment();
      sourcePreAggSelectPartition.increment();
      sourcePreAggSelectPartition.increment();
      targetPreAggSelectPartition.mergeWith(sourcePreAggSelectPartition.getSerializableSummary());
      // We expect it always be true because the total number of ids in the merged object will be
      // equal to 4 and for that params and that count the probability of keeping that partition is
      // equal to 1.
      assertThat(targetPreAggSelectPartition.shouldKeepPartition()).isTrue();
    }
    @Test
    public void merge_calledTwice_sumsIdsCounts() {
      // Multiple merges accumulate: 1 + 2 + 1 = 4 ids in total.
      PreAggSelectPartition targetPreAggSelectPartition =
          getPreAggSelectPartitionBuilderWithFields().delta(0.3).build();
      targetPreAggSelectPartition.increment();
      PreAggSelectPartition sourcePreAggSelectPartition1 =
          getPreAggSelectPartitionBuilderWithFields().delta(0.3).build();
      sourcePreAggSelectPartition1.increment();
      sourcePreAggSelectPartition1.increment();
      PreAggSelectPartition sourcePreAggSelectPartition2 =
          getPreAggSelectPartitionBuilderWithFields().delta(0.3).build();
      sourcePreAggSelectPartition2.increment();
      targetPreAggSelectPartition.mergeWith(sourcePreAggSelectPartition1.getSerializableSummary());
      targetPreAggSelectPartition.mergeWith(sourcePreAggSelectPartition2.getSerializableSummary());
      // We expect it always be true because the total number of ids in the merged object will be
      // equal to 4 and for that params and that count the probability of keeping that partition is
      // equal to 1.
      assertThat(targetPreAggSelectPartition.shouldKeepPartition()).isTrue();
    }
@Test
public void merge_differentEpsilon_throwsException() {
PreAggSelectPartition targetPreAggSelectPartition =
getPreAggSelectPartitionBuilderWithFields().epsilon(EPSILON).build();
PreAggSelectPartition sourcePreAggSelectPartition =
getPreAggSelectPartitionBuilderWithFields().epsilon(2 * EPSILON).build();
assertThrows(
IllegalArgumentException.class,
() ->
targetPreAggSelectPartition.mergeWith(
sourcePreAggSelectPartition.getSerializableSummary()));
}
@Test
public void merge_differentDelta_throwsException() {
PreAggSelectPartition targetPreAggSelectPartition =
getPreAggSelectPartitionBuilderWithFields().delta(DELTA).build();
PreAggSelectPartition sourcePreAggSelectPartition =
getPreAggSelectPartitionBuilderWithFields().delta(2 * DELTA).build();
assertThrows(
IllegalArgumentException.class,
() ->
targetPreAggSelectPartition.mergeWith(
sourcePreAggSelectPartition.getSerializableSummary()));
}
@Test
public void merge_differentMaxPartitionsContributed_throwsException() {
PreAggSelectPartition targetPreAggSelectPartition =
getPreAggSelectPartitionBuilderWithFields().maxPartitionsContributed(1).build();
PreAggSelectPartition sourcePreAggSelectPartition =
getPreAggSelectPartitionBuilderWithFields().maxPartitionsContributed(2).build();
assertThrows(
IllegalArgumentException.class,
() ->
targetPreAggSelectPartition.mergeWith(
sourcePreAggSelectPartition.getSerializableSummary()));
}
@Test
public void merge_differentPreThreshold_throwsException() {
PreAggSelectPartition targetPreAggSelectPartition =
getPreAggSelectPartitionBuilderWithFields().preThreshold(1).build();
PreAggSelectPartition sourcePreAggSelectPartition =
getPreAggSelectPartitionBuilderWithFields().preThreshold(2).build();
assertThrows(
IllegalArgumentException.class,
() ->
targetPreAggSelectPartition.mergeWith(
sourcePreAggSelectPartition.getSerializableSummary()));
}
    @Test
    public void merge_calledAfterComputeResult_onTargetCount_throwsException() {
      PreAggSelectPartition targetPreAggSelectPartition =
          getPreAggSelectPartitionBuilderWithFields().build();
      PreAggSelectPartition sourcePreAggSelectPartition =
          getPreAggSelectPartitionBuilderWithFields().build();
      // Computing the result on the target finalizes it; merging into it afterwards is illegal.
      targetPreAggSelectPartition.shouldKeepPartition();
      assertThrows(
          IllegalStateException.class,
          () ->
              targetPreAggSelectPartition.mergeWith(
                  sourcePreAggSelectPartition.getSerializableSummary()));
    }
    @Test
    public void merge_calledAfterComputeResult_onSourceCount_throwsException() {
      PreAggSelectPartition targetPreAggSelectPartition =
          getPreAggSelectPartitionBuilderWithFields().build();
      PreAggSelectPartition sourcePreAggSelectPartition =
          getPreAggSelectPartitionBuilderWithFields().build();
      // Computing the result on the source finalizes it; serializing/merging it afterwards is
      // illegal.
      sourcePreAggSelectPartition.shouldKeepPartition();
      assertThrows(
          IllegalStateException.class,
          () ->
              targetPreAggSelectPartition.mergeWith(
                  sourcePreAggSelectPartition.getSerializableSummary()));
    }
@Test
public void sumExpPowers_oneValue() {
double expectedResult = 2.718281828459045;
double actualResult = preAggSelectPartition.sumExpPowers(1, 1, 1);
assertThat(actualResult)
.isWithin(getTolerance(expectedResult, actualResult))
.of(expectedResult);
}
@Test
public void sumExpPowers_threeValues() {
double expectedResult = 11.107337927389695;
double actualResult = preAggSelectPartition.sumExpPowers(1, 0, 3);
assertThat(actualResult)
.isWithin(getTolerance(expectedResult, actualResult))
.of(expectedResult);
}
@Test
public void sumExpPowers_negativeMinPower() {
double expectedResult = 0.5032147244080551;
double actualResult = preAggSelectPartition.sumExpPowers(1, -2, 2);
assertThat(actualResult)
.isWithin(getTolerance(expectedResult, actualResult))
.of(expectedResult);
}
@Test
public void sumExpPowers_nonIntegerEpsilon() {
double expectedResult = 13;
double actualResult = preAggSelectPartition.sumExpPowers(Math.log(3), 0, 3);
assertThat(actualResult)
.isWithin(getTolerance(expectedResult, actualResult))
.of(expectedResult);
}
@Test
public void sumExpPowers_largeEpsilon_positivePowers() {
assertThat(preAggSelectPartition.sumExpPowers(Double.MAX_VALUE, 1, 5)).isPositiveInfinity();
}
@Test
public void sumExpPowers_largeEpsilon_negativePowers() {
double expectedResult = 0;
double actualResult = preAggSelectPartition.sumExpPowers(Double.MAX_VALUE, -5, 3);
assertThat(actualResult)
.isWithin(getTolerance(expectedResult, actualResult))
.of(expectedResult);
}
@Test
public void sumExpPowers_verySmallEpsilon() {
double expectedResult = 100;
// exp(-epsilon) = 0
double actualResult = preAggSelectPartition.sumExpPowers(1e-100, 0, 100);
assertThat(actualResult)
.isWithin(getTolerance(expectedResult, actualResult))
.of(expectedResult);
}
@Test
public void shouldSelectPartition_zeroIds_neverTrue() {
assertThat(preAggSelectPartition.getKeepPartitionProbability()).isEqualTo(0);
}
    /**
     * This test is non-deterministic. The probability of keeping a partition with these parameters
     * is equal to 0.3. The number of trials is equal to 100000. The binomial distribution with
     * parameters (100000, 0.3) yields a value in the interval (29017, 30989) with probability at
     * least 1 - 1e-11. Dividing the interval endpoints by 100000, we see that the average is within
     * 0.3 +/- 0.01 with high probability. A single attempt therefore has a 1e-11 flakiness rate;
     * the test runs 2 independent attempts and passes if either succeeds, driving the flakiness
     * rate down to a truly negligible (1e-11)^2 = 1e-22.
     */
    @Test
    public void shouldKeepPartition_oneId_sometimesTrue() {
      int numTrials = 100_000;
      double expectedSelectionRate = 0.3;
      double tolerance = 0.01;
      List<Double> actualSelectionRates = new ArrayList<>();
      // Two independent attempts; see the Javadoc above for the flakiness analysis.
      for (int retry = 0; retry < 2; ++retry) {
        int selections = 0;
        for (int i = 0; i < numTrials; ++i) {
          // A fresh partition with a single privacy ID per trial.
          PreAggSelectPartition preAggSelectPartition =
              getPreAggSelectPartitionBuilderWithFields().delta(0.3).build();
          preAggSelectPartition.increment();
          if (preAggSelectPartition.shouldKeepPartition()) {
            selections++;
          }
        }
        double actualSelectionRate = (double) selections / (double) numTrials;
        actualSelectionRates.add(actualSelectionRate);
      }
      // anyMatch is used to make the test pass if at least one of the retries succeeds.
      assertThat(
              actualSelectionRates.stream()
                  .anyMatch(
                      actualSelectionRate ->
                          abs(expectedSelectionRate - actualSelectionRate) <= tolerance))
          .isTrue();
    }
    /**
     * This test is non-deterministic. The keep partition probability with these parameters is
     * equal to 0.3. The number of trials is equal to 100000. The binomial distribution with
     * parameters (100000, 0.3) yields a value in the interval (29017, 30989) with probability at
     * least 1 - 1e-11. Dividing the interval endpoints by 100000, we see that the average is
     * within 0.3 +/- 0.01 with high probability. A single attempt therefore has a 1e-11 flakiness
     * rate; the test runs 2 independent attempts and passes if either succeeds, driving the
     * flakiness rate down to a truly negligible (1e-11)^2 = 1e-22.
     */
    @Test
    public void shouldKeepPartition_oneId_twoPartitionsContributed_sometimesTrue() {
      int numTrials = 100_000;
      double expectedSelectionRate = 0.3;
      double tolerance = 0.01;
      List<Double> actualSelectionRates = new ArrayList<>();
      // Two independent attempts; see the Javadoc above for the flakiness analysis.
      for (int retry = 0; retry < 2; ++retry) {
        int selections = 0;
        for (int i = 0; i < numTrials; ++i) {
          // delta is doubled relative to the one-partition test because the budget is split
          // across maxPartitionsContributed = 2 — presumably yielding the same 0.3 per-partition
          // keep probability; TODO confirm against PreAggSelectPartition's budget split.
          PreAggSelectPartition preAggSelectPartition =
              getPreAggSelectPartitionBuilderWithFields()
                  .delta(0.6)
                  .maxPartitionsContributed(2)
                  .build();
          preAggSelectPartition.increment();
          if (preAggSelectPartition.shouldKeepPartition()) {
            selections++;
          }
        }
        double actualSelectionRate = (double) selections / (double) numTrials;
        actualSelectionRates.add(actualSelectionRate);
      }
      // anyMatch is used to make the test pass if at least one of the retries succeeds.
      assertThat(
              actualSelectionRates.stream()
                  .anyMatch(
                      actualSelectionRate ->
                          abs(expectedSelectionRate - actualSelectionRate) <= tolerance))
          .isTrue();
    }
/** This test is non-deterministic. It might fail with probability at most 1e-10. */
    @Test
    public void shouldKeepPartition_gaussianPartitionSelection_keepsLargePartitions() {
      // A partition with 55 unique contributors is far above the Gaussian threshold, so it is
      // kept with overwhelming probability.
      PreAggSelectPartition preAggSelectPartition =
          PreAggSelectPartition.builder()
              .epsilon(5)
              // delta is split equally between noise and threshold
              .delta(2e-15)
              // maxPartitionsContributed >= 3 triggers Gaussian thresholding
              .maxPartitionsContributed(5)
              .build();
      preAggSelectPartition.incrementBy(55);
      // For epsilon = 5, noiseDelta = thresholdDelta = 1e-15, l0Sensitivity = 5
      // the noise is at most +-24 with probability (1 - 1e-10) and the threshold is 30.
      // Hence, in the majority of the cases, a partition with >= 55 privacy IDs should be
      // kept after noise addition.
      // To compute the threshold value, use GaussianNoise.computeQuantile(...).
      assertThat(preAggSelectPartition.shouldKeepPartition()).isTrue();
    }
/**
* Similar to {@link #shouldKeepPartition_gaussianPartitionSelection_keepsLargePartitions} but
* this sets a larger initial number of unique contributions and a threshold value such that the
* thresholded number of contributions is the same. This test is non-deterministic. It might
* fail with probability at most 1e-10.
*/
    @Test
    public void shouldKeepPartition_gaussianPartitionSelectionPreThreshold_keepsLargePartitions() {
      // Same scenario as the non-preThreshold variant: preThreshold = 6 subtracts 5 from the 60
      // contributions, leaving the effective 55 used there.
      PreAggSelectPartition preAggSelectPartition =
          PreAggSelectPartition.builder()
              .epsilon(5)
              // delta is split equally between noise and threshold
              .delta(2e-15)
              // maxPartitionsContributed >= 3 triggers Gaussian thresholding
              .maxPartitionsContributed(5)
              .preThreshold(6)
              .build();
      preAggSelectPartition.incrementBy(60);
      // For epsilon = 5, noiseDelta = thresholdDelta = 1e-15, l0Sensitivity = 5
      // the noise is at most +-24 with probability (1 - 1e-10) and the threshold is 30.
      // Hence, in the majority of the cases, a partition with >= 55 privacy IDs should be
      // kept after noise addition.
      // With pre-thresholding = 6, contributions = (60 - (6 - 1)) = 55.
      // To compute the threshold value, use GaussianNoise.computeQuantile(...).
      assertThat(preAggSelectPartition.shouldKeepPartition()).isTrue();
    }
/** This test is non-deterministic. It might fail with probability at most 1e-10. */
    @Test
    public void shouldKeepPartition_gaussianPartitionSelection_dropsSmallPartitions() {
      // A partition with only 5 unique contributors is far below the Gaussian threshold, so it is
      // dropped with overwhelming probability.
      PreAggSelectPartition preAggSelectPartition =
          PreAggSelectPartition.builder()
              .epsilon(5)
              // delta is split equally between noise and threshold
              .delta(2e-15)
              // maxPartitionsContributed >= 3 triggers Gaussian thresholding
              .maxPartitionsContributed(5)
              .build();
      preAggSelectPartition.incrementBy(5);
      // For epsilon = 5, noiseDelta = thresholdDelta = 1e-15, l0Sensitivity = 5
      // the noise is at most +-24 with probability (1 - 1e-10) and the threshold is 30.
      // Hence, in the majority of the cases, a partition with <= 5 privacy IDs should be
      // dropped after noise addition.
      // To compute the threshold value, use GaussianNoise.computeQuantile(...).
      assertThat(preAggSelectPartition.shouldKeepPartition()).isFalse();
    }
/**
* Similar to {@link #shouldKeepPartition_gaussianPartitionSelection_dropsSmallPartitions} but
* this sets a larger initial number of unique contributions and a threshold value such that the
* thresholded number of contributions is the same. This test is non-deterministic. It might
* fail with probability at most 1e-10.
*/
    @Test
    public void shouldKeepPartition_gaussianPartitionSelectionPreThreshold_dropsSmallPartitions() {
      // Same scenario as the non-preThreshold variant: preThreshold = 6 subtracts 5 from the 10
      // contributions, leaving the effective 5 used there.
      PreAggSelectPartition preAggSelectPartition =
          PreAggSelectPartition.builder()
              .epsilon(5)
              // delta is split equally between noise and threshold
              .delta(2e-15)
              // maxPartitionsContributed >= 3 triggers Gaussian thresholding
              .maxPartitionsContributed(5)
              .preThreshold(6)
              .build();
      preAggSelectPartition.incrementBy(10);
      // For epsilon = 5, noiseDelta = thresholdDelta = 1e-15, l0Sensitivity = 5
      // the noise is at most +-24 with probability (1 - 1e-10) and the threshold is 30.
      // Hence, in the majority of the cases, a partition with <= 5 privacy IDs should be
      // dropped after noise addition.
      // With pre-thresholding = 6, contributions = (10 - (6 - 1)) = 5.
      // To compute the threshold value, use GaussianNoise.computeQuantile(...).
      assertThat(preAggSelectPartition.shouldKeepPartition()).isFalse();
    }
/**
* Keep partition probability for that parameters is equal to 1. So the algorithm should always
* return true for such partitions.
*/
@Test
public void shouldKeepPartition_fourIds_alwaysTrue() {
PreAggSelectPartition preAggSelectPartition =
getPreAggSelectPartitionBuilderWithFields().delta(0.3).build();
preAggSelectPartition.increment();
preAggSelectPartition.increment();
preAggSelectPartition.increment();
preAggSelectPartition.increment();
assertThat(preAggSelectPartition.shouldKeepPartition()).isTrue();
}
/**
* Set pre-thresholding larger than the number of contributions, it should return false
* deterministically.
*/
@Test
public void shouldKeepPartition_inputValueOneLessThanPreThreshold_returnsFalse() {
PreAggSelectPartition preAggSelectPartition =
getPreAggSelectPartitionBuilderWithFields().preThreshold(2).build();
preAggSelectPartition.increment();
assertThat(preAggSelectPartition.shouldKeepPartition()).isFalse();
}
/**
* Similar logic to {@link #shouldKeepPartition_oneId_sometimesTrue} but this tests
* preThresholding on 100 unique user contributions because pre-thresholding decrements the
* number of unique user contributions.
*/
    @Test
    public void shouldKeepPartition_nonDefaultPreThreshold_hasExpectedSelectionRate() {
      int numTrials = 100_000;
      double expectedSelectionRate = 0.3;
      double tolerance = 0.01;
      List<Double> actualSelectionRates = new ArrayList<>();
      // Two independent attempts, mirroring shouldKeepPartition_oneId_sometimesTrue.
      for (int retry = 0; retry < 2; ++retry) {
        int selections = 0;
        for (int i = 0; i < numTrials; ++i) {
          PreAggSelectPartition preAggSelectPartition =
              getPreAggSelectPartitionBuilderWithFields().delta(0.3).preThreshold(100).build();
          preAggSelectPartition.incrementBy(100);
          // With pre-thresholding = 100, contributions = (100 - (100 - 1)) = 1
          if (preAggSelectPartition.shouldKeepPartition()) {
            selections++;
          }
        }
        double actualSelectionRate = (double) selections / (double) numTrials;
        actualSelectionRates.add(actualSelectionRate);
      }
      // anyMatch is used to make the test pass if at least one of the retries succeeds.
      assertThat(
              actualSelectionRates.stream()
                  .anyMatch(
                      actualSelectionRate ->
                          abs(expectedSelectionRate - actualSelectionRate) <= tolerance))
          .isTrue();
    }
}
@RunWith(Parameterized.class)
public static class KeepPartitionProbabilityTests {
private final KeepPartitionProbabilityTestCase testCase;
public KeepPartitionProbabilityTests(KeepPartitionProbabilityTestCase testCase) {
this.testCase = testCase;
}
@Test
public void keepPartitionProbability() {
PreAggSelectPartition preAggSelectPartition =
PreAggSelectPartition.builder()
.epsilon(testCase.epsilon())
.delta(testCase.delta())
.maxPartitionsContributed(testCase.maxPartitionsContributed())
.build();
for (int i = 0; i < testCase.idsCount(); ++i) {
preAggSelectPartition.increment();
}
double actualProbability = preAggSelectPartition.getKeepPartitionProbability();
double tolerance = getTolerance(actualProbability, testCase.expectedProbability());
assertWithMessage(
"Pr[ε = %s, δ = %s, maxPartitionsContributed = %s, idsCount = %s] = %s",
testCase.epsilon(),
testCase.delta(),
testCase.maxPartitionsContributed(),
testCase.idsCount(),
actualProbability)
.that(actualProbability)
.isWithin(tolerance)
.of(testCase.expectedProbability());
}
@Parameterized.Parameters(name = "{index}: = failed")
public static Iterable<Object> getKeepPartitionProbabilityTestCases() {
// seed is equal to 1 to make it deterministic.
Random random = new Random(1);
return Arrays.asList(
// ε = ln2, δ = 0.1, 1 partition contributed, idsCount = 0 .. 5 .. (for counts >= 5 the
// probability is always = 1)
KeepPartitionProbabilityTestCase.create(EPSILON, DELTA, ONE_PARTITION_CONTRIBUTED, 0, 0),
KeepPartitionProbabilityTestCase.create(
EPSILON, DELTA, ONE_PARTITION_CONTRIBUTED, 1, 0.1),
KeepPartitionProbabilityTestCase.create(
EPSILON, DELTA, ONE_PARTITION_CONTRIBUTED, 2, 0.3),
KeepPartitionProbabilityTestCase.create(
EPSILON, DELTA, ONE_PARTITION_CONTRIBUTED, 3, 0.7),
KeepPartitionProbabilityTestCase.create(
EPSILON, DELTA, ONE_PARTITION_CONTRIBUTED, 4, 0.9),
KeepPartitionProbabilityTestCase.create(EPSILON, DELTA, ONE_PARTITION_CONTRIBUTED, 5, 1),
KeepPartitionProbabilityTestCase.create(
EPSILON, DELTA, ONE_PARTITION_CONTRIBUTED, random.nextInt(1000) + 5, 1),
// ε = 2 * ln2, δ = 0.2, 2 partitions contributed, idsCount = 0 .. 5 .. (for counts >= 5
// the probability is always = 1)
KeepPartitionProbabilityTestCase.create(
2 * EPSILON, 2 * DELTA, 2 * ONE_PARTITION_CONTRIBUTED, 0, 0),
KeepPartitionProbabilityTestCase.create(
2 * EPSILON, 2 * DELTA, 2 * ONE_PARTITION_CONTRIBUTED, 1, 0.1),
KeepPartitionProbabilityTestCase.create(
2 * EPSILON, 2 * DELTA, 2 * ONE_PARTITION_CONTRIBUTED, 2, 0.3),
KeepPartitionProbabilityTestCase.create(
2 * EPSILON, 2 * DELTA, 2 * ONE_PARTITION_CONTRIBUTED, 3, 0.7),
KeepPartitionProbabilityTestCase.create(
2 * EPSILON, 2 * DELTA, 2 * ONE_PARTITION_CONTRIBUTED, 4, 0.9),
KeepPartitionProbabilityTestCase.create(
2 * EPSILON, 2 * DELTA, 2 * ONE_PARTITION_CONTRIBUTED, 5, 1),
KeepPartitionProbabilityTestCase.create(
2 * EPSILON, 2 * DELTA, 2 * ONE_PARTITION_CONTRIBUTED, random.nextInt(1000) + 5, 1),
// ε = ln(1.5), δ = 0.1, 1 partition contributed, idsCount = 0 .. 7 .. (for counts >= 7
// the probability is always = 1)
KeepPartitionProbabilityTestCase.create(
LOW_EPSILON, DELTA, ONE_PARTITION_CONTRIBUTED, 0, 0),
KeepPartitionProbabilityTestCase.create(
LOW_EPSILON, DELTA, ONE_PARTITION_CONTRIBUTED, 1, 0.1),
KeepPartitionProbabilityTestCase.create(
LOW_EPSILON, DELTA, ONE_PARTITION_CONTRIBUTED, 2, 0.25),
KeepPartitionProbabilityTestCase.create(
LOW_EPSILON, DELTA, ONE_PARTITION_CONTRIBUTED, 3, 0.475),
KeepPartitionProbabilityTestCase.create(
LOW_EPSILON, DELTA, ONE_PARTITION_CONTRIBUTED, 4, 0.716666666667),
KeepPartitionProbabilityTestCase.create(
LOW_EPSILON, DELTA, ONE_PARTITION_CONTRIBUTED, 5, 0.877777777778),
KeepPartitionProbabilityTestCase.create(
LOW_EPSILON, DELTA, ONE_PARTITION_CONTRIBUTED, 6, 0.985185185185),
KeepPartitionProbabilityTestCase.create(
LOW_EPSILON, DELTA, ONE_PARTITION_CONTRIBUTED, 7, 1),
KeepPartitionProbabilityTestCase.create(
LOW_EPSILON, DELTA, ONE_PARTITION_CONTRIBUTED, random.nextInt(1000) + 7, 1),
// ε = 50, δ = 1e-200, 1 partition contributed, idsCount = 0 .. 11 .. (for counts >= 11
// the probability is always = 1)
KeepPartitionProbabilityTestCase.create(
HIGH_EPSILON, LOW_DELTA, ONE_PARTITION_CONTRIBUTED, 0, 0),
KeepPartitionProbabilityTestCase.create(
HIGH_EPSILON, LOW_DELTA, ONE_PARTITION_CONTRIBUTED, 1, 1e-200),
KeepPartitionProbabilityTestCase.create(
HIGH_EPSILON, LOW_DELTA, ONE_PARTITION_CONTRIBUTED, 2, 5.184706e-179),
KeepPartitionProbabilityTestCase.create(
HIGH_EPSILON, LOW_DELTA, ONE_PARTITION_CONTRIBUTED, 3, 2.688117e-157),
KeepPartitionProbabilityTestCase.create(
HIGH_EPSILON, LOW_DELTA, ONE_PARTITION_CONTRIBUTED, 4, 1.393710e-135),
KeepPartitionProbabilityTestCase.create(
HIGH_EPSILON, LOW_DELTA, ONE_PARTITION_CONTRIBUTED, 5, 7.225974e-114),
KeepPartitionProbabilityTestCase.create(
HIGH_EPSILON, LOW_DELTA, ONE_PARTITION_CONTRIBUTED, 6, 3.746455e-92),
KeepPartitionProbabilityTestCase.create(
HIGH_EPSILON, LOW_DELTA, ONE_PARTITION_CONTRIBUTED, 7, 1.942426e-70),
KeepPartitionProbabilityTestCase.create(
HIGH_EPSILON, LOW_DELTA, ONE_PARTITION_CONTRIBUTED, 8, 1.007091e-48),
KeepPartitionProbabilityTestCase.create(
HIGH_EPSILON, LOW_DELTA, ONE_PARTITION_CONTRIBUTED, 9, 5.221470e-27),
KeepPartitionProbabilityTestCase.create(
HIGH_EPSILON, LOW_DELTA, ONE_PARTITION_CONTRIBUTED, 10, 2.707178e-05),
KeepPartitionProbabilityTestCase.create(
HIGH_EPSILON, LOW_DELTA, ONE_PARTITION_CONTRIBUTED, 11, 1),
KeepPartitionProbabilityTestCase.create(
HIGH_EPSILON, LOW_DELTA, ONE_PARTITION_CONTRIBUTED, random.nextInt(1000) + 11, 1),
// ε = Double.MAX_VALUE, δ = 1e-200, 1 partition contributed, idsCount = 0 .. 2 .. (for
// counts >= 2 the probability is always = 1)
KeepPartitionProbabilityTestCase.create(
Double.MAX_VALUE, LOW_DELTA, ONE_PARTITION_CONTRIBUTED, 0, 0),
KeepPartitionProbabilityTestCase.create(
Double.MAX_VALUE, LOW_DELTA, ONE_PARTITION_CONTRIBUTED, 1, 1e-200),
KeepPartitionProbabilityTestCase.create(
Double.MAX_VALUE, LOW_DELTA, ONE_PARTITION_CONTRIBUTED, 2, 1),
KeepPartitionProbabilityTestCase.create(
Double.MAX_VALUE, LOW_DELTA, ONE_PARTITION_CONTRIBUTED, random.nextInt(1000) + 2, 1));
}
@AutoValue
public abstract static class KeepPartitionProbabilityTestCase implements Serializable {
static KeepPartitionProbabilityTestCase create(
double epsilon,
double delta,
int maxPartitionsContributed,
int idsCount,
double expectedProbability) {
return new AutoValue_PreAggSelectPartitionTest_KeepPartitionProbabilityTests_KeepPartitionProbabilityTestCase(
epsilon, delta, maxPartitionsContributed, idsCount, expectedProbability);
}
abstract double epsilon();
abstract double delta();
abstract int maxPartitionsContributed();
abstract int idsCount();
abstract double expectedProbability();
}
}
  /**
   * Note that {@link PreAggSelectPartitionSummary} isn't visible to the actual clients, who only
   * see an opaque {@code byte[]} blob. Here, we parse said blob to perform whitebox testing, to
   * verify some expectations of the blob's content. We do this because achieving good coverage with
   * pure behaviour testing (i.e., blackbox testing) isn't possible.
   */
  private static PreAggSelectPartitionSummary getSummary(
      PreAggSelectPartition preAggSelectPartition) {
    byte[] nonParsedSummary = preAggSelectPartition.getSerializableSummary();
    try {
      // We are deliberately ignoring the warning from JavaCodeClarity because
      // ExtensionRegistry.getGeneratedRegistry() breaks kokoro tests, is not open-sourced, and
      // there is no simple external alternative. However, we don't (and it is unlikely we will) use
      // extensions in Summary protos, so we do not expect this to be a problem.
      return PreAggSelectPartitionSummary.parseFrom(nonParsedSummary);
    } catch (InvalidProtocolBufferException pbe) {
      // A malformed summary indicates a serialization bug; surface it as a test failure.
      throw new IllegalArgumentException(pbe);
    }
  }
private static PreAggSelectPartition.Params.Builder getPreAggSelectPartitionBuilderWithFields() {
return PreAggSelectPartition.builder()
.epsilon(EPSILON)
.delta(DELTA)
.maxPartitionsContributed(1);
}
private static double getTolerance(double x, double y) {
double maxMagnitude = max(abs(x), abs(y));
return 1e-6 * maxMagnitude;
}
}
|
googleapis/google-cloud-java | 35,579 | java-securitycenter/proto-google-cloud-securitycenter-v1/src/main/java/com/google/cloud/securitycenter/v1/UpdateNotificationConfigRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/securitycenter/v1/securitycenter_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.securitycenter.v1;
/**
*
*
* <pre>
* Request message for updating a notification config.
* </pre>
*
* Protobuf type {@code google.cloud.securitycenter.v1.UpdateNotificationConfigRequest}
*/
public final class UpdateNotificationConfigRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.securitycenter.v1.UpdateNotificationConfigRequest)
UpdateNotificationConfigRequestOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use UpdateNotificationConfigRequest.newBuilder() to construct.
  private UpdateNotificationConfigRequest(
      com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Both fields are message-typed, so the no-arg constructor has nothing to initialize.
  private UpdateNotificationConfigRequest() {}
  // Reflection hook used by the protobuf runtime to create fresh instances.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new UpdateNotificationConfigRequest();
  }
  // Descriptor for this message type, sourced from the generated file descriptor.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.securitycenter.v1.SecuritycenterService
        .internal_static_google_cloud_securitycenter_v1_UpdateNotificationConfigRequest_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.securitycenter.v1.SecuritycenterService
        .internal_static_google_cloud_securitycenter_v1_UpdateNotificationConfigRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.securitycenter.v1.UpdateNotificationConfigRequest.class,
            com.google.cloud.securitycenter.v1.UpdateNotificationConfigRequest.Builder.class);
  }
  // Presence bits: bit 0 tracks notification_config, bit 1 tracks update_mask.
  private int bitField0_;
  public static final int NOTIFICATION_CONFIG_FIELD_NUMBER = 1;
  private com.google.cloud.securitycenter.v1.NotificationConfig notificationConfig_;
  /**
   *
   *
   * <pre>
   * Required. The notification config to update.
   * </pre>
   *
   * <code>
   * .google.cloud.securitycenter.v1.NotificationConfig notification_config = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return Whether the notificationConfig field is set.
   */
  @java.lang.Override
  public boolean hasNotificationConfig() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   *
   *
   * <pre>
   * Required. The notification config to update.
   * </pre>
   *
   * <code>
   * .google.cloud.securitycenter.v1.NotificationConfig notification_config = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The notificationConfig, or the default instance if the field is unset.
   */
  @java.lang.Override
  public com.google.cloud.securitycenter.v1.NotificationConfig getNotificationConfig() {
    return notificationConfig_ == null
        ? com.google.cloud.securitycenter.v1.NotificationConfig.getDefaultInstance()
        : notificationConfig_;
  }
  /**
   *
   *
   * <pre>
   * Required. The notification config to update.
   * </pre>
   *
   * <code>
   * .google.cloud.securitycenter.v1.NotificationConfig notification_config = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.securitycenter.v1.NotificationConfigOrBuilder
      getNotificationConfigOrBuilder() {
    return notificationConfig_ == null
        ? com.google.cloud.securitycenter.v1.NotificationConfig.getDefaultInstance()
        : notificationConfig_;
  }
  public static final int UPDATE_MASK_FIELD_NUMBER = 2;
  private com.google.protobuf.FieldMask updateMask_;
  /**
   *
   *
   * <pre>
   * The FieldMask to use when updating the notification config.
   *
   * If empty all mutable fields will be updated.
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 2;</code>
   *
   * @return Whether the updateMask field is set.
   */
  @java.lang.Override
  public boolean hasUpdateMask() {
    return ((bitField0_ & 0x00000002) != 0);
  }
  /**
   *
   *
   * <pre>
   * The FieldMask to use when updating the notification config.
   *
   * If empty all mutable fields will be updated.
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 2;</code>
   *
   * @return The updateMask, or the default instance if the field is unset.
   */
  @java.lang.Override
  public com.google.protobuf.FieldMask getUpdateMask() {
    return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
  }
  /**
   *
   *
   * <pre>
   * The FieldMask to use when updating the notification config.
   *
   * If empty all mutable fields will be updated.
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 2;</code>
   */
  @java.lang.Override
  public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
    return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
  }
  // Memoized initialization state: -1 = not computed, 0 = not initialized, 1 = initialized.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No required proto2-style fields here, so the message is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes only the fields whose presence bits are set, then any unknown fields.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(1, getNotificationConfig());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      output.writeMessage(2, getUpdateMask());
    }
    getUnknownFields().writeTo(output);
  }
  // Computes (and memoizes) the wire size; must mirror the field order/presence logic of writeTo.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getNotificationConfig());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Value equality: field presence must match before field values are compared, and unknown
  // fields participate in the comparison.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.securitycenter.v1.UpdateNotificationConfigRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.securitycenter.v1.UpdateNotificationConfigRequest other =
        (com.google.cloud.securitycenter.v1.UpdateNotificationConfigRequest) obj;
    if (hasNotificationConfig() != other.hasNotificationConfig()) return false;
    if (hasNotificationConfig()) {
      if (!getNotificationConfig().equals(other.getNotificationConfig())) return false;
    }
    if (hasUpdateMask() != other.hasUpdateMask()) return false;
    if (hasUpdateMask()) {
      if (!getUpdateMask().equals(other.getUpdateMask())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Memoized hash consistent with equals: only set fields contribute, keyed by field number.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasNotificationConfig()) {
      hash = (37 * hash) + NOTIFICATION_CONFIG_FIELD_NUMBER;
      hash = (53 * hash) + getNotificationConfig().hashCode();
    }
    if (hasUpdateMask()) {
      hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
      hash = (53 * hash) + getUpdateMask().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points; all delegate to PARSER, with or without an
  // extension registry, for every supported input representation.
  public static com.google.cloud.securitycenter.v1.UpdateNotificationConfigRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.securitycenter.v1.UpdateNotificationConfigRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.securitycenter.v1.UpdateNotificationConfigRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.securitycenter.v1.UpdateNotificationConfigRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.securitycenter.v1.UpdateNotificationConfigRequest parseFrom(
      byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.securitycenter.v1.UpdateNotificationConfigRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.securitycenter.v1.UpdateNotificationConfigRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.securitycenter.v1.UpdateNotificationConfigRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.securitycenter.v1.UpdateNotificationConfigRequest
      parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.securitycenter.v1.UpdateNotificationConfigRequest
      parseDelimitedFrom(
          java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.securitycenter.v1.UpdateNotificationConfigRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.securitycenter.v1.UpdateNotificationConfigRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  // Creates a new empty builder from the default (all-unset) instance.
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  // Creates a builder pre-populated with {@code prototype}'s fields.
  public static Builder newBuilder(
      com.google.cloud.securitycenter.v1.UpdateNotificationConfigRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    // Avoid a needless mergeFrom when this is the default instance.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Request message for updating a notification config.
* </pre>
*
* Protobuf type {@code google.cloud.securitycenter.v1.UpdateNotificationConfigRequest}
*/
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.securitycenter.v1.UpdateNotificationConfigRequest)
      com.google.cloud.securitycenter.v1.UpdateNotificationConfigRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.securitycenter.v1.SecuritycenterService
          .internal_static_google_cloud_securitycenter_v1_UpdateNotificationConfigRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.securitycenter.v1.SecuritycenterService
          .internal_static_google_cloud_securitycenter_v1_UpdateNotificationConfigRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.securitycenter.v1.UpdateNotificationConfigRequest.class,
              com.google.cloud.securitycenter.v1.UpdateNotificationConfigRequest.Builder.class);
    }

    // Construct using
    // com.google.cloud.securitycenter.v1.UpdateNotificationConfigRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    // Eagerly creates the nested field builders when the protobuf runtime
    // requests it (alwaysUseFieldBuilders is enabled only in tests).
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getNotificationConfigFieldBuilder();
        getUpdateMaskFieldBuilder();
      }
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      notificationConfig_ = null;
      if (notificationConfigBuilder_ != null) {
        notificationConfigBuilder_.dispose();
        notificationConfigBuilder_ = null;
      }
      updateMask_ = null;
      if (updateMaskBuilder_ != null) {
        updateMaskBuilder_.dispose();
        updateMaskBuilder_ = null;
      }
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.securitycenter.v1.SecuritycenterService
          .internal_static_google_cloud_securitycenter_v1_UpdateNotificationConfigRequest_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.securitycenter.v1.UpdateNotificationConfigRequest
        getDefaultInstanceForType() {
      return com.google.cloud.securitycenter.v1.UpdateNotificationConfigRequest
          .getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.securitycenter.v1.UpdateNotificationConfigRequest build() {
      com.google.cloud.securitycenter.v1.UpdateNotificationConfigRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.securitycenter.v1.UpdateNotificationConfigRequest buildPartial() {
      com.google.cloud.securitycenter.v1.UpdateNotificationConfigRequest result =
          new com.google.cloud.securitycenter.v1.UpdateNotificationConfigRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies fields flagged in bitField0_ into the result message and records
    // their presence in the result's own bitField0_.
    private void buildPartial0(
        com.google.cloud.securitycenter.v1.UpdateNotificationConfigRequest result) {
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.notificationConfig_ =
            notificationConfigBuilder_ == null
                ? notificationConfig_
                : notificationConfigBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
        to_bitField0_ |= 0x00000002;
      }
      result.bitField0_ |= to_bitField0_;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.securitycenter.v1.UpdateNotificationConfigRequest) {
        return mergeFrom(
            (com.google.cloud.securitycenter.v1.UpdateNotificationConfigRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Merges only the fields that are set on {@code other}; unknown fields are
    // merged as well.
    public Builder mergeFrom(
        com.google.cloud.securitycenter.v1.UpdateNotificationConfigRequest other) {
      if (other
          == com.google.cloud.securitycenter.v1.UpdateNotificationConfigRequest
              .getDefaultInstance()) return this;
      if (other.hasNotificationConfig()) {
        mergeNotificationConfig(other.getNotificationConfig());
      }
      if (other.hasUpdateMask()) {
        mergeUpdateMask(other.getUpdateMask());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    // Wire-format parse loop: dispatches on field tags (10 = field 1 message,
    // 18 = field 2 message); unrecognized tags are preserved as unknown fields.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                input.readMessage(
                    getNotificationConfigFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    // Bit 0 = notification_config set, bit 1 = update_mask set.
    private int bitField0_;

    private com.google.cloud.securitycenter.v1.NotificationConfig notificationConfig_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.securitycenter.v1.NotificationConfig,
            com.google.cloud.securitycenter.v1.NotificationConfig.Builder,
            com.google.cloud.securitycenter.v1.NotificationConfigOrBuilder>
        notificationConfigBuilder_;

    /**
     *
     *
     * <pre>
     * Required. The notification config to update.
     * </pre>
     *
     * <code>
     * .google.cloud.securitycenter.v1.NotificationConfig notification_config = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return Whether the notificationConfig field is set.
     */
    public boolean hasNotificationConfig() {
      return ((bitField0_ & 0x00000001) != 0);
    }

    /**
     *
     *
     * <pre>
     * Required. The notification config to update.
     * </pre>
     *
     * <code>
     * .google.cloud.securitycenter.v1.NotificationConfig notification_config = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return The notificationConfig.
     */
    public com.google.cloud.securitycenter.v1.NotificationConfig getNotificationConfig() {
      if (notificationConfigBuilder_ == null) {
        return notificationConfig_ == null
            ? com.google.cloud.securitycenter.v1.NotificationConfig.getDefaultInstance()
            : notificationConfig_;
      } else {
        return notificationConfigBuilder_.getMessage();
      }
    }

    /**
     *
     *
     * <pre>
     * Required. The notification config to update.
     * </pre>
     *
     * <code>
     * .google.cloud.securitycenter.v1.NotificationConfig notification_config = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setNotificationConfig(
        com.google.cloud.securitycenter.v1.NotificationConfig value) {
      if (notificationConfigBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        notificationConfig_ = value;
      } else {
        notificationConfigBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. The notification config to update.
     * </pre>
     *
     * <code>
     * .google.cloud.securitycenter.v1.NotificationConfig notification_config = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setNotificationConfig(
        com.google.cloud.securitycenter.v1.NotificationConfig.Builder builderForValue) {
      if (notificationConfigBuilder_ == null) {
        notificationConfig_ = builderForValue.build();
      } else {
        notificationConfigBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. The notification config to update.
     * </pre>
     *
     * <code>
     * .google.cloud.securitycenter.v1.NotificationConfig notification_config = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder mergeNotificationConfig(
        com.google.cloud.securitycenter.v1.NotificationConfig value) {
      if (notificationConfigBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)
            && notificationConfig_ != null
            && notificationConfig_
                != com.google.cloud.securitycenter.v1.NotificationConfig.getDefaultInstance()) {
          getNotificationConfigBuilder().mergeFrom(value);
        } else {
          notificationConfig_ = value;
        }
      } else {
        notificationConfigBuilder_.mergeFrom(value);
      }
      if (notificationConfig_ != null) {
        bitField0_ |= 0x00000001;
        onChanged();
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. The notification config to update.
     * </pre>
     *
     * <code>
     * .google.cloud.securitycenter.v1.NotificationConfig notification_config = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder clearNotificationConfig() {
      bitField0_ = (bitField0_ & ~0x00000001);
      notificationConfig_ = null;
      if (notificationConfigBuilder_ != null) {
        notificationConfigBuilder_.dispose();
        notificationConfigBuilder_ = null;
      }
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. The notification config to update.
     * </pre>
     *
     * <code>
     * .google.cloud.securitycenter.v1.NotificationConfig notification_config = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.securitycenter.v1.NotificationConfig.Builder
        getNotificationConfigBuilder() {
      bitField0_ |= 0x00000001;
      onChanged();
      return getNotificationConfigFieldBuilder().getBuilder();
    }

    /**
     *
     *
     * <pre>
     * Required. The notification config to update.
     * </pre>
     *
     * <code>
     * .google.cloud.securitycenter.v1.NotificationConfig notification_config = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.securitycenter.v1.NotificationConfigOrBuilder
        getNotificationConfigOrBuilder() {
      if (notificationConfigBuilder_ != null) {
        return notificationConfigBuilder_.getMessageOrBuilder();
      } else {
        return notificationConfig_ == null
            ? com.google.cloud.securitycenter.v1.NotificationConfig.getDefaultInstance()
            : notificationConfig_;
      }
    }

    /**
     *
     *
     * <pre>
     * Required. The notification config to update.
     * </pre>
     *
     * <code>
     * .google.cloud.securitycenter.v1.NotificationConfig notification_config = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.securitycenter.v1.NotificationConfig,
            com.google.cloud.securitycenter.v1.NotificationConfig.Builder,
            com.google.cloud.securitycenter.v1.NotificationConfigOrBuilder>
        getNotificationConfigFieldBuilder() {
      if (notificationConfigBuilder_ == null) {
        notificationConfigBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.securitycenter.v1.NotificationConfig,
                com.google.cloud.securitycenter.v1.NotificationConfig.Builder,
                com.google.cloud.securitycenter.v1.NotificationConfigOrBuilder>(
                getNotificationConfig(), getParentForChildren(), isClean());
        // Once the lazy builder exists it owns the field; drop the raw message.
        notificationConfig_ = null;
      }
      return notificationConfigBuilder_;
    }

    private com.google.protobuf.FieldMask updateMask_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>
        updateMaskBuilder_;

    /**
     *
     *
     * <pre>
     * The FieldMask to use when updating the notification config.
     *
     * If empty all mutable fields will be updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     *
     * @return Whether the updateMask field is set.
     */
    public boolean hasUpdateMask() {
      return ((bitField0_ & 0x00000002) != 0);
    }

    /**
     *
     *
     * <pre>
     * The FieldMask to use when updating the notification config.
     *
     * If empty all mutable fields will be updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     *
     * @return The updateMask.
     */
    public com.google.protobuf.FieldMask getUpdateMask() {
      if (updateMaskBuilder_ == null) {
        return updateMask_ == null
            ? com.google.protobuf.FieldMask.getDefaultInstance()
            : updateMask_;
      } else {
        return updateMaskBuilder_.getMessage();
      }
    }

    /**
     *
     *
     * <pre>
     * The FieldMask to use when updating the notification config.
     *
     * If empty all mutable fields will be updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     */
    public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
      if (updateMaskBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        updateMask_ = value;
      } else {
        updateMaskBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * The FieldMask to use when updating the notification config.
     *
     * If empty all mutable fields will be updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     */
    public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
      if (updateMaskBuilder_ == null) {
        updateMask_ = builderForValue.build();
      } else {
        updateMaskBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * The FieldMask to use when updating the notification config.
     *
     * If empty all mutable fields will be updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     */
    public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
      if (updateMaskBuilder_ == null) {
        if (((bitField0_ & 0x00000002) != 0)
            && updateMask_ != null
            && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
          getUpdateMaskBuilder().mergeFrom(value);
        } else {
          updateMask_ = value;
        }
      } else {
        updateMaskBuilder_.mergeFrom(value);
      }
      if (updateMask_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The FieldMask to use when updating the notification config.
     *
     * If empty all mutable fields will be updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     */
    public Builder clearUpdateMask() {
      bitField0_ = (bitField0_ & ~0x00000002);
      updateMask_ = null;
      if (updateMaskBuilder_ != null) {
        updateMaskBuilder_.dispose();
        updateMaskBuilder_ = null;
      }
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * The FieldMask to use when updating the notification config.
     *
     * If empty all mutable fields will be updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     */
    public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
      bitField0_ |= 0x00000002;
      onChanged();
      return getUpdateMaskFieldBuilder().getBuilder();
    }

    /**
     *
     *
     * <pre>
     * The FieldMask to use when updating the notification config.
     *
     * If empty all mutable fields will be updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     */
    public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
      if (updateMaskBuilder_ != null) {
        return updateMaskBuilder_.getMessageOrBuilder();
      } else {
        return updateMask_ == null
            ? com.google.protobuf.FieldMask.getDefaultInstance()
            : updateMask_;
      }
    }

    /**
     *
     *
     * <pre>
     * The FieldMask to use when updating the notification config.
     *
     * If empty all mutable fields will be updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>
        getUpdateMaskFieldBuilder() {
      if (updateMaskBuilder_ == null) {
        updateMaskBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.protobuf.FieldMask,
                com.google.protobuf.FieldMask.Builder,
                com.google.protobuf.FieldMaskOrBuilder>(
                getUpdateMask(), getParentForChildren(), isClean());
        // Once the lazy builder exists it owns the field; drop the raw message.
        updateMask_ = null;
      }
      return updateMaskBuilder_;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.securitycenter.v1.UpdateNotificationConfigRequest)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.securitycenter.v1.UpdateNotificationConfigRequest)
  // Singleton default (all-fields-unset) instance, created once at class load.
  private static final com.google.cloud.securitycenter.v1.UpdateNotificationConfigRequest
      DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.securitycenter.v1.UpdateNotificationConfigRequest();
  }

  public static com.google.cloud.securitycenter.v1.UpdateNotificationConfigRequest
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Wire-format parser; on failure it attaches the partially parsed message to
  // the thrown InvalidProtocolBufferException.
  private static final com.google.protobuf.Parser<UpdateNotificationConfigRequest> PARSER =
      new com.google.protobuf.AbstractParser<UpdateNotificationConfigRequest>() {
        @java.lang.Override
        public UpdateNotificationConfigRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<UpdateNotificationConfigRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<UpdateNotificationConfigRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.securitycenter.v1.UpdateNotificationConfigRequest
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 35,610 | java-analytics-data/proto-google-analytics-data-v1alpha/src/main/java/com/google/analytics/data/v1alpha/SessionSegmentCriteria.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/analytics/data/v1alpha/data.proto
// Protobuf Java Version: 3.25.8
package com.google.analytics.data.v1alpha;
/**
*
*
* <pre>
* A session matches a criteria if the session's events meet the conditions in
* the criteria.
* </pre>
*
* Protobuf type {@code google.analytics.data.v1alpha.SessionSegmentCriteria}
*/
public final class SessionSegmentCriteria extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.analytics.data.v1alpha.SessionSegmentCriteria)
SessionSegmentCriteriaOrBuilder {
private static final long serialVersionUID = 0L;
// Use SessionSegmentCriteria.newBuilder() to construct.
  // Invoked only via Builder#build(); state is copied from the builder.
  private SessionSegmentCriteria(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // No-arg constructor for the default instance: empty repeated field.
  private SessionSegmentCriteria() {
    andConditionGroups_ = java.util.Collections.emptyList();
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new SessionSegmentCriteria();
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.analytics.data.v1alpha.ReportingApiProto
        .internal_static_google_analytics_data_v1alpha_SessionSegmentCriteria_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.analytics.data.v1alpha.ReportingApiProto
        .internal_static_google_analytics_data_v1alpha_SessionSegmentCriteria_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.analytics.data.v1alpha.SessionSegmentCriteria.class,
            com.google.analytics.data.v1alpha.SessionSegmentCriteria.Builder.class);
  }
  public static final int AND_CONDITION_GROUPS_FIELD_NUMBER = 1;

  // Backing list for repeated field 1; never null (empty list when unset).
  @SuppressWarnings("serial")
  private java.util.List<com.google.analytics.data.v1alpha.SessionSegmentConditionGroup>
      andConditionGroups_;

  /**
   *
   *
   * <pre>
   * A session matches this criteria if the session matches each of these
   * `andConditionGroups`.
   * </pre>
   *
   * <code>
   * repeated .google.analytics.data.v1alpha.SessionSegmentConditionGroup and_condition_groups = 1;
   * </code>
   */
  @java.lang.Override
  public java.util.List<com.google.analytics.data.v1alpha.SessionSegmentConditionGroup>
      getAndConditionGroupsList() {
    return andConditionGroups_;
  }

  /**
   *
   *
   * <pre>
   * A session matches this criteria if the session matches each of these
   * `andConditionGroups`.
   * </pre>
   *
   * <code>
   * repeated .google.analytics.data.v1alpha.SessionSegmentConditionGroup and_condition_groups = 1;
   * </code>
   */
  @java.lang.Override
  public java.util.List<
          ? extends com.google.analytics.data.v1alpha.SessionSegmentConditionGroupOrBuilder>
      getAndConditionGroupsOrBuilderList() {
    return andConditionGroups_;
  }

  /**
   *
   *
   * <pre>
   * A session matches this criteria if the session matches each of these
   * `andConditionGroups`.
   * </pre>
   *
   * <code>
   * repeated .google.analytics.data.v1alpha.SessionSegmentConditionGroup and_condition_groups = 1;
   * </code>
   */
  @java.lang.Override
  public int getAndConditionGroupsCount() {
    return andConditionGroups_.size();
  }

  /**
   *
   *
   * <pre>
   * A session matches this criteria if the session matches each of these
   * `andConditionGroups`.
   * </pre>
   *
   * <code>
   * repeated .google.analytics.data.v1alpha.SessionSegmentConditionGroup and_condition_groups = 1;
   * </code>
   */
  @java.lang.Override
  public com.google.analytics.data.v1alpha.SessionSegmentConditionGroup getAndConditionGroups(
      int index) {
    return andConditionGroups_.get(index);
  }

  /**
   *
   *
   * <pre>
   * A session matches this criteria if the session matches each of these
   * `andConditionGroups`.
   * </pre>
   *
   * <code>
   * repeated .google.analytics.data.v1alpha.SessionSegmentConditionGroup and_condition_groups = 1;
   * </code>
   */
  @java.lang.Override
  public com.google.analytics.data.v1alpha.SessionSegmentConditionGroupOrBuilder
      getAndConditionGroupsOrBuilder(int index) {
    return andConditionGroups_.get(index);
  }
  // Memoized isInitialized result: -1 unknown, 0 false, 1 true.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    // No required fields in this proto3 message, so always initialized.
    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    for (int i = 0; i < andConditionGroups_.size(); i++) {
      output.writeMessage(1, andConditionGroups_.get(i));
    }
    getUnknownFields().writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    for (int i = 0; i < andConditionGroups_.size(); i++) {
      size +=
          com.google.protobuf.CodedOutputStream.computeMessageSize(1, andConditionGroups_.get(i));
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.analytics.data.v1alpha.SessionSegmentCriteria)) {
      return super.equals(obj);
    }
    com.google.analytics.data.v1alpha.SessionSegmentCriteria other =
        (com.google.analytics.data.v1alpha.SessionSegmentCriteria) obj;

    if (!getAndConditionGroupsList().equals(other.getAndConditionGroupsList())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getAndConditionGroupsCount() > 0) {
      hash = (37 * hash) + AND_CONDITION_GROUPS_FIELD_NUMBER;
      hash = (53 * hash) + getAndConditionGroupsList().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // --- Static parse entry points: all delegate to PARSER. Byte-based overloads
  // throw InvalidProtocolBufferException on malformed input; stream-based
  // overloads additionally propagate IOExceptions from the underlying stream. ---
  public static com.google.analytics.data.v1alpha.SessionSegmentCriteria parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.analytics.data.v1alpha.SessionSegmentCriteria parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.analytics.data.v1alpha.SessionSegmentCriteria parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.analytics.data.v1alpha.SessionSegmentCriteria parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.analytics.data.v1alpha.SessionSegmentCriteria parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.analytics.data.v1alpha.SessionSegmentCriteria parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.analytics.data.v1alpha.SessionSegmentCriteria parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.analytics.data.v1alpha.SessionSegmentCriteria parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // Delimited variants read a varint length prefix before the message bytes,
  // allowing multiple messages on one stream.
  public static com.google.analytics.data.v1alpha.SessionSegmentCriteria parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.analytics.data.v1alpha.SessionSegmentCriteria parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.analytics.data.v1alpha.SessionSegmentCriteria parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.analytics.data.v1alpha.SessionSegmentCriteria parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  // Creates a new empty builder from the default (all-unset) instance.
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  // Creates a builder pre-populated with {@code prototype}'s fields.
  public static Builder newBuilder(
      com.google.analytics.data.v1alpha.SessionSegmentCriteria prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    // Avoid a needless mergeFrom when this is the default instance.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* A session matches a criteria if the session's events meet the conditions in
* the criteria.
* </pre>
*
* Protobuf type {@code google.analytics.data.v1alpha.SessionSegmentCriteria}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.analytics.data.v1alpha.SessionSegmentCriteria)
com.google.analytics.data.v1alpha.SessionSegmentCriteriaOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.analytics.data.v1alpha.ReportingApiProto
          .internal_static_google_analytics_data_v1alpha_SessionSegmentCriteria_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.analytics.data.v1alpha.ReportingApiProto
          .internal_static_google_analytics_data_v1alpha_SessionSegmentCriteria_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.analytics.data.v1alpha.SessionSegmentCriteria.class,
              com.google.analytics.data.v1alpha.SessionSegmentCriteria.Builder.class);
    }

    // Construct using com.google.analytics.data.v1alpha.SessionSegmentCriteria.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      // With no nested builder the list itself is replaced; with one, the
      // builder owns the elements and is cleared instead.
      if (andConditionGroupsBuilder_ == null) {
        andConditionGroups_ = java.util.Collections.emptyList();
      } else {
        andConditionGroups_ = null;
        andConditionGroupsBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.analytics.data.v1alpha.ReportingApiProto
          .internal_static_google_analytics_data_v1alpha_SessionSegmentCriteria_descriptor;
    }

    @java.lang.Override
    public com.google.analytics.data.v1alpha.SessionSegmentCriteria getDefaultInstanceForType() {
      return com.google.analytics.data.v1alpha.SessionSegmentCriteria.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.analytics.data.v1alpha.SessionSegmentCriteria build() {
      com.google.analytics.data.v1alpha.SessionSegmentCriteria result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.analytics.data.v1alpha.SessionSegmentCriteria buildPartial() {
      com.google.analytics.data.v1alpha.SessionSegmentCriteria result =
          new com.google.analytics.data.v1alpha.SessionSegmentCriteria(this);
      // Repeated fields are transferred first, then the bit-tracked singular fields.
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
private void buildPartialRepeatedFields(
com.google.analytics.data.v1alpha.SessionSegmentCriteria result) {
if (andConditionGroupsBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
andConditionGroups_ = java.util.Collections.unmodifiableList(andConditionGroups_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.andConditionGroups_ = andConditionGroups_;
} else {
result.andConditionGroups_ = andConditionGroupsBuilder_.build();
}
}
private void buildPartial0(com.google.analytics.data.v1alpha.SessionSegmentCriteria result) {
int from_bitField0_ = bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.analytics.data.v1alpha.SessionSegmentCriteria) {
return mergeFrom((com.google.analytics.data.v1alpha.SessionSegmentCriteria) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.analytics.data.v1alpha.SessionSegmentCriteria other) {
if (other == com.google.analytics.data.v1alpha.SessionSegmentCriteria.getDefaultInstance())
return this;
if (andConditionGroupsBuilder_ == null) {
if (!other.andConditionGroups_.isEmpty()) {
if (andConditionGroups_.isEmpty()) {
andConditionGroups_ = other.andConditionGroups_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureAndConditionGroupsIsMutable();
andConditionGroups_.addAll(other.andConditionGroups_);
}
onChanged();
}
} else {
if (!other.andConditionGroups_.isEmpty()) {
if (andConditionGroupsBuilder_.isEmpty()) {
andConditionGroupsBuilder_.dispose();
andConditionGroupsBuilder_ = null;
andConditionGroups_ = other.andConditionGroups_;
bitField0_ = (bitField0_ & ~0x00000001);
andConditionGroupsBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getAndConditionGroupsFieldBuilder()
: null;
} else {
andConditionGroupsBuilder_.addAllMessages(other.andConditionGroups_);
}
}
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.analytics.data.v1alpha.SessionSegmentConditionGroup m =
input.readMessage(
com.google.analytics.data.v1alpha.SessionSegmentConditionGroup.parser(),
extensionRegistry);
if (andConditionGroupsBuilder_ == null) {
ensureAndConditionGroupsIsMutable();
andConditionGroups_.add(m);
} else {
andConditionGroupsBuilder_.addMessage(m);
}
break;
} // case 10
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.analytics.data.v1alpha.SessionSegmentConditionGroup>
andConditionGroups_ = java.util.Collections.emptyList();
private void ensureAndConditionGroupsIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
andConditionGroups_ =
new java.util.ArrayList<com.google.analytics.data.v1alpha.SessionSegmentConditionGroup>(
andConditionGroups_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.analytics.data.v1alpha.SessionSegmentConditionGroup,
com.google.analytics.data.v1alpha.SessionSegmentConditionGroup.Builder,
com.google.analytics.data.v1alpha.SessionSegmentConditionGroupOrBuilder>
andConditionGroupsBuilder_;
/**
*
*
* <pre>
* A session matches this criteria if the session matches each of these
* `andConditionGroups`.
* </pre>
*
* <code>
* repeated .google.analytics.data.v1alpha.SessionSegmentConditionGroup and_condition_groups = 1;
* </code>
*/
public java.util.List<com.google.analytics.data.v1alpha.SessionSegmentConditionGroup>
getAndConditionGroupsList() {
if (andConditionGroupsBuilder_ == null) {
return java.util.Collections.unmodifiableList(andConditionGroups_);
} else {
return andConditionGroupsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* A session matches this criteria if the session matches each of these
* `andConditionGroups`.
* </pre>
*
* <code>
* repeated .google.analytics.data.v1alpha.SessionSegmentConditionGroup and_condition_groups = 1;
* </code>
*/
public int getAndConditionGroupsCount() {
if (andConditionGroupsBuilder_ == null) {
return andConditionGroups_.size();
} else {
return andConditionGroupsBuilder_.getCount();
}
}
/**
*
*
* <pre>
* A session matches this criteria if the session matches each of these
* `andConditionGroups`.
* </pre>
*
* <code>
* repeated .google.analytics.data.v1alpha.SessionSegmentConditionGroup and_condition_groups = 1;
* </code>
*/
public com.google.analytics.data.v1alpha.SessionSegmentConditionGroup getAndConditionGroups(
int index) {
if (andConditionGroupsBuilder_ == null) {
return andConditionGroups_.get(index);
} else {
return andConditionGroupsBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* A session matches this criteria if the session matches each of these
* `andConditionGroups`.
* </pre>
*
* <code>
* repeated .google.analytics.data.v1alpha.SessionSegmentConditionGroup and_condition_groups = 1;
* </code>
*/
public Builder setAndConditionGroups(
int index, com.google.analytics.data.v1alpha.SessionSegmentConditionGroup value) {
if (andConditionGroupsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureAndConditionGroupsIsMutable();
andConditionGroups_.set(index, value);
onChanged();
} else {
andConditionGroupsBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* A session matches this criteria if the session matches each of these
* `andConditionGroups`.
* </pre>
*
* <code>
* repeated .google.analytics.data.v1alpha.SessionSegmentConditionGroup and_condition_groups = 1;
* </code>
*/
public Builder setAndConditionGroups(
int index,
com.google.analytics.data.v1alpha.SessionSegmentConditionGroup.Builder builderForValue) {
if (andConditionGroupsBuilder_ == null) {
ensureAndConditionGroupsIsMutable();
andConditionGroups_.set(index, builderForValue.build());
onChanged();
} else {
andConditionGroupsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* A session matches this criteria if the session matches each of these
* `andConditionGroups`.
* </pre>
*
* <code>
* repeated .google.analytics.data.v1alpha.SessionSegmentConditionGroup and_condition_groups = 1;
* </code>
*/
public Builder addAndConditionGroups(
com.google.analytics.data.v1alpha.SessionSegmentConditionGroup value) {
if (andConditionGroupsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureAndConditionGroupsIsMutable();
andConditionGroups_.add(value);
onChanged();
} else {
andConditionGroupsBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* A session matches this criteria if the session matches each of these
* `andConditionGroups`.
* </pre>
*
* <code>
* repeated .google.analytics.data.v1alpha.SessionSegmentConditionGroup and_condition_groups = 1;
* </code>
*/
public Builder addAndConditionGroups(
int index, com.google.analytics.data.v1alpha.SessionSegmentConditionGroup value) {
if (andConditionGroupsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureAndConditionGroupsIsMutable();
andConditionGroups_.add(index, value);
onChanged();
} else {
andConditionGroupsBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* A session matches this criteria if the session matches each of these
* `andConditionGroups`.
* </pre>
*
* <code>
* repeated .google.analytics.data.v1alpha.SessionSegmentConditionGroup and_condition_groups = 1;
* </code>
*/
public Builder addAndConditionGroups(
com.google.analytics.data.v1alpha.SessionSegmentConditionGroup.Builder builderForValue) {
if (andConditionGroupsBuilder_ == null) {
ensureAndConditionGroupsIsMutable();
andConditionGroups_.add(builderForValue.build());
onChanged();
} else {
andConditionGroupsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* A session matches this criteria if the session matches each of these
* `andConditionGroups`.
* </pre>
*
* <code>
* repeated .google.analytics.data.v1alpha.SessionSegmentConditionGroup and_condition_groups = 1;
* </code>
*/
public Builder addAndConditionGroups(
int index,
com.google.analytics.data.v1alpha.SessionSegmentConditionGroup.Builder builderForValue) {
if (andConditionGroupsBuilder_ == null) {
ensureAndConditionGroupsIsMutable();
andConditionGroups_.add(index, builderForValue.build());
onChanged();
} else {
andConditionGroupsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* A session matches this criteria if the session matches each of these
* `andConditionGroups`.
* </pre>
*
* <code>
* repeated .google.analytics.data.v1alpha.SessionSegmentConditionGroup and_condition_groups = 1;
* </code>
*/
public Builder addAllAndConditionGroups(
java.lang.Iterable<? extends com.google.analytics.data.v1alpha.SessionSegmentConditionGroup>
values) {
if (andConditionGroupsBuilder_ == null) {
ensureAndConditionGroupsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, andConditionGroups_);
onChanged();
} else {
andConditionGroupsBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* A session matches this criteria if the session matches each of these
* `andConditionGroups`.
* </pre>
*
* <code>
* repeated .google.analytics.data.v1alpha.SessionSegmentConditionGroup and_condition_groups = 1;
* </code>
*/
public Builder clearAndConditionGroups() {
if (andConditionGroupsBuilder_ == null) {
andConditionGroups_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
andConditionGroupsBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* A session matches this criteria if the session matches each of these
* `andConditionGroups`.
* </pre>
*
* <code>
* repeated .google.analytics.data.v1alpha.SessionSegmentConditionGroup and_condition_groups = 1;
* </code>
*/
public Builder removeAndConditionGroups(int index) {
if (andConditionGroupsBuilder_ == null) {
ensureAndConditionGroupsIsMutable();
andConditionGroups_.remove(index);
onChanged();
} else {
andConditionGroupsBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* A session matches this criteria if the session matches each of these
* `andConditionGroups`.
* </pre>
*
* <code>
* repeated .google.analytics.data.v1alpha.SessionSegmentConditionGroup and_condition_groups = 1;
* </code>
*/
public com.google.analytics.data.v1alpha.SessionSegmentConditionGroup.Builder
getAndConditionGroupsBuilder(int index) {
return getAndConditionGroupsFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* A session matches this criteria if the session matches each of these
* `andConditionGroups`.
* </pre>
*
* <code>
* repeated .google.analytics.data.v1alpha.SessionSegmentConditionGroup and_condition_groups = 1;
* </code>
*/
public com.google.analytics.data.v1alpha.SessionSegmentConditionGroupOrBuilder
getAndConditionGroupsOrBuilder(int index) {
if (andConditionGroupsBuilder_ == null) {
return andConditionGroups_.get(index);
} else {
return andConditionGroupsBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* A session matches this criteria if the session matches each of these
* `andConditionGroups`.
* </pre>
*
* <code>
* repeated .google.analytics.data.v1alpha.SessionSegmentConditionGroup and_condition_groups = 1;
* </code>
*/
public java.util.List<
? extends com.google.analytics.data.v1alpha.SessionSegmentConditionGroupOrBuilder>
getAndConditionGroupsOrBuilderList() {
if (andConditionGroupsBuilder_ != null) {
return andConditionGroupsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(andConditionGroups_);
}
}
/**
*
*
* <pre>
* A session matches this criteria if the session matches each of these
* `andConditionGroups`.
* </pre>
*
* <code>
* repeated .google.analytics.data.v1alpha.SessionSegmentConditionGroup and_condition_groups = 1;
* </code>
*/
public com.google.analytics.data.v1alpha.SessionSegmentConditionGroup.Builder
addAndConditionGroupsBuilder() {
return getAndConditionGroupsFieldBuilder()
.addBuilder(
com.google.analytics.data.v1alpha.SessionSegmentConditionGroup.getDefaultInstance());
}
/**
*
*
* <pre>
* A session matches this criteria if the session matches each of these
* `andConditionGroups`.
* </pre>
*
* <code>
* repeated .google.analytics.data.v1alpha.SessionSegmentConditionGroup and_condition_groups = 1;
* </code>
*/
public com.google.analytics.data.v1alpha.SessionSegmentConditionGroup.Builder
addAndConditionGroupsBuilder(int index) {
return getAndConditionGroupsFieldBuilder()
.addBuilder(
index,
com.google.analytics.data.v1alpha.SessionSegmentConditionGroup.getDefaultInstance());
}
/**
*
*
* <pre>
* A session matches this criteria if the session matches each of these
* `andConditionGroups`.
* </pre>
*
* <code>
* repeated .google.analytics.data.v1alpha.SessionSegmentConditionGroup and_condition_groups = 1;
* </code>
*/
public java.util.List<com.google.analytics.data.v1alpha.SessionSegmentConditionGroup.Builder>
getAndConditionGroupsBuilderList() {
return getAndConditionGroupsFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.analytics.data.v1alpha.SessionSegmentConditionGroup,
com.google.analytics.data.v1alpha.SessionSegmentConditionGroup.Builder,
com.google.analytics.data.v1alpha.SessionSegmentConditionGroupOrBuilder>
getAndConditionGroupsFieldBuilder() {
if (andConditionGroupsBuilder_ == null) {
andConditionGroupsBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.analytics.data.v1alpha.SessionSegmentConditionGroup,
com.google.analytics.data.v1alpha.SessionSegmentConditionGroup.Builder,
com.google.analytics.data.v1alpha.SessionSegmentConditionGroupOrBuilder>(
andConditionGroups_,
((bitField0_ & 0x00000001) != 0),
getParentForChildren(),
isClean());
andConditionGroups_ = null;
}
return andConditionGroupsBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.analytics.data.v1alpha.SessionSegmentCriteria)
}
// @@protoc_insertion_point(class_scope:google.analytics.data.v1alpha.SessionSegmentCriteria)
  // Singleton empty instance shared by getDefaultInstance()/getDefaultInstanceForType().
  private static final com.google.analytics.data.v1alpha.SessionSegmentCriteria DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.analytics.data.v1alpha.SessionSegmentCriteria();
  }
  // Returns the shared immutable default (empty) instance.
  public static com.google.analytics.data.v1alpha.SessionSegmentCriteria getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Wire-format parser: delegates to Builder.mergeFrom and attaches the partially-built message
  // to any parse failure so callers can inspect what was read before the error.
  private static final com.google.protobuf.Parser<SessionSegmentCriteria> PARSER =
      new com.google.protobuf.AbstractParser<SessionSegmentCriteria>() {
        @java.lang.Override
        public SessionSegmentCriteria parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  // Static accessor for the message parser.
  public static com.google.protobuf.Parser<SessionSegmentCriteria> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<SessionSegmentCriteria> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.analytics.data.v1alpha.SessionSegmentCriteria getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/hbase | 35,686 | hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ParseFilter.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.filter;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.nio.ByteBuffer;
import java.nio.charset.CharacterCodingException;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collections;
import java.util.EmptyStackException;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.Stack;
import java.util.regex.Pattern;
import org.apache.hadoop.hbase.CompareOperator;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* This class allows a user to specify a filter via a string The string is parsed using the methods
* of this class and a filter object is constructed. This filter object is then wrapped in a scanner
* object which is then returned
* <p>
* This class addresses the HBASE-4176 JIRA. More documentation on this Filter Language can be found
* at: https://issues.apache.org/jira/browse/HBASE-4176
*/
@InterfaceAudience.Public
public class ParseFilter {
private static final Logger LOG = LoggerFactory.getLogger(ParseFilter.class);
private static HashMap<ByteBuffer, Integer> operatorPrecedenceHashMap;
private static HashMap<String, String> filterHashMap;
  static {
    // Registers all the filter supported by the Filter Language
    // Maps the user-facing filter name to its fully-qualified class name, which
    // parseSimpleFilterExpression later loads reflectively.
    filterHashMap = new HashMap<>();
    filterHashMap.put("KeyOnlyFilter", ParseConstants.FILTER_PACKAGE + "." + "KeyOnlyFilter");
    filterHashMap.put("FirstKeyOnlyFilter",
      ParseConstants.FILTER_PACKAGE + "." + "FirstKeyOnlyFilter");
    filterHashMap.put("PrefixFilter", ParseConstants.FILTER_PACKAGE + "." + "PrefixFilter");
    filterHashMap.put("ColumnPrefixFilter",
      ParseConstants.FILTER_PACKAGE + "." + "ColumnPrefixFilter");
    filterHashMap.put("MultipleColumnPrefixFilter",
      ParseConstants.FILTER_PACKAGE + "." + "MultipleColumnPrefixFilter");
    filterHashMap.put("ColumnCountGetFilter",
      ParseConstants.FILTER_PACKAGE + "." + "ColumnCountGetFilter");
    filterHashMap.put("PageFilter", ParseConstants.FILTER_PACKAGE + "." + "PageFilter");
    filterHashMap.put("ColumnPaginationFilter",
      ParseConstants.FILTER_PACKAGE + "." + "ColumnPaginationFilter");
    filterHashMap.put("InclusiveStopFilter",
      ParseConstants.FILTER_PACKAGE + "." + "InclusiveStopFilter");
    filterHashMap.put("TimestampsFilter", ParseConstants.FILTER_PACKAGE + "." + "TimestampsFilter");
    filterHashMap.put("RowFilter", ParseConstants.FILTER_PACKAGE + "." + "RowFilter");
    filterHashMap.put("FamilyFilter", ParseConstants.FILTER_PACKAGE + "." + "FamilyFilter");
    filterHashMap.put("QualifierFilter", ParseConstants.FILTER_PACKAGE + "." + "QualifierFilter");
    filterHashMap.put("ValueFilter", ParseConstants.FILTER_PACKAGE + "." + "ValueFilter");
    filterHashMap.put("ColumnRangeFilter",
      ParseConstants.FILTER_PACKAGE + "." + "ColumnRangeFilter");
    filterHashMap.put("SingleColumnValueFilter",
      ParseConstants.FILTER_PACKAGE + "." + "SingleColumnValueFilter");
    filterHashMap.put("SingleColumnValueExcludeFilter",
      ParseConstants.FILTER_PACKAGE + "." + "SingleColumnValueExcludeFilter");
    filterHashMap.put("DependentColumnFilter",
      ParseConstants.FILTER_PACKAGE + "." + "DependentColumnFilter");
    filterHashMap.put("ColumnValueFilter",
      ParseConstants.FILTER_PACKAGE + "." + "ColumnValueFilter");

    // Creates the operatorPrecedenceHashMap
    // Lower value = higher precedence: SKIP/WHILE bind tightest, then AND, then OR.
    // reduce() consults this map to decide when to pop operators off the stack.
    operatorPrecedenceHashMap = new HashMap<>();
    operatorPrecedenceHashMap.put(ParseConstants.SKIP_BUFFER, 1);
    operatorPrecedenceHashMap.put(ParseConstants.WHILE_BUFFER, 1);
    operatorPrecedenceHashMap.put(ParseConstants.AND_BUFFER, 2);
    operatorPrecedenceHashMap.put(ParseConstants.OR_BUFFER, 3);
  }
  /**
   * Parses the filterString and constructs a filter using it
   * <p>
   * @param filterString filter string given by the user
   * @return filter object we constructed
   */
  public Filter parseFilterString(String filterString) throws CharacterCodingException {
    // Convenience overload: encode to bytes and delegate to the byte[] parser.
    return parseFilterString(Bytes.toBytes(filterString));
  }
  /**
   * Parses the filterString and constructs a filter using it
   * <p>
   * Implements an operator-precedence (shunting-yard style) parse: simple filter expressions are
   * pushed onto a filter stack, while operators and parentheses are managed on an operator stack
   * and reduced into composite filters as precedence dictates.
   * <p>
   * @param filterStringAsByteArray filter string given by the user
   * @return filter object we constructed
   */
  public Filter parseFilterString(byte[] filterStringAsByteArray) throws CharacterCodingException {
    // stack for the operators and parenthesis
    Stack<ByteBuffer> operatorStack = new Stack<>();
    // stack for the filter objects
    Stack<Filter> filterStack = new Stack<>();

    Filter filter = null;
    for (int i = 0; i < filterStringAsByteArray.length; i++) {
      if (filterStringAsByteArray[i] == ParseConstants.LPAREN) {
        // LPAREN found
        operatorStack.push(ParseConstants.LPAREN_BUFFER);
      } else if (
        filterStringAsByteArray[i] == ParseConstants.WHITESPACE
          || filterStringAsByteArray[i] == ParseConstants.TAB
      ) {
        // WHITESPACE or TAB found
        continue;
      } else if (checkForOr(filterStringAsByteArray, i)) {
        // OR found: reduce any higher-priority operators already on the stack, then push.
        i += ParseConstants.OR_ARRAY.length - 1;
        reduce(operatorStack, filterStack, ParseConstants.OR_BUFFER);
        operatorStack.push(ParseConstants.OR_BUFFER);
      } else if (checkForAnd(filterStringAsByteArray, i)) {
        // AND found
        i += ParseConstants.AND_ARRAY.length - 1;
        reduce(operatorStack, filterStack, ParseConstants.AND_BUFFER);
        operatorStack.push(ParseConstants.AND_BUFFER);
      } else if (checkForSkip(filterStringAsByteArray, i)) {
        // SKIP found
        i += ParseConstants.SKIP_ARRAY.length - 1;
        reduce(operatorStack, filterStack, ParseConstants.SKIP_BUFFER);
        operatorStack.push(ParseConstants.SKIP_BUFFER);
      } else if (checkForWhile(filterStringAsByteArray, i)) {
        // WHILE found
        i += ParseConstants.WHILE_ARRAY.length - 1;
        reduce(operatorStack, filterStack, ParseConstants.WHILE_BUFFER);
        operatorStack.push(ParseConstants.WHILE_BUFFER);
      } else if (filterStringAsByteArray[i] == ParseConstants.RPAREN) {
        // RPAREN found: pop-and-combine operators until the matching LPAREN is consumed.
        if (operatorStack.empty()) {
          throw new IllegalArgumentException("Mismatched parenthesis");
        }
        ByteBuffer argumentOnTopOfStack = operatorStack.peek();
        if (argumentOnTopOfStack.equals(ParseConstants.LPAREN_BUFFER)) {
          operatorStack.pop();
          continue;
        }
        while (!argumentOnTopOfStack.equals(ParseConstants.LPAREN_BUFFER)) {
          filterStack.push(popArguments(operatorStack, filterStack));
          if (operatorStack.empty()) {
            throw new IllegalArgumentException("Mismatched parenthesis");
          }
          argumentOnTopOfStack = operatorStack.pop();
        }
      } else {
        // SimpleFilterExpression found: consume FilterName('arg', ...) and push its filter.
        byte[] filterSimpleExpression = extractFilterSimpleExpression(filterStringAsByteArray, i);
        i += (filterSimpleExpression.length - 1);
        filter = parseSimpleFilterExpression(filterSimpleExpression);
        filterStack.push(filter);
      }
    }

    // Finished parsing filterString: drain any remaining operators into composite filters.
    while (!operatorStack.empty()) {
      filterStack.push(popArguments(operatorStack, filterStack));
    }
    if (filterStack.empty()) {
      throw new IllegalArgumentException("Incorrect Filter String");
    }
    filter = filterStack.pop();
    // Exactly one filter must remain; leftovers mean the expression was malformed.
    if (!filterStack.empty()) {
      throw new IllegalArgumentException("Incorrect Filter String");
    }
    return filter;
  }
  /**
   * Extracts a simple filter expression from the filter string given by the user
   * <p>
   * A simpleFilterExpression is of the form: FilterName('arg', 'arg', 'arg') The user given filter
   * string can have many simpleFilterExpressions combined using operators.
   * <p>
   * This function extracts a simpleFilterExpression from the larger filterString given the start
   * offset of the simpler expression
   * <p>
   * @param filterStringAsByteArray filter string given by the user
   * @param filterExpressionStartOffset start index of the simple filter expression
   * @return byte array containing the simple filter expression
   */
  public byte[] extractFilterSimpleExpression(byte[] filterStringAsByteArray,
    int filterExpressionStartOffset) throws CharacterCodingException {
    // Tracks unescaped single quotes so a ')' inside a quoted argument is not mistaken for the
    // closing parenthesis; an even quoteCount means we are outside any quoted argument.
    int quoteCount = 0;
    for (int i = filterExpressionStartOffset; i < filterStringAsByteArray.length; i++) {
      if (filterStringAsByteArray[i] == ParseConstants.SINGLE_QUOTE) {
        if (isQuoteUnescaped(filterStringAsByteArray, i)) {
          quoteCount++;
        } else {
          // To skip the next quote that has been escaped
          i++;
        }
      }
      if (filterStringAsByteArray[i] == ParseConstants.RPAREN && (quoteCount % 2) == 0) {
        // Closing ')' found outside quotes: copy out the full expression, inclusive of ')'.
        byte[] filterSimpleExpression = new byte[i - filterExpressionStartOffset + 1];
        Bytes.putBytes(filterSimpleExpression, 0, filterStringAsByteArray,
          filterExpressionStartOffset, i - filterExpressionStartOffset + 1);
        return filterSimpleExpression;
      }
    }
    throw new IllegalArgumentException("Incorrect Filter String");
  }
/**
* Constructs a filter object given a simple filter expression
* <p>
* @param filterStringAsByteArray filter string given by the user
* @return filter object we constructed
*/
public Filter parseSimpleFilterExpression(byte[] filterStringAsByteArray)
throws CharacterCodingException {
String filterName = Bytes.toString(getFilterName(filterStringAsByteArray));
ArrayList<byte[]> filterArguments = getFilterArguments(filterStringAsByteArray);
if (!filterHashMap.containsKey(filterName)) {
throw new IllegalArgumentException("Filter Name " + filterName + " not supported");
}
filterName = filterHashMap.get(filterName);
final String methodName = "createFilterFromArguments";
try {
Class<?> c = Class.forName(filterName);
Class<?>[] argTypes = new Class[] { ArrayList.class };
Method m = c.getDeclaredMethod(methodName, argTypes);
return (Filter) m.invoke(null, filterArguments);
} catch (ClassNotFoundException e) {
LOG.error("Could not find class {}", filterName, e);
} catch (NoSuchMethodException e) {
LOG.error("Could not find method {} in {}", methodName, filterName, e);
} catch (IllegalAccessException e) {
LOG.error("Unable to access specified class {}", filterName, e);
} catch (InvocationTargetException e) {
LOG.error("Method {} threw an exception for {}", methodName, filterName, e);
}
throw new IllegalArgumentException(
"Incorrect filter string " + new String(filterStringAsByteArray, StandardCharsets.UTF_8));
}
/**
* Returns the filter name given a simple filter expression
* <p>
* @param filterStringAsByteArray a simple filter expression
* @return name of filter in the simple filter expression
*/
public static byte[] getFilterName(byte[] filterStringAsByteArray) {
int filterNameStartIndex = 0;
int filterNameEndIndex = 0;
for (int i = filterNameStartIndex; i < filterStringAsByteArray.length; i++) {
if (
filterStringAsByteArray[i] == ParseConstants.LPAREN
|| filterStringAsByteArray[i] == ParseConstants.WHITESPACE
) {
filterNameEndIndex = i;
break;
}
}
if (filterNameEndIndex == 0) {
throw new IllegalArgumentException("Incorrect Filter Name");
}
byte[] filterName = new byte[filterNameEndIndex - filterNameStartIndex];
Bytes.putBytes(filterName, 0, filterStringAsByteArray, 0,
filterNameEndIndex - filterNameStartIndex);
return filterName;
}
  /**
   * Returns the arguments of the filter from the filter string
   * <p>
   * Arguments are either single-quoted strings (with '' as an escaped quote) or bare tokens
   * (integers, booleans, comparison operators). Quoted arguments keep their surrounding quotes
   * until unescaped by createUnescapdArgument.
   * <p>
   * @param filterStringAsByteArray filter string given by the user
   * @return an ArrayList containing the arguments of the filter in the filter string
   */
  public static ArrayList<byte[]> getFilterArguments(byte[] filterStringAsByteArray) {
    int argumentListStartIndex = Bytes.searchDelimiterIndex(filterStringAsByteArray, 0,
      filterStringAsByteArray.length, ParseConstants.LPAREN);
    if (argumentListStartIndex == -1) {
      throw new IllegalArgumentException("Incorrect argument list");
    }

    int argumentStartIndex = 0;
    int argumentEndIndex = 0;
    ArrayList<byte[]> filterArguments = new ArrayList<>();

    // Walk the region after '('; i is advanced past each consumed argument by the inner loops.
    for (int i = argumentListStartIndex + 1; i < filterStringAsByteArray.length; i++) {

      if (
        filterStringAsByteArray[i] == ParseConstants.WHITESPACE
          || filterStringAsByteArray[i] == ParseConstants.COMMA
          || filterStringAsByteArray[i] == ParseConstants.RPAREN
      ) {
        // Separator between arguments (or end of list); nothing to collect here.
        continue;
      }

      // The argument is in single quotes - for example 'prefix'
      if (filterStringAsByteArray[i] == ParseConstants.SINGLE_QUOTE) {
        argumentStartIndex = i;
        for (int j = argumentStartIndex + 1; j < filterStringAsByteArray.length; j++) {
          if (filterStringAsByteArray[j] == ParseConstants.SINGLE_QUOTE) {
            if (isQuoteUnescaped(filterStringAsByteArray, j)) {
              // Unescaped closing quote: extract the argument and resume scanning after it.
              argumentEndIndex = j;
              i = j + 1;
              byte[] filterArgument = createUnescapdArgument(filterStringAsByteArray,
                argumentStartIndex, argumentEndIndex);
              filterArguments.add(filterArgument);
              break;
            } else {
              // To jump over the second escaped quote
              j++;
            }
          } else if (j == filterStringAsByteArray.length - 1) {
            // Ran off the end without a closing quote.
            throw new IllegalArgumentException("Incorrect argument list");
          }
        }
      } else {
        // The argument is an integer, boolean, comparison operator like <, >, != etc
        argumentStartIndex = i;
        for (int j = argumentStartIndex; j < filterStringAsByteArray.length; j++) {
          if (
            filterStringAsByteArray[j] == ParseConstants.WHITESPACE
              || filterStringAsByteArray[j] == ParseConstants.COMMA
              || filterStringAsByteArray[j] == ParseConstants.RPAREN
          ) {
            // Token ends just before the delimiter; copy it out verbatim.
            argumentEndIndex = j - 1;
            i = j;
            byte[] filterArgument = new byte[argumentEndIndex - argumentStartIndex + 1];
            Bytes.putBytes(filterArgument, 0, filterStringAsByteArray, argumentStartIndex,
              argumentEndIndex - argumentStartIndex + 1);
            filterArguments.add(filterArgument);
            break;
          } else if (j == filterStringAsByteArray.length - 1) {
            // Token never terminated by a delimiter (e.g. missing ')').
            throw new IllegalArgumentException("Incorrect argument list");
          }
        }
      }
    }
    return filterArguments;
  }
/**
* This function is called while parsing the filterString and an operator is parsed
* <p>
* @param operatorStack the stack containing the operators and parenthesis
* @param filterStack the stack containing the filters
* @param operator the operator found while parsing the filterString
*/
public void reduce(Stack<ByteBuffer> operatorStack, Stack<Filter> filterStack,
ByteBuffer operator) {
while (
!operatorStack.empty() && !ParseConstants.LPAREN_BUFFER.equals(operatorStack.peek())
&& hasHigherPriority(operatorStack.peek(), operator)
) {
filterStack.push(popArguments(operatorStack, filterStack));
}
}
  /**
   * Pops an argument from the operator stack and the number of arguments required by the operator
   * from the filterStack and evaluates them
   * <p>
   * @param operatorStack the stack containing the operators
   * @param filterStack the stack containing the filters
   * @return the evaluated filter
   */
  public static Filter popArguments(Stack<ByteBuffer> operatorStack, Stack<Filter> filterStack) {
    ByteBuffer argumentOnTopOfStack = operatorStack.peek();
    if (argumentOnTopOfStack.equals(ParseConstants.OR_BUFFER)) {
      // The top of the stack is an OR
      try {
        // Consume the whole run of consecutive ORs so that a OR b OR c collapses into one
        // MUST_PASS_ONE list rather than a nested pair of lists.
        ArrayList<Filter> listOfFilters = new ArrayList<>();
        while (!operatorStack.empty() && operatorStack.peek().equals(ParseConstants.OR_BUFFER)) {
          Filter filter = filterStack.pop();
          listOfFilters.add(0, filter);
          operatorStack.pop();
        }
        // A run of n OR operators has n+1 operands; pop the final one.
        Filter filter = filterStack.pop();
        listOfFilters.add(0, filter);
        Filter orFilter = new FilterList(FilterList.Operator.MUST_PASS_ONE, listOfFilters);
        return orFilter;
      } catch (EmptyStackException e) {
        throw new IllegalArgumentException("Incorrect input string - an OR needs two filters");
      }
    } else if (argumentOnTopOfStack.equals(ParseConstants.AND_BUFFER)) {
      // The top of the stack is an AND
      try {
        // Same run-collapsing as the OR branch, but into a MUST_PASS_ALL list.
        ArrayList<Filter> listOfFilters = new ArrayList<>();
        while (!operatorStack.empty() && operatorStack.peek().equals(ParseConstants.AND_BUFFER)) {
          Filter filter = filterStack.pop();
          listOfFilters.add(0, filter);
          operatorStack.pop();
        }
        Filter filter = filterStack.pop();
        listOfFilters.add(0, filter);
        Filter andFilter = new FilterList(FilterList.Operator.MUST_PASS_ALL, listOfFilters);
        return andFilter;
      } catch (EmptyStackException e) {
        throw new IllegalArgumentException("Incorrect input string - an AND needs two filters");
      }
    } else if (argumentOnTopOfStack.equals(ParseConstants.SKIP_BUFFER)) {
      // The top of the stack is a SKIP
      try {
        Filter wrappedFilter = filterStack.pop();
        Filter skipFilter = new SkipFilter(wrappedFilter);
        operatorStack.pop();
        return skipFilter;
      } catch (EmptyStackException e) {
        throw new IllegalArgumentException("Incorrect input string - a SKIP wraps a filter");
      }
    } else if (argumentOnTopOfStack.equals(ParseConstants.WHILE_BUFFER)) {
      // The top of the stack is a WHILE
      try {
        Filter wrappedFilter = filterStack.pop();
        Filter whileMatchFilter = new WhileMatchFilter(wrappedFilter);
        operatorStack.pop();
        return whileMatchFilter;
      } catch (EmptyStackException e) {
        throw new IllegalArgumentException("Incorrect input string - a WHILE wraps a filter");
      }
    } else if (argumentOnTopOfStack.equals(ParseConstants.LPAREN_BUFFER)) {
      // The top of the stack is a LPAREN
      try {
        // A '(' with one finished filter above it on the filter stack: discard the paren and
        // return the filter unchanged.
        Filter filter = filterStack.pop();
        operatorStack.pop();
        return filter;
      } catch (EmptyStackException e) {
        throw new IllegalArgumentException("Incorrect Filter String");
      }
    } else {
      throw new IllegalArgumentException("Incorrect arguments on operatorStack");
    }
  }
/**
* Returns which operator has higher precedence
* <p>
* If a has higher precedence than b, it returns true If they have the same precedence, it returns
* false
*/
public boolean hasHigherPriority(ByteBuffer a, ByteBuffer b) {
if ((operatorPrecedenceHashMap.get(a) - operatorPrecedenceHashMap.get(b)) < 0) {
return true;
}
return false;
}
  /**
   * Removes the single quote escaping a single quote - thus it returns an unescaped argument
   * <p>
   * NOTE(review): the method name has a typo ("Unescapd") but is public API, so it is kept.
   * {@code argumentStartIndex} and {@code argumentEndIndex} point at the surrounding single
   * quotes; the result keeps those outer quotes but collapses every escaped pair {@code ''} in
   * between into a single quote.
   * @param filterStringAsByteArray filter string given by user
   * @param argumentStartIndex start index of the argument
   * @param argumentEndIndex end index of the argument
   * @return returns an unescaped argument
   */
  public static byte[] createUnescapdArgument(byte[] filterStringAsByteArray,
    int argumentStartIndex, int argumentEndIndex) {
    // First pass: compute the output length. Starts at 2 for the surrounding quotes; each
    // escaped pair '' contributes one output byte, every other byte contributes itself.
    int unescapedArgumentLength = 2;
    for (int i = argumentStartIndex + 1; i <= argumentEndIndex - 1; i++) {
      unescapedArgumentLength++;
      if (
        filterStringAsByteArray[i] == ParseConstants.SINGLE_QUOTE && i != (argumentEndIndex - 1)
          && filterStringAsByteArray[i + 1] == ParseConstants.SINGLE_QUOTE
      ) {
        i++; // skip the second quote of the escaped pair
        continue;
      }
    }
    // Second pass: copy bytes, emitting a single quote for each escaped pair.
    byte[] unescapedArgument = new byte[unescapedArgumentLength];
    int count = 1;
    unescapedArgument[0] = '\'';
    for (int i = argumentStartIndex + 1; i <= argumentEndIndex - 1; i++) {
      if (
        filterStringAsByteArray[i] == ParseConstants.SINGLE_QUOTE && i != (argumentEndIndex - 1)
          && filterStringAsByteArray[i + 1] == ParseConstants.SINGLE_QUOTE
      ) {
        unescapedArgument[count++] = filterStringAsByteArray[i + 1];
        i++;
      } else {
        unescapedArgument[count++] = filterStringAsByteArray[i];
      }
    }
    unescapedArgument[unescapedArgumentLength - 1] = '\'';
    return unescapedArgument;
  }
/**
* Checks if the current index of filter string we are on is the beginning of the keyword 'OR'
* <p>
* @param filterStringAsByteArray filter string given by the user
* @param indexOfOr index at which an 'O' was read
* @return true if the keyword 'OR' is at the current index
*/
public static boolean checkForOr(byte[] filterStringAsByteArray, int indexOfOr)
throws CharacterCodingException, ArrayIndexOutOfBoundsException {
try {
if (
filterStringAsByteArray[indexOfOr] == ParseConstants.O
&& filterStringAsByteArray[indexOfOr + 1] == ParseConstants.R
&& (filterStringAsByteArray[indexOfOr - 1] == ParseConstants.WHITESPACE
|| filterStringAsByteArray[indexOfOr - 1] == ParseConstants.RPAREN)
&& (filterStringAsByteArray[indexOfOr + 2] == ParseConstants.WHITESPACE
|| filterStringAsByteArray[indexOfOr + 2] == ParseConstants.LPAREN)
) {
return true;
} else {
return false;
}
} catch (ArrayIndexOutOfBoundsException e) {
return false;
}
}
/**
* Checks if the current index of filter string we are on is the beginning of the keyword 'AND'
* <p>
* @param filterStringAsByteArray filter string given by the user
* @param indexOfAnd index at which an 'A' was read
* @return true if the keyword 'AND' is at the current index
*/
public static boolean checkForAnd(byte[] filterStringAsByteArray, int indexOfAnd)
throws CharacterCodingException {
try {
if (
filterStringAsByteArray[indexOfAnd] == ParseConstants.A
&& filterStringAsByteArray[indexOfAnd + 1] == ParseConstants.N
&& filterStringAsByteArray[indexOfAnd + 2] == ParseConstants.D
&& (filterStringAsByteArray[indexOfAnd - 1] == ParseConstants.WHITESPACE
|| filterStringAsByteArray[indexOfAnd - 1] == ParseConstants.RPAREN)
&& (filterStringAsByteArray[indexOfAnd + 3] == ParseConstants.WHITESPACE
|| filterStringAsByteArray[indexOfAnd + 3] == ParseConstants.LPAREN)
) {
return true;
} else {
return false;
}
} catch (ArrayIndexOutOfBoundsException e) {
return false;
}
}
/**
* Checks if the current index of filter string we are on is the beginning of the keyword 'SKIP'
* <p>
* @param filterStringAsByteArray filter string given by the user
* @param indexOfSkip index at which an 'S' was read
* @return true if the keyword 'SKIP' is at the current index
*/
public static boolean checkForSkip(byte[] filterStringAsByteArray, int indexOfSkip)
throws CharacterCodingException {
try {
if (
filterStringAsByteArray[indexOfSkip] == ParseConstants.S
&& filterStringAsByteArray[indexOfSkip + 1] == ParseConstants.K
&& filterStringAsByteArray[indexOfSkip + 2] == ParseConstants.I
&& filterStringAsByteArray[indexOfSkip + 3] == ParseConstants.P
&& (indexOfSkip == 0
|| filterStringAsByteArray[indexOfSkip - 1] == ParseConstants.WHITESPACE
|| filterStringAsByteArray[indexOfSkip - 1] == ParseConstants.RPAREN
|| filterStringAsByteArray[indexOfSkip - 1] == ParseConstants.LPAREN)
&& (filterStringAsByteArray[indexOfSkip + 4] == ParseConstants.WHITESPACE
|| filterStringAsByteArray[indexOfSkip + 4] == ParseConstants.LPAREN)
) {
return true;
} else {
return false;
}
} catch (ArrayIndexOutOfBoundsException e) {
return false;
}
}
/**
* Checks if the current index of filter string we are on is the beginning of the keyword 'WHILE'
* <p>
* @param filterStringAsByteArray filter string given by the user
* @param indexOfWhile index at which an 'W' was read
* @return true if the keyword 'WHILE' is at the current index
*/
public static boolean checkForWhile(byte[] filterStringAsByteArray, int indexOfWhile)
throws CharacterCodingException {
try {
if (
filterStringAsByteArray[indexOfWhile] == ParseConstants.W
&& filterStringAsByteArray[indexOfWhile + 1] == ParseConstants.H
&& filterStringAsByteArray[indexOfWhile + 2] == ParseConstants.I
&& filterStringAsByteArray[indexOfWhile + 3] == ParseConstants.L
&& filterStringAsByteArray[indexOfWhile + 4] == ParseConstants.E
&& (indexOfWhile == 0
|| filterStringAsByteArray[indexOfWhile - 1] == ParseConstants.WHITESPACE
|| filterStringAsByteArray[indexOfWhile - 1] == ParseConstants.RPAREN
|| filterStringAsByteArray[indexOfWhile - 1] == ParseConstants.LPAREN)
&& (filterStringAsByteArray[indexOfWhile + 5] == ParseConstants.WHITESPACE
|| filterStringAsByteArray[indexOfWhile + 5] == ParseConstants.LPAREN)
) {
return true;
} else {
return false;
}
} catch (ArrayIndexOutOfBoundsException e) {
return false;
}
}
/**
* Returns a boolean indicating whether the quote was escaped or not
* <p>
* @param array byte array in which the quote was found
* @param quoteIndex index of the single quote
* @return returns true if the quote was unescaped
*/
public static boolean isQuoteUnescaped(byte[] array, int quoteIndex) {
if (array == null) {
throw new IllegalArgumentException("isQuoteUnescaped called with a null array");
}
if (quoteIndex == array.length - 1 || array[quoteIndex + 1] != ParseConstants.SINGLE_QUOTE) {
return true;
} else {
return false;
}
}
/**
* Takes a quoted byte array and converts it into an unquoted byte array For example: given a byte
* array representing 'abc', it returns a byte array representing abc
* <p>
* @param quotedByteArray the quoted byte array
* @return Unquoted byte array
*/
public static byte[] removeQuotesFromByteArray(byte[] quotedByteArray) {
if (
quotedByteArray == null || quotedByteArray.length < 2
|| quotedByteArray[0] != ParseConstants.SINGLE_QUOTE
|| quotedByteArray[quotedByteArray.length - 1] != ParseConstants.SINGLE_QUOTE
) {
throw new IllegalArgumentException("removeQuotesFromByteArray needs a quoted byte array");
} else {
byte[] targetString = new byte[quotedByteArray.length - 2];
Bytes.putBytes(targetString, 0, quotedByteArray, 1, quotedByteArray.length - 2);
return targetString;
}
}
/**
* Converts an int expressed in a byte array to an actual int
* <p>
* This doesn't use Bytes.toInt because that assumes that there will be {@link Bytes#SIZEOF_INT}
* bytes available.
* <p>
* @param numberAsByteArray the int value expressed as a byte array
* @return the int value
*/
public static int convertByteArrayToInt(byte[] numberAsByteArray) {
long tempResult = ParseFilter.convertByteArrayToLong(numberAsByteArray);
if (tempResult > Integer.MAX_VALUE) {
throw new IllegalArgumentException("Integer Argument too large");
} else if (tempResult < Integer.MIN_VALUE) {
throw new IllegalArgumentException("Integer Argument too small");
}
int result = (int) tempResult;
return result;
}
/**
* Converts a long expressed in a byte array to an actual long
* <p>
* This doesn't use Bytes.toLong because that assumes that there will be {@link Bytes#SIZEOF_INT}
* bytes available.
* <p>
* @param numberAsByteArray the long value expressed as a byte array
* @return the long value
*/
public static long convertByteArrayToLong(byte[] numberAsByteArray) {
if (numberAsByteArray == null) {
throw new IllegalArgumentException("convertByteArrayToLong called with a null array");
}
int i = 0;
long result = 0;
boolean isNegative = false;
if (numberAsByteArray[i] == ParseConstants.MINUS_SIGN) {
i++;
isNegative = true;
}
while (i != numberAsByteArray.length) {
if (
numberAsByteArray[i] < ParseConstants.ZERO || numberAsByteArray[i] > ParseConstants.NINE
) {
throw new IllegalArgumentException("Byte Array should only contain digits");
}
result = result * 10 + (numberAsByteArray[i] - ParseConstants.ZERO);
if (result < 0) {
throw new IllegalArgumentException("Long Argument too large");
}
i++;
}
if (isNegative) {
return -result;
} else {
return result;
}
}
/**
* Converts a boolean expressed in a byte array to an actual boolean
* <p>
* This doesn't used Bytes.toBoolean because Bytes.toBoolean(byte []) assumes that 1 stands for
* true and 0 for false. Here, the byte array representing "true" and "false" is parsed
* <p>
* @param booleanAsByteArray the boolean value expressed as a byte array
* @return the boolean value
*/
public static boolean convertByteArrayToBoolean(byte[] booleanAsByteArray) {
if (booleanAsByteArray == null) {
throw new IllegalArgumentException("convertByteArrayToBoolean called with a null array");
}
if (
booleanAsByteArray.length == 4
&& (booleanAsByteArray[0] == 't' || booleanAsByteArray[0] == 'T')
&& (booleanAsByteArray[1] == 'r' || booleanAsByteArray[1] == 'R')
&& (booleanAsByteArray[2] == 'u' || booleanAsByteArray[2] == 'U')
&& (booleanAsByteArray[3] == 'e' || booleanAsByteArray[3] == 'E')
) {
return true;
} else if (
booleanAsByteArray.length == 5
&& (booleanAsByteArray[0] == 'f' || booleanAsByteArray[0] == 'F')
&& (booleanAsByteArray[1] == 'a' || booleanAsByteArray[1] == 'A')
&& (booleanAsByteArray[2] == 'l' || booleanAsByteArray[2] == 'L')
&& (booleanAsByteArray[3] == 's' || booleanAsByteArray[3] == 'S')
&& (booleanAsByteArray[4] == 'e' || booleanAsByteArray[4] == 'E')
) {
return false;
} else {
throw new IllegalArgumentException("Incorrect Boolean Expression");
}
}
/**
* Takes a compareOperator symbol as a byte array and returns the corresponding CompareOperator
* @param compareOpAsByteArray the comparatorOperator symbol as a byte array
* @return the Compare Operator
*/
public static CompareOperator createCompareOperator(byte[] compareOpAsByteArray) {
ByteBuffer compareOp = ByteBuffer.wrap(compareOpAsByteArray);
if (compareOp.equals(ParseConstants.LESS_THAN_BUFFER)) return CompareOperator.LESS;
else if (compareOp.equals(ParseConstants.LESS_THAN_OR_EQUAL_TO_BUFFER))
return CompareOperator.LESS_OR_EQUAL;
else if (compareOp.equals(ParseConstants.GREATER_THAN_BUFFER)) return CompareOperator.GREATER;
else if (compareOp.equals(ParseConstants.GREATER_THAN_OR_EQUAL_TO_BUFFER))
return CompareOperator.GREATER_OR_EQUAL;
else if (compareOp.equals(ParseConstants.NOT_EQUAL_TO_BUFFER)) return CompareOperator.NOT_EQUAL;
else if (compareOp.equals(ParseConstants.EQUAL_TO_BUFFER)) return CompareOperator.EQUAL;
else throw new IllegalArgumentException("Invalid compare operator");
}
/**
* Parses a comparator of the form comparatorType:comparatorValue form and returns a comparator
* <p>
* @param comparator the comparator in the form comparatorType:comparatorValue
* @return the parsed comparator
*/
public static ByteArrayComparable createComparator(byte[] comparator) {
if (comparator == null) throw new IllegalArgumentException("Incorrect Comparator");
byte[][] parsedComparator = ParseFilter.parseComparator(comparator);
byte[] comparatorType = parsedComparator[0];
byte[] comparatorValue = parsedComparator[1];
if (Bytes.equals(comparatorType, ParseConstants.binaryType))
return new BinaryComparator(comparatorValue);
else if (Bytes.equals(comparatorType, ParseConstants.binaryPrefixType))
return new BinaryPrefixComparator(comparatorValue);
else if (Bytes.equals(comparatorType, ParseConstants.regexStringType))
return new RegexStringComparator(new String(comparatorValue, StandardCharsets.UTF_8));
else if (Bytes.equals(comparatorType, ParseConstants.regexStringNoCaseType))
return new RegexStringComparator(new String(comparatorValue, StandardCharsets.UTF_8),
Pattern.CASE_INSENSITIVE | Pattern.DOTALL);
else if (Bytes.equals(comparatorType, ParseConstants.substringType))
return new SubstringComparator(new String(comparatorValue, StandardCharsets.UTF_8));
else throw new IllegalArgumentException("Incorrect comparatorType");
}
/**
* Splits a column in comparatorType:comparatorValue form into separate byte arrays
* <p>
* @param comparator the comparator
* @return the parsed arguments of the comparator as a 2D byte array
*/
public static byte[][] parseComparator(byte[] comparator) {
final int index =
Bytes.searchDelimiterIndex(comparator, 0, comparator.length, ParseConstants.COLON);
if (index == -1) {
throw new IllegalArgumentException("Incorrect comparator");
}
byte[][] result = new byte[2][0];
result[0] = new byte[index];
System.arraycopy(comparator, 0, result[0], 0, index);
final int len = comparator.length - (index + 1);
result[1] = new byte[len];
System.arraycopy(comparator, index + 1, result[1], 0, len);
return result;
}
/**
* Return a Set of filters supported by the Filter Language
*/
public Set<String> getSupportedFilters() {
return filterHashMap.keySet();
}
  /**
   * Returns all known filters
   * @return an unmodifiable map of filters
   */
  public static Map<String, String> getAllFilters() {
    // Unmodifiable *view*, not a snapshot: callers cannot mutate it, but it reflects later
    // registerFilter calls.
    return Collections.unmodifiableMap(filterHashMap);
  }
/**
* Register a new filter with the parser. If the filter is already registered, an
* IllegalArgumentException will be thrown.
* @param name a name for the filter
* @param filterClass fully qualified class name
*/
public static void registerFilter(String name, String filterClass) {
if (LOG.isInfoEnabled()) LOG.info("Registering new filter " + name);
filterHashMap.put(name, filterClass);
}
}
|
googleapis/sdk-platform-java | 35,550 | java-iam/proto-google-iam-v2beta/src/main/java/com/google/iam/v2beta/ListPoliciesResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/iam/v2beta/policy.proto
// Protobuf Java Version: 3.25.8
package com.google.iam.v2beta;
/**
*
*
* <pre>
* Response message for `ListPolicies`.
* </pre>
*
* Protobuf type {@code google.iam.v2beta.ListPoliciesResponse}
*/
public final class ListPoliciesResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.iam.v2beta.ListPoliciesResponse)
ListPoliciesResponseOrBuilder {
  // NOTE(review): protoc-generated code (see the "DO NOT EDIT" file header); comments only,
  // no functional edits — changes would be lost on the next regeneration.
  private static final long serialVersionUID = 0L;
  // Use ListPoliciesResponse.newBuilder() to construct.
  private ListPoliciesResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Default instance: empty policies list, empty page token.
  private ListPoliciesResponse() {
    policies_ = java.util.Collections.emptyList();
    nextPageToken_ = "";
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListPoliciesResponse();
  }
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.iam.v2beta.PolicyProto
        .internal_static_google_iam_v2beta_ListPoliciesResponse_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.iam.v2beta.PolicyProto
        .internal_static_google_iam_v2beta_ListPoliciesResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.iam.v2beta.ListPoliciesResponse.class,
            com.google.iam.v2beta.ListPoliciesResponse.Builder.class);
  }
  // NOTE(review): protoc-generated field storage and accessors; comments only, no functional
  // edits — changes would be lost on the next regeneration.
  public static final int POLICIES_FIELD_NUMBER = 1;
  @SuppressWarnings("serial")
  private java.util.List<com.google.iam.v2beta.Policy> policies_;
  /**
   *
   *
   * <pre>
   * Metadata for the policies that are attached to the resource.
   * </pre>
   *
   * <code>repeated .google.iam.v2beta.Policy policies = 1;</code>
   */
  @java.lang.Override
  public java.util.List<com.google.iam.v2beta.Policy> getPoliciesList() {
    return policies_;
  }
  /**
   *
   *
   * <pre>
   * Metadata for the policies that are attached to the resource.
   * </pre>
   *
   * <code>repeated .google.iam.v2beta.Policy policies = 1;</code>
   */
  @java.lang.Override
  public java.util.List<? extends com.google.iam.v2beta.PolicyOrBuilder>
      getPoliciesOrBuilderList() {
    return policies_;
  }
  /**
   *
   *
   * <pre>
   * Metadata for the policies that are attached to the resource.
   * </pre>
   *
   * <code>repeated .google.iam.v2beta.Policy policies = 1;</code>
   */
  @java.lang.Override
  public int getPoliciesCount() {
    return policies_.size();
  }
  /**
   *
   *
   * <pre>
   * Metadata for the policies that are attached to the resource.
   * </pre>
   *
   * <code>repeated .google.iam.v2beta.Policy policies = 1;</code>
   */
  @java.lang.Override
  public com.google.iam.v2beta.Policy getPolicies(int index) {
    return policies_.get(index);
  }
  /**
   *
   *
   * <pre>
   * Metadata for the policies that are attached to the resource.
   * </pre>
   *
   * <code>repeated .google.iam.v2beta.Policy policies = 1;</code>
   */
  @java.lang.Override
  public com.google.iam.v2beta.PolicyOrBuilder getPoliciesOrBuilder(int index) {
    return policies_.get(index);
  }
  public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
  @SuppressWarnings("serial")
  private volatile java.lang.Object nextPageToken_ = "";
  /**
   *
   *
   * <pre>
   * A page token that you can use in a [ListPoliciesRequest][google.iam.v2beta.ListPoliciesRequest] to retrieve the
   * next page. If this field is omitted, there are no additional pages.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The nextPageToken.
   */
  @java.lang.Override
  public java.lang.String getNextPageToken() {
    // Standard protobuf lazy String caching: the field may hold a ByteString until first read.
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      nextPageToken_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * A page token that you can use in a [ListPoliciesRequest][google.iam.v2beta.ListPoliciesRequest] to retrieve the
   * next page. If this field is omitted, there are no additional pages.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The bytes for nextPageToken.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getNextPageTokenBytes() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      nextPageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // NOTE(review): protoc-generated serialization/equality plumbing; comments only, no
  // functional edits — changes would be lost on the next regeneration.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    for (int i = 0; i < policies_.size(); i++) {
      output.writeMessage(1, policies_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    for (int i = 0; i < policies_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, policies_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.iam.v2beta.ListPoliciesResponse)) {
      return super.equals(obj);
    }
    com.google.iam.v2beta.ListPoliciesResponse other =
        (com.google.iam.v2beta.ListPoliciesResponse) obj;
    if (!getPoliciesList().equals(other.getPoliciesList())) return false;
    if (!getNextPageToken().equals(other.getNextPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getPoliciesCount() > 0) {
      hash = (37 * hash) + POLICIES_FIELD_NUMBER;
      hash = (53 * hash) + getPoliciesList().hashCode();
    }
    hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getNextPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // NOTE(review): protoc-generated parse/builder factory overloads; comments only, no
  // functional edits — changes would be lost on the next regeneration.
  public static com.google.iam.v2beta.ListPoliciesResponse parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.iam.v2beta.ListPoliciesResponse parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.iam.v2beta.ListPoliciesResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.iam.v2beta.ListPoliciesResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.iam.v2beta.ListPoliciesResponse parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.iam.v2beta.ListPoliciesResponse parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.iam.v2beta.ListPoliciesResponse parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.iam.v2beta.ListPoliciesResponse parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.iam.v2beta.ListPoliciesResponse parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.iam.v2beta.ListPoliciesResponse parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.iam.v2beta.ListPoliciesResponse parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.iam.v2beta.ListPoliciesResponse parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.iam.v2beta.ListPoliciesResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Response message for `ListPolicies`.
* </pre>
*
* Protobuf type {@code google.iam.v2beta.ListPoliciesResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.iam.v2beta.ListPoliciesResponse)
com.google.iam.v2beta.ListPoliciesResponseOrBuilder {
    // NOTE(review): protoc-generated Builder members; comments only, no functional edits —
    // changes would be lost on the next regeneration.
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.iam.v2beta.PolicyProto
          .internal_static_google_iam_v2beta_ListPoliciesResponse_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.iam.v2beta.PolicyProto
          .internal_static_google_iam_v2beta_ListPoliciesResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.iam.v2beta.ListPoliciesResponse.class,
              com.google.iam.v2beta.ListPoliciesResponse.Builder.class);
    }
    // Construct using com.google.iam.v2beta.ListPoliciesResponse.newBuilder()
    private Builder() {}
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (policiesBuilder_ == null) {
        policies_ = java.util.Collections.emptyList();
      } else {
        // When a nested builder exists, it owns the list; null the field and clear the builder.
        policies_ = null;
        policiesBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      nextPageToken_ = "";
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.iam.v2beta.PolicyProto
          .internal_static_google_iam_v2beta_ListPoliciesResponse_descriptor;
    }
    @java.lang.Override
    public com.google.iam.v2beta.ListPoliciesResponse getDefaultInstanceForType() {
      return com.google.iam.v2beta.ListPoliciesResponse.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.iam.v2beta.ListPoliciesResponse build() {
      com.google.iam.v2beta.ListPoliciesResponse result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.iam.v2beta.ListPoliciesResponse buildPartial() {
      com.google.iam.v2beta.ListPoliciesResponse result =
          new com.google.iam.v2beta.ListPoliciesResponse(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    private void buildPartialRepeatedFields(com.google.iam.v2beta.ListPoliciesResponse result) {
      if (policiesBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)) {
          policies_ = java.util.Collections.unmodifiableList(policies_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.policies_ = policies_;
      } else {
        result.policies_ = policiesBuilder_.build();
      }
    }
    private void buildPartial0(com.google.iam.v2beta.ListPoliciesResponse result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.nextPageToken_ = nextPageToken_;
      }
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.iam.v2beta.ListPoliciesResponse) {
return mergeFrom((com.google.iam.v2beta.ListPoliciesResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.iam.v2beta.ListPoliciesResponse other) {
if (other == com.google.iam.v2beta.ListPoliciesResponse.getDefaultInstance()) return this;
if (policiesBuilder_ == null) {
if (!other.policies_.isEmpty()) {
if (policies_.isEmpty()) {
policies_ = other.policies_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensurePoliciesIsMutable();
policies_.addAll(other.policies_);
}
onChanged();
}
} else {
if (!other.policies_.isEmpty()) {
if (policiesBuilder_.isEmpty()) {
policiesBuilder_.dispose();
policiesBuilder_ = null;
policies_ = other.policies_;
bitField0_ = (bitField0_ & ~0x00000001);
policiesBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getPoliciesFieldBuilder()
: null;
} else {
policiesBuilder_.addAllMessages(other.policies_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.iam.v2beta.Policy m =
input.readMessage(com.google.iam.v2beta.Policy.parser(), extensionRegistry);
if (policiesBuilder_ == null) {
ensurePoliciesIsMutable();
policies_.add(m);
} else {
policiesBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.iam.v2beta.Policy> policies_ =
java.util.Collections.emptyList();
private void ensurePoliciesIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
policies_ = new java.util.ArrayList<com.google.iam.v2beta.Policy>(policies_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.iam.v2beta.Policy,
com.google.iam.v2beta.Policy.Builder,
com.google.iam.v2beta.PolicyOrBuilder>
policiesBuilder_;
/**
*
*
* <pre>
* Metadata for the policies that are attached to the resource.
* </pre>
*
* <code>repeated .google.iam.v2beta.Policy policies = 1;</code>
*/
public java.util.List<com.google.iam.v2beta.Policy> getPoliciesList() {
if (policiesBuilder_ == null) {
return java.util.Collections.unmodifiableList(policies_);
} else {
return policiesBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* Metadata for the policies that are attached to the resource.
* </pre>
*
* <code>repeated .google.iam.v2beta.Policy policies = 1;</code>
*/
public int getPoliciesCount() {
if (policiesBuilder_ == null) {
return policies_.size();
} else {
return policiesBuilder_.getCount();
}
}
/**
*
*
* <pre>
* Metadata for the policies that are attached to the resource.
* </pre>
*
* <code>repeated .google.iam.v2beta.Policy policies = 1;</code>
*/
public com.google.iam.v2beta.Policy getPolicies(int index) {
if (policiesBuilder_ == null) {
return policies_.get(index);
} else {
return policiesBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* Metadata for the policies that are attached to the resource.
* </pre>
*
* <code>repeated .google.iam.v2beta.Policy policies = 1;</code>
*/
public Builder setPolicies(int index, com.google.iam.v2beta.Policy value) {
if (policiesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensurePoliciesIsMutable();
policies_.set(index, value);
onChanged();
} else {
policiesBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* Metadata for the policies that are attached to the resource.
* </pre>
*
* <code>repeated .google.iam.v2beta.Policy policies = 1;</code>
*/
public Builder setPolicies(int index, com.google.iam.v2beta.Policy.Builder builderForValue) {
if (policiesBuilder_ == null) {
ensurePoliciesIsMutable();
policies_.set(index, builderForValue.build());
onChanged();
} else {
policiesBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Metadata for the policies that are attached to the resource.
* </pre>
*
* <code>repeated .google.iam.v2beta.Policy policies = 1;</code>
*/
public Builder addPolicies(com.google.iam.v2beta.Policy value) {
if (policiesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensurePoliciesIsMutable();
policies_.add(value);
onChanged();
} else {
policiesBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* Metadata for the policies that are attached to the resource.
* </pre>
*
* <code>repeated .google.iam.v2beta.Policy policies = 1;</code>
*/
public Builder addPolicies(int index, com.google.iam.v2beta.Policy value) {
if (policiesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensurePoliciesIsMutable();
policies_.add(index, value);
onChanged();
} else {
policiesBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* Metadata for the policies that are attached to the resource.
* </pre>
*
* <code>repeated .google.iam.v2beta.Policy policies = 1;</code>
*/
public Builder addPolicies(com.google.iam.v2beta.Policy.Builder builderForValue) {
if (policiesBuilder_ == null) {
ensurePoliciesIsMutable();
policies_.add(builderForValue.build());
onChanged();
} else {
policiesBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Metadata for the policies that are attached to the resource.
* </pre>
*
* <code>repeated .google.iam.v2beta.Policy policies = 1;</code>
*/
public Builder addPolicies(int index, com.google.iam.v2beta.Policy.Builder builderForValue) {
if (policiesBuilder_ == null) {
ensurePoliciesIsMutable();
policies_.add(index, builderForValue.build());
onChanged();
} else {
policiesBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Metadata for the policies that are attached to the resource.
* </pre>
*
* <code>repeated .google.iam.v2beta.Policy policies = 1;</code>
*/
public Builder addAllPolicies(
java.lang.Iterable<? extends com.google.iam.v2beta.Policy> values) {
if (policiesBuilder_ == null) {
ensurePoliciesIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, policies_);
onChanged();
} else {
policiesBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* Metadata for the policies that are attached to the resource.
* </pre>
*
* <code>repeated .google.iam.v2beta.Policy policies = 1;</code>
*/
public Builder clearPolicies() {
if (policiesBuilder_ == null) {
policies_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
policiesBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* Metadata for the policies that are attached to the resource.
* </pre>
*
* <code>repeated .google.iam.v2beta.Policy policies = 1;</code>
*/
public Builder removePolicies(int index) {
if (policiesBuilder_ == null) {
ensurePoliciesIsMutable();
policies_.remove(index);
onChanged();
} else {
policiesBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* Metadata for the policies that are attached to the resource.
* </pre>
*
* <code>repeated .google.iam.v2beta.Policy policies = 1;</code>
*/
public com.google.iam.v2beta.Policy.Builder getPoliciesBuilder(int index) {
return getPoliciesFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* Metadata for the policies that are attached to the resource.
* </pre>
*
* <code>repeated .google.iam.v2beta.Policy policies = 1;</code>
*/
public com.google.iam.v2beta.PolicyOrBuilder getPoliciesOrBuilder(int index) {
if (policiesBuilder_ == null) {
return policies_.get(index);
} else {
return policiesBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* Metadata for the policies that are attached to the resource.
* </pre>
*
* <code>repeated .google.iam.v2beta.Policy policies = 1;</code>
*/
public java.util.List<? extends com.google.iam.v2beta.PolicyOrBuilder>
getPoliciesOrBuilderList() {
if (policiesBuilder_ != null) {
return policiesBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(policies_);
}
}
/**
*
*
* <pre>
* Metadata for the policies that are attached to the resource.
* </pre>
*
* <code>repeated .google.iam.v2beta.Policy policies = 1;</code>
*/
public com.google.iam.v2beta.Policy.Builder addPoliciesBuilder() {
return getPoliciesFieldBuilder()
.addBuilder(com.google.iam.v2beta.Policy.getDefaultInstance());
}
/**
*
*
* <pre>
* Metadata for the policies that are attached to the resource.
* </pre>
*
* <code>repeated .google.iam.v2beta.Policy policies = 1;</code>
*/
public com.google.iam.v2beta.Policy.Builder addPoliciesBuilder(int index) {
return getPoliciesFieldBuilder()
.addBuilder(index, com.google.iam.v2beta.Policy.getDefaultInstance());
}
/**
*
*
* <pre>
* Metadata for the policies that are attached to the resource.
* </pre>
*
* <code>repeated .google.iam.v2beta.Policy policies = 1;</code>
*/
public java.util.List<com.google.iam.v2beta.Policy.Builder> getPoliciesBuilderList() {
return getPoliciesFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.iam.v2beta.Policy,
com.google.iam.v2beta.Policy.Builder,
com.google.iam.v2beta.PolicyOrBuilder>
getPoliciesFieldBuilder() {
if (policiesBuilder_ == null) {
policiesBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.iam.v2beta.Policy,
com.google.iam.v2beta.Policy.Builder,
com.google.iam.v2beta.PolicyOrBuilder>(
policies_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
policies_ = null;
}
return policiesBuilder_;
}
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A page token that you can use in a [ListPoliciesRequest][google.iam.v2beta.ListPoliciesRequest] to retrieve the
* next page. If this field is omitted, there are no additional pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* A page token that you can use in a [ListPoliciesRequest][google.iam.v2beta.ListPoliciesRequest] to retrieve the
* next page. If this field is omitted, there are no additional pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* A page token that you can use in a [ListPoliciesRequest][google.iam.v2beta.ListPoliciesRequest] to retrieve the
* next page. If this field is omitted, there are no additional pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* A page token that you can use in a [ListPoliciesRequest][google.iam.v2beta.ListPoliciesRequest] to retrieve the
* next page. If this field is omitted, there are no additional pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* A page token that you can use in a [ListPoliciesRequest][google.iam.v2beta.ListPoliciesRequest] to retrieve the
* next page. If this field is omitted, there are no additional pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.iam.v2beta.ListPoliciesResponse)
}
  // @@protoc_insertion_point(class_scope:google.iam.v2beta.ListPoliciesResponse)
  // Singleton default instance shared by all callers; protobuf messages are
  // immutable, so this is safe to publish from the static initializer.
  private static final com.google.iam.v2beta.ListPoliciesResponse DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.iam.v2beta.ListPoliciesResponse();
  }
  public static com.google.iam.v2beta.ListPoliciesResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser delegates to the builder's stream-based mergeFrom and attaches the
  // partially-built message to any parse failure for diagnostics.
  private static final com.google.protobuf.Parser<ListPoliciesResponse> PARSER =
      new com.google.protobuf.AbstractParser<ListPoliciesResponse>() {
        @java.lang.Override
        public ListPoliciesResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<ListPoliciesResponse> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<ListPoliciesResponse> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.iam.v2beta.ListPoliciesResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/hive | 35,655 | ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.ql.exec;
import static org.apache.hadoop.hive.ql.exec.Utilities.DEPRECATED_MAPRED_DFSCLIENT_PARALLELISM_MAX;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.apache.hadoop.hive.ql.exec.Utilities.getFileExtension;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConfForTest;
import org.apache.hadoop.hive.ql.Context;
import org.apache.hadoop.hive.ql.exec.mr.ExecDriver;
import org.apache.hadoop.hive.ql.exec.tez.TezTask;
import org.apache.hadoop.hive.ql.io.*;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.plan.DependencyCollectionWork;
import org.apache.hadoop.hive.ql.plan.DynamicPartitionCtx;
import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
import org.apache.hadoop.hive.ql.plan.FileSinkDesc;
import org.apache.hadoop.hive.ql.plan.MapWork;
import org.apache.hadoop.hive.ql.plan.MapredWork;
import org.apache.hadoop.hive.ql.plan.PartitionDesc;
import org.apache.hadoop.hive.ql.plan.TableDesc;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFFromUtcTimestamp;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapreduce.MRJobConfig;
import com.google.common.io.Files;
import org.junit.Assert;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
public class TestUtilities {
  // Per-test scratch directory; JUnit creates and deletes it around each test.
  @Rule
  public TemporaryFolder temporaryFolder = new TemporaryFolder();
  public static final Logger LOG = LoggerFactory.getLogger(TestUtilities.class);
  // Bucket count used by the mocked Table/DynamicPartitionCtx in the
  // removeTempOrDuplicateFiles tests below.
  private static final int NUM_BUCKETS = 3;
@Test
public void testGetFileExtension() {
JobConf jc = new JobConf();
assertEquals("No extension for uncompressed unknown format", "",
getFileExtension(jc, false, null));
assertEquals("No extension for compressed unknown format", "",
getFileExtension(jc, true, null));
assertEquals("No extension for uncompressed text format", "",
getFileExtension(jc, false, new HiveIgnoreKeyTextOutputFormat()));
assertEquals("Deflate for uncompressed text format", ".deflate",
getFileExtension(jc, true, new HiveIgnoreKeyTextOutputFormat()));
String extension = ".myext";
jc.set("hive.output.file.extension", extension);
assertEquals("Custom extension for uncompressed unknown format", extension,
getFileExtension(jc, false, null));
assertEquals("Custom extension for compressed unknown format", extension,
getFileExtension(jc, true, null));
assertEquals("Custom extension for uncompressed text format", extension,
getFileExtension(jc, false, new HiveIgnoreKeyTextOutputFormat()));
assertEquals("Custom extension for uncompressed text format", extension,
getFileExtension(jc, true, new HiveIgnoreKeyTextOutputFormat()));
}
  /**
   * Round-trips an expression tree containing a timestamp constant (with
   * nanosecond precision) through Kryo serialize/deserialize and checks the
   * resulting expression string is unchanged.
   */
  @Test
  public void testSerializeTimestamp() {
    // 123456 nanos exercises the sub-millisecond component of the timestamp.
    Timestamp ts = Timestamp.ofEpochMilli(1374554702000L, 123456);
    ExprNodeConstantDesc constant = new ExprNodeConstantDesc(ts);
    List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>(1);
    children.add(constant);
    ExprNodeGenericFuncDesc desc = new ExprNodeGenericFuncDesc(TypeInfoFactory.timestampTypeInfo,
        new GenericUDFFromUtcTimestamp(), children);
    assertEquals(desc.getExprString(), SerializationUtilities.deserializeExpression(
        SerializationUtilities.serializeExpression(desc)).getExprString());
  }
  /**
   * Exercises {@link Utilities#getDbTableName(String)} for the supported
   * dot-separated name forms: bare table (resolved against the session's
   * current database), db.table, db.table.metaTable, and an invalid
   * four-part name which must raise {@link HiveException}.
   */
  @Test
  public void testGetDbTableName() throws HiveException{
    String tablename;
    String [] dbtab;
    // A session is required so the bare-table case can pick up the
    // current default database.
    SessionState.start(new HiveConfForTest(getClass()));
    String curDefaultdb = SessionState.get().getCurrentDatabase();
    //test table without db portion
    tablename = "tab1";
    dbtab = Utilities.getDbTableName(tablename);
    assertEquals("db name", curDefaultdb, dbtab[0]);
    assertEquals("table name", tablename, dbtab[1]);
    //test table with db portion
    tablename = "dab1.tab1";
    dbtab = Utilities.getDbTableName(tablename);
    assertEquals("db name", "dab1", dbtab[0]);
    assertEquals("table name", "tab1", dbtab[1]);
    // test table name with metadata table name
    tablename = "dab1.tab1.meta1";
    dbtab = Utilities.getDbTableName(tablename);
    assertEquals("db name", "dab1", dbtab[0]);
    assertEquals("table name", "tab1", dbtab[1]);
    assertEquals("metadata table name", "meta1", dbtab[2]);
    //test invalid table name (more than three parts)
    tablename = "dab1.tab1.x1.y";
    try {
      dbtab = Utilities.getDbTableName(tablename);
      fail("exception was expected for invalid table name");
    } catch(HiveException ex){
      assertEquals("Invalid table name " + tablename, ex.getMessage());
    }
  }
@Test
public void testReplaceTaskId() {
String taskID = "000000";
int bucketNum = 1;
String newTaskID = Utilities.replaceTaskId(taskID, bucketNum);
Assert.assertEquals("000001", newTaskID);
taskID = "(ds%3D1)000001";
newTaskID = Utilities.replaceTaskId(taskID, 5);
Assert.assertEquals("(ds%3D1)000005", newTaskID);
}
@Test
public void testRemoveTempOrDuplicateFilesOnTezNoDp() throws Exception {
List<Path> paths = runRemoveTempOrDuplicateFilesTestCase("tez", false);
assertEquals(0, paths.size());
}
@Test
public void testRemoveTempOrDuplicateFilesOnTezWithDp() throws Exception {
List<Path> paths = runRemoveTempOrDuplicateFilesTestCase("tez", true);
assertEquals(0, paths.size());
}
@Test
public void testRemoveTempOrDuplicateFilesOnMrNoDp() throws Exception {
List<Path> paths = runRemoveTempOrDuplicateFilesTestCase("mr", false);
assertEquals(NUM_BUCKETS, paths.size());
}
@Test
public void testRemoveTempOrDuplicateFilesOnMrWithDp() throws Exception {
List<Path> paths = runRemoveTempOrDuplicateFilesTestCase("mr", true);
assertEquals(NUM_BUCKETS, paths.size());
}
  /**
   * When the destination does not exist, renameOrMoveFiles must issue a
   * single plain rename with no collision-avoidance suffixing.
   */
  @Test
  public void testRenameFilesNotExists() throws Exception {
    FileSystem fs = mock(FileSystem.class);
    Path src = new Path("src");
    Path dest = new Path("dir");
    when(fs.exists(dest)).thenReturn(false);
    when(fs.rename(src, dest)).thenReturn(true);
    Utilities.renameOrMoveFiles(fs, src, dest);
    verify(fs, times(1)).rename(src, dest);
  }
  /**
   * Collision handling for a non-Hive-style file name: with "src" and "src_1"
   * already present in the destination, the file must be renamed to "src_2"
   * (plain "_N" suffixing rather than the "_copy_N" scheme).
   */
  @Test
  public void testRenameFileExistsNonHive() throws Exception {
    FileSystem fs = mock(FileSystem.class);
    Path src = new Path("src");
    Path dest = new Path("dir1");
    Path finalPath = new Path(dest, "src_2");
    FileStatus status = new FileStatus();
    status.setPath(src);
    when(fs.listStatus(src)).thenReturn(new FileStatus[]{status});
    when(fs.exists(dest)).thenReturn(true);
    // First two candidate names are taken, forcing the _2 suffix.
    when(fs.exists(new Path(dest, "src"))).thenReturn(true);
    when(fs.exists(new Path(dest,"src_1"))).thenReturn(true);
    when(fs.rename(src, finalPath)).thenReturn(true);
    Utilities.renameOrMoveFiles(fs, src, dest);
    verify(fs, times(1)).rename(src, finalPath);
  }
  /**
   * Collision handling for a Hive task-id style name ("00001_02"): with the
   * original and "_copy_1" already present, the file must be renamed using
   * the "_copy_N" suffix scheme, landing on "00001_02_copy_2".
   */
  @Test
  public void testRenameFileExistsHivePath() throws Exception {
    FileSystem fs = mock(FileSystem.class);
    Path src = new Path("00001_02");
    Path dest = new Path("dir1");
    Path finalPath = new Path(dest, "00001_02_copy_2");
    FileStatus status = new FileStatus();
    status.setPath(src);
    when(fs.listStatus(src)).thenReturn(new FileStatus[]{status});
    when(fs.exists(dest)).thenReturn(true);
    // First two candidate names are taken, forcing the _copy_2 suffix.
    when(fs.exists(new Path(dest, "00001_02"))).thenReturn(true);
    when(fs.exists(new Path(dest,"00001_02_copy_1"))).thenReturn(true);
    when(fs.rename(src, finalPath)).thenReturn(true);
    Utilities.renameOrMoveFiles(fs, src, dest);
    verify(fs, times(1)).rename(src, finalPath);
  }
  /**
   * Shared driver for the removeTempOrDuplicateFiles tests: creates a local
   * temp dir containing a single task output file, runs
   * {@link Utilities#removeTempOrDuplicateFiles} under the given execution
   * engine (with or without a mocked dynamic-partition context), and returns
   * the commit paths it produced after checking their scheme/authority.
   */
  private List<Path> runRemoveTempOrDuplicateFilesTestCase(String executionEngine, boolean dPEnabled)
      throws Exception {
    Configuration hconf = new HiveConfForTest(getClass());
    // do this to verify that Utilities.removeTempOrDuplicateFiles does not revert to default scheme information
    hconf.set("fs.defaultFS", "hdfs://should-not-be-used/");
    hconf.set(HiveConf.ConfVars.HIVE_EXECUTION_ENGINE.varname, executionEngine);
    FileSystem localFs = FileSystem.getLocal(hconf);
    DynamicPartitionCtx dpCtx = getDynamicPartitionCtx(dPEnabled);
    Path tempDirPath = setupTempDirWithSingleOutputFile(hconf);
    FileSinkDesc conf = getFileSinkDesc(tempDirPath);
    // HIVE-23354 enforces that MR speculative execution is disabled
    hconf.setBoolean(MRJobConfig.MAP_SPECULATIVE, false);
    hconf.setBoolean(MRJobConfig.REDUCE_SPECULATIVE, false);
    List<Path> paths = Utilities.removeTempOrDuplicateFiles(localFs, tempDirPath, dpCtx, conf, hconf, false);
    // Returned paths must keep the temp dir's file:// scheme, not the
    // bogus hdfs default configured above.
    String expectedScheme = tempDirPath.toUri().getScheme();
    String expectedAuthority = tempDirPath.toUri().getAuthority();
    assertPathsMatchSchemeAndAuthority(expectedScheme, expectedAuthority, paths);
    return paths;
  }
private void assertPathsMatchSchemeAndAuthority(String expectedScheme, String expectedAuthority, List<Path> paths) {
for (Path path : paths) {
assertEquals(path.toUri().getScheme().toLowerCase(), expectedScheme.toLowerCase());
assertEquals(path.toUri().getAuthority(), expectedAuthority);
}
}
private DynamicPartitionCtx getDynamicPartitionCtx(boolean dPEnabled) {
DynamicPartitionCtx dpCtx = null;
if (dPEnabled) {
dpCtx = mock(DynamicPartitionCtx.class);
when(dpCtx.getNumDPCols()).thenReturn(0);
when(dpCtx.getNumBuckets()).thenReturn(NUM_BUCKETS);
}
return dpCtx;
}
  /**
   * Builds a FileSinkDesc over the given temp dir, backed by a mocked table
   * reporting NUM_BUCKETS buckets and a trivial single-string-column schema.
   */
  private FileSinkDesc getFileSinkDesc(Path tempDirPath) {
    Table table = mock(Table.class);
    when(table.getNumBuckets()).thenReturn(NUM_BUCKETS);
    TableDesc tInfo = Utilities.getTableDesc("s", "string");
    FileSinkDesc conf = new FileSinkDesc(tempDirPath, tInfo, false);
    conf.setTable(table);
    return conf;
  }
  /**
   * Creates a file:// temp directory containing one empty file named after
   * the current task id, and returns the directory's path.
   */
  private Path setupTempDirWithSingleOutputFile(Configuration hconf) throws IOException {
    // Explicit file:// prefix so the path keeps its scheme even when
    // fs.defaultFS points elsewhere (see runRemoveTempOrDuplicateFilesTestCase).
    Path tempDirPath = new Path("file://" + temporaryFolder.newFolder().getAbsolutePath());
    Path taskOutputPath = new Path(tempDirPath, Utilities.getTaskId(hconf));
    FileSystem.getLocal(hconf).create(taskOutputPath).close();
    return tempDirPath;
  }
  /**
   * Check that calling {@link Utilities#getInputPaths(JobConf, MapWork, Path, Context, boolean)}
   * can process two different tables that both have empty partitions.
   *
   * <p>Each MapWork points at a non-existent path; getInputPaths is expected
   * to substitute a generated (existing) dummy path for it rather than return
   * the missing one or create it.
   */
  @Test
  public void testGetInputPathsWithEmptyPartitions() throws Exception {
    String alias1Name = "alias1";
    String alias2Name = "alias2";
    MapWork mapWork1 = new MapWork();
    MapWork mapWork2 = new MapWork();
    JobConf jobConf = new JobConf();
    Configuration conf = new Configuration();
    // Random names guarantee the "partition" paths do not exist on disk.
    Path nonExistentPath1 = new Path(UUID.randomUUID().toString());
    Path nonExistentPath2 = new Path(UUID.randomUUID().toString());
    PartitionDesc mockPartitionDesc = mock(PartitionDesc.class);
    TableDesc mockTableDesc = mock(TableDesc.class);
    when(mockTableDesc.isNonNative()).thenReturn(false);
    when(mockTableDesc.getProperties()).thenReturn(new Properties());
    when(mockPartitionDesc.getProperties()).thenReturn(new Properties());
    when(mockPartitionDesc.getTableDesc()).thenReturn(mockTableDesc);
    doReturn(HiveSequenceFileOutputFormat.class).when(
        mockPartitionDesc).getOutputFileFormatClass();
    // Wire each MapWork: path -> alias, alias -> (mock) operator, path -> partition.
    mapWork1.setPathToAliases(new LinkedHashMap<>(
        ImmutableMap.of(nonExistentPath1, Lists.newArrayList(alias1Name))));
    mapWork1.setAliasToWork(new LinkedHashMap<>(
        ImmutableMap.of(alias1Name, (Operator<?>) mock(Operator.class))));
    mapWork1.setPathToPartitionInfo(new LinkedHashMap<>(
        ImmutableMap.of(nonExistentPath1, mockPartitionDesc)));
    mapWork2.setPathToAliases(new LinkedHashMap<>(
        ImmutableMap.of(nonExistentPath2, Lists.newArrayList(alias2Name))));
    mapWork2.setAliasToWork(new LinkedHashMap<>(
        ImmutableMap.of(alias2Name, (Operator<?>) mock(Operator.class))));
    mapWork2.setPathToPartitionInfo(new LinkedHashMap<>(
        ImmutableMap.of(nonExistentPath2, mockPartitionDesc)));
    // Collected so the finally block can clean up generated dummy files.
    List<Path> inputPaths = new ArrayList<>();
    try {
      Path scratchDir = new Path(HiveConf.getVar(jobConf, HiveConf.ConfVars.LOCAL_SCRATCH_DIR));
      List<Path> inputPaths1 = Utilities.getInputPaths(jobConf, mapWork1, scratchDir,
          mock(Context.class), false);
      inputPaths.addAll(inputPaths1);
      // One substituted path that exists; the original still must not.
      assertEquals(inputPaths1.size(), 1);
      assertNotEquals(inputPaths1.get(0), nonExistentPath1);
      assertTrue(inputPaths1.get(0).getFileSystem(conf).exists(inputPaths1.get(0)));
      assertFalse(nonExistentPath1.getFileSystem(conf).exists(nonExistentPath1));
      List<Path> inputPaths2 = Utilities.getInputPaths(jobConf, mapWork2, scratchDir,
          mock(Context.class), false);
      inputPaths.addAll(inputPaths2);
      assertEquals(inputPaths2.size(), 1);
      assertNotEquals(inputPaths2.get(0), nonExistentPath2);
      assertTrue(inputPaths2.get(0).getFileSystem(conf).exists(inputPaths2.get(0)));
      assertFalse(nonExistentPath2.getFileSystem(conf).exists(nonExistentPath2));
    } finally {
      // Best-effort cleanup of the generated dummy input files.
      File file;
      for (Path path : inputPaths) {
        file = new File(path.toString());
        if (file.exists()) {
          file.delete();
        }
      }
    }
  }
/**
 * Check that calling {@link Utilities#getInputPaths(JobConf, MapWork, Path, Context, boolean)}
 * can process two different tables that both have empty partitions when using multiple threads.
 * Some extra logic is placed at the end of the test to validate no race conditions put the
 * {@link MapWork} object in an invalid state.
 */
@Test
public void testGetInputPathsWithMultipleThreadsAndEmptyPartitions() throws Exception {
  int numPartitions = 15;
  JobConf jobConf = new JobConf();
  // Force a pool larger than the core count so partition listing really runs concurrently.
  jobConf.setInt(HiveConf.ConfVars.HIVE_EXEC_INPUT_LISTING_MAX_THREADS.varname,
          Runtime.getRuntime().availableProcessors() * 2);
  MapWork mapWork = new MapWork();
  Path testTablePath = new Path("testTable");
  Path[] testPartitionsPaths = new Path[numPartitions];

  // One shared PartitionDesc mock serves all partitions; it only needs enough behavior
  // for getInputPaths to create empty-partition placeholder files.
  PartitionDesc mockPartitionDesc = mock(PartitionDesc.class);
  TableDesc mockTableDesc = mock(TableDesc.class);

  when(mockTableDesc.isNonNative()).thenReturn(false);
  when(mockTableDesc.getProperties()).thenReturn(new Properties());

  when(mockPartitionDesc.getProperties()).thenReturn(new Properties());
  when(mockPartitionDesc.getTableDesc()).thenReturn(mockTableDesc);
  doReturn(HiveSequenceFileOutputFormat.class).when(
          mockPartitionDesc).getOutputFileFormatClass();

  // Register numPartitions partition paths; the directories are never created on disk,
  // so every partition is "empty" from the listing's point of view.
  for (int i = 0; i < numPartitions; i++) {
    String testPartitionName = "p=" + i;
    testPartitionsPaths[i] = new Path(testTablePath, "p=" + i);
    mapWork.getPathToAliases().put(testPartitionsPaths[i], Lists.newArrayList(testPartitionName));
    mapWork.getAliasToWork().put(testPartitionName, mock(Operator.class));
    mapWork.getPathToPartitionInfo().put(testPartitionsPaths[i], mockPartitionDesc);
  }

  FileSystem fs = FileSystem.getLocal(jobConf);
  try {
    fs.mkdirs(testTablePath);
    List<Path> inputPaths = Utilities.getInputPaths(jobConf, mapWork,
            new Path(HiveConf.getVar(jobConf, HiveConf.ConfVars.LOCAL_SCRATCH_DIR)), mock(Context.class), false);
    assertEquals(inputPaths.size(), numPartitions);

    // Every returned path must be a substituted placeholder, not the (nonexistent) original.
    for (int i = 0; i < numPartitions; i++) {
      assertNotEquals(inputPaths.get(i), testPartitionsPaths[i]);
    }

    // Race-condition check: concurrent listing must not have corrupted the MapWork maps.
    assertEquals(mapWork.getPathToAliases().size(), numPartitions);
    assertEquals(mapWork.getPathToPartitionInfo().size(), numPartitions);
    assertEquals(mapWork.getAliasToWork().size(), numPartitions);

    for (Map.Entry<Path, List<String>> entry : mapWork.getPathToAliases().entrySet()) {
      assertNotNull(entry.getKey());
      assertNotNull(entry.getValue());
      assertEquals(entry.getValue().size(), 1);
      // Each remapped key must point at a path that actually exists (the placeholder).
      assertTrue(entry.getKey().getFileSystem(new Configuration()).exists(entry.getKey()));
    }
  } finally {
    if (fs.exists(testTablePath)) {
      fs.delete(testTablePath, true);
    }
  }
}
/**
 * Check that calling {@link Utilities#getMaxExecutorsForInputListing(Configuration, int)}
 * returns the maximum number of executors to use based on the number of input locations.
 */
@Test
public void testGetMaxExecutorsForInputListing() {
  Configuration conf = new Configuration();

  final int ZERO_EXECUTORS = 0;
  final int ONE_EXECUTOR = 1;
  final int TWO_EXECUTORS = 2;

  final int ZERO_THREADS = 0;
  final int ONE_THREAD = 1;
  final int TWO_THREADS = 2;

  final int ZERO_LOCATIONS = 0;
  final int ONE_LOCATION = 1;
  final int TWO_LOCATIONS = 2;
  final int THREE_LOCATIONS = 3;

  // With one configured thread, the executor count is capped at one regardless of locations.
  conf.setInt(HiveConf.ConfVars.HIVE_EXEC_INPUT_LISTING_MAX_THREADS.varname, ONE_THREAD);

  assertEquals(ZERO_EXECUTORS, Utilities.getMaxExecutorsForInputListing(conf, ZERO_LOCATIONS));
  assertEquals(ONE_EXECUTOR, Utilities.getMaxExecutorsForInputListing(conf, ONE_LOCATION));
  assertEquals(ONE_EXECUTOR, Utilities.getMaxExecutorsForInputListing(conf, TWO_LOCATIONS));
  assertEquals(ONE_EXECUTOR, Utilities.getMaxExecutorsForInputListing(conf, THREE_LOCATIONS));

  // With two threads, the result is min(threads, locations).
  conf.setInt(HiveConf.ConfVars.HIVE_EXEC_INPUT_LISTING_MAX_THREADS.varname, TWO_THREADS);

  assertEquals(ZERO_EXECUTORS, Utilities.getMaxExecutorsForInputListing(conf, ZERO_LOCATIONS));
  assertEquals(ONE_EXECUTOR, Utilities.getMaxExecutorsForInputListing(conf, ONE_LOCATION));
  assertEquals(TWO_EXECUTORS, Utilities.getMaxExecutorsForInputListing(conf, TWO_LOCATIONS));
  assertEquals(TWO_EXECUTORS, Utilities.getMaxExecutorsForInputListing(conf, THREE_LOCATIONS));

  /*
   * The following tests verify that the deprecated variable is still usable.
   */

  conf.setInt(HiveConf.ConfVars.HIVE_EXEC_INPUT_LISTING_MAX_THREADS.varname, ZERO_THREADS);
  conf.setInt(DEPRECATED_MAPRED_DFSCLIENT_PARALLELISM_MAX, ZERO_THREADS);

  assertEquals(ZERO_EXECUTORS, Utilities.getMaxExecutorsForInputListing(conf, ZERO_LOCATIONS));
  assertEquals(ONE_EXECUTOR, Utilities.getMaxExecutorsForInputListing(conf, ONE_LOCATION));
  assertEquals(ONE_EXECUTOR, Utilities.getMaxExecutorsForInputListing(conf, TWO_LOCATIONS));
  assertEquals(ONE_EXECUTOR, Utilities.getMaxExecutorsForInputListing(conf, THREE_LOCATIONS));

  conf.setInt(HiveConf.ConfVars.HIVE_EXEC_INPUT_LISTING_MAX_THREADS.varname, ZERO_THREADS);
  conf.setInt(DEPRECATED_MAPRED_DFSCLIENT_PARALLELISM_MAX, ONE_THREAD);

  assertEquals(ZERO_EXECUTORS, Utilities.getMaxExecutorsForInputListing(conf, ZERO_LOCATIONS));
  assertEquals(ONE_EXECUTOR, Utilities.getMaxExecutorsForInputListing(conf, ONE_LOCATION));
  assertEquals(ONE_EXECUTOR, Utilities.getMaxExecutorsForInputListing(conf, TWO_LOCATIONS));
  assertEquals(ONE_EXECUTOR, Utilities.getMaxExecutorsForInputListing(conf, THREE_LOCATIONS));

  conf.setInt(HiveConf.ConfVars.HIVE_EXEC_INPUT_LISTING_MAX_THREADS.varname, ZERO_THREADS);
  conf.setInt(DEPRECATED_MAPRED_DFSCLIENT_PARALLELISM_MAX, TWO_THREADS);

  assertEquals(ZERO_EXECUTORS, Utilities.getMaxExecutorsForInputListing(conf, ZERO_LOCATIONS));
  assertEquals(ONE_EXECUTOR, Utilities.getMaxExecutorsForInputListing(conf, ONE_LOCATION));
  assertEquals(TWO_EXECUTORS, Utilities.getMaxExecutorsForInputListing(conf, TWO_LOCATIONS));
  assertEquals(TWO_EXECUTORS, Utilities.getMaxExecutorsForInputListing(conf, THREE_LOCATIONS));

  // Check that HIVE_EXEC_INPUT_LISTING_MAX_THREADS has priority over DEPRECATED_MAPRED_DFSCLIENT_PARALLELISM_MAX
  conf.setInt(HiveConf.ConfVars.HIVE_EXEC_INPUT_LISTING_MAX_THREADS.varname, TWO_THREADS);
  conf.setInt(DEPRECATED_MAPRED_DFSCLIENT_PARALLELISM_MAX, ONE_THREAD);

  assertEquals(ZERO_EXECUTORS, Utilities.getMaxExecutorsForInputListing(conf, ZERO_LOCATIONS));
  assertEquals(ONE_EXECUTOR, Utilities.getMaxExecutorsForInputListing(conf, ONE_LOCATION));
  assertEquals(TWO_EXECUTORS, Utilities.getMaxExecutorsForInputListing(conf, TWO_LOCATIONS));
  assertEquals(TWO_EXECUTORS, Utilities.getMaxExecutorsForInputListing(conf, THREE_LOCATIONS));
}
/**
 * Test for {@link Utilities#getInputPaths(JobConf, MapWork, Path, Context, boolean)} using a
 * single thread.
 */
@Test
public void testGetInputPathsWithASingleThread() throws Exception {
  final int NUM_PARTITIONS = 5;

  JobConf jobConf = new JobConf();
  // Pin listing to one thread so the sequential code path is exercised.
  jobConf.setInt(HiveConf.ConfVars.HIVE_EXEC_INPUT_LISTING_MAX_THREADS.varname, 1);

  runTestGetInputPaths(jobConf, NUM_PARTITIONS);
}
/**
 * Test for {@link Utilities#getInputPaths(JobConf, MapWork, Path, Context, boolean)} with multiple
 * threads.
 */
@Test
public void testGetInputPathsWithMultipleThreads() throws Exception {
  final int NUM_PARTITIONS = 5;

  JobConf jobConf = new JobConf();
  // Two listing threads exercise the thread-pool code path.
  jobConf.setInt(HiveConf.ConfVars.HIVE_EXEC_INPUT_LISTING_MAX_THREADS.varname, 2);

  runTestGetInputPaths(jobConf, NUM_PARTITIONS);
}
/**
 * Shared driver for the getInputPaths tests: creates {@code numOfPartitions} partition
 * directories (each holding one non-empty file) under a local "testTable" directory, wires
 * them into a {@link MapWork}, and verifies {@link Utilities#getInputPaths} returns exactly
 * those partition paths, unchanged and in order.
 *
 * @param jobConf configuration; callers set HIVE_EXEC_INPUT_LISTING_MAX_THREADS on it
 * @param numOfPartitions number of partition directories to create
 */
private void runTestGetInputPaths(JobConf jobConf, int numOfPartitions) throws Exception {
  MapWork mapWork = new MapWork();
  Path scratchDir = new Path(HiveConf.getVar(jobConf, HiveConf.ConfVars.LOCAL_SCRATCH_DIR));

  Map<Path, List<String>> pathToAliasTable = new LinkedHashMap<>();

  String testTableName = "testTable";
  Path testTablePath = new Path(testTableName);

  Path[] testPartitionsPaths = new Path[numOfPartitions];
  for (int i = 0; i < numOfPartitions; i++) {
    String testPartitionName = "p=" + i;
    // Reuse the partition name so the alias and the directory name cannot drift apart.
    testPartitionsPaths[i] = new Path(testTablePath, testPartitionName);
    pathToAliasTable.put(testPartitionsPaths[i], Lists.newArrayList(testPartitionName));
    mapWork.getAliasToWork().put(testPartitionName, mock(Operator.class));
  }
  mapWork.setPathToAliases(pathToAliasTable);

  FileSystem fs = FileSystem.getLocal(jobConf);
  try {
    fs.mkdirs(testTablePath);
    // Each partition gets a non-empty file, so no empty-partition placeholder is substituted.
    for (int i = 0; i < numOfPartitions; i++) {
      fs.mkdirs(testPartitionsPaths[i]);
      fs.create(new Path(testPartitionsPaths[i], "test1.txt")).close();
    }

    List<Path> inputPaths = Utilities.getInputPaths(jobConf, mapWork, scratchDir, mock(Context.class), false);

    // JUnit convention: expected value first, actual second (original had them reversed,
    // which produces misleading failure messages).
    assertEquals(numOfPartitions, inputPaths.size());
    for (int i = 0; i < numOfPartitions; i++) {
      assertEquals(testPartitionsPaths[i], inputPaths.get(i));
    }
  } finally {
    if (fs.exists(testTablePath)) {
      fs.delete(testTablePath, true);
    }
  }
}
/**
 * Verifies that {@link Utilities#getInputPathsWithPool} submits one {@link Callable} per
 * input path and shuts the pool down cleanly on success.
 */
@Test
@SuppressWarnings("unchecked")
public void testGetInputPathsPool() throws IOException, ExecutionException, InterruptedException {
  Path path = new Path("dummy-path");
  // Three entries -> expect exactly three submissions to the pool.
  List<Path> pathsToAdd = new ArrayList<>();
  for (int i = 0; i < 3; i++) {
    pathsToAdd.add(path);
  }

  ExecutorService pool = mock(ExecutorService.class);
  // Parameterize the Future instead of using the raw type.
  Future<Path> mockFuture = mock(Future.class);

  when(mockFuture.get()).thenReturn(path);
  when(pool.submit(any(Callable.class))).thenReturn(mockFuture);

  Utilities.getInputPathsWithPool(mock(JobConf.class), mock(MapWork.class), mock(Path.class), mock(Context.class),
          false, pathsToAdd, pool);

  verify(pool, times(3)).submit(any(Callable.class));
  verify(pool).shutdown();
  verify(pool).shutdownNow();
}
/**
 * Verifies that {@link Utilities#getInputPathsWithPool} propagates a failure from a worker
 * Future and still shuts the pool down via {@code shutdownNow()}.
 */
@Test
@SuppressWarnings("unchecked")
public void testGetInputPathsPoolAndFailure() throws IOException, ExecutionException, InterruptedException {
  Path path = new Path("dummy-path");
  List<Path> pathsToAdd = new ArrayList<>();
  for (int i = 0; i < 3; i++) {
    pathsToAdd.add(path);
  }

  ExecutorService pool = mock(ExecutorService.class);
  // Parameterize the Future instead of using the raw type.
  Future<Path> mockFuture = mock(Future.class);

  // Every Future.get() blows up, simulating a failed listing task.
  when(mockFuture.get()).thenThrow(new RuntimeException());
  when(pool.submit(any(Callable.class))).thenReturn(mockFuture);

  Exception e = null;
  try {
    Utilities.getInputPathsWithPool(mock(JobConf.class), mock(MapWork.class), mock(Path.class), mock(Context.class),
            false, pathsToAdd, pool);
  } catch (Exception thrownException) {
    e = thrownException;
  }
  assertNotNull("getInputPathsWithPool should propagate the worker failure", e);

  verify(pool, times(3)).submit(any(Callable.class));
  verify(pool).shutdownNow();
}
/**
 * Creates a fresh {@link DependencyCollectionWork} task via {@link TaskFactory} so each
 * instance gets its own id (required for distinct equals/hashCode — see
 * {@link #getTestDiamondTaskGraph}).
 */
private Task<?> getDependencyCollectionTask() {
  return TaskFactory.get(new DependencyCollectionWork());
}
/**
 * Generates a task graph that looks like this:
 *
 *       ---->DTa----
 *      /            \
 * root ----->DTb-----*-->DTd---> ProvidedTask --> DTe
 *      \            /
 *       ---->DTc----
 *
 * @param providedTask the task to embed after the diamond join point
 * @return a single-element list containing only the root of the graph
 */
private List<Task<?>> getTestDiamondTaskGraph(Task<?> providedTask){
  // Note: never instantiate a task without TaskFactory.get() if you're not
  // okay with .equals() breaking. Doing it via TaskFactory.get makes sure
  // that an id is generated, and two tasks of the same type don't show
  // up as "equal", which is important for things like iterating over an
  // array. Without this, DTa, DTb, and DTc would show up as one item in
  // the list of children. Thus, we're instantiating via a helper method
  // that instantiates via TaskFactory.get()
  Task<?> root = getDependencyCollectionTask();
  Task<?> DTa = getDependencyCollectionTask();
  Task<?> DTb = getDependencyCollectionTask();
  Task<?> DTc = getDependencyCollectionTask();
  Task<?> DTd = getDependencyCollectionTask();
  Task<?> DTe = getDependencyCollectionTask();

  // Fan out from the root, then join at DTd to form the diamond.
  root.addDependentTask(DTa);
  root.addDependentTask(DTb);
  root.addDependentTask(DTc);

  DTa.addDependentTask(DTd);
  DTb.addDependentTask(DTd);
  DTc.addDependentTask(DTd);

  DTd.addDependentTask(providedTask);

  providedTask.addDependentTask(DTe);

  List<Task<?>> retVals = new ArrayList<Task<?>>();
  retVals.add(root);
  return retVals;
}
/**
* DependencyCollectionTask that counts how often getDependentTasks on it
* (and thus, on its descendants) is called counted via Task.getDependentTasks.
* It is used to wrap another task to intercept calls on it.
*/
public class CountingWrappingTask extends DependencyCollectionTask {
int count;
Task<?> wrappedDep = null;
public CountingWrappingTask(Task<?> dep) {
count = 0;
wrappedDep = dep;
super.addDependentTask(wrappedDep);
}
@Override
public boolean addDependentTask(Task<?> dependent) {
return wrappedDep.addDependentTask(dependent);
}
@Override
public List<Task<?>> getDependentTasks() {
count++;
System.err.println("YAH:getDepTasks got called!");
(new Exception()).printStackTrace(System.err);
LOG.info("YAH!getDepTasks", new Exception());
return super.getDependentTasks();
}
public int getDepCallCount() {
return count;
}
@Override
public String getName() {
return "COUNTER_TASK";
}
@Override
public String toString() {
return getName() + "_" + wrappedDep.toString();
}
};
/**
 * This test tests that Utilities.get*Tasks do not repeat themselves in the process
 * of extracting tasks from a given set of root tasks when given DAGs that can have
 * multiple paths, such as the case with Diamond-shaped DAGs common to replication.
 */
@Test
public void testGetTasksHaveNoRepeats() {
  CountingWrappingTask mrTask = new CountingWrappingTask(new ExecDriver());
  CountingWrappingTask tezTask = new CountingWrappingTask(new TezTask());

  // First check - we should not have repeats in results
  assertEquals("No repeated MRTasks from Utilities.getMRTasks", 1,
      Utilities.getMRTasks(getTestDiamondTaskGraph(mrTask)).size());
  assertEquals("No repeated TezTasks from Utilities.getTezTasks", 1,
      Utilities.getTezTasks(getTestDiamondTaskGraph(tezTask)).size());

  // Second check - the tasks we looked for must not have been accessed more than
  // once as a result of the traversal. (Note that the counter actually winds up at
  // 2, because each visit counts twice: once to check for existence, and once to
  // actually visit.)
  assertEquals("MRTasks should have been visited only once", 2, mrTask.getDepCallCount());
  assertEquals("TezTasks should have been visited only once", 2, tezTask.getDepCallCount());
}
/**
 * Creates a fresh {@link MapredWork} task through {@link TaskFactory} so it carries a
 * unique id (distinct equals/hashCode).
 */
private static Task<MapredWork> getMapredWork() {
  return TaskFactory.get(MapredWork.class);
}
@Test
@SuppressWarnings("unchecked")
public void testGetTasksRecursion() {
  // Build a small MR task tree: root -> {child1 -> child11, child2}.
  Task<MapredWork> rootTask = getMapredWork();
  Task<MapredWork> child1 = getMapredWork();
  Task<MapredWork> child2 = getMapredWork();
  Task<MapredWork> child11 = getMapredWork();

  rootTask.addDependentTask(child1);
  rootTask.addDependentTask(child2);
  child1.addDependentTask(child11);

  // rootTask is embedded inside the diamond graph; getMRTasks must still recurse into it
  // and return all four MR tasks exactly once, in traversal order.
  assertEquals(Lists.newArrayList(rootTask, child1, child2, child11),
      Utilities.getMRTasks(getTestDiamondTaskGraph(rootTask)));
}
/**
 * Every manifest carries the same (single) attempt id, so all of them must be selected.
 */
@Test
public void testSelectManifestFilesOnlyOneAttemptId() {
  FileStatus[] candidates = generateTestNotEmptyFileStatuses("000000_0.manifest", "000001_0.manifest",
      "000002_0.manifest", "000003_0.manifest");
  Set<String> expected =
      getExpectedPathes("000000_0.manifest", "000001_0.manifest", "000002_0.manifest", "000003_0.manifest");

  Set<String> selected = getResultPathes(Utilities.selectManifestFiles(candidates));
  assertEquals(expected, selected);
}
/**
 * Several attempts exist per task: only the manifest with the highest attempt id of each
 * task must be selected.
 */
@Test
public void testSelectManifestFilesMultipleAttemptIds() {
  FileStatus[] candidates = generateTestNotEmptyFileStatuses("000000_1.manifest", "000000_0.manifest",
      "000000_3.manifest", "000000_2.manifest", "000003_0.manifest", "000003_1.manifest", "000003_2.manifest");
  Set<String> expected = getExpectedPathes("000000_3.manifest", "000003_2.manifest");

  Set<String> selected = getResultPathes(Utilities.selectManifestFiles(candidates));
  assertEquals(expected, selected);
}
/**
 * Empty (zero-length) manifests must be skipped: for each task the highest-attempt
 * non-empty manifest wins.
 */
@Test
public void testSelectManifestFilesWithEmptyManifests() {
  Set<String> zeroLength = new HashSet<>();
  zeroLength.add("000001_0.manifest");
  zeroLength.add("000001_2.manifest");
  zeroLength.add("000002_2.manifest");

  FileStatus[] candidates = generateTestNotEmptyFileStatuses(zeroLength, "000001_1.manifest", "000001_0.manifest",
      "000001_3.manifest", "000001_2.manifest", "000002_0.manifest", "000002_1.manifest", "000002_2.manifest");
  Set<String> expected = getExpectedPathes("000001_3.manifest", "000002_1.manifest");

  Set<String> selected = getResultPathes(Utilities.selectManifestFiles(candidates));
  assertEquals(expected, selected);
}
/**
 * Files that do not match the {@code taskId_attemptId.manifest} pattern are either taken
 * as-is (plain {@code .manifest} names) or ignored (wrong extension / no extension).
 */
@Test
public void testSelectManifestFilesWithWrongManifestNames() {
  FileStatus[] candidates = generateTestNotEmptyFileStatuses("000004_0.manifest", "000005.manifest",
      "000004_1.manifest", "000006.manifest", "000007_0.wrong", "000008_1", "000004_2.manifest");
  Set<String> expected = getExpectedPathes("000005.manifest", "000006.manifest", "000004_2.manifest");

  Set<String> selected = getResultPathes(Utilities.selectManifestFiles(candidates));
  assertEquals(expected, selected);
}
/**
 * Tests {@link Utilities#ensurePathIsWritable}: it must add the write/execute bits needed
 * for the path to be writable without restricting permissions the directory already has.
 */
@Test
public void testSetPermissionsOnExistingDir() throws IOException {
  // Guava's Files.createTempDir() is deprecated and insecure (CVE-2020-8908); use NIO,
  // which creates the directory with owner-only permissions atomically.
  File tmpDir = java.nio.file.Files.createTempDirectory("testSetPermissions").toFile();
  try {
    Path path = new Path(tmpDir.getPath());
    HiveConf conf = new HiveConf(this.getClass());
    FileSystem fs = path.getFileSystem(conf);

    fs.setPermission(path, new FsPermission((short) 00700));
    Utilities.ensurePathIsWritable(path, conf);
    Assert.assertEquals((short) 0733, fs.getFileStatus(path).getPermission().toShort());

    // Test with more open permissions than required, but still not writable,
    // it should just make the directory writable without restricting the existing permissions
    fs.setPermission(path, new FsPermission((short) 00755));
    Utilities.ensurePathIsWritable(path, conf);
    Assert.assertEquals((short) 0777, fs.getFileStatus(path).getPermission().toShort());
  } finally {
    // Clean up the temp directory instead of leaking it (original never deleted it).
    tmpDir.delete();
  }
}
/**
 * Convenience overload: generates {@link FileStatus} fixtures where every file is non-empty.
 */
private FileStatus[] generateTestNotEmptyFileStatuses(String... names) {
  return generateTestNotEmptyFileStatuses(null, names);
}
/**
 * Generates {@link FileStatus} fixtures rooted at {@code /sometestpath/}. Files whose name
 * appears in {@code emptyFiles} get length 0; all others get a fixed non-zero length.
 *
 * @param emptyFiles names that should be generated as zero-length, or {@code null} for none
 * @param fileNames file names to generate, in order
 */
private FileStatus[] generateTestNotEmptyFileStatuses(Set<String> emptyFiles, String... fileNames) {
  FileStatus[] statuses = new FileStatus[fileNames.length];
  int idx = 0;
  for (String name : fileNames) {
    boolean isEmpty = emptyFiles != null && emptyFiles.contains(name);
    long length = isEmpty ? 0L : 10000L;
    statuses[idx++] = new FileStatus(length, false, 0, 250L, 123456L, new Path("/sometestpath/" + name));
  }
  return statuses;
}
/**
 * Prefixes each file name with {@code /sometestpath/} and collects the results into a set.
 */
private Set<String> getExpectedPathes(String... fileNames) {
  Set<String> expected = new HashSet<>();
  for (int i = 0; i < fileNames.length; i++) {
    expected.add("/sometestpath/" + fileNames[i]);
  }
  return expected;
}
/**
 * Converts the selected manifest paths to their string form, collected into a set.
 */
private Set<String> getResultPathes(List<Path> foundManifestFiles) {
  Set<String> result = new HashSet<>();
  foundManifestFiles.forEach(manifest -> result.add(manifest.toString()));
  return result;
}
}
|
apache/jackrabbit-oak | 35,630 | oak-lucene/src/main/java/org/apache/lucene/codecs/compressing/CompressingTermVectorsReader.java | /*
* COPIED FROM APACHE LUCENE 4.7.2
*
* Git URL: git@github.com:apache/lucene.git, tag: releases/lucene-solr/4.7.2, path: lucene/core/src/java
*
* (see https://issues.apache.org/jira/browse/OAK-10786 for details)
*/
package org.apache.lucene.codecs.compressing;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import static org.apache.lucene.codecs.compressing.CompressingTermVectorsWriter.BLOCK_SIZE;
import static org.apache.lucene.codecs.compressing.CompressingTermVectorsWriter.CODEC_SFX_DAT;
import static org.apache.lucene.codecs.compressing.CompressingTermVectorsWriter.CODEC_SFX_IDX;
import static org.apache.lucene.codecs.compressing.CompressingTermVectorsWriter.FLAGS_BITS;
import static org.apache.lucene.codecs.compressing.CompressingTermVectorsWriter.OFFSETS;
import static org.apache.lucene.codecs.compressing.CompressingTermVectorsWriter.PAYLOADS;
import static org.apache.lucene.codecs.compressing.CompressingTermVectorsWriter.POSITIONS;
import static org.apache.lucene.codecs.compressing.CompressingTermVectorsWriter.VECTORS_EXTENSION;
import static org.apache.lucene.codecs.compressing.CompressingTermVectorsWriter.VECTORS_INDEX_EXTENSION;
import static org.apache.lucene.codecs.compressing.CompressingTermVectorsWriter.VERSION_CURRENT;
import static org.apache.lucene.codecs.compressing.CompressingTermVectorsWriter.VERSION_START;
import java.io.Closeable;
import java.io.IOException;
import java.util.Comparator;
import java.util.Iterator;
import java.util.NoSuchElementException;
import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.codecs.TermVectorsReader;
import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.index.DocsAndPositionsEnum;
import org.apache.lucene.index.DocsEnum;
import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.FieldInfos;
import org.apache.lucene.index.Fields;
import org.apache.lucene.index.IndexFileNames;
import org.apache.lucene.index.SegmentInfo;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.store.AlreadyClosedException;
import org.apache.lucene.store.ByteArrayDataInput;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.LongsRef;
import org.apache.lucene.util.RamUsageEstimator;
import org.apache.lucene.util.packed.BlockPackedReaderIterator;
import org.apache.lucene.util.packed.PackedInts;
/**
* {@link TermVectorsReader} for {@link CompressingTermVectorsFormat}.
* @lucene.experimental
*/
public final class CompressingTermVectorsReader extends TermVectorsReader implements Closeable {
// Per-field metadata for the segment being read.
private final FieldInfos fieldInfos;
// Maps a doc id to the file pointer of its chunk in the vectors stream (see get(int)).
final CompressingStoredFieldsIndexReader indexReader;
// Data stream holding the compressed term vectors.
final IndexInput vectorsStream;
// Version of the packed-ints encoding, read from the data file header.
private final int packedIntsVersion;
// Compression scheme the vectors were written with.
private final CompressionMode compressionMode;
// Decompressor obtained from compressionMode.
private final Decompressor decompressor;
// Chunk size recorded in the data file header.
private final int chunkSize;
// Document count of the segment (si.getDocCount()).
private final int numDocs;
// Set by close(); checked by ensureOpen().
private boolean closed;
// Reusable block-packed integer decoder over vectorsStream.
private final BlockPackedReaderIterator reader;
// Copy constructor used by clone(): clones the underlying stream and readers so the copy
// can be positioned independently of the original.
private CompressingTermVectorsReader(CompressingTermVectorsReader reader) {
  this.fieldInfos = reader.fieldInfos;
  this.vectorsStream = reader.vectorsStream.clone(); // independent file pointer
  this.indexReader = reader.indexReader.clone();
  this.packedIntsVersion = reader.packedIntsVersion;
  this.compressionMode = reader.compressionMode;
  this.decompressor = reader.decompressor.clone();
  this.chunkSize = reader.chunkSize;
  this.numDocs = reader.numDocs;
  // Fresh iterator bound to this instance's cloned stream.
  this.reader = new BlockPackedReaderIterator(vectorsStream, packedIntsVersion, BLOCK_SIZE, 0);
  this.closed = false;
}
/**
 * Sole constructor. Eagerly loads the vectors index file into memory, then opens the
 * vectors data file and validates both codec headers. On any failure everything opened so
 * far is closed without masking the original exception.
 */
public CompressingTermVectorsReader(Directory d, SegmentInfo si, String segmentSuffix, FieldInfos fn,
    IOContext context, String formatName, CompressionMode compressionMode) throws IOException {
  this.compressionMode = compressionMode;
  final String segment = si.name;
  boolean success = false;
  fieldInfos = fn;
  numDocs = si.getDocCount();
  IndexInput indexStream = null;
  try {
    // Load the index into memory
    final String indexStreamFN = IndexFileNames.segmentFileName(segment, segmentSuffix, VECTORS_INDEX_EXTENSION);
    indexStream = d.openInput(indexStreamFN, context);
    final String codecNameIdx = formatName + CODEC_SFX_IDX;
    CodecUtil.checkHeader(indexStream, codecNameIdx, VERSION_START, VERSION_CURRENT);
    assert CodecUtil.headerLength(codecNameIdx) == indexStream.getFilePointer();
    indexReader = new CompressingStoredFieldsIndexReader(indexStream, si);
    // The index is fully decoded at this point; the stream itself is no longer needed.
    indexStream.close();
    indexStream = null;

    // Open the data file and read metadata
    final String vectorsStreamFN = IndexFileNames.segmentFileName(segment, segmentSuffix, VECTORS_EXTENSION);
    vectorsStream = d.openInput(vectorsStreamFN, context);
    final String codecNameDat = formatName + CODEC_SFX_DAT;
    CodecUtil.checkHeader(vectorsStream, codecNameDat, VERSION_START, VERSION_CURRENT);
    assert CodecUtil.headerLength(codecNameDat) == vectorsStream.getFilePointer();

    packedIntsVersion = vectorsStream.readVInt();
    chunkSize = vectorsStream.readVInt();
    decompressor = compressionMode.newDecompressor();
    this.reader = new BlockPackedReaderIterator(vectorsStream, packedIntsVersion, BLOCK_SIZE, 0);

    success = true;
  } finally {
    if (!success) {
      // Close this (releases vectorsStream if opened) and the index stream, suppressing
      // any secondary exceptions so the original one propagates.
      IOUtils.closeWhileHandlingException(this, indexStream);
    }
  }
}
// Package-private accessors exposing the reader's configuration and underlying streams.

/** Returns the compression scheme the vectors were written with. */
CompressionMode getCompressionMode() {
  return compressionMode;
}

/** Returns the chunk size read from the data file header. */
int getChunkSize() {
  return chunkSize;
}

/** Returns the packed-ints encoding version read from the data file header. */
int getPackedIntsVersion() {
  return packedIntsVersion;
}

/** Returns the in-memory doc-id to file-pointer index. */
CompressingStoredFieldsIndexReader getIndex() {
  return indexReader;
}

/** Returns the raw vectors data stream. */
IndexInput getVectorsStream() {
  return vectorsStream;
}
/**
 * Verifies the reader is still usable.
 *
 * @throws AlreadyClosedException if this TermVectorsReader is closed
 */
private void ensureOpen() throws AlreadyClosedException {
  if (!closed) {
    return;
  }
  throw new AlreadyClosedException("this FieldsReader is closed");
}
/**
 * Closes the underlying vectors stream. Subsequent calls are no-ops.
 */
@Override
public void close() throws IOException {
  if (closed) {
    return;
  }
  IOUtils.close(vectorsStream);
  closed = true;
}
/**
 * Returns an independent reader over the same data via the private copy constructor
 * (cloned stream, so the two readers can seek independently).
 */
@Override
public TermVectorsReader clone() {
  return new CompressingTermVectorsReader(this);
}
@Override
public Fields get(int doc) throws IOException {
ensureOpen();
// seek to the right place
{
final long startPointer = indexReader.getStartPointer(doc);
vectorsStream.seek(startPointer);
}
// decode
// - docBase: first doc ID of the chunk
// - chunkDocs: number of docs of the chunk
final int docBase = vectorsStream.readVInt();
final int chunkDocs = vectorsStream.readVInt();
if (doc < docBase || doc >= docBase + chunkDocs || docBase + chunkDocs > numDocs) {
throw new CorruptIndexException("docBase=" + docBase + ",chunkDocs=" + chunkDocs + ",doc=" + doc + " (resource=" + vectorsStream + ")");
}
final int skip; // number of fields to skip
final int numFields; // number of fields of the document we're looking for
final int totalFields; // total number of fields of the chunk (sum for all docs)
if (chunkDocs == 1) {
skip = 0;
numFields = totalFields = vectorsStream.readVInt();
} else {
reader.reset(vectorsStream, chunkDocs);
int sum = 0;
for (int i = docBase; i < doc; ++i) {
sum += reader.next();
}
skip = sum;
numFields = (int) reader.next();
sum += numFields;
for (int i = doc + 1; i < docBase + chunkDocs; ++i) {
sum += reader.next();
}
totalFields = sum;
}
if (numFields == 0) {
// no vectors
return null;
}
// read field numbers that have term vectors
final int[] fieldNums;
{
final int token = vectorsStream.readByte() & 0xFF;
assert token != 0; // means no term vectors, cannot happen since we checked for numFields == 0
final int bitsPerFieldNum = token & 0x1F;
int totalDistinctFields = token >>> 5;
if (totalDistinctFields == 0x07) {
totalDistinctFields += vectorsStream.readVInt();
}
++totalDistinctFields;
final PackedInts.ReaderIterator it = PackedInts.getReaderIteratorNoHeader(vectorsStream, PackedInts.Format.PACKED, packedIntsVersion, totalDistinctFields, bitsPerFieldNum, 1);
fieldNums = new int[totalDistinctFields];
for (int i = 0; i < totalDistinctFields; ++i) {
fieldNums[i] = (int) it.next();
}
}
// read field numbers and flags
final int[] fieldNumOffs = new int[numFields];
final PackedInts.Reader flags;
{
final int bitsPerOff = PackedInts.bitsRequired(fieldNums.length - 1);
final PackedInts.Reader allFieldNumOffs = PackedInts.getReaderNoHeader(vectorsStream, PackedInts.Format.PACKED, packedIntsVersion, totalFields, bitsPerOff);
switch (vectorsStream.readVInt()) {
case 0:
final PackedInts.Reader fieldFlags = PackedInts.getReaderNoHeader(vectorsStream, PackedInts.Format.PACKED, packedIntsVersion, fieldNums.length, FLAGS_BITS);
PackedInts.Mutable f = PackedInts.getMutable(totalFields, FLAGS_BITS, PackedInts.COMPACT);
for (int i = 0; i < totalFields; ++i) {
final int fieldNumOff = (int) allFieldNumOffs.get(i);
assert fieldNumOff >= 0 && fieldNumOff < fieldNums.length;
final int fgs = (int) fieldFlags.get(fieldNumOff);
f.set(i, fgs);
}
flags = f;
break;
case 1:
flags = PackedInts.getReaderNoHeader(vectorsStream, PackedInts.Format.PACKED, packedIntsVersion, totalFields, FLAGS_BITS);
break;
default:
throw new AssertionError();
}
for (int i = 0; i < numFields; ++i) {
fieldNumOffs[i] = (int) allFieldNumOffs.get(skip + i);
}
}
// number of terms per field for all fields
final PackedInts.Reader numTerms;
final int totalTerms;
{
final int bitsRequired = vectorsStream.readVInt();
numTerms = PackedInts.getReaderNoHeader(vectorsStream, PackedInts.Format.PACKED, packedIntsVersion, totalFields, bitsRequired);
int sum = 0;
for (int i = 0; i < totalFields; ++i) {
sum += numTerms.get(i);
}
totalTerms = sum;
}
// term lengths
int docOff = 0, docLen = 0, totalLen;
final int[] fieldLengths = new int[numFields];
final int[][] prefixLengths = new int[numFields][];
final int[][] suffixLengths = new int[numFields][];
{
reader.reset(vectorsStream, totalTerms);
// skip
int toSkip = 0;
for (int i = 0; i < skip; ++i) {
toSkip += numTerms.get(i);
}
reader.skip(toSkip);
// read prefix lengths
for (int i = 0; i < numFields; ++i) {
final int termCount = (int) numTerms.get(skip + i);
final int[] fieldPrefixLengths = new int[termCount];
prefixLengths[i] = fieldPrefixLengths;
for (int j = 0; j < termCount; ) {
final LongsRef next = reader.next(termCount - j);
for (int k = 0; k < next.length; ++k) {
fieldPrefixLengths[j++] = (int) next.longs[next.offset + k];
}
}
}
reader.skip(totalTerms - reader.ord());
reader.reset(vectorsStream, totalTerms);
// skip
toSkip = 0;
for (int i = 0; i < skip; ++i) {
for (int j = 0; j < numTerms.get(i); ++j) {
docOff += reader.next();
}
}
for (int i = 0; i < numFields; ++i) {
final int termCount = (int) numTerms.get(skip + i);
final int[] fieldSuffixLengths = new int[termCount];
suffixLengths[i] = fieldSuffixLengths;
for (int j = 0; j < termCount; ) {
final LongsRef next = reader.next(termCount - j);
for (int k = 0; k < next.length; ++k) {
fieldSuffixLengths[j++] = (int) next.longs[next.offset + k];
}
}
fieldLengths[i] = sum(suffixLengths[i]);
docLen += fieldLengths[i];
}
totalLen = docOff + docLen;
for (int i = skip + numFields; i < totalFields; ++i) {
for (int j = 0; j < numTerms.get(i); ++j) {
totalLen += reader.next();
}
}
}
// term freqs
final int[] termFreqs = new int[totalTerms];
{
reader.reset(vectorsStream, totalTerms);
for (int i = 0; i < totalTerms; ) {
final LongsRef next = reader.next(totalTerms - i);
for (int k = 0; k < next.length; ++k) {
termFreqs[i++] = 1 + (int) next.longs[next.offset + k];
}
}
}
// total number of positions, offsets and payloads
int totalPositions = 0, totalOffsets = 0, totalPayloads = 0;
for (int i = 0, termIndex = 0; i < totalFields; ++i) {
final int f = (int) flags.get(i);
final int termCount = (int) numTerms.get(i);
for (int j = 0; j < termCount; ++j) {
final int freq = termFreqs[termIndex++];
if ((f & POSITIONS) != 0) {
totalPositions += freq;
}
if ((f & OFFSETS) != 0) {
totalOffsets += freq;
}
if ((f & PAYLOADS) != 0) {
totalPayloads += freq;
}
}
assert i != totalFields - 1 || termIndex == totalTerms : termIndex + " " + totalTerms;
}
final int[][] positionIndex = positionIndex(skip, numFields, numTerms, termFreqs);
final int[][] positions, startOffsets, lengths;
if (totalPositions > 0) {
positions = readPositions(skip, numFields, flags, numTerms, termFreqs, POSITIONS, totalPositions, positionIndex);
} else {
positions = new int[numFields][];
}
if (totalOffsets > 0) {
// average number of chars per term
final float[] charsPerTerm = new float[fieldNums.length];
for (int i = 0; i < charsPerTerm.length; ++i) {
charsPerTerm[i] = Float.intBitsToFloat(vectorsStream.readInt());
}
startOffsets = readPositions(skip, numFields, flags, numTerms, termFreqs, OFFSETS, totalOffsets, positionIndex);
lengths = readPositions(skip, numFields, flags, numTerms, termFreqs, OFFSETS, totalOffsets, positionIndex);
for (int i = 0; i < numFields; ++i) {
final int[] fStartOffsets = startOffsets[i];
final int[] fPositions = positions[i];
// patch offsets from positions
if (fStartOffsets != null && fPositions != null) {
final float fieldCharsPerTerm = charsPerTerm[fieldNumOffs[i]];
for (int j = 0; j < startOffsets[i].length; ++j) {
fStartOffsets[j] += (int) (fieldCharsPerTerm * fPositions[j]);
}
}
if (fStartOffsets != null) {
final int[] fPrefixLengths = prefixLengths[i];
final int[] fSuffixLengths = suffixLengths[i];
final int[] fLengths = lengths[i];
for (int j = 0, end = (int) numTerms.get(skip + i); j < end; ++j) {
// delta-decode start offsets and patch lengths using term lengths
final int termLength = fPrefixLengths[j] + fSuffixLengths[j];
lengths[i][positionIndex[i][j]] += termLength;
for (int k = positionIndex[i][j] + 1; k < positionIndex[i][j + 1]; ++k) {
fStartOffsets[k] += fStartOffsets[k - 1];
fLengths[k] += termLength;
}
}
}
}
} else {
startOffsets = lengths = new int[numFields][];
}
if (totalPositions > 0) {
// delta-decode positions
for (int i = 0; i < numFields; ++i) {
final int[] fPositions = positions[i];
final int[] fpositionIndex = positionIndex[i];
if (fPositions != null) {
for (int j = 0, end = (int) numTerms.get(skip + i); j < end; ++j) {
// delta-decode start offsets
for (int k = fpositionIndex[j] + 1; k < fpositionIndex[j + 1]; ++k) {
fPositions[k] += fPositions[k - 1];
}
}
}
}
}
// payload lengths
final int[][] payloadIndex = new int[numFields][];
int totalPayloadLength = 0;
int payloadOff = 0;
int payloadLen = 0;
if (totalPayloads > 0) {
reader.reset(vectorsStream, totalPayloads);
// skip
int termIndex = 0;
for (int i = 0; i < skip; ++i) {
final int f = (int) flags.get(i);
final int termCount = (int) numTerms.get(i);
if ((f & PAYLOADS) != 0) {
for (int j = 0; j < termCount; ++j) {
final int freq = termFreqs[termIndex + j];
for (int k = 0; k < freq; ++k) {
final int l = (int) reader.next();
payloadOff += l;
}
}
}
termIndex += termCount;
}
totalPayloadLength = payloadOff;
// read doc payload lengths
for (int i = 0; i < numFields; ++i) {
final int f = (int) flags.get(skip + i);
final int termCount = (int) numTerms.get(skip + i);
if ((f & PAYLOADS) != 0) {
final int totalFreq = positionIndex[i][termCount];
payloadIndex[i] = new int[totalFreq + 1];
int posIdx = 0;
payloadIndex[i][posIdx] = payloadLen;
for (int j = 0; j < termCount; ++j) {
final int freq = termFreqs[termIndex + j];
for (int k = 0; k < freq; ++k) {
final int payloadLength = (int) reader.next();
payloadLen += payloadLength;
payloadIndex[i][posIdx+1] = payloadLen;
++posIdx;
}
}
assert posIdx == totalFreq;
}
termIndex += termCount;
}
totalPayloadLength += payloadLen;
for (int i = skip + numFields; i < totalFields; ++i) {
final int f = (int) flags.get(i);
final int termCount = (int) numTerms.get(i);
if ((f & PAYLOADS) != 0) {
for (int j = 0; j < termCount; ++j) {
final int freq = termFreqs[termIndex + j];
for (int k = 0; k < freq; ++k) {
totalPayloadLength += reader.next();
}
}
}
termIndex += termCount;
}
assert termIndex == totalTerms : termIndex + " " + totalTerms;
}
// decompress data
final BytesRef suffixBytes = new BytesRef();
decompressor.decompress(vectorsStream, totalLen + totalPayloadLength, docOff + payloadOff, docLen + payloadLen, suffixBytes);
suffixBytes.length = docLen;
final BytesRef payloadBytes = new BytesRef(suffixBytes.bytes, suffixBytes.offset + docLen, payloadLen);
final int[] fieldFlags = new int[numFields];
for (int i = 0; i < numFields; ++i) {
fieldFlags[i] = (int) flags.get(skip + i);
}
final int[] fieldNumTerms = new int[numFields];
for (int i = 0; i < numFields; ++i) {
fieldNumTerms[i] = (int) numTerms.get(skip + i);
}
final int[][] fieldTermFreqs = new int[numFields][];
{
int termIdx = 0;
for (int i = 0; i < skip; ++i) {
termIdx += numTerms.get(i);
}
for (int i = 0; i < numFields; ++i) {
final int termCount = (int) numTerms.get(skip + i);
fieldTermFreqs[i] = new int[termCount];
for (int j = 0; j < termCount; ++j) {
fieldTermFreqs[i][j] = termFreqs[termIdx++];
}
}
}
assert sum(fieldLengths) == docLen : sum(fieldLengths) + " != " + docLen;
return new TVFields(fieldNums, fieldFlags, fieldNumOffs, fieldNumTerms, fieldLengths,
prefixLengths, suffixLengths, fieldTermFreqs,
positionIndex, positions, startOffsets, lengths,
payloadBytes, payloadIndex,
suffixBytes);
}
// Builds, for each on-deck field, a prefix-sum table that maps a term ordinal to the
// index of its first position; entry [termCount] holds the field's total frequency.
private int[][] positionIndex(int skip, int numFields, PackedInts.Reader numTerms, int[] termFreqs) {
  final int[][] index = new int[numFields][];
  // jump over the term frequencies that belong to the first `skip` fields
  int freqOffset = 0;
  for (int field = 0; field < skip; ++field) {
    freqOffset += (int) numTerms.get(field);
  }
  for (int field = 0; field < numFields; ++field) {
    final int termCount = (int) numTerms.get(skip + field);
    final int[] prefixSums = new int[termCount + 1];
    index[field] = prefixSums;
    for (int term = 0; term < termCount; ++term) {
      prefixSums[term + 1] = prefixSums[term] + termFreqs[freqOffset + term];
    }
    freqOffset += termCount;
  }
  return index;
}
// Reads one packed block of per-position data (positions or offsets, selected by
// `flag`) for the on-deck fields of the current document. Data belonging to the
// first `skip` fields of the chunk is skipped; data of trailing fields is skipped
// at the end so the stream lands on the next block.
private int[][] readPositions(int skip, int numFields, PackedInts.Reader flags, PackedInts.Reader numTerms, int[] termFreqs, int flag, final int totalPositions, int[][] positionIndex) throws IOException {
  final int[][] positions = new int[numFields][];
  reader.reset(vectorsStream, totalPositions);
  // skip: sum the frequencies of every term of the first `skip` fields that has
  // data for this flag, then jump over that many packed integers
  int toSkip = 0;
  int termIndex = 0;
  for (int i = 0; i < skip; ++i) {
    final int f = (int) flags.get(i);
    final int termCount = (int) numTerms.get(i);
    if ((f & flag) != 0) {
      for (int j = 0; j < termCount; ++j) {
        final int freq = termFreqs[termIndex+j];
        toSkip += freq;
      }
    }
    termIndex += termCount;
  }
  reader.skip(toSkip);
  // read doc positions; fields without this flag keep a null entry
  for (int i = 0; i < numFields; ++i) {
    final int f = (int) flags.get(skip + i);
    final int termCount = (int) numTerms.get(skip + i);
    if ((f & flag) != 0) {
      final int totalFreq = positionIndex[i][termCount];
      final int[] fieldPositions = new int[totalFreq];
      positions[i] = fieldPositions;
      for (int j = 0; j < totalFreq; ) {
        // the packed reader hands values back in bulk; copy until the field is full
        final LongsRef nextPositions = reader.next(totalFreq - j);
        for (int k = 0; k < nextPositions.length; ++k) {
          fieldPositions[j++] = (int) nextPositions.longs[nextPositions.offset + k];
        }
      }
    }
    termIndex += termCount;
  }
  // advance past the data of the remaining fields of the chunk
  reader.skip(totalPositions - reader.ord());
  return positions;
}
/**
 * Per-document {@link Fields} view over the decompressed term vectors of a
 * single document of the chunk.
 */
private class TVFields extends Fields {

  private final int[] fieldNums, fieldFlags, fieldNumOffs, numTerms, fieldLengths;
  private final int[][] prefixLengths, suffixLengths, termFreqs, positionIndex, positions, startOffsets, lengths, payloadIndex;
  private final BytesRef suffixBytes, payloadBytes;

  public TVFields(int[] fieldNums, int[] fieldFlags, int[] fieldNumOffs, int[] numTerms, int[] fieldLengths,
                  int[][] prefixLengths, int[][] suffixLengths, int[][] termFreqs,
                  int[][] positionIndex, int[][] positions, int[][] startOffsets, int[][] lengths,
                  BytesRef payloadBytes, int[][] payloadIndex,
                  BytesRef suffixBytes) {
    this.fieldNums = fieldNums;
    this.fieldFlags = fieldFlags;
    this.fieldNumOffs = fieldNumOffs;
    this.numTerms = numTerms;
    this.fieldLengths = fieldLengths;
    this.prefixLengths = prefixLengths;
    this.suffixLengths = suffixLengths;
    this.termFreqs = termFreqs;
    this.positionIndex = positionIndex;
    this.positions = positions;
    this.startOffsets = startOffsets;
    this.lengths = lengths;
    this.payloadBytes = payloadBytes;
    this.payloadIndex = payloadIndex;
    this.suffixBytes = suffixBytes;
  }

  @Override
  public Iterator<String> iterator() {
    // iterate field names in the order in which they are stored for this document
    return new Iterator<String>() {
      private int upto = 0;
      @Override
      public boolean hasNext() {
        return upto < fieldNumOffs.length;
      }
      @Override
      public String next() {
        if (upto == fieldNumOffs.length) {
          throw new NoSuchElementException();
        }
        final int fieldNum = fieldNums[fieldNumOffs[upto]];
        ++upto;
        return fieldInfos.fieldInfo(fieldNum).name;
      }
      @Override
      public void remove() {
        throw new UnsupportedOperationException();
      }
    };
  }

  @Override
  public Terms terms(String field) throws IOException {
    final FieldInfo fieldInfo = fieldInfos.fieldInfo(field);
    if (fieldInfo == null) {
      // unknown field
      return null;
    }
    // locate the field in this document's vectors while accumulating the byte
    // offset of its term data inside suffixBytes
    int idx = -1;
    int fieldOff = 0;
    for (int i = 0; i < fieldNumOffs.length; ++i) {
      if (fieldNums[fieldNumOffs[i]] == fieldInfo.number) {
        idx = i;
        break;
      }
      fieldOff += fieldLengths[i];
    }
    if (idx == -1 || numTerms[idx] == 0) {
      // no term vector for this field in this document
      return null;
    }
    final int fieldLen = fieldLengths[idx];
    return new TVTerms(numTerms[idx], fieldFlags[idx],
        prefixLengths[idx], suffixLengths[idx], termFreqs[idx],
        positionIndex[idx], positions[idx], startOffsets[idx], lengths[idx],
        payloadIndex[idx], payloadBytes,
        new BytesRef(suffixBytes.bytes, suffixBytes.offset + fieldOff, fieldLen));
  }

  @Override
  public int size() {
    return fieldNumOffs.length;
  }

}
/**
 * {@link Terms} over a single field of a single document's term vector.
 */
private class TVTerms extends Terms {

  private final int numTerms, flags;
  private final int[] prefixLengths, suffixLengths, termFreqs, positionIndex, positions, startOffsets, lengths, payloadIndex;
  private final BytesRef termBytes, payloadBytes;

  TVTerms(int numTerms, int flags, int[] prefixLengths, int[] suffixLengths, int[] termFreqs,
      int[] positionIndex, int[] positions, int[] startOffsets, int[] lengths,
      int[] payloadIndex, BytesRef payloadBytes,
      BytesRef termBytes) {
    this.numTerms = numTerms;
    this.flags = flags;
    this.prefixLengths = prefixLengths;
    this.suffixLengths = suffixLengths;
    this.termFreqs = termFreqs;
    this.positionIndex = positionIndex;
    this.positions = positions;
    this.startOffsets = startOffsets;
    this.lengths = lengths;
    this.payloadIndex = payloadIndex;
    this.payloadBytes = payloadBytes;
    this.termBytes = termBytes;
  }

  @Override
  public TermsEnum iterator(TermsEnum reuse) throws IOException {
    // reuse the caller's enum when it is ours, otherwise allocate a fresh one
    final TVTermsEnum termsEnum = reuse instanceof TVTermsEnum
        ? (TVTermsEnum) reuse
        : new TVTermsEnum();
    termsEnum.reset(numTerms, flags, prefixLengths, suffixLengths, termFreqs, positionIndex, positions, startOffsets, lengths,
        payloadIndex, payloadBytes,
        new ByteArrayDataInput(termBytes.bytes, termBytes.offset, termBytes.length));
    return termsEnum;
  }

  @Override
  public Comparator<BytesRef> getComparator() {
    return BytesRef.getUTF8SortedAsUnicodeComparator();
  }

  @Override
  public long size() throws IOException {
    return numTerms;
  }

  @Override
  public long getSumTotalTermFreq() throws IOException {
    return -1L; // not tracked for term vectors
  }

  @Override
  public long getSumDocFreq() throws IOException {
    // one document only, so every term contributes a doc freq of exactly 1
    return numTerms;
  }

  @Override
  public int getDocCount() throws IOException {
    return 1;
  }

  @Override
  public boolean hasFreqs() {
    return true;
  }

  @Override
  public boolean hasOffsets() {
    return (flags & OFFSETS) != 0;
  }

  @Override
  public boolean hasPositions() {
    return (flags & POSITIONS) != 0;
  }

  @Override
  public boolean hasPayloads() {
    return (flags & PAYLOADS) != 0;
  }

}
/**
 * {@link TermsEnum} over a single field/document pair. Term bytes are
 * front-coded: each term shares a prefix with its predecessor, so {@code term}
 * is patched in place as the enum advances and must never be cached by callers.
 */
private static class TVTermsEnum extends TermsEnum {
  private int numTerms, startPos, ord;
  private int[] prefixLengths, suffixLengths, termFreqs, positionIndex, positions, startOffsets, lengths, payloadIndex;
  private ByteArrayDataInput in;
  private BytesRef payloads;
  private final BytesRef term;
  private TVTermsEnum() {
    term = new BytesRef(16);
  }
  // (Re)initialize this enum over one field's terms. `flags` is accepted for
  // signature symmetry with the writer but not used here.
  void reset(int numTerms, int flags, int[] prefixLengths, int[] suffixLengths, int[] termFreqs, int[] positionIndex, int[] positions, int[] startOffsets, int[] lengths,
      int[] payloadIndex, BytesRef payloads, ByteArrayDataInput in) {
    this.numTerms = numTerms;
    this.prefixLengths = prefixLengths;
    this.suffixLengths = suffixLengths;
    this.termFreqs = termFreqs;
    this.positionIndex = positionIndex;
    this.positions = positions;
    this.startOffsets = startOffsets;
    this.lengths = lengths;
    this.payloadIndex = payloadIndex;
    this.payloads = payloads;
    this.in = in;
    startPos = in.getPosition();
    reset();
  }
  // rewind to before the first term
  void reset() {
    term.length = 0;
    in.setPosition(startPos);
    ord = -1;
  }
  @Override
  public BytesRef next() throws IOException {
    if (ord == numTerms - 1) {
      return null;
    } else {
      assert ord < numTerms;
      ++ord;
    }
    // read term: the shared prefix is already in term.bytes from the previous
    // term, only the suffix is read from the stream
    term.offset = 0;
    term.length = prefixLengths[ord] + suffixLengths[ord];
    if (term.length > term.bytes.length) {
      term.bytes = ArrayUtil.grow(term.bytes, term.length);
    }
    in.readBytes(term.bytes, prefixLengths[ord], suffixLengths[ord]);
    return term;
  }
  @Override
  public Comparator<BytesRef> getComparator() {
    return BytesRef.getUTF8SortedAsUnicodeComparator();
  }
  @Override
  public SeekStatus seekCeil(BytesRef text)
      throws IOException {
    // terms are stored in sorted order; rewind only when the current term is
    // already past the target, then scan forward linearly
    if (ord < numTerms && ord >= 0) {
      final int cmp = term().compareTo(text);
      if (cmp == 0) {
        return SeekStatus.FOUND;
      } else if (cmp > 0) {
        reset();
      }
    }
    // linear scan
    while (true) {
      final BytesRef term = next();
      if (term == null) {
        return SeekStatus.END;
      }
      final int cmp = term.compareTo(text);
      if (cmp > 0) {
        return SeekStatus.NOT_FOUND;
      } else if (cmp == 0) {
        return SeekStatus.FOUND;
      }
    }
  }
  @Override
  public void seekExact(long ord) throws IOException {
    // ord-based seeking is not supported by this enum
    throw new UnsupportedOperationException();
  }
  @Override
  public BytesRef term() throws IOException {
    return term;
  }
  @Override
  public long ord() throws IOException {
    throw new UnsupportedOperationException();
  }
  @Override
  public int docFreq() throws IOException {
    // a term vector addresses exactly one document
    return 1;
  }
  @Override
  public long totalTermFreq() throws IOException {
    return termFreqs[ord];
  }
  @Override
  public final DocsEnum docs(Bits liveDocs, DocsEnum reuse, int flags) throws IOException {
    final TVDocsEnum docsEnum;
    if (reuse != null && reuse instanceof TVDocsEnum) {
      docsEnum = (TVDocsEnum) reuse;
    } else {
      docsEnum = new TVDocsEnum();
    }
    docsEnum.reset(liveDocs, termFreqs[ord], positionIndex[ord], positions, startOffsets, lengths, payloads, payloadIndex);
    return docsEnum;
  }
  @Override
  public DocsAndPositionsEnum docsAndPositions(Bits liveDocs, DocsAndPositionsEnum reuse, int flags) throws IOException {
    // no positions and no offsets were indexed for this field
    if (positions == null && startOffsets == null) {
      return null;
    }
    // TODO: slightly sheisty
    return (DocsAndPositionsEnum) docs(liveDocs, reuse, flags);
  }
}
/**
 * {@link DocsAndPositionsEnum} over a single document: term vectors always
 * address exactly one document, exposed here as doc id 0.
 */
private static class TVDocsEnum extends DocsAndPositionsEnum {
  private Bits liveDocs;
  private int doc = -1;
  private int termFreq;
  private int positionIndex; // start index of this term's data in the flat per-field arrays
  private int[] positions; // null when positions were not indexed
  private int[] startOffsets; // null when offsets were not indexed
  private int[] lengths;
  private final BytesRef payload;
  private int[] payloadIndex; // null when payloads were not indexed
  private int basePayloadOffset;
  private int i; // index of the current position; -1 before the first nextPosition()
  TVDocsEnum() {
    payload = new BytesRef();
  }
  public void reset(Bits liveDocs, int freq, int positionIndex, int[] positions,
      int[] startOffsets, int[] lengths, BytesRef payloads,
      int[] payloadIndex) {
    this.liveDocs = liveDocs;
    this.termFreq = freq;
    this.positionIndex = positionIndex;
    this.positions = positions;
    this.startOffsets = startOffsets;
    this.lengths = lengths;
    this.basePayloadOffset = payloads.offset;
    this.payload.bytes = payloads.bytes;
    payload.offset = payload.length = 0;
    this.payloadIndex = payloadIndex;
    doc = i = -1;
  }
  // guard: the enum must be positioned on the (single) document
  private void checkDoc() {
    if (doc == NO_MORE_DOCS) {
      throw new IllegalStateException("DocsEnum exhausted");
    } else if (doc == -1) {
      throw new IllegalStateException("DocsEnum not started");
    }
  }
  // guard: nextPosition() must have been called and not over-consumed
  private void checkPosition() {
    checkDoc();
    if (i < 0) {
      throw new IllegalStateException("Position enum not started");
    } else if (i >= termFreq) {
      throw new IllegalStateException("Read past last position");
    }
  }
  @Override
  public int nextPosition() throws IOException {
    // the only document this enum ever exposes is doc 0
    if (doc != 0) {
      throw new IllegalStateException();
    } else if (i >= termFreq - 1) {
      throw new IllegalStateException("Read past last position");
    }
    ++i;
    if (payloadIndex != null) {
      // payloadIndex holds prefix sums of payload lengths, relative to basePayloadOffset
      payload.offset = basePayloadOffset + payloadIndex[positionIndex + i];
      payload.length = payloadIndex[positionIndex + i + 1] - payloadIndex[positionIndex + i];
    }
    if (positions == null) {
      return -1;
    } else {
      return positions[positionIndex + i];
    }
  }
  @Override
  public int startOffset() throws IOException {
    checkPosition();
    if (startOffsets == null) {
      return -1;
    } else {
      return startOffsets[positionIndex + i];
    }
  }
  @Override
  public int endOffset() throws IOException {
    checkPosition();
    if (startOffsets == null) {
      return -1;
    } else {
      // end offset is reconstructed from start offset plus stored length
      return startOffsets[positionIndex + i] + lengths[positionIndex + i];
    }
  }
  @Override
  public BytesRef getPayload() throws IOException {
    checkPosition();
    if (payloadIndex == null || payload.length == 0) {
      return null;
    } else {
      return payload;
    }
  }
  @Override
  public int freq() throws IOException {
    checkDoc();
    return termFreq;
  }
  @Override
  public int docID() {
    return doc;
  }
  @Override
  public int nextDoc() throws IOException {
    // a single live document (id 0), then exhaustion
    if (doc == -1 && (liveDocs == null || liveDocs.get(0))) {
      return (doc = 0);
    } else {
      return (doc = NO_MORE_DOCS);
    }
  }
  @Override
  public int advance(int target) throws IOException {
    return slowAdvance(target);
  }
  @Override
  public long cost() {
    return 1;
  }
}
/** Returns the sum of all values of <code>arr</code>. */
private static int sum(int[] arr) {
  int total = 0;
  for (int i = 0; i < arr.length; ++i) {
    total += arr[i];
  }
  return total;
}
@Override
public long ramBytesUsed() {
  // the chunk index reader is the only sizable heap-resident structure
  return indexReader.ramBytesUsed();
}
}
/*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.qpid.server.management.plugin.controller.latest;
import static org.apache.qpid.server.management.plugin.HttpManagementConfiguration.PREFERENCE_OPERTAION_TIMEOUT_CONTEXT_NAME;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.notNullValue;
import static org.hamcrest.CoreMatchers.nullValue;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.fail;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import java.security.PrivilegedAction;
import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import javax.security.auth.Subject;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.apache.qpid.server.management.plugin.HttpManagementConfiguration;
import org.apache.qpid.server.management.plugin.ManagementException;
import org.apache.qpid.server.management.plugin.ManagementRequest;
import org.apache.qpid.server.management.plugin.ManagementResponse;
import org.apache.qpid.server.management.plugin.RequestType;
import org.apache.qpid.server.model.AuthenticationProvider;
import org.apache.qpid.server.model.Broker;
import org.apache.qpid.server.model.BrokerModel;
import org.apache.qpid.server.model.BrokerTestHelper;
import org.apache.qpid.server.model.ConfiguredObject;
import org.apache.qpid.server.model.Queue;
import org.apache.qpid.server.model.VirtualHost;
import org.apache.qpid.server.model.VirtualHostNode;
import org.apache.qpid.server.model.preferences.GenericPreferenceValueFactory;
import org.apache.qpid.server.model.preferences.Preference;
import org.apache.qpid.server.model.preferences.PreferenceImpl;
import org.apache.qpid.server.security.auth.AuthenticatedPrincipal;
import org.apache.qpid.server.security.auth.UsernamePrincipal;
import org.apache.qpid.server.virtualhost.QueueManagingVirtualHost;
import org.apache.qpid.test.utils.UnitTestBase;
public class LatestManagementControllerTest extends UnitTestBase
{
private LatestManagementController _controller;
@BeforeEach
public void setUp()
{
    // minimal management configuration: the real broker model plus a short
    // preference-operation timeout so preference calls cannot hang a test
    final HttpManagementConfiguration<?> httpManagement = mock(HttpManagementConfiguration.class);
    when(httpManagement.getContextValue(Long.class, PREFERENCE_OPERTAION_TIMEOUT_CONTEXT_NAME)).thenReturn(1000L);
    when(httpManagement.getModel()).thenReturn(BrokerModel.getInstance());
    _controller = new LatestManagementController(httpManagement);
}
@Test
public void getVersion()
{
    // the latest controller always reports the current broker model version
    final String expectedVersion = BrokerModel.MODEL_VERSION;
    assertThat(_controller.getVersion(), is(equalTo(expectedVersion)));
}
@Test
public void getCategories()
{
    // every category supported by the current model is reported by its simple name
    final Set<String> expectedCategories = BrokerModel.getInstance()
            .getSupportedCategories()
            .stream()
            .map(Class::getSimpleName)
            .collect(Collectors.toSet());
    assertThat(_controller.getCategories(), is(equalTo(expectedCategories)));
}
@Test
public void getCategoryMapping()
{
    // category mappings follow the '/api/v<model version>/<category>/' pattern
    final String expectedMapping = String.format("/api/v%s/%s/", BrokerModel.MODEL_VERSION, "foo");
    assertThat(_controller.getCategoryMapping("foo"), is(equalTo(expectedMapping)));
}
@Test
public void getCategory()
{
    // the reported category is the simple name of the object's category class
    final ConfiguredObject<?> configuredObject = mock(ConfiguredObject.class);
    doReturn(Broker.class).when(configuredObject).getCategoryClass();
    assertThat(_controller.getCategory(configuredObject), is(equalTo(Broker.class.getSimpleName())));
}
@Test
public void getCategoryHierarchyForBrokerRootAndQueueCategory()
{
    // from a broker root the path to a queue goes through node and host
    final Broker<?> broker = BrokerTestHelper.createBrokerMock();
    assertThat(_controller.getCategoryHierarchy(broker, "Queue"),
               is(equalTo(List.of("VirtualHostNode", "VirtualHost", "Queue"))));
}
@Test
public void getCategoryHierarchyForVirtualHostRootAndExchangeCategory() throws Exception
{
    // relative to a virtual host root, an exchange is a direct child
    final QueueManagingVirtualHost<?> host = BrokerTestHelper.createVirtualHost("test", this);
    assertThat(_controller.getCategoryHierarchy(host, "Exchange"),
               is(equalTo(List.of("Exchange"))));
}
@Test
public void getCategoryHierarchyForBrokerRootAndUnknownCategory()
{
    // unknown categories yield an empty hierarchy rather than an error
    final Broker<?> broker = BrokerTestHelper.createBrokerMock();
    assertThat(_controller.getCategoryHierarchy(broker, "Binding"),
               is(equalTo(List.of())));
}
@Test
public void getNextVersionManagementController()
{
    // the latest controller ends the version chain, so there is no successor
    assertThat(_controller.getNextVersionManagementController(), is(nullValue()));
}
@Test
public void getRequestTypeForGetAndModelObjectWithNotFullPath() throws Exception
{
    // a GET whose path uses a wildcard (not fully qualified) is still classified
    // as a model-object request
    final String hostName = "test";
    final QueueManagingVirtualHost<?> virtualHost = createVirtualHostWithQueue(hostName, "foo", "bar");
    final ManagementRequest request = mock(ManagementRequest.class);
    when(request.getCategory()).thenReturn("queue");
    doReturn(virtualHost.getBroker()).when(request).getRoot();
    when(request.getPath()).thenReturn(List.of("*", hostName));
    when(request.getParameters()).thenReturn(Map.of());
    when(request.getMethod()).thenReturn("GET");
    final RequestType type = _controller.getRequestType(request);
    assertThat(type, is(equalTo(RequestType.MODEL_OBJECT)));
}
@Test
public void getRequestTypeForGetAndModelObjectWithFullPath() throws Exception
{
    // a GET with a fully qualified node/host/queue path is a model-object request
    final String hostName = "test";
    final QueueManagingVirtualHost<?> virtualHost = createVirtualHostWithQueue(hostName, "foo", "bar");
    final ManagementRequest request = mock(ManagementRequest.class);
    when(request.getCategory()).thenReturn("queue");
    doReturn(virtualHost.getBroker()).when(request).getRoot();
    final List<String> path = List.of(virtualHost.getParent().getName(), hostName, "bar");
    when(request.getPath()).thenReturn(path);
    when(request.getParameters()).thenReturn(Map.of());
    when(request.getMethod()).thenReturn("GET");
    final RequestType type = _controller.getRequestType(request);
    assertThat(type, is(equalTo(RequestType.MODEL_OBJECT)));
}
@Test
public void getRequestTypeForGetAndUserPreferences() throws Exception
{
    // a trailing 'userpreferences' path segment switches the request type
    final String hostName = "test";
    final QueueManagingVirtualHost<?> virtualHost = createVirtualHostWithQueue(hostName, "foo", "bar");
    final ManagementRequest request = mock(ManagementRequest.class);
    when(request.getCategory()).thenReturn("queue");
    doReturn(virtualHost.getBroker()).when(request).getRoot();
    List<String> path = List.of(virtualHost.getParent().getName(), hostName, "bar", "userpreferences");
    when(request.getPath()).thenReturn(path);
    when(request.getParameters()).thenReturn(Map.of());
    when(request.getMethod()).thenReturn("GET");
    final RequestType type = _controller.getRequestType(request);
    assertThat(type, is(equalTo(RequestType.USER_PREFERENCES)));
}
@Test
public void getRequestTypeForGetAndVisiblePreferences() throws Exception
{
    // a trailing 'visiblepreferences' path segment switches the request type
    final String hostName = "test";
    final QueueManagingVirtualHost<?> virtualHost = createVirtualHostWithQueue(hostName, "foo", "bar");
    final ManagementRequest request = mock(ManagementRequest.class);
    when(request.getCategory()).thenReturn("queue");
    doReturn(virtualHost.getBroker()).when(request).getRoot();
    List<String> path = List.of(virtualHost.getParent().getName(), hostName, "bar", "visiblepreferences");
    when(request.getPath()).thenReturn(path);
    when(request.getParameters()).thenReturn(Map.of());
    when(request.getMethod()).thenReturn("GET");
    final RequestType type = _controller.getRequestType(request);
    assertThat(type, is(equalTo(RequestType.VISIBLE_PREFERENCES)));
}
@Test
public void getForBrokerRootAndQueueSingletonPath() throws Exception
{
    // a fully qualified node/host/queue path resolves to a single queue object
    final String hostName = "test";
    final QueueManagingVirtualHost<?> virtualHost = createVirtualHostWithQueue(hostName, "foo", "bar");
    final String nodeName = virtualHost.getParent().getName();
    final List<String> path = List.of(nodeName, hostName, "foo");
    final Object object = _controller.get(virtualHost.getBroker(), "queue", path, Map.of());
    assertThat(object, is(notNullValue()));
    assertThat(object, is(instanceOf(Queue.class)));
    // avoid the raw Queue type: a wildcard is sufficient for getName()
    final Queue<?> data = (Queue<?>) object;
    assertThat(data.getName(), is(equalTo("foo")));
}
@Test
public void getForBrokerRootAndQueuePathNoQueueName() throws Exception
{
    // omitting the queue name from the path returns every queue of the host
    final String hostName = "test";
    final QueueManagingVirtualHost<?> virtualHost = createVirtualHostWithQueue(hostName, "foo", "bar");
    final String nodeName = virtualHost.getParent().getName();
    final List<String> path = List.of(nodeName, hostName);
    final Object object = _controller.get(virtualHost.getBroker(), "queue", path, Map.of());
    assertThat(object, is(notNullValue()));
    assertThat(object, is(instanceOf(Collection.class)));
    final Collection<?> data = (Collection<?>) object;
    // assert the size before iterating (consistent with the wildcard/filter tests)
    assertThat(data.size(), is(equalTo(2)));
    // avoid raw Iterator/Queue types: wildcards are sufficient here
    final Iterator<?> iterator = data.iterator();
    final Object first = iterator.next();
    final Object second = iterator.next();
    assertThat(first, is(notNullValue()));
    assertThat(first, is(instanceOf(Queue.class)));
    assertThat(((Queue<?>) first).getName(), is(equalTo("foo")));
    assertThat(second, is(notNullValue()));
    assertThat(second, is(instanceOf(Queue.class)));
    assertThat(((Queue<?>) second).getName(), is(equalTo("bar")));
}
@Test
public void getForBrokerRootAndQueuePathWithWildCards() throws Exception
{
    // a wildcard node segment matches the single node and returns all queues
    final String hostName = "test";
    final QueueManagingVirtualHost<?> virtualHost = createVirtualHostWithQueue(hostName, "foo", "bar");
    final List<String> path = List.of("*", hostName);
    final Object object = _controller.get(virtualHost.getBroker(), "queue", path, Map.of());
    assertThat(object, is(notNullValue()));
    assertThat(object, is(instanceOf(Collection.class)));
    final Collection<?> data = (Collection<?>) object;
    assertThat(data.size(), is(equalTo(2)));
    // avoid raw Iterator/Queue types: wildcards are sufficient here
    final Iterator<?> iterator = data.iterator();
    final Object first = iterator.next();
    final Object second = iterator.next();
    assertThat(first, is(notNullValue()));
    assertThat(first, is(instanceOf(Queue.class)));
    assertThat(((Queue<?>) first).getName(), is(equalTo("foo")));
    assertThat(second, is(notNullValue()));
    assertThat(second, is(instanceOf(Queue.class)));
    assertThat(((Queue<?>) second).getName(), is(equalTo("bar")));
}
@Test
public void getForBrokerRootAndQueuePathWithFilter() throws Exception
{
    // a name filter restricts the result to the matching queues only
    final String hostName = "test";
    final QueueManagingVirtualHost<?> virtualHost = createVirtualHostWithQueue(hostName, "foo", "bar", "bar2");
    final List<String> path = List.of("*", hostName);
    final Object object = _controller.get(virtualHost.getBroker(),
                                          "queue",
                                          path,
                                          Map.of(Queue.NAME, List.of("foo", "bar")));
    assertThat(object, is(notNullValue()));
    assertThat(object, is(instanceOf(Collection.class)));
    final Collection<?> data = (Collection<?>) object;
    assertThat(data.size(), is(equalTo(2)));
    // avoid raw Iterator/Queue types: wildcards are sufficient here
    final Iterator<?> iterator = data.iterator();
    final Object first = iterator.next();
    final Object second = iterator.next();
    assertThat(first, is(notNullValue()));
    assertThat(first, is(instanceOf(Queue.class)));
    assertThat(((Queue<?>) first).getName(), is(equalTo("foo")));
    assertThat(second, is(notNullValue()));
    assertThat(second, is(instanceOf(Queue.class)));
    assertThat(((Queue<?>) second).getName(), is(equalTo("bar")));
}
@Test
public void createOrUpdateUsingPutAndFullPath() throws Exception
{
    // PUT to a full object path creates the queue and returns the new object
    final String hostName = "test";
    final QueueManagingVirtualHost<?> virtualHost = createVirtualHostWithQueue(hostName);
    final List<String> path = List.of(virtualHost.getParent().getName(), hostName, "bar");
    final Object object = _controller.createOrUpdate(virtualHost.getBroker(),
                                                     "queue",
                                                     path,
                                                     new HashMap<>(Map.of(Queue.NAME, "bar")),
                                                     false);
    assertThat(object, is(notNullValue()));
    assertThat(object, is(instanceOf(Queue.class)));
    // avoid the raw Queue type: a wildcard is sufficient for getName()
    assertThat(((Queue<?>) object).getName(), is(equalTo("bar")));
}
@Test
public void createOrUpdateUsingPostAndFullPathForNonExisting() throws Exception
{
    // POST to a full object path must fail with 404 when the object does not exist
    final String hostName = "test";
    final QueueManagingVirtualHost<?> virtualHost = createVirtualHostWithQueue(hostName);
    final List<String> path = List.of(virtualHost.getParent().getName(), hostName, "bar");
    // assertThrows replaces the try/fail/catch idiom and still exposes the exception
    final ManagementException e = assertThrows(ManagementException.class,
                                               () -> _controller.createOrUpdate(virtualHost.getBroker(),
                                                                                "queue",
                                                                                path,
                                                                                Map.of(Queue.NAME, "bar"),
                                                                                true));
    assertThat(e.getStatusCode(), is(equalTo(404)));
}
@Test
public void createOrUpdateUsingPostAndFullPathForExisting() throws Exception
{
    // POST onto an existing object's full path performs an update and returns no body
    final String hostName = "test";
    final QueueManagingVirtualHost<?> virtualHost = createVirtualHostWithQueue(hostName, "bar");
    final List<String> fullPath = List.of(virtualHost.getParent().getName(), hostName, "bar");
    final Object updated = _controller.createOrUpdate(virtualHost.getBroker(),
                                                      "queue",
                                                      fullPath,
                                                      Map.of(Queue.NAME, "bar"),
                                                      true);
    assertThat(updated, is(nullValue()));
}
@Test
public void createOrUpdateUsingPostAndParentPath() throws Exception
{
    // POST to the parent (host) path creates the child queue and returns it
    final String hostName = "test";
    final QueueManagingVirtualHost<?> virtualHost = createVirtualHostWithQueue(hostName);
    final List<String> path = List.of(virtualHost.getParent().getName(), hostName);
    final Object object = _controller.createOrUpdate(virtualHost.getBroker(),
                                                     "queue",
                                                     path,
                                                     Map.of(Queue.NAME, "bar"),
                                                     true);
    assertThat(object, is(notNullValue()));
    assertThat(object, is(instanceOf(Queue.class)));
    // avoid the raw Queue type: a wildcard is sufficient for getName()
    assertThat(((Queue<?>) object).getName(), is(equalTo("bar")));
}
@Test
public void deleteUsingFullPath() throws Exception
{
    // deleting by full path removes exactly the addressed queue
    final String hostName = "test";
    final QueueManagingVirtualHost<?> virtualHost = createVirtualHostWithQueue(hostName, "foo", "bar");
    final List<String> fullPath = List.of(virtualHost.getParent().getName(), hostName, "bar");
    final int deleted = _controller.delete(virtualHost.getBroker(), "queue", fullPath, Map.of());
    assertThat(deleted, is(equalTo(1)));
}
@Test
public void deleteUsingFilter() throws Exception
{
    // the name filter may list unknown queues; only existing ones are removed
    final String hostName = "test";
    final QueueManagingVirtualHost<?> virtualHost = createVirtualHostWithQueue(hostName, "foo", "bar");
    final List<String> hostPath = List.of(virtualHost.getParent().getName(), hostName);
    final int deleted = _controller.delete(virtualHost.getBroker(),
                                           "queue",
                                           hostPath,
                                           Map.of(Queue.NAME, List.of("foo", "bar", "bar2")));
    assertThat(deleted, is(equalTo(2)));
}
@Test
public void deleteUsingWildcard() throws Exception
{
    // a wildcard queue segment deletes every queue of the host
    final String hostName = "test";
    final QueueManagingVirtualHost<?> virtualHost = createVirtualHostWithQueue(hostName, "foo", "bar");
    final List<String> wildcardPath = List.of(virtualHost.getParent().getName(), hostName, "*");
    final int deleted = _controller.delete(virtualHost.getBroker(), "queue", wildcardPath, Map.of());
    assertThat(deleted, is(equalTo(2)));
}
@Test
public void invoke() throws Exception
{
    // publishing one message via the virtualhost operation should report one enqueue
    final String hostName = "test";
    final QueueManagingVirtualHost<?> virtualHost = createVirtualHostWithQueue(hostName, "foo", "bar");
    final List<String> hostPath = List.of(virtualHost.getParent().getName(), hostName);
    final Map<String, Object> message = new HashMap<>();
    message.put("address", "foo");
    message.put("persistent", "false");
    message.put("content", "Test Content");
    message.put("mimeType", "text/plain");
    final ManagementResponse response = _controller.invoke(virtualHost.getBroker(),
                                                           "virtualhost",
                                                           hostPath,
                                                           "publishMessage",
                                                           Map.of("message", message),
                                                           true,
                                                           true);
    assertThat(response, is(notNullValue()));
    assertThat(response.getResponseCode(), is(equalTo(200)));
    final Object body = response.getBody();
    assertThat(body, is(instanceOf(Number.class)));
    assertThat(((Number) body).intValue(), is(equalTo(1)));
}
@Test
public void getPreferences() throws Exception
{
final String hostName = "default";
final QueueManagingVirtualHost<?> virtualHost = createVirtualHostWithQueue(hostName);
final String preferencesType = "X-type-preference";
final Map<String, Object> preferenceValue = Map.of("foo", "bar");
final Subject testSubject = createTestSubject();
final String prefernceName = "test";
createPreferences(testSubject, virtualHost, preferencesType, prefernceName, preferenceValue);
List<String> path = List.of(virtualHost.getParent().getName(), hostName, "userpreferences");
final Object preferences = Subject.doAs(testSubject, (PrivilegedAction<Object>) () ->
_controller.getPreferences(virtualHost.getBroker(), "virtualhost", path, Map.of()));
assertPreference(preferencesType, prefernceName, preferenceValue, preferences);
}
    @Test
    public void setPreferences() throws Exception
    {
        final String hostName = "default";
        final QueueManagingVirtualHost<?> virtualHost = createVirtualHostWithQueue(hostName);
        final String preferencesType = "X-type";
        final Map<String, Object> preferenceValue = Map.of("foo", "bar");
        final Subject testSubject = createTestSubject();
        final String preferenceName = "pref";
        // Seed an existing preference; its id is reused below so the set replaces it in place
        final UUID id = createPreferences(testSubject, virtualHost, preferencesType, preferenceName, preferenceValue);
        final List<String> path = List.of(virtualHost.getParent().getName(), hostName, "userpreferences");
        final Map<String, Object> newValue = Map.of("foo", "bar2");
        // Update payload: same id and name, replaced value
        final Map<String, Object> data = new HashMap<>();
        data.put("id", id.toString());
        data.put("name", preferenceName);
        data.put("value", newValue);
        final Map<String, List<Object>> modifiedPreferences = Map.of(preferencesType, List.of(data));
        // Preferences are per-user: both the update and the read-back must run as the owner
        Subject.doAs(testSubject, (PrivilegedAction<Void>) () -> {
            _controller.setPreferences(virtualHost.getBroker(),
                                       "virtualhost",
                                       path,
                                       modifiedPreferences,
                                       Map.of(),
                                       true);
            return null;
        });
        final Object preferences = Subject.doAs(testSubject, (PrivilegedAction<Object>) () ->
                _controller.getPreferences(virtualHost.getBroker(), "virtualhost", path, Map.of()));
        // The stored preference should now carry the new value
        assertPreference(preferencesType, preferenceName, newValue, preferences);
    }
    @Test
    public void deletePreferences() throws Exception
    {
        final String hostName = "test";
        final QueueManagingVirtualHost<?> virtualHost = createVirtualHostWithQueue(hostName);
        final String preferencesType = "X-type";
        final Map<String, Object> preferenceValue = Map.of("foo", "bar");
        final Subject testSubject = createTestSubject();
        final String preferenceName = "pref";
        createPreferences(testSubject, virtualHost, preferencesType, preferenceName, preferenceValue);
        // Delete addresses the single preference via .../userpreferences/<type>/<name>
        final List<String> path = List.of(virtualHost.getParent().getName(),
                                          hostName,
                                          "userpreferences",
                                          preferencesType,
                                          preferenceName);
        // Deletion must run as the owning subject, like all preference operations
        Subject.doAs(testSubject, (PrivilegedAction<Void>) () -> {
            _controller.deletePreferences(virtualHost.getBroker(),
                                          "virtualhost",
                                          path,
                                          Map.of());
            return null;
        });
        // Read back all preferences of the host; the map should now be empty
        final List<String> path2 = List.of(virtualHost.getParent().getName(), hostName, "userpreferences");
        final Object preferences = Subject.doAs(testSubject, (PrivilegedAction<Object>) () ->
                _controller.getPreferences(virtualHost.getBroker(), "virtualhost", path2, Map.of()));
        assertThat(preferences, is(notNullValue()));
        assertThat(preferences, is(instanceOf(Map.class)));
        final Map<?, ?> map = (Map<?, ?>) preferences;
        assertThat(map.size(), is(equalTo(0)));
    }
@Test
public void formatConfiguredObjectForSingletonResponse() throws Exception
{
final String hostName = "test";
final QueueManagingVirtualHost<?> virtualHost = createVirtualHostWithQueue(hostName, "foo", "bar");
final Object formatted = _controller.formatConfiguredObject(virtualHost, Map.of("depth", List.of("1")), true);
assertThat(formatted, is(notNullValue()));
assertThat(formatted, is(instanceOf(Map.class)));
final Map<?, ?> data = (Map<?, ?>) formatted;
assertThat(data.get(VirtualHost.NAME), is(equalTo(hostName)));
final Object queues = data.get("queues");
assertThat(queues, is(notNullValue()));
assertThat(queues, is(instanceOf(Collection.class)));
final Collection<?> queueCollection = (Collection<?>) queues;
assertThat(queueCollection.size(), is(equalTo(2)));
final Iterator<?> iterator = queueCollection.iterator();
final Object queue1 = iterator.next();
final Object queue2 = iterator.next();
assertThat(queue1, is(instanceOf(Map.class)));
assertThat(queue2, is(instanceOf(Map.class)));
final Map<?, ?> queueMap1 = (Map<?, ?>) queue1;
final Map<?, ?> queueMap2 = (Map<?, ?>) queue2;
assertThat(queueMap1.get(Queue.NAME), is(equalTo("bar")));
assertThat(queueMap2.get(Queue.NAME), is(equalTo("foo")));
}
@Test
public void formatConfiguredObjectForCollectionResponse() throws Exception
{
final String hostName = "test";
final QueueManagingVirtualHost<?> virtualHost = createVirtualHostWithQueue(hostName, "foo", "bar");
final Object formatted = _controller.formatConfiguredObject(List.of(virtualHost), Map.of("depth", List.of("1")), true);
assertThat(formatted, is(notNullValue()));
assertThat(formatted, is(instanceOf(Collection.class)));
final Collection<?> formattedCollection = (Collection<?>) formatted;
assertThat(formattedCollection.size(), is(equalTo(1)));
Object item = formattedCollection.iterator().next();
assertThat(item, is(instanceOf(Map.class)));
final Map<?, ?> data = (Map<?, ?>) item;
assertThat(data.get(VirtualHost.NAME), is(equalTo(hostName)));
final Object queues = data.get("queues");
assertThat(queues, is(notNullValue()));
assertThat(queues, is(instanceOf(Collection.class)));
final Collection<?> queueCollection = (Collection<?>) queues;
assertThat(queueCollection.size(), is(equalTo(2)));
final Iterator<?> iterator = queueCollection.iterator();
final Object queue1 = iterator.next();
final Object queue2 = iterator.next();
assertThat(queue1, is(instanceOf(Map.class)));
assertThat(queue2, is(instanceOf(Map.class)));
final Map<?, ?> queueMap1 = (Map<?, ?>) queue1;
final Map<?, ?> queueMap2 = (Map<?, ?>) queue2;
assertThat(queueMap1.get(Queue.NAME), is(equalTo("bar")));
assertThat(queueMap2.get(Queue.NAME), is(equalTo("foo")));
}
@Test
public void handleGetForBrokerRootAndQueueSingletonPath() throws Exception
{
final String hostName = "test";
final QueueManagingVirtualHost<?> virtualHost = createVirtualHostWithQueue(hostName, "foo", "bar");
final String nodeName = virtualHost.getParent().getName();
final ManagementRequest request = mock(ManagementRequest.class);
when(request.getCategory()).thenReturn("queue");
doReturn(virtualHost.getBroker()).when(request).getRoot();
when(request.getPath()).thenReturn(List.of(nodeName, hostName, "foo"));
when(request.getMethod()).thenReturn("GET");
final ManagementResponse response = _controller.handleGet(request);
assertThat(response, is(notNullValue()));
assertThat(response.getResponseCode(), is(equalTo(200)));
assertThat(response.getBody(), is(notNullValue()));
assertThat(response.getBody(), is(instanceOf(Queue.class)));
final Queue data = (Queue) response.getBody();
assertThat(data.getName(), is(equalTo("foo")));
}
@Test
public void handleGetForBrokerRootAndQueuePathWithoutQueueName() throws Exception
{
final String hostName = "test";
final QueueManagingVirtualHost<?> virtualHost = createVirtualHostWithQueue(hostName, "foo", "bar");
final String nodeName = virtualHost.getParent().getName();
final ManagementRequest request = mock(ManagementRequest.class);
when(request.getCategory()).thenReturn("queue");
doReturn(virtualHost.getBroker()).when(request).getRoot();
when(request.getPath()).thenReturn(List.of(nodeName, hostName));
when(request.getParameters()).thenReturn(Map.of());
when(request.getMethod()).thenReturn("GET");
final ManagementResponse response = _controller.handleGet(request);
assertThat(response, is(notNullValue()));
assertThat(response.getResponseCode(), is(equalTo(200)));
assertThat(response.getBody(), is(notNullValue()));
assertThat(response.getBody(), is(instanceOf(Collection.class)));
final Collection data = (Collection) response.getBody();
assertThat(data.size(), is(equalTo(2)));
final Iterator iterator = data.iterator();
final Object object = iterator.next();
final Object object2 = iterator.next();
assertThat(object, is(notNullValue()));
assertThat(object, is(instanceOf(Queue.class)));
assertThat(((Queue) object).getName(), is(equalTo("foo")));
assertThat(object2, is(notNullValue()));
assertThat(object2, is(instanceOf(Queue.class)));
assertThat(((Queue) object2).getName(), is(equalTo("bar")));
}
@Test
public void handleGetForBrokerRootAndQueuePathWithFilter() throws Exception
{
final String hostName = "test";
final QueueManagingVirtualHost<?> virtualHost = createVirtualHostWithQueue(hostName, "foo", "bar");
final String nodeName = virtualHost.getParent().getName();
final ManagementRequest request = mock(ManagementRequest.class);
when(request.getCategory()).thenReturn("queue");
doReturn(virtualHost.getBroker()).when(request).getRoot();
when(request.getPath()).thenReturn(List.of(nodeName, hostName));
when(request.getParameters()).thenReturn(Map.of("name", List.of("bar")));
when(request.getMethod()).thenReturn("GET");
ManagementResponse response = _controller.handleGet(request);
assertThat(response, is(notNullValue()));
assertThat(response.getResponseCode(), is(equalTo(200)));
assertThat(response.getBody(), is(notNullValue()));
assertThat(response.getBody(), is(instanceOf(Collection.class)));
Collection data = (Collection) response.getBody();
assertThat(data.size(), is(equalTo(1)));
Object object = data.iterator().next();
assertThat(object, is(notNullValue()));
assertThat(object, is(instanceOf(Queue.class)));
assertThat(((Queue) object).getName(), is(equalTo("bar")));
}
private QueueManagingVirtualHost<?> createVirtualHostWithQueue(final String hostName, String... queueName)
throws Exception
{
final QueueManagingVirtualHost<?> virtualHost = BrokerTestHelper.createVirtualHost(hostName, this);
final Broker root = virtualHost.getBroker();
final ConfiguredObject<?> virtualHostNode = virtualHost.getParent();
when(root.getChildren(VirtualHostNode.class)).thenReturn(List.of(virtualHostNode));
when(virtualHostNode.getChildren(VirtualHost.class)).thenReturn(List.of(virtualHost));
when(virtualHostNode.getChildByName(VirtualHost.class, hostName)).thenReturn(virtualHost);
Stream.of(queueName)
.forEach(n -> virtualHost.createChild(Queue.class, Map.of(Queue.NAME, n)));
return virtualHost;
}
    /**
     * Stores a single preference of the given type, name and value for {@code testSubject} on the
     * supplied virtual host, blocking until the update has been applied.
     *
     * @return the randomly generated id of the stored preference
     */
    private UUID createPreferences(final Subject testSubject,
                                   final QueueManagingVirtualHost<?> virtualHost,
                                   final String preferenceType,
                                   final String preferenceName,
                                   final Map<String, Object> preferenceValue)
            throws Exception
    {
        UUID uuid = UUID.randomUUID();
        final Preference preference = new PreferenceImpl(virtualHost,
                                                         uuid,
                                                         preferenceName,
                                                         preferenceType,
                                                         "Some preference",
                                                         null,
                                                         new Date(),
                                                         new Date(),
                                                         null,
                                                         new GenericPreferenceValueFactory().createInstance(
                                                                 preferenceValue));
        final List<Preference> preferenceList = List.of(preference);
        // updateOrAppend must be invoked as the owning subject; wait for completion
        final Future<Void> result = Subject.doAs(testSubject,
                                                 (PrivilegedAction<Future<Void>>) () -> virtualHost.getUserPreferences()
                                                         .updateOrAppend(
                                                                 preferenceList));
        result.get(2000L, TimeUnit.MILLISECONDS);
        return uuid;
    }
private Subject createTestSubject()
{
final AuthenticationProvider<?> provider = mock(AuthenticationProvider.class);
when(provider.getType()).thenReturn("type");
when(provider.getName()).thenReturn("name");
return new Subject(false,
Set.of(new AuthenticatedPrincipal(new UsernamePrincipal("user", provider))),
Set.of(),
Set.of());
}
private void assertPreference(final String expectedType,
final String expectedName,
final Map<String, Object> expectedValue,
final Object preferences)
{
assertThat(preferences, is(notNullValue()));
assertThat(preferences, is(instanceOf(Map.class)));
final Map<?, ?> data = (Map<?, ?>) preferences;
final Object pt = data.get(expectedType);
assertThat(pt, is(notNullValue()));
assertThat(pt, is(instanceOf(Collection.class)));
final Collection<?> items = (Collection<?>) pt;
assertThat(items.size(), is(equalTo(1)));
final Object item = items.iterator().next();
assertThat(item, is(notNullValue()));
assertThat(item, is(instanceOf(Map.class)));
final Map<?, ?> map = (Map<?, ?>) item;
final Object value = map.get("value");
assertThat(value, is(notNullValue()));
assertThat(value, is(equalTo(expectedValue)));
final Object name = map.get("name");
assertThat(name, is(notNullValue()));
assertThat(name, is(equalTo(expectedName)));
}
} |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.request;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.FilterNumericDocValues;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.index.SortedDocValues;
import org.apache.lucene.index.SortedNumericDocValues;
import org.apache.lucene.index.SortedSetDocValues;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.NumericUtils;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.request.IntervalFacets.FacetInterval;
import org.apache.solr.schema.FieldType;
import org.apache.solr.schema.NumberType;
import org.apache.solr.schema.PointField;
import org.apache.solr.schema.SchemaField;
import org.apache.solr.search.DocIterator;
import org.apache.solr.search.DocSet;
import org.apache.solr.search.QueryParsing;
import org.apache.solr.search.SolrIndexSearcher;
import org.apache.solr.search.SyntaxError;
/**
* Computes interval facets for docvalues field (single or multivalued).
*
* <p>Given a set of intervals for a field and a DocSet, it calculates the number of documents that
* match each of the intervals provided. The final count for each interval should be exactly the
* same as the number of results of a range query using the DocSet and the range as filters. This
* means that the count of {@code facet.query=field:[A TO B]} should be the same as the count of
* {@code f.field.facet.interval.set=[A,B]}, however, this method will usually be faster in cases
* where there are a larger number of intervals per field.
*
* <p>To use this class, create an instance using {@link #IntervalFacets(SchemaField,
* SolrIndexSearcher, DocSet, String[], SolrParams)} and then iterate the {@link FacetInterval}
* using {@link #iterator()}
*
* <p>Intervals Format<br>
* Intervals must begin with either '(' or '[', be followed by the start value, then a comma ',',
* the end value, and finally ')' or ']'. For example:
*
* <ul>
* <li>(1,10) -> will include values greater than 1 and lower than 10
* <li>[1,10) -> will include values greater or equal to 1 and lower than 10
* <li>[1,10] -> will include values greater or equal to 1 and lower or equal to 10
* </ul>
*
* The initial and end values can't be empty, if the interval needs to be unbounded, the special
* character '*' can be used for both, start and end limit. When using '*', '(' and '[', and ')' and
* ']' will be treated equal. [*,*] will include all documents with a value in the field
*
* <p>The interval limits may be strings, there is no need to add quotes, all the text until the
* comma will be treated as the start limit, and the text after that will be the end limit, for
* example: [Buenos Aires,New York]. Keep in mind that a string-like comparison will be done to
* match documents in string intervals (case-sensitive). The comparator can't be changed. Commas,
* brackets and square brackets can be escaped by using '\' in front of them. Whitespaces before and
* after the values will be omitted. Start limit can't be grater than the end limit. Equal limits
* are allowed.
*
* <p>As with facet.query, the key used to display the result can be set by using local params
* syntax, for example:
*
* <p><code>{!key='First Half'}[0,5) </code>
*
* <p>To use this class:
*
* <pre>
* IntervalFacets intervalFacets = new IntervalFacets(schemaField, searcher, docs, intervalStrs, params);
* for (FacetInterval interval : intervalFacets) {
* results.add(interval.getKey(), interval.getCount());
* }
* </pre>
*/
public class IntervalFacets implements Iterable<FacetInterval> {
private final SchemaField schemaField;
private final SolrIndexSearcher searcher;
private final DocSet docs;
private final FacetInterval[] intervals;
  /**
   * Constructor that accepts un-parsed intervals using "interval faceting" syntax. See {@link
   * IntervalFacets} for syntax. Intervals don't need to be in order.
   *
   * @param schemaField field on which to compute the interval facets
   * @param searcher searcher used to resolve docvalues per segment
   * @param docs set of documents to count; the counts are computed eagerly in this constructor
   * @param intervals un-parsed interval strings, e.g. {@code [1,10)}
   * @param params request parameters, used for local params (e.g. {@code {!key=...}})
   * @throws SyntaxError if any interval string is malformed
   * @throws IOException on index access error
   */
  public IntervalFacets(
      SchemaField schemaField,
      SolrIndexSearcher searcher,
      DocSet docs,
      String[] intervals,
      SolrParams params)
      throws SyntaxError, IOException {
    this.schemaField = schemaField;
    this.searcher = searcher;
    this.docs = docs;
    // Intervals are sorted by start so counting can early-exit on LOWER_THAN_START
    this.intervals = getSortedIntervals(intervals, params);
    doCount();
  }
  /**
   * Constructor that accepts an already constructed array of {@link FacetInterval} objects. This
   * array needs to be sorted by start value in weakly ascending order. null values are not allowed
   * in the array.
   *
   * @param schemaField field on which to compute the interval facets
   * @param searcher searcher used to resolve docvalues per segment
   * @param docs set of documents to count; the counts are computed eagerly in this constructor
   * @param intervals pre-built intervals, sorted by start value ascending, no null elements
   * @throws IOException on index access error
   */
  public IntervalFacets(
      SchemaField schemaField, SolrIndexSearcher searcher, DocSet docs, FacetInterval[] intervals)
      throws IOException {
    this.schemaField = schemaField;
    this.searcher = searcher;
    this.docs = docs;
    this.intervals = intervals;
    doCount();
  }
private FacetInterval[] getSortedIntervals(String[] intervals, SolrParams params)
throws SyntaxError {
FacetInterval[] sortedIntervals = new FacetInterval[intervals.length];
int idx = 0;
for (String intervalStr : intervals) {
sortedIntervals[idx++] = new FacetInterval(schemaField, intervalStr, params);
}
/*
* This comparator sorts the intervals by start value from lower to greater
*/
Arrays.sort(
sortedIntervals,
new Comparator<FacetInterval>() {
@Override
public int compare(FacetInterval o1, FacetInterval o2) {
assert o1 != null;
assert o2 != null;
return compareStart(o1, o2);
}
private int compareStart(FacetInterval o1, FacetInterval o2) {
if (o1.start == null) {
if (o2.start == null) {
return 0;
}
return -1;
}
if (o2.start == null) {
return 1;
}
int startComparison = o1.start.compareTo(o2.start);
if (startComparison == 0) {
if (o1.startOpen != o2.startOpen) {
if (!o1.startOpen) {
return -1;
} else {
return 1;
}
}
}
return startComparison;
}
});
return sortedIntervals;
}
private void doCount() throws IOException {
if (schemaField.getType().getNumberType() != null
&& (!schemaField.multiValued() || schemaField.getType().isPointField())) {
if (schemaField.multiValued()) {
getCountMultiValuedNumeric();
} else {
getCountNumeric();
}
} else {
getCountString();
}
}
  /**
   * Counts per-interval matches for a single-valued numeric docvalues field. Assumes the DocSet
   * iterates documents in increasing global id order, since the leaf contexts are only ever
   * advanced forward. Float/double values are mapped to their sortable long representation so the
   * comparison against the (equally mapped) interval limits is a plain long comparison.
   */
  private void getCountNumeric() throws IOException {
    final FieldType ft = schemaField.getType();
    final String fieldName = schemaField.getName();
    final NumberType numericType = ft.getNumberType();
    if (numericType == null) {
      throw new IllegalStateException();
    }
    final List<LeafReaderContext> leaves = searcher.getIndexReader().leaves();
    final Iterator<LeafReaderContext> ctxIt = leaves.iterator();
    LeafReaderContext ctx = null;
    NumericDocValues longs = null;
    for (DocIterator docsIt = docs.iterator(); docsIt.hasNext(); ) {
      final int doc = docsIt.nextDoc();
      if (ctx == null || doc >= ctx.docBase + ctx.reader().maxDoc()) {
        // Advance to the leaf that contains this doc and (re-)open its docvalues
        do {
          ctx = ctxIt.next();
        } while (ctx == null || doc >= ctx.docBase + ctx.reader().maxDoc());
        assert doc >= ctx.docBase;
        switch (numericType) {
          case LONG:
          case DATE:
          case INTEGER:
            // Stored long representation compares directly
            longs = DocValues.getNumeric(ctx.reader(), fieldName);
            break;
          case FLOAT:
            // TODO: this bit flipping should probably be moved to tie-break in the PQ comparator
            longs =
                new FilterNumericDocValues(DocValues.getNumeric(ctx.reader(), fieldName)) {
                  @Override
                  public long longValue() throws IOException {
                    return NumericUtils.sortableFloatBits((int) super.longValue());
                  }
                };
            break;
          case DOUBLE:
            // TODO: this bit flipping should probably be moved to tie-break in the PQ comparator
            longs =
                new FilterNumericDocValues(DocValues.getNumeric(ctx.reader(), fieldName)) {
                  @Override
                  public long longValue() throws IOException {
                    return NumericUtils.sortableDoubleBits(super.longValue());
                  }
                };
            break;
          default:
            throw new AssertionError();
        }
      }
      // Only count the doc if it actually has a value for this field
      int valuesDocID = longs.docID();
      if (valuesDocID < doc - ctx.docBase) {
        valuesDocID = longs.advance(doc - ctx.docBase);
      }
      if (valuesDocID == doc - ctx.docBase) {
        accumIntervalWithValue(longs.longValue());
      }
    }
  }
  /**
   * Counts per-interval matches for a multivalued (point) numeric field using {@link
   * SortedNumericDocValues}. Assumes the DocSet iterates documents in increasing global id order,
   * since leaf contexts are only ever advanced forward.
   */
  private void getCountMultiValuedNumeric() throws IOException {
    final FieldType ft = schemaField.getType();
    final String fieldName = schemaField.getName();
    if (ft.getNumberType() == null) {
      throw new IllegalStateException();
    }
    final List<LeafReaderContext> leaves = searcher.getIndexReader().leaves();
    final Iterator<LeafReaderContext> ctxIt = leaves.iterator();
    LeafReaderContext ctx = null;
    SortedNumericDocValues longs = null;
    for (DocIterator docsIt = docs.iterator(); docsIt.hasNext(); ) {
      final int doc = docsIt.nextDoc();
      if (ctx == null || doc >= ctx.docBase + ctx.reader().maxDoc()) {
        // Advance to the leaf that contains this doc and (re-)open its docvalues
        do {
          ctx = ctxIt.next();
        } while (ctx == null || doc >= ctx.docBase + ctx.reader().maxDoc());
        assert doc >= ctx.docBase;
        longs = DocValues.getSortedNumeric(ctx.reader(), fieldName);
      }
      // Only count the doc if it actually has at least one value for this field
      int valuesDocID = longs.docID();
      if (valuesDocID < doc - ctx.docBase) {
        valuesDocID = longs.advance(doc - ctx.docBase);
      }
      if (valuesDocID == doc - ctx.docBase) {
        accumIntervalWithMultipleValues(longs);
      }
    }
  }
  /**
   * Counts per-interval matches for string (and non-point multivalued) fields by comparing segment
   * ordinals rather than values. Processes one leaf at a time; the intervals' ordinal bounds are
   * refreshed per segment inside the accum methods.
   */
  private void getCountString() throws IOException {
    List<LeafReaderContext> leaves = searcher.getTopReaderContext().leaves();
    for (int subIndex = 0; subIndex < leaves.size(); subIndex++) {
      LeafReaderContext leaf = leaves.get(subIndex);
      // solr docsets already exclude any deleted docs
      final DocIdSetIterator disi = docs.iterator(leaf);
      if (disi != null) {
        if (schemaField.multiValued()) {
          SortedSetDocValues sub = leaf.reader().getSortedSetDocValues(schemaField.getName());
          if (sub == null) {
            // No docvalues for this field in this segment
            continue;
          }
          final SortedDocValues singleton = DocValues.unwrapSingleton(sub);
          if (singleton != null) {
            // some codecs may optimize SORTED_SET storage for single-valued fields
            accumIntervalsSingle(singleton, disi);
          } else {
            accumIntervalsMulti(sub, disi);
          }
        } else {
          SortedDocValues sub = leaf.reader().getSortedDocValues(schemaField.getName());
          if (sub == null) {
            // No docvalues for this field in this segment
            continue;
          }
          accumIntervalsSingle(sub, disi);
        }
      }
    }
  }
  /**
   * Accumulates counts for every value of the current document of {@code longs}. Values arrive in
   * sorted order and the intervals are sorted by start value, so a single forward pass over both
   * suffices: {@code currentInterval} never moves backwards across values.
   */
  private void accumIntervalWithMultipleValues(SortedNumericDocValues longs) throws IOException {
    // longs should be already positioned to the correct doc
    assert longs.docID() != -1;
    final int docValueCount = longs.docValueCount();
    assert docValueCount > 0 : "Should have at least one value for this document";
    int currentInterval = 0;
    for (int i = 0; i < docValueCount; i++) {
      boolean evaluateNextInterval = true;
      long value = longs.nextValue();
      while (evaluateNextInterval && currentInterval < intervals.length) {
        IntervalCompareResult result = intervals[currentInterval].includes(value);
        switch (result) {
          case INCLUDED:
            /*
             * Increment the current interval and move to the next one using
             * the same value
             */
            intervals[currentInterval].incCount();
            currentInterval++;
            break;
          case LOWER_THAN_START:
            /*
             * None of the next intervals will match this value (all of them have
             * higher start value). Move to the next value for this document.
             */
            evaluateNextInterval = false;
            break;
          case GREATER_THAN_END:
            /*
             * Next interval may match this value
             */
            currentInterval++;
            break;
        }
        // Maybe return if currentInterval == intervals.length?
      }
    }
  }
  /**
   * Accumulates counts for all matching docs of {@code disi} against a multivalued sorted-set
   * field, comparing ordinals rather than values. The intervals' ordinal bounds are first
   * refreshed for this segment via {@code updateContext}. Per document, the ordinals arrive in
   * increasing order and the intervals are sorted by start, so one forward pass over both is
   * sufficient.
   */
  private void accumIntervalsMulti(SortedSetDocValues ssdv, DocIdSetIterator disi)
      throws IOException {
    // First update the ordinals in the intervals for this segment
    for (FacetInterval interval : intervals) {
      interval.updateContext(ssdv);
    }
    int doc;
    while ((doc = disi.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
      if (doc > ssdv.docID()) {
        ssdv.advance(doc);
      }
      if (doc == ssdv.docID()) {
        // Document has at least one value for the field
        long currOrd;
        int currentInterval = 0;
        for (int o = 0; o < ssdv.docValueCount(); o++) {
          currOrd = ssdv.nextOrd();
          boolean evaluateNextInterval = true;
          while (evaluateNextInterval && currentInterval < intervals.length) {
            IntervalCompareResult result = intervals[currentInterval].includes(currOrd);
            switch (result) {
              case INCLUDED:
                /*
                 * Increment the current interval and move to the next one using
                 * the same value
                 */
                intervals[currentInterval].incCount();
                currentInterval++;
                break;
              case LOWER_THAN_START:
                /*
                 * None of the next intervals will match this value (all of them have
                 * higher start value). Move to the next value for this document.
                 */
                evaluateNextInterval = false;
                break;
              case GREATER_THAN_END:
                /*
                 * Next interval may match this value
                 */
                currentInterval++;
                break;
            }
          }
        }
      }
    }
  }
private void accumIntervalsSingle(SortedDocValues sdv, DocIdSetIterator disi) throws IOException {
// First update the ordinals in the intervals to this segment
for (FacetInterval interval : intervals) {
interval.updateContext(sdv);
}
int doc;
while ((doc = disi.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
if (doc > sdv.docID()) {
sdv.advance(doc);
}
if (doc == sdv.docID()) {
accumInterval(sdv.ordValue());
}
}
}
private void accumInterval(int ordinal) {
assert ordinal >= 0;
accumIntervalWithValue(ordinal);
}
private void accumIntervalWithValue(long value) {
for (int i = 0; i < intervals.length; i++) {
FacetInterval interval = intervals[i];
IntervalCompareResult result = interval.includes(value);
if (result == IntervalCompareResult.INCLUDED) {
interval.incCount();
} else if (result == IntervalCompareResult.LOWER_THAN_START) {
// All intervals after this will have equal or grater start value,
// we can skip them
break;
}
}
}
static enum IntervalCompareResult {
LOWER_THAN_START,
INCLUDED,
GREATER_THAN_END,
}
/** Helper class to match and count of documents in specified intervals */
public static class FacetInterval {
/** Key to represent this interval */
private final String key;
/** Start value for this interval as indicated in the request */
final BytesRef start;
/** End value for this interval as indicated in the request */
final BytesRef end;
/** Whether or not this interval includes or not the lower limit */
private final boolean startOpen;
/** Whether or not this interval includes or not the upper limit */
private final boolean endOpen;
/**
* Lower limit to which compare a document value. If the field in which we are faceting is
* single value numeric, then this number will be the {@code long} representation of {@link
* #start}, and in this case the limit doesn't need to be updated once it is set (will be set in
* the constructor and remain equal for the life of this object). If the field is multivalued
* and/or non-numeric, then this number will be the lower limit ordinal for a value to be
* included in this interval. In this case, {@link #startLimit} needs to be set using either
* {@link #updateContext(SortedDocValues)} or {@link #updateContext(SortedSetDocValues)}
* (depending on the field type) for every segment before calling {@link #includes(long)} for
* any document in the segment.
*/
private long startLimit;
/**
* Upper limit to which compare a document value. If the field in which we are faceting is
* single value numeric, then this number will be the {@code long} representation of {@link
* #end}, and in this case the limit doesn't need to be updated once it is set (will be set in
* the constructor and remain equal for the life of this object). If the field is multivalued
* and/or non-numeric, then this number will be the upper limit ordinal for a value to be
* included in this interval. In this case, {@link #endLimit} needs to be set using either
* {@link #updateContext(SortedDocValues)} or {@link #updateContext(SortedSetDocValues)}
* (depending on the field type) for every segment before calling {@link #includes(long)} for
* any document in the segment.
*/
private long endLimit;
/** The current count of documents in that match this interval */
private int count;
/** If this field is set to true, this interval {@code #getCount()} will always return 0. */
private boolean includeNoDocs = false;
    /**
     * Constructor that accepts un-parsed interval faceting syntax. See {@link IntervalFacets} for
     * details
     *
     * @param schemaField schemaField for this range
     * @param intervalStr String the interval. See {@link IntervalFacets} for syntax
     * @param params SolrParams of this request, mostly used to get local params
     * @throws SyntaxError if the interval string is null/empty, malformed, or start &gt; end
     */
    FacetInterval(SchemaField schemaField, String intervalStr, SolrParams params)
        throws SyntaxError {
      if (intervalStr == null) throw new SyntaxError("empty facet interval");
      intervalStr = intervalStr.trim();
      if (intervalStr.length() == 0) throw new SyntaxError("empty facet interval");
      // Strip an optional local-params prefix ({!key=...}) and derive the display key from it;
      // without local params the raw interval string itself becomes the key.
      try {
        SolrParams localParams = QueryParsing.getLocalParams(intervalStr, params);
        if (localParams != null) {
          int localParamEndIdx = 2; // omit index of {!
          while (true) {
            localParamEndIdx = intervalStr.indexOf(QueryParsing.LOCALPARAM_END, localParamEndIdx);
            // Local param could be escaping '}'
            if (intervalStr.charAt(localParamEndIdx - 1) != '\\') {
              break;
            }
            localParamEndIdx++;
          }
          intervalStr = intervalStr.substring(localParamEndIdx + 1);
          key = localParams.get(CommonParams.OUTPUT_KEY, intervalStr);
        } else {
          key = intervalStr;
        }
      } catch (SyntaxError e) {
        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e);
      }
      // '(' means exclusive (open) start, '[' inclusive (closed) start
      if (intervalStr.charAt(0) == '(') {
        startOpen = true;
      } else if (intervalStr.charAt(0) == '[') {
        startOpen = false;
      } else {
        throw new SyntaxError(
            "Invalid start character "
                + intervalStr.charAt(0)
                + " in facet interval "
                + intervalStr);
      }
      // ')' means exclusive (open) end, ']' inclusive (closed) end
      final int lastNdx = intervalStr.length() - 1;
      if (intervalStr.charAt(lastNdx) == ')') {
        endOpen = true;
      } else if (intervalStr.charAt(lastNdx) == ']') {
        endOpen = false;
      } else {
        throw new SyntaxError(
            "Invalid end character "
                + intervalStr.charAt(lastNdx)
                + " in facet interval "
                + intervalStr);
      }
      // Unescape the start limit up to the first unescaped comma; i ends at the comma
      StringBuilder startStr = new StringBuilder(lastNdx);
      int i = unescape(intervalStr, 1, lastNdx, startStr);
      if (i == lastNdx) {
        if (intervalStr.charAt(lastNdx - 1) == ',') {
          throw new SyntaxError("Empty interval limit");
        }
        throw new SyntaxError("Missing unescaped comma separating interval ends in " + intervalStr);
      }
      try {
        start = getLimitFromString(schemaField, startStr);
      } catch (SyntaxError | SolrException e) {
        throw new SyntaxError(
            String.format(
                Locale.ROOT, "Invalid start interval for key '%s': %s", key, e.getMessage()),
            e);
      }
      // Unescape the end limit; it must reach exactly the closing bracket (no extra comma)
      StringBuilder endStr = new StringBuilder(lastNdx);
      i = unescape(intervalStr, i, lastNdx, endStr);
      if (i != lastNdx) {
        throw new SyntaxError(
            "Extra unescaped comma at index " + i + " in interval " + intervalStr);
      }
      try {
        end = getLimitFromString(schemaField, endStr);
      } catch (SyntaxError | SolrException e) {
        throw new SyntaxError(
            String.format(
                Locale.ROOT, "Invalid end interval for key '%s': %s", key, e.getMessage()),
            e);
      }
      // TODO: what about escaping star (*)?
      // TODO: escaping spaces on ends?
      if (schemaField.getType().getNumberType() != null) {
        setNumericLimits(schemaField);
      }
      if (start != null && end != null && start.compareTo(end) > 0) {
        throw new SyntaxError("Start is higher than end in interval for key: " + key);
      }
    }
    /**
     * Constructor that accepts already parsed values of start and end. This constructor can only be
     * used with numeric field types.
     *
     * @param schemaField schemaField for this range
     * @param startStr String representation of the start value of this interval. Can be a "*".
     * @param endStr String representation of the end value of this interval. Can be a "*".
     * @param includeLower Indicates whether this interval should include values equal to start
     * @param includeUpper Indicates whether this interval should include values equal to end
     * @param key String key of this interval
     */
    public FacetInterval(
        SchemaField schemaField,
        String startStr,
        String endStr,
        boolean includeLower,
        boolean includeUpper,
        String key) {
      assert schemaField.getType().getNumberType() != null
          : "Only numeric fields supported with this constructor";
      this.key = key;
      // open == exclusive; "include" flags are the inverse of the open flags
      this.startOpen = !includeLower;
      this.endOpen = !includeUpper;
      this.start = getLimitFromString(schemaField, startStr);
      this.end = getLimitFromString(schemaField, endStr);
      assert start == null || end == null || start.compareTo(end) < 0
          : "Bad start/end limits: " + startStr + "/" + endStr;
      // Precompute the sortable-long comparison bounds for numeric counting
      setNumericLimits(schemaField);
    }
    /**
     * Set startLimit and endLimit for numeric values. The limits in this case are going to be the
     * <code>long</code> representation of the original value. <code>startLimit</code> will be
     * incremented by one in case of the interval start being exclusive. <code>endLimit</code> will
     * be decremented by one in case of the interval end being exclusive.
     *
     * <p>FLOAT and DOUBLE values go through the Lucene "sortable" bit encodings, which preserve
     * numeric ordering under integer/long comparison, so every membership check can be a plain
     * <code>long</code> range test. A null start/end means that side is unbounded.
     */
    private void setNumericLimits(SchemaField schemaField) {
      if (start == null) {
        // Unbounded start: every value is >= Long.MIN_VALUE.
        startLimit = Long.MIN_VALUE;
      } else {
        switch (schemaField.getType().getNumberType()) {
          case LONG:
            startLimit = (long) schemaField.getType().toObject(schemaField, start);
            break;
          case DATE:
            // Dates compare by their epoch-millis representation.
            startLimit = ((Date) schemaField.getType().toObject(schemaField, start)).getTime();
            break;
          case INTEGER:
            startLimit = ((Integer) schemaField.getType().toObject(schemaField, start)).longValue();
            break;
          case FLOAT:
            // Sortable-int encoding keeps float ordering under integer comparison.
            startLimit =
                NumericUtils.floatToSortableInt(
                    (float) schemaField.getType().toObject(schemaField, start));
            break;
          case DOUBLE:
            // Sortable-long encoding keeps double ordering under long comparison.
            startLimit =
                NumericUtils.doubleToSortableLong(
                    (double) schemaField.getType().toObject(schemaField, start));
            break;
          default:
            throw new AssertionError();
        }
        if (startOpen) {
          if (startLimit == Long.MAX_VALUE) {
            /*
             * This interval can match no docs
             */
            includeNoDocs = true;
          } else {
            // Exclusive start: shift the limit one up so checks stay inclusive.
            startLimit++;
          }
        }
      }
      if (end == null) {
        // Unbounded end: every value is <= Long.MAX_VALUE.
        endLimit = Long.MAX_VALUE;
      } else {
        switch (schemaField.getType().getNumberType()) {
          case LONG:
            endLimit = (long) schemaField.getType().toObject(schemaField, end);
            break;
          case DATE:
            endLimit = ((Date) schemaField.getType().toObject(schemaField, end)).getTime();
            break;
          case INTEGER:
            endLimit = ((Integer) schemaField.getType().toObject(schemaField, end)).longValue();
            break;
          case FLOAT:
            endLimit =
                NumericUtils.floatToSortableInt(
                    (float) schemaField.getType().toObject(schemaField, end));
            break;
          case DOUBLE:
            endLimit =
                NumericUtils.doubleToSortableLong(
                    (double) schemaField.getType().toObject(schemaField, end));
            break;
          default:
            throw new AssertionError();
        }
        if (endOpen) {
          if (endLimit == Long.MIN_VALUE) {
            /*
             * This interval can match no docs
             */
            includeNoDocs = true;
          } else {
            // Exclusive end: shift the limit one down so checks stay inclusive.
            endLimit--;
          }
        }
      }
    }
private BytesRef getLimitFromString(SchemaField schemaField, StringBuilder builder)
throws SyntaxError {
String value = builder.toString().trim();
if (value.length() == 0) {
throw new SyntaxError("Empty interval limit");
}
return getLimitFromString(schemaField, value);
}
private BytesRef getLimitFromString(SchemaField schemaField, String value) {
if ("*".equals(value)) {
return null;
}
if (schemaField.getType().isPointField()) {
return ((PointField) schemaField.getType()).toInternalByteRef(value);
}
return new BytesRef(schemaField.getType().toInternal(value));
}
    /**
     * Update the ordinals based on the current reader. This method (or {@link
     * #updateContext(SortedSetDocValues)} depending on the DocValues type) needs to be called for
     * every reader before {@link #includes(long)} is called on any document of the reader.
     *
     * <p>After this call {@code startLimit}/{@code endLimit} hold per-segment ordinal bounds,
     * inclusive on both ends, so membership reduces to a simple ordinal range check.
     *
     * @param sdv DocValues for the current reader
     */
    public void updateContext(SortedDocValues sdv) throws IOException {
      if (start == null) {
        /*
         * Unset start. All ordinals will be greater than -1.
         */
        startLimit = -1;
      } else {
        // lookupTerm returns the ordinal if found, or -(insertionPoint) - 1 if not.
        startLimit = sdv.lookupTerm(start);
        if (startLimit < 0) {
          /*
           * The term was not found in this segment. We'll use inserting-point as
           * start ordinal (then, to be included in the interval, an ordinal needs to be
           * greater or equal to startLimit)
           */
          // Decode the insertion point: -(result) - 1.
          startLimit = (startLimit * -1) - 1;
        } else {
          /*
           * The term exists in this segment, If the interval has start open (the limit is
           * excluded), then we move one ordinal higher. Then, to be included in the
           * interval, an ordinal needs to be greater or equal to startLimit
           */
          if (startOpen) {
            startLimit++;
          }
        }
      }
      if (end == null) {
        /*
         * Unset end. All ordinals will be lower than Long.MAX_VALUE.
         */
        endLimit = Long.MAX_VALUE;
      } else {
        endLimit = sdv.lookupTerm(end);
        if (endLimit < 0) {
          /*
           * The term was not found in this segment. We'll use insertion-point -1 as
           * endLimit. To be included in this interval, ordinals must be lower or
           * equal to endLimit
           */
          // Decode insertion point, then step one below it: (-(result) - 1) - 1.
          endLimit = (endLimit * -1) - 2;
        } else {
          if (endOpen) {
            /*
             * The term exists in this segment, If the interval has start open (the
             * limit is excluded), then we move one ordinal lower. Then, to be
             * included in the interval, an ordinal needs to be lower or equal to
             * endLimit
             */
            endLimit--;
          }
        }
      }
    }
    /**
     * Update the ordinals based on the current reader. This method (or {@link
     * #updateContext(SortedDocValues)} depending on the DocValues type) needs to be called for
     * every reader before {@link #includes(long)} is called on any document of the reader.
     *
     * <p>Mirrors the {@link SortedDocValues} variant, but for multi-valued fields; the decoded
     * bounds are inclusive on both ends.
     *
     * @param sdv DocValues for the current reader
     */
    public void updateContext(SortedSetDocValues sdv) throws IOException {
      if (start == null) {
        /*
         * Unset start. All ordinals will be greater than -1.
         */
        startLimit = -1;
      } else {
        // lookupTerm returns the ordinal if found, or -(insertionPoint) - 1 if not.
        startLimit = sdv.lookupTerm(start);
        if (startLimit < 0) {
          /*
           * The term was not found in this segment. We'll use inserting-point as
           * start ordinal (then, to be included in the interval, an ordinal needs to be
           * greater or equal to startLimit)
           */
          // Decode the insertion point: -(result) - 1.
          startLimit = (startLimit * -1) - 1;
        } else {
          /*
           * The term exists in this segment, If the interval has start open (the limit is
           * excluded), then we move one ordinal higher. Then, to be included in the
           * interval, an ordinal needs to be greater or equal to startLimit
           */
          if (startOpen) {
            startLimit++;
          }
        }
      }
      if (end == null) {
        /*
         * Unset end. All ordinals will be lower than Long.MAX_VALUE.
         */
        endLimit = Long.MAX_VALUE;
      } else {
        endLimit = sdv.lookupTerm(end);
        if (endLimit < 0) {
          /*
           * The term was not found in this segment. We'll use insertion-point -1 as
           * endLimit. To be included in this interval, ordinals must be lower or
           * equal to endLimit
           */
          // Decode insertion point, then step one below it: (-(result) - 1) - 1.
          endLimit = (endLimit * -1) - 2;
        } else {
          /*
           * The term exists in this segment, If the interval has start open (the
           * limit is excluded), then we move one ordinal lower. Then, to be
           * included in the interval, an ordinal needs to be lower or equal to
           * endLimit
           */
          if (endOpen) {
            endLimit--;
          }
        }
      }
    }
/**
* Method to use to check whether a document should be counted for an interval or not. Before
* calling this method on a multi-valued and/or non-numeric field make sure you call {@link
* #updateContext(SortedDocValues)} or {@link #updateContext(SortedSetDocValues)} (depending on
* the DV type). It is OK to call this method without other previous calls on numeric fields
* (with {@link NumericDocValues})
*
* @param value For numeric single value fields, this {@code value} should be the {@code long}
* representation of the value of the document in the specified field. For multi-valued
* and/or non-numeric fields, {@code value} should be the ordinal of the term in the current
* segment
* @return
* <ul>
* <li>{@link IntervalCompareResult#INCLUDED} if the value is included in the interval
* <li>{@link IntervalCompareResult#GREATER_THAN_END} if the value is greater than {@code
* endLimit}
* <li>{@link IntervalCompareResult#LOWER_THAN_START} if the value is lower than {@code
* startLimit}
* </ul>
*
* @see org.apache.lucene.util.NumericUtils#floatToSortableInt(float)
* @see org.apache.lucene.util.NumericUtils#doubleToSortableLong(double)
*/
public IntervalCompareResult includes(long value) {
if (startLimit > value) {
return IntervalCompareResult.LOWER_THAN_START;
}
if (endLimit < value) {
return IntervalCompareResult.GREATER_THAN_END;
}
return IntervalCompareResult.INCLUDED;
}
/* Fill in sb with a string from i to the first unescaped comma, or n.
Return the index past the unescaped comma, or n if no unescaped comma exists */
private int unescape(String s, int i, int n, StringBuilder sb) throws SyntaxError {
for (; i < n; ++i) {
char c = s.charAt(i);
if (c == '\\') {
++i;
if (i < n) {
c = s.charAt(i);
} else {
throw new SyntaxError("Unfinished escape at index " + i + " in facet interval " + s);
}
} else if (c == ',') {
return i + 1;
}
sb.append(c);
}
return n;
}
@Override
public String toString() {
return this.getClass().getSimpleName()
+ " [key="
+ key
+ ", start="
+ start
+ ", end="
+ end
+ ", startOpen="
+ startOpen
+ ", endOpen="
+ endOpen
+ "]";
}
/**
* @return The count of document that matched this interval
*/
public int getCount() {
if (includeNoDocs) {
return 0;
}
return this.count;
}
/** Increment the number of documents that match this interval */
void incCount() {
this.count++;
}
/**
* @return Human readable key for this interval
*/
public String getKey() {
return this.key;
}
}
/** Iterate over all the intervals */
@Override
public Iterator<FacetInterval> iterator() {
return new ArrayList<FacetInterval>(Arrays.asList(intervals)).iterator();
}
}
|
googleapis/google-cloud-java | 35,521 | java-securitycenter/proto-google-cloud-securitycenter-v1beta1/src/main/java/com/google/cloud/securitycenter/v1beta1/Source.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/securitycenter/v1beta1/source.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.securitycenter.v1beta1;
/**
*
*
* <pre>
* Security Command Center finding source. A finding source
* is an entity or a mechanism that can produce a finding. A source is like a
* container of findings that come from the same scanner, logger, monitor, etc.
* </pre>
*
* Protobuf type {@code google.cloud.securitycenter.v1beta1.Source}
*/
public final class Source extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.securitycenter.v1beta1.Source)
SourceOrBuilder {
private static final long serialVersionUID = 0L;
// Use Source.newBuilder() to construct.
private Source(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private Source() {
name_ = "";
displayName_ = "";
description_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new Source();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.securitycenter.v1beta1.SourceOuterClass
.internal_static_google_cloud_securitycenter_v1beta1_Source_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.securitycenter.v1beta1.SourceOuterClass
.internal_static_google_cloud_securitycenter_v1beta1_Source_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.securitycenter.v1beta1.Source.class,
com.google.cloud.securitycenter.v1beta1.Source.Builder.class);
}
public static final int NAME_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object name_ = "";
/**
*
*
* <pre>
* The relative resource name of this source. See:
* https://cloud.google.com/apis/design/resource_names#relative_resource_name
* Example:
* "organizations/{organization_id}/sources/{source_id}"
* </pre>
*
* <code>string name = 1;</code>
*
* @return The name.
*/
@java.lang.Override
public java.lang.String getName() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
name_ = s;
return s;
}
}
/**
*
*
* <pre>
* The relative resource name of this source. See:
* https://cloud.google.com/apis/design/resource_names#relative_resource_name
* Example:
* "organizations/{organization_id}/sources/{source_id}"
* </pre>
*
* <code>string name = 1;</code>
*
* @return The bytes for name.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int DISPLAY_NAME_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object displayName_ = "";
/**
*
*
* <pre>
* The source's display name.
* A source's display name must be unique amongst its siblings, for example,
* two sources with the same parent can't share the same display name.
* The display name must have a length between 1 and 64 characters
* (inclusive).
* </pre>
*
* <code>string display_name = 2;</code>
*
* @return The displayName.
*/
@java.lang.Override
public java.lang.String getDisplayName() {
java.lang.Object ref = displayName_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
displayName_ = s;
return s;
}
}
/**
*
*
* <pre>
* The source's display name.
* A source's display name must be unique amongst its siblings, for example,
* two sources with the same parent can't share the same display name.
* The display name must have a length between 1 and 64 characters
* (inclusive).
* </pre>
*
* <code>string display_name = 2;</code>
*
* @return The bytes for displayName.
*/
@java.lang.Override
public com.google.protobuf.ByteString getDisplayNameBytes() {
java.lang.Object ref = displayName_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
displayName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int DESCRIPTION_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object description_ = "";
/**
*
*
* <pre>
* The description of the source (max of 1024 characters).
* Example:
* "Web Security Scanner is a web security scanner for common
* vulnerabilities in App Engine applications. It can automatically
* scan and detect four common vulnerabilities, including cross-site-scripting
* (XSS), Flash injection, mixed content (HTTP in HTTPS), and
* outdated/insecure libraries."
* </pre>
*
* <code>string description = 3;</code>
*
* @return The description.
*/
@java.lang.Override
public java.lang.String getDescription() {
java.lang.Object ref = description_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
description_ = s;
return s;
}
}
/**
*
*
* <pre>
* The description of the source (max of 1024 characters).
* Example:
* "Web Security Scanner is a web security scanner for common
* vulnerabilities in App Engine applications. It can automatically
* scan and detect four common vulnerabilities, including cross-site-scripting
* (XSS), Flash injection, mixed content (HTTP in HTTPS), and
* outdated/insecure libraries."
* </pre>
*
* <code>string description = 3;</code>
*
* @return The bytes for description.
*/
@java.lang.Override
public com.google.protobuf.ByteString getDescriptionBytes() {
java.lang.Object ref = description_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
description_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
  // Cached tri-state: -1 = not computed, 0 = not initialized, 1 = initialized.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    // This message declares no required fields, so it is always initialized;
    // the result is memoized after the first call.
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // proto3 wire format: fields at their default value (empty string) are omitted.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(displayName_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, displayName_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(description_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, description_);
    }
    // Unknown fields are round-tripped so re-serialization preserves them.
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // Memoized size; -1 is the "not yet computed" sentinel.
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    // Mirrors writeTo: only non-default fields contribute to the size.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(displayName_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, displayName_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(description_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, description_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.securitycenter.v1beta1.Source)) {
      return super.equals(obj);
    }
    com.google.cloud.securitycenter.v1beta1.Source other =
        (com.google.cloud.securitycenter.v1beta1.Source) obj;
    // Field-by-field comparison; unknown fields participate so that a message
    // carrying unrecognized wire data is not equal to one without it.
    if (!getName().equals(other.getName())) return false;
    if (!getDisplayName().equals(other.getDisplayName())) return false;
    if (!getDescription().equals(other.getDescription())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // Memoized hash; 0 doubles as the "not yet computed" sentinel.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    // Each field contributes its field number followed by its value's hash.
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + NAME_FIELD_NUMBER;
    hash = (53 * hash) + getName().hashCode();
    hash = (37 * hash) + DISPLAY_NAME_FIELD_NUMBER;
    hash = (53 * hash) + getDisplayName().hashCode();
    hash = (37 * hash) + DESCRIPTION_FIELD_NUMBER;
    hash = (53 * hash) + getDescription().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
public static com.google.cloud.securitycenter.v1beta1.Source parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.securitycenter.v1beta1.Source parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.securitycenter.v1beta1.Source parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.securitycenter.v1beta1.Source parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.securitycenter.v1beta1.Source parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.securitycenter.v1beta1.Source parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.securitycenter.v1beta1.Source parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.securitycenter.v1beta1.Source parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.securitycenter.v1beta1.Source parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.securitycenter.v1beta1.Source parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.securitycenter.v1beta1.Source parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.securitycenter.v1beta1.Source parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  /** Returns a fresh builder seeded with default values. */
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  /** Returns a builder pre-populated with the fields of {@code prototype}. */
  public static Builder newBuilder(com.google.cloud.securitycenter.v1beta1.Source prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // Skip the redundant merge when converting the default instance itself.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Security Command Center finding source. A finding source
* is an entity or a mechanism that can produce a finding. A source is like a
* container of findings that come from the same scanner, logger, monitor, etc.
* </pre>
*
* Protobuf type {@code google.cloud.securitycenter.v1beta1.Source}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.securitycenter.v1beta1.Source)
com.google.cloud.securitycenter.v1beta1.SourceOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.securitycenter.v1beta1.SourceOuterClass
.internal_static_google_cloud_securitycenter_v1beta1_Source_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.securitycenter.v1beta1.SourceOuterClass
.internal_static_google_cloud_securitycenter_v1beta1_Source_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.securitycenter.v1beta1.Source.class,
com.google.cloud.securitycenter.v1beta1.Source.Builder.class);
}
// Construct using com.google.cloud.securitycenter.v1beta1.Source.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
name_ = "";
displayName_ = "";
description_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.securitycenter.v1beta1.SourceOuterClass
.internal_static_google_cloud_securitycenter_v1beta1_Source_descriptor;
}
@java.lang.Override
public com.google.cloud.securitycenter.v1beta1.Source getDefaultInstanceForType() {
return com.google.cloud.securitycenter.v1beta1.Source.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.securitycenter.v1beta1.Source build() {
com.google.cloud.securitycenter.v1beta1.Source result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
    @java.lang.Override
    public com.google.cloud.securitycenter.v1beta1.Source buildPartial() {
      com.google.cloud.securitycenter.v1beta1.Source result =
          new com.google.cloud.securitycenter.v1beta1.Source(this);
      // Only copy fields that were explicitly set on the builder (bitField0_ tracks them).
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    /** Copies the fields flagged in {@code bitField0_} from this builder into {@code result}. */
    private void buildPartial0(com.google.cloud.securitycenter.v1beta1.Source result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.name_ = name_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.displayName_ = displayName_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.description_ = description_;
      }
    }
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.securitycenter.v1beta1.Source) {
return mergeFrom((com.google.cloud.securitycenter.v1beta1.Source) other);
} else {
super.mergeFrom(other);
return this;
}
}
    public Builder mergeFrom(com.google.cloud.securitycenter.v1beta1.Source other) {
      if (other == com.google.cloud.securitycenter.v1beta1.Source.getDefaultInstance()) return this;
      // proto3 merge semantics: only non-default (non-empty) fields overwrite ours.
      if (!other.getName().isEmpty()) {
        name_ = other.name_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (!other.getDisplayName().isEmpty()) {
        displayName_ = other.displayName_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (!other.getDescription().isEmpty()) {
        description_ = other.description_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
@java.lang.Override
public final boolean isInitialized() {
return true;
}
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          // Wire tag = (field_number << 3) | wire_type; e.g. 10 = field 1, length-delimited.
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // Tag 0 marks end of stream.
              done = true;
              break;
            case 10:
              {
                name_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                displayName_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 26:
              {
                description_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            default:
              {
                // Preserve unrecognized fields; stop on an end-group tag.
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        // Notify listeners even on failure, since fields may have been partially set.
        onChanged();
      } // finally
      return this;
    }
private int bitField0_;
private java.lang.Object name_ = "";
/**
*
*
* <pre>
* The relative resource name of this source. See:
* https://cloud.google.com/apis/design/resource_names#relative_resource_name
* Example:
* "organizations/{organization_id}/sources/{source_id}"
* </pre>
*
* <code>string name = 1;</code>
*
* @return The name.
*/
public java.lang.String getName() {
java.lang.Object ref = name_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
name_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* The relative resource name of this source. See:
* https://cloud.google.com/apis/design/resource_names#relative_resource_name
* Example:
* "organizations/{organization_id}/sources/{source_id}"
* </pre>
*
* <code>string name = 1;</code>
*
* @return The bytes for name.
*/
public com.google.protobuf.ByteString getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* The relative resource name of this source. See:
* https://cloud.google.com/apis/design/resource_names#relative_resource_name
* Example:
* "organizations/{organization_id}/sources/{source_id}"
* </pre>
*
* <code>string name = 1;</code>
*
* @param value The name to set.
* @return This builder for chaining.
*/
public Builder setName(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
name_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* The relative resource name of this source. See:
* https://cloud.google.com/apis/design/resource_names#relative_resource_name
* Example:
* "organizations/{organization_id}/sources/{source_id}"
* </pre>
*
* <code>string name = 1;</code>
*
* @return This builder for chaining.
*/
public Builder clearName() {
name_ = getDefaultInstance().getName();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* The relative resource name of this source. See:
* https://cloud.google.com/apis/design/resource_names#relative_resource_name
* Example:
* "organizations/{organization_id}/sources/{source_id}"
* </pre>
*
* <code>string name = 1;</code>
*
* @param value The bytes for name to set.
* @return This builder for chaining.
*/
public Builder setNameBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
name_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private java.lang.Object displayName_ = "";
/**
*
*
* <pre>
* The source's display name.
* A source's display name must be unique amongst its siblings, for example,
* two sources with the same parent can't share the same display name.
* The display name must have a length between 1 and 64 characters
* (inclusive).
* </pre>
*
* <code>string display_name = 2;</code>
*
* @return The displayName.
*/
public java.lang.String getDisplayName() {
java.lang.Object ref = displayName_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
displayName_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* The source's display name.
* A source's display name must be unique amongst its siblings, for example,
* two sources with the same parent can't share the same display name.
* The display name must have a length between 1 and 64 characters
* (inclusive).
* </pre>
*
* <code>string display_name = 2;</code>
*
* @return The bytes for displayName.
*/
public com.google.protobuf.ByteString getDisplayNameBytes() {
  java.lang.Object ref = displayName_;
  if (ref instanceof String) {
    // Cache the UTF-8 encoding so repeated byte accesses don't re-encode.
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    displayName_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
/**
*
*
* <pre>
* The source's display name.
* A source's display name must be unique amongst its siblings, for example,
* two sources with the same parent can't share the same display name.
* The display name must have a length between 1 and 64 characters
* (inclusive).
* </pre>
*
* <code>string display_name = 2;</code>
*
* @param value The displayName to set.
* @return This builder for chaining.
*/
public Builder setDisplayName(java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  displayName_ = value;
  bitField0_ |= 0x00000002; // mark field 2 as set
  onChanged();
  return this;
}
/**
*
*
* <pre>
* The source's display name.
* A source's display name must be unique amongst its siblings, for example,
* two sources with the same parent can't share the same display name.
* The display name must have a length between 1 and 64 characters
* (inclusive).
* </pre>
*
* <code>string display_name = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearDisplayName() {
  // Reset to the proto default ("") and clear the has-bit for field 2.
  displayName_ = getDefaultInstance().getDisplayName();
  bitField0_ = (bitField0_ & ~0x00000002);
  onChanged();
  return this;
}
/**
*
*
* <pre>
* The source's display name.
* A source's display name must be unique amongst its siblings, for example,
* two sources with the same parent can't share the same display name.
* The display name must have a length between 1 and 64 characters
* (inclusive).
* </pre>
*
* <code>string display_name = 2;</code>
*
* @param value The bytes for displayName to set.
* @return This builder for chaining.
*/
public Builder setDisplayNameBytes(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  // Proto3 string fields must contain valid UTF-8; reject malformed bytes early.
  checkByteStringIsUtf8(value);
  displayName_ = value;
  bitField0_ |= 0x00000002; // mark field 2 as set
  onChanged();
  return this;
}
// Backing store for description: holds either a String or a ByteString that
// is lazily decoded to a String (and cached) on first string access.
private java.lang.Object description_ = "";
/**
 *
 *
 * <pre>
 * The description of the source (max of 1024 characters).
 * Example:
 * "Web Security Scanner is a web security scanner for common
 * vulnerabilities in App Engine applications. It can automatically
 * scan and detect four common vulnerabilities, including cross-site-scripting
 * (XSS), Flash injection, mixed content (HTTP in HTTPS), and
 * outdated/insecure libraries."
 * </pre>
 *
 * <code>string description = 3;</code>
 *
 * @return The description.
 */
public java.lang.String getDescription() {
  java.lang.Object ref = description_;
  if (!(ref instanceof java.lang.String)) {
    // Field still holds serialized bytes; decode once and cache the String.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    description_ = s;
    return s;
  } else {
    return (java.lang.String) ref;
  }
}
/**
*
*
* <pre>
* The description of the source (max of 1024 characters).
* Example:
* "Web Security Scanner is a web security scanner for common
* vulnerabilities in App Engine applications. It can automatically
* scan and detect four common vulnerabilities, including cross-site-scripting
* (XSS), Flash injection, mixed content (HTTP in HTTPS), and
* outdated/insecure libraries."
* </pre>
*
* <code>string description = 3;</code>
*
* @return The bytes for description.
*/
public com.google.protobuf.ByteString getDescriptionBytes() {
  java.lang.Object ref = description_;
  if (ref instanceof String) {
    // Cache the UTF-8 encoding so repeated byte accesses don't re-encode.
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    description_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
/**
*
*
* <pre>
* The description of the source (max of 1024 characters).
* Example:
* "Web Security Scanner is a web security scanner for common
* vulnerabilities in App Engine applications. It can automatically
* scan and detect four common vulnerabilities, including cross-site-scripting
* (XSS), Flash injection, mixed content (HTTP in HTTPS), and
* outdated/insecure libraries."
* </pre>
*
* <code>string description = 3;</code>
*
* @param value The description to set.
* @return This builder for chaining.
*/
public Builder setDescription(java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  description_ = value;
  bitField0_ |= 0x00000004; // mark field 3 as set
  onChanged();
  return this;
}
/**
*
*
* <pre>
* The description of the source (max of 1024 characters).
* Example:
* "Web Security Scanner is a web security scanner for common
* vulnerabilities in App Engine applications. It can automatically
* scan and detect four common vulnerabilities, including cross-site-scripting
* (XSS), Flash injection, mixed content (HTTP in HTTPS), and
* outdated/insecure libraries."
* </pre>
*
* <code>string description = 3;</code>
*
* @return This builder for chaining.
*/
public Builder clearDescription() {
  // Reset to the proto default ("") and clear the has-bit for field 3.
  description_ = getDefaultInstance().getDescription();
  bitField0_ = (bitField0_ & ~0x00000004);
  onChanged();
  return this;
}
/**
*
*
* <pre>
* The description of the source (max of 1024 characters).
* Example:
* "Web Security Scanner is a web security scanner for common
* vulnerabilities in App Engine applications. It can automatically
* scan and detect four common vulnerabilities, including cross-site-scripting
* (XSS), Flash injection, mixed content (HTTP in HTTPS), and
* outdated/insecure libraries."
* </pre>
*
* <code>string description = 3;</code>
*
* @param value The bytes for description to set.
* @return This builder for chaining.
*/
public Builder setDescriptionBytes(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  // Proto3 string fields must contain valid UTF-8; reject malformed bytes early.
  checkByteStringIsUtf8(value);
  description_ = value;
  bitField0_ |= 0x00000004; // mark field 3 as set
  onChanged();
  return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
  // Delegate to the generated superclass; declared final so subclasses cannot
  // break unknown-field round-tripping.
  return super.setUnknownFields(unknownFields);
}

@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  // Delegate to the generated superclass (same contract as setUnknownFields).
  return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.securitycenter.v1beta1.Source)
}
// @@protoc_insertion_point(class_scope:google.cloud.securitycenter.v1beta1.Source)
// Shared singleton default instance; protobuf messages are immutable, so
// reusing one instance for all callers is safe.
private static final com.google.cloud.securitycenter.v1beta1.Source DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.securitycenter.v1beta1.Source();
}

public static com.google.cloud.securitycenter.v1beta1.Source getDefaultInstance() {
  return DEFAULT_INSTANCE;
}

// Wire-format parser. On failure it attaches whatever was parsed so far as
// the "unfinished message" so callers can inspect the partial result.
private static final com.google.protobuf.Parser<Source> PARSER =
    new com.google.protobuf.AbstractParser<Source>() {
      @java.lang.Override
      public Source parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Re-throw with the partial message attached.
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          // Wrap plain I/O failures in the protobuf exception type callers expect.
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

public static com.google.protobuf.Parser<Source> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<Source> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.cloud.securitycenter.v1beta1.Source getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
|
apache/harmony | 35,882 | classlib/modules/sound/src/main/java/javax/sound/midi/MidiSystem.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package javax.sound.midi;
import java.util.ArrayList;
import java.util.List;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URL;
import javax.sound.midi.MidiDevice.Info;
import javax.sound.midi.spi.MidiDeviceProvider;
import javax.sound.midi.spi.MidiFileReader;
import javax.sound.midi.spi.MidiFileWriter;
import javax.sound.midi.spi.SoundbankReader;
import org.apache.harmony.sound.utils.ProviderService;
import org.apache.harmony.sound.internal.nls.Messages;
/**
 * Entry point of the MIDI subsystem: locates installed MIDI devices,
 * sequencers, synthesizers, MIDI file readers/writers and soundbank readers
 * through the SPI provider registration files.
 *
 * <p>Default-device lookups honour the {@code sound.properties} entries
 * (provider class name plus device name) and fall back through progressively
 * weaker matches, finally trying every installed device.
 */
public class MidiSystem {

    // This class has no public constructor
    private MidiSystem() {
    }

    //path to javax.sound.midi.spi.MidiDeviceProvider file in the jar-file
    private final static String midiDeviceProviderPath =
            "META-INF/services/javax.sound.midi.spi.MidiDeviceProvider";

    //path to javax.sound.midi.spi.MidiFileReader file in the jar-file
    private final static String midiFileReaderPath =
            "META-INF/services/javax.sound.midi.spi.MidiFileReader";

    //path to javax.sound.midi.spi.MidiFileWriter file in the jar-file
    private final static String midiFileWriterPath =
            "META-INF/services/javax.sound.midi.spi.MidiFileWriter";

    //path to javax.sound.midi.spi.SoundbankReader file in the jar-file
    private final static String soundbankReaderPath =
            "META-INF/services/javax.sound.midi.spi.SoundbankReader";

    //key to find default receiver in the sound.properties file
    private final static String receiverName = "javax.sound.midi.Receiver";

    //key to find default sequencer in the sound.properties file
    private final static String sequencerName = "javax.sound.midi.Sequencer";

    //key to find default synthesizer in the sound.properties file
    private final static String synthesizerName = "javax.sound.midi.Synthesizer";

    //key to find default transmitter in the sound.properties file
    private final static String transmitterName = "javax.sound.midi.Transmitter";

    /**
     * Pairs a device provider with one of its device descriptors. The device
     * itself is only instantiated when the candidate is actually tried, so
     * building a candidate list has no device-creation side effects.
     */
    private static final class Candidate {
        final MidiDeviceProvider provider;
        final MidiDevice.Info info;

        Candidate(MidiDeviceProvider provider, MidiDevice.Info info) {
            this.provider = provider;
            this.info = info;
        }

        MidiDevice device() {
            return provider.getDevice(info);
        }
    }

    /**
     * Returns the index of the provider whose class name (the part of
     * {@code toString()} before the '@') equals {@code providerClassName},
     * or -1 if there is none.
     */
    private static int findProviderIndex(List<?> providers, String providerClassName) {
        for (int i = 0; i < providers.size(); i++) {
            String provName = providers.get(i).toString();
            int at = provName.indexOf("@");
            // Guard against toString() implementations without an '@' (the
            // original code would throw StringIndexOutOfBoundsException here).
            if (at != -1 && provName.substring(0, at).equals(providerClassName)) {
                return i;
            }
        }
        return -1;
    }

    /**
     * Builds the ordered list of device candidates to try when looking up the
     * default device registered under {@code defaultKey}:
     * (1) devices of the default provider whose name matches the default name,
     * (2) any device of the default provider,
     * (3) devices of any provider whose name matches the default name,
     * (4) every installed device.
     * Duplicates are intentional: a later stage retries devices that failed in
     * an earlier one, mirroring the original search behavior.
     */
    private static List<Candidate> candidates(String defaultKey) {
        /*
         * defaultDevice.get(0) --> provider class name
         * defaultDevice.get(1) --> device name
         */
        List<String> defaultDevice = ProviderService.getDefaultDeviceDescription(defaultKey);
        List<?> deviceProviders = ProviderService.getProviders(midiDeviceProviderPath);
        List<Candidate> result = new ArrayList<Candidate>();
        if (defaultDevice.size() != 0) {
            String defaultName = defaultDevice.get(1);
            int deviceNum = findProviderIndex(deviceProviders, defaultDevice.get(0));
            if (deviceNum != -1) {
                MidiDeviceProvider prov = (MidiDeviceProvider) deviceProviders.get(deviceNum);
                MidiDevice.Info[] deviceInfo = prov.getDeviceInfo();
                // Stage 1: name matches within the default provider.
                for (Info element : deviceInfo) {
                    if (element.getName().equals(defaultName)) {
                        result.add(new Candidate(prov, element));
                    }
                }
                // Stage 2: any device of the default provider.
                for (Info element : deviceInfo) {
                    result.add(new Candidate(prov, element));
                }
            }
            // Stage 3: name matches in any provider.
            for (int i = 0; i < deviceProviders.size(); i++) {
                MidiDeviceProvider prov = (MidiDeviceProvider) deviceProviders.get(i);
                for (Info element : prov.getDeviceInfo()) {
                    if (element.getName().equals(defaultName)) {
                        result.add(new Candidate(prov, element));
                    }
                }
            }
        }
        // Stage 4: every device of every provider.
        for (int i = 0; i < deviceProviders.size(); i++) {
            MidiDeviceProvider prov = (MidiDeviceProvider) deviceProviders.get(i);
            for (Info element : prov.getDeviceInfo()) {
                result.add(new Candidate(prov, element));
            }
        }
        return result;
    }

    /**
     * Returns the first registered MIDI file reader.
     *
     * FIXME: it is unclear what to throw when no provider is installed;
     * an Error is kept for compatibility, though MidiUnavailableException
     * might be more appropriate. It is also unspecified which provider
     * should be preferred, so the first one is used.
     */
    private static MidiFileReader firstFileReader() {
        List<?> fileReaderProviders = ProviderService.getProviders(midiFileReaderPath);
        if (fileReaderProviders.size() == 0) {
            throw new Error(Messages.getString("sound.19"));
        }
        return (MidiFileReader) fileReaderProviders.get(0);
    }

    /**
     * Returns the first registered MIDI file writer (same FIXME as
     * {@link #firstFileReader()} regarding the Error and provider choice).
     */
    private static MidiFileWriter firstFileWriter() {
        List<?> fileWriterProviders = ProviderService.getProviders(midiFileWriterPath);
        if (fileWriterProviders.size() == 0) {
            throw new Error(Messages.getString("sound.1A"));
        }
        return (MidiFileWriter) fileWriterProviders.get(0);
    }

    /**
     * Returns the first registered soundbank reader (same FIXME as
     * {@link #firstFileReader()} regarding the Error and provider choice).
     */
    private static SoundbankReader firstSoundbankReader() {
        List<?> soundbankReaderProviders = ProviderService.getProviders(soundbankReaderPath);
        if (soundbankReaderProviders.size() == 0) {
            throw new Error(Messages.getString("sound.1D"));
        }
        return (SoundbankReader) soundbankReaderProviders.get(0);
    }

    /**
     * Returns the device described by {@code info}.
     *
     * FIXME: this method should also throw MidiUnavailableException when the
     * requested device exists but is not available.
     *
     * @throws IllegalArgumentException if no installed provider offers a
     *         device matching {@code info}
     */
    public static MidiDevice getMidiDevice(MidiDevice.Info info)
            throws MidiUnavailableException {
        List<?> deviceProviders = ProviderService.getProviders(midiDeviceProviderPath);
        for (int i = 0; i < deviceProviders.size(); i++) {
            MidiDeviceProvider prov = (MidiDeviceProvider) deviceProviders.get(i);
            for (Info element : prov.getDeviceInfo()) {
                if (element.equals(info)) {
                    return prov.getDevice(info);
                }
            }
        }
        throw new IllegalArgumentException(Messages.getString("sound.1F", info.getName()));
    }

    /**
     * Returns descriptors for all devices offered by all installed providers.
     */
    public static MidiDevice.Info[] getMidiDeviceInfo() {
        List<?> deviceProviders = ProviderService.getProviders(midiDeviceProviderPath);
        List<MidiDevice.Info> infos = new ArrayList<MidiDevice.Info>();
        for (int i = 0; i < deviceProviders.size(); i++) {
            for (Info element : ((MidiDeviceProvider) deviceProviders.get(i)).getDeviceInfo()) {
                infos.add(element);
            }
        }
        return infos.toArray(new MidiDevice.Info[infos.size()]);
    }

    /** Returns the MIDI file format of {@code file}. */
    public static MidiFileFormat getMidiFileFormat(File file) throws InvalidMidiDataException,
            IOException {
        return firstFileReader().getMidiFileFormat(file);
    }

    /** Returns the MIDI file format of the data in {@code stream}. */
    public static MidiFileFormat getMidiFileFormat(InputStream stream) throws InvalidMidiDataException,
            IOException {
        return firstFileReader().getMidiFileFormat(stream);
    }

    /** Returns the MIDI file format of the data at {@code url}. */
    public static MidiFileFormat getMidiFileFormat(URL url) throws InvalidMidiDataException,
            IOException {
        return firstFileReader().getMidiFileFormat(url);
    }

    /** Returns the MIDI file types the installed writer can produce. */
    public static int[] getMidiFileTypes() {
        return firstFileWriter().getMidiFileTypes();
    }

    /** Returns the MIDI file types the installed writer can produce for {@code sequence}. */
    public static int[] getMidiFileTypes(Sequence sequence) {
        return firstFileWriter().getMidiFileTypes(sequence);
    }

    /**
     * Returns a receiver from the default (or, failing that, any available)
     * MIDI device.
     *
     * @throws MidiUnavailableException if no installed device can provide a
     *         receiver
     */
    public static Receiver getReceiver() throws MidiUnavailableException {
        for (Candidate candidate : candidates(receiverName)) {
            try {
                return candidate.device().getReceiver();
            } catch (MidiUnavailableException e) {
                // this device cannot provide a receiver; try the next one
            }
        }
        throw new MidiUnavailableException(Messages.getString("sound.1B"));
    }

    /** Reads a Sequence from {@code file}. */
    public static Sequence getSequence(File file) throws InvalidMidiDataException, IOException {
        return firstFileReader().getSequence(file);
    }

    /** Reads a Sequence from {@code stream}. */
    public static Sequence getSequence(InputStream stream) throws InvalidMidiDataException,
            IOException {
        return firstFileReader().getSequence(stream);
    }

    /** Reads a Sequence from {@code url}. */
    public static Sequence getSequence(URL url) throws InvalidMidiDataException, IOException {
        return firstFileReader().getSequence(url);
    }

    /**
     * Returns the default sequencer, connected to a receiver.
     * Equivalent to {@code getSequencer(true)}.
     */
    public static Sequencer getSequencer() throws MidiUnavailableException {
        return getSequencer(true);
    }

    /**
     * Returns the default sequencer, optionally connecting its transmitter to
     * the default synthesizer's receiver (or, if no synthesizer is available,
     * to the default receiver).
     *
     * @param connected whether to wire the sequencer to a receiver
     * @throws MidiUnavailableException if no sequencer device is installed
     */
    public static Sequencer getSequencer(boolean connected) throws MidiUnavailableException {
        for (Candidate candidate : candidates(sequencerName)) {
            MidiDevice device = candidate.device();
            if (device instanceof Sequencer) {
                return connect((Sequencer) device, connected);
            }
        }
        throw new MidiUnavailableException(Messages.getString("sound.1C"));
    }

    /**
     * Wires {@code sequencer}'s transmitter to a receiver when requested:
     * preferably the default synthesizer's receiver, falling back to the
     * default receiver when no synthesizer is available.
     */
    private static Sequencer connect(Sequencer sequencer, boolean connected)
            throws MidiUnavailableException {
        if (connected) {
            Transmitter seqTrans = sequencer.getTransmitter();
            Receiver recv;
            try {
                recv = MidiSystem.getSynthesizer().getReceiver();
            } catch (MidiUnavailableException e) {
                // no Synthesizer in the system; use the default receiver
                recv = MidiSystem.getReceiver();
            }
            seqTrans.setReceiver(recv);
        }
        return sequencer;
    }

    /** Reads a Soundbank from {@code file}. */
    public static Soundbank getSoundbank(File file) throws InvalidMidiDataException,
            IOException {
        return firstSoundbankReader().getSoundbank(file);
    }

    /** Reads a Soundbank from {@code stream}. */
    public static Soundbank getSoundbank(InputStream stream) throws InvalidMidiDataException, IOException {
        return firstSoundbankReader().getSoundbank(stream);
    }

    /** Reads a Soundbank from {@code url}. */
    public static Soundbank getSoundbank(URL url) throws InvalidMidiDataException, IOException {
        return firstSoundbankReader().getSoundbank(url);
    }

    /**
     * Returns the default (or, failing that, any available) synthesizer.
     *
     * @throws MidiUnavailableException if no synthesizer device is installed
     */
    public static Synthesizer getSynthesizer() throws MidiUnavailableException {
        for (Candidate candidate : candidates(synthesizerName)) {
            MidiDevice device = candidate.device();
            if (device instanceof Synthesizer) {
                return (Synthesizer) device;
            }
        }
        throw new MidiUnavailableException(Messages.getString("sound.1C"));
    }

    /**
     * Returns a transmitter from the default (or, failing that, any
     * available) MIDI device.
     *
     * @throws MidiUnavailableException if no installed device can provide a
     *         transmitter
     */
    public static Transmitter getTransmitter() throws MidiUnavailableException {
        for (Candidate candidate : candidates(transmitterName)) {
            try {
                return candidate.device().getTransmitter();
            } catch (MidiUnavailableException e) {
                // this device cannot provide a transmitter; try the next one
            }
        }
        throw new MidiUnavailableException(Messages.getString("sound.1E"));
    }

    /** Returns whether the installed writer supports {@code fileType}. */
    public static boolean isFileTypeSupported(int fileType) {
        return firstFileWriter().isFileTypeSupported(fileType);
    }

    /** Returns whether the installed writer supports {@code fileType} for {@code sequence}. */
    public static boolean isFileTypeSupported(int fileType, Sequence sequence) {
        return firstFileWriter().isFileTypeSupported(fileType, sequence);
    }

    /** Writes {@code in} as MIDI file type {@code type} to {@code out}; returns bytes written. */
    public static int write(Sequence in, int type, File out) throws IOException {
        return firstFileWriter().write(in, type, out);
    }

    /** Writes {@code in} as MIDI file type {@code fileType} to {@code out}; returns bytes written. */
    public static int write(Sequence in, int fileType, OutputStream out) throws IOException {
        return firstFileWriter().write(in, fileType, out);
    }
}
|
apache/hadoop-common | 35,687 | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/TestLocalResourcesTrackerImpl.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer;
import static org.mockito.Mockito.any;
import static org.mockito.Matchers.isA;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import java.io.File;
import java.io.IOException;
import java.util.Iterator;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import org.junit.Assert;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.LocalResourceType;
import org.apache.hadoop.yarn.api.records.LocalResourceVisibility;
import org.apache.hadoop.yarn.api.records.impl.pb.LocalResourcePBImpl;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.event.Dispatcher;
import org.apache.hadoop.yarn.event.DrainDispatcher;
import org.apache.hadoop.yarn.event.EventHandler;
import org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto;
import org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.LocalizedResourceProto;
import org.apache.hadoop.yarn.server.nodemanager.DeletionService;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.ContainerEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.ContainerEventType;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.ContainerResourceFailedEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.ContainerResourceLocalizedEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.LocalizerEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.LocalizerEventType;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.LocalizerResourceRequestEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.ResourceEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.ResourceFailedLocalizationEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.ResourceLocalizedEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.ResourceRecoveredEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.ResourceReleaseEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.ResourceRequestEvent;
import org.apache.hadoop.yarn.server.nodemanager.recovery.NMNullStateStoreService;
import org.apache.hadoop.yarn.server.nodemanager.recovery.NMStateStoreService;
import org.apache.hadoop.yarn.server.utils.BuilderUtils;
import org.junit.Test;
import org.mockito.ArgumentCaptor;
public class TestLocalResourcesTrackerImpl {
  /**
   * Verifies basic reference counting in {@link LocalResourcesTrackerImpl}:
   * requests raise a resource's ref count, releases lower it, a resource with
   * a non-zero ref count cannot be removed, and a LOCALIZED resource whose
   * ref count has dropped to zero is removed from the tracker.
   */
  @Test(timeout=10000)
  @SuppressWarnings("unchecked")
  public void test() {
    String user = "testuser";
    DrainDispatcher dispatcher = null;
    try {
      Configuration conf = new Configuration();
      dispatcher = createDispatcher(conf);
      EventHandler<LocalizerEvent> localizerEventHandler =
          mock(EventHandler.class);
      EventHandler<LocalizerEvent> containerEventHandler =
          mock(EventHandler.class);
      dispatcher.register(LocalizerEventType.class, localizerEventHandler);
      dispatcher.register(ContainerEventType.class, containerEventHandler);
      DeletionService mockDelService = mock(DeletionService.class);
      // Two containers of the same app; both will request resource R1,
      // and C1 additionally requests R2.
      ContainerId cId1 = BuilderUtils.newContainerId(1, 1, 1, 1);
      LocalizerContext lc1 = new LocalizerContext(user, cId1, null);
      ContainerId cId2 = BuilderUtils.newContainerId(1, 1, 1, 2);
      LocalizerContext lc2 = new LocalizerContext(user, cId2, null);
      LocalResourceRequest req1 =
          createLocalResourceRequest(user, 1, 1, LocalResourceVisibility.PUBLIC);
      LocalResourceRequest req2 =
          createLocalResourceRequest(user, 2, 1, LocalResourceVisibility.PUBLIC);
      LocalizedResource lr1 = createLocalizedResource(req1, dispatcher);
      LocalizedResource lr2 = createLocalizedResource(req2, dispatcher);
      // Pre-populate the tracker's backing map so lr1/lr2 can be inspected
      // directly while the tracker handles events against them.
      ConcurrentMap<LocalResourceRequest, LocalizedResource> localrsrc =
          new ConcurrentHashMap<LocalResourceRequest, LocalizedResource>();
      localrsrc.put(req1, lr1);
      localrsrc.put(req2, lr2);
      LocalResourcesTracker tracker =
          new LocalResourcesTrackerImpl(user, null, dispatcher, localrsrc,
            false, conf, new NMNullStateStoreService());
      ResourceEvent req11Event =
          new ResourceRequestEvent(req1, LocalResourceVisibility.PUBLIC, lc1);
      ResourceEvent req12Event =
          new ResourceRequestEvent(req1, LocalResourceVisibility.PUBLIC, lc2);
      ResourceEvent req21Event =
          new ResourceRequestEvent(req2, LocalResourceVisibility.PUBLIC, lc1);
      ResourceEvent rel11Event = new ResourceReleaseEvent(req1, cId1);
      ResourceEvent rel12Event = new ResourceReleaseEvent(req1, cId2);
      ResourceEvent rel21Event = new ResourceReleaseEvent(req2, cId1);
      // Localize R1 for C1
      tracker.handle(req11Event);
      // Localize R1 for C2
      tracker.handle(req12Event);
      // Localize R2 for C1
      tracker.handle(req21Event);
      dispatcher.await();
      verify(localizerEventHandler, times(3)).handle(
          any(LocalizerResourceRequestEvent.class));
      // Verify refCount for R1 is 2
      Assert.assertEquals(2, lr1.getRefCount());
      // Verify refCount for R2 is 1
      Assert.assertEquals(1, lr2.getRefCount());
      // Release R2 for C1
      tracker.handle(rel21Event);
      dispatcher.await();
      verifyTrackedResourceCount(tracker, 2);
      // Verify resource with non zero ref count is not removed.
      Assert.assertEquals(2, lr1.getRefCount());
      Assert.assertFalse(tracker.remove(lr1, mockDelService));
      verifyTrackedResourceCount(tracker, 2);
      // Localize resource1
      ResourceLocalizedEvent rle =
          new ResourceLocalizedEvent(req1, new Path("file:///tmp/r1"), 1);
      lr1.handle(rle);
      Assert.assertTrue(lr1.getState().equals(ResourceState.LOCALIZED));
      // Release resource1
      tracker.handle(rel11Event);
      tracker.handle(rel12Event);
      Assert.assertEquals(0, lr1.getRefCount());
      // Verify resources in state LOCALIZED with ref-count=0 is removed.
      Assert.assertTrue(tracker.remove(lr1, mockDelService));
      verifyTrackedResourceCount(tracker, 1);
    } finally {
      if (dispatcher != null) {
        dispatcher.stop();
      }
    }
  }
  /**
   * Verifies the tracker stays consistent when a localized file vanishes from
   * disk: after the on-disk file is deleted and the resource is requested and
   * localized again, the tracker must hand out a NEW {@link LocalizedResource}
   * instance rather than the stale one.
   */
  @Test(timeout=10000)
  @SuppressWarnings("unchecked")
  public void testConsistency() {
    String user = "testuser";
    DrainDispatcher dispatcher = null;
    try {
      Configuration conf = new Configuration();
      dispatcher = createDispatcher(conf);
      EventHandler<LocalizerEvent> localizerEventHandler = mock(EventHandler.class);
      EventHandler<LocalizerEvent> containerEventHandler = mock(EventHandler.class);
      dispatcher.register(LocalizerEventType.class, localizerEventHandler);
      dispatcher.register(ContainerEventType.class, containerEventHandler);
      ContainerId cId1 = BuilderUtils.newContainerId(1, 1, 1, 1);
      LocalizerContext lc1 = new LocalizerContext(user, cId1, null);
      LocalResourceRequest req1 = createLocalResourceRequest(user, 1, 1,
          LocalResourceVisibility.PUBLIC);
      LocalizedResource lr1 = createLocalizedResource(req1, dispatcher);
      ConcurrentMap<LocalResourceRequest, LocalizedResource> localrsrc = new ConcurrentHashMap<LocalResourceRequest, LocalizedResource>();
      localrsrc.put(req1, lr1);
      LocalResourcesTracker tracker = new LocalResourcesTrackerImpl(user,
          null, dispatcher, localrsrc, false, conf,
          new NMNullStateStoreService());
      ResourceEvent req11Event = new ResourceRequestEvent(req1,
          LocalResourceVisibility.PUBLIC, lc1);
      ResourceEvent rel11Event = new ResourceReleaseEvent(req1, cId1);
      // Localize R1 for C1
      tracker.handle(req11Event);
      dispatcher.await();
      // Verify refCount for R1 is 1
      Assert.assertEquals(1, lr1.getRefCount());
      dispatcher.await();
      verifyTrackedResourceCount(tracker, 1);
      // Localize resource1
      ResourceLocalizedEvent rle = new ResourceLocalizedEvent(req1, new Path(
          "file:///tmp/r1"), 1);
      lr1.handle(rle);
      Assert.assertTrue(lr1.getState().equals(ResourceState.LOCALIZED));
      // Actually create the backing file so its deletion below is meaningful.
      Assert.assertTrue(createdummylocalizefile(new Path("file:///tmp/r1")));
      LocalizedResource rsrcbefore = tracker.iterator().next();
      File resFile = new File(lr1.getLocalPath().toUri().getRawPath()
          .toString());
      Assert.assertTrue(resFile.exists());
      // Simulate the localized file disappearing out from under the tracker.
      Assert.assertTrue(resFile.delete());
      // Localize R1 for C1
      tracker.handle(req11Event);
      dispatcher.await();
      lr1.handle(rle);
      Assert.assertTrue(lr1.getState().equals(ResourceState.LOCALIZED));
      LocalizedResource rsrcafter = tracker.iterator().next();
      // The tracker must have replaced the stale resource instance.
      if (rsrcbefore == rsrcafter) {
        Assert.fail("Localized resource should not be equal");
      }
      // Release resource1
      tracker.handle(rel11Event);
    } finally {
      if (dispatcher != null) {
        dispatcher.stop();
      }
    }
  }
@Test(timeout = 1000)
@SuppressWarnings("unchecked")
public void testLocalResourceCache() {
String user = "testuser";
DrainDispatcher dispatcher = null;
try {
Configuration conf = new Configuration();
dispatcher = createDispatcher(conf);
EventHandler<LocalizerEvent> localizerEventHandler =
mock(EventHandler.class);
EventHandler<ContainerEvent> containerEventHandler =
mock(EventHandler.class);
// Registering event handlers.
dispatcher.register(LocalizerEventType.class, localizerEventHandler);
dispatcher.register(ContainerEventType.class, containerEventHandler);
ConcurrentMap<LocalResourceRequest, LocalizedResource> localrsrc =
new ConcurrentHashMap<LocalResourceRequest, LocalizedResource>();
LocalResourcesTracker tracker =
new LocalResourcesTrackerImpl(user, null, dispatcher, localrsrc,
true, conf, new NMNullStateStoreService());
LocalResourceRequest lr =
createLocalResourceRequest(user, 1, 1, LocalResourceVisibility.PUBLIC);
// Creating 2 containers for same application which will be requesting
// same local resource.
// Container 1 requesting local resource.
ContainerId cId1 = BuilderUtils.newContainerId(1, 1, 1, 1);
LocalizerContext lc1 = new LocalizerContext(user, cId1, null);
ResourceEvent reqEvent1 =
new ResourceRequestEvent(lr, LocalResourceVisibility.PRIVATE, lc1);
// No resource request is initially present in local cache
Assert.assertEquals(0, localrsrc.size());
// Container-1 requesting local resource.
tracker.handle(reqEvent1);
dispatcher.await();
// New localized Resource should have been added to local resource map
// and the requesting container will be added to its waiting queue.
Assert.assertEquals(1, localrsrc.size());
Assert.assertTrue(localrsrc.containsKey(lr));
Assert.assertEquals(1, localrsrc.get(lr).getRefCount());
Assert.assertTrue(localrsrc.get(lr).ref.contains(cId1));
Assert.assertEquals(ResourceState.DOWNLOADING, localrsrc.get(lr)
.getState());
// Container 2 requesting the resource
ContainerId cId2 = BuilderUtils.newContainerId(1, 1, 1, 2);
LocalizerContext lc2 = new LocalizerContext(user, cId2, null);
ResourceEvent reqEvent2 =
new ResourceRequestEvent(lr, LocalResourceVisibility.PRIVATE, lc2);
tracker.handle(reqEvent2);
dispatcher.await();
// Container 2 should have been added to the waiting queue of the local
// resource
Assert.assertEquals(2, localrsrc.get(lr).getRefCount());
Assert.assertTrue(localrsrc.get(lr).ref.contains(cId2));
// Failing resource localization
ResourceEvent resourceFailedEvent = new ResourceFailedLocalizationEvent(
lr,(new Exception("test").getMessage()));
// Backing up the resource to track its state change as it will be
// removed after the failed event.
LocalizedResource localizedResource = localrsrc.get(lr);
tracker.handle(resourceFailedEvent);
dispatcher.await();
// After receiving failed resource event; all waiting containers will be
// notified with Container Resource Failed Event.
Assert.assertEquals(0, localrsrc.size());
verify(containerEventHandler, times(2)).handle(
isA(ContainerResourceFailedEvent.class));
Assert.assertEquals(ResourceState.FAILED, localizedResource.getState());
// Container 1 trying to release the resource (This resource is already
// deleted from the cache. This call should return silently without
// exception.
ResourceReleaseEvent relEvent1 = new ResourceReleaseEvent(lr, cId1);
tracker.handle(relEvent1);
dispatcher.await();
// Container-3 now requests for the same resource. This request call
// is coming prior to Container-2's release call.
ContainerId cId3 = BuilderUtils.newContainerId(1, 1, 1, 3);
LocalizerContext lc3 = new LocalizerContext(user, cId3, null);
ResourceEvent reqEvent3 =
new ResourceRequestEvent(lr, LocalResourceVisibility.PRIVATE, lc3);
tracker.handle(reqEvent3);
dispatcher.await();
// Local resource cache now should have the requested resource and the
// number of waiting containers should be 1.
Assert.assertEquals(1, localrsrc.size());
Assert.assertTrue(localrsrc.containsKey(lr));
Assert.assertEquals(1, localrsrc.get(lr).getRefCount());
Assert.assertTrue(localrsrc.get(lr).ref.contains(cId3));
// Container-2 Releases the resource
ResourceReleaseEvent relEvent2 = new ResourceReleaseEvent(lr, cId2);
tracker.handle(relEvent2);
dispatcher.await();
// Making sure that there is no change in the cache after the release.
Assert.assertEquals(1, localrsrc.size());
Assert.assertTrue(localrsrc.containsKey(lr));
Assert.assertEquals(1, localrsrc.get(lr).getRefCount());
Assert.assertTrue(localrsrc.get(lr).ref.contains(cId3));
// Sending ResourceLocalizedEvent to tracker. In turn resource should
// send Container Resource Localized Event to waiting containers.
Path localizedPath = new Path("/tmp/file1");
ResourceLocalizedEvent localizedEvent =
new ResourceLocalizedEvent(lr, localizedPath, 123L);
tracker.handle(localizedEvent);
dispatcher.await();
// Verifying ContainerResourceLocalizedEvent .
verify(containerEventHandler, times(1)).handle(
isA(ContainerResourceLocalizedEvent.class));
Assert.assertEquals(ResourceState.LOCALIZED, localrsrc.get(lr)
.getState());
Assert.assertEquals(1, localrsrc.get(lr).getRefCount());
// Container-3 releasing the resource.
ResourceReleaseEvent relEvent3 = new ResourceReleaseEvent(lr, cId3);
tracker.handle(relEvent3);
dispatcher.await();
Assert.assertEquals(0, localrsrc.get(lr).getRefCount());
} finally {
if (dispatcher != null) {
dispatcher.stop();
}
}
}
  /**
   * Verifies hierarchical cache directory management: with the per-directory
   * file limit configured so each directory holds a single file, consecutive
   * localizations must land in distinct sub-directories, and removal of
   * unreferenced resources must leave only in-use entries in the tracker.
   */
  @Test(timeout = 100000)
  @SuppressWarnings("unchecked")
  public void testHierarchicalLocalCacheDirectories() {
    String user = "testuser";
    DrainDispatcher dispatcher = null;
    try {
      Configuration conf = new Configuration();
      // Per-directory file limit: with 37, each directory effectively holds
      // only one file (the remaining slots are reserved for sub-directories
      // — see LocalCacheDirectoryManager; TODO confirm the 36+1 split).
      conf.set(YarnConfiguration.NM_LOCAL_CACHE_MAX_FILES_PER_DIRECTORY, "37");
      dispatcher = createDispatcher(conf);
      EventHandler<LocalizerEvent> localizerEventHandler =
          mock(EventHandler.class);
      EventHandler<LocalizerEvent> containerEventHandler =
          mock(EventHandler.class);
      dispatcher.register(LocalizerEventType.class, localizerEventHandler);
      dispatcher.register(ContainerEventType.class, containerEventHandler);
      DeletionService mockDelService = mock(DeletionService.class);
      ConcurrentMap<LocalResourceRequest, LocalizedResource> localrsrc =
          new ConcurrentHashMap<LocalResourceRequest, LocalizedResource>();
      LocalResourcesTracker tracker = new LocalResourcesTrackerImpl(user,
          null, dispatcher, localrsrc, true, conf,
          new NMNullStateStoreService());
      // This is a random path. NO File creation will take place at this place.
      Path localDir = new Path("/tmp");
      // Container 1 needs lr1 resource
      ContainerId cId1 = BuilderUtils.newContainerId(1, 1, 1, 1);
      LocalResourceRequest lr1 = createLocalResourceRequest(user, 1, 1,
          LocalResourceVisibility.PUBLIC);
      LocalizerContext lc1 = new LocalizerContext(user, cId1, null);
      // Container 1 requests lr1 to be localized
      ResourceEvent reqEvent1 = new ResourceRequestEvent(lr1,
          LocalResourceVisibility.PUBLIC, lc1);
      tracker.handle(reqEvent1);
      // Simulate the process of localization of lr1
      // NOTE: Localization path from tracker has resource ID at end
      Path hierarchicalPath1 =
          tracker.getPathForLocalization(lr1, localDir).getParent();
      // Simulate lr1 getting localized
      ResourceLocalizedEvent rle1 =
          new ResourceLocalizedEvent(lr1,
              new Path(hierarchicalPath1.toUri().toString() +
                  Path.SEPARATOR + "file1"), 120);
      tracker.handle(rle1);
      // Localization successful.
      LocalResourceRequest lr2 = createLocalResourceRequest(user, 3, 3,
          LocalResourceVisibility.PUBLIC);
      // Container 1 requests lr2 to be localized.
      ResourceEvent reqEvent2 =
          new ResourceRequestEvent(lr2, LocalResourceVisibility.PUBLIC, lc1);
      tracker.handle(reqEvent2);
      Path hierarchicalPath2 =
          tracker.getPathForLocalization(lr2, localDir).getParent();
      // localization failed.
      ResourceFailedLocalizationEvent rfe2 =
          new ResourceFailedLocalizationEvent(
              lr2, new Exception("Test").toString());
      tracker.handle(rfe2);
      /*
       * The path returned for two localization should be different because we
       * are limiting one file per sub-directory.
       */
      Assert.assertNotSame(hierarchicalPath1, hierarchicalPath2);
      LocalResourceRequest lr3 = createLocalResourceRequest(user, 2, 2,
          LocalResourceVisibility.PUBLIC);
      ResourceEvent reqEvent3 = new ResourceRequestEvent(lr3,
          LocalResourceVisibility.PUBLIC, lc1);
      tracker.handle(reqEvent3);
      Path hierarchicalPath3 =
          tracker.getPathForLocalization(lr3, localDir).getParent();
      // localization successful
      ResourceLocalizedEvent rle3 =
          new ResourceLocalizedEvent(lr3, new Path(hierarchicalPath3.toUri()
              .toString() + Path.SEPARATOR + "file3"), 120);
      tracker.handle(rle3);
      // Verifying that path created is inside the subdirectory
      Assert.assertEquals(hierarchicalPath3.toUri().toString(),
          hierarchicalPath1.toUri().toString() + Path.SEPARATOR + "0");
      // Container 1 releases resource lr1
      ResourceEvent relEvent1 = new ResourceReleaseEvent(lr1, cId1);
      tracker.handle(relEvent1);
      // Validate the file counts now
      int resources = 0;
      Iterator<LocalizedResource> iter = tracker.iterator();
      while (iter.hasNext()) {
        iter.next();
        resources++;
      }
      // There should be only two resources lr1 and lr3 now.
      Assert.assertEquals(2, resources);
      // Now simulate cache cleanup - removes unused resources.
      iter = tracker.iterator();
      while (iter.hasNext()) {
        LocalizedResource rsrc = iter.next();
        if (rsrc.getRefCount() == 0) {
          Assert.assertTrue(tracker.remove(rsrc, mockDelService));
          resources--;
        }
      }
      // lr1 is not used by anyone and will be removed, only lr3 will hang
      // around
      Assert.assertEquals(1, resources);
    } finally {
      if (dispatcher != null) {
        dispatcher.stop();
      }
    }
  }
  /**
   * Verifies that a successful localization drives the NM state store:
   * startResourceLocalization on request, finishResourceLocalization once the
   * resource is localized, and removeLocalizedResource when the resource is
   * removed from the tracker — each with the expected user/app/proto/path
   * arguments.
   */
  @Test
  @SuppressWarnings("unchecked")
  public void testStateStoreSuccessfulLocalization() throws Exception {
    final String user = "someuser";
    final ApplicationId appId = ApplicationId.newInstance(1, 1);
    // This is a random path. NO File creation will take place at this place.
    final Path localDir = new Path("/tmp");
    Configuration conf = new YarnConfiguration();
    DrainDispatcher dispatcher = null;
    dispatcher = createDispatcher(conf);
    EventHandler<LocalizerEvent> localizerEventHandler =
        mock(EventHandler.class);
    EventHandler<LocalizerEvent> containerEventHandler =
        mock(EventHandler.class);
    dispatcher.register(LocalizerEventType.class, localizerEventHandler);
    dispatcher.register(ContainerEventType.class, containerEventHandler);
    DeletionService mockDelService = mock(DeletionService.class);
    NMStateStoreService stateStore = mock(NMStateStoreService.class);
    try {
      LocalResourcesTracker tracker = new LocalResourcesTrackerImpl(user,
          appId, dispatcher, false, conf, stateStore);
      // Container 1 needs lr1 resource
      ContainerId cId1 = BuilderUtils.newContainerId(1, 1, 1, 1);
      LocalResourceRequest lr1 = createLocalResourceRequest(user, 1, 1,
          LocalResourceVisibility.APPLICATION);
      LocalizerContext lc1 = new LocalizerContext(user, cId1, null);
      // Container 1 requests lr1 to be localized
      ResourceEvent reqEvent1 = new ResourceRequestEvent(lr1,
          LocalResourceVisibility.APPLICATION, lc1);
      tracker.handle(reqEvent1);
      dispatcher.await();
      // Simulate the process of localization of lr1
      Path hierarchicalPath1 = tracker.getPathForLocalization(lr1, localDir);
      // Capture what the tracker reported to the state store on start.
      ArgumentCaptor<LocalResourceProto> localResourceCaptor =
          ArgumentCaptor.forClass(LocalResourceProto.class);
      ArgumentCaptor<Path> pathCaptor = ArgumentCaptor.forClass(Path.class);
      verify(stateStore).startResourceLocalization(eq(user), eq(appId),
          localResourceCaptor.capture(), pathCaptor.capture());
      LocalResourceProto lrProto = localResourceCaptor.getValue();
      Path localizedPath1 = pathCaptor.getValue();
      Assert.assertEquals(lr1,
          new LocalResourceRequest(new LocalResourcePBImpl(lrProto)));
      Assert.assertEquals(hierarchicalPath1, localizedPath1.getParent());
      // Simulate lr1 getting localized
      ResourceLocalizedEvent rle1 =
          new ResourceLocalizedEvent(lr1, pathCaptor.getValue(), 120);
      tracker.handle(rle1);
      dispatcher.await();
      // The finished localization must be recorded with the same resource
      // and the path that was handed out above.
      ArgumentCaptor<LocalizedResourceProto> localizedProtoCaptor =
          ArgumentCaptor.forClass(LocalizedResourceProto.class);
      verify(stateStore).finishResourceLocalization(eq(user), eq(appId),
          localizedProtoCaptor.capture());
      LocalizedResourceProto localizedProto = localizedProtoCaptor.getValue();
      Assert.assertEquals(lr1, new LocalResourceRequest(
          new LocalResourcePBImpl(localizedProto.getResource())));
      Assert.assertEquals(localizedPath1.toString(),
          localizedProto.getLocalPath());
      LocalizedResource localizedRsrc1 = tracker.getLocalizedResource(lr1);
      Assert.assertNotNull(localizedRsrc1);
      // simulate release and retention processing
      tracker.handle(new ResourceReleaseEvent(lr1, cId1));
      dispatcher.await();
      boolean removeResult = tracker.remove(localizedRsrc1, mockDelService);
      Assert.assertTrue(removeResult);
      verify(stateStore).removeLocalizedResource(eq(user), eq(appId),
          eq(localizedPath1));
    } finally {
      if (dispatcher != null) {
        dispatcher.stop();
      }
    }
  }
  /**
   * Verifies that a FAILED localization cleans up the NM state store: after
   * startResourceLocalization has been recorded, a localization failure must
   * trigger removeLocalizedResource for the same user/app/path.
   */
  @Test
  @SuppressWarnings("unchecked")
  public void testStateStoreFailedLocalization() throws Exception {
    final String user = "someuser";
    final ApplicationId appId = ApplicationId.newInstance(1, 1);
    // This is a random path. NO File creation will take place at this place.
    final Path localDir = new Path("/tmp");
    Configuration conf = new YarnConfiguration();
    DrainDispatcher dispatcher = null;
    dispatcher = createDispatcher(conf);
    EventHandler<LocalizerEvent> localizerEventHandler =
        mock(EventHandler.class);
    EventHandler<LocalizerEvent> containerEventHandler =
        mock(EventHandler.class);
    dispatcher.register(LocalizerEventType.class, localizerEventHandler);
    dispatcher.register(ContainerEventType.class, containerEventHandler);
    NMStateStoreService stateStore = mock(NMStateStoreService.class);
    try {
      LocalResourcesTracker tracker = new LocalResourcesTrackerImpl(user,
          appId, dispatcher, false, conf, stateStore);
      // Container 1 needs lr1 resource
      ContainerId cId1 = BuilderUtils.newContainerId(1, 1, 1, 1);
      LocalResourceRequest lr1 = createLocalResourceRequest(user, 1, 1,
          LocalResourceVisibility.APPLICATION);
      LocalizerContext lc1 = new LocalizerContext(user, cId1, null);
      // Container 1 requests lr1 to be localized
      ResourceEvent reqEvent1 = new ResourceRequestEvent(lr1,
          LocalResourceVisibility.APPLICATION, lc1);
      tracker.handle(reqEvent1);
      dispatcher.await();
      // Simulate the process of localization of lr1
      Path hierarchicalPath1 = tracker.getPathForLocalization(lr1, localDir);
      // Capture the start-localization record so the failure path below can
      // be checked against the same localized path.
      ArgumentCaptor<LocalResourceProto> localResourceCaptor =
          ArgumentCaptor.forClass(LocalResourceProto.class);
      ArgumentCaptor<Path> pathCaptor = ArgumentCaptor.forClass(Path.class);
      verify(stateStore).startResourceLocalization(eq(user), eq(appId),
          localResourceCaptor.capture(), pathCaptor.capture());
      LocalResourceProto lrProto = localResourceCaptor.getValue();
      Path localizedPath1 = pathCaptor.getValue();
      Assert.assertEquals(lr1,
          new LocalResourceRequest(new LocalResourcePBImpl(lrProto)));
      Assert.assertEquals(hierarchicalPath1, localizedPath1.getParent());
      // Fail the localization and expect the state-store entry to be removed.
      ResourceFailedLocalizationEvent rfe1 =
          new ResourceFailedLocalizationEvent(
              lr1, new Exception("Test").toString());
      tracker.handle(rfe1);
      dispatcher.await();
      verify(stateStore).removeLocalizedResource(eq(user), eq(appId),
          eq(localizedPath1));
    } finally {
      if (dispatcher != null) {
        dispatcher.stop();
      }
    }
  }
  /**
   * Verifies recovery of a previously localized resource: a
   * ResourceRecoveredEvent must register the resource with the tracker, and
   * the next localization path handed out must continue numbering after the
   * recovered resource's directory ID.
   */
  @Test
  @SuppressWarnings("unchecked")
  public void testRecoveredResource() throws Exception {
    final String user = "someuser";
    final ApplicationId appId = ApplicationId.newInstance(1, 1);
    // This is a random path. NO File creation will take place at this place.
    final Path localDir = new Path("/tmp/localdir");
    Configuration conf = new YarnConfiguration();
    DrainDispatcher dispatcher = null;
    dispatcher = createDispatcher(conf);
    EventHandler<LocalizerEvent> localizerEventHandler =
        mock(EventHandler.class);
    EventHandler<LocalizerEvent> containerEventHandler =
        mock(EventHandler.class);
    dispatcher.register(LocalizerEventType.class, localizerEventHandler);
    dispatcher.register(ContainerEventType.class, containerEventHandler);
    NMStateStoreService stateStore = mock(NMStateStoreService.class);
    try {
      LocalResourcesTracker tracker = new LocalResourcesTrackerImpl(user,
          appId, dispatcher, false, conf, stateStore);
      // Container 1 needs lr1 resource
      ContainerId cId1 = BuilderUtils.newContainerId(1, 1, 1, 1);
      LocalResourceRequest lr1 = createLocalResourceRequest(user, 1, 1,
          LocalResourceVisibility.APPLICATION);
      Assert.assertNull(tracker.getLocalizedResource(lr1));
      // Recover lr1 as if it had been localized into directory "52" before
      // a restart.
      final long localizedId1 = 52;
      Path hierarchicalPath1 = new Path(localDir,
          Long.toString(localizedId1));
      Path localizedPath1 = new Path(hierarchicalPath1, "resource.jar");
      tracker.handle(new ResourceRecoveredEvent(lr1, localizedPath1, 120));
      dispatcher.await();
      Assert.assertNotNull(tracker.getLocalizedResource(lr1));
      // verify new paths reflect recovery of previous resources
      LocalResourceRequest lr2 = createLocalResourceRequest(user, 2, 2,
          LocalResourceVisibility.APPLICATION);
      LocalizerContext lc2 = new LocalizerContext(user, cId1, null);
      ResourceEvent reqEvent2 = new ResourceRequestEvent(lr2,
          LocalResourceVisibility.APPLICATION, lc2);
      tracker.handle(reqEvent2);
      dispatcher.await();
      Path hierarchicalPath2 = tracker.getPathForLocalization(lr2, localDir);
      long localizedId2 = Long.parseLong(hierarchicalPath2.getName());
      // The directory counter must resume right after the recovered ID.
      Assert.assertEquals(localizedId1 + 1, localizedId2);
    } finally {
      if (dispatcher != null) {
        dispatcher.stop();
      }
    }
  }
  /**
   * Verifies that recovering resources restores the per-directory file counts
   * in the {@link LocalCacheDirectoryManager}: resources recovered under
   * nested cache sub-directories ("4/2", "4/3", "4") must be reflected in the
   * corresponding directory counts while the root stays at zero.
   */
  @Test
  @SuppressWarnings("unchecked")
  public void testRecoveredResourceWithDirCacheMgr() throws Exception {
    final String user = "someuser";
    final ApplicationId appId = ApplicationId.newInstance(1, 1);
    // This is a random path. NO File creation will take place at this place.
    final Path localDirRoot = new Path("/tmp/localdir");
    Configuration conf = new YarnConfiguration();
    DrainDispatcher dispatcher = null;
    dispatcher = createDispatcher(conf);
    EventHandler<LocalizerEvent> localizerEventHandler =
        mock(EventHandler.class);
    EventHandler<LocalizerEvent> containerEventHandler =
        mock(EventHandler.class);
    dispatcher.register(LocalizerEventType.class, localizerEventHandler);
    dispatcher.register(ContainerEventType.class, containerEventHandler);
    NMStateStoreService stateStore = mock(NMStateStoreService.class);
    try {
      LocalResourcesTrackerImpl tracker = new LocalResourcesTrackerImpl(user,
          appId, dispatcher, true, conf, stateStore);
      // lr1 recovered under sub-directory "4/2".
      LocalResourceRequest lr1 = createLocalResourceRequest(user, 1, 1,
          LocalResourceVisibility.PUBLIC);
      Assert.assertNull(tracker.getLocalizedResource(lr1));
      final long localizedId1 = 52;
      Path hierarchicalPath1 = new Path(localDirRoot + "/4/2",
          Long.toString(localizedId1));
      Path localizedPath1 = new Path(hierarchicalPath1, "resource.jar");
      tracker.handle(new ResourceRecoveredEvent(lr1, localizedPath1, 120));
      dispatcher.await();
      Assert.assertNotNull(tracker.getLocalizedResource(lr1));
      LocalCacheDirectoryManager dirMgrRoot =
          tracker.getDirectoryManager(localDirRoot);
      Assert.assertEquals(0, dirMgrRoot.getDirectory("").getCount());
      Assert.assertEquals(1, dirMgrRoot.getDirectory("4/2").getCount());
      // lr2 recovered in the same sub-directory bumps its count to 2.
      LocalResourceRequest lr2 = createLocalResourceRequest(user, 2, 2,
          LocalResourceVisibility.PUBLIC);
      Assert.assertNull(tracker.getLocalizedResource(lr2));
      final long localizedId2 = localizedId1 + 1;
      Path hierarchicalPath2 = new Path(localDirRoot + "/4/2",
          Long.toString(localizedId2));
      Path localizedPath2 = new Path(hierarchicalPath2, "resource.jar");
      tracker.handle(new ResourceRecoveredEvent(lr2, localizedPath2, 120));
      dispatcher.await();
      Assert.assertNotNull(tracker.getLocalizedResource(lr2));
      Assert.assertEquals(0, dirMgrRoot.getDirectory("").getCount());
      Assert.assertEquals(2, dirMgrRoot.getDirectory("4/2").getCount());
      // lr3 recovered under a sibling sub-directory "4/3".
      LocalResourceRequest lr3 = createLocalResourceRequest(user, 3, 3,
          LocalResourceVisibility.PUBLIC);
      Assert.assertNull(tracker.getLocalizedResource(lr3));
      final long localizedId3 = 128;
      Path hierarchicalPath3 = new Path(localDirRoot + "/4/3",
          Long.toString(localizedId3));
      Path localizedPath3 = new Path(hierarchicalPath3, "resource.jar");
      tracker.handle(new ResourceRecoveredEvent(lr3, localizedPath3, 120));
      dispatcher.await();
      Assert.assertNotNull(tracker.getLocalizedResource(lr3));
      Assert.assertEquals(0, dirMgrRoot.getDirectory("").getCount());
      Assert.assertEquals(2, dirMgrRoot.getDirectory("4/2").getCount());
      Assert.assertEquals(1, dirMgrRoot.getDirectory("4/3").getCount());
      // lr4 recovered directly under "4" (the parent directory itself).
      LocalResourceRequest lr4 = createLocalResourceRequest(user, 4, 4,
          LocalResourceVisibility.PUBLIC);
      Assert.assertNull(tracker.getLocalizedResource(lr4));
      final long localizedId4 = 256;
      Path hierarchicalPath4 = new Path(localDirRoot + "/4",
          Long.toString(localizedId4));
      Path localizedPath4 = new Path(hierarchicalPath4, "resource.jar");
      tracker.handle(new ResourceRecoveredEvent(lr4, localizedPath4, 120));
      dispatcher.await();
      Assert.assertNotNull(tracker.getLocalizedResource(lr4));
      Assert.assertEquals(0, dirMgrRoot.getDirectory("").getCount());
      Assert.assertEquals(1, dirMgrRoot.getDirectory("4").getCount());
      Assert.assertEquals(2, dirMgrRoot.getDirectory("4/2").getCount());
      Assert.assertEquals(1, dirMgrRoot.getDirectory("4/3").getCount());
    } finally {
      if (dispatcher != null) {
        dispatcher.stop();
      }
    }
  }
private boolean createdummylocalizefile(Path path) {
boolean ret = false;
File file = new File(path.toUri().getRawPath().toString());
try {
ret = file.createNewFile();
} catch (IOException e) {
e.printStackTrace();
}
return ret;
}
private void verifyTrackedResourceCount(LocalResourcesTracker tracker,
int expected) {
int count = 0;
Iterator<LocalizedResource> iter = tracker.iterator();
while (iter.hasNext()) {
iter.next();
count++;
}
Assert.assertEquals("Tracker resource count does not match", expected,
count);
}
private LocalResourceRequest createLocalResourceRequest(String user, int i,
long ts, LocalResourceVisibility vis) {
final LocalResourceRequest req =
new LocalResourceRequest(new Path("file:///tmp/" + user + "/rsrc" + i),
ts + i * 2000, LocalResourceType.FILE, vis, null);
return req;
}
private LocalizedResource createLocalizedResource(LocalResourceRequest req,
Dispatcher dispatcher) {
LocalizedResource lr = new LocalizedResource(req, dispatcher);
return lr;
}
private DrainDispatcher createDispatcher(Configuration conf) {
DrainDispatcher dispatcher = new DrainDispatcher();
dispatcher.init(conf);
dispatcher.start();
return dispatcher;
}
}
|
googleapis/google-cloud-java | 35,478 | java-assured-workloads/proto-google-cloud-assured-workloads-v1beta1/src/main/java/com/google/cloud/assuredworkloads/v1beta1/ListWorkloadsRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/assuredworkloads/v1beta1/assuredworkloads.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.assuredworkloads.v1beta1;
/**
*
*
* <pre>
* Request for fetching workloads in an organization.
* </pre>
*
* Protobuf type {@code google.cloud.assuredworkloads.v1beta1.ListWorkloadsRequest}
*/
public final class ListWorkloadsRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.assuredworkloads.v1beta1.ListWorkloadsRequest)
ListWorkloadsRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListWorkloadsRequest.newBuilder() to construct.
private ListWorkloadsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListWorkloadsRequest() {
parent_ = "";
pageToken_ = "";
filter_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListWorkloadsRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.assuredworkloads.v1beta1.AssuredworkloadsProto
.internal_static_google_cloud_assuredworkloads_v1beta1_ListWorkloadsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.assuredworkloads.v1beta1.AssuredworkloadsProto
.internal_static_google_cloud_assuredworkloads_v1beta1_ListWorkloadsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.assuredworkloads.v1beta1.ListWorkloadsRequest.class,
com.google.cloud.assuredworkloads.v1beta1.ListWorkloadsRequest.Builder.class);
}
public static final int PARENT_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. Parent Resource to list workloads from.
* Must be of the form `organizations/{org_id}/locations/{location}`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
@java.lang.Override
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. Parent Resource to list workloads from.
* Must be of the form `organizations/{org_id}/locations/{location}`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int PAGE_SIZE_FIELD_NUMBER = 2;
private int pageSize_ = 0;
/**
*
*
* <pre>
* Page size.
* </pre>
*
* <code>int32 page_size = 2;</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
public static final int PAGE_TOKEN_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* Page token returned from previous request. Page token contains context from
* previous request. Page token needs to be passed in the second and following
* requests.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return The pageToken.
*/
@java.lang.Override
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* Page token returned from previous request. Page token contains context from
* previous request. Page token needs to be passed in the second and following
* requests.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return The bytes for pageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int FILTER_FIELD_NUMBER = 4;
@SuppressWarnings("serial")
private volatile java.lang.Object filter_ = "";
/**
*
*
* <pre>
* A custom filter for filtering by properties of a workload. At this time,
* only filtering by labels is supported.
* </pre>
*
* <code>string filter = 4;</code>
*
* @return The filter.
*/
@java.lang.Override
public java.lang.String getFilter() {
java.lang.Object ref = filter_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
filter_ = s;
return s;
}
}
/**
*
*
* <pre>
* A custom filter for filtering by properties of a workload. At this time,
* only filtering by labels is supported.
* </pre>
*
* <code>string filter = 4;</code>
*
* @return The bytes for filter.
*/
@java.lang.Override
public com.google.protobuf.ByteString getFilterBytes() {
java.lang.Object ref = filter_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
filter_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
}
if (pageSize_ != 0) {
output.writeInt32(2, pageSize_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 4, filter_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
}
if (pageSize_ != 0) {
size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, filter_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.assuredworkloads.v1beta1.ListWorkloadsRequest)) {
return super.equals(obj);
}
com.google.cloud.assuredworkloads.v1beta1.ListWorkloadsRequest other =
(com.google.cloud.assuredworkloads.v1beta1.ListWorkloadsRequest) obj;
if (!getParent().equals(other.getParent())) return false;
if (getPageSize() != other.getPageSize()) return false;
if (!getPageToken().equals(other.getPageToken())) return false;
if (!getFilter().equals(other.getFilter())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + PARENT_FIELD_NUMBER;
hash = (53 * hash) + getParent().hashCode();
hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
hash = (53 * hash) + getPageSize();
hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getPageToken().hashCode();
hash = (37 * hash) + FILTER_FIELD_NUMBER;
hash = (53 * hash) + getFilter().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.assuredworkloads.v1beta1.ListWorkloadsRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.assuredworkloads.v1beta1.ListWorkloadsRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.assuredworkloads.v1beta1.ListWorkloadsRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.assuredworkloads.v1beta1.ListWorkloadsRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.assuredworkloads.v1beta1.ListWorkloadsRequest parseFrom(
byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.assuredworkloads.v1beta1.ListWorkloadsRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.assuredworkloads.v1beta1.ListWorkloadsRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.assuredworkloads.v1beta1.ListWorkloadsRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.assuredworkloads.v1beta1.ListWorkloadsRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.assuredworkloads.v1beta1.ListWorkloadsRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.assuredworkloads.v1beta1.ListWorkloadsRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.assuredworkloads.v1beta1.ListWorkloadsRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.assuredworkloads.v1beta1.ListWorkloadsRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request for fetching workloads in an organization.
* </pre>
*
* Protobuf type {@code google.cloud.assuredworkloads.v1beta1.ListWorkloadsRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.assuredworkloads.v1beta1.ListWorkloadsRequest)
com.google.cloud.assuredworkloads.v1beta1.ListWorkloadsRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.assuredworkloads.v1beta1.AssuredworkloadsProto
.internal_static_google_cloud_assuredworkloads_v1beta1_ListWorkloadsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.assuredworkloads.v1beta1.AssuredworkloadsProto
.internal_static_google_cloud_assuredworkloads_v1beta1_ListWorkloadsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.assuredworkloads.v1beta1.ListWorkloadsRequest.class,
com.google.cloud.assuredworkloads.v1beta1.ListWorkloadsRequest.Builder.class);
}
// Construct using com.google.cloud.assuredworkloads.v1beta1.ListWorkloadsRequest.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
parent_ = "";
pageSize_ = 0;
pageToken_ = "";
filter_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.assuredworkloads.v1beta1.AssuredworkloadsProto
.internal_static_google_cloud_assuredworkloads_v1beta1_ListWorkloadsRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.assuredworkloads.v1beta1.ListWorkloadsRequest
getDefaultInstanceForType() {
return com.google.cloud.assuredworkloads.v1beta1.ListWorkloadsRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.assuredworkloads.v1beta1.ListWorkloadsRequest build() {
com.google.cloud.assuredworkloads.v1beta1.ListWorkloadsRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.assuredworkloads.v1beta1.ListWorkloadsRequest buildPartial() {
com.google.cloud.assuredworkloads.v1beta1.ListWorkloadsRequest result =
new com.google.cloud.assuredworkloads.v1beta1.ListWorkloadsRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(
com.google.cloud.assuredworkloads.v1beta1.ListWorkloadsRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.parent_ = parent_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.pageSize_ = pageSize_;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.pageToken_ = pageToken_;
}
if (((from_bitField0_ & 0x00000008) != 0)) {
result.filter_ = filter_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.assuredworkloads.v1beta1.ListWorkloadsRequest) {
return mergeFrom((com.google.cloud.assuredworkloads.v1beta1.ListWorkloadsRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.assuredworkloads.v1beta1.ListWorkloadsRequest other) {
if (other
== com.google.cloud.assuredworkloads.v1beta1.ListWorkloadsRequest.getDefaultInstance())
return this;
if (!other.getParent().isEmpty()) {
parent_ = other.parent_;
bitField0_ |= 0x00000001;
onChanged();
}
if (other.getPageSize() != 0) {
setPageSize(other.getPageSize());
}
if (!other.getPageToken().isEmpty()) {
pageToken_ = other.pageToken_;
bitField0_ |= 0x00000004;
onChanged();
}
if (!other.getFilter().isEmpty()) {
filter_ = other.filter_;
bitField0_ |= 0x00000008;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
parent_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 16:
{
pageSize_ = input.readInt32();
bitField0_ |= 0x00000002;
break;
} // case 16
case 26:
{
pageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000004;
break;
} // case 26
case 34:
{
filter_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000008;
break;
} // case 34
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. Parent Resource to list workloads from.
* Must be of the form `organizations/{org_id}/locations/{location}`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. Parent Resource to list workloads from.
* Must be of the form `organizations/{org_id}/locations/{location}`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. Parent Resource to list workloads from.
* Must be of the form `organizations/{org_id}/locations/{location}`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
public Builder setParent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Parent Resource to list workloads from.
* Must be of the form `organizations/{org_id}/locations/{location}`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearParent() {
parent_ = getDefaultInstance().getParent();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Parent Resource to list workloads from.
* Must be of the form `organizations/{org_id}/locations/{location}`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
*/
public Builder setParentBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private int pageSize_;
/**
*
*
* <pre>
* Page size.
* </pre>
*
* <code>int32 page_size = 2;</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
/**
*
*
* <pre>
* Page size.
* </pre>
*
* <code>int32 page_size = 2;</code>
*
* @param value The pageSize to set.
* @return This builder for chaining.
*/
public Builder setPageSize(int value) {
pageSize_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Page size.
* </pre>
*
* <code>int32 page_size = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearPageSize() {
bitField0_ = (bitField0_ & ~0x00000002);
pageSize_ = 0;
onChanged();
return this;
}
private java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* Page token returned from previous request. Page token contains context from
* previous request. Page token needs to be passed in the second and following
* requests.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return The pageToken.
*/
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Page token returned from previous request. Page token contains context from
* previous request. Page token needs to be passed in the second and following
* requests.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return The bytes for pageToken.
*/
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Page token returned from previous request. Page token contains context from
* previous request. Page token needs to be passed in the second and following
* requests.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @param value The pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
pageToken_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Page token returned from previous request. Page token contains context from
* previous request. Page token needs to be passed in the second and following
* requests.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return This builder for chaining.
*/
public Builder clearPageToken() {
pageToken_ = getDefaultInstance().getPageToken();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
*
*
* <pre>
* Page token returned from previous request. Page token contains context from
* previous request. Page token needs to be passed in the second and following
* requests.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @param value The bytes for pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
pageToken_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
private java.lang.Object filter_ = "";
/**
*
*
* <pre>
* A custom filter for filtering by properties of a workload. At this time,
* only filtering by labels is supported.
* </pre>
*
* <code>string filter = 4;</code>
*
* @return The filter.
*/
public java.lang.String getFilter() {
java.lang.Object ref = filter_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
filter_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* A custom filter for filtering by properties of a workload. At this time,
* only filtering by labels is supported.
* </pre>
*
* <code>string filter = 4;</code>
*
* @return The bytes for filter.
*/
public com.google.protobuf.ByteString getFilterBytes() {
java.lang.Object ref = filter_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
filter_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* A custom filter for filtering by properties of a workload. At this time,
* only filtering by labels is supported.
* </pre>
*
* <code>string filter = 4;</code>
*
* @param value The filter to set.
* @return This builder for chaining.
*/
public Builder setFilter(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
filter_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
*
*
* <pre>
* A custom filter for filtering by properties of a workload. At this time,
* only filtering by labels is supported.
* </pre>
*
* <code>string filter = 4;</code>
*
* @return This builder for chaining.
*/
public Builder clearFilter() {
filter_ = getDefaultInstance().getFilter();
bitField0_ = (bitField0_ & ~0x00000008);
onChanged();
return this;
}
/**
*
*
* <pre>
* A custom filter for filtering by properties of a workload. At this time,
* only filtering by labels is supported.
* </pre>
*
* <code>string filter = 4;</code>
*
* @param value The bytes for filter to set.
* @return This builder for chaining.
*/
public Builder setFilterBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
filter_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.assuredworkloads.v1beta1.ListWorkloadsRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.assuredworkloads.v1beta1.ListWorkloadsRequest)
private static final com.google.cloud.assuredworkloads.v1beta1.ListWorkloadsRequest
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.assuredworkloads.v1beta1.ListWorkloadsRequest();
}
public static com.google.cloud.assuredworkloads.v1beta1.ListWorkloadsRequest
getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ListWorkloadsRequest> PARSER =
new com.google.protobuf.AbstractParser<ListWorkloadsRequest>() {
@java.lang.Override
public ListWorkloadsRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ListWorkloadsRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListWorkloadsRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.assuredworkloads.v1beta1.ListWorkloadsRequest
getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
google/conscrypt | 35,806 | common/src/main/java/org/conscrypt/Conscrypt.java | /*
* Copyright (C) 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.conscrypt;
import org.conscrypt.io.IoUtils;
import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.nio.ByteBuffer;
import java.security.KeyManagementException;
import java.security.PrivateKey;
import java.security.Provider;
import java.security.cert.X509Certificate;
import java.util.Properties;
import javax.net.ssl.HostnameVerifier;
import javax.net.ssl.HttpsURLConnection;
import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLContextSpi;
import javax.net.ssl.SSLEngine;
import javax.net.ssl.SSLEngineResult;
import javax.net.ssl.SSLException;
import javax.net.ssl.SSLServerSocketFactory;
import javax.net.ssl.SSLSession;
import javax.net.ssl.SSLSessionContext;
import javax.net.ssl.SSLSocket;
import javax.net.ssl.SSLSocketFactory;
import javax.net.ssl.TrustManager;
import javax.net.ssl.X509TrustManager;
/**
 * Core API for creating and configuring all Conscrypt types.
 *
 * <p>All members are static; this class is not instantiable. Methods that accept a JSSE type
 * ({@link SSLSocket}, {@link SSLEngine}, {@link TrustManager}, ...) generally require a
 * Conscrypt-provided instance and throw {@link IllegalArgumentException} otherwise.
 */
@SuppressWarnings("unused")
public final class Conscrypt {
    private Conscrypt() {}

    /**
     * Returns {@code true} if the Conscrypt native library has been successfully loaded.
     */
    public static boolean isAvailable() {
        try {
            checkAvailability();
            return true;
        } catch (Throwable e) {
            // Any failure (UnsatisfiedLinkError, etc.) simply means "not available".
            return false;
        }
    }

    /**
     * Return {@code true} if BoringSSL has been built in FIPS mode.
     */
    public static boolean isBoringSslFIPSBuild() {
        try {
            return NativeCrypto.usesBoringSsl_FIPS_mode();
        } catch (Throwable e) {
            return false;
        }
    }

    /** Immutable semantic version (major.minor.patch) of this Conscrypt distribution. */
    public static class Version {
        private final int major;
        private final int minor;
        private final int patch;

        private Version(int major, int minor, int patch) {
            this.major = major;
            this.minor = minor;
            this.patch = patch;
        }

        public int major() {
            return major;
        }

        public int minor() {
            return minor;
        }

        public int patch() {
            return patch;
        }
    }

    // Loaded once from conscrypt.properties; null when version info is unavailable.
    private static final Version VERSION;

    static {
        int major = -1;
        int minor = -1;
        int patch = -1;
        InputStream stream = null;
        try {
            stream = Conscrypt.class.getResourceAsStream("conscrypt.properties");
            if (stream != null) {
                Properties props = new Properties();
                props.load(stream);
                major = Integer.parseInt(props.getProperty("org.conscrypt.version.major", "-1"));
                minor = Integer.parseInt(props.getProperty("org.conscrypt.version.minor", "-1"));
                patch = Integer.parseInt(props.getProperty("org.conscrypt.version.patch", "-1"));
            }
        } catch (IOException | NumberFormatException e) {
            // Robustness fix: a malformed version value previously escaped the static block as
            // a NumberFormatException, aborting class initialization with an
            // ExceptionInInitializerError. Now it degrades to VERSION == null, matching the
            // behavior for a missing properties file.
            // TODO(prb): This should probably be fatal or have some fallback behaviour
        } finally {
            IoUtils.closeQuietly(stream);
        }
        if ((major >= 0) && (minor >= 0) && (patch >= 0)) {
            VERSION = new Version(major, minor, patch);
        } else {
            VERSION = null;
        }
    }

    /**
     * Returns the version of this distribution of Conscrypt. If version information is
     * unavailable, returns {@code null}.
     */
    public static Version version() {
        return VERSION;
    }

    /**
     * Checks that the Conscrypt support is available for the system.
     *
     * @throws UnsatisfiedLinkError if unavailable
     */
    public static void checkAvailability() {
        NativeCrypto.checkAvailability();
    }

    /**
     * Indicates whether the given {@link Provider} was created by this distribution of Conscrypt.
     */
    public static boolean isConscrypt(Provider provider) {
        return provider instanceof OpenSSLProvider;
    }

    /**
     * Constructs a new {@link Provider} with the default name.
     */
    public static Provider newProvider() {
        checkAvailability();
        return new OpenSSLProvider();
    }

    /**
     * Constructs a new {@link Provider} with the given name.
     *
     * @deprecated Use {@link #newProviderBuilder()} instead.
     */
    @Deprecated
    public static Provider newProvider(String providerName) {
        checkAvailability();
        return newProviderBuilder().setName(providerName).build();
    }

    /** Builder for a Conscrypt {@link Provider} with non-default configuration. */
    public static class ProviderBuilder {
        private String name = Platform.getDefaultProviderName();
        private boolean provideTrustManager = Platform.provideTrustManagerByDefault();
        private String defaultTlsProtocol = NativeCrypto.SUPPORTED_PROTOCOL_TLSV1_3;
        private boolean deprecatedTlsV1 = Platform.isTlsV1Deprecated();
        private boolean enabledTlsV1 = Platform.isTlsV1Supported();

        private ProviderBuilder() {}

        /**
         * Sets the name of the Provider to be built.
         */
        public ProviderBuilder setName(String name) {
            this.name = name;
            return this;
        }

        /**
         * Causes the returned provider to provide an implementation of
         * {@link javax.net.ssl.TrustManagerFactory}.
         * @deprecated Use provideTrustManager(true)
         */
        @Deprecated
        @SuppressWarnings("InlineMeSuggester")
        public ProviderBuilder provideTrustManager() {
            return provideTrustManager(true);
        }

        /**
         * Specifies whether the returned provider will provide an implementation of
         * {@link javax.net.ssl.TrustManagerFactory}.
         */
        public ProviderBuilder provideTrustManager(boolean provide) {
            this.provideTrustManager = provide;
            return this;
        }

        /**
         * Specifies what the default TLS protocol should be for SSLContext identifiers
         * {@code TLS}, {@code SSL}, and {@code Default}.
         */
        public ProviderBuilder defaultTlsProtocol(String defaultTlsProtocol) {
            this.defaultTlsProtocol = defaultTlsProtocol;
            return this;
        }

        /** Specifies whether TLS v1.0 and 1.1 should be deprecated */
        public ProviderBuilder isTlsV1Deprecated(boolean deprecatedTlsV1) {
            this.deprecatedTlsV1 = deprecatedTlsV1;
            return this;
        }

        /** Specifies whether TLS v1.0 and 1.1 should be enabled */
        public ProviderBuilder isTlsV1Enabled(boolean enabledTlsV1) {
            this.enabledTlsV1 = enabledTlsV1;
            return this;
        }

        /** Builds the configured {@link Provider}. */
        public Provider build() {
            return new OpenSSLProvider(
                    name, provideTrustManager, defaultTlsProtocol, deprecatedTlsV1, enabledTlsV1);
        }
    }

    /** Returns a new {@link ProviderBuilder} with platform-default settings. */
    public static ProviderBuilder newProviderBuilder() {
        return new ProviderBuilder();
    }

    /**
     * Returns the maximum length (in bytes) of an encrypted packet.
     */
    public static int maxEncryptedPacketLength() {
        return NativeConstants.SSL3_RT_MAX_PACKET_SIZE;
    }

    /**
     * Gets the default X.509 trust manager.
     */
    @ExperimentalApi
    public static X509TrustManager getDefaultX509TrustManager() throws KeyManagementException {
        checkAvailability();
        return SSLParametersImpl.getDefaultX509TrustManager();
    }

    /**
     * Indicates whether the given {@link SSLContext} was created by this distribution of Conscrypt.
     */
    public static boolean isConscrypt(SSLContext context) {
        return context.getProvider() instanceof OpenSSLProvider;
    }

    /**
     * Constructs a new instance of the preferred {@link SSLContextSpi}.
     */
    public static SSLContextSpi newPreferredSSLContextSpi() {
        checkAvailability();
        return OpenSSLContextImpl.getPreferred();
    }

    /**
     * Sets the client-side persistent cache to be used by the context.
     *
     * @throws IllegalArgumentException if the context is not a Conscrypt client context
     */
    public static void setClientSessionCache(SSLContext context, SSLClientSessionCache cache) {
        SSLSessionContext clientContext = context.getClientSessionContext();
        if (!(clientContext instanceof ClientSessionContext)) {
            throw new IllegalArgumentException(
                    "Not a conscrypt client context: " + clientContext.getClass().getName());
        }
        ((ClientSessionContext) clientContext).setPersistentCache(cache);
    }

    /**
     * Sets the server-side persistent cache to be used by the context.
     *
     * @throws IllegalArgumentException if the context is not a Conscrypt server context
     */
    public static void setServerSessionCache(SSLContext context, SSLServerSessionCache cache) {
        SSLSessionContext serverContext = context.getServerSessionContext();
        if (!(serverContext instanceof ServerSessionContext)) {
            // Bug fix: this message previously said "client context" (copy-paste from
            // setClientSessionCache), which misidentified the failing argument.
            throw new IllegalArgumentException(
                    "Not a conscrypt server context: " + serverContext.getClass().getName());
        }
        ((ServerSessionContext) serverContext).setPersistentCache(cache);
    }

    /**
     * Indicates whether the given {@link SSLSocketFactory} was created by this distribution of
     * Conscrypt.
     */
    public static boolean isConscrypt(SSLSocketFactory factory) {
        return factory instanceof OpenSSLSocketFactoryImpl;
    }

    private static OpenSSLSocketFactoryImpl toConscrypt(SSLSocketFactory factory) {
        if (!isConscrypt(factory)) {
            throw new IllegalArgumentException(
                    "Not a conscrypt socket factory: " + factory.getClass().getName());
        }
        return (OpenSSLSocketFactoryImpl) factory;
    }

    /**
     * Configures the default socket to be created for all socket factory instances.
     */
    @ExperimentalApi
    public static void setUseEngineSocketByDefault(boolean useEngineSocket) {
        OpenSSLSocketFactoryImpl.setUseEngineSocketByDefault(useEngineSocket);
        OpenSSLServerSocketFactoryImpl.setUseEngineSocketByDefault(useEngineSocket);
    }

    /**
     * Configures the socket to be created for the given socket factory instance.
     */
    @ExperimentalApi
    public static void setUseEngineSocket(SSLSocketFactory factory, boolean useEngineSocket) {
        toConscrypt(factory).setUseEngineSocket(useEngineSocket);
    }

    /**
     * Indicates whether the given {@link SSLServerSocketFactory} was created by this distribution
     * of Conscrypt.
     */
    public static boolean isConscrypt(SSLServerSocketFactory factory) {
        return factory instanceof OpenSSLServerSocketFactoryImpl;
    }

    private static OpenSSLServerSocketFactoryImpl toConscrypt(SSLServerSocketFactory factory) {
        if (!isConscrypt(factory)) {
            throw new IllegalArgumentException(
                    "Not a conscrypt server socket factory: " + factory.getClass().getName());
        }
        return (OpenSSLServerSocketFactoryImpl) factory;
    }

    /**
     * Configures the socket to be created for the given server socket factory instance.
     */
    @ExperimentalApi
    public static void setUseEngineSocket(SSLServerSocketFactory factory, boolean useEngineSocket) {
        toConscrypt(factory).setUseEngineSocket(useEngineSocket);
    }

    /**
     * Indicates whether the given {@link SSLSocket} was created by this distribution of Conscrypt.
     */
    public static boolean isConscrypt(SSLSocket socket) {
        return socket instanceof AbstractConscryptSocket;
    }

    private static AbstractConscryptSocket toConscrypt(SSLSocket socket) {
        if (!isConscrypt(socket)) {
            throw new IllegalArgumentException(
                    "Not a conscrypt socket: " + socket.getClass().getName());
        }
        return (AbstractConscryptSocket) socket;
    }

    /**
     * This method enables Server Name Indication (SNI) and overrides the hostname supplied
     * during socket creation. If the hostname is not a valid SNI hostname, the SNI extension
     * will be omitted from the handshake.
     *
     * @param socket the socket
     * @param hostname the desired SNI hostname, or null to disable
     */
    public static void setHostname(SSLSocket socket, String hostname) {
        toConscrypt(socket).setHostname(hostname);
    }

    /**
     * Returns either the hostname supplied during socket creation or via
     * {@link #setHostname(SSLSocket, String)}. No DNS resolution is attempted before
     * returning the hostname.
     */
    public static String getHostname(SSLSocket socket) {
        return toConscrypt(socket).getHostname();
    }

    /**
     * This method attempts to create a textual representation of the peer host or IP. Does
     * not perform a reverse DNS lookup. This is typically used during session creation.
     */
    public static String getHostnameOrIP(SSLSocket socket) {
        return toConscrypt(socket).getHostnameOrIP();
    }

    /**
     * This method enables session ticket support.
     *
     * @param socket the socket
     * @param useSessionTickets True to enable session tickets
     */
    public static void setUseSessionTickets(SSLSocket socket, boolean useSessionTickets) {
        toConscrypt(socket).setUseSessionTickets(useSessionTickets);
    }

    /**
     * Enables/disables TLS Channel ID for the given server-side socket.
     *
     * <p>This method needs to be invoked before the handshake starts.
     *
     * @param socket the socket
     * @param enabled Whether to enable channel ID.
     * @throws IllegalStateException if this is a client socket or if the handshake has already
     *         started.
     */
    public static void setChannelIdEnabled(SSLSocket socket, boolean enabled) {
        toConscrypt(socket).setChannelIdEnabled(enabled);
    }

    /**
     * Gets the TLS Channel ID for the given server-side socket. Channel ID is only available
     * once the handshake completes.
     *
     * @param socket the socket
     * @return channel ID or {@code null} if not available.
     * @throws IllegalStateException if this is a client socket or if the handshake has not yet
     *         completed.
     * @throws SSLException if channel ID is available but could not be obtained.
     */
    public static byte[] getChannelId(SSLSocket socket) throws SSLException {
        return toConscrypt(socket).getChannelId();
    }

    /**
     * Sets the {@link PrivateKey} to be used for TLS Channel ID by this client socket.
     *
     * <p>This method needs to be invoked before the handshake starts.
     *
     * @param socket the socket
     * @param privateKey private key (enables TLS Channel ID) or {@code null} for no key
     *         (disables TLS Channel ID).
     *         The private key must be an Elliptic Curve (EC) key based on the NIST P-256 curve
     *         (aka SECG secp256r1 or ANSI X9.62 prime256v1).
     * @throws IllegalStateException if this is a server socket or if the handshake has already
     *         started.
     */
    public static void setChannelIdPrivateKey(SSLSocket socket, PrivateKey privateKey) {
        toConscrypt(socket).setChannelIdPrivateKey(privateKey);
    }

    /**
     * Returns the ALPN protocol agreed upon by client and server.
     *
     * @param socket the socket
     * @return the selected protocol or {@code null} if no protocol was agreed upon.
     * @throws IllegalArgumentException if the socket is not a Conscrypt socket.
     */
    public static String getApplicationProtocol(SSLSocket socket) {
        if (isConscrypt(socket)) {
            return toConscrypt(socket).getApplicationProtocol();
        }
        // Fall back to the platform-bundled (com.android.org.conscrypt) implementation, which
        // is a distinct class loaded by a different class loader, so reflection is required.
        try {
            if (!Class.forName("com.android.org.conscrypt.AbstractConscryptSocket")
                            .isInstance(socket)) {
                throw new IllegalArgumentException(
                        "Not a conscrypt socket: " + socket.getClass().getName());
            }
            return invokeConscryptMethod(socket, "getApplicationProtocol");
        } catch (ClassNotFoundException e) {
            throw new IllegalArgumentException(
                    "Not a conscrypt socket: " + socket.getClass().getName(), e);
        }
    }

    /**
     * Sets an application-provided ALPN protocol selector. If provided, this will override
     * the list of protocols set by {@link #setApplicationProtocols(SSLSocket, String[])}.
     *
     * @param socket the socket
     * @param selector the ALPN protocol selector
     */
    public static void setApplicationProtocolSelector(
            SSLSocket socket, ApplicationProtocolSelector selector) {
        toConscrypt(socket).setApplicationProtocolSelector(selector);
    }

    /**
     * Sets the application-layer protocols (ALPN) in prioritization order.
     *
     * @param socket the socket being configured
     * @param protocols the protocols in descending order of preference. If empty, no protocol
     *         indications will be used. This array will be copied.
     * @throws IllegalArgumentException - if protocols is null, or if any element in a non-empty
     *         array is null or an empty (zero-length) string
     */
    public static void setApplicationProtocols(SSLSocket socket, String[] protocols) {
        toConscrypt(socket).setApplicationProtocols(protocols);
    }

    /**
     * Gets the application-layer protocols (ALPN) in prioritization order.
     *
     * @param socket the socket
     * @return the protocols in descending order of preference, or an empty array if protocol
     *         indications are not being used. Always returns a new array.
     */
    public static String[] getApplicationProtocols(SSLSocket socket) {
        return toConscrypt(socket).getApplicationProtocols();
    }

    /**
     * Returns the tls-unique channel binding value for this connection, per RFC 5929. This
     * will return {@code null} if there is no such value available, such as if the handshake
     * has not yet completed or this connection is closed.
     */
    public static byte[] getTlsUnique(SSLSocket socket) {
        return toConscrypt(socket).getTlsUnique();
    }

    /**
     * Exports a value derived from the TLS master secret as described in RFC 5705.
     *
     * @param label the label to use in calculating the exported value. This must be
     *         an ASCII-only string.
     * @param context the application-specific context value to use in calculating the
     *         exported value. This may be {@code null} to use no application context, which is
     *         treated differently than an empty byte array.
     * @param length the number of bytes of keying material to return.
     * @return a value of the specified length, or {@code null} if the handshake has not yet
     *         completed or the connection has been closed.
     * @throws SSLException if the value could not be exported.
     */
    public static byte[] exportKeyingMaterial(
            SSLSocket socket, String label, byte[] context, int length) throws SSLException {
        return toConscrypt(socket).exportKeyingMaterial(label, context, length);
    }

    /**
     * Indicates whether the given {@link SSLEngine} was created by this distribution of Conscrypt.
     */
    public static boolean isConscrypt(SSLEngine engine) {
        return engine instanceof AbstractConscryptEngine;
    }

    private static AbstractConscryptEngine toConscrypt(SSLEngine engine) {
        if (!isConscrypt(engine)) {
            throw new IllegalArgumentException(
                    "Not a conscrypt engine: " + engine.getClass().getName());
        }
        return (AbstractConscryptEngine) engine;
    }

    /**
     * Provides the given engine with the provided bufferAllocator.
     * @throws IllegalArgumentException if the provided engine is not a Conscrypt engine.
     * @throws IllegalStateException if the provided engine has already begun its handshake.
     */
    @ExperimentalApi
    public static void setBufferAllocator(SSLEngine engine, BufferAllocator bufferAllocator) {
        toConscrypt(engine).setBufferAllocator(bufferAllocator);
    }

    /**
     * Provides the given socket with the provided bufferAllocator. If the given socket is a
     * Conscrypt socket but does not use buffer allocators, this method does nothing.
     * @throws IllegalArgumentException if the provided socket is not a Conscrypt socket.
     * @throws IllegalStateException if the provided socket has already begun its handshake.
     */
    @ExperimentalApi
    public static void setBufferAllocator(SSLSocket socket, BufferAllocator bufferAllocator) {
        AbstractConscryptSocket s = toConscrypt(socket);
        if (s instanceof ConscryptEngineSocket) {
            ((ConscryptEngineSocket) s).setBufferAllocator(bufferAllocator);
        }
    }

    /**
     * Configures the default {@link BufferAllocator} to be used by all future
     * {@link SSLEngine} instances from this provider.
     */
    @ExperimentalApi
    public static void setDefaultBufferAllocator(BufferAllocator bufferAllocator) {
        ConscryptEngine.setDefaultBufferAllocator(bufferAllocator);
    }

    /**
     * This method enables Server Name Indication (SNI) and overrides the hostname supplied
     * during engine creation.
     *
     * @param engine the engine
     * @param hostname the desired SNI hostname, or {@code null} to disable
     */
    public static void setHostname(SSLEngine engine, String hostname) {
        toConscrypt(engine).setHostname(hostname);
    }

    /**
     * Returns either the hostname supplied during socket creation or via
     * {@link #setHostname(SSLEngine, String)}. No DNS resolution is attempted before
     * returning the hostname.
     */
    public static String getHostname(SSLEngine engine) {
        return toConscrypt(engine).getHostname();
    }

    /**
     * Returns the maximum overhead, in bytes, of sealing a record with SSL.
     */
    public static int maxSealOverhead(SSLEngine engine) {
        return toConscrypt(engine).maxSealOverhead();
    }

    /**
     * Sets a listener on the given engine for completion of the TLS handshake
     */
    public static void setHandshakeListener(SSLEngine engine, HandshakeListener handshakeListener) {
        toConscrypt(engine).setHandshakeListener(handshakeListener);
    }

    /**
     * Enables/disables TLS Channel ID for the given server-side engine.
     *
     * <p>This method needs to be invoked before the handshake starts.
     *
     * @param engine the engine
     * @param enabled Whether to enable channel ID.
     * @throws IllegalStateException if this is a client engine or if the handshake has already
     *         started.
     */
    public static void setChannelIdEnabled(SSLEngine engine, boolean enabled) {
        toConscrypt(engine).setChannelIdEnabled(enabled);
    }

    /**
     * Gets the TLS Channel ID for the given server-side engine. Channel ID is only available
     * once the handshake completes.
     *
     * @param engine the engine
     * @return channel ID or {@code null} if not available.
     * @throws IllegalStateException if this is a client engine or if the handshake has not yet
     *         completed.
     * @throws SSLException if channel ID is available but could not be obtained.
     */
    public static byte[] getChannelId(SSLEngine engine) throws SSLException {
        return toConscrypt(engine).getChannelId();
    }

    /**
     * Sets the {@link PrivateKey} to be used for TLS Channel ID by this client engine.
     *
     * <p>This method needs to be invoked before the handshake starts.
     *
     * @param engine the engine
     * @param privateKey private key (enables TLS Channel ID) or {@code null} for no key
     *         (disables TLS Channel ID).
     *         The private key must be an Elliptic Curve (EC) key based on the NIST P-256 curve
     *         (aka SECG secp256r1 or ANSI X9.62 prime256v1).
     * @throws IllegalStateException if this is a server engine or if the handshake has already
     *         started.
     */
    public static void setChannelIdPrivateKey(SSLEngine engine, PrivateKey privateKey) {
        toConscrypt(engine).setChannelIdPrivateKey(privateKey);
    }

    /**
     * Extended unwrap method for multiple source and destination buffers.
     *
     * @param engine the target engine for the unwrap
     * @param srcs the source buffers
     * @param dsts the destination buffers
     * @return the result of the unwrap operation
     * @throws SSLException thrown if an SSL error occurred
     */
    public static SSLEngineResult unwrap(SSLEngine engine, final ByteBuffer[] srcs,
            final ByteBuffer[] dsts) throws SSLException {
        return toConscrypt(engine).unwrap(srcs, dsts);
    }

    /**
     * Extended unwrap method for multiple source and destination buffers.
     *
     * @param engine the target engine for the unwrap.
     * @param srcs the source buffers
     * @param srcsOffset the offset in the {@code srcs} array of the first source buffer
     * @param srcsLength the number of source buffers starting at {@code srcsOffset}
     * @param dsts the destination buffers
     * @param dstsOffset the offset in the {@code dsts} array of the first destination buffer
     * @param dstsLength the number of destination buffers starting at {@code dstsOffset}
     * @return the result of the unwrap operation
     * @throws SSLException thrown if an SSL error occurred
     */
    public static SSLEngineResult unwrap(SSLEngine engine, final ByteBuffer[] srcs, int srcsOffset,
            final int srcsLength, final ByteBuffer[] dsts, final int dstsOffset,
            final int dstsLength) throws SSLException {
        return toConscrypt(engine).unwrap(
                srcs, srcsOffset, srcsLength, dsts, dstsOffset, dstsLength);
    }

    /**
     * This method enables session ticket support.
     *
     * @param engine the engine
     * @param useSessionTickets True to enable session tickets
     */
    public static void setUseSessionTickets(SSLEngine engine, boolean useSessionTickets) {
        toConscrypt(engine).setUseSessionTickets(useSessionTickets);
    }

    /**
     * Sets the application-layer protocols (ALPN) in prioritization order.
     *
     * @param engine the engine being configured
     * @param protocols the protocols in descending order of preference. If empty, no protocol
     *         indications will be used. This array will be copied.
     * @throws IllegalArgumentException - if protocols is null, or if any element in a non-empty
     *         array is null or an empty (zero-length) string
     */
    public static void setApplicationProtocols(SSLEngine engine, String[] protocols) {
        toConscrypt(engine).setApplicationProtocols(protocols);
    }

    /**
     * Gets the application-layer protocols (ALPN) in prioritization order.
     *
     * @param engine the engine
     * @return the protocols in descending order of preference, or an empty array if protocol
     *         indications are not being used. Always returns a new array.
     */
    public static String[] getApplicationProtocols(SSLEngine engine) {
        return toConscrypt(engine).getApplicationProtocols();
    }

    /**
     * Sets an application-provided ALPN protocol selector. If provided, this will override
     * the list of protocols set by {@link #setApplicationProtocols(SSLEngine, String[])}.
     *
     * @param engine the engine
     * @param selector the ALPN protocol selector
     */
    public static void setApplicationProtocolSelector(
            SSLEngine engine, ApplicationProtocolSelector selector) {
        toConscrypt(engine).setApplicationProtocolSelector(selector);
    }

    /**
     * Returns the ALPN protocol agreed upon by client and server.
     *
     * @param engine the engine
     * @return the selected protocol or {@code null} if no protocol was agreed upon.
     * @throws IllegalArgumentException if the engine is not a Conscrypt engine.
     */
    public static String getApplicationProtocol(SSLEngine engine) {
        if (isConscrypt(engine)) {
            return toConscrypt(engine).getApplicationProtocol();
        }
        // Reflection fallback for the platform-bundled Conscrypt class; see
        // getApplicationProtocol(SSLSocket) for rationale.
        try {
            if (!Class.forName("com.android.org.conscrypt.AbstractConscryptEngine")
                            .isInstance(engine)) {
                throw new IllegalArgumentException(
                        "Not a conscrypt engine: " + engine.getClass().getName());
            }
            return invokeConscryptMethod(engine, "getApplicationProtocol");
        } catch (ClassNotFoundException e) {
            throw new IllegalArgumentException(
                    "Not a conscrypt engine: " + engine.getClass().getName(), e);
        }
    }

    /**
     * Returns the tls-unique channel binding value for this connection, per RFC 5929. This
     * will return {@code null} if there is no such value available, such as if the handshake
     * has not yet completed or this connection is closed.
     */
    public static byte[] getTlsUnique(SSLEngine engine) {
        return toConscrypt(engine).getTlsUnique();
    }

    /**
     * Exports a value derived from the TLS master secret as described in RFC 5705.
     *
     * @param label the label to use in calculating the exported value. This must be
     *         an ASCII-only string.
     * @param context the application-specific context value to use in calculating the
     *         exported value. This may be {@code null} to use no application context, which is
     *         treated differently than an empty byte array.
     * @param length the number of bytes of keying material to return.
     * @return a value of the specified length, or {@code null} if the handshake has not yet
     *         completed or the connection has been closed.
     * @throws SSLException if the value could not be exported.
     */
    public static byte[] exportKeyingMaterial(
            SSLEngine engine, String label, byte[] context, int length) throws SSLException {
        return toConscrypt(engine).exportKeyingMaterial(label, context, length);
    }

    /**
     * Indicates whether the given {@link TrustManager} was created by this distribution of
     * Conscrypt.
     */
    public static boolean isConscrypt(TrustManager trustManager) {
        return trustManager instanceof TrustManagerImpl;
    }

    private static TrustManagerImpl toConscrypt(TrustManager trustManager) {
        if (!isConscrypt(trustManager)) {
            throw new IllegalArgumentException(
                    "Not a Conscrypt trust manager: " + trustManager.getClass().getName());
        }
        return (TrustManagerImpl) trustManager;
    }

    /**
     * Set the default hostname verifier that will be used for HTTPS endpoint identification by
     * Conscrypt trust managers. If {@code null} (the default), endpoint identification will use
     * the default hostname verifier set in
     * {@link HttpsURLConnection#setDefaultHostnameVerifier(javax.net.ssl.HostnameVerifier)}.
     */
    public static synchronized void setDefaultHostnameVerifier(ConscryptHostnameVerifier verifier) {
        TrustManagerImpl.setDefaultHostnameVerifier(verifier);
    }

    /**
     * Returns the currently-set default hostname verifier for Conscrypt trust managers.
     *
     * <p>NOTE: the {@code trustManager} parameter is ignored; it is retained only for
     * backward compatibility with existing callers.
     *
     * @see #setDefaultHostnameVerifier(ConscryptHostnameVerifier)
     */
    public static synchronized ConscryptHostnameVerifier getDefaultHostnameVerifier(
            TrustManager trustManager) {
        return TrustManagerImpl.getDefaultHostnameVerifier();
    }

    /**
     * Set the hostname verifier that will be used for HTTPS endpoint identification by the
     * given trust manager. If {@code null} (the default), endpoint identification will use the
     * default hostname verifier set in {@link
     * #setDefaultHostnameVerifier(ConscryptHostnameVerifier)}.
     *
     * @throws IllegalArgumentException if the provided trust manager is not a Conscrypt trust
     *         manager per {@link #isConscrypt(TrustManager)}
     */
    public static void setHostnameVerifier(
            TrustManager trustManager, ConscryptHostnameVerifier verifier) {
        toConscrypt(trustManager).setHostnameVerifier(verifier);
    }

    /**
     * Returns the currently-set hostname verifier for the given trust manager.
     *
     * @throws IllegalArgumentException if the provided trust manager is not a Conscrypt trust
     *         manager per {@link #isConscrypt(TrustManager)}
     *
     * @see #setHostnameVerifier(TrustManager, ConscryptHostnameVerifier)
     */
    public static ConscryptHostnameVerifier getHostnameVerifier(TrustManager trustManager) {
        return toConscrypt(trustManager).getHostnameVerifier();
    }

    /**
     * Wraps the HttpsURLConnection.HostnameVerifier into a ConscryptHostnameVerifier
     */
    public static ConscryptHostnameVerifier wrapHostnameVerifier(final HostnameVerifier verifier) {
        return new ConscryptHostnameVerifier() {
            @Override
            public boolean verify(
                    X509Certificate[] certificates, String hostname, SSLSession session) {
                // The certificate chain is intentionally dropped: HostnameVerifier's contract
                // only takes the hostname and the session.
                return verifier.verify(hostname, session);
            }
        };
    }

    /**
     * Generic helper method for invoking methods on potentially non-Conscrypt SSLSocket/SSLEngine
     * instances via reflection.
     *
     * @param instance The SSLSocket or SSLEngine instance.
     * @param methodName The name of the method to invoke.
     * @return String.
     * @throws IllegalArgumentException if the method cannot be invoked or throws a checked
     *         exception.
     * @throws IllegalStateException if the method throws an IllegalStateException.
     * @throws RuntimeException if the method throws a RuntimeException.
     * @throws Error if the method throws an Error.
     */
    private static String invokeConscryptMethod(Object instance, String methodName)
            throws IllegalArgumentException {
        try {
            Method method = instance.getClass().getMethod(methodName);
            Object result = method.invoke(instance);
            return (String) result;
        } catch (InvocationTargetException e) {
            // Unwrap and rethrow the target method's own exception where possible, preserving
            // the original cause in every branch.
            Throwable cause = e.getCause();
            if (cause instanceof SSLException || cause instanceof IOException) {
                throw new IllegalArgumentException(
                        "Reflected method '" + methodName + "' threw a checked exception.", cause);
            } else if (cause instanceof IllegalStateException) {
                throw (IllegalStateException) cause;
            } else if (cause instanceof RuntimeException) {
                throw (RuntimeException) cause;
            } else if (cause instanceof Error) {
                throw (Error) cause;
            } else {
                throw new RuntimeException(
                        "Reflected method '" + methodName + "' threw an unexpected exception",
                        cause);
            }
        } catch (Exception e) {
            String className = instance.getClass().getName();
            // Bug fix: the original message left the quote after the class name unbalanced
            // ("... on class 'Foo, message: ..."); it now reads "... on class 'Foo', message: ...".
            throw new IllegalArgumentException(
                    "Failed reflection fallback for method '" + methodName + "' on class '"
                            + className + "', message: " + e.getMessage(),
                    e);
        }
    }
}
|
googleapis/google-cloud-java | 35,535 | java-automl/proto-google-cloud-automl-v1beta1/src/main/java/com/google/cloud/automl/v1beta1/ListModelsResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/automl/v1beta1/service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.automl.v1beta1;
/**
*
*
* <pre>
* Response message for [AutoMl.ListModels][google.cloud.automl.v1beta1.AutoMl.ListModels].
* </pre>
*
* Protobuf type {@code google.cloud.automl.v1beta1.ListModelsResponse}
*/
public final class ListModelsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.automl.v1beta1.ListModelsResponse)
ListModelsResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListModelsResponse.newBuilder() to construct. Generated protobuf code — DO NOT EDIT.
private ListModelsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}
// No-arg constructor used for the default instance; initializes fields to empty defaults.
private ListModelsResponse() {
  model_ = java.util.Collections.emptyList();
  nextPageToken_ = "";
}
// Factory hook used internally by the protobuf runtime; the parameter is unused by design.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  return new ListModelsResponse();
}
/** Returns the protobuf descriptor for this message type (from service.proto). */
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.automl.v1beta1.AutoMlProto
      .internal_static_google_cloud_automl_v1beta1_ListModelsResponse_descriptor;
}
// Wires the generated descriptor to this class and its Builder for reflective field access.
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.automl.v1beta1.AutoMlProto
      .internal_static_google_cloud_automl_v1beta1_ListModelsResponse_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.automl.v1beta1.ListModelsResponse.class,
          com.google.cloud.automl.v1beta1.ListModelsResponse.Builder.class);
}
public static final int MODEL_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.automl.v1beta1.Model> model_;
/**
*
*
* <pre>
* List of models in the requested page.
* </pre>
*
* <code>repeated .google.cloud.automl.v1beta1.Model model = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.automl.v1beta1.Model> getModelList() {
return model_;
}
/**
*
*
* <pre>
* List of models in the requested page.
* </pre>
*
* <code>repeated .google.cloud.automl.v1beta1.Model model = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.automl.v1beta1.ModelOrBuilder>
getModelOrBuilderList() {
return model_;
}
/**
*
*
* <pre>
* List of models in the requested page.
* </pre>
*
* <code>repeated .google.cloud.automl.v1beta1.Model model = 1;</code>
*/
@java.lang.Override
public int getModelCount() {
return model_.size();
}
/**
*
*
* <pre>
* List of models in the requested page.
* </pre>
*
* <code>repeated .google.cloud.automl.v1beta1.Model model = 1;</code>
*/
@java.lang.Override
public com.google.cloud.automl.v1beta1.Model getModel(int index) {
return model_.get(index);
}
/**
*
*
* <pre>
* List of models in the requested page.
* </pre>
*
* <code>repeated .google.cloud.automl.v1beta1.Model model = 1;</code>
*/
@java.lang.Override
public com.google.cloud.automl.v1beta1.ModelOrBuilder getModelOrBuilder(int index) {
return model_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token to retrieve next page of results.
* Pass to [ListModelsRequest.page_token][google.cloud.automl.v1beta1.ListModelsRequest.page_token] to obtain that page.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* A token to retrieve next page of results.
* Pass to [ListModelsRequest.page_token][google.cloud.automl.v1beta1.ListModelsRequest.page_token] to obtain that page.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < model_.size(); i++) {
output.writeMessage(1, model_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < model_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, model_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.automl.v1beta1.ListModelsResponse)) {
return super.equals(obj);
}
com.google.cloud.automl.v1beta1.ListModelsResponse other =
(com.google.cloud.automl.v1beta1.ListModelsResponse) obj;
if (!getModelList().equals(other.getModelList())) return false;
if (!getNextPageToken().equals(other.getNextPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getModelCount() > 0) {
hash = (37 * hash) + MODEL_FIELD_NUMBER;
hash = (53 * hash) + getModelList().hashCode();
}
hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getNextPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.automl.v1beta1.ListModelsResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.automl.v1beta1.ListModelsResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.automl.v1beta1.ListModelsResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.automl.v1beta1.ListModelsResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.automl.v1beta1.ListModelsResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.automl.v1beta1.ListModelsResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.automl.v1beta1.ListModelsResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.automl.v1beta1.ListModelsResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.automl.v1beta1.ListModelsResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.automl.v1beta1.ListModelsResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.automl.v1beta1.ListModelsResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.automl.v1beta1.ListModelsResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.automl.v1beta1.ListModelsResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Response message for [AutoMl.ListModels][google.cloud.automl.v1beta1.AutoMl.ListModels].
* </pre>
*
* Protobuf type {@code google.cloud.automl.v1beta1.ListModelsResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.automl.v1beta1.ListModelsResponse)
com.google.cloud.automl.v1beta1.ListModelsResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.automl.v1beta1.AutoMlProto
.internal_static_google_cloud_automl_v1beta1_ListModelsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.automl.v1beta1.AutoMlProto
.internal_static_google_cloud_automl_v1beta1_ListModelsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.automl.v1beta1.ListModelsResponse.class,
com.google.cloud.automl.v1beta1.ListModelsResponse.Builder.class);
}
// Construct using com.google.cloud.automl.v1beta1.ListModelsResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (modelBuilder_ == null) {
model_ = java.util.Collections.emptyList();
} else {
model_ = null;
modelBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.automl.v1beta1.AutoMlProto
.internal_static_google_cloud_automl_v1beta1_ListModelsResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.automl.v1beta1.ListModelsResponse getDefaultInstanceForType() {
return com.google.cloud.automl.v1beta1.ListModelsResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.automl.v1beta1.ListModelsResponse build() {
com.google.cloud.automl.v1beta1.ListModelsResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.automl.v1beta1.ListModelsResponse buildPartial() {
com.google.cloud.automl.v1beta1.ListModelsResponse result =
new com.google.cloud.automl.v1beta1.ListModelsResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.cloud.automl.v1beta1.ListModelsResponse result) {
if (modelBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
model_ = java.util.Collections.unmodifiableList(model_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.model_ = model_;
} else {
result.model_ = modelBuilder_.build();
}
}
private void buildPartial0(com.google.cloud.automl.v1beta1.ListModelsResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.automl.v1beta1.ListModelsResponse) {
return mergeFrom((com.google.cloud.automl.v1beta1.ListModelsResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.automl.v1beta1.ListModelsResponse other) {
if (other == com.google.cloud.automl.v1beta1.ListModelsResponse.getDefaultInstance())
return this;
if (modelBuilder_ == null) {
if (!other.model_.isEmpty()) {
if (model_.isEmpty()) {
model_ = other.model_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureModelIsMutable();
model_.addAll(other.model_);
}
onChanged();
}
} else {
if (!other.model_.isEmpty()) {
if (modelBuilder_.isEmpty()) {
modelBuilder_.dispose();
modelBuilder_ = null;
model_ = other.model_;
bitField0_ = (bitField0_ & ~0x00000001);
modelBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getModelFieldBuilder()
: null;
} else {
modelBuilder_.addAllMessages(other.model_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.cloud.automl.v1beta1.Model m =
input.readMessage(
com.google.cloud.automl.v1beta1.Model.parser(), extensionRegistry);
if (modelBuilder_ == null) {
ensureModelIsMutable();
model_.add(m);
} else {
modelBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.cloud.automl.v1beta1.Model> model_ =
java.util.Collections.emptyList();
private void ensureModelIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
model_ = new java.util.ArrayList<com.google.cloud.automl.v1beta1.Model>(model_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.automl.v1beta1.Model,
com.google.cloud.automl.v1beta1.Model.Builder,
com.google.cloud.automl.v1beta1.ModelOrBuilder>
modelBuilder_;
/**
*
*
* <pre>
* List of models in the requested page.
* </pre>
*
* <code>repeated .google.cloud.automl.v1beta1.Model model = 1;</code>
*/
public java.util.List<com.google.cloud.automl.v1beta1.Model> getModelList() {
if (modelBuilder_ == null) {
return java.util.Collections.unmodifiableList(model_);
} else {
return modelBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* List of models in the requested page.
* </pre>
*
* <code>repeated .google.cloud.automl.v1beta1.Model model = 1;</code>
*/
public int getModelCount() {
if (modelBuilder_ == null) {
return model_.size();
} else {
return modelBuilder_.getCount();
}
}
/**
*
*
* <pre>
* List of models in the requested page.
* </pre>
*
* <code>repeated .google.cloud.automl.v1beta1.Model model = 1;</code>
*/
public com.google.cloud.automl.v1beta1.Model getModel(int index) {
if (modelBuilder_ == null) {
return model_.get(index);
} else {
return modelBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* List of models in the requested page.
* </pre>
*
* <code>repeated .google.cloud.automl.v1beta1.Model model = 1;</code>
*/
public Builder setModel(int index, com.google.cloud.automl.v1beta1.Model value) {
if (modelBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureModelIsMutable();
model_.set(index, value);
onChanged();
} else {
modelBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* List of models in the requested page.
* </pre>
*
* <code>repeated .google.cloud.automl.v1beta1.Model model = 1;</code>
*/
public Builder setModel(
int index, com.google.cloud.automl.v1beta1.Model.Builder builderForValue) {
if (modelBuilder_ == null) {
ensureModelIsMutable();
model_.set(index, builderForValue.build());
onChanged();
} else {
modelBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* List of models in the requested page.
* </pre>
*
* <code>repeated .google.cloud.automl.v1beta1.Model model = 1;</code>
*/
public Builder addModel(com.google.cloud.automl.v1beta1.Model value) {
if (modelBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureModelIsMutable();
model_.add(value);
onChanged();
} else {
modelBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* List of models in the requested page.
* </pre>
*
* <code>repeated .google.cloud.automl.v1beta1.Model model = 1;</code>
*/
public Builder addModel(int index, com.google.cloud.automl.v1beta1.Model value) {
if (modelBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureModelIsMutable();
model_.add(index, value);
onChanged();
} else {
modelBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* List of models in the requested page.
* </pre>
*
* <code>repeated .google.cloud.automl.v1beta1.Model model = 1;</code>
*/
public Builder addModel(com.google.cloud.automl.v1beta1.Model.Builder builderForValue) {
if (modelBuilder_ == null) {
ensureModelIsMutable();
model_.add(builderForValue.build());
onChanged();
} else {
modelBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* List of models in the requested page.
* </pre>
*
* <code>repeated .google.cloud.automl.v1beta1.Model model = 1;</code>
*/
public Builder addModel(
int index, com.google.cloud.automl.v1beta1.Model.Builder builderForValue) {
if (modelBuilder_ == null) {
ensureModelIsMutable();
model_.add(index, builderForValue.build());
onChanged();
} else {
modelBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* List of models in the requested page.
* </pre>
*
* <code>repeated .google.cloud.automl.v1beta1.Model model = 1;</code>
*/
public Builder addAllModel(
java.lang.Iterable<? extends com.google.cloud.automl.v1beta1.Model> values) {
if (modelBuilder_ == null) {
ensureModelIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, model_);
onChanged();
} else {
modelBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* List of models in the requested page.
* </pre>
*
* <code>repeated .google.cloud.automl.v1beta1.Model model = 1;</code>
*/
public Builder clearModel() {
if (modelBuilder_ == null) {
model_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
modelBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* List of models in the requested page.
* </pre>
*
* <code>repeated .google.cloud.automl.v1beta1.Model model = 1;</code>
*/
public Builder removeModel(int index) {
if (modelBuilder_ == null) {
ensureModelIsMutable();
model_.remove(index);
onChanged();
} else {
modelBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* List of models in the requested page.
* </pre>
*
* <code>repeated .google.cloud.automl.v1beta1.Model model = 1;</code>
*/
public com.google.cloud.automl.v1beta1.Model.Builder getModelBuilder(int index) {
return getModelFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* List of models in the requested page.
* </pre>
*
* <code>repeated .google.cloud.automl.v1beta1.Model model = 1;</code>
*/
public com.google.cloud.automl.v1beta1.ModelOrBuilder getModelOrBuilder(int index) {
if (modelBuilder_ == null) {
return model_.get(index);
} else {
return modelBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* List of models in the requested page.
* </pre>
*
* <code>repeated .google.cloud.automl.v1beta1.Model model = 1;</code>
*/
public java.util.List<? extends com.google.cloud.automl.v1beta1.ModelOrBuilder>
getModelOrBuilderList() {
if (modelBuilder_ != null) {
return modelBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(model_);
}
}
/**
*
*
* <pre>
* List of models in the requested page.
* </pre>
*
* <code>repeated .google.cloud.automl.v1beta1.Model model = 1;</code>
*/
public com.google.cloud.automl.v1beta1.Model.Builder addModelBuilder() {
return getModelFieldBuilder()
.addBuilder(com.google.cloud.automl.v1beta1.Model.getDefaultInstance());
}
/**
*
*
* <pre>
* List of models in the requested page.
* </pre>
*
* <code>repeated .google.cloud.automl.v1beta1.Model model = 1;</code>
*/
public com.google.cloud.automl.v1beta1.Model.Builder addModelBuilder(int index) {
return getModelFieldBuilder()
.addBuilder(index, com.google.cloud.automl.v1beta1.Model.getDefaultInstance());
}
/**
*
*
* <pre>
* List of models in the requested page.
* </pre>
*
* <code>repeated .google.cloud.automl.v1beta1.Model model = 1;</code>
*/
public java.util.List<com.google.cloud.automl.v1beta1.Model.Builder> getModelBuilderList() {
return getModelFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.automl.v1beta1.Model,
com.google.cloud.automl.v1beta1.Model.Builder,
com.google.cloud.automl.v1beta1.ModelOrBuilder>
getModelFieldBuilder() {
if (modelBuilder_ == null) {
modelBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.automl.v1beta1.Model,
com.google.cloud.automl.v1beta1.Model.Builder,
com.google.cloud.automl.v1beta1.ModelOrBuilder>(
model_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
model_ = null;
}
return modelBuilder_;
}
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token to retrieve next page of results.
* Pass to [ListModelsRequest.page_token][google.cloud.automl.v1beta1.ListModelsRequest.page_token] to obtain that page.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* A token to retrieve next page of results.
* Pass to [ListModelsRequest.page_token][google.cloud.automl.v1beta1.ListModelsRequest.page_token] to obtain that page.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* A token to retrieve next page of results.
* Pass to [ListModelsRequest.page_token][google.cloud.automl.v1beta1.ListModelsRequest.page_token] to obtain that page.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* A token to retrieve next page of results.
* Pass to [ListModelsRequest.page_token][google.cloud.automl.v1beta1.ListModelsRequest.page_token] to obtain that page.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* A token to retrieve next page of results.
* Pass to [ListModelsRequest.page_token][google.cloud.automl.v1beta1.ListModelsRequest.page_token] to obtain that page.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.automl.v1beta1.ListModelsResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.automl.v1beta1.ListModelsResponse)
private static final com.google.cloud.automl.v1beta1.ListModelsResponse DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.automl.v1beta1.ListModelsResponse();
}
public static com.google.cloud.automl.v1beta1.ListModelsResponse getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ListModelsResponse> PARSER =
new com.google.protobuf.AbstractParser<ListModelsResponse>() {
@java.lang.Override
public ListModelsResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ListModelsResponse> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListModelsResponse> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.automl.v1beta1.ListModelsResponse getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 35,599 | java-video-stitcher/proto-google-cloud-video-stitcher-v1/src/main/java/com/google/cloud/video/stitcher/v1/UpdateVodConfigRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/video/stitcher/v1/video_stitcher_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.video.stitcher.v1;
/**
*
*
* <pre>
* Request message for VideoStitcherService.updateVodConfig.
* </pre>
*
* Protobuf type {@code google.cloud.video.stitcher.v1.UpdateVodConfigRequest}
*/
public final class UpdateVodConfigRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.video.stitcher.v1.UpdateVodConfigRequest)
UpdateVodConfigRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use UpdateVodConfigRequest.newBuilder() to construct.
  // Builder-based constructor; all field state is copied from {@code builder} by the superclass.
  private UpdateVodConfigRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Default-instance constructor; both fields are message-typed and default to null (unset).
  private UpdateVodConfigRequest() {}
  @java.lang.Override
  @SuppressWarnings({"unused"})
  // Reflective instantiation hook for the protobuf runtime; the parameter is deliberately unused.
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new UpdateVodConfigRequest();
  }
  // Descriptor for this message type, defined in the generated VideoStitcherServiceProto class.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.video.stitcher.v1.VideoStitcherServiceProto
        .internal_static_google_cloud_video_stitcher_v1_UpdateVodConfigRequest_descriptor;
  }
  @java.lang.Override
  // Wires reflective field access to the generated accessor table for this message type.
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.video.stitcher.v1.VideoStitcherServiceProto
        .internal_static_google_cloud_video_stitcher_v1_UpdateVodConfigRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.video.stitcher.v1.UpdateVodConfigRequest.class,
            com.google.cloud.video.stitcher.v1.UpdateVodConfigRequest.Builder.class);
  }
private int bitField0_;
public static final int VOD_CONFIG_FIELD_NUMBER = 1;
private com.google.cloud.video.stitcher.v1.VodConfig vodConfig_;
/**
*
*
* <pre>
* Required. The VOD config resource which replaces the resource on the
* server.
* </pre>
*
* <code>
* .google.cloud.video.stitcher.v1.VodConfig vod_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the vodConfig field is set.
*/
  @java.lang.Override
  // Bit 0 of bitField0_ records whether vod_config was explicitly set.
  public boolean hasVodConfig() {
    return ((bitField0_ & 0x00000001) != 0);
  }
/**
*
*
* <pre>
* Required. The VOD config resource which replaces the resource on the
* server.
* </pre>
*
* <code>
* .google.cloud.video.stitcher.v1.VodConfig vod_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The vodConfig.
*/
  @java.lang.Override
  // Never returns null: falls back to the VodConfig default instance when the field is unset.
  public com.google.cloud.video.stitcher.v1.VodConfig getVodConfig() {
    return vodConfig_ == null
        ? com.google.cloud.video.stitcher.v1.VodConfig.getDefaultInstance()
        : vodConfig_;
  }
/**
*
*
* <pre>
* Required. The VOD config resource which replaces the resource on the
* server.
* </pre>
*
* <code>
* .google.cloud.video.stitcher.v1.VodConfig vod_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
  @java.lang.Override
  // Read-only view of vod_config; same default-instance fallback as getVodConfig().
  public com.google.cloud.video.stitcher.v1.VodConfigOrBuilder getVodConfigOrBuilder() {
    return vodConfig_ == null
        ? com.google.cloud.video.stitcher.v1.VodConfig.getDefaultInstance()
        : vodConfig_;
  }
  public static final int UPDATE_MASK_FIELD_NUMBER = 2;
  // null means "unset"; accessors substitute the default instance.
  private com.google.protobuf.FieldMask updateMask_;
  /**
   *
   *
   * <pre>
   * Required. The update mask applies to the resource.
   * For the `FieldMask` definition, see
   * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return Whether the updateMask field is set.
   */
  @java.lang.Override
  public boolean hasUpdateMask() {
    return ((bitField0_ & 0x00000002) != 0); // presence bit for update_mask
  }
  /**
   *
   *
   * <pre>
   * Required. The update mask applies to the resource.
   * For the `FieldMask` definition, see
   * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The updateMask.
   */
  @java.lang.Override
  public com.google.protobuf.FieldMask getUpdateMask() {
    // Never returns null: falls back to the default instance when unset.
    return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
  }
  /**
   *
   *
   * <pre>
   * Required. The update mask applies to the resource.
   * For the `FieldMask` definition, see
   * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
    // Same fallback-to-default behavior as getUpdateMask(), typed as OrBuilder.
    return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
  }
  // Memoized initialization state: -1 = not yet computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  /** Always true: this message declares no required (proto2-style) fields. */
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  /** Serializes the explicitly-set fields to the wire in field-number order. */
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(1, getVodConfig());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      output.writeMessage(2, getUpdateMask());
    }
    getUnknownFields().writeTo(output);
  }
  /** Computes the serialized byte size, memoizing the result in memoizedSize. */
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size; // cached from an earlier call
    size = 0;
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getVodConfig());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  /** Field-wise equality: same presence and value for every field, plus equal unknown fields. */
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.video.stitcher.v1.UpdateVodConfigRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.video.stitcher.v1.UpdateVodConfigRequest other =
        (com.google.cloud.video.stitcher.v1.UpdateVodConfigRequest) obj;
    if (hasVodConfig() != other.hasVodConfig()) return false;
    if (hasVodConfig()) {
      if (!getVodConfig().equals(other.getVodConfig())) return false;
    }
    if (hasUpdateMask() != other.hasUpdateMask()) return false;
    if (hasUpdateMask()) {
      if (!getUpdateMask().equals(other.getUpdateMask())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  /**
   * Standard generated hash: mixes the descriptor, then each set field tagged by
   * its field number, then the unknown fields; result is cached after first use.
   */
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasVodConfig()) {
      hash = (37 * hash) + VOD_CONFIG_FIELD_NUMBER;
      hash = (53 * hash) + getVodConfig().hashCode();
    }
    if (hasUpdateMask()) {
      hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
      hash = (53 * hash) + getUpdateMask().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parseFrom overloads. The ByteBuffer / ByteString / byte[]
  // variants delegate to PARSER directly; the stream variants go through
  // GeneratedMessageV3's IOException-translating helpers.
  public static com.google.cloud.video.stitcher.v1.UpdateVodConfigRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.video.stitcher.v1.UpdateVodConfigRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.video.stitcher.v1.UpdateVodConfigRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.video.stitcher.v1.UpdateVodConfigRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.video.stitcher.v1.UpdateVodConfigRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.video.stitcher.v1.UpdateVodConfigRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.video.stitcher.v1.UpdateVodConfigRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.video.stitcher.v1.UpdateVodConfigRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Delimited variants read a varint length prefix before the message bytes.
  public static com.google.cloud.video.stitcher.v1.UpdateVodConfigRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.video.stitcher.v1.UpdateVodConfigRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.video.stitcher.v1.UpdateVodConfigRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.video.stitcher.v1.UpdateVodConfigRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  /** Creates a builder seeded from the default (empty) instance. */
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  /** Creates a builder pre-populated with the fields of {@code prototype}. */
  public static Builder newBuilder(
      com.google.cloud.video.stitcher.v1.UpdateVodConfigRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // Avoid copying when called on the shared default instance.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Request message for VideoStitcherService.updateVodConfig.
   * </pre>
   *
   * Protobuf type {@code google.cloud.video.stitcher.v1.UpdateVodConfigRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.video.stitcher.v1.UpdateVodConfigRequest)
      com.google.cloud.video.stitcher.v1.UpdateVodConfigRequestOrBuilder {
    // Static access to the descriptor for this message type.
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.video.stitcher.v1.VideoStitcherServiceProto
          .internal_static_google_cloud_video_stitcher_v1_UpdateVodConfigRequest_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.video.stitcher.v1.VideoStitcherServiceProto
          .internal_static_google_cloud_video_stitcher_v1_UpdateVodConfigRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.video.stitcher.v1.UpdateVodConfigRequest.class,
              com.google.cloud.video.stitcher.v1.UpdateVodConfigRequest.Builder.class);
    }
    // Construct using com.google.cloud.video.stitcher.v1.UpdateVodConfigRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      // Eagerly create the nested field builders only when the runtime is
      // configured to require it; otherwise they are created lazily on access.
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getVodConfigFieldBuilder();
        getUpdateMaskFieldBuilder();
      }
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      // Reset presence bits and release both message fields and any builders.
      bitField0_ = 0;
      vodConfig_ = null;
      if (vodConfigBuilder_ != null) {
        vodConfigBuilder_.dispose();
        vodConfigBuilder_ = null;
      }
      updateMask_ = null;
      if (updateMaskBuilder_ != null) {
        updateMaskBuilder_.dispose();
        updateMaskBuilder_ = null;
      }
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.video.stitcher.v1.VideoStitcherServiceProto
          .internal_static_google_cloud_video_stitcher_v1_UpdateVodConfigRequest_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.video.stitcher.v1.UpdateVodConfigRequest getDefaultInstanceForType() {
      return com.google.cloud.video.stitcher.v1.UpdateVodConfigRequest.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.video.stitcher.v1.UpdateVodConfigRequest build() {
      com.google.cloud.video.stitcher.v1.UpdateVodConfigRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.video.stitcher.v1.UpdateVodConfigRequest buildPartial() {
      com.google.cloud.video.stitcher.v1.UpdateVodConfigRequest result =
          new com.google.cloud.video.stitcher.v1.UpdateVodConfigRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Copies the set fields (and their presence bits) from this builder into
    // the freshly created message instance.
    private void buildPartial0(com.google.cloud.video.stitcher.v1.UpdateVodConfigRequest result) {
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.vodConfig_ = vodConfigBuilder_ == null ? vodConfig_ : vodConfigBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
        to_bitField0_ |= 0x00000002;
      }
      result.bitField0_ |= to_bitField0_;
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.video.stitcher.v1.UpdateVodConfigRequest) {
        return mergeFrom((com.google.cloud.video.stitcher.v1.UpdateVodConfigRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Field-by-field merge from another message of the same type; fields unset
    // in {@code other} leave this builder untouched.
    public Builder mergeFrom(com.google.cloud.video.stitcher.v1.UpdateVodConfigRequest other) {
      if (other == com.google.cloud.video.stitcher.v1.UpdateVodConfigRequest.getDefaultInstance())
        return this;
      if (other.hasVodConfig()) {
        mergeVodConfig(other.getVodConfig());
      }
      if (other.hasUpdateMask()) {
        mergeUpdateMask(other.getUpdateMask());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Streaming merge: reads tag/value pairs until end of input, routing known
    // field numbers (wire tags 10 and 18) to their builders and everything else
    // into the unknown-field set.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                input.readMessage(getVodConfigFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Presence bits: 0x00000001 = vod_config, 0x00000002 = update_mask.
    private int bitField0_;
    private com.google.cloud.video.stitcher.v1.VodConfig vodConfig_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.video.stitcher.v1.VodConfig,
            com.google.cloud.video.stitcher.v1.VodConfig.Builder,
            com.google.cloud.video.stitcher.v1.VodConfigOrBuilder>
        vodConfigBuilder_;
    /**
     *
     *
     * <pre>
     * Required. The VOD config resource which replaces the resource on the
     * server.
     * </pre>
     *
     * <code>
     * .google.cloud.video.stitcher.v1.VodConfig vod_config = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return Whether the vodConfig field is set.
     */
    public boolean hasVodConfig() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     *
     *
     * <pre>
     * Required. The VOD config resource which replaces the resource on the
     * server.
     * </pre>
     *
     * <code>
     * .google.cloud.video.stitcher.v1.VodConfig vod_config = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return The vodConfig.
     */
    public com.google.cloud.video.stitcher.v1.VodConfig getVodConfig() {
      if (vodConfigBuilder_ == null) {
        return vodConfig_ == null
            ? com.google.cloud.video.stitcher.v1.VodConfig.getDefaultInstance()
            : vodConfig_;
      } else {
        return vodConfigBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The VOD config resource which replaces the resource on the
     * server.
     * </pre>
     *
     * <code>
     * .google.cloud.video.stitcher.v1.VodConfig vod_config = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setVodConfig(com.google.cloud.video.stitcher.v1.VodConfig value) {
      if (vodConfigBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        vodConfig_ = value;
      } else {
        vodConfigBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The VOD config resource which replaces the resource on the
     * server.
     * </pre>
     *
     * <code>
     * .google.cloud.video.stitcher.v1.VodConfig vod_config = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setVodConfig(
        com.google.cloud.video.stitcher.v1.VodConfig.Builder builderForValue) {
      if (vodConfigBuilder_ == null) {
        vodConfig_ = builderForValue.build();
      } else {
        vodConfigBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The VOD config resource which replaces the resource on the
     * server.
     * </pre>
     *
     * <code>
     * .google.cloud.video.stitcher.v1.VodConfig vod_config = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder mergeVodConfig(com.google.cloud.video.stitcher.v1.VodConfig value) {
      if (vodConfigBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)
            && vodConfig_ != null
            && vodConfig_ != com.google.cloud.video.stitcher.v1.VodConfig.getDefaultInstance()) {
          // Already holds a non-default value: merge field-by-field.
          getVodConfigBuilder().mergeFrom(value);
        } else {
          // Unset or default: adopt the incoming value wholesale.
          vodConfig_ = value;
        }
      } else {
        vodConfigBuilder_.mergeFrom(value);
      }
      if (vodConfig_ != null) {
        bitField0_ |= 0x00000001;
        onChanged();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The VOD config resource which replaces the resource on the
     * server.
     * </pre>
     *
     * <code>
     * .google.cloud.video.stitcher.v1.VodConfig vod_config = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder clearVodConfig() {
      bitField0_ = (bitField0_ & ~0x00000001);
      vodConfig_ = null;
      if (vodConfigBuilder_ != null) {
        vodConfigBuilder_.dispose();
        vodConfigBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The VOD config resource which replaces the resource on the
     * server.
     * </pre>
     *
     * <code>
     * .google.cloud.video.stitcher.v1.VodConfig vod_config = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.video.stitcher.v1.VodConfig.Builder getVodConfigBuilder() {
      // Accessing the nested builder marks the field as set.
      bitField0_ |= 0x00000001;
      onChanged();
      return getVodConfigFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Required. The VOD config resource which replaces the resource on the
     * server.
     * </pre>
     *
     * <code>
     * .google.cloud.video.stitcher.v1.VodConfig vod_config = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.video.stitcher.v1.VodConfigOrBuilder getVodConfigOrBuilder() {
      if (vodConfigBuilder_ != null) {
        return vodConfigBuilder_.getMessageOrBuilder();
      } else {
        return vodConfig_ == null
            ? com.google.cloud.video.stitcher.v1.VodConfig.getDefaultInstance()
            : vodConfig_;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The VOD config resource which replaces the resource on the
     * server.
     * </pre>
     *
     * <code>
     * .google.cloud.video.stitcher.v1.VodConfig vod_config = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.video.stitcher.v1.VodConfig,
            com.google.cloud.video.stitcher.v1.VodConfig.Builder,
            com.google.cloud.video.stitcher.v1.VodConfigOrBuilder>
        getVodConfigFieldBuilder() {
      // Lazily creates the builder; ownership of vodConfig_ moves into it.
      if (vodConfigBuilder_ == null) {
        vodConfigBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.video.stitcher.v1.VodConfig,
                com.google.cloud.video.stitcher.v1.VodConfig.Builder,
                com.google.cloud.video.stitcher.v1.VodConfigOrBuilder>(
                getVodConfig(), getParentForChildren(), isClean());
        vodConfig_ = null;
      }
      return vodConfigBuilder_;
    }
    private com.google.protobuf.FieldMask updateMask_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>
        updateMaskBuilder_;
    /**
     *
     *
     * <pre>
     * Required. The update mask applies to the resource.
     * For the `FieldMask` definition, see
     * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return Whether the updateMask field is set.
     */
    public boolean hasUpdateMask() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     *
     *
     * <pre>
     * Required. The update mask applies to the resource.
     * For the `FieldMask` definition, see
     * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return The updateMask.
     */
    public com.google.protobuf.FieldMask getUpdateMask() {
      if (updateMaskBuilder_ == null) {
        return updateMask_ == null
            ? com.google.protobuf.FieldMask.getDefaultInstance()
            : updateMask_;
      } else {
        return updateMaskBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The update mask applies to the resource.
     * For the `FieldMask` definition, see
     * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
      if (updateMaskBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        updateMask_ = value;
      } else {
        updateMaskBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The update mask applies to the resource.
     * For the `FieldMask` definition, see
     * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
      if (updateMaskBuilder_ == null) {
        updateMask_ = builderForValue.build();
      } else {
        updateMaskBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The update mask applies to the resource.
     * For the `FieldMask` definition, see
     * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
      if (updateMaskBuilder_ == null) {
        if (((bitField0_ & 0x00000002) != 0)
            && updateMask_ != null
            && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
          // Already holds a non-default value: merge field-by-field.
          getUpdateMaskBuilder().mergeFrom(value);
        } else {
          // Unset or default: adopt the incoming value wholesale.
          updateMask_ = value;
        }
      } else {
        updateMaskBuilder_.mergeFrom(value);
      }
      if (updateMask_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The update mask applies to the resource.
     * For the `FieldMask` definition, see
     * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder clearUpdateMask() {
      bitField0_ = (bitField0_ & ~0x00000002);
      updateMask_ = null;
      if (updateMaskBuilder_ != null) {
        updateMaskBuilder_.dispose();
        updateMaskBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The update mask applies to the resource.
     * For the `FieldMask` definition, see
     * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
      // Accessing the nested builder marks the field as set.
      bitField0_ |= 0x00000002;
      onChanged();
      return getUpdateMaskFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Required. The update mask applies to the resource.
     * For the `FieldMask` definition, see
     * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
      if (updateMaskBuilder_ != null) {
        return updateMaskBuilder_.getMessageOrBuilder();
      } else {
        return updateMask_ == null
            ? com.google.protobuf.FieldMask.getDefaultInstance()
            : updateMask_;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The update mask applies to the resource.
     * For the `FieldMask` definition, see
     * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>
        getUpdateMaskFieldBuilder() {
      // Lazily creates the builder; ownership of updateMask_ moves into it.
      if (updateMaskBuilder_ == null) {
        updateMaskBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.protobuf.FieldMask,
                com.google.protobuf.FieldMask.Builder,
                com.google.protobuf.FieldMaskOrBuilder>(
                getUpdateMask(), getParentForChildren(), isClean());
        updateMask_ = null;
      }
      return updateMaskBuilder_;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.cloud.video.stitcher.v1.UpdateVodConfigRequest)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.video.stitcher.v1.UpdateVodConfigRequest)
  // Shared immutable singleton representing the empty message.
  private static final com.google.cloud.video.stitcher.v1.UpdateVodConfigRequest DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.video.stitcher.v1.UpdateVodConfigRequest();
  }
  public static com.google.cloud.video.stitcher.v1.UpdateVodConfigRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser that drives all parseFrom overloads; delegates to Builder.mergeFrom
  // and attaches the partially-built message to any parse exception.
  private static final com.google.protobuf.Parser<UpdateVodConfigRequest> PARSER =
      new com.google.protobuf.AbstractParser<UpdateVodConfigRequest>() {
        @java.lang.Override
        public UpdateVodConfigRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<UpdateVodConfigRequest> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<UpdateVodConfigRequest> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.video.stitcher.v1.UpdateVodConfigRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
google/j2objc | 35,932 | jre_emul/android/platform/libcore/ojluni/src/main/java/java/time/chrono/AbstractChronology.java | /*
* Copyright (c) 2012, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/*
* This file is available under and governed by the GNU General Public
* License version 2 only, as published by the Free Software Foundation.
* However, the following notice accompanied the original version of this
* file:
*
* Copyright (c) 2012, Stephen Colebourne & Michael Nascimento Santos
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* * Neither the name of JSR-310 nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package java.time.chrono;
import static java.time.temporal.ChronoField.ALIGNED_DAY_OF_WEEK_IN_MONTH;
import static java.time.temporal.ChronoField.ALIGNED_DAY_OF_WEEK_IN_YEAR;
import static java.time.temporal.ChronoField.ALIGNED_WEEK_OF_MONTH;
import static java.time.temporal.ChronoField.ALIGNED_WEEK_OF_YEAR;
import static java.time.temporal.ChronoField.DAY_OF_MONTH;
import static java.time.temporal.ChronoField.DAY_OF_WEEK;
import static java.time.temporal.ChronoField.DAY_OF_YEAR;
import static java.time.temporal.ChronoField.EPOCH_DAY;
import static java.time.temporal.ChronoField.ERA;
import static java.time.temporal.ChronoField.MONTH_OF_YEAR;
import static java.time.temporal.ChronoField.PROLEPTIC_MONTH;
import static java.time.temporal.ChronoField.YEAR;
import static java.time.temporal.ChronoField.YEAR_OF_ERA;
import static java.time.temporal.ChronoUnit.DAYS;
import static java.time.temporal.ChronoUnit.MONTHS;
import static java.time.temporal.ChronoUnit.WEEKS;
import static java.time.temporal.TemporalAdjusters.nextOrSame;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.io.InvalidObjectException;
import java.io.ObjectInputStream;
import java.io.ObjectStreamException;
import java.time.DateTimeException;
import java.time.DayOfWeek;
import java.time.format.ResolverStyle;
import java.time.temporal.ChronoField;
import java.time.temporal.TemporalAdjusters;
import java.time.temporal.TemporalField;
import java.time.temporal.ValueRange;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import java.util.ServiceLoader;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import sun.util.logging.PlatformLogger;
/**
* An abstract implementation of a calendar system, used to organize and identify dates.
* <p>
* The main date and time API is built on the ISO calendar system.
* The chronology operates behind the scenes to represent the general concept of a calendar system.
* <p>
* See {@link Chronology} for more details.
*
* @implSpec
* This class is separated from the {@code Chronology} interface so that the static methods
* are not inherited. While {@code Chronology} can be implemented directly, it is strongly
* recommended to extend this abstract class instead.
* <p>
* This class must be implemented with care to ensure other classes operate correctly.
* All implementations that can be instantiated must be final, immutable and thread-safe.
* Subclasses should be Serializable wherever possible.
*
* @since 1.8
*/
public abstract class AbstractChronology implements Chronology {
    /**
     * Map of available calendars by ID.
     * Entries are only ever added via {@code putIfAbsent} (see
     * {@code registerChrono}), so the first registration of an ID wins.
     */
    private static final ConcurrentHashMap<String, Chronology> CHRONOS_BY_ID = new ConcurrentHashMap<>();
    /**
     * Map of available calendars by calendar type.
     * Populated alongside {@code CHRONOS_BY_ID}, also first-registration-wins.
     */
    private static final ConcurrentHashMap<String, Chronology> CHRONOS_BY_TYPE = new ConcurrentHashMap<>();
/**
* Register a Chronology by its ID and type for lookup by {@link #of(String)}.
* Chronologies must not be registered until they are completely constructed.
* Specifically, not in the constructor of Chronology.
*
* @param chrono the chronology to register; not null
* @return the already registered Chronology if any, may be null
*/
static Chronology registerChrono(Chronology chrono) {
return registerChrono(chrono, chrono.getId());
}
/**
* Register a Chronology by ID and type for lookup by {@link #of(String)}.
* Chronos must not be registered until they are completely constructed.
* Specifically, not in the constructor of Chronology.
*
* @param chrono the chronology to register; not null
* @param id the ID to register the chronology; not null
* @return the already registered Chronology if any, may be null
*/
static Chronology registerChrono(Chronology chrono, String id) {
Chronology prev = CHRONOS_BY_ID.putIfAbsent(id, chrono);
if (prev == null) {
String type = chrono.getCalendarType();
if (type != null) {
CHRONOS_BY_TYPE.putIfAbsent(type, chrono);
}
}
return prev;
}
/**
* Initialization of the maps from id and type to Chronology.
* The ServiceLoader is used to find and register any implementations
* of {@link java.time.chrono.AbstractChronology} found in the bootclass loader.
* The built-in chronologies are registered explicitly.
* Calendars configured via the Thread's context classloader are local
* to that thread and are ignored.
* <p>
* The initialization is done only once using the registration
* of the IsoChronology as the test and the final step.
* Multiple threads may perform the initialization concurrently.
* Only the first registration of each Chronology is retained by the
* ConcurrentHashMap.
* @return true if the cache was initialized
*/
    private static boolean initCache() {
        // "ISO" is registered last (below), so its absence means initialization
        // has not yet completed in any thread.
        if (CHRONOS_BY_ID.get("ISO") == null) {
            // Initialization is incomplete
            // Register built-in Chronologies
            /* J2ObjC removed: Only "gregorian" and "julian" calendars are supported.
            registerChrono(HijrahChronology.INSTANCE);
            registerChrono(JapaneseChronology.INSTANCE);
            registerChrono(MinguoChronology.INSTANCE);
            registerChrono(ThaiBuddhistChronology.INSTANCE); */
            // Register Chronologies from the ServiceLoader.
            // A null class loader restricts the search to the bootstrap loader;
            // context-classloader providers are deliberately not cached (see class doc).
            @SuppressWarnings("rawtypes")
            ServiceLoader<AbstractChronology> loader = ServiceLoader.load(AbstractChronology.class, null);
            for (AbstractChronology chrono : loader) {
                String id = chrono.getId();
                if (id.equals("ISO") || registerChrono(chrono) != null) {
                    // Log the attempt to replace an existing Chronology
                    PlatformLogger logger = PlatformLogger.getLogger("java.time.chrono");
                    logger.warning("Ignoring duplicate Chronology, from ServiceLoader configuration " + id);
                }
            }
            // finally, register IsoChronology to mark initialization is complete
            registerChrono(IsoChronology.INSTANCE);
            return true;
        }
        return false;
    }
//-----------------------------------------------------------------------
/**
* Obtains an instance of {@code Chronology} from a locale.
* <p>
* See {@link Chronology#ofLocale(Locale)}.
*
* @param locale the locale to use to obtain the calendar system, not null
* @return the calendar system associated with the locale, not null
* @throws java.time.DateTimeException if the locale-specified calendar cannot be found
*/
static Chronology ofLocale(Locale locale) {
Objects.requireNonNull(locale, "locale");
String type = locale.getUnicodeLocaleType("ca");
if (type == null || "iso".equals(type) || "iso8601".equals(type)) {
return IsoChronology.INSTANCE;
}
// Not pre-defined; lookup by the type
do {
Chronology chrono = CHRONOS_BY_TYPE.get(type);
if (chrono != null) {
return chrono;
}
// If not found, do the initialization (once) and repeat the lookup
} while (initCache());
// Look for a Chronology using ServiceLoader of the Thread's ContextClassLoader
// Application provided Chronologies must not be cached
@SuppressWarnings("rawtypes")
ServiceLoader<Chronology> loader = ServiceLoader.load(Chronology.class);
for (Chronology chrono : loader) {
if (type.equals(chrono.getCalendarType())) {
return chrono;
}
}
throw new DateTimeException("Unknown calendar system: " + type);
}
//-----------------------------------------------------------------------
/**
* Obtains an instance of {@code Chronology} from a chronology ID or
* calendar system type.
* <p>
* See {@link Chronology#of(String)}.
*
* @param id the chronology ID or calendar system type, not null
* @return the chronology with the identifier requested, not null
* @throws java.time.DateTimeException if the chronology cannot be found
*/
static Chronology of(String id) {
Objects.requireNonNull(id, "id");
do {
Chronology chrono = of0(id);
if (chrono != null) {
return chrono;
}
// If not found, do the initialization (once) and repeat the lookup
} while (initCache());
// Look for a Chronology using ServiceLoader of the Thread's ContextClassLoader
// Application provided Chronologies must not be cached
@SuppressWarnings("rawtypes")
ServiceLoader<Chronology> loader = ServiceLoader.load(Chronology.class);
for (Chronology chrono : loader) {
if (id.equals(chrono.getId()) || id.equals(chrono.getCalendarType())) {
return chrono;
}
}
throw new DateTimeException("Unknown chronology: " + id);
}
/**
* Obtains an instance of {@code Chronology} from a chronology ID or
* calendar system type.
*
* @param id the chronology ID or calendar system type, not null
* @return the chronology with the identifier requested, or {@code null} if not found
*/
private static Chronology of0(String id) {
Chronology chrono = CHRONOS_BY_ID.get(id);
if (chrono == null) {
chrono = CHRONOS_BY_TYPE.get(id);
}
return chrono;
}
/**
* Returns the available chronologies.
* <p>
* Each returned {@code Chronology} is available for use in the system.
* The set of chronologies includes the system chronologies and
* any chronologies provided by the application via ServiceLoader
* configuration.
*
* @return the independent, modifiable set of the available chronology IDs, not null
*/
static Set<Chronology> getAvailableChronologies() {
initCache(); // force initialization
HashSet<Chronology> chronos = new HashSet<>(CHRONOS_BY_ID.values());
/// Add in Chronologies from the ServiceLoader configuration
@SuppressWarnings("rawtypes")
ServiceLoader<Chronology> loader = ServiceLoader.load(Chronology.class);
for (Chronology chrono : loader) {
chronos.add(chrono);
}
return chronos;
}
//-----------------------------------------------------------------------
/**
* Creates an instance.
*/
    protected AbstractChronology() {
        // No instance state to initialize; concrete chronologies register
        // themselves via registerChrono after construction (never from within
        // a constructor - see the registerChrono documentation).
    }
//-----------------------------------------------------------------------
/**
* Resolves parsed {@code ChronoField} values into a date during parsing.
* <p>
* Most {@code TemporalField} implementations are resolved using the
* resolve method on the field. By contrast, the {@code ChronoField} class
* defines fields that only have meaning relative to the chronology.
* As such, {@code ChronoField} date fields are resolved here in the
* context of a specific chronology.
* <p>
* {@code ChronoField} instances are resolved by this method, which may
* be overridden in subclasses.
* <ul>
* <li>{@code EPOCH_DAY} - If present, this is converted to a date and
* all other date fields are then cross-checked against the date.
* <li>{@code PROLEPTIC_MONTH} - If present, then it is split into the
* {@code YEAR} and {@code MONTH_OF_YEAR}. If the mode is strict or smart
* then the field is validated.
* <li>{@code YEAR_OF_ERA} and {@code ERA} - If both are present, then they
* are combined to form a {@code YEAR}. In lenient mode, the {@code YEAR_OF_ERA}
* range is not validated, in smart and strict mode it is. The {@code ERA} is
* validated for range in all three modes. If only the {@code YEAR_OF_ERA} is
* present, and the mode is smart or lenient, then the last available era
* is assumed. In strict mode, no era is assumed and the {@code YEAR_OF_ERA} is
* left untouched. If only the {@code ERA} is present, then it is left untouched.
* <li>{@code YEAR}, {@code MONTH_OF_YEAR} and {@code DAY_OF_MONTH} -
* If all three are present, then they are combined to form a date.
* In all three modes, the {@code YEAR} is validated.
* If the mode is smart or strict, then the month and day are validated.
* If the mode is lenient, then the date is combined in a manner equivalent to
* creating a date on the first day of the first month in the requested year,
* then adding the difference in months, then the difference in days.
* If the mode is smart, and the day-of-month is greater than the maximum for
* the year-month, then the day-of-month is adjusted to the last day-of-month.
* If the mode is strict, then the three fields must form a valid date.
* <li>{@code YEAR} and {@code DAY_OF_YEAR} -
* If both are present, then they are combined to form a date.
* In all three modes, the {@code YEAR} is validated.
* If the mode is lenient, then the date is combined in a manner equivalent to
* creating a date on the first day of the requested year, then adding
* the difference in days.
* If the mode is smart or strict, then the two fields must form a valid date.
* <li>{@code YEAR}, {@code MONTH_OF_YEAR}, {@code ALIGNED_WEEK_OF_MONTH} and
* {@code ALIGNED_DAY_OF_WEEK_IN_MONTH} -
* If all four are present, then they are combined to form a date.
* In all three modes, the {@code YEAR} is validated.
* If the mode is lenient, then the date is combined in a manner equivalent to
* creating a date on the first day of the first month in the requested year, then adding
* the difference in months, then the difference in weeks, then in days.
* If the mode is smart or strict, then the all four fields are validated to
* their outer ranges. The date is then combined in a manner equivalent to
* creating a date on the first day of the requested year and month, then adding
* the amount in weeks and days to reach their values. If the mode is strict,
* the date is additionally validated to check that the day and week adjustment
* did not change the month.
* <li>{@code YEAR}, {@code MONTH_OF_YEAR}, {@code ALIGNED_WEEK_OF_MONTH} and
* {@code DAY_OF_WEEK} - If all four are present, then they are combined to
* form a date. The approach is the same as described above for
* years, months and weeks in {@code ALIGNED_DAY_OF_WEEK_IN_MONTH}.
* The day-of-week is adjusted as the next or same matching day-of-week once
* the years, months and weeks have been handled.
* <li>{@code YEAR}, {@code ALIGNED_WEEK_OF_YEAR} and {@code ALIGNED_DAY_OF_WEEK_IN_YEAR} -
* If all three are present, then they are combined to form a date.
* In all three modes, the {@code YEAR} is validated.
* If the mode is lenient, then the date is combined in a manner equivalent to
* creating a date on the first day of the requested year, then adding
* the difference in weeks, then in days.
* If the mode is smart or strict, then the all three fields are validated to
* their outer ranges. The date is then combined in a manner equivalent to
* creating a date on the first day of the requested year, then adding
* the amount in weeks and days to reach their values. If the mode is strict,
* the date is additionally validated to check that the day and week adjustment
* did not change the year.
* <li>{@code YEAR}, {@code ALIGNED_WEEK_OF_YEAR} and {@code DAY_OF_WEEK} -
* If all three are present, then they are combined to form a date.
* The approach is the same as described above for years and weeks in
* {@code ALIGNED_DAY_OF_WEEK_IN_YEAR}. The day-of-week is adjusted as the
* next or same matching day-of-week once the years and weeks have been handled.
* </ul>
* <p>
* The default implementation is suitable for most calendar systems.
* If {@link java.time.temporal.ChronoField#YEAR_OF_ERA} is found without an {@link java.time.temporal.ChronoField#ERA}
* then the last era in {@link #eras()} is used.
* The implementation assumes a 7 day week, that the first day-of-month
* has the value 1, that first day-of-year has the value 1, and that the
* first of the month and year always exists.
*
* @param fieldValues the map of fields to values, which can be updated, not null
* @param resolverStyle the requested type of resolve, not null
* @return the resolved date, null if insufficient information to create a date
* @throws java.time.DateTimeException if the date cannot be resolved, typically
* because of a conflict in the input data
*/
    @Override
    public ChronoLocalDate resolveDate(Map<TemporalField, Long> fieldValues, ResolverStyle resolverStyle) {
        // check epoch-day before inventing era
        if (fieldValues.containsKey(EPOCH_DAY)) {
            return dateEpochDay(fieldValues.remove(EPOCH_DAY));
        }
        // fix proleptic month before inventing era
        resolveProlepticMonth(fieldValues, resolverStyle);
        // invent era if necessary to resolve year-of-era
        ChronoLocalDate resolved = resolveYearOfEra(fieldValues, resolverStyle);
        if (resolved != null) {
            return resolved;
        }
        // build date: each branch consumes its fields and delegates to a
        // resolveXxx helper; the first complete field combination wins
        if (fieldValues.containsKey(YEAR)) {
            if (fieldValues.containsKey(MONTH_OF_YEAR)) {
                if (fieldValues.containsKey(DAY_OF_MONTH)) {
                    // year + month + day-of-month
                    return resolveYMD(fieldValues, resolverStyle);
                }
                if (fieldValues.containsKey(ALIGNED_WEEK_OF_MONTH)) {
                    if (fieldValues.containsKey(ALIGNED_DAY_OF_WEEK_IN_MONTH)) {
                        // year + month + aligned-week + aligned-day-of-week
                        return resolveYMAA(fieldValues, resolverStyle);
                    }
                    if (fieldValues.containsKey(DAY_OF_WEEK)) {
                        // year + month + aligned-week + day-of-week
                        return resolveYMAD(fieldValues, resolverStyle);
                    }
                }
            }
            if (fieldValues.containsKey(DAY_OF_YEAR)) {
                // year + day-of-year
                return resolveYD(fieldValues, resolverStyle);
            }
            if (fieldValues.containsKey(ALIGNED_WEEK_OF_YEAR)) {
                if (fieldValues.containsKey(ALIGNED_DAY_OF_WEEK_IN_YEAR)) {
                    // year + aligned-week-of-year + aligned-day-of-week-in-year
                    return resolveYAA(fieldValues, resolverStyle);
                }
                if (fieldValues.containsKey(DAY_OF_WEEK)) {
                    // year + aligned-week-of-year + day-of-week
                    return resolveYAD(fieldValues, resolverStyle);
                }
            }
        }
        // insufficient information to form a date
        return null;
    }
void resolveProlepticMonth(Map<TemporalField, Long> fieldValues, ResolverStyle resolverStyle) {
Long pMonth = fieldValues.remove(PROLEPTIC_MONTH);
if (pMonth != null) {
if (resolverStyle != ResolverStyle.LENIENT) {
PROLEPTIC_MONTH.checkValidValue(pMonth);
}
// first day-of-month is likely to be safest for setting proleptic-month
// cannot add to year zero, as not all chronologies have a year zero
ChronoLocalDate chronoDate = dateNow()
.with(DAY_OF_MONTH, 1).with(PROLEPTIC_MONTH, pMonth);
addFieldValue(fieldValues, MONTH_OF_YEAR, chronoDate.get(MONTH_OF_YEAR));
addFieldValue(fieldValues, YEAR, chronoDate.get(YEAR));
}
}
    ChronoLocalDate resolveYearOfEra(Map<TemporalField, Long> fieldValues, ResolverStyle resolverStyle) {
        Long yoeLong = fieldValues.remove(YEAR_OF_ERA);
        if (yoeLong != null) {
            Long eraLong = fieldValues.remove(ERA);
            int yoe;
            if (resolverStyle != ResolverStyle.LENIENT) {
                // smart/strict validate year-of-era against the chronology's range
                yoe = range(YEAR_OF_ERA).checkValidIntValue(yoeLong, YEAR_OF_ERA);
            } else {
                // lenient only guards against int overflow
                yoe = Math.toIntExact(yoeLong);
            }
            if (eraLong != null) {
                // era + year-of-era combine directly into a proleptic YEAR;
                // the era value is range-checked in all three modes
                Era eraObj = eraOf(range(ERA).checkValidIntValue(eraLong, ERA));
                addFieldValue(fieldValues, YEAR, prolepticYear(eraObj, yoe));
            } else {
                if (fieldValues.containsKey(YEAR)) {
                    // no era, but YEAR present: borrow the era of that year so
                    // addFieldValue can cross-check year-of-era against YEAR
                    int year = range(YEAR).checkValidIntValue(fieldValues.get(YEAR), YEAR);
                    ChronoLocalDate chronoDate = dateYearDay(year, 1);
                    addFieldValue(fieldValues, YEAR, prolepticYear(chronoDate.getEra(), yoe));
                } else if (resolverStyle == ResolverStyle.STRICT) {
                    // do not invent era if strict
                    // reinstate the field removed earlier, no cross-check issues
                    fieldValues.put(YEAR_OF_ERA, yoeLong);
                } else {
                    // smart/lenient: assume the last era in eras(), if any
                    List<Era> eras = eras();
                    if (eras.isEmpty()) {
                        addFieldValue(fieldValues, YEAR, yoe);
                    } else {
                        Era eraObj = eras.get(eras.size() - 1);
                        addFieldValue(fieldValues, YEAR, prolepticYear(eraObj, yoe));
                    }
                }
            }
        } else if (fieldValues.containsKey(ERA)) {
            range(ERA).checkValidValue(fieldValues.get(ERA), ERA); // always validated
        }
        // never produces a date itself; only rewrites entries in the map
        return null;
    }
ChronoLocalDate resolveYMD(Map<TemporalField, Long> fieldValues, ResolverStyle resolverStyle) {
int y = range(YEAR).checkValidIntValue(fieldValues.remove(YEAR), YEAR);
if (resolverStyle == ResolverStyle.LENIENT) {
long months = Math.subtractExact(fieldValues.remove(MONTH_OF_YEAR), 1);
long days = Math.subtractExact(fieldValues.remove(DAY_OF_MONTH), 1);
return date(y, 1, 1).plus(months, MONTHS).plus(days, DAYS);
}
int moy = range(MONTH_OF_YEAR).checkValidIntValue(fieldValues.remove(MONTH_OF_YEAR), MONTH_OF_YEAR);
ValueRange domRange = range(DAY_OF_MONTH);
int dom = domRange.checkValidIntValue(fieldValues.remove(DAY_OF_MONTH), DAY_OF_MONTH);
if (resolverStyle == ResolverStyle.SMART) { // previous valid
try {
return date(y, moy, dom);
} catch (DateTimeException ex) {
return date(y, moy, 1).with(TemporalAdjusters.lastDayOfMonth());
}
}
return date(y, moy, dom);
}
ChronoLocalDate resolveYD(Map<TemporalField, Long> fieldValues, ResolverStyle resolverStyle) {
int y = range(YEAR).checkValidIntValue(fieldValues.remove(YEAR), YEAR);
if (resolverStyle == ResolverStyle.LENIENT) {
long days = Math.subtractExact(fieldValues.remove(DAY_OF_YEAR), 1);
return dateYearDay(y, 1).plus(days, DAYS);
}
int doy = range(DAY_OF_YEAR).checkValidIntValue(fieldValues.remove(DAY_OF_YEAR), DAY_OF_YEAR);
return dateYearDay(y, doy); // smart is same as strict
}
ChronoLocalDate resolveYMAA(Map<TemporalField, Long> fieldValues, ResolverStyle resolverStyle) {
int y = range(YEAR).checkValidIntValue(fieldValues.remove(YEAR), YEAR);
if (resolverStyle == ResolverStyle.LENIENT) {
long months = Math.subtractExact(fieldValues.remove(MONTH_OF_YEAR), 1);
long weeks = Math.subtractExact(fieldValues.remove(ALIGNED_WEEK_OF_MONTH), 1);
long days = Math.subtractExact(fieldValues.remove(ALIGNED_DAY_OF_WEEK_IN_MONTH), 1);
return date(y, 1, 1).plus(months, MONTHS).plus(weeks, WEEKS).plus(days, DAYS);
}
int moy = range(MONTH_OF_YEAR).checkValidIntValue(fieldValues.remove(MONTH_OF_YEAR), MONTH_OF_YEAR);
int aw = range(ALIGNED_WEEK_OF_MONTH).checkValidIntValue(fieldValues.remove(ALIGNED_WEEK_OF_MONTH), ALIGNED_WEEK_OF_MONTH);
int ad = range(ALIGNED_DAY_OF_WEEK_IN_MONTH).checkValidIntValue(fieldValues.remove(ALIGNED_DAY_OF_WEEK_IN_MONTH), ALIGNED_DAY_OF_WEEK_IN_MONTH);
ChronoLocalDate date = date(y, moy, 1).plus((aw - 1) * 7 + (ad - 1), DAYS);
if (resolverStyle == ResolverStyle.STRICT && date.get(MONTH_OF_YEAR) != moy) {
throw new DateTimeException("Strict mode rejected resolved date as it is in a different month");
}
return date;
}
ChronoLocalDate resolveYMAD(Map<TemporalField, Long> fieldValues, ResolverStyle resolverStyle) {
int y = range(YEAR).checkValidIntValue(fieldValues.remove(YEAR), YEAR);
if (resolverStyle == ResolverStyle.LENIENT) {
long months = Math.subtractExact(fieldValues.remove(MONTH_OF_YEAR), 1);
long weeks = Math.subtractExact(fieldValues.remove(ALIGNED_WEEK_OF_MONTH), 1);
long dow = Math.subtractExact(fieldValues.remove(DAY_OF_WEEK), 1);
return resolveAligned(date(y, 1, 1), months, weeks, dow);
}
int moy = range(MONTH_OF_YEAR).checkValidIntValue(fieldValues.remove(MONTH_OF_YEAR), MONTH_OF_YEAR);
int aw = range(ALIGNED_WEEK_OF_MONTH).checkValidIntValue(fieldValues.remove(ALIGNED_WEEK_OF_MONTH), ALIGNED_WEEK_OF_MONTH);
int dow = range(DAY_OF_WEEK).checkValidIntValue(fieldValues.remove(DAY_OF_WEEK), DAY_OF_WEEK);
ChronoLocalDate date = date(y, moy, 1).plus((aw - 1) * 7, DAYS).with(nextOrSame(DayOfWeek.of(dow)));
if (resolverStyle == ResolverStyle.STRICT && date.get(MONTH_OF_YEAR) != moy) {
throw new DateTimeException("Strict mode rejected resolved date as it is in a different month");
}
return date;
}
ChronoLocalDate resolveYAA(Map<TemporalField, Long> fieldValues, ResolverStyle resolverStyle) {
int y = range(YEAR).checkValidIntValue(fieldValues.remove(YEAR), YEAR);
if (resolverStyle == ResolverStyle.LENIENT) {
long weeks = Math.subtractExact(fieldValues.remove(ALIGNED_WEEK_OF_YEAR), 1);
long days = Math.subtractExact(fieldValues.remove(ALIGNED_DAY_OF_WEEK_IN_YEAR), 1);
return dateYearDay(y, 1).plus(weeks, WEEKS).plus(days, DAYS);
}
int aw = range(ALIGNED_WEEK_OF_YEAR).checkValidIntValue(fieldValues.remove(ALIGNED_WEEK_OF_YEAR), ALIGNED_WEEK_OF_YEAR);
int ad = range(ALIGNED_DAY_OF_WEEK_IN_YEAR).checkValidIntValue(fieldValues.remove(ALIGNED_DAY_OF_WEEK_IN_YEAR), ALIGNED_DAY_OF_WEEK_IN_YEAR);
ChronoLocalDate date = dateYearDay(y, 1).plus((aw - 1) * 7 + (ad - 1), DAYS);
if (resolverStyle == ResolverStyle.STRICT && date.get(YEAR) != y) {
throw new DateTimeException("Strict mode rejected resolved date as it is in a different year");
}
return date;
}
ChronoLocalDate resolveYAD(Map<TemporalField, Long> fieldValues, ResolverStyle resolverStyle) {
int y = range(YEAR).checkValidIntValue(fieldValues.remove(YEAR), YEAR);
if (resolverStyle == ResolverStyle.LENIENT) {
long weeks = Math.subtractExact(fieldValues.remove(ALIGNED_WEEK_OF_YEAR), 1);
long dow = Math.subtractExact(fieldValues.remove(DAY_OF_WEEK), 1);
return resolveAligned(dateYearDay(y, 1), 0, weeks, dow);
}
int aw = range(ALIGNED_WEEK_OF_YEAR).checkValidIntValue(fieldValues.remove(ALIGNED_WEEK_OF_YEAR), ALIGNED_WEEK_OF_YEAR);
int dow = range(DAY_OF_WEEK).checkValidIntValue(fieldValues.remove(DAY_OF_WEEK), DAY_OF_WEEK);
ChronoLocalDate date = dateYearDay(y, 1).plus((aw - 1) * 7, DAYS).with(nextOrSame(DayOfWeek.of(dow)));
if (resolverStyle == ResolverStyle.STRICT && date.get(YEAR) != y) {
throw new DateTimeException("Strict mode rejected resolved date as it is in a different year");
}
return date;
}
ChronoLocalDate resolveAligned(ChronoLocalDate base, long months, long weeks, long dow) {
ChronoLocalDate date = base.plus(months, MONTHS).plus(weeks, WEEKS);
if (dow > 7) {
date = date.plus((dow - 1) / 7, WEEKS);
dow = ((dow - 1) % 7) + 1;
} else if (dow < 1) {
date = date.plus(Math.subtractExact(dow, 7) / 7, WEEKS);
dow = ((dow + 6) % 7) + 1;
}
return date.with(nextOrSame(DayOfWeek.of((int) dow)));
}
/**
* Adds a field-value pair to the map, checking for conflicts.
* <p>
* If the field is not already present, then the field-value pair is added to the map.
* If the field is already present and it has the same value as that specified, no action occurs.
* If the field is already present and it has a different value to that specified, then
* an exception is thrown.
*
* @param field the field to add, not null
* @param value the value to add, not null
* @throws java.time.DateTimeException if the field is already present with a different value
*/
void addFieldValue(Map<TemporalField, Long> fieldValues, ChronoField field, long value) {
Long old = fieldValues.get(field); // check first for better error message
if (old != null && old.longValue() != value) {
throw new DateTimeException("Conflict found: " + field + " " + old + " differs from " + field + " " + value);
}
fieldValues.put(field, value);
}
//-----------------------------------------------------------------------
/**
* Compares this chronology to another chronology.
* <p>
* The comparison order first by the chronology ID string, then by any
* additional information specific to the subclass.
* It is "consistent with equals", as defined by {@link Comparable}.
*
* @implSpec
* This implementation compares the chronology ID.
* Subclasses must compare any additional state that they store.
*
* @param other the other chronology to compare to, not null
* @return the comparator value, negative if less, positive if greater
*/
@Override
public int compareTo(Chronology other) {
return getId().compareTo(other.getId());
}
/**
* Checks if this chronology is equal to another chronology.
* <p>
* The comparison is based on the entire state of the object.
*
* @implSpec
* This implementation checks the type and calls
* {@link #compareTo(java.time.chrono.Chronology)}.
*
* @param obj the object to check, null returns false
* @return true if this is equal to the other chronology
*/
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj instanceof AbstractChronology) {
return compareTo((AbstractChronology) obj) == 0;
}
return false;
}
/**
* A hash code for this chronology.
* <p>
* The hash code should be based on the entire state of the object.
*
* @implSpec
* This implementation is based on the chronology ID and class.
* Subclasses should add any additional state that they store.
*
* @return a suitable hash code
*/
@Override
public int hashCode() {
return getClass().hashCode() ^ getId().hashCode();
}
//-----------------------------------------------------------------------
/**
* Outputs this chronology as a {@code String}, using the chronology ID.
*
* @return a string representation of this chronology, not null
*/
@Override
public String toString() {
return getId();
}
//-----------------------------------------------------------------------
/**
* Writes the Chronology using a
* <a href="../../../serialized-form.html#java.time.chrono.Ser">dedicated serialized form</a>.
* <pre>
* out.writeByte(1); // identifies this as a Chronology
* out.writeUTF(getId());
* </pre>
*
* @return the instance of {@code Ser}, not null
*/
    Object writeReplace() {
        // Serialization proxy: a Ser wrapper tagged CHRONO_TYPE replaces this
        // instance in the stream, writing only the chronology ID (see Javadoc).
        return new Ser(Ser.CHRONO_TYPE, this);
    }
/**
* Defend against malicious streams.
*
* @param s the stream to read
* @throws java.io.InvalidObjectException always
*/
    private void readObject(ObjectInputStream s) throws ObjectStreamException {
        // All deserialization must flow through the Ser proxy (see writeReplace);
        // direct stream construction is rejected unconditionally.
        throw new InvalidObjectException("Deserialization via serialization delegate");
    }
    void writeExternal(DataOutput out) throws IOException {
        // Only the ID is written; readExternal resolves it back via Chronology.of.
        out.writeUTF(getId());
    }
static Chronology readExternal(DataInput in) throws IOException {
String id = in.readUTF();
return Chronology.of(id);
}
}
|
googleapis/google-cloud-java | 35,582 | java-shopping-merchant-accounts/proto-google-shopping-merchant-accounts-v1beta/src/main/java/com/google/shopping/merchant/accounts/v1beta/UriSettings.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/shopping/merchant/accounts/v1beta/checkoutsettings.proto
// Protobuf Java Version: 3.25.8
package com.google.shopping.merchant.accounts.v1beta;
/**
*
*
* <pre>
* URL settings for cart or checkout URL.
* </pre>
*
* Protobuf type {@code google.shopping.merchant.accounts.v1beta.UriSettings}
*/
public final class UriSettings extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.shopping.merchant.accounts.v1beta.UriSettings)
UriSettingsOrBuilder {
private static final long serialVersionUID = 0L;
// Use UriSettings.newBuilder() to construct.
  private UriSettings(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    // Delegates all field plumbing to the generated-message base class.
    super(builder);
  }
  // No-arg constructor used by newInstance(); the oneof starts unset
  // (uriTemplateCase_ == 0), so no field initialization is needed.
  private UriSettings() {}
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    // Protobuf runtime hook for creating fresh, empty instances.
    return new UriSettings();
  }
  // Returns the proto descriptor for this message type, generated from
  // checkoutsettings.proto.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.shopping.merchant.accounts.v1beta.CheckoutsettingsProto
        .internal_static_google_shopping_merchant_accounts_v1beta_UriSettings_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    // Binds the descriptor's fields to this class and its Builder for
    // reflection-based access by the protobuf runtime.
    return com.google.shopping.merchant.accounts.v1beta.CheckoutsettingsProto
        .internal_static_google_shopping_merchant_accounts_v1beta_UriSettings_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.shopping.merchant.accounts.v1beta.UriSettings.class,
            com.google.shopping.merchant.accounts.v1beta.UriSettings.Builder.class);
  }
  // Field number of the populated uri_template oneof member; 0 means unset.
  private int uriTemplateCase_ = 0;
  // Active oneof value: a String, or a ByteString awaiting lazy UTF-8 decode.
  @SuppressWarnings("serial")
  private java.lang.Object uriTemplate_;
  // Discriminator for the uri_template oneof: at most one of
  // checkout_uri_template (field 1) or cart_uri_template (field 2) is set;
  // URITEMPLATE_NOT_SET (0) means neither.
  public enum UriTemplateCase
      implements
          com.google.protobuf.Internal.EnumLite,
          com.google.protobuf.AbstractMessage.InternalOneOfEnum {
    CHECKOUT_URI_TEMPLATE(1),
    CART_URI_TEMPLATE(2),
    URITEMPLATE_NOT_SET(0);
    // Wire field number backing this case, or 0 for the unset sentinel.
    private final int value;
    private UriTemplateCase(int value) {
      this.value = value;
    }
    /**
     * @param value The number of the enum to look for.
     * @return The enum associated with the given number.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static UriTemplateCase valueOf(int value) {
      return forNumber(value);
    }
    // Maps a field number to its case; returns null for unrecognised numbers.
    public static UriTemplateCase forNumber(int value) {
      switch (value) {
        case 1:
          return CHECKOUT_URI_TEMPLATE;
        case 2:
          return CART_URI_TEMPLATE;
        case 0:
          return URITEMPLATE_NOT_SET;
        default:
          return null;
      }
    }
    public int getNumber() {
      return this.value;
    }
  };
  public UriTemplateCase getUriTemplateCase() {
    // Exposes which oneof member (if any) is currently populated.
    return UriTemplateCase.forNumber(uriTemplateCase_);
  }
  // Wire field number for checkout_uri_template within the uri_template oneof.
  public static final int CHECKOUT_URI_TEMPLATE_FIELD_NUMBER = 1;
/**
*
*
* <pre>
* Checkout URL template. When the placeholders are expanded will redirect
* the buyer to the merchant checkout page with the item in the cart. For
* more details, check the [help center
* doc](https://support.google.com/merchants/answer/13945960#method1&zippy=%2Cproduct-level-url-formatting%2Caccount-level-url-formatting)
* </pre>
*
* <code>string checkout_uri_template = 1;</code>
*
* @return Whether the checkoutUriTemplate field is set.
*/
  public boolean hasCheckoutUriTemplate() {
    // The oneof case number doubles as the "is set" flag for field 1.
    return uriTemplateCase_ == 1;
  }
/**
*
*
* <pre>
* Checkout URL template. When the placeholders are expanded will redirect
* the buyer to the merchant checkout page with the item in the cart. For
* more details, check the [help center
* doc](https://support.google.com/merchants/answer/13945960#method1&zippy=%2Cproduct-level-url-formatting%2Caccount-level-url-formatting)
* </pre>
*
* <code>string checkout_uri_template = 1;</code>
*
* @return The checkoutUriTemplate.
*/
  public java.lang.String getCheckoutUriTemplate() {
    // Default to "" when this oneof member is not the active one.
    java.lang.Object ref = "";
    if (uriTemplateCase_ == 1) {
      ref = uriTemplate_;
    }
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Value is still a ByteString from parsing: decode once and cache the
      // String back into the oneof slot (only if the case still points here).
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      if (uriTemplateCase_ == 1) {
        uriTemplate_ = s;
      }
      return s;
    }
  }
/**
*
*
* <pre>
* Checkout URL template. When the placeholders are expanded will redirect
* the buyer to the merchant checkout page with the item in the cart. For
* more details, check the [help center
* doc](https://support.google.com/merchants/answer/13945960#method1&zippy=%2Cproduct-level-url-formatting%2Caccount-level-url-formatting)
* </pre>
*
* <code>string checkout_uri_template = 1;</code>
*
* @return The bytes for checkoutUriTemplate.
*/
  public com.google.protobuf.ByteString getCheckoutUriTemplateBytes() {
    // Default to "" when this oneof member is not the active one.
    java.lang.Object ref = "";
    if (uriTemplateCase_ == 1) {
      ref = uriTemplate_;
    }
    if (ref instanceof java.lang.String) {
      // Encode the cached String and store the ByteString back into the oneof
      // slot (only while the case still points at this field).
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      if (uriTemplateCase_ == 1) {
        uriTemplate_ = b;
      }
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Wire field number for cart_uri_template within the uri_template oneof.
  public static final int CART_URI_TEMPLATE_FIELD_NUMBER = 2;
/**
*
*
* <pre>
* Cart URL template. When the placeholders are expanded will redirect the
* buyer to the cart page on the merchant website with the selected
* item in cart. For more details, check the [help center
* doc](https://support.google.com/merchants/answer/13945960#method1&zippy=%2Cproduct-level-url-formatting%2Caccount-level-url-formatting)
* </pre>
*
* <code>string cart_uri_template = 2;</code>
*
* @return Whether the cartUriTemplate field is set.
*/
  public boolean hasCartUriTemplate() {
    // The oneof case number doubles as the "is set" flag for field 2.
    return uriTemplateCase_ == 2;
  }
  /**
   *
   *
   * <pre>
   * Cart URL template. When the placeholders are expanded will redirect the
   * buyer to the cart page on the merchant website with the selected
   * item in cart. For more details, check the [help center
   * doc](https://support.google.com/merchants/answer/13945960#method1&zippy=%2Cproduct-level-url-formatting%2Caccount-level-url-formatting)
   * </pre>
   *
   * <code>string cart_uri_template = 2;</code>
   *
   * @return The cartUriTemplate.
   */
  public java.lang.String getCartUriTemplate() {
    // Read the shared oneof slot only when this field's case (2) is active;
    // otherwise return the empty-string default.
    java.lang.Object ref = "";
    if (uriTemplateCase_ == 2) {
      ref = uriTemplate_;
    }
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Slot currently holds the wire-format bytes; decode and cache the
      // String form for subsequent calls.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      if (uriTemplateCase_ == 2) {
        uriTemplate_ = s;
      }
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Cart URL template. When the placeholders are expanded will redirect the
   * buyer to the cart page on the merchant website with the selected
   * item in cart. For more details, check the [help center
   * doc](https://support.google.com/merchants/answer/13945960#method1&zippy=%2Cproduct-level-url-formatting%2Caccount-level-url-formatting)
   * </pre>
   *
   * <code>string cart_uri_template = 2;</code>
   *
   * @return The bytes for cartUriTemplate.
   */
  public com.google.protobuf.ByteString getCartUriTemplateBytes() {
    // Read the shared oneof slot only when this field's case (2) is active.
    java.lang.Object ref = "";
    if (uriTemplateCase_ == 2) {
      ref = uriTemplate_;
    }
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      // Cache the UTF-8 encoding back into the oneof slot.
      if (uriTemplateCase_ == 2) {
        uriTemplate_ = b;
      }
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Memoized result of isInitialized(): -1 = not computed, 1 = initialized.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // This message declares no required fields, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Serialize whichever oneof member is active (field 1 or 2), then any
    // unknown fields preserved from parsing.
    if (uriTemplateCase_ == 1) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, uriTemplate_);
    }
    if (uriTemplateCase_ == 2) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, uriTemplate_);
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // Return the memoized size when available (memoizedSize is inherited from
    // the generated message base class).
    int size = memoizedSize;
    if (size != -1) return size;

    // Only the active oneof member contributes to the encoded size.
    size = 0;
    if (uriTemplateCase_ == 1) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, uriTemplate_);
    }
    if (uriTemplateCase_ == 2) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, uriTemplate_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.shopping.merchant.accounts.v1beta.UriSettings)) {
      return super.equals(obj);
    }
    com.google.shopping.merchant.accounts.v1beta.UriSettings other =
        (com.google.shopping.merchant.accounts.v1beta.UriSettings) obj;

    // Messages differ if a different oneof member is set; otherwise compare
    // only the active member's value.
    if (!getUriTemplateCase().equals(other.getUriTemplateCase())) return false;
    switch (uriTemplateCase_) {
      case 1:
        if (!getCheckoutUriTemplate().equals(other.getCheckoutUriTemplate())) return false;
        break;
      case 2:
        if (!getCartUriTemplate().equals(other.getCartUriTemplate())) return false;
        break;
      case 0:
      default:
    }
    // Unknown fields participate in equality too.
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    // Hash mixes the descriptor, the active oneof member (keyed by its field
    // number), and the unknown fields — consistent with equals() above.
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    switch (uriTemplateCase_) {
      case 1:
        hash = (37 * hash) + CHECKOUT_URI_TEMPLATE_FIELD_NUMBER;
        hash = (53 * hash) + getCheckoutUriTemplate().hashCode();
        break;
      case 2:
        hash = (37 * hash) + CART_URI_TEMPLATE_FIELD_NUMBER;
        hash = (53 * hash) + getCartUriTemplate().hashCode();
        break;
      case 0:
      default:
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points. All overloads delegate to the
  // singleton PARSER (or the GeneratedMessageV3 stream helpers, which wrap
  // I/O failures appropriately).
  public static com.google.shopping.merchant.accounts.v1beta.UriSettings parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.shopping.merchant.accounts.v1beta.UriSettings parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.shopping.merchant.accounts.v1beta.UriSettings parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.shopping.merchant.accounts.v1beta.UriSettings parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.shopping.merchant.accounts.v1beta.UriSettings parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.shopping.merchant.accounts.v1beta.UriSettings parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.shopping.merchant.accounts.v1beta.UriSettings parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.shopping.merchant.accounts.v1beta.UriSettings parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // Delimited variants read a varint length prefix before the message bytes.
  public static com.google.shopping.merchant.accounts.v1beta.UriSettings parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.shopping.merchant.accounts.v1beta.UriSettings parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.shopping.merchant.accounts.v1beta.UriSettings parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.shopping.merchant.accounts.v1beta.UriSettings parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  // Builders are created from the default instance so they share the
  // immutable default state until modified.
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.shopping.merchant.accounts.v1beta.UriSettings prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    // Avoid a needless mergeFrom when this is the (empty) default instance.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * URL settings for cart or checkout URL.
   * </pre>
   *
   * Protobuf type {@code google.shopping.merchant.accounts.v1beta.UriSettings}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.shopping.merchant.accounts.v1beta.UriSettings)
      com.google.shopping.merchant.accounts.v1beta.UriSettingsOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.shopping.merchant.accounts.v1beta.CheckoutsettingsProto
          .internal_static_google_shopping_merchant_accounts_v1beta_UriSettings_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.shopping.merchant.accounts.v1beta.CheckoutsettingsProto
          .internal_static_google_shopping_merchant_accounts_v1beta_UriSettings_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.shopping.merchant.accounts.v1beta.UriSettings.class,
              com.google.shopping.merchant.accounts.v1beta.UriSettings.Builder.class);
    }

    // Construct using com.google.shopping.merchant.accounts.v1beta.UriSettings.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    @java.lang.Override
    public Builder clear() {
      // Reset all builder state, including the oneof case and its value.
      super.clear();
      bitField0_ = 0;
      uriTemplateCase_ = 0;
      uriTemplate_ = null;
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.shopping.merchant.accounts.v1beta.CheckoutsettingsProto
          .internal_static_google_shopping_merchant_accounts_v1beta_UriSettings_descriptor;
    }

    @java.lang.Override
    public com.google.shopping.merchant.accounts.v1beta.UriSettings getDefaultInstanceForType() {
      return com.google.shopping.merchant.accounts.v1beta.UriSettings.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.shopping.merchant.accounts.v1beta.UriSettings build() {
      com.google.shopping.merchant.accounts.v1beta.UriSettings result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.shopping.merchant.accounts.v1beta.UriSettings buildPartial() {
      com.google.shopping.merchant.accounts.v1beta.UriSettings result =
          new com.google.shopping.merchant.accounts.v1beta.UriSettings(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      buildPartialOneofs(result);
      onBuilt();
      return result;
    }

    // No non-oneof fields exist on this message; this hook is emitted by the
    // generator for structural symmetry and copies nothing.
    private void buildPartial0(com.google.shopping.merchant.accounts.v1beta.UriSettings result) {
      int from_bitField0_ = bitField0_;
    }

    // Copies the oneof case and its value into the message being built.
    private void buildPartialOneofs(
        com.google.shopping.merchant.accounts.v1beta.UriSettings result) {
      result.uriTemplateCase_ = uriTemplateCase_;
      result.uriTemplate_ = this.uriTemplate_;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.shopping.merchant.accounts.v1beta.UriSettings) {
        return mergeFrom((com.google.shopping.merchant.accounts.v1beta.UriSettings) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.shopping.merchant.accounts.v1beta.UriSettings other) {
      if (other == com.google.shopping.merchant.accounts.v1beta.UriSettings.getDefaultInstance())
        return this;
      // Merging a oneof: whichever member is set in `other` overwrites this
      // builder's oneof state; an unset oneof leaves this builder unchanged.
      switch (other.getUriTemplateCase()) {
        case CHECKOUT_URI_TEMPLATE:
          {
            uriTemplateCase_ = 1;
            uriTemplate_ = other.uriTemplate_;
            onChanged();
            break;
          }
        case CART_URI_TEMPLATE:
          {
            uriTemplateCase_ = 2;
            uriTemplate_ = other.uriTemplate_;
            onChanged();
            break;
          }
        case URITEMPLATE_NOT_SET:
          {
            break;
          }
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      // No required fields, so a builder is always buildable.
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                // Wire tag 10 = field 1 (checkout_uri_template), length-delimited.
                java.lang.String s = input.readStringRequireUtf8();
                uriTemplateCase_ = 1;
                uriTemplate_ = s;
                break;
              } // case 10
            case 18:
              {
                // Wire tag 18 = field 2 (cart_uri_template), length-delimited.
                java.lang.String s = input.readStringRequireUtf8();
                uriTemplateCase_ = 2;
                uriTemplate_ = s;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    // Oneof state: case 0 means unset; 1 and 2 select which string the shared
    // uriTemplate_ slot currently holds.
    private int uriTemplateCase_ = 0;
    private java.lang.Object uriTemplate_;

    public UriTemplateCase getUriTemplateCase() {
      return UriTemplateCase.forNumber(uriTemplateCase_);
    }

    public Builder clearUriTemplate() {
      uriTemplateCase_ = 0;
      uriTemplate_ = null;
      onChanged();
      return this;
    }

    private int bitField0_;

    /**
     *
     *
     * <pre>
     * Checkout URL template. When the placeholders are expanded will redirect
     * the buyer to the merchant checkout page with the item in the cart. For
     * more details, check the [help center
     * doc](https://support.google.com/merchants/answer/13945960#method1&zippy=%2Cproduct-level-url-formatting%2Caccount-level-url-formatting)
     * </pre>
     *
     * <code>string checkout_uri_template = 1;</code>
     *
     * @return Whether the checkoutUriTemplate field is set.
     */
    @java.lang.Override
    public boolean hasCheckoutUriTemplate() {
      return uriTemplateCase_ == 1;
    }

    /**
     *
     *
     * <pre>
     * Checkout URL template. When the placeholders are expanded will redirect
     * the buyer to the merchant checkout page with the item in the cart. For
     * more details, check the [help center
     * doc](https://support.google.com/merchants/answer/13945960#method1&zippy=%2Cproduct-level-url-formatting%2Caccount-level-url-formatting)
     * </pre>
     *
     * <code>string checkout_uri_template = 1;</code>
     *
     * @return The checkoutUriTemplate.
     */
    @java.lang.Override
    public java.lang.String getCheckoutUriTemplate() {
      java.lang.Object ref = "";
      if (uriTemplateCase_ == 1) {
        ref = uriTemplate_;
      }
      if (!(ref instanceof java.lang.String)) {
        // Decode the cached ByteString form and memoize the String.
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (uriTemplateCase_ == 1) {
          uriTemplate_ = s;
        }
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Checkout URL template. When the placeholders are expanded will redirect
     * the buyer to the merchant checkout page with the item in the cart. For
     * more details, check the [help center
     * doc](https://support.google.com/merchants/answer/13945960#method1&zippy=%2Cproduct-level-url-formatting%2Caccount-level-url-formatting)
     * </pre>
     *
     * <code>string checkout_uri_template = 1;</code>
     *
     * @return The bytes for checkoutUriTemplate.
     */
    @java.lang.Override
    public com.google.protobuf.ByteString getCheckoutUriTemplateBytes() {
      java.lang.Object ref = "";
      if (uriTemplateCase_ == 1) {
        ref = uriTemplate_;
      }
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        // Memoize the encoded form in the oneof slot.
        if (uriTemplateCase_ == 1) {
          uriTemplate_ = b;
        }
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Checkout URL template. When the placeholders are expanded will redirect
     * the buyer to the merchant checkout page with the item in the cart. For
     * more details, check the [help center
     * doc](https://support.google.com/merchants/answer/13945960#method1&zippy=%2Cproduct-level-url-formatting%2Caccount-level-url-formatting)
     * </pre>
     *
     * <code>string checkout_uri_template = 1;</code>
     *
     * @param value The checkoutUriTemplate to set.
     * @return This builder for chaining.
     */
    public Builder setCheckoutUriTemplate(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      // Setting this member implicitly clears any previously set oneof member.
      uriTemplateCase_ = 1;
      uriTemplate_ = value;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Checkout URL template. When the placeholders are expanded will redirect
     * the buyer to the merchant checkout page with the item in the cart. For
     * more details, check the [help center
     * doc](https://support.google.com/merchants/answer/13945960#method1&zippy=%2Cproduct-level-url-formatting%2Caccount-level-url-formatting)
     * </pre>
     *
     * <code>string checkout_uri_template = 1;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearCheckoutUriTemplate() {
      // Only clear when this member is the active one, so clearing field 1
      // never disturbs a set cart_uri_template.
      if (uriTemplateCase_ == 1) {
        uriTemplateCase_ = 0;
        uriTemplate_ = null;
        onChanged();
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * Checkout URL template. When the placeholders are expanded will redirect
     * the buyer to the merchant checkout page with the item in the cart. For
     * more details, check the [help center
     * doc](https://support.google.com/merchants/answer/13945960#method1&zippy=%2Cproduct-level-url-formatting%2Caccount-level-url-formatting)
     * </pre>
     *
     * <code>string checkout_uri_template = 1;</code>
     *
     * @param value The bytes for checkoutUriTemplate to set.
     * @return This builder for chaining.
     */
    public Builder setCheckoutUriTemplateBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      uriTemplateCase_ = 1;
      uriTemplate_ = value;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Cart URL template. When the placeholders are expanded will redirect the
     * buyer to the cart page on the merchant website with the selected
     * item in cart. For more details, check the [help center
     * doc](https://support.google.com/merchants/answer/13945960#method1&zippy=%2Cproduct-level-url-formatting%2Caccount-level-url-formatting)
     * </pre>
     *
     * <code>string cart_uri_template = 2;</code>
     *
     * @return Whether the cartUriTemplate field is set.
     */
    @java.lang.Override
    public boolean hasCartUriTemplate() {
      return uriTemplateCase_ == 2;
    }

    /**
     *
     *
     * <pre>
     * Cart URL template. When the placeholders are expanded will redirect the
     * buyer to the cart page on the merchant website with the selected
     * item in cart. For more details, check the [help center
     * doc](https://support.google.com/merchants/answer/13945960#method1&zippy=%2Cproduct-level-url-formatting%2Caccount-level-url-formatting)
     * </pre>
     *
     * <code>string cart_uri_template = 2;</code>
     *
     * @return The cartUriTemplate.
     */
    @java.lang.Override
    public java.lang.String getCartUriTemplate() {
      java.lang.Object ref = "";
      if (uriTemplateCase_ == 2) {
        ref = uriTemplate_;
      }
      if (!(ref instanceof java.lang.String)) {
        // Decode the cached ByteString form and memoize the String.
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (uriTemplateCase_ == 2) {
          uriTemplate_ = s;
        }
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Cart URL template. When the placeholders are expanded will redirect the
     * buyer to the cart page on the merchant website with the selected
     * item in cart. For more details, check the [help center
     * doc](https://support.google.com/merchants/answer/13945960#method1&zippy=%2Cproduct-level-url-formatting%2Caccount-level-url-formatting)
     * </pre>
     *
     * <code>string cart_uri_template = 2;</code>
     *
     * @return The bytes for cartUriTemplate.
     */
    @java.lang.Override
    public com.google.protobuf.ByteString getCartUriTemplateBytes() {
      java.lang.Object ref = "";
      if (uriTemplateCase_ == 2) {
        ref = uriTemplate_;
      }
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        // Memoize the encoded form in the oneof slot.
        if (uriTemplateCase_ == 2) {
          uriTemplate_ = b;
        }
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Cart URL template. When the placeholders are expanded will redirect the
     * buyer to the cart page on the merchant website with the selected
     * item in cart. For more details, check the [help center
     * doc](https://support.google.com/merchants/answer/13945960#method1&zippy=%2Cproduct-level-url-formatting%2Caccount-level-url-formatting)
     * </pre>
     *
     * <code>string cart_uri_template = 2;</code>
     *
     * @param value The cartUriTemplate to set.
     * @return This builder for chaining.
     */
    public Builder setCartUriTemplate(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      // Setting this member implicitly clears any previously set oneof member.
      uriTemplateCase_ = 2;
      uriTemplate_ = value;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Cart URL template. When the placeholders are expanded will redirect the
     * buyer to the cart page on the merchant website with the selected
     * item in cart. For more details, check the [help center
     * doc](https://support.google.com/merchants/answer/13945960#method1&zippy=%2Cproduct-level-url-formatting%2Caccount-level-url-formatting)
     * </pre>
     *
     * <code>string cart_uri_template = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearCartUriTemplate() {
      // Only clear when this member is the active one.
      if (uriTemplateCase_ == 2) {
        uriTemplateCase_ = 0;
        uriTemplate_ = null;
        onChanged();
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * Cart URL template. When the placeholders are expanded will redirect the
     * buyer to the cart page on the merchant website with the selected
     * item in cart. For more details, check the [help center
     * doc](https://support.google.com/merchants/answer/13945960#method1&zippy=%2Cproduct-level-url-formatting%2Caccount-level-url-formatting)
     * </pre>
     *
     * <code>string cart_uri_template = 2;</code>
     *
     * @param value The bytes for cartUriTemplate to set.
     * @return This builder for chaining.
     */
    public Builder setCartUriTemplateBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      uriTemplateCase_ = 2;
      uriTemplate_ = value;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.shopping.merchant.accounts.v1beta.UriSettings)
  }
  // @@protoc_insertion_point(class_scope:google.shopping.merchant.accounts.v1beta.UriSettings)
  // Shared immutable default instance; also the template from which builders
  // are created.
  private static final com.google.shopping.merchant.accounts.v1beta.UriSettings DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.shopping.merchant.accounts.v1beta.UriSettings();
  }

  public static com.google.shopping.merchant.accounts.v1beta.UriSettings getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Singleton wire-format parser; parsePartialFrom preserves partially parsed
  // data on the thrown exception via setUnfinishedMessage.
  private static final com.google.protobuf.Parser<UriSettings> PARSER =
      new com.google.protobuf.AbstractParser<UriSettings>() {
        @java.lang.Override
        public UriSettings parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<UriSettings> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<UriSettings> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.shopping.merchant.accounts.v1beta.UriSettings getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.compute.v1.stub;
import static com.google.cloud.compute.v1.RegionUrlMapsClient.ListPagedResponse;
import com.google.api.core.InternalApi;
import com.google.api.gax.core.BackgroundResource;
import com.google.api.gax.core.BackgroundResourceAggregation;
import com.google.api.gax.httpjson.ApiMethodDescriptor;
import com.google.api.gax.httpjson.HttpJsonCallSettings;
import com.google.api.gax.httpjson.HttpJsonOperationSnapshot;
import com.google.api.gax.httpjson.HttpJsonStubCallableFactory;
import com.google.api.gax.httpjson.ProtoMessageRequestFormatter;
import com.google.api.gax.httpjson.ProtoMessageResponseParser;
import com.google.api.gax.httpjson.ProtoRestSerializer;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.OperationCallable;
import com.google.api.gax.rpc.RequestParamsBuilder;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.cloud.compute.v1.DeleteRegionUrlMapRequest;
import com.google.cloud.compute.v1.GetRegionUrlMapRequest;
import com.google.cloud.compute.v1.InsertRegionUrlMapRequest;
import com.google.cloud.compute.v1.ListRegionUrlMapsRequest;
import com.google.cloud.compute.v1.Operation;
import com.google.cloud.compute.v1.Operation.Status;
import com.google.cloud.compute.v1.PatchRegionUrlMapRequest;
import com.google.cloud.compute.v1.UpdateRegionUrlMapRequest;
import com.google.cloud.compute.v1.UrlMap;
import com.google.cloud.compute.v1.UrlMapList;
import com.google.cloud.compute.v1.UrlMapsValidateResponse;
import com.google.cloud.compute.v1.ValidateRegionUrlMapRequest;
import com.google.protobuf.TypeRegistry;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
* REST stub implementation for the RegionUrlMaps service API.
*
* <p>This class is for advanced usage and reflects the underlying API directly.
*/
@Generated("by gapic-generator-java")
public class HttpJsonRegionUrlMapsStub extends RegionUrlMapsStub {
  // Type registry used by response parsers to resolve Any-packed messages;
  // only Operation is needed by this stub.
  private static final TypeRegistry typeRegistry =
      TypeRegistry.newBuilder().add(Operation.getDescriptor()).build();
  // RegionUrlMaps.Delete: HTTP DELETE on the url map resource path. The
  // returned Operation is wrapped in a snapshot whose name is suffixed with
  // ":project:region" so the operations client can route polling requests.
  private static final ApiMethodDescriptor<DeleteRegionUrlMapRequest, Operation>
      deleteMethodDescriptor =
          ApiMethodDescriptor.<DeleteRegionUrlMapRequest, Operation>newBuilder()
              .setFullMethodName("google.cloud.compute.v1.RegionUrlMaps/Delete")
              .setHttpMethod("DELETE")
              .setType(ApiMethodDescriptor.MethodType.UNARY)
              .setRequestFormatter(
                  ProtoMessageRequestFormatter.<DeleteRegionUrlMapRequest>newBuilder()
                      .setPath(
                          "/compute/v1/projects/{project}/regions/{region}/urlMaps/{urlMap}",
                          request -> {
                            Map<String, String> fields = new HashMap<>();
                            ProtoRestSerializer<DeleteRegionUrlMapRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putPathParam(fields, "project", request.getProject())
;
                            serializer.putPathParam(fields, "region", request.getRegion());
                            serializer.putPathParam(fields, "urlMap", request.getUrlMap());
                            return fields;
                          })
                      .setQueryParamsExtractor(
                          request -> {
                            Map<String, List<String>> fields = new HashMap<>();
                            ProtoRestSerializer<DeleteRegionUrlMapRequest> serializer =
                                ProtoRestSerializer.create();
                            // requestId is optional; only emit it when set.
                            if (request.hasRequestId()) {
                              serializer.putQueryParam(fields, "requestId", request.getRequestId());
                            }
                            return fields;
                          })
                      .setRequestBodyExtractor(request -> null)
                      .build())
              .setResponseParser(
                  ProtoMessageResponseParser.<Operation>newBuilder()
                      .setDefaultInstance(Operation.getDefaultInstance())
                      .setDefaultTypeRegistry(typeRegistry)
                      .build())
              .setOperationSnapshotFactory(
                  (DeleteRegionUrlMapRequest request, Operation response) -> {
                    StringBuilder opName = new StringBuilder(response.getName());
                    opName.append(":").append(request.getProject());
                    opName.append(":").append(request.getRegion());
                    return HttpJsonOperationSnapshot.newBuilder()
                        .setName(opName.toString())
                        .setMetadata(response)
                        .setDone(Status.DONE.equals(response.getStatus()))
                        .setResponse(response)
                        .setError(response.getHttpErrorStatusCode(), response.getHttpErrorMessage())
                        .build();
                  })
              .build();
  // RegionUrlMaps.Get: HTTP GET on the url map resource path; no query
  // parameters, no request body, plain UrlMap response (no LRO).
  private static final ApiMethodDescriptor<GetRegionUrlMapRequest, UrlMap> getMethodDescriptor =
      ApiMethodDescriptor.<GetRegionUrlMapRequest, UrlMap>newBuilder()
          .setFullMethodName("google.cloud.compute.v1.RegionUrlMaps/Get")
          .setHttpMethod("GET")
          .setType(ApiMethodDescriptor.MethodType.UNARY)
          .setRequestFormatter(
              ProtoMessageRequestFormatter.<GetRegionUrlMapRequest>newBuilder()
                  .setPath(
                      "/compute/v1/projects/{project}/regions/{region}/urlMaps/{urlMap}",
                      request -> {
                        Map<String, String> fields = new HashMap<>();
                        ProtoRestSerializer<GetRegionUrlMapRequest> serializer =
                            ProtoRestSerializer.create();
                        serializer.putPathParam(fields, "project", request.getProject());
                        serializer.putPathParam(fields, "region", request.getRegion());
                        serializer.putPathParam(fields, "urlMap", request.getUrlMap());
                        return fields;
                      })
                  .setQueryParamsExtractor(
                      request -> {
                        Map<String, List<String>> fields = new HashMap<>();
                        ProtoRestSerializer<GetRegionUrlMapRequest> serializer =
                            ProtoRestSerializer.create();
                        return fields;
                      })
                  .setRequestBodyExtractor(request -> null)
                  .build())
          .setResponseParser(
              ProtoMessageResponseParser.<UrlMap>newBuilder()
                  .setDefaultInstance(UrlMap.getDefaultInstance())
                  .setDefaultTypeRegistry(typeRegistry)
                  .build())
          .build();
  // RegionUrlMaps.Insert: HTTP POST with the UrlMap resource as the JSON
  // body. Like Delete, the Operation snapshot name is suffixed with
  // ":project:region" for regional operation polling.
  private static final ApiMethodDescriptor<InsertRegionUrlMapRequest, Operation>
      insertMethodDescriptor =
          ApiMethodDescriptor.<InsertRegionUrlMapRequest, Operation>newBuilder()
              .setFullMethodName("google.cloud.compute.v1.RegionUrlMaps/Insert")
              .setHttpMethod("POST")
              .setType(ApiMethodDescriptor.MethodType.UNARY)
              .setRequestFormatter(
                  ProtoMessageRequestFormatter.<InsertRegionUrlMapRequest>newBuilder()
                      .setPath(
                          "/compute/v1/projects/{project}/regions/{region}/urlMaps",
                          request -> {
                            Map<String, String> fields = new HashMap<>();
                            ProtoRestSerializer<InsertRegionUrlMapRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putPathParam(fields, "project", request.getProject());
                            serializer.putPathParam(fields, "region", request.getRegion());
                            return fields;
                          })
                      .setQueryParamsExtractor(
                          request -> {
                            Map<String, List<String>> fields = new HashMap<>();
                            ProtoRestSerializer<InsertRegionUrlMapRequest> serializer =
                                ProtoRestSerializer.create();
                            // requestId is optional; only emit it when set.
                            if (request.hasRequestId()) {
                              serializer.putQueryParam(fields, "requestId", request.getRequestId());
                            }
                            return fields;
                          })
                      .setRequestBodyExtractor(
                          request ->
                              ProtoRestSerializer.create()
                                  .toBody("urlMapResource", request.getUrlMapResource(), false))
                      .build())
              .setResponseParser(
                  ProtoMessageResponseParser.<Operation>newBuilder()
                      .setDefaultInstance(Operation.getDefaultInstance())
                      .setDefaultTypeRegistry(typeRegistry)
                      .build())
              .setOperationSnapshotFactory(
                  (InsertRegionUrlMapRequest request, Operation response) -> {
                    StringBuilder opName = new StringBuilder(response.getName());
                    opName.append(":").append(request.getProject());
                    opName.append(":").append(request.getRegion());
                    return HttpJsonOperationSnapshot.newBuilder()
                        .setName(opName.toString())
                        .setMetadata(response)
                        .setDone(Status.DONE.equals(response.getStatus()))
                        .setResponse(response)
                        .setError(response.getHttpErrorStatusCode(), response.getHttpErrorMessage())
                        .build();
                  })
              .build();
  // REST descriptor for the List RPC: GET
  // /compute/v1/projects/{project}/regions/{region}/urlMaps. Optional proto fields
  // (filter, maxResults, orderBy, pageToken, returnPartialSuccess) become query parameters
  // only when set; GET requests carry no body.
  private static final ApiMethodDescriptor<ListRegionUrlMapsRequest, UrlMapList>
      listMethodDescriptor =
          ApiMethodDescriptor.<ListRegionUrlMapsRequest, UrlMapList>newBuilder()
              .setFullMethodName("google.cloud.compute.v1.RegionUrlMaps/List")
              .setHttpMethod("GET")
              .setType(ApiMethodDescriptor.MethodType.UNARY)
              .setRequestFormatter(
                  ProtoMessageRequestFormatter.<ListRegionUrlMapsRequest>newBuilder()
                      .setPath(
                          "/compute/v1/projects/{project}/regions/{region}/urlMaps",
                          request -> {
                            // Bind the {project} and {region} path-template variables.
                            Map<String, String> fields = new HashMap<>();
                            ProtoRestSerializer<ListRegionUrlMapsRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putPathParam(fields, "project", request.getProject());
                            serializer.putPathParam(fields, "region", request.getRegion());
                            return fields;
                          })
                      .setQueryParamsExtractor(
                          request -> {
                            Map<String, List<String>> fields = new HashMap<>();
                            ProtoRestSerializer<ListRegionUrlMapsRequest> serializer =
                                ProtoRestSerializer.create();
                            if (request.hasFilter()) {
                              serializer.putQueryParam(fields, "filter", request.getFilter());
                            }
                            if (request.hasMaxResults()) {
                              serializer.putQueryParam(
                                  fields, "maxResults", request.getMaxResults());
                            }
                            if (request.hasOrderBy()) {
                              serializer.putQueryParam(fields, "orderBy", request.getOrderBy());
                            }
                            if (request.hasPageToken()) {
                              serializer.putQueryParam(fields, "pageToken", request.getPageToken());
                            }
                            if (request.hasReturnPartialSuccess()) {
                              serializer.putQueryParam(
                                  fields,
                                  "returnPartialSuccess",
                                  request.getReturnPartialSuccess());
                            }
                            return fields;
                          })
                      // GET requests have no request body.
                      .setRequestBodyExtractor(request -> null)
                      .build())
              .setResponseParser(
                  ProtoMessageResponseParser.<UrlMapList>newBuilder()
                      .setDefaultInstance(UrlMapList.getDefaultInstance())
                      .setDefaultTypeRegistry(typeRegistry)
                      .build())
              .build();
  // REST descriptor for the Patch RPC: PATCH
  // /compute/v1/projects/{project}/regions/{region}/urlMaps/{urlMap}. Returns a
  // long-running regional Operation (see the snapshot factory below).
  private static final ApiMethodDescriptor<PatchRegionUrlMapRequest, Operation>
      patchMethodDescriptor =
          ApiMethodDescriptor.<PatchRegionUrlMapRequest, Operation>newBuilder()
              .setFullMethodName("google.cloud.compute.v1.RegionUrlMaps/Patch")
              .setHttpMethod("PATCH")
              .setType(ApiMethodDescriptor.MethodType.UNARY)
              .setRequestFormatter(
                  ProtoMessageRequestFormatter.<PatchRegionUrlMapRequest>newBuilder()
                      .setPath(
                          "/compute/v1/projects/{project}/regions/{region}/urlMaps/{urlMap}",
                          request -> {
                            // Bind {project}, {region}, and {urlMap} path-template variables.
                            Map<String, String> fields = new HashMap<>();
                            ProtoRestSerializer<PatchRegionUrlMapRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putPathParam(fields, "project", request.getProject());
                            serializer.putPathParam(fields, "region", request.getRegion());
                            serializer.putPathParam(fields, "urlMap", request.getUrlMap());
                            return fields;
                          })
                      .setQueryParamsExtractor(
                          request -> {
                            // requestId is optional; only sent when explicitly set.
                            Map<String, List<String>> fields = new HashMap<>();
                            ProtoRestSerializer<PatchRegionUrlMapRequest> serializer =
                                ProtoRestSerializer.create();
                            if (request.hasRequestId()) {
                              serializer.putQueryParam(fields, "requestId", request.getRequestId());
                            }
                            return fields;
                          })
                      .setRequestBodyExtractor(
                          request ->
                              ProtoRestSerializer.create()
                                  .toBody("urlMapResource", request.getUrlMapResource(), false))
                      .build())
              .setResponseParser(
                  ProtoMessageResponseParser.<Operation>newBuilder()
                      .setDefaultInstance(Operation.getDefaultInstance())
                      .setDefaultTypeRegistry(typeRegistry)
                      .build())
              .setOperationSnapshotFactory(
                  (PatchRegionUrlMapRequest request, Operation response) -> {
                    // Encode project and region into the operation name for regional polling.
                    StringBuilder opName = new StringBuilder(response.getName());
                    opName.append(":").append(request.getProject());
                    opName.append(":").append(request.getRegion());
                    return HttpJsonOperationSnapshot.newBuilder()
                        .setName(opName.toString())
                        .setMetadata(response)
                        .setDone(Status.DONE.equals(response.getStatus()))
                        .setResponse(response)
                        .setError(response.getHttpErrorStatusCode(), response.getHttpErrorMessage())
                        .build();
                  })
              .build();
  // REST descriptor for the Update RPC: PUT (full replacement, unlike Patch)
  // /compute/v1/projects/{project}/regions/{region}/urlMaps/{urlMap}. Returns a
  // long-running regional Operation (see the snapshot factory below).
  private static final ApiMethodDescriptor<UpdateRegionUrlMapRequest, Operation>
      updateMethodDescriptor =
          ApiMethodDescriptor.<UpdateRegionUrlMapRequest, Operation>newBuilder()
              .setFullMethodName("google.cloud.compute.v1.RegionUrlMaps/Update")
              .setHttpMethod("PUT")
              .setType(ApiMethodDescriptor.MethodType.UNARY)
              .setRequestFormatter(
                  ProtoMessageRequestFormatter.<UpdateRegionUrlMapRequest>newBuilder()
                      .setPath(
                          "/compute/v1/projects/{project}/regions/{region}/urlMaps/{urlMap}",
                          request -> {
                            // Bind {project}, {region}, and {urlMap} path-template variables.
                            Map<String, String> fields = new HashMap<>();
                            ProtoRestSerializer<UpdateRegionUrlMapRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putPathParam(fields, "project", request.getProject());
                            serializer.putPathParam(fields, "region", request.getRegion());
                            serializer.putPathParam(fields, "urlMap", request.getUrlMap());
                            return fields;
                          })
                      .setQueryParamsExtractor(
                          request -> {
                            // requestId is optional; only sent when explicitly set.
                            Map<String, List<String>> fields = new HashMap<>();
                            ProtoRestSerializer<UpdateRegionUrlMapRequest> serializer =
                                ProtoRestSerializer.create();
                            if (request.hasRequestId()) {
                              serializer.putQueryParam(fields, "requestId", request.getRequestId());
                            }
                            return fields;
                          })
                      .setRequestBodyExtractor(
                          request ->
                              ProtoRestSerializer.create()
                                  .toBody("urlMapResource", request.getUrlMapResource(), false))
                      .build())
              .setResponseParser(
                  ProtoMessageResponseParser.<Operation>newBuilder()
                      .setDefaultInstance(Operation.getDefaultInstance())
                      .setDefaultTypeRegistry(typeRegistry)
                      .build())
              .setOperationSnapshotFactory(
                  (UpdateRegionUrlMapRequest request, Operation response) -> {
                    // Encode project and region into the operation name for regional polling.
                    StringBuilder opName = new StringBuilder(response.getName());
                    opName.append(":").append(request.getProject());
                    opName.append(":").append(request.getRegion());
                    return HttpJsonOperationSnapshot.newBuilder()
                        .setName(opName.toString())
                        .setMetadata(response)
                        .setDone(Status.DONE.equals(response.getStatus()))
                        .setResponse(response)
                        .setError(response.getHttpErrorStatusCode(), response.getHttpErrorMessage())
                        .build();
                  })
              .build();
private static final ApiMethodDescriptor<ValidateRegionUrlMapRequest, UrlMapsValidateResponse>
validateMethodDescriptor =
ApiMethodDescriptor.<ValidateRegionUrlMapRequest, UrlMapsValidateResponse>newBuilder()
.setFullMethodName("google.cloud.compute.v1.RegionUrlMaps/Validate")
.setHttpMethod("POST")
.setType(ApiMethodDescriptor.MethodType.UNARY)
.setRequestFormatter(
ProtoMessageRequestFormatter.<ValidateRegionUrlMapRequest>newBuilder()
.setPath(
"/compute/v1/projects/{project}/regions/{region}/urlMaps/{urlMap}/validate",
request -> {
Map<String, String> fields = new HashMap<>();
ProtoRestSerializer<ValidateRegionUrlMapRequest> serializer =
ProtoRestSerializer.create();
serializer.putPathParam(fields, "project", request.getProject());
serializer.putPathParam(fields, "region", request.getRegion());
serializer.putPathParam(fields, "urlMap", request.getUrlMap());
return fields;
})
.setQueryParamsExtractor(
request -> {
Map<String, List<String>> fields = new HashMap<>();
ProtoRestSerializer<ValidateRegionUrlMapRequest> serializer =
ProtoRestSerializer.create();
return fields;
})
.setRequestBodyExtractor(
request ->
ProtoRestSerializer.create()
.toBody(
"regionUrlMapsValidateRequestResource",
request.getRegionUrlMapsValidateRequestResource(),
false))
.build())
.setResponseParser(
ProtoMessageResponseParser.<UrlMapsValidateResponse>newBuilder()
.setDefaultInstance(UrlMapsValidateResponse.getDefaultInstance())
.setDefaultTypeRegistry(typeRegistry)
.build())
.build();
  // Per-RPC callables wired up in the constructor. The *OperationCallable variants poll
  // the returned Operation through the shared regional operations stub.
  private final UnaryCallable<DeleteRegionUrlMapRequest, Operation> deleteCallable;
  private final OperationCallable<DeleteRegionUrlMapRequest, Operation, Operation>
      deleteOperationCallable;
  private final UnaryCallable<GetRegionUrlMapRequest, UrlMap> getCallable;
  private final UnaryCallable<InsertRegionUrlMapRequest, Operation> insertCallable;
  private final OperationCallable<InsertRegionUrlMapRequest, Operation, Operation>
      insertOperationCallable;
  private final UnaryCallable<ListRegionUrlMapsRequest, UrlMapList> listCallable;
  private final UnaryCallable<ListRegionUrlMapsRequest, ListPagedResponse> listPagedCallable;
  private final UnaryCallable<PatchRegionUrlMapRequest, Operation> patchCallable;
  private final OperationCallable<PatchRegionUrlMapRequest, Operation, Operation>
      patchOperationCallable;
  private final UnaryCallable<UpdateRegionUrlMapRequest, Operation> updateCallable;
  private final OperationCallable<UpdateRegionUrlMapRequest, Operation, Operation>
      updateOperationCallable;
  private final UnaryCallable<ValidateRegionUrlMapRequest, UrlMapsValidateResponse>
      validateCallable;
  // Aggregated lifecycle resources; close()/shutdown() delegate here.
  private final BackgroundResource backgroundResources;
  // Stub used to poll regional long-running operations.
  private final HttpJsonRegionOperationsStub httpJsonOperationsStub;
  private final HttpJsonStubCallableFactory callableFactory;
public static final HttpJsonRegionUrlMapsStub create(RegionUrlMapsStubSettings settings)
throws IOException {
return new HttpJsonRegionUrlMapsStub(settings, ClientContext.create(settings));
}
public static final HttpJsonRegionUrlMapsStub create(ClientContext clientContext)
throws IOException {
return new HttpJsonRegionUrlMapsStub(
RegionUrlMapsStubSettings.newBuilder().build(), clientContext);
}
public static final HttpJsonRegionUrlMapsStub create(
ClientContext clientContext, HttpJsonStubCallableFactory callableFactory) throws IOException {
return new HttpJsonRegionUrlMapsStub(
RegionUrlMapsStubSettings.newBuilder().build(), clientContext, callableFactory);
}
  /**
   * Constructs an instance of HttpJsonRegionUrlMapsStub, using the given settings. This is
   * protected so that it is easy to make a subclass, but otherwise, the static factory methods
   * should be preferred.
   */
  protected HttpJsonRegionUrlMapsStub(
      RegionUrlMapsStubSettings settings, ClientContext clientContext) throws IOException {
    // Delegate to the full constructor with the default HTTP/JSON callable factory.
    this(settings, clientContext, new HttpJsonRegionUrlMapsCallableFactory());
  }
  /**
   * Constructs an instance of HttpJsonRegionUrlMapsStub, using the given settings. This is
   * protected so that it is easy to make a subclass, but otherwise, the static factory methods
   * should be preferred.
   */
  protected HttpJsonRegionUrlMapsStub(
      RegionUrlMapsStubSettings settings,
      ClientContext clientContext,
      HttpJsonStubCallableFactory callableFactory)
      throws IOException {
    this.callableFactory = callableFactory;
    // Regional operations stub used to poll long-running Operations returned by
    // insert/patch/update/delete.
    this.httpJsonOperationsStub =
        HttpJsonRegionOperationsStub.create(clientContext, callableFactory);
    // Build per-RPC transport settings. The params extractors populate implicit routing
    // headers (x-goog-request-params) from the request's path fields.
    HttpJsonCallSettings<DeleteRegionUrlMapRequest, Operation> deleteTransportSettings =
        HttpJsonCallSettings.<DeleteRegionUrlMapRequest, Operation>newBuilder()
            .setMethodDescriptor(deleteMethodDescriptor)
            .setTypeRegistry(typeRegistry)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("project", String.valueOf(request.getProject()));
                  builder.add("region", String.valueOf(request.getRegion()));
                  builder.add("url_map", String.valueOf(request.getUrlMap()));
                  return builder.build();
                })
            .build();
    HttpJsonCallSettings<GetRegionUrlMapRequest, UrlMap> getTransportSettings =
        HttpJsonCallSettings.<GetRegionUrlMapRequest, UrlMap>newBuilder()
            .setMethodDescriptor(getMethodDescriptor)
            .setTypeRegistry(typeRegistry)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("project", String.valueOf(request.getProject()));
                  builder.add("region", String.valueOf(request.getRegion()));
                  builder.add("url_map", String.valueOf(request.getUrlMap()));
                  return builder.build();
                })
            .build();
    HttpJsonCallSettings<InsertRegionUrlMapRequest, Operation> insertTransportSettings =
        HttpJsonCallSettings.<InsertRegionUrlMapRequest, Operation>newBuilder()
            .setMethodDescriptor(insertMethodDescriptor)
            .setTypeRegistry(typeRegistry)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("project", String.valueOf(request.getProject()));
                  builder.add("region", String.valueOf(request.getRegion()));
                  return builder.build();
                })
            .build();
    HttpJsonCallSettings<ListRegionUrlMapsRequest, UrlMapList> listTransportSettings =
        HttpJsonCallSettings.<ListRegionUrlMapsRequest, UrlMapList>newBuilder()
            .setMethodDescriptor(listMethodDescriptor)
            .setTypeRegistry(typeRegistry)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("project", String.valueOf(request.getProject()));
                  builder.add("region", String.valueOf(request.getRegion()));
                  return builder.build();
                })
            .build();
    HttpJsonCallSettings<PatchRegionUrlMapRequest, Operation> patchTransportSettings =
        HttpJsonCallSettings.<PatchRegionUrlMapRequest, Operation>newBuilder()
            .setMethodDescriptor(patchMethodDescriptor)
            .setTypeRegistry(typeRegistry)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("project", String.valueOf(request.getProject()));
                  builder.add("region", String.valueOf(request.getRegion()));
                  builder.add("url_map", String.valueOf(request.getUrlMap()));
                  return builder.build();
                })
            .build();
    HttpJsonCallSettings<UpdateRegionUrlMapRequest, Operation> updateTransportSettings =
        HttpJsonCallSettings.<UpdateRegionUrlMapRequest, Operation>newBuilder()
            .setMethodDescriptor(updateMethodDescriptor)
            .setTypeRegistry(typeRegistry)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("project", String.valueOf(request.getProject()));
                  builder.add("region", String.valueOf(request.getRegion()));
                  builder.add("url_map", String.valueOf(request.getUrlMap()));
                  return builder.build();
                })
            .build();
    HttpJsonCallSettings<ValidateRegionUrlMapRequest, UrlMapsValidateResponse>
        validateTransportSettings =
            HttpJsonCallSettings.<ValidateRegionUrlMapRequest, UrlMapsValidateResponse>newBuilder()
                .setMethodDescriptor(validateMethodDescriptor)
                .setTypeRegistry(typeRegistry)
                .setParamsExtractor(
                    request -> {
                      RequestParamsBuilder builder = RequestParamsBuilder.create();
                      builder.add("project", String.valueOf(request.getProject()));
                      builder.add("region", String.valueOf(request.getRegion()));
                      builder.add("url_map", String.valueOf(request.getUrlMap()));
                      return builder.build();
                    })
                .build();
    // Wire the callables: each combines transport settings with the retry/timeout settings
    // from RegionUrlMapsStubSettings. Operation callables additionally poll via the
    // regional operations stub.
    this.deleteCallable =
        callableFactory.createUnaryCallable(
            deleteTransportSettings, settings.deleteSettings(), clientContext);
    this.deleteOperationCallable =
        callableFactory.createOperationCallable(
            deleteTransportSettings,
            settings.deleteOperationSettings(),
            clientContext,
            httpJsonOperationsStub);
    this.getCallable =
        callableFactory.createUnaryCallable(
            getTransportSettings, settings.getSettings(), clientContext);
    this.insertCallable =
        callableFactory.createUnaryCallable(
            insertTransportSettings, settings.insertSettings(), clientContext);
    this.insertOperationCallable =
        callableFactory.createOperationCallable(
            insertTransportSettings,
            settings.insertOperationSettings(),
            clientContext,
            httpJsonOperationsStub);
    this.listCallable =
        callableFactory.createUnaryCallable(
            listTransportSettings, settings.listSettings(), clientContext);
    this.listPagedCallable =
        callableFactory.createPagedCallable(
            listTransportSettings, settings.listSettings(), clientContext);
    this.patchCallable =
        callableFactory.createUnaryCallable(
            patchTransportSettings, settings.patchSettings(), clientContext);
    this.patchOperationCallable =
        callableFactory.createOperationCallable(
            patchTransportSettings,
            settings.patchOperationSettings(),
            clientContext,
            httpJsonOperationsStub);
    this.updateCallable =
        callableFactory.createUnaryCallable(
            updateTransportSettings, settings.updateSettings(), clientContext);
    this.updateOperationCallable =
        callableFactory.createOperationCallable(
            updateTransportSettings,
            settings.updateOperationSettings(),
            clientContext,
            httpJsonOperationsStub);
    this.validateCallable =
        callableFactory.createUnaryCallable(
            validateTransportSettings, settings.validateSettings(), clientContext);
    // Aggregate everything that needs shutdown so close() can release it in one call.
    this.backgroundResources =
        new BackgroundResourceAggregation(clientContext.getBackgroundResources());
  }
@InternalApi
public static List<ApiMethodDescriptor> getMethodDescriptors() {
List<ApiMethodDescriptor> methodDescriptors = new ArrayList<>();
methodDescriptors.add(deleteMethodDescriptor);
methodDescriptors.add(getMethodDescriptor);
methodDescriptors.add(insertMethodDescriptor);
methodDescriptors.add(listMethodDescriptor);
methodDescriptors.add(patchMethodDescriptor);
methodDescriptors.add(updateMethodDescriptor);
methodDescriptors.add(validateMethodDescriptor);
return methodDescriptors;
}
  // Accessors exposing the callables built in the constructor. Each returns the
  // pre-wired, immutable callable; callers must not assume ownership.
  @Override
  public UnaryCallable<DeleteRegionUrlMapRequest, Operation> deleteCallable() {
    return deleteCallable;
  }
  @Override
  public OperationCallable<DeleteRegionUrlMapRequest, Operation, Operation>
      deleteOperationCallable() {
    return deleteOperationCallable;
  }
  @Override
  public UnaryCallable<GetRegionUrlMapRequest, UrlMap> getCallable() {
    return getCallable;
  }
  @Override
  public UnaryCallable<InsertRegionUrlMapRequest, Operation> insertCallable() {
    return insertCallable;
  }
  @Override
  public OperationCallable<InsertRegionUrlMapRequest, Operation, Operation>
      insertOperationCallable() {
    return insertOperationCallable;
  }
  @Override
  public UnaryCallable<ListRegionUrlMapsRequest, UrlMapList> listCallable() {
    return listCallable;
  }
  @Override
  public UnaryCallable<ListRegionUrlMapsRequest, ListPagedResponse> listPagedCallable() {
    return listPagedCallable;
  }
  @Override
  public UnaryCallable<PatchRegionUrlMapRequest, Operation> patchCallable() {
    return patchCallable;
  }
  @Override
  public OperationCallable<PatchRegionUrlMapRequest, Operation, Operation>
      patchOperationCallable() {
    return patchOperationCallable;
  }
  @Override
  public UnaryCallable<UpdateRegionUrlMapRequest, Operation> updateCallable() {
    return updateCallable;
  }
  @Override
  public OperationCallable<UpdateRegionUrlMapRequest, Operation, Operation>
      updateOperationCallable() {
    return updateOperationCallable;
  }
  @Override
  public UnaryCallable<ValidateRegionUrlMapRequest, UrlMapsValidateResponse> validateCallable() {
    return validateCallable;
  }
  @Override
  public final void close() {
    try {
      backgroundResources.close();
    } catch (RuntimeException e) {
      // Unchecked failures propagate unchanged.
      throw e;
    } catch (Exception e) {
      // close() declares no checked exceptions; wrap with the cause preserved.
      throw new IllegalStateException("Failed to close resource", e);
    }
  }
  // Lifecycle management is delegated to the aggregated background resources
  // (executors, channels, and the operations stub).
  @Override
  public void shutdown() {
    backgroundResources.shutdown();
  }
  @Override
  public boolean isShutdown() {
    return backgroundResources.isShutdown();
  }
  @Override
  public boolean isTerminated() {
    return backgroundResources.isTerminated();
  }
  @Override
  public void shutdownNow() {
    backgroundResources.shutdownNow();
  }
  @Override
  public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException {
    return backgroundResources.awaitTermination(duration, unit);
  }
}
|
openjdk/jdk8 | 35,608 | jdk/src/share/classes/sun/awt/AWTAccessor.java | /*
* Copyright (c) 2008, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package sun.awt;
import sun.misc.Unsafe;
import java.awt.*;
import java.awt.KeyboardFocusManager;
import java.awt.DefaultKeyboardFocusManager;
import java.awt.event.InputEvent;
import java.awt.event.InvocationEvent;
import java.awt.event.KeyEvent;
import java.awt.geom.Point2D;
import java.awt.peer.ComponentPeer;
import java.lang.reflect.InvocationTargetException;
import java.security.AccessControlContext;
import java.io.File;
import java.util.ResourceBundle;
import java.util.Vector;
/**
 * The AWTAccessor utility class.
 * The main purpose of this class is to enable accessing
 * private and package-private fields of classes from
 * different classes/packages. See sun.misc.SharedSecrets
 * for another example.
 */
public final class AWTAccessor {
    // Shared Unsafe instance available to accessor implementations for direct field access.
    private static final Unsafe unsafe = Unsafe.getUnsafe();
    /*
     * We don't need any objects of this class.
     * It's rather a collection of static methods
     * and interfaces.
     */
    private AWTAccessor() {
    }
    /*
     * An interface of accessor for the java.awt.Component class.
     */
    public interface ComponentAccessor {
        /*
         * Sets whether the native background erase for a component
         * has been disabled via SunToolkit.disableBackgroundErase().
         */
        void setBackgroundEraseDisabled(Component comp, boolean disabled);
        /*
         * Indicates whether the native background erase for a
         * component has been disabled via
         * SunToolkit.disableBackgroundErase().
         */
        boolean getBackgroundEraseDisabled(Component comp);
        /*
         * Gets the bounds of this component in the form of a
         * <code>Rectangle</code> object. The bounds specify this
         * component's width, height, and location relative to
         * its parent.
         */
        Rectangle getBounds(Component comp);
        /*
         * Sets the shape of a lightweight (lw) component to cut out
         * from heavyweight (hw) components.
         *
         * See 6797587, 6776743, 6768307, and 6768332 for details
         */
        void setMixingCutoutShape(Component comp, Shape shape);
        /**
         * Sets GraphicsConfiguration value for the component.
         */
        void setGraphicsConfiguration(Component comp, GraphicsConfiguration gc);
        /*
         * Requests focus to the component.
         */
        boolean requestFocus(Component comp, CausedFocusEvent.Cause cause);
        /*
         * Determines if the component can gain focus.
         */
        boolean canBeFocusOwner(Component comp);
        /**
         * Returns whether the component is visible without invoking
         * any client code.
         */
        boolean isVisible(Component comp);
        /**
         * Sets the RequestFocusController.
         */
        void setRequestFocusController(RequestFocusController requestController);
        /**
         * Returns the appContext of the component.
         */
        AppContext getAppContext(Component comp);
        /**
         * Sets the appContext of the component.
         */
        void setAppContext(Component comp, AppContext appContext);
        /**
         * Returns the parent of the component.
         */
        Container getParent(Component comp);
        /**
         * Sets the parent of the component to the specified parent.
         */
        void setParent(Component comp, Container parent);
        /**
         * Resizes the component to the specified width and height.
         */
        void setSize(Component comp, int width, int height);
        /**
         * Returns the location of the component.
         */
        Point getLocation(Component comp);
        /**
         * Moves the component to the new location.
         */
        void setLocation(Component comp, int x, int y);
        /**
         * Determines whether this component is enabled.
         */
        boolean isEnabled(Component comp);
        /**
         * Determines whether this component is displayable.
         */
        boolean isDisplayable(Component comp);
        /**
         * Gets the cursor set in the component.
         */
        Cursor getCursor(Component comp);
        /**
         * Returns the peer of the component.
         */
        ComponentPeer getPeer(Component comp);
        /**
         * Sets the peer of the component to the specified peer.
         */
        void setPeer(Component comp, ComponentPeer peer);
        /**
         * Determines whether this component is lightweight.
         */
        boolean isLightweight(Component comp);
        /**
         * Returns whether or not paint messages received from
         * the operating system should be ignored.
         */
        boolean getIgnoreRepaint(Component comp);
        /**
         * Returns the width of the component.
         */
        int getWidth(Component comp);
        /**
         * Returns the height of the component.
         */
        int getHeight(Component comp);
        /**
         * Returns the x coordinate of the component.
         */
        int getX(Component comp);
        /**
         * Returns the y coordinate of the component.
         */
        int getY(Component comp);
        /**
         * Gets the foreground color of this component.
         */
        Color getForeground(Component comp);
        /**
         * Gets the background color of this component.
         */
        Color getBackground(Component comp);
        /**
         * Sets the background of this component to the specified color.
         */
        void setBackground(Component comp, Color background);
        /**
         * Gets the font of the component.
         */
        Font getFont(Component comp);
        /**
         * Processes events occurring on this component.
         */
        void processEvent(Component comp, AWTEvent e);
        /*
         * Returns the AccessControlContext this component was constructed with.
         */
        AccessControlContext getAccessControlContext(Component comp);
        /**
         * Revalidates the component synchronously.
         */
        void revalidateSynchronously(Component comp);
    }
    /*
     * An interface of accessor for the java.awt.Container class.
     */
    public interface ContainerAccessor {
        /**
         * Validates the container unconditionally.
         */
        void validateUnconditionally(Container cont);
        /**
         * Access to the private version of findComponentAt method which has
         * a controllable behavior. Setting 'ignoreEnabled' to 'false'
         * bypasses disabled Components during the search.
         */
        Component findComponentAt(Container cont, int x, int y, boolean ignoreEnabled);
    }
    /*
     * An interface of accessor for java.awt.Window class.
     */
    public interface WindowAccessor {
        /*
         * Get opacity level of the given window.
         */
        float getOpacity(Window window);
        /*
         * Set opacity level to the given window.
         */
        void setOpacity(Window window, float opacity);
        /*
         * Get a shape assigned to the given window.
         */
        Shape getShape(Window window);
        /*
         * Set a shape to the given window.
         */
        void setShape(Window window, Shape shape);
        /*
         * Set the opaque property to the given window.
         */
        void setOpaque(Window window, boolean isOpaque);
        /*
         * Update the image of a non-opaque (translucent) window.
         */
        void updateWindow(Window window);
        /** Get the size of the security warning.
         */
        Dimension getSecurityWarningSize(Window w);
        /**
         * Set the size of the security warning.
         */
        void setSecurityWarningSize(Window w, int width, int height);
        /** Set the position of the security warning.
         */
        void setSecurityWarningPosition(Window w, Point2D point,
                float alignmentX, float alignmentY);
        /** Request to recalculate the new position of the security warning for
         * the given window size/location as reported by the native system.
         */
        Point2D calculateSecurityWarningPosition(Window window,
                double x, double y, double w, double h);
        /** Sets the synchronous status of focus requests on lightweight
         * components in the specified window to the specified value.
         */
        void setLWRequestStatus(Window changed, boolean status);
        /**
         * Indicates whether this window should receive focus on subsequently
         * being shown, or being moved to the front.
         */
        boolean isAutoRequestFocus(Window w);
        /**
         * Indicates whether the specified window is a utility window for TrayIcon.
         */
        boolean isTrayIconWindow(Window w);
        /**
         * Marks the specified window as a utility window for TrayIcon.
         */
        void setTrayIconWindow(Window w, boolean isTrayIconWindow);
    }
    /**
     * An accessor for the AWTEvent class.
     */
    public interface AWTEventAccessor {
        /**
         * Marks the event as posted.
         */
        void setPosted(AWTEvent ev);
        /**
         * Sets the flag on this AWTEvent indicating that it was
         * generated by the system.
         */
        void setSystemGenerated(AWTEvent ev);
        /**
         * Indicates whether this AWTEvent was generated by the system.
         */
        boolean isSystemGenerated(AWTEvent ev);
        /**
         * Returns the AccessControlContext this event was constructed with.
         */
        AccessControlContext getAccessControlContext(AWTEvent ev);
        /**
         * Returns binary data associated with this event.
         */
        byte[] getBData(AWTEvent ev);
        /**
         * Associates binary data with this event.
         */
        void setBData(AWTEvent ev, byte[] bdata);
    }
public interface InputEventAccessor {
/*
* Accessor for InputEvent.getButtonDownMasks()
*/
int[] getButtonDownMasks();
}
    /*
     * An accessor for the java.awt.Frame class.
     */
    public interface FrameAccessor {
        /*
         * Sets the state of this frame.
         */
        void setExtendedState(Frame frame, int state);
        /*
         * Gets the state of this frame.
         */
        int getExtendedState(Frame frame);
        /*
         * Gets the maximized bounds of this frame.
         */
        Rectangle getMaximizedBounds(Frame frame);
    }
    /**
     * An interface of accessor for the java.awt.KeyboardFocusManager class.
     */
    public interface KeyboardFocusManagerAccessor {
        /**
         * Indicates whether the native implementation should
         * proceed with a pending focus request for the heavyweight.
         */
        int shouldNativelyFocusHeavyweight(Component heavyweight,
                                           Component descendant,
                                           boolean temporary,
                                           boolean focusedWindowChangeAllowed,
                                           long time,
                                           CausedFocusEvent.Cause cause);
        /**
         * Delivers focus for the lightweight descendant of the heavyweight
         * synchronously.
         */
        boolean processSynchronousLightweightTransfer(Component heavyweight,
                                                      Component descendant,
                                                      boolean temporary,
                                                      boolean focusedWindowChangeAllowed,
                                                      long time);
        /**
         * Removes the last focus request for the heavyweight from the queue.
         */
        void removeLastFocusRequest(Component heavyweight);
        /**
         * Sets the most recent focus owner in the window.
         */
        void setMostRecentFocusOwner(Window window, Component component);
        /**
         * Returns current KFM of the specified AppContext.
         */
        KeyboardFocusManager getCurrentKeyboardFocusManager(AppContext ctx);
        /**
         * Return the current focus cycle root
         */
        Container getCurrentFocusCycleRoot();
    }
    /**
     * An accessor for the MenuComponent class.
     */
    public interface MenuComponentAccessor {
        /**
         * Returns the appContext of the menu component.
         */
        AppContext getAppContext(MenuComponent menuComp);
        /**
         * Sets the appContext of the menu component.
         */
        void setAppContext(MenuComponent menuComp, AppContext appContext);
        /**
         * Returns the menu container of the menu component.
         */
        MenuContainer getParent(MenuComponent menuComp);
        /**
         * Gets the font used for this menu component.
         */
        Font getFont_NoClientCode(MenuComponent menuComp);
    }
/**
* An accessor for the EventQueue class
*/
public interface EventQueueAccessor {
/**
* Gets the event dispatch thread.
*/
Thread getDispatchThread(EventQueue eventQueue);
/**
* Checks if the current thread is EDT for the given EQ.
*/
public boolean isDispatchThreadImpl(EventQueue eventQueue);
/**
* Removes any pending events for the specified source object.
*/
void removeSourceEvents(EventQueue eventQueue, Object source, boolean removeAllEvents);
/**
* Returns whether an event is pending on any of the separate Queues.
*/
boolean noEvents(EventQueue eventQueue);
/**
* Called from PostEventQueue.postEvent to notify that a new event
* appeared.
*/
void wakeup(EventQueue eventQueue, boolean isShutdown);
/**
* Static in EventQueue
*/
void invokeAndWait(Object source, Runnable r)
throws InterruptedException, InvocationTargetException;
/**
* Sets the delegate for the EventQueue used by FX/AWT single threaded mode
*/
public void setFwDispatcher(EventQueue eventQueue, FwDispatcher dispatcher);
}
    /*
     * An accessor for the PopupMenu class.
     */
    public interface PopupMenuAccessor {
        /*
         * Returns whether the popup menu is attached to a tray
         */
        boolean isTrayIconPopup(PopupMenu popupMenu);
    }
    /*
     * An accessor for the FileDialog class.
     */
    public interface FileDialogAccessor {
        /*
         * Sets the files the user selects
         */
        void setFiles(FileDialog fileDialog, File files[]);
        /*
         * Sets the file the user selects
         */
        void setFile(FileDialog fileDialog, String file);
        /*
         * Sets the directory the user selects
         */
        void setDirectory(FileDialog fileDialog, String directory);
        /*
         * Returns whether the file dialog allows the multiple file selection.
         */
        boolean isMultipleMode(FileDialog fileDialog);
    }
    /*
     * An accessor for the ScrollPaneAdjustable class.
     */
    public interface ScrollPaneAdjustableAccessor {
        /*
         * Sets the value of this scrollbar to the specified value.
         */
        void setTypedValue(final ScrollPaneAdjustable adj, final int v,
                           final int type);
    }
    /**
     * An accessor for the CheckboxMenuItem class.
     */
    public interface CheckboxMenuItemAccessor {
        /**
         * Returns whether menu item is checked
         */
        boolean getState(CheckboxMenuItem cmi);
    }
    /**
     * An accessor for the Cursor class.
     */
    public interface CursorAccessor {
        /**
         * Returns pData (the native cursor handle) of the Cursor class
         */
        long getPData(Cursor cursor);
        /**
         * Sets pData (the native cursor handle) to the Cursor class
         */
        void setPData(Cursor cursor, long pData);
        /**
         * Return type of the Cursor class
         */
        int getType(Cursor cursor);
    }
    /**
     * An accessor for the MenuBar class.
     */
    public interface MenuBarAccessor {
        /**
         * Returns help menu
         */
        Menu getHelpMenu(MenuBar menuBar);
        /**
         * Returns menus
         */
        Vector getMenus(MenuBar menuBar);
    }
/**
* An accessor for the MenuItem class
*/
public interface MenuItemAccessor {
    /**
     * Returns whether the menu item itself is enabled.
     */
    boolean isEnabled(MenuItem item);
    /**
     * Gets the command name of the action event that is fired
     * by this menu item.
     */
    String getActionCommandImpl(MenuItem item);
    /**
     * Returns true if the item and all its ancestors are
     * enabled, false otherwise.
     */
    boolean isItemEnabled(MenuItem item);
    /**
     * Returns the label.
     */
    String getLabel(MenuItem item);
    /**
     * Returns the shortcut.
     */
    MenuShortcut getShortcut(MenuItem item);
}
/**
* An accessor for the Menu class
*/
public interface MenuAccessor {
    /**
     * Returns the vector of items that are part of the Menu.
     */
    Vector getItems(Menu menu);
}
/**
* An accessor for the KeyEvent class
*/
public interface KeyEventAccessor {
    /**
     * Sets the rawCode field of the KeyEvent.
     */
    void setRawCode(KeyEvent ev, long rawCode);
    /**
     * Sets the primaryLevelUnicode field of the KeyEvent.
     */
    void setPrimaryLevelUnicode(KeyEvent ev, long primaryLevelUnicode);
    /**
     * Sets the extendedKeyCode field of the KeyEvent.
     */
    void setExtendedKeyCode(KeyEvent ev, long extendedKeyCode);
    /**
     * Gets the original source of the KeyEvent.
     */
    Component getOriginalSource(KeyEvent ev);
}
/**
* An accessor for the ClientPropertyKey class
*/
public interface ClientPropertyKeyAccessor {
    /**
     * Retrieves the JComponent_TRANSFER_HANDLER enum object.
     */
    Object getJComponent_TRANSFER_HANDLER();
}
/**
* An accessor for the SystemTray class
*/
public interface SystemTrayAccessor {
    /**
     * Support for reporting bound property changes for Object properties.
     */
    void firePropertyChange(SystemTray tray, String propertyName, Object oldValue, Object newValue);
}
/**
* An accessor for the TrayIcon class
*/
public interface TrayIconAccessor {
    /** Creates the peer/native resources for the tray icon. */
    void addNotify(TrayIcon trayIcon) throws AWTException;
    /** Disposes the peer/native resources of the tray icon. */
    void removeNotify(TrayIcon trayIcon);
}
/**
* An accessor for the DefaultKeyboardFocusManager class
*/
public interface DefaultKeyboardFocusManagerAccessor {
    /** Marks the next KEY_TYPED event matching {@code e} to be consumed. */
    public void consumeNextKeyTyped(DefaultKeyboardFocusManager dkfm, KeyEvent e);
}
/*
* An accessor for the SequencedEventAccessor class
*/
public interface SequencedEventAccessor {
    /**
     * Returns the nested event.
     */
    AWTEvent getNested(AWTEvent sequencedEvent);
    /**
     * Returns true if the event is an instance of SequencedEvent.
     */
    boolean isSequencedEvent(AWTEvent event);
}
/*
* An accessor for the Toolkit class
*/
public interface ToolkitAccessor {
    /** Installs the platform-specific resource bundle into Toolkit. */
    void setPlatformResources(ResourceBundle bundle);
}
/*
* An accessor object for the InvocationEvent class
*/
public interface InvocationEventAccessor {
    /** Disposes the InvocationEvent (releases its completion notifier). */
    void dispose(InvocationEvent event);
}
/*
 * Accessor instances are initialized in the static initializers of
 * corresponding AWT classes by using setters defined below.
 *
 * Each field is written once by its target class's static initializer
 * and read lazily by the matching getter further down.
 */
private static ComponentAccessor componentAccessor;
private static ContainerAccessor containerAccessor;
private static WindowAccessor windowAccessor;
private static AWTEventAccessor awtEventAccessor;
private static InputEventAccessor inputEventAccessor;
private static FrameAccessor frameAccessor;
private static KeyboardFocusManagerAccessor kfmAccessor;
private static MenuComponentAccessor menuComponentAccessor;
private static EventQueueAccessor eventQueueAccessor;
private static PopupMenuAccessor popupMenuAccessor;
private static FileDialogAccessor fileDialogAccessor;
private static ScrollPaneAdjustableAccessor scrollPaneAdjustableAccessor;
private static CheckboxMenuItemAccessor checkboxMenuItemAccessor;
private static CursorAccessor cursorAccessor;
private static MenuBarAccessor menuBarAccessor;
private static MenuItemAccessor menuItemAccessor;
private static MenuAccessor menuAccessor;
private static KeyEventAccessor keyEventAccessor;
private static ClientPropertyKeyAccessor clientPropertyKeyAccessor;
private static SystemTrayAccessor systemTrayAccessor;
private static TrayIconAccessor trayIconAccessor;
private static DefaultKeyboardFocusManagerAccessor defaultKeyboardFocusManagerAccessor;
private static SequencedEventAccessor sequencedEventAccessor;
private static ToolkitAccessor toolkitAccessor;
private static InvocationEventAccessor invocationEventAccessor;
/*
 * Set an accessor object for the java.awt.Component class.
 */
public static void setComponentAccessor(ComponentAccessor ca) {
    componentAccessor = ca;
}
/*
 * Retrieve the accessor object for the java.awt.Component class.
 */
public static ComponentAccessor getComponentAccessor() {
    if (componentAccessor == null) {
        // Lazy init: running Component's static initializer makes it call
        // setComponentAccessor(), populating the field read below.
        unsafe.ensureClassInitialized(Component.class);
    }
    return componentAccessor;
}
/*
 * Set an accessor object for the java.awt.Container class.
 */
public static void setContainerAccessor(ContainerAccessor ca) {
    containerAccessor = ca;
}
/*
 * Retrieve the accessor object for the java.awt.Container class.
 */
public static ContainerAccessor getContainerAccessor() {
    if (containerAccessor == null) {
        // Container's static initializer installs the accessor.
        unsafe.ensureClassInitialized(Container.class);
    }
    return containerAccessor;
}
/*
 * Set an accessor object for the java.awt.Window class.
 */
public static void setWindowAccessor(WindowAccessor wa) {
    windowAccessor = wa;
}
/*
 * Retrieve the accessor object for the java.awt.Window class.
 */
public static WindowAccessor getWindowAccessor() {
    if (windowAccessor == null) {
        // Window's static initializer installs the accessor.
        unsafe.ensureClassInitialized(Window.class);
    }
    return windowAccessor;
}
/*
 * Set an accessor object for the java.awt.AWTEvent class.
 */
public static void setAWTEventAccessor(AWTEventAccessor aea) {
    awtEventAccessor = aea;
}
/*
 * Retrieve the accessor object for the java.awt.AWTEvent class.
 */
public static AWTEventAccessor getAWTEventAccessor() {
    if (awtEventAccessor == null) {
        // AWTEvent's static initializer installs the accessor.
        unsafe.ensureClassInitialized(AWTEvent.class);
    }
    return awtEventAccessor;
}
/*
 * Set an accessor object for the java.awt.event.InputEvent class.
 */
public static void setInputEventAccessor(InputEventAccessor iea) {
    inputEventAccessor = iea;
}
/*
 * Retrieve the accessor object for the java.awt.event.InputEvent class.
 */
public static InputEventAccessor getInputEventAccessor() {
    if (inputEventAccessor == null) {
        // InputEvent's static initializer installs the accessor.
        unsafe.ensureClassInitialized(InputEvent.class);
    }
    return inputEventAccessor;
}
/*
 * Set an accessor object for the java.awt.Frame class.
 */
public static void setFrameAccessor(FrameAccessor fa) {
    frameAccessor = fa;
}
/*
 * Retrieve the accessor object for the java.awt.Frame class.
 */
public static FrameAccessor getFrameAccessor() {
    if (frameAccessor == null) {
        // Frame's static initializer installs the accessor.
        unsafe.ensureClassInitialized(Frame.class);
    }
    return frameAccessor;
}
/*
 * Set an accessor object for the java.awt.KeyboardFocusManager class.
 */
public static void setKeyboardFocusManagerAccessor(KeyboardFocusManagerAccessor kfma) {
    kfmAccessor = kfma;
}
/*
 * Retrieve the accessor object for the java.awt.KeyboardFocusManager class.
 */
public static KeyboardFocusManagerAccessor getKeyboardFocusManagerAccessor() {
    if (kfmAccessor == null) {
        // KeyboardFocusManager's static initializer installs the accessor.
        unsafe.ensureClassInitialized(KeyboardFocusManager.class);
    }
    return kfmAccessor;
}
/*
 * Set an accessor object for the java.awt.MenuComponent class.
 */
public static void setMenuComponentAccessor(MenuComponentAccessor mca) {
    menuComponentAccessor = mca;
}
/*
 * Retrieve the accessor object for the java.awt.MenuComponent class.
 */
public static MenuComponentAccessor getMenuComponentAccessor() {
    if (menuComponentAccessor == null) {
        // MenuComponent's static initializer installs the accessor.
        unsafe.ensureClassInitialized(MenuComponent.class);
    }
    return menuComponentAccessor;
}
/*
 * Set an accessor object for the java.awt.EventQueue class.
 */
public static void setEventQueueAccessor(EventQueueAccessor eqa) {
    eventQueueAccessor = eqa;
}
/*
 * Retrieve the accessor object for the java.awt.EventQueue class.
 */
public static EventQueueAccessor getEventQueueAccessor() {
    if (eventQueueAccessor == null) {
        // EventQueue's static initializer installs the accessor.
        unsafe.ensureClassInitialized(EventQueue.class);
    }
    return eventQueueAccessor;
}
/*
 * Set an accessor object for the java.awt.PopupMenu class.
 */
public static void setPopupMenuAccessor(PopupMenuAccessor pma) {
    popupMenuAccessor = pma;
}
/*
 * Retrieve the accessor object for the java.awt.PopupMenu class.
 */
public static PopupMenuAccessor getPopupMenuAccessor() {
    if (popupMenuAccessor == null) {
        // PopupMenu's static initializer installs the accessor.
        unsafe.ensureClassInitialized(PopupMenu.class);
    }
    return popupMenuAccessor;
}
/*
 * Set an accessor object for the java.awt.FileDialog class.
 */
public static void setFileDialogAccessor(FileDialogAccessor fda) {
    fileDialogAccessor = fda;
}
/*
 * Retrieve the accessor object for the java.awt.FileDialog class.
 */
public static FileDialogAccessor getFileDialogAccessor() {
    if (fileDialogAccessor == null) {
        // FileDialog's static initializer installs the accessor.
        unsafe.ensureClassInitialized(FileDialog.class);
    }
    return fileDialogAccessor;
}
/*
 * Set an accessor object for the java.awt.ScrollPaneAdjustable class.
 */
public static void setScrollPaneAdjustableAccessor(ScrollPaneAdjustableAccessor adj) {
    scrollPaneAdjustableAccessor = adj;
}
/*
 * Retrieve the accessor object for the java.awt.ScrollPaneAdjustable
 * class.
 */
public static ScrollPaneAdjustableAccessor getScrollPaneAdjustableAccessor() {
    if (scrollPaneAdjustableAccessor == null) {
        // ScrollPaneAdjustable's static initializer installs the accessor.
        unsafe.ensureClassInitialized(ScrollPaneAdjustable.class);
    }
    return scrollPaneAdjustableAccessor;
}
/**
 * Set an accessor object for the java.awt.CheckboxMenuItem class.
 */
public static void setCheckboxMenuItemAccessor(CheckboxMenuItemAccessor cmia) {
    checkboxMenuItemAccessor = cmia;
}
/**
 * Retrieve the accessor object for the java.awt.CheckboxMenuItem class.
 */
public static CheckboxMenuItemAccessor getCheckboxMenuItemAccessor() {
    if (checkboxMenuItemAccessor == null) {
        // Fix: initialize CheckboxMenuItem itself — its static initializer is
        // what installs the accessor. Initializing the accessor interface (the
        // previous code) has no static initializer side effect, so the getter
        // could return null.
        unsafe.ensureClassInitialized(CheckboxMenuItem.class);
    }
    return checkboxMenuItemAccessor;
}
/**
 * Set an accessor object for the java.awt.Cursor class.
 */
public static void setCursorAccessor(CursorAccessor ca) {
    cursorAccessor = ca;
}
/**
 * Retrieve the accessor object for the java.awt.Cursor class.
 */
public static CursorAccessor getCursorAccessor() {
    if (cursorAccessor == null) {
        // Fix: initialize Cursor itself (its static initializer installs the
        // accessor); initializing CursorAccessor.class has no such effect and
        // could leave this returning null.
        unsafe.ensureClassInitialized(Cursor.class);
    }
    return cursorAccessor;
}
/**
 * Set an accessor object for the java.awt.MenuBar class.
 */
public static void setMenuBarAccessor(MenuBarAccessor mba) {
    menuBarAccessor = mba;
}
/**
 * Retrieve the accessor object for the java.awt.MenuBar class.
 */
public static MenuBarAccessor getMenuBarAccessor() {
    if (menuBarAccessor == null) {
        // Fix: initialize MenuBar itself (its static initializer installs the
        // accessor); initializing MenuBarAccessor.class has no such effect.
        unsafe.ensureClassInitialized(MenuBar.class);
    }
    return menuBarAccessor;
}
/**
 * Set an accessor object for the java.awt.MenuItem class.
 */
public static void setMenuItemAccessor(MenuItemAccessor mia) {
    menuItemAccessor = mia;
}
/**
 * Retrieve the accessor object for the java.awt.MenuItem class.
 */
public static MenuItemAccessor getMenuItemAccessor() {
    if (menuItemAccessor == null) {
        // Fix: initialize MenuItem itself (its static initializer installs the
        // accessor); initializing MenuItemAccessor.class has no such effect.
        unsafe.ensureClassInitialized(MenuItem.class);
    }
    return menuItemAccessor;
}
/**
 * Set an accessor object for the java.awt.Menu class.
 */
public static void setMenuAccessor(MenuAccessor ma) {
    menuAccessor = ma;
}
/**
 * Retrieve the accessor object for the java.awt.Menu class.
 */
public static MenuAccessor getMenuAccessor() {
    if (menuAccessor == null) {
        // Fix: initialize Menu itself (its static initializer installs the
        // accessor); initializing MenuAccessor.class has no such effect.
        unsafe.ensureClassInitialized(Menu.class);
    }
    return menuAccessor;
}
/**
 * Set an accessor object for the java.awt.event.KeyEvent class.
 */
public static void setKeyEventAccessor(KeyEventAccessor kea) {
    keyEventAccessor = kea;
}
/**
 * Retrieve the accessor object for the java.awt.event.KeyEvent class.
 */
public static KeyEventAccessor getKeyEventAccessor() {
    if (keyEventAccessor == null) {
        // Fix: initialize KeyEvent itself (its static initializer installs the
        // accessor); initializing KeyEventAccessor.class has no such effect.
        unsafe.ensureClassInitialized(KeyEvent.class);
    }
    return keyEventAccessor;
}
/**
 * Set an accessor object for the javax.swing.ClientPropertyKey class.
 */
public static void setClientPropertyKeyAccessor(ClientPropertyKeyAccessor cpka) {
    clientPropertyKeyAccessor = cpka;
}
/**
 * Retrieve the accessor object for the javax.swing.ClientPropertyKey class.
 */
public static ClientPropertyKeyAccessor getClientPropertyKeyAccessor() {
    if (clientPropertyKeyAccessor == null) {
        // NOTE(review): this initializes the accessor *interface*, which has no
        // static initializer, so it cannot force the (non-public) ClientPropertyKey
        // class to load and install the accessor. If ClientPropertyKey has not
        // been loaded yet, this returns null — confirm callers tolerate that.
        unsafe.ensureClassInitialized(ClientPropertyKeyAccessor.class);
    }
    return clientPropertyKeyAccessor;
}
/**
 * Set an accessor object for the java.awt.SystemTray class.
 */
public static void setSystemTrayAccessor(SystemTrayAccessor sta) {
    systemTrayAccessor = sta;
}
/**
 * Retrieve the accessor object for the java.awt.SystemTray class.
 */
public static SystemTrayAccessor getSystemTrayAccessor() {
    if (systemTrayAccessor == null) {
        // Fix: initialize SystemTray itself (its static initializer installs
        // the accessor); initializing SystemTrayAccessor.class has no effect.
        unsafe.ensureClassInitialized(SystemTray.class);
    }
    return systemTrayAccessor;
}
/**
 * Set an accessor object for the java.awt.TrayIcon class.
 */
public static void setTrayIconAccessor(TrayIconAccessor tia) {
    trayIconAccessor = tia;
}
/**
 * Retrieve the accessor object for the java.awt.TrayIcon class.
 */
public static TrayIconAccessor getTrayIconAccessor() {
    if (trayIconAccessor == null) {
        // Fix: initialize TrayIcon itself (its static initializer installs
        // the accessor); initializing TrayIconAccessor.class has no effect.
        unsafe.ensureClassInitialized(TrayIcon.class);
    }
    return trayIconAccessor;
}
/**
 * Set an accessor object for the java.awt.DefaultKeyboardFocusManager class.
 */
public static void setDefaultKeyboardFocusManagerAccessor(DefaultKeyboardFocusManagerAccessor dkfma) {
    defaultKeyboardFocusManagerAccessor = dkfma;
}
/**
 * Retrieve the accessor object for the java.awt.DefaultKeyboardFocusManager class.
 */
public static DefaultKeyboardFocusManagerAccessor getDefaultKeyboardFocusManagerAccessor() {
    if (defaultKeyboardFocusManagerAccessor == null) {
        // Fix: initialize DefaultKeyboardFocusManager itself (its static
        // initializer installs the accessor); initializing the accessor
        // interface has no such effect.
        unsafe.ensureClassInitialized(DefaultKeyboardFocusManager.class);
    }
    return defaultKeyboardFocusManagerAccessor;
}
/*
 * Set an accessor object for the java.awt.SequencedEvent class.
 */
public static void setSequencedEventAccessor(SequencedEventAccessor sea) {
    sequencedEventAccessor = sea;
}
/*
 * Get the accessor object for the java.awt.SequencedEvent class.
 * May return null (see below) — callers must check.
 */
public static SequencedEventAccessor getSequencedEventAccessor() {
    // The class is not public. So we can't ensure it's initialized.
    // Null returned value means it's not initialized
    // (so not a single instance of the event has been created).
    return sequencedEventAccessor;
}
/*
 * Set an accessor object for the java.awt.Toolkit class.
 */
public static void setToolkitAccessor(ToolkitAccessor ta) {
    toolkitAccessor = ta;
}
/*
 * Get the accessor object for the java.awt.Toolkit class.
 */
public static ToolkitAccessor getToolkitAccessor() {
    if (toolkitAccessor == null) {
        // Toolkit's static initializer installs the accessor.
        unsafe.ensureClassInitialized(Toolkit.class);
    }
    return toolkitAccessor;
}
/*
 * Set the accessor object for the java.awt.event.InvocationEvent class.
 * (Comment fixed: the Get/Set descriptions were swapped between the two
 * methods below.)
 */
public static void setInvocationEventAccessor(InvocationEventAccessor invocationEventAccessor) {
    AWTAccessor.invocationEventAccessor = invocationEventAccessor;
}
/*
 * Get the accessor object for the java.awt.event.InvocationEvent class.
 */
public static InvocationEventAccessor getInvocationEventAccessor() {
    return invocationEventAccessor;
}
}
|
apache/hadoop | 35,449 | hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/service/TestCompositeService.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.service;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.service.Service.STATE;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.Timeout;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
public class TestCompositeService {
// Number of child services added in most tests.
private static final int NUM_OF_SERVICES = 5;
// Index of the child configured to fail in the failure tests.
private static final int FAILED_SERVICE_SEQ_NUMBER = 2;
private static final Logger LOG =
    LoggerFactory.getLogger(TestCompositeService.class);
/**
 * flag to state policy of CompositeService, and hence
 * what to look for after trying to stop a service from another state
 * (e.g inited)
 */
private static final boolean STOP_ONLY_STARTED_SERVICES =
    CompositeServiceImpl.isPolicyToStopOnlyStartedServices();
@BeforeEach
public void setup() {
    // Reset the shared counter so call-sequence numbers start at 0 per test.
    CompositeServiceImpl.resetCounter();
}
/**
 * Walks NUM_OF_SERVICES children through init/start/stop and verifies
 * call order: init() and start() run in addition order, stop() runs in
 * reverse order, and a second stop() is a no-op.
 */
@Test
public void testCallSequence() {
    ServiceManager serviceManager = new ServiceManager("ServiceManager");
    // Add services
    for (int i = 0; i < NUM_OF_SERVICES; i++) {
        CompositeServiceImpl service = new CompositeServiceImpl(i);
        serviceManager.addTestService(service);
    }
    CompositeServiceImpl[] services = serviceManager.getServices().toArray(
        new CompositeServiceImpl[0]);
    assertEquals(NUM_OF_SERVICES, services.length,
        "Number of registered services ");
    Configuration conf = new Configuration();
    // Initialise the composite service
    serviceManager.init(conf);
    //verify they were all inited
    assertInState(STATE.INITED, services);
    // Verify the init() call sequence numbers for every service
    for (int i = 0; i < NUM_OF_SERVICES; i++) {
        assertEquals(i, services[i].getCallSequenceNumber(),
            "For " + services[i] +
            " service, init() call sequence number should have been ");
    }
    // Reset the call sequence numbers
    resetServices(services);
    serviceManager.start();
    //verify they were all started
    assertInState(STATE.STARTED, services);
    // Verify the start() call sequence numbers for every service
    for (int i = 0; i < NUM_OF_SERVICES; i++) {
        assertEquals(i,
            services[i].getCallSequenceNumber(), "For " + services[i] +
            " service, start() call sequence number should have been ");
    }
    resetServices(services);
    serviceManager.stop();
    //verify they were all stopped
    assertInState(STATE.STOPPED, services);
    // Verify the stop() call sequence numbers for every service
    for (int i = 0; i < NUM_OF_SERVICES; i++) {
        assertEquals(((NUM_OF_SERVICES - 1) - i), services[i].getCallSequenceNumber(),
            "For " + services[i] +
            " service, stop() call sequence number should have been ");
    }
    // Try to stop again. This should be a no-op.
    serviceManager.stop();
    // Verify that stop() call sequence numbers for every service don't change.
    for (int i = 0; i < NUM_OF_SERVICES; i++) {
        assertEquals(((NUM_OF_SERVICES - 1) - i), services[i].getCallSequenceNumber(),
            "For " + services[i] +
            " service, stop() call sequence number should have been ");
    }
}
/**
 * Clears the recorded call-sequence number on every supplied service.
 * Fix: iterate over the array itself instead of the NUM_OF_SERVICES
 * constant, so the helper is correct for arrays of any size (the old
 * loop would throw or skip elements if the lengths differed).
 */
private void resetServices(CompositeServiceImpl[] services) {
    for (CompositeServiceImpl service : services) {
        service.reset();
    }
}
/**
 * One child throws from start(); verifies the composite's rollback of the
 * already-started children.
 */
@Test
public void testServiceStartup() {
    ServiceManager serviceManager = new ServiceManager("ServiceManager");
    // Add services
    for (int i = 0; i < NUM_OF_SERVICES; i++) {
        CompositeServiceImpl service = new CompositeServiceImpl(i);
        if (i == FAILED_SERVICE_SEQ_NUMBER) {
            service.setThrowExceptionOnStart(true);
        }
        serviceManager.addTestService(service);
    }
    CompositeServiceImpl[] services = serviceManager.getServices().toArray(
        new CompositeServiceImpl[0]);
    Configuration conf = new Configuration();
    // Initialise the composite service
    serviceManager.init(conf);
    // Start the composite service
    try {
        serviceManager.start();
        fail("Exception should have been thrown due to startup failure of last service");
    } catch (ServiceTestRuntimeException e) {
        for (int i = 0; i < NUM_OF_SERVICES - 1; i++) {
            if (i >= FAILED_SERVICE_SEQ_NUMBER && STOP_ONLY_STARTED_SERVICES) {
                // Failed service state should be INITED
                // NOTE(review): this always inspects the LAST service
                // (index NUM_OF_SERVICES - 1) regardless of i; services[i]
                // looks like it was intended — confirm before changing.
                assertEquals(STATE.INITED, services[NUM_OF_SERVICES - 1].getServiceState(),
                    "Service state should have been ");
            } else {
                assertEquals(STATE.STOPPED, services[i].getServiceState(),
                    "Service state should have been ");
            }
        }
    }
}
/**
 * One child throws from stop(); the composite must nevertheless drive
 * every child to STOPPED.
 */
@Test
public void testServiceStop() {
    ServiceManager serviceManager = new ServiceManager("ServiceManager");
    // Add services
    for (int i = 0; i < NUM_OF_SERVICES; i++) {
        CompositeServiceImpl service = new CompositeServiceImpl(i);
        if (i == FAILED_SERVICE_SEQ_NUMBER) {
            service.setThrowExceptionOnStop(true);
        }
        serviceManager.addTestService(service);
    }
    CompositeServiceImpl[] services = serviceManager.getServices().toArray(
        new CompositeServiceImpl[0]);
    Configuration conf = new Configuration();
    // Initialise the composite service
    serviceManager.init(conf);
    serviceManager.start();
    // Stop the composite service
    try {
        serviceManager.stop();
    } catch (ServiceTestRuntimeException e) {
        // Deliberately ignored: the exception from the failing child is
        // expected; the assertion below verifies all children still stopped.
    }
    assertInState(STATE.STOPPED, services);
}
/**
 * Assert that all services are in the same expected state.
 * @param expected expected state value
 * @param services services to examine
 */
private void assertInState(STATE expected, CompositeServiceImpl[] services) {
    // Delegates to the ranged overload over the whole array.
    assertInState(expected, services,0, services.length);
}
/**
 * Assert that every service in the half-open index range
 * [start, finish) is in the expected lifecycle state.
 * @param expected expected state value
 * @param services services to examine
 * @param start start offset
 * @param finish finish offset: the count stops before this number
 */
private void assertInState(STATE expected,
    CompositeServiceImpl[] services,
    int start, int finish) {
    for (int index = start; index < finish; index++) {
        assertInState(expected, services[index]);
    }
}
/** Assert that a single service is in the expected lifecycle state. */
private void assertInState(STATE expected, Service service) {
    assertEquals(expected, service.getServiceState(),
        "Service state should have been " + expected + " in " + service);
}
/**
 * Shut down from not-inited: expect nothing to have happened — all
 * children stay NOTINITED.
 */
@Test
public void testServiceStopFromNotInited() {
    ServiceManager serviceManager = new ServiceManager("ServiceManager");
    // Add services
    for (int i = 0; i < NUM_OF_SERVICES; i++) {
        CompositeServiceImpl service = new CompositeServiceImpl(i);
        serviceManager.addTestService(service);
    }
    CompositeServiceImpl[] services = serviceManager.getServices().toArray(
        new CompositeServiceImpl[0]);
    serviceManager.stop();
    assertInState(STATE.NOTINITED, services);
}
/**
 * Shut down from inited; the resulting child state depends on the
 * configured stop policy (see STOP_ONLY_STARTED_SERVICES).
 */
@Test
public void testServiceStopFromInited() {
    ServiceManager serviceManager = new ServiceManager("ServiceManager");
    // Add services
    for (int i = 0; i < NUM_OF_SERVICES; i++) {
        CompositeServiceImpl service = new CompositeServiceImpl(i);
        serviceManager.addTestService(service);
    }
    CompositeServiceImpl[] services = serviceManager.getServices().toArray(
        new CompositeServiceImpl[0]);
    serviceManager.init(new Configuration());
    serviceManager.stop();
    if (STOP_ONLY_STARTED_SERVICES) {
        //this policy => no services were stopped
        assertInState(STATE.INITED, services);
    } else {
        assertInState(STATE.STOPPED, services);
    }
}
/**
 * Use a null configuration &amp; expect a failure.
 * NOTE(review): despite the javadoc, the test also passes when init(null)
 * succeeds — it merely logs a warning instead of failing. Tighten to
 * fail() after init(null) if a null configuration must be rejected.
 * @throws Throwable
 */
@Test
public void testInitNullConf() throws Throwable {
    ServiceManager serviceManager = new ServiceManager("testInitNullConf");
    CompositeServiceImpl service = new CompositeServiceImpl(0);
    serviceManager.addTestService(service);
    try {
        serviceManager.init(null);
        LOG.warn("Null Configurations are permitted " + serviceManager);
    } catch (ServiceStateException e) {
        //expected
    }
}
/**
 * Walk the service through its lifecycle without any children;
 * verify that init/start/stop all complete without error.
 */
@Test
public void testServiceLifecycleNoChildren() {
    ServiceManager serviceManager = new ServiceManager("ServiceManager");
    serviceManager.init(new Configuration());
    serviceManager.start();
    serviceManager.stop();
}
/**
 * A child added during the parent's serviceInit() is inited along with
 * the parent.
 */
@Test
public void testAddServiceInInit() throws Throwable {
    BreakableService child = new BreakableService();
    assertInState(STATE.NOTINITED, child);
    CompositeServiceAddingAChild composite =
        new CompositeServiceAddingAChild(child);
    composite.init(new Configuration());
    assertInState(STATE.INITED, child);
}
/**
 * addIfService must reject a non-Service object and accept a Service.
 */
@Test
@Timeout(value = 10)
public void testAddIfService() {
    CompositeService testService = new CompositeService("TestService") {
        Service service;
        @Override
        public void serviceInit(Configuration conf) {
            // Fix: Integer.valueOf(0) — the Integer(int) constructor is
            // deprecated for removal since Java 9.
            Integer notAService = Integer.valueOf(0);
            assertFalse(addIfService(notAService), "Added an integer as a service");
            service = new AbstractService("Service") {};
            assertTrue(addIfService(service), "Unable to add a service");
        }
    };
    testService.init(new Configuration());
    assertEquals(1, testService.getServices().size(),
        "Incorrect number of services");
}
/**
 * removeService during serviceInit() must drop the named child, leaving
 * the remaining two registered.
 */
@Test
public void testRemoveService() {
    CompositeService testService = new CompositeService("TestService") {
        @Override
        public void serviceInit(Configuration conf) {
            // Fix: Integer.valueOf(0) — the Integer(int) constructor is
            // deprecated for removal since Java 9.
            Integer notAService = Integer.valueOf(0);
            assertFalse(addIfService(notAService), "Added an integer as a service");
            Service service1 = new AbstractService("Service1") {};
            addIfService(service1);
            Service service2 = new AbstractService("Service2") {};
            addIfService(service2);
            Service service3 = new AbstractService("Service3") {};
            addIfService(service3);
            removeService(service1);
        }
    };
    testService.init(new Configuration());
    assertEquals(2, testService.getServices().size(),
        "Incorrect number of services");
}
//
// Tests for adding child service to parent
//

// Child added before init: pulled through the parent's full lifecycle.
@Test
@Timeout(value = 10)
public void testAddUninitedChildBeforeInit() throws Throwable {
    CompositeService parent = new CompositeService("parent");
    BreakableService child = new BreakableService();
    AddSiblingService.addChildToService(parent, child);
    parent.init(new Configuration());
    assertInState(STATE.INITED, child);
    parent.start();
    assertInState(STATE.STARTED, child);
    parent.stop();
    assertInState(STATE.STOPPED, child);
}

// Uninited child added after parent.init(): parent.start() fails and the
// child never leaves NOTINITED.
@Test
@Timeout(value = 10)
public void testAddUninitedChildInInit() throws Throwable {
    CompositeService parent = new CompositeService("parent");
    BreakableService child = new BreakableService();
    parent.init(new Configuration());
    AddSiblingService.addChildToService(parent, child);
    assertInState(STATE.NOTINITED, child);
    try {
        parent.start();
        fail("Expected an exception, got " + parent);
    } catch (ServiceStateException e) {
        //expected
    }
    assertInState(STATE.NOTINITED, child);
    parent.stop();
    assertInState(STATE.NOTINITED, child);
    assertEquals(1, parent.getServices().size(),
        "Incorrect number of services");
}

// Uninited child added after start: left untouched through stop.
@Test
@Timeout(value = 10)
public void testAddUninitedChildInStart() throws Throwable {
    CompositeService parent = new CompositeService("parent");
    BreakableService child = new BreakableService();
    parent.init(new Configuration());
    parent.start();
    AddSiblingService.addChildToService(parent, child);
    assertInState(STATE.NOTINITED, child);
    parent.stop();
    assertInState(STATE.NOTINITED, child);
}

// Uninited child added after stop: left untouched.
@Test
@Timeout(value = 10)
public void testAddUninitedChildInStop() throws Throwable {
    CompositeService parent = new CompositeService("parent");
    BreakableService child = new BreakableService();
    parent.init(new Configuration());
    parent.start();
    parent.stop();
    AddSiblingService.addChildToService(parent, child);
    assertInState(STATE.NOTINITED, child);
}
// Pre-inited child added before init: follows the parent through
// start and stop.
@Test
@Timeout(value = 10)
public void testAddInitedChildBeforeInit() throws Throwable {
    CompositeService parent = new CompositeService("parent");
    BreakableService child = new BreakableService();
    child.init(new Configuration());
    AddSiblingService.addChildToService(parent, child);
    parent.init(new Configuration());
    assertInState(STATE.INITED, child);
    parent.start();
    assertInState(STATE.STARTED, child);
    parent.stop();
    assertInState(STATE.STOPPED, child);
}

// Pre-inited child added after parent.init(): started and stopped with
// the parent.
@Test
@Timeout(value = 10)
public void testAddInitedChildInInit() throws Throwable {
    CompositeService parent = new CompositeService("parent");
    BreakableService child = new BreakableService();
    child.init(new Configuration());
    parent.init(new Configuration());
    AddSiblingService.addChildToService(parent, child);
    parent.start();
    assertInState(STATE.STARTED, child);
    parent.stop();
    assertInState(STATE.STOPPED, child);
}

// Pre-inited child added after start: stays INITED until the parent
// stops, then is stopped with it.
@Test
@Timeout(value = 10)
public void testAddInitedChildInStart() throws Throwable {
    CompositeService parent = new CompositeService("parent");
    BreakableService child = new BreakableService();
    child.init(new Configuration());
    parent.init(new Configuration());
    parent.start();
    AddSiblingService.addChildToService(parent, child);
    assertInState(STATE.INITED, child);
    parent.stop();
    assertInState(STATE.STOPPED, child);
}

// Pre-inited child added after stop: left in INITED.
@Test
@Timeout(value = 10)
public void testAddInitedChildInStop() throws Throwable {
    CompositeService parent = new CompositeService("parent");
    BreakableService child = new BreakableService();
    child.init(new Configuration());
    parent.init(new Configuration());
    parent.start();
    parent.stop();
    AddSiblingService.addChildToService(parent, child);
    assertInState(STATE.INITED, child);
}
// Started child added before init: parent.init() fails (child cannot be
// re-inited), but the child still remains registered.
@Test
@Timeout(value = 10)
public void testAddStartedChildBeforeInit() throws Throwable {
    CompositeService parent = new CompositeService("parent");
    BreakableService child = new BreakableService();
    child.init(new Configuration());
    child.start();
    AddSiblingService.addChildToService(parent, child);
    try {
        parent.init(new Configuration());
        fail("Expected an exception, got " + parent);
    } catch (ServiceStateException e) {
        //expected
    }
    parent.stop();
    assertEquals(1, parent.getServices().size(),
        "Incorrect number of services");
}

// Started child added after parent.init(): carried through start/stop.
@Test
@Timeout(value = 10)
public void testAddStartedChildInInit() throws Throwable {
    CompositeService parent = new CompositeService("parent");
    BreakableService child = new BreakableService();
    child.init(new Configuration());
    child.start();
    parent.init(new Configuration());
    AddSiblingService.addChildToService(parent, child);
    parent.start();
    assertInState(STATE.STARTED, child);
    parent.stop();
    assertInState(STATE.STOPPED, child);
}

// Started child added after parent.start(): stopped with the parent.
@Test
@Timeout(value = 10)
public void testAddStartedChildInStart() throws Throwable {
    CompositeService parent = new CompositeService("parent");
    BreakableService child = new BreakableService();
    child.init(new Configuration());
    child.start();
    parent.init(new Configuration());
    parent.start();
    AddSiblingService.addChildToService(parent, child);
    assertInState(STATE.STARTED, child);
    parent.stop();
    assertInState(STATE.STOPPED, child);
}

// Started child added after parent.stop(): left running.
@Test
@Timeout(value = 10)
public void testAddStartedChildInStop() throws Throwable {
    CompositeService parent = new CompositeService("parent");
    BreakableService child = new BreakableService();
    child.init(new Configuration());
    child.start();
    parent.init(new Configuration());
    parent.start();
    parent.stop();
    AddSiblingService.addChildToService(parent, child);
    assertInState(STATE.STARTED, child);
}
// Stopped child added before init: parent.init() fails (child cannot be
// re-inited), but the child still remains registered.
@Test
@Timeout(value = 10)
public void testAddStoppedChildBeforeInit() throws Throwable {
    CompositeService parent = new CompositeService("parent");
    BreakableService child = new BreakableService();
    child.init(new Configuration());
    child.start();
    child.stop();
    AddSiblingService.addChildToService(parent, child);
    try {
        parent.init(new Configuration());
        fail("Expected an exception, got " + parent);
    } catch (ServiceStateException e) {
        //expected
    }
    parent.stop();
    assertEquals(1, parent.getServices().size(),
        "Incorrect number of services");
}

// Stopped child added after parent.init(): parent.start() fails (child
// cannot be restarted); child stays STOPPED and remains registered.
@Test
@Timeout(value = 10)
public void testAddStoppedChildInInit() throws Throwable {
    CompositeService parent = new CompositeService("parent");
    BreakableService child = new BreakableService();
    child.init(new Configuration());
    child.start();
    child.stop();
    parent.init(new Configuration());
    AddSiblingService.addChildToService(parent, child);
    try {
        parent.start();
        fail("Expected an exception, got " + parent);
    } catch (ServiceStateException e) {
        //expected
    }
    assertInState(STATE.STOPPED, child);
    parent.stop();
    assertEquals(1, parent.getServices().size(),
        "Incorrect number of services");
}

// Stopped child added after parent.start(): parent.stop() completes.
@Test
@Timeout(value = 10)
public void testAddStoppedChildInStart() throws Throwable {
    CompositeService parent = new CompositeService("parent");
    BreakableService child = new BreakableService();
    child.init(new Configuration());
    child.start();
    child.stop();
    parent.init(new Configuration());
    parent.start();
    AddSiblingService.addChildToService(parent, child);
    parent.stop();
}

// Stopped child added after parent.stop(): nothing further happens.
@Test
@Timeout(value = 10)
public void testAddStoppedChildInStop() throws Throwable {
    CompositeService parent = new CompositeService("parent");
    BreakableService child = new BreakableService();
    child.init(new Configuration());
    child.start();
    child.stop();
    parent.init(new Configuration());
    parent.start();
    parent.stop();
    AddSiblingService.addChildToService(parent, child);
}
//
// Tests for adding sibling service to parent
//
/**
 * Trigger state NOTINITED can never be observed from inside a lifecycle
 * callback (the service has already left NOTINITED when serviceInit runs),
 * so the sibling is never injected: it stays NOTINITED through the whole
 * parent lifecycle and the parent keeps exactly one child.
 */
@Test
@Timeout(value = 10)
public void testAddUninitedSiblingBeforeInit() throws Throwable {
  CompositeService parent = new CompositeService("parent");
  BreakableService sibling = new BreakableService();
  parent.addService(new AddSiblingService(parent,
      sibling,
      STATE.NOTINITED));
  parent.init(new Configuration());
  assertInState(STATE.NOTINITED, sibling);
  parent.start();
  assertInState(STATE.NOTINITED, sibling);
  parent.stop();
  assertInState(STATE.NOTINITED, sibling);
  assertEquals(1, parent.getServices().size(),
      "Incorrect number of services");
}
/**
 * With trigger state INITED the sibling is injected during the parent's
 * init, but the sibling itself is still NOTINITED — so the parent's start
 * must fail with a {@link ServiceStateException} (NOTINITED -&gt; STARTED is
 * illegal). Both the injector and the sibling remain registered.
 */
@Test
@Timeout(value = 10)
public void testAddUninitedSiblingInInit() throws Throwable {
  CompositeService parent = new CompositeService("parent");
  BreakableService sibling = new BreakableService();
  parent.addService(new AddSiblingService(parent,
      sibling,
      STATE.INITED));
  parent.init(new Configuration());
  try {
    parent.start();
    fail("Expected an exception, got " + parent);
  } catch (ServiceStateException e) {
    //expected
  }
  parent.stop();
  assertEquals(2, parent.getServices().size(),
      "Incorrect number of services");
}
/**
 * With trigger state STARTED the sibling is injected while the parent's
 * start is already iterating its children, so the sibling is registered
 * (service count 2) but never inited or started — it stays NOTINITED for
 * the entire parent lifecycle.
 */
@Test
@Timeout(value = 10)
public void testAddUninitedSiblingInStart() throws Throwable {
  CompositeService parent = new CompositeService("parent");
  BreakableService sibling = new BreakableService();
  parent.addService(new AddSiblingService(parent,
      sibling,
      STATE.STARTED));
  parent.init(new Configuration());
  assertInState(STATE.NOTINITED, sibling);
  parent.start();
  assertInState(STATE.NOTINITED, sibling);
  parent.stop();
  assertInState(STATE.NOTINITED, sibling);
  assertEquals(2, parent.getServices().size(),
      "Incorrect number of services");
}
/**
 * With trigger state STOPPED the sibling is injected only during the
 * parent's stop; by then no lifecycle callbacks will reach it, so it is
 * registered (service count 2) but remains NOTINITED throughout.
 */
@Test
@Timeout(value = 10)
public void testAddUninitedSiblingInStop() throws Throwable {
  CompositeService parent = new CompositeService("parent");
  BreakableService sibling = new BreakableService();
  parent.addService(new AddSiblingService(parent,
      sibling,
      STATE.STOPPED));
  parent.init(new Configuration());
  assertInState(STATE.NOTINITED, sibling);
  parent.start();
  assertInState(STATE.NOTINITED, sibling);
  parent.stop();
  assertInState(STATE.NOTINITED, sibling);
  assertEquals(2, parent.getServices().size(),
      "Incorrect number of services");
}
/**
 * An already-INITED sibling with the unreachable NOTINITED trigger is never
 * injected: its state is untouched (stays INITED) and the parent keeps only
 * the injector service.
 */
@Test
@Timeout(value = 10)
public void testAddInitedSiblingBeforeInit() throws Throwable {
  CompositeService parent = new CompositeService("parent");
  BreakableService sibling = new BreakableService();
  sibling.init(new Configuration());
  parent.addService(new AddSiblingService(parent,
      sibling,
      STATE.NOTINITED));
  parent.init(new Configuration());
  assertInState(STATE.INITED, sibling);
  parent.start();
  assertInState(STATE.INITED, sibling);
  parent.stop();
  assertInState(STATE.INITED, sibling);
  assertEquals(1, parent.getServices().size(),
      "Incorrect number of services");
}
/**
 * An already-INITED sibling injected during the parent's init joins the
 * parent's lifecycle from then on: the parent's start starts it and the
 * parent's stop stops it.
 */
@Test
@Timeout(value = 10)
public void testAddInitedSiblingInInit() throws Throwable {
  CompositeService parent = new CompositeService("parent");
  BreakableService sibling = new BreakableService();
  sibling.init(new Configuration());
  parent.addService(new AddSiblingService(parent,
      sibling,
      STATE.INITED));
  parent.init(new Configuration());
  assertInState(STATE.INITED, sibling);
  parent.start();
  assertInState(STATE.STARTED, sibling);
  parent.stop();
  assertInState(STATE.STOPPED, sibling);
  assertEquals(2, parent.getServices().size(),
      "Incorrect number of services");
}
/**
 * An INITED sibling injected mid-start is too late to be started by that
 * start pass (stays INITED), but the parent's stop still reaches it and
 * moves it to STOPPED (INITED -&gt; STOPPED is a legal transition).
 */
@Test
@Timeout(value = 10)
public void testAddInitedSiblingInStart() throws Throwable {
  CompositeService parent = new CompositeService("parent");
  BreakableService sibling = new BreakableService();
  sibling.init(new Configuration());
  parent.addService(new AddSiblingService(parent,
      sibling,
      STATE.STARTED));
  parent.init(new Configuration());
  assertInState(STATE.INITED, sibling);
  parent.start();
  assertInState(STATE.INITED, sibling);
  parent.stop();
  assertInState(STATE.STOPPED, sibling);
  assertEquals(2, parent.getServices().size(),
      "Incorrect number of services");
}
/**
 * With trigger state STOPPED and a parent that is only ever inited, the
 * injector never reaches its trigger state, so the sibling is never added.
 *
 * <p>Previously this test only verified that init threw no exception; it
 * now also pins the sibling state and the service count, matching its
 * sibling tests.
 */
@Test
@Timeout(value = 10)
public void testAddInitedSiblingInStop() throws Throwable {
  CompositeService parent = new CompositeService("parent");
  BreakableService sibling = new BreakableService();
  sibling.init(new Configuration());
  parent.addService(new AddSiblingService(parent,
      sibling,
      STATE.STOPPED));
  parent.init(new Configuration());
  // the parent is never stopped, so the STOPPED trigger never fires:
  // the sibling must not have been added or touched
  assertInState(STATE.INITED, sibling);
  assertEquals(1, parent.getServices().size(),
      "Incorrect number of services");
}
/**
 * A STARTED sibling with the unreachable NOTINITED trigger is never
 * injected: it stays STARTED through the whole parent lifecycle and the
 * parent keeps only the injector service.
 */
@Test
@Timeout(value = 10)
public void testAddStartedSiblingBeforeInit() throws Throwable {
  CompositeService parent = new CompositeService("parent");
  BreakableService sibling = new BreakableService();
  sibling.init(new Configuration());
  sibling.start();
  parent.addService(new AddSiblingService(parent,
      sibling,
      STATE.NOTINITED));
  parent.init(new Configuration());
  assertInState(STATE.STARTED, sibling);
  parent.start();
  assertInState(STATE.STARTED, sibling);
  parent.stop();
  assertInState(STATE.STARTED, sibling);
  assertEquals(1, parent.getServices().size(),
      "Incorrect number of services");
}
/**
 * A STARTED sibling injected during the parent's init rides the parent's
 * lifecycle from then on: the parent's start leaves it STARTED and the
 * parent's stop stops it.
 */
@Test
@Timeout(value = 10)
public void testAddStartedSiblingInInit() throws Throwable {
  CompositeService parent = new CompositeService("parent");
  BreakableService sibling = new BreakableService();
  sibling.init(new Configuration());
  sibling.start();
  parent.addService(new AddSiblingService(parent,
      sibling,
      STATE.INITED));
  parent.init(new Configuration());
  assertInState(STATE.STARTED, sibling);
  parent.start();
  assertInState(STATE.STARTED, sibling);
  parent.stop();
  assertInState(STATE.STOPPED, sibling);
  assertEquals(2, parent.getServices().size(),
      "Incorrect number of services");
}
/**
 * A STARTED sibling injected mid-start is already in the target state, so
 * the parent's start leaves it alone; because it is STARTED, the parent's
 * stop does stop it.
 */
@Test
@Timeout(value = 10)
public void testAddStartedSiblingInStart() throws Throwable {
  CompositeService parent = new CompositeService("parent");
  BreakableService sibling = new BreakableService();
  sibling.init(new Configuration());
  sibling.start();
  parent.addService(new AddSiblingService(parent,
      sibling,
      STATE.STARTED));
  parent.init(new Configuration());
  assertInState(STATE.STARTED, sibling);
  parent.start();
  assertInState(STATE.STARTED, sibling);
  parent.stop();
  assertInState(STATE.STOPPED, sibling);
  assertEquals(2, parent.getServices().size(),
      "Incorrect number of services");
}
/**
 * A STARTED sibling injected only during the parent's stop arrives after
 * the stop pass has already run over the parent's children, so it remains
 * STARTED even after the parent has stopped.
 */
@Test
@Timeout(value = 10)
public void testAddStartedSiblingInStop() throws Throwable {
  CompositeService parent = new CompositeService("parent");
  BreakableService sibling = new BreakableService();
  sibling.init(new Configuration());
  sibling.start();
  parent.addService(new AddSiblingService(parent,
      sibling,
      STATE.STOPPED));
  parent.init(new Configuration());
  assertInState(STATE.STARTED, sibling);
  parent.start();
  assertInState(STATE.STARTED, sibling);
  parent.stop();
  assertInState(STATE.STARTED, sibling);
  assertEquals(2, parent.getServices().size(),
      "Incorrect number of services");
}
/**
 * A fully-cycled (STOPPED) sibling with the unreachable NOTINITED trigger
 * is never injected: it stays STOPPED and the parent keeps only the
 * injector service.
 */
@Test
@Timeout(value = 10)
public void testAddStoppedSiblingBeforeInit() throws Throwable {
  CompositeService parent = new CompositeService("parent");
  BreakableService sibling = new BreakableService();
  sibling.init(new Configuration());
  sibling.start();
  sibling.stop();
  parent.addService(new AddSiblingService(parent,
      sibling,
      STATE.NOTINITED));
  parent.init(new Configuration());
  assertInState(STATE.STOPPED, sibling);
  parent.start();
  assertInState(STATE.STOPPED, sibling);
  parent.stop();
  assertInState(STATE.STOPPED, sibling);
  assertEquals(1, parent.getServices().size(),
      "Incorrect number of services");
}
/**
 * A STOPPED sibling injected during the parent's init makes the parent's
 * start fail with a {@link ServiceStateException}: a STOPPED service cannot
 * be started again. The sibling stays STOPPED and remains registered.
 */
@Test
@Timeout(value = 10)
public void testAddStoppedSiblingInInit() throws Throwable {
  CompositeService parent = new CompositeService("parent");
  BreakableService sibling = new BreakableService();
  sibling.init(new Configuration());
  sibling.start();
  sibling.stop();
  parent.addService(new AddSiblingService(parent,
      sibling,
      STATE.INITED));
  parent.init(new Configuration());
  assertInState(STATE.STOPPED, sibling);
  try {
    parent.start();
    fail("Expected an exception, got " + parent);
  } catch (ServiceStateException e) {
    //expected
  }
  parent.stop();
  assertInState(STATE.STOPPED, sibling);
  assertEquals(2, parent.getServices().size(),
      "Incorrect number of services");
}
/**
 * A STOPPED sibling injected mid-start is never started by that start pass,
 * and the parent's stop leaves an already-stopped service alone — so the
 * sibling stays STOPPED throughout while still being registered.
 */
@Test
@Timeout(value = 10)
public void testAddStoppedSiblingInStart() throws Throwable {
  CompositeService parent = new CompositeService("parent");
  BreakableService sibling = new BreakableService();
  sibling.init(new Configuration());
  sibling.start();
  sibling.stop();
  parent.addService(new AddSiblingService(parent,
      sibling,
      STATE.STARTED));
  parent.init(new Configuration());
  assertInState(STATE.STOPPED, sibling);
  parent.start();
  assertInState(STATE.STOPPED, sibling);
  parent.stop();
  assertInState(STATE.STOPPED, sibling);
  assertEquals(2, parent.getServices().size(),
      "Incorrect number of services");
}
/**
 * A STOPPED sibling injected only during the parent's stop is registered
 * but never touched by any lifecycle callback: it stays STOPPED throughout.
 */
@Test
@Timeout(value = 10)
public void testAddStoppedSiblingInStop() throws Throwable {
  CompositeService parent = new CompositeService("parent");
  BreakableService sibling = new BreakableService();
  sibling.init(new Configuration());
  sibling.start();
  sibling.stop();
  parent.addService(new AddSiblingService(parent,
      sibling,
      STATE.STOPPED));
  parent.init(new Configuration());
  assertInState(STATE.STOPPED, sibling);
  parent.start();
  assertInState(STATE.STOPPED, sibling);
  parent.stop();
  assertInState(STATE.STOPPED, sibling);
  assertEquals(2, parent.getServices().size(),
      "Incorrect number of services");
}
/**
 * Composite service that defers adding a predefined child until its own
 * serviceInit runs, so the child is registered (and then inited by the
 * superclass) as part of the parent's init phase.
 */
public static class CompositeServiceAddingAChild extends CompositeService{
  // NOTE(review): package-visible and non-final; tests elsewhere in the
  // file may access it directly, so it is left as-is.
  Service child;
  public CompositeServiceAddingAChild(Service child) {
    super("CompositeServiceAddingAChild");
    this.child = child;
  }
  @Override
  protected void serviceInit(Configuration conf) throws Exception {
    // register the child first so super.serviceInit() inits it too
    addService(child);
    super.serviceInit(conf);
  }
}
/**
 * Runtime exception thrown by {@link CompositeServiceImpl} to simulate
 * failures during service start or stop in tests.
 */
public static class ServiceTestRuntimeException extends RuntimeException {
  // RuntimeException is Serializable; declare an explicit version UID
  // rather than relying on the compiler-generated one.
  private static final long serialVersionUID = 1L;

  public ServiceTestRuntimeException(String message) {
    super(message);
  }
}
/**
 * This is a composite service that keeps a count of the number of lifecycle
 * events called, and can be set to throw a {@link ServiceTestRuntimeException }
 * during service start or stop.
 *
 * <p>The counter is shared across ALL instances (static), which lets tests
 * assert the global ordering of lifecycle events across several services.
 * NOTE(review): the static counter is not thread-safe; this assumes
 * single-threaded test execution.
 */
public static class CompositeServiceImpl extends CompositeService {
  public static boolean isPolicyToStopOnlyStartedServices() {
    return STOP_ONLY_STARTED_SERVICES;
  }
  // global lifecycle-event sequence number, shared by all instances;
  // -1 means "no event recorded yet"
  private static int counter = -1;
  // value of the global counter at the time THIS instance last saw an event
  private int callSequenceNumber = -1;
  private boolean throwExceptionOnStart;
  private boolean throwExceptionOnStop;
  public CompositeServiceImpl(int sequenceNumber) {
    super(Integer.toString(sequenceNumber));
  }
  @Override
  protected void serviceInit(Configuration conf) throws Exception {
    counter++;
    callSequenceNumber = counter;
    super.serviceInit(conf);
  }
  @Override
  protected void serviceStart() throws Exception {
    // throw BEFORE recording the event, so a failed start is not counted
    if (throwExceptionOnStart) {
      throw new ServiceTestRuntimeException("Fake service start exception");
    }
    counter++;
    callSequenceNumber = counter;
    super.serviceStart();
  }
  @Override
  protected void serviceStop() throws Exception {
    // unlike start, the stop event is recorded even when it then fails —
    // stop is always attempted, so the attempt itself is counted
    counter++;
    callSequenceNumber = counter;
    if (throwExceptionOnStop) {
      throw new ServiceTestRuntimeException("Fake service stop exception");
    }
    super.serviceStop();
  }
  public static int getCounter() {
    return counter;
  }
  public int getCallSequenceNumber() {
    return callSequenceNumber;
  }
  // resets BOTH the per-instance sequence number and the global counter
  public void reset() {
    callSequenceNumber = -1;
    counter = -1;
  }
  public static void resetCounter() {
    counter = -1;
  }
  public void setThrowExceptionOnStart(boolean throwExceptionOnStart) {
    this.throwExceptionOnStart = throwExceptionOnStart;
  }
  public void setThrowExceptionOnStop(boolean throwExceptionOnStop) {
    this.throwExceptionOnStop = throwExceptionOnStop;
  }
  @Override
  public String toString() {
    return "Service " + getName();
  }
}
/**
 * Composite service whose sole purpose is to widen the visibility of
 * {@link CompositeService#addService} so that test code outside the
 * class hierarchy can register children.
 */
public static class ServiceManager extends CompositeService {

  public ServiceManager(String name) {
    super(name);
  }

  /** Publicly accessible wrapper around the protected addService(). */
  public void addTestService(CompositeService service) {
    addService(service);
  }
}
/**
 * Composite service that injects a given sibling service into a given
 * parent the moment this service itself reaches a chosen lifecycle state.
 * Used to probe how {@code CompositeService} copes with children being
 * added mid-lifecycle.
 */
public static class AddSiblingService extends CompositeService {
  private final CompositeService parent;
  private final Service serviceToAdd;
  // now final for consistency with the other two fields: it is assigned
  // exactly once, in the constructor
  private final STATE triggerState;

  /**
   * @param parent parent service the sibling will be added to
   * @param serviceToAdd sibling service to inject
   * @param triggerState lifecycle state of THIS service at which to inject
   */
  public AddSiblingService(CompositeService parent,
      Service serviceToAdd,
      STATE triggerState) {
    super("ParentStateManipulatorService");
    this.parent = parent;
    this.serviceToAdd = serviceToAdd;
    this.triggerState = triggerState;
  }
  /**
   * Add the serviceToAdd to the parent if this service
   * is in the state requested
   */
  private void maybeAddSibling() {
    if (getServiceState() == triggerState) {
      parent.addService(serviceToAdd);
    }
  }
  @Override
  protected void serviceInit(Configuration conf) throws Exception {
    maybeAddSibling();
    super.serviceInit(conf);
  }
  @Override
  protected void serviceStart() throws Exception {
    maybeAddSibling();
    super.serviceStart();
  }
  @Override
  protected void serviceStop() throws Exception {
    maybeAddSibling();
    super.serviceStop();
  }
  /**
   * Expose addService method
   * @param parent parent service
   * @param child child to add
   */
  public static void addChildToService(CompositeService parent, Service child) {
    parent.addService(child);
  }
}
}
|
googleapis/google-cloud-java | 35,687 | java-vmwareengine/proto-google-cloud-vmwareengine-v1/src/main/java/com/google/cloud/vmwareengine/v1/DeleteManagementDnsZoneBindingRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/vmwareengine/v1/vmwareengine.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.vmwareengine.v1;
/**
*
*
* <pre>
* Request message for
* [VmwareEngine.DeleteManagementDnsZoneBinding][google.cloud.vmwareengine.v1.VmwareEngine.DeleteManagementDnsZoneBinding]
* </pre>
*
* Protobuf type {@code google.cloud.vmwareengine.v1.DeleteManagementDnsZoneBindingRequest}
*/
public final class DeleteManagementDnsZoneBindingRequest
extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.vmwareengine.v1.DeleteManagementDnsZoneBindingRequest)
DeleteManagementDnsZoneBindingRequestOrBuilder {
// Generated messages serialize via the protobuf wire format, not Java
// serialization; this constant only satisfies java.io.Serializable.
private static final long serialVersionUID = 0L;

// Use DeleteManagementDnsZoneBindingRequest.newBuilder() to construct.
private DeleteManagementDnsZoneBindingRequest(
    com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}

// Default instance: both proto3 string fields default to empty.
private DeleteManagementDnsZoneBindingRequest() {
  name_ = "";
  requestId_ = "";
}

@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  return new DeleteManagementDnsZoneBindingRequest();
}

// Descriptor/reflection plumbing emitted by protoc; the descriptor data
// lives in the generated VmwareengineProto holder class.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.vmwareengine.v1.VmwareengineProto
      .internal_static_google_cloud_vmwareengine_v1_DeleteManagementDnsZoneBindingRequest_descriptor;
}

@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.vmwareengine.v1.VmwareengineProto
      .internal_static_google_cloud_vmwareengine_v1_DeleteManagementDnsZoneBindingRequest_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.vmwareengine.v1.DeleteManagementDnsZoneBindingRequest.class,
          com.google.cloud.vmwareengine.v1.DeleteManagementDnsZoneBindingRequest.Builder.class);
}
public static final int NAME_FIELD_NUMBER = 1;

// Holds either a java.lang.String or a ByteString; decoded lazily.
@SuppressWarnings("serial")
private volatile java.lang.Object name_ = "";

/**
 *
 *
 * <pre>
 * Required. The resource name of the management DNS zone binding to delete.
 * Resource names are schemeless URIs that follow the conventions in
 * https://cloud.google.com/apis/design/resource_names.
 * For example:
 * `projects/my-project/locations/us-central1-a/privateClouds/my-cloud/managementDnsZoneBindings/my-management-dns-zone-binding`
 * </pre>
 *
 * <code>
 * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return The name.
 */
@java.lang.Override
public java.lang.String getName() {
  java.lang.Object ref = name_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    // cache the decoded String so later calls skip the UTF-8 decode
    name_ = s;
    return s;
  }
}

/**
 *
 *
 * <pre>
 * Required. The resource name of the management DNS zone binding to delete.
 * Resource names are schemeless URIs that follow the conventions in
 * https://cloud.google.com/apis/design/resource_names.
 * For example:
 * `projects/my-project/locations/us-central1-a/privateClouds/my-cloud/managementDnsZoneBindings/my-management-dns-zone-binding`
 * </pre>
 *
 * <code>
 * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return The bytes for name.
 */
@java.lang.Override
public com.google.protobuf.ByteString getNameBytes() {
  java.lang.Object ref = name_;
  if (ref instanceof java.lang.String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    // cache the encoded ByteString for subsequent calls
    name_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
public static final int REQUEST_ID_FIELD_NUMBER = 2;

// Holds either a java.lang.String or a ByteString; decoded lazily.
@SuppressWarnings("serial")
private volatile java.lang.Object requestId_ = "";

/**
 *
 *
 * <pre>
 * Optional. A request ID to identify requests. Specify a unique request ID
 * so that if you must retry your request, the server will know to ignore
 * the request if it has already been completed. The server guarantees that a
 * request doesn't result in creation of duplicate commitments for at least 60
 * minutes.
 *
 * For example, consider a situation where you make an initial request and the
 * request times out. If you make the request again with the same request
 * ID, the server can check if the original operation with the same request ID
 * was received, and if so, will ignore the second request. This prevents
 * clients from accidentally creating duplicate commitments.
 *
 * The request ID must be a valid UUID with the exception that zero UUID is
 * not supported (00000000-0000-0000-0000-000000000000).
 * </pre>
 *
 * <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
 *
 * @return The requestId.
 */
@java.lang.Override
public java.lang.String getRequestId() {
  java.lang.Object ref = requestId_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    // cache the decoded String so later calls skip the UTF-8 decode
    requestId_ = s;
    return s;
  }
}

/**
 *
 *
 * <pre>
 * Optional. A request ID to identify requests. Specify a unique request ID
 * so that if you must retry your request, the server will know to ignore
 * the request if it has already been completed. The server guarantees that a
 * request doesn't result in creation of duplicate commitments for at least 60
 * minutes.
 *
 * For example, consider a situation where you make an initial request and the
 * request times out. If you make the request again with the same request
 * ID, the server can check if the original operation with the same request ID
 * was received, and if so, will ignore the second request. This prevents
 * clients from accidentally creating duplicate commitments.
 *
 * The request ID must be a valid UUID with the exception that zero UUID is
 * not supported (00000000-0000-0000-0000-000000000000).
 * </pre>
 *
 * <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
 *
 * @return The bytes for requestId.
 */
@java.lang.Override
public com.google.protobuf.ByteString getRequestIdBytes() {
  java.lang.Object ref = requestId_;
  if (ref instanceof java.lang.String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    // cache the encoded ByteString for subsequent calls
    requestId_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
private byte memoizedIsInitialized = -1;

@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  // proto3 message with no required fields: always initialized
  memoizedIsInitialized = 1;
  return true;
}

@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  // proto3 semantics: fields at their default (empty string) are omitted
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(requestId_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 2, requestId_);
  }
  getUnknownFields().writeTo(output);
}

@java.lang.Override
public int getSerializedSize() {
  // memoized: messages are immutable, so the size never changes
  int size = memoizedSize;
  if (size != -1) return size;
  size = 0;
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(requestId_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, requestId_);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}

@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.cloud.vmwareengine.v1.DeleteManagementDnsZoneBindingRequest)) {
    return super.equals(obj);
  }
  com.google.cloud.vmwareengine.v1.DeleteManagementDnsZoneBindingRequest other =
      (com.google.cloud.vmwareengine.v1.DeleteManagementDnsZoneBindingRequest) obj;
  if (!getName().equals(other.getName())) return false;
  if (!getRequestId().equals(other.getRequestId())) return false;
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}

@java.lang.Override
public int hashCode() {
  // memoized; field hashes mixed with the standard protobuf 37/53 scheme
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  hash = (37 * hash) + NAME_FIELD_NUMBER;
  hash = (53 * hash) + getName().hashCode();
  hash = (37 * hash) + REQUEST_ID_FIELD_NUMBER;
  hash = (53 * hash) + getRequestId().hashCode();
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
// Standard generated parse entry points: one overload per input source
// (ByteBuffer, ByteString, byte[], InputStream, CodedInputStream), each
// with and without an ExtensionRegistry.
public static com.google.cloud.vmwareengine.v1.DeleteManagementDnsZoneBindingRequest parseFrom(
    java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.vmwareengine.v1.DeleteManagementDnsZoneBindingRequest parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.vmwareengine.v1.DeleteManagementDnsZoneBindingRequest parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.vmwareengine.v1.DeleteManagementDnsZoneBindingRequest parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.vmwareengine.v1.DeleteManagementDnsZoneBindingRequest parseFrom(
    byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.vmwareengine.v1.DeleteManagementDnsZoneBindingRequest parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.vmwareengine.v1.DeleteManagementDnsZoneBindingRequest parseFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.vmwareengine.v1.DeleteManagementDnsZoneBindingRequest parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

// Delimited variants read a length prefix first, allowing several messages
// to be streamed over one InputStream.
public static com.google.cloud.vmwareengine.v1.DeleteManagementDnsZoneBindingRequest
    parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}

public static com.google.cloud.vmwareengine.v1.DeleteManagementDnsZoneBindingRequest
    parseDelimitedFrom(
        java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.cloud.vmwareengine.v1.DeleteManagementDnsZoneBindingRequest parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.vmwareengine.v1.DeleteManagementDnsZoneBindingRequest parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}

public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}

public static Builder newBuilder(
    com.google.cloud.vmwareengine.v1.DeleteManagementDnsZoneBindingRequest prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}

@java.lang.Override
public Builder toBuilder() {
  // the default instance yields a fresh builder; anything else is copied
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}

@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
*
*
* <pre>
* Request message for
* [VmwareEngine.DeleteManagementDnsZoneBinding][google.cloud.vmwareengine.v1.VmwareEngine.DeleteManagementDnsZoneBinding]
* </pre>
*
* Protobuf type {@code google.cloud.vmwareengine.v1.DeleteManagementDnsZoneBindingRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.vmwareengine.v1.DeleteManagementDnsZoneBindingRequest)
com.google.cloud.vmwareengine.v1.DeleteManagementDnsZoneBindingRequestOrBuilder {
// Descriptor/reflection plumbing for the Builder, mirroring the message's.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.vmwareengine.v1.VmwareengineProto
      .internal_static_google_cloud_vmwareengine_v1_DeleteManagementDnsZoneBindingRequest_descriptor;
}

@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.vmwareengine.v1.VmwareengineProto
      .internal_static_google_cloud_vmwareengine_v1_DeleteManagementDnsZoneBindingRequest_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.vmwareengine.v1.DeleteManagementDnsZoneBindingRequest.class,
          com.google.cloud.vmwareengine.v1.DeleteManagementDnsZoneBindingRequest.Builder.class);
}

// Construct using
// com.google.cloud.vmwareengine.v1.DeleteManagementDnsZoneBindingRequest.newBuilder()
private Builder() {}

private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  super(parent);
}

@java.lang.Override
public Builder clear() {
  super.clear();
  // reset the has-been-set bits along with the field values
  bitField0_ = 0;
  name_ = "";
  requestId_ = "";
  return this;
}

@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
  return com.google.cloud.vmwareengine.v1.VmwareengineProto
      .internal_static_google_cloud_vmwareengine_v1_DeleteManagementDnsZoneBindingRequest_descriptor;
}

@java.lang.Override
public com.google.cloud.vmwareengine.v1.DeleteManagementDnsZoneBindingRequest
    getDefaultInstanceForType() {
  return com.google.cloud.vmwareengine.v1.DeleteManagementDnsZoneBindingRequest
      .getDefaultInstance();
}

@java.lang.Override
public com.google.cloud.vmwareengine.v1.DeleteManagementDnsZoneBindingRequest build() {
  com.google.cloud.vmwareengine.v1.DeleteManagementDnsZoneBindingRequest result =
      buildPartial();
  if (!result.isInitialized()) {
    throw newUninitializedMessageException(result);
  }
  return result;
}

@java.lang.Override
public com.google.cloud.vmwareengine.v1.DeleteManagementDnsZoneBindingRequest buildPartial() {
  com.google.cloud.vmwareengine.v1.DeleteManagementDnsZoneBindingRequest result =
      new com.google.cloud.vmwareengine.v1.DeleteManagementDnsZoneBindingRequest(this);
  if (bitField0_ != 0) {
    buildPartial0(result);
  }
  onBuilt();
  return result;
}

// Copies only the fields whose bitField0_ bit is set into the new message.
private void buildPartial0(
    com.google.cloud.vmwareengine.v1.DeleteManagementDnsZoneBindingRequest result) {
  int from_bitField0_ = bitField0_;
  if (((from_bitField0_ & 0x00000001) != 0)) {
    result.name_ = name_;
  }
  if (((from_bitField0_ & 0x00000002) != 0)) {
    result.requestId_ = requestId_;
  }
}
// The following overrides simply delegate to the superclass; protoc emits
// them to pin the covariant Builder return type.
@java.lang.Override
public Builder clone() {
  return super.clone();
}

@java.lang.Override
public Builder setField(
    com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
  return super.setField(field, value);
}

@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
  return super.clearField(field);
}

@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
  return super.clearOneof(oneof);
}

@java.lang.Override
public Builder setRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
  return super.setRepeatedField(field, index, value);
}

@java.lang.Override
public Builder addRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
  return super.addRepeatedField(field, value);
}

@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
  // dispatch to the typed merge when possible, else fall back to reflection
  if (other instanceof com.google.cloud.vmwareengine.v1.DeleteManagementDnsZoneBindingRequest) {
    return mergeFrom(
        (com.google.cloud.vmwareengine.v1.DeleteManagementDnsZoneBindingRequest) other);
  } else {
    super.mergeFrom(other);
    return this;
  }
}

public Builder mergeFrom(
    com.google.cloud.vmwareengine.v1.DeleteManagementDnsZoneBindingRequest other) {
  if (other
      == com.google.cloud.vmwareengine.v1.DeleteManagementDnsZoneBindingRequest
          .getDefaultInstance()) return this;
  // proto3 merge semantics: only non-default (non-empty) fields overwrite
  if (!other.getName().isEmpty()) {
    name_ = other.name_;
    bitField0_ |= 0x00000001;
    onChanged();
  }
  if (!other.getRequestId().isEmpty()) {
    requestId_ = other.requestId_;
    bitField0_ |= 0x00000002;
    onChanged();
  }
  this.mergeUnknownFields(other.getUnknownFields());
  onChanged();
  return this;
}

@java.lang.Override
public final boolean isInitialized() {
  return true;
}
@java.lang.Override
public Builder mergeFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  try {
    boolean done = false;
    while (!done) {
      // tag = (field_number << 3) | wire_type; 10 and 18 are the
      // length-delimited tags for fields 1 (name) and 2 (request_id)
      int tag = input.readTag();
      switch (tag) {
        case 0:
          done = true;
          break;
        case 10:
          {
            name_ = input.readStringRequireUtf8();
            bitField0_ |= 0x00000001;
            break;
          } // case 10
        case 18:
          {
            requestId_ = input.readStringRequireUtf8();
            bitField0_ |= 0x00000002;
            break;
          } // case 18
        default:
          {
            // unrecognized fields are preserved in unknown fields
            if (!super.parseUnknownField(input, extensionRegistry, tag)) {
              done = true; // was an endgroup tag
            }
            break;
          } // default:
      } // switch (tag)
    } // while (!done)
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.unwrapIOException();
  } finally {
    onChanged();
  } // finally
  return this;
}
// Tracks which optional/explicit fields have been set on this builder.
private int bitField0_;

// Holds either a String or a lazily-decoded ByteString.
private java.lang.Object name_ = "";

/**
 * Required. The resource name of the management DNS zone binding to delete.
 * Resource names are schemeless URIs that follow the conventions in
 * https://cloud.google.com/apis/design/resource_names. For example:
 * {@code projects/my-project/locations/us-central1-a/privateClouds/my-cloud/managementDnsZoneBindings/my-management-dns-zone-binding}
 *
 * @return The name.
 */
public java.lang.String getName() {
  java.lang.Object current = name_;
  if (current instanceof java.lang.String) {
    return (java.lang.String) current;
  }
  // First String access after parsing: decode the UTF-8 bytes once and
  // cache the decoded String for subsequent calls.
  java.lang.String decoded = ((com.google.protobuf.ByteString) current).toStringUtf8();
  name_ = decoded;
  return decoded;
}

/**
 * Returns {@code name} as a UTF-8 {@link com.google.protobuf.ByteString},
 * caching the encoded form.
 *
 * @return The bytes for name.
 */
public com.google.protobuf.ByteString getNameBytes() {
  java.lang.Object current = name_;
  if (!(current instanceof java.lang.String)) {
    return (com.google.protobuf.ByteString) current;
  }
  com.google.protobuf.ByteString encoded =
      com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) current);
  name_ = encoded;
  return encoded;
}
/**
 * Sets the required resource name of the management DNS zone binding to
 * delete (see https://cloud.google.com/apis/design/resource_names).
 *
 * @param value The name to set.
 * @return This builder for chaining.
 */
public Builder setName(java.lang.String value) {
  java.util.Objects.requireNonNull(value);
  name_ = value;
  bitField0_ |= 0x00000001;
  onChanged();
  return this;
}

/**
 * Resets {@code name} to its default (empty) value and clears its has-bit.
 *
 * @return This builder for chaining.
 */
public Builder clearName() {
  name_ = getDefaultInstance().getName();
  bitField0_ = (bitField0_ & ~0x00000001);
  onChanged();
  return this;
}

/**
 * Sets {@code name} from raw bytes; rejects sequences that are not valid
 * UTF-8.
 *
 * @param value The bytes for name to set.
 * @return This builder for chaining.
 */
public Builder setNameBytes(com.google.protobuf.ByteString value) {
  java.util.Objects.requireNonNull(value);
  checkByteStringIsUtf8(value);
  name_ = value;
  bitField0_ |= 0x00000001;
  onChanged();
  return this;
}
// Holds either a String or a lazily-decoded ByteString.
private java.lang.Object requestId_ = "";

/**
 * Optional. A request ID used for idempotent retries: the server ignores a
 * repeated request carrying the same ID for at least 60 minutes, preventing
 * duplicate commitments when a request times out and is retried. Must be a
 * valid UUID; the zero UUID (00000000-0000-0000-0000-000000000000) is not
 * supported.
 *
 * @return The requestId.
 */
public java.lang.String getRequestId() {
  java.lang.Object current = requestId_;
  if (current instanceof java.lang.String) {
    return (java.lang.String) current;
  }
  // First String access after parsing: decode once and cache.
  java.lang.String decoded = ((com.google.protobuf.ByteString) current).toStringUtf8();
  requestId_ = decoded;
  return decoded;
}

/**
 * Returns {@code request_id} as a UTF-8
 * {@link com.google.protobuf.ByteString}, caching the encoded form.
 *
 * @return The bytes for requestId.
 */
public com.google.protobuf.ByteString getRequestIdBytes() {
  java.lang.Object current = requestId_;
  if (!(current instanceof java.lang.String)) {
    return (com.google.protobuf.ByteString) current;
  }
  com.google.protobuf.ByteString encoded =
      com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) current);
  requestId_ = encoded;
  return encoded;
}
/**
 * Sets the optional idempotency request ID (a valid non-zero UUID; see
 * {@link #getRequestId()} for semantics).
 *
 * @param value The requestId to set.
 * @return This builder for chaining.
 */
public Builder setRequestId(java.lang.String value) {
  java.util.Objects.requireNonNull(value);
  requestId_ = value;
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}

/**
 * Resets {@code request_id} to its default (empty) value and clears its
 * has-bit.
 *
 * @return This builder for chaining.
 */
public Builder clearRequestId() {
  requestId_ = getDefaultInstance().getRequestId();
  bitField0_ = (bitField0_ & ~0x00000002);
  onChanged();
  return this;
}

/**
 * Sets {@code request_id} from raw bytes; rejects sequences that are not
 * valid UTF-8.
 *
 * @param value The bytes for requestId to set.
 * @return This builder for chaining.
 */
public Builder setRequestIdBytes(com.google.protobuf.ByteString value) {
  java.util.Objects.requireNonNull(value);
  checkByteStringIsUtf8(value);
  requestId_ = value;
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
  // Replaces the builder's unknown-field set wholesale.
  return super.setUnknownFields(unknownFields);
}

@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  // Merges the given unknown fields into the existing set.
  return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.vmwareengine.v1.DeleteManagementDnsZoneBindingRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.vmwareengine.v1.DeleteManagementDnsZoneBindingRequest)
private static final com.google.cloud.vmwareengine.v1.DeleteManagementDnsZoneBindingRequest
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.vmwareengine.v1.DeleteManagementDnsZoneBindingRequest();
}
public static com.google.cloud.vmwareengine.v1.DeleteManagementDnsZoneBindingRequest
getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Stateless parser that delegates to Builder.mergeFrom and surfaces partial
// results via setUnfinishedMessage on failure.
private static final com.google.protobuf.Parser<DeleteManagementDnsZoneBindingRequest> PARSER =
    new com.google.protobuf.AbstractParser<DeleteManagementDnsZoneBindingRequest>() {
      @java.lang.Override
      public DeleteManagementDnsZoneBindingRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Attach whatever was parsed so far so callers can inspect it.
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          // Wrap plain I/O failures in the protobuf exception type.
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
// Static accessor for the shared parser instance.
public static com.google.protobuf.Parser<DeleteManagementDnsZoneBindingRequest> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<DeleteManagementDnsZoneBindingRequest> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.cloud.vmwareengine.v1.DeleteManagementDnsZoneBindingRequest
    getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.jackrabbit.oak.plugins.observation;
import static org.apache.jackrabbit.oak.commons.PathUtils.concat;
import static org.apache.jackrabbit.oak.plugins.memory.EmptyNodeState.EMPTY_NODE;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.security.PrivilegedActionException;
import java.security.PrivilegedExceptionAction;
import java.util.Arrays;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.jcr.NoSuchWorkspaceException;
import javax.security.auth.login.LoginException;
import org.apache.jackrabbit.JcrConstants;
import org.apache.jackrabbit.oak.Oak;
import org.apache.jackrabbit.oak.api.CommitFailedException;
import org.apache.jackrabbit.oak.api.ContentRepository;
import org.apache.jackrabbit.oak.api.ContentSession;
import org.apache.jackrabbit.oak.api.Root;
import org.apache.jackrabbit.oak.api.Tree;
import org.apache.jackrabbit.oak.api.Type;
import org.apache.jackrabbit.oak.InitialContent;
import org.apache.jackrabbit.oak.commons.jdkcompat.Java23Subject;
import org.apache.jackrabbit.oak.security.internal.SecurityProviderBuilder;
import org.apache.jackrabbit.oak.spi.commit.CommitContext;
import org.apache.jackrabbit.oak.spi.commit.CommitInfo;
import org.apache.jackrabbit.oak.spi.commit.Observer;
import org.apache.jackrabbit.oak.spi.commit.SimpleCommitContext;
import org.apache.jackrabbit.oak.spi.observation.ChangeSet;
import org.apache.jackrabbit.oak.spi.security.SecurityProvider;
import org.apache.jackrabbit.oak.spi.security.authentication.SystemSubject;
import org.apache.jackrabbit.oak.spi.state.NodeBuilder;
import org.apache.jackrabbit.oak.spi.state.NodeState;
import org.jetbrains.annotations.NotNull;
import org.junit.Before;
import org.junit.Test;
public class ChangeCollectorProviderTest {
// Validator provider under test; also exposes maxPathDepth/maxItems knobs.
ChangeCollectorProvider collectorProvider;
private ContentRepository contentRepository;
// System-subject session used by every test to mutate the repository.
private ContentSession session;
// Observer that records every commit delivered during a test.
private Recorder recorder;
// Lazily created by getSecurityProvider().
private SecurityProvider securityProvider;
/** Immutable snapshot of one observed commit: root state plus commit info. */
class ContentChange {
    final NodeState root;
    final CommitInfo info;

    ContentChange(NodeState root, CommitInfo info) {
        this.root = root;
        this.info = info;
    }
}
/** Observer that appends every contentChanged callback to a list for later inspection. */
class Recorder implements Observer {
    List<ContentChange> changes = new LinkedList<ContentChange>();

    @Override
    public void contentChanged(@NotNull NodeState root,@NotNull CommitInfo info) {
        changes.add(new ContentChange(root, info));
    }
}
/** Returns the (lazily created, cached) security provider for this test. */
protected SecurityProvider getSecurityProvider() {
    if (securityProvider != null) {
        return securityProvider;
    }
    securityProvider = SecurityProviderBuilder.newBuilder().build();
    return securityProvider;
}
/**
 * Checks that the actual string set provided matches the expected one. A
 * match is when all elements occur, irrespective of the order. A null
 * actual set is treated as empty (the previous version threw an NPE when
 * {@code actuals} was null and {@code expected} was empty).
 */
private void assertMatches(String msg, Set<String> actuals, String... expected) {
    // Normalize null to size 0 so every branch below is null-safe.
    int actualSize = (actuals == null) ? 0 : actuals.size();
    if (actualSize == 0 && expected.length != 0) {
        fail("assertion failed for '" + msg + "': expected length " + expected.length + " != actual 0."
                + " Expected: '" + Arrays.toString(expected) + "', got: '" + actuals + "'");
    } else if (expected.length == 0 && actualSize != 0) {
        fail("assertion failed for '" + msg + "': expected length == 0, actual " + actualSize + "."
                + " Expected: '" + Arrays.toString(expected) + "', got: '" + actuals + "'");
    } else if (expected.length != actualSize) {
        fail("assertion failed for '" + msg + "': expected length (" + expected.length + ") != actual ("
                + actualSize + ")." + " Expected: '" + Arrays.toString(expected) + "', got: '" + actuals + "'");
    }
    if (actualSize == 0) {
        return; // both empty: trivially a match (also avoids NPE on a null set)
    }
    for (String anExpected : expected) {
        if (!actuals.contains(anExpected)) {
            fail("assertion failed for '" + msg + "': expected '" + anExpected + "' not found. Got: '" + actuals
                    + "'");
        }
    }
}
/**
 * Assumes that the recorder got exactly 1 call, and extracts the ChangeSet
 * attached to that call's commit context.
 */
private ChangeSet getSingleChangeSet() {
    // JUnit convention is assertEquals(expected, actual); the arguments
    // were reversed, producing misleading failure messages.
    assertEquals(1, recorder.changes.size());
    CommitContext commitContext = (CommitContext) recorder.changes.get(0).info.getInfo().get(CommitContext.NAME);
    assertNotNull(commitContext);
    ChangeSet changeSet = (ChangeSet) commitContext
            .get(ChangeSet.COMMIT_CONTEXT_OBSERVATION_CHANGESET);
    assertNotNull(changeSet);
    return changeSet;
}
// Builds a fresh in-memory repository with the collector + recorder wired in,
// creates a small fixture tree under /test (two children, nested grand/great-
// grandchildren), verifies the ChangeSet produced by the fixture commit, and
// finally clears the recorder so each test starts from an empty change list.
@Before
public void setup() throws PrivilegedActionException, CommitFailedException {
    collectorProvider = new ChangeCollectorProvider();
    recorder = new Recorder();
    Oak oak = new Oak().with(new InitialContent()).with(collectorProvider).with(recorder)
            .with(getSecurityProvider());
    contentRepository = oak.createContentRepository();
    // Login as the system subject so the fixture writes are not permission-checked.
    session = Java23Subject.doAs(SystemSubject.INSTANCE, new PrivilegedExceptionAction<ContentSession>() {
        @Override
        public ContentSession run() throws LoginException, NoSuchWorkspaceException {
            return contentRepository.login(null, null);
        }
    });
    Root root = session.getLatestRoot();
    Tree rootTree = root.getTree("/").addChild("test");
    rootTree.setProperty(JcrConstants.JCR_PRIMARYTYPE, "test:parentType", Type.NAME);
    Tree child1 = rootTree.addChild("child1");
    child1.setProperty("child1Prop", 1);
    child1.setProperty(JcrConstants.JCR_PRIMARYTYPE, "test:childType", Type.NAME);
    Tree grandChild1 = child1.addChild("grandChild1");
    grandChild1.setProperty("grandChild1Prop", 1);
    grandChild1.setProperty(JcrConstants.JCR_PRIMARYTYPE, "test:grandChildType", Type.NAME);
    Tree greatGrandChild1 = grandChild1.addChild("greatGrandChild1");
    greatGrandChild1.setProperty("greatGrandChild1Prop", 1);
    greatGrandChild1.setProperty(JcrConstants.JCR_PRIMARYTYPE, "test:greatGrandChildType", Type.NAME);
    Tree child2 = rootTree.addChild("child2");
    child2.setProperty("child2Prop", 1);
    child2.setProperty(JcrConstants.JCR_PRIMARYTYPE, "test:childType", Type.NAME);
    Tree grandChild2 = child2.addChild("grandChild2");
    grandChild2.setProperty("grandChild2Prop", 1);
    grandChild2.setProperty(JcrConstants.JCR_PRIMARYTYPE, "test:grandChildType", Type.NAME);
    recorder.changes.clear();
    root.commit();
    // Sanity-check the ChangeSet reported for the fixture commit itself.
    ChangeSet changeSet = getSingleChangeSet();
    assertMatches("parentPaths", changeSet.getParentPaths(), "/test/child2", "/test/child1",
            "/test/child1/grandChild1/greatGrandChild1", "/", "/test", "/test/child1/grandChild1",
            "/test/child2/grandChild2");
    assertMatches("parentNodeNames", changeSet.getParentNodeNames(), "child2", "child1", "greatGrandChild1", "test",
            "grandChild1", "grandChild2");
    assertMatches("parentNodeTypes", changeSet.getParentNodeTypes(), "test:parentType", "test:childType",
            "test:grandChildType", "test:greatGrandChildType");
    assertMatches("allNodeTypes", changeSet.getAllNodeTypes(), "test:parentType", "test:childType",
            "test:grandChildType", "test:greatGrandChildType");
    assertMatches("propertyNames", changeSet.getPropertyNames(), JcrConstants.JCR_PRIMARYTYPE, "child1Prop",
            "child2Prop", "grandChild1Prop", "grandChild2Prop", "greatGrandChild1Prop");
    // clear the recorder so that we start off empty
    recorder.changes.clear();
}
// Builds a CommitInfo carrying an (empty) CommitContext, which is what the
// collector needs to attach its ChangeSet to an observed commit.
private static CommitInfo newCommitInfoWithCommitContext(String sessionId, String userId) {
    return new CommitInfo(sessionId, userId,
            Map.of(CommitContext.NAME, new SimpleCommitContext()));
}
// The provider must return no validator (null) when the commit carries no
// CommitInfo or no CommitContext, and a validator once a context is present.
@Test
public void testNull() {
    NodeBuilder builder = EMPTY_NODE.builder();
    builder.setChildNode("test");
    builder.setChildNode("a1").setChildNode("b1").setProperty("p1", 1);
    NodeState before = builder.getNodeState();
    builder = before.builder();
    builder.setChildNode("a2").setChildNode("b12").setProperty("p12", "12");
    NodeState after = builder.getNodeState();
    assertNull(collectorProvider.getRootValidator(before, after, null));
    assertNull(collectorProvider.getRootValidator(before, after, CommitInfo.EMPTY));
    assertNotNull(collectorProvider.getRootValidator(before, after,
            newCommitInfoWithCommitContext(CommitInfo.OAK_UNKNOWN, CommitInfo.OAK_UNKNOWN)));
}
// Removing child1 must report the whole removed subtree (child, grandchild,
// great-grandchild) plus the parent /test in the ChangeSet.
@Test
public void testRemoveChild() throws CommitFailedException {
    Root root = session.getLatestRoot();
    Tree rootTree = root.getTree("/test");
    assertTrue(rootTree.getChild("child1").remove());
    root.commit();
    ChangeSet changeSet = getSingleChangeSet();
    assertMatches("parentPaths", changeSet.getParentPaths(), "/test", "/test/child1", "/test/child1/grandChild1",
            "/test/child1/grandChild1/greatGrandChild1");
    assertMatches("parentNodeNames", changeSet.getParentNodeNames(), "test", "child1", "grandChild1",
            "greatGrandChild1");
    assertMatches("parentNodeTypes", changeSet.getParentNodeTypes(), "test:parentType", "test:childType",
            "test:grandChildType", "test:greatGrandChildType");
    assertMatches("allNodeTypes", changeSet.getAllNodeTypes(), "test:parentType", "test:childType",
            "test:grandChildType", "test:greatGrandChildType");
    assertMatches("propertyNames", changeSet.getPropertyNames(), JcrConstants.JCR_PRIMARYTYPE, "child1Prop",
            "grandChild1Prop", "greatGrandChild1Prop");
}
// Removing a deep leaf must report only the leaf and its direct parent as
// parent paths, while allNodeTypes still reflects the full ancestor chain.
@Test
public void testRemoveGreatGrandChild() throws CommitFailedException {
    Root root = session.getLatestRoot();
    Tree rootTree = root.getTree("/test");
    assertTrue(rootTree.getChild("child1").getChild("grandChild1").getChild("greatGrandChild1").remove());
    root.commit();
    ChangeSet changeSet = getSingleChangeSet();
    assertMatches("parentPaths", changeSet.getParentPaths(), "/test/child1/grandChild1/greatGrandChild1",
            "/test/child1/grandChild1");
    assertMatches("parentNodeNames", changeSet.getParentNodeNames(), "grandChild1", "greatGrandChild1");
    assertMatches("parentNodeTypes", changeSet.getParentNodeTypes(), "test:greatGrandChildType",
            "test:grandChildType");
    assertMatches("allNodeTypes", changeSet.getAllNodeTypes(), "test:parentType", "test:childType",
            "test:grandChildType", "test:greatGrandChildType");
    assertMatches("propertyNames", changeSet.getPropertyNames(), JcrConstants.JCR_PRIMARYTYPE,
            "greatGrandChild1Prop");
}
// A pure property change on a deep leaf must report only that leaf as the
// parent path and only the changed property name.
@Test
public void testChangeGreatGrandChild() throws CommitFailedException {
    Root root = session.getLatestRoot();
    Tree rootTree = root.getTree("/test");
    rootTree.getChild("child1").getChild("grandChild1").getChild("greatGrandChild1")
            .setProperty("greatGrandChild1Prop", 2);
    root.commit();
    ChangeSet changeSet = getSingleChangeSet();
    assertMatches("parentPaths", changeSet.getParentPaths(), "/test/child1/grandChild1/greatGrandChild1");
    assertMatches("parentNodeNames", changeSet.getParentNodeNames(), "greatGrandChild1");
    assertMatches("parentNodeTypes", changeSet.getParentNodeTypes(), "test:greatGrandChildType");
    assertMatches("allNodeTypes", changeSet.getAllNodeTypes(), "test:parentType", "test:childType",
            "test:grandChildType", "test:greatGrandChildType");
    assertMatches("propertyNames", changeSet.getPropertyNames(), "greatGrandChild1Prop");
}
// Property changes on two nodes of the same branch must report both nodes
// as parent paths and both changed property names.
@Test
public void testChangeGreatAndGrandChild() throws CommitFailedException {
    Root root = session.getLatestRoot();
    Tree rootTree = root.getTree("/test");
    rootTree.getChild("child1").getChild("grandChild1").setProperty("grandChild1Prop", 2);
    rootTree.getChild("child1").getChild("grandChild1").getChild("greatGrandChild1")
            .setProperty("greatGrandChild1Prop", 2);
    root.commit();
    ChangeSet changeSet = getSingleChangeSet();
    assertMatches("parentPaths", changeSet.getParentPaths(), "/test/child1/grandChild1",
            "/test/child1/grandChild1/greatGrandChild1");
    assertMatches("parentNodeNames", changeSet.getParentNodeNames(), "grandChild1", "greatGrandChild1");
    assertMatches("parentNodeTypes", changeSet.getParentNodeTypes(), "test:grandChildType",
            "test:greatGrandChildType");
    assertMatches("allNodeTypes", changeSet.getAllNodeTypes(), "test:parentType", "test:childType",
            "test:grandChildType", "test:greatGrandChildType");
    assertMatches("propertyNames", changeSet.getPropertyNames(), "grandChild1Prop", "greatGrandChild1Prop");
}
// Adding a child with no properties must report only the parent /test;
// the child itself has no type/properties to contribute.
@Test
public void testAddEmptyChild() throws CommitFailedException {
    Root root = session.getLatestRoot();
    Tree rootTree = root.getTree("/test");
    rootTree.addChild("child");
    root.commit();
    ChangeSet changeSet = getSingleChangeSet();
    assertMatches("parentPaths", changeSet.getParentPaths(), "/test");
    assertMatches("parentNodeNames", changeSet.getParentNodeNames(), "test");
    assertMatches("parentNodeTypes", changeSet.getParentNodeTypes(), "test:parentType");
    assertMatches("allNodeTypes", changeSet.getAllNodeTypes(), "test:parentType");
    assertMatches("propertyNames", changeSet.getPropertyNames());
}
// Adding an empty child + empty grandchild reports both parents but no
// property names and no new node types.
@Test
public void testAddEmptyGrandChild() throws CommitFailedException {
    Root root = session.getLatestRoot();
    Tree rootTree = root.getTree("/test");
    Tree child = rootTree.addChild("child");
    child.addChild("grandChild");
    root.commit();
    ChangeSet changeSet = getSingleChangeSet();
    assertMatches("parentPaths", changeSet.getParentPaths(), "/test", "/test/child");
    assertMatches("parentNodeNames", changeSet.getParentNodeNames(), "test", "child");
    assertMatches("parentNodeTypes", changeSet.getParentNodeTypes(), "test:parentType");
    assertMatches("allNodeTypes", changeSet.getAllNodeTypes(), "test:parentType");
    assertMatches("propertyNames", changeSet.getPropertyNames());
}
// Adding a child and grandchild that each carry a property reports all
// three parent paths and both property names (no jcr:primaryType was set).
@Test
public void testAddNonEmptyGrandChild() throws CommitFailedException {
    Root root = session.getLatestRoot();
    Tree rootTree = root.getTree("/test");
    Tree child = rootTree.addChild("child");
    child.setProperty("childProperty", 1);
    Tree grandChild = child.addChild("grandChild");
    grandChild.setProperty("grandChildProperty", 2);
    root.commit();
    ChangeSet changeSet = getSingleChangeSet();
    assertMatches("parentPaths", changeSet.getParentPaths(), "/test", "/test/child", "/test/child/grandChild");
    assertMatches("parentNodeNames", changeSet.getParentNodeNames(), "test", "child", "grandChild");
    assertMatches("parentNodeTypes", changeSet.getParentNodeTypes(), "test:parentType");
    assertMatches("allNodeTypes", changeSet.getAllNodeTypes(), "test:parentType");
    assertMatches("propertyNames", changeSet.getPropertyNames(), "childProperty", "grandChildProperty");
}
// Adding ten typed children in one commit must aggregate every new path,
// name, node type, and property name into a single ChangeSet.
@Test
public void testAddSomeChildren() throws CommitFailedException {
    Root root = session.getLatestRoot();
    Tree rootTree = root.getTree("/test");
    for (int i = 0; i < 10; i++) {
        Tree child = rootTree.addChild("x" + i);
        child.setProperty(JcrConstants.JCR_PRIMARYTYPE, "test:type" + i, Type.NAME);
        child.setProperty("foo" + i, "bar");
    }
    root.commit();
    ChangeSet changeSet = getSingleChangeSet();
    assertMatches("parentPaths", changeSet.getParentPaths(), "/test", "/test/x0", "/test/x1", "/test/x2",
            "/test/x3", "/test/x4", "/test/x5", "/test/x6", "/test/x7", "/test/x8", "/test/x9");
    assertMatches("parentNodeNames", changeSet.getParentNodeNames(), "test", "x0", "x1", "x2", "x3", "x4", "x5",
            "x6", "x7", "x8", "x9");
    assertMatches("parentNodeTypes", changeSet.getParentNodeTypes(), "test:parentType", "test:type0", "test:type1",
            "test:type2", "test:type3", "test:type4", "test:type5", "test:type6", "test:type7", "test:type8",
            "test:type9");
    assertMatches("allNodeTypes", changeSet.getAllNodeTypes(), "test:parentType", "test:type0", "test:type1",
            "test:type2", "test:type3", "test:type4", "test:type5", "test:type6", "test:type7", "test:type8",
            "test:type9");
    assertMatches("propertyNames", changeSet.getPropertyNames(), JcrConstants.JCR_PRIMARYTYPE, "foo0", "foo1",
            "foo2", "foo3", "foo4", "foo5", "foo6", "foo7", "foo8", "foo9");
}
// Mixed add (empty child + grandchild) and remove (child2 subtree) in one
// commit: removed nodes contribute their types/properties, added empty
// nodes contribute only paths.
@Test
public void testAddEmptyRemoveChildren() throws CommitFailedException {
    Root root = session.getLatestRoot();
    Tree rootTree = root.getTree("/test");
    Tree child = rootTree.addChild("child");
    child.addChild("grandChild");
    assertTrue(rootTree.getChild("child2").remove());
    root.commit();
    ChangeSet changeSet = getSingleChangeSet();
    assertMatches("parentPaths", changeSet.getParentPaths(), "/test", "/test/child", "/test/child2",
            "/test/child2/grandChild2");
    assertMatches("parentNodeNames", changeSet.getParentNodeNames(), "test", "child", "child2", "grandChild2");
    assertMatches("parentNodeTypes", changeSet.getParentNodeTypes(), "test:parentType", "test:childType",
            "test:grandChildType");
    assertMatches("allNodeTypes", changeSet.getAllNodeTypes(), "test:parentType", "test:childType",
            "test:grandChildType");
    assertMatches("propertyNames", changeSet.getPropertyNames(), JcrConstants.JCR_PRIMARYTYPE, "child2Prop",
            "grandChild2Prop");
}
// Exercises doAddMaxPathDepth for every maxPathDepth in [0, 16); each
// iteration rebuilds the full fixture via setup().
@Test
public void testAddMaxPathDepthAll() throws CommitFailedException, PrivilegedActionException {
    for (int i = 0; i < 16; i++) {
        setup();
        doAddMaxPathDepth(i);
    }
}
/**
 * Creates a 16-level-deep chain under /test with the given maxPathDepth
 * configured on the collector, then verifies that reported parent paths are
 * truncated at that depth while names/types/properties are still collected
 * in full.
 */
private void doAddMaxPathDepth(int maxPathDepth) throws CommitFailedException {
    collectorProvider.setMaxPathDepth(maxPathDepth);
    Root root = session.getLatestRoot();
    Tree rootTree = root.getTree("/test");
    Tree next = rootTree;
    for (int i = 0; i < 16; i++) {
        next = next.addChild("n" + i);
        // Every third node stays empty; the rest get a property and a type.
        if (i % 3 != 0) {
            next.setProperty("nextProp" + i, i);
            next.setProperty(JcrConstants.JCR_PRIMARYTYPE, i % 2 == 0 ? "test:even" : "test:odd", Type.NAME);
        }
    }
    root.commit();
    ChangeSet changeSet = getSingleChangeSet();
    List<String> expectedParentPaths = new LinkedList<String>();
    if (maxPathDepth == 0) {
        expectedParentPaths.add("/");
    } else {
        expectedParentPaths.add("/test");
    }
    for (int i = 0; i < maxPathDepth - 1; i++) {
        // StringBuilder (unsynchronized) instead of StringBuffer: this is
        // single-threaded string building; also avoids concat inside append.
        StringBuilder path = new StringBuilder("/test");
        for (int j = 0; j < i; j++) {
            path.append("/n").append(j);
        }
        expectedParentPaths.add(path.toString());
    }
    assertMatches("parentPaths-" + maxPathDepth, changeSet.getParentPaths(),
            expectedParentPaths.toArray(new String[0]));
    assertMatches("parentNodeNames-" + maxPathDepth, changeSet.getParentNodeNames(), "test", "n0", "n1", "n2", "n3",
            "n4", "n5", "n6", "n7", "n8", "n9", "n10", "n11", "n12", "n13",
            "n14"/* , "n15" */);
    assertMatches("parentNodeTypes-" + maxPathDepth, changeSet.getParentNodeTypes(), "test:parentType", "test:even",
            "test:odd");
    assertMatches("allNodeTypes-" + maxPathDepth, changeSet.getAllNodeTypes(), "test:parentType", "test:even", "test:odd");
    assertMatches("propertyNames-" + maxPathDepth, changeSet.getPropertyNames(), JcrConstants.JCR_PRIMARYTYPE,
            /* "nextProp0", */"nextProp1", "nextProp2", /* "nextProp3", */ "nextProp4",
            "nextProp5"/* , "nextProp6" */
            , "nextProp7", "nextProp8", /* "nextProp9", */"nextProp10", "nextProp11",
            /* "nextProp12", */ "nextProp13",
            "nextProp14"/* , "nextProp15" */);
}
// jcr:mixinTypes values must be folded into parentNodeTypes/allNodeTypes,
// and the jcr:mixinTypes property name itself must be reported.
@Test
public void testAddMixin() throws Exception {
    Root root = session.getLatestRoot();
    Tree rootTree = root.getTree("/test");
    rootTree.addChild("child").setProperty(JcrConstants.JCR_MIXINTYPES, Arrays.asList("aMixin1", "aMixin2"),
            Type.NAMES);
    root.commit();
    ChangeSet changeSet = getSingleChangeSet();
    assertMatches("parentPaths", changeSet.getParentPaths(), "/test", "/test/child");
    assertMatches("parentNodeNames", changeSet.getParentNodeNames(), "test", "child");
    assertMatches("parentNodeTypes", changeSet.getParentNodeTypes(), "test:parentType", "aMixin1", "aMixin2");
    assertMatches("allNodeTypes", changeSet.getAllNodeTypes(), "test:parentType", "aMixin1", "aMixin2");
    assertMatches("propertyNames", changeSet.getPropertyNames(), JcrConstants.JCR_MIXINTYPES);
}
// A new typed node with a property must contribute its primary type to the
// type sets and both property names (including jcr:primaryType).
@Test
public void testAddNodeWithProperties() throws Exception {
    Root root = session.getLatestRoot();
    Tree rootTree = root.getTree("/test");
    Tree aChild = rootTree.addChild("newchild");
    aChild.setProperty("aProp", "aValue", Type.NAME);
    aChild.setProperty(JcrConstants.JCR_PRIMARYTYPE, "aPrimaryType", Type.NAME);
    root.commit();
    ChangeSet changeSet = getSingleChangeSet();
    assertMatches("parentPaths", changeSet.getParentPaths(), "/test", "/test/newchild");
    assertMatches("parentNodeNames", changeSet.getParentNodeNames(), "test", "newchild");
    assertMatches("parentNodeTypes", changeSet.getParentNodeTypes(), "test:parentType", "aPrimaryType");
    assertMatches("allNodeTypes", changeSet.getAllNodeTypes(), "test:parentType", "aPrimaryType");
    assertMatches("propertyNames", changeSet.getPropertyNames(), JcrConstants.JCR_PRIMARYTYPE, "aProp");
}
@Test
public void testPathNotOverflown() throws Exception {
Root root = session.getLatestRoot();
Tree rootTree = root.getTree("/test");
Set<String> expectedParentPaths = new HashSet<>();
expectedParentPaths.add("/test");
Set<String> expectedParentNodeNames = new HashSet<>();
expectedParentNodeNames.add("test");
Set<String> expectedParentNodeTypes = new HashSet<>();
expectedParentNodeTypes.add("test:parentType");
// do maxItems-1 iterations only, as the above already adds 1 item - to
// avoid overflowing
for (int i = 0; i < collectorProvider.getMaxItems() - 1; i++) {
Tree aChild = rootTree.addChild("manychildren" + i);
aChild.setProperty("aProperty", "foo");
aChild.setProperty(JcrConstants.JCR_PRIMARYTYPE, "aChildPrimaryType" + i, Type.NAME);
expectedParentPaths.add("/test/manychildren" + i);
expectedParentNodeNames.add("manychildren" + i);
expectedParentNodeTypes.add("aChildPrimaryType" + i);
}
root.commit();
ChangeSet changeSet = getSingleChangeSet();
assertMatches("parentPaths", changeSet.getParentPaths(), expectedParentPaths.toArray(new String[0]));
assertMatches("parentNodeNames", changeSet.getParentNodeNames(),
expectedParentNodeNames.toArray(new String[0]));
assertMatches("parentNodeTypes", changeSet.getParentNodeTypes(),
expectedParentNodeTypes.toArray(new String[0]));
assertMatches("allNodeTypes", changeSet.getAllNodeTypes(),
expectedParentNodeTypes.toArray(new String[0]));
assertMatches("propertyNames", changeSet.getPropertyNames(), JcrConstants.JCR_PRIMARYTYPE, "aProperty");
}
@Test
public void testPathOverflown() throws Exception {
doTestPathOverflown(0);
for (int overflowCnt = 1; overflowCnt <= 64 * 1024; overflowCnt += overflowCnt) {
doTestPathOverflown(overflowCnt);
}
}
private void doTestPathOverflown(int overflowCnt) throws CommitFailedException, PrivilegedActionException {
setup();
Root root = session.getLatestRoot();
Tree rootTree = root.getTree("/test");
for (int i = 0; i < collectorProvider.getMaxItems() + overflowCnt; i++) {
rootTree.addChild("manychildren" + i).setProperty("aProperty", "foo");
;
}
root.commit();
ChangeSet changeSet = getSingleChangeSet();
assertEquals("parentPaths", null, changeSet.getParentPaths());
assertEquals("parentNodeNames", null, changeSet.getParentNodeNames());
assertMatches("parentNodeTypes", changeSet.getParentNodeTypes(), "test:parentType");
assertMatches("allNodeTypes", changeSet.getAllNodeTypes(), "test:parentType");
assertMatches("propertyNames", changeSet.getPropertyNames(), "aProperty");
}
@Test
public void testPropertyNotOverflown() throws Exception {
Root root = session.getLatestRoot();
Tree rootTree = root.getTree("/test");
Tree child1 = rootTree.getChild("child1");
Set<String> expectedPropertyNames = new HashSet<>();
for (int i = 0; i < collectorProvider.getMaxItems(); i++) {
child1.setProperty("aProperty" + i, "foo");
expectedPropertyNames.add("aProperty" + i);
}
root.commit();
ChangeSet changeSet = getSingleChangeSet();
assertMatches("parentPaths", changeSet.getParentPaths(), "/test/child1");
assertMatches("parentNodeNames", changeSet.getParentNodeNames(), "child1");
assertMatches("parentNodeTypes", changeSet.getParentNodeTypes(), "test:childType");
assertMatches("allNodeTypes", changeSet.getAllNodeTypes(), "test:parentType", "test:childType");
assertMatches("propertyNames", changeSet.getPropertyNames(), expectedPropertyNames.toArray(new String[0]));
}
@Test
public void testPropertyOverflown() throws Exception {
for (int overflowCnt = 1; overflowCnt <= 64 * 1024; overflowCnt += overflowCnt) {
doTestPropertyOverflown(overflowCnt);
}
}
private void doTestPropertyOverflown(int overflowCnt) throws CommitFailedException, PrivilegedActionException {
setup();
Root root = session.getLatestRoot();
Tree rootTree = root.getTree("/test");
Tree child1 = rootTree.getChild("child1");
Set<String> expectedPropertyNames = new HashSet<>();
for (int i = 0; i < collectorProvider.getMaxItems() + overflowCnt; i++) {
child1.setProperty("aProperty" + i, "foo");
expectedPropertyNames.add("aProperty" + i);
}
root.commit();
ChangeSet changeSet = getSingleChangeSet();
assertMatches("parentPaths", changeSet.getParentPaths(), "/test/child1");
assertMatches("parentNodeNames", changeSet.getParentNodeNames(), "child1");
assertMatches("parentNodeTypes", changeSet.getParentNodeTypes(), "test:childType");
assertMatches("allNodeTypes", changeSet.getAllNodeTypes(), "test:parentType", "test:childType");
assertEquals("propertyNames", null, changeSet.getPropertyNames());
}
@Test
public void testRemoveMaxPathDepthAll() throws CommitFailedException, PrivilegedActionException {
for (int i = 0; i < 16; i++) {
setup();
doRemoveMaxPathDepth(i);
}
}
    /**
     * Builds a 16-level chain /test/n0/.../n15, then removes the node at depth
     * 15 (n14) in a second commit and asserts the resulting ChangeSet, with the
     * collector's reported path depth capped at {@code maxPathDepth}.
     *
     * @param maxPathDepth maximum path depth configured on the collector
     */
    private void doRemoveMaxPathDepth(int maxPathDepth) throws CommitFailedException {
        collectorProvider.setMaxPathDepth(maxPathDepth);
        Root root = session.getLatestRoot();
        Tree rootTree = root.getTree("/test");
        Tree next = rootTree;
        // Build the chain; every i not divisible by 3 also gets a property and
        // an explicit primary type (test:even for even i, test:odd for odd i).
        for (int i = 0; i < 16; i++) {
            next = next.addChild("n" + i);
            if (i % 3 != 0) {
                next.setProperty("nextProp" + i, i);
                next.setProperty(JcrConstants.JCR_PRIMARYTYPE, i % 2 == 0 ? "test:even" : "test:odd", Type.NAME);
            }
        }
        root.commit();
        // now do the delete
        recorder.changes.clear();
        root = session.getLatestRoot();
        rootTree = root.getTree("/test");
        next = rootTree;
        // Walk down to n14 and remove it (the subtree n15 goes with it).
        for (int i = 0; i < 15; i++) {
            next = next.getChild("n" + i);
            if (i == 14) {
                next.remove();
            }
        }
        root.commit();
        ChangeSet changeSet = getSingleChangeSet();
        // The reported parent path is truncated to maxPathDepth path elements:
        // "/" for depth 0, "/test" for depth 1, "/test/n0/..." beyond that.
        Set<String> expectedParentPaths = new HashSet<String>();
        String path = "/";
        if (maxPathDepth == 1) {
            path = "/test";
        } else if (maxPathDepth > 1) {
            path = "/test";
            for (int i = 0; i < maxPathDepth - 1; i++) {
                path = concat(path, "n" + i);
            }
        }
        expectedParentPaths.add(path);
        assertMatches("parentPaths-" + maxPathDepth, changeSet.getParentPaths(),
                expectedParentPaths.toArray(new String[0]));
        // n13 is the parent of the removed node, n14 the removed node itself.
        assertMatches("parentNodeNames-" + maxPathDepth, changeSet.getParentNodeNames(), "n13", "n14");
        assertMatches("parentNodeTypes-" + maxPathDepth, changeSet.getParentNodeTypes(), "test:even", "test:odd");
        assertMatches("allNodeTypes-" + maxPathDepth, changeSet.getAllNodeTypes(), "test:parentType", "test:even", "test:odd");
        assertMatches("propertyNames-" + maxPathDepth, changeSet.getPropertyNames(), JcrConstants.JCR_PRIMARYTYPE,
                "nextProp14");
    }
@Test
public void testChangeMaxPathDepthAll() throws CommitFailedException, PrivilegedActionException {
for (int maxPathDepth = 0; maxPathDepth < 16; maxPathDepth++) {
for (int changeAt = 0; changeAt < 16; changeAt++) {
setup();
doChangeMaxPathDepth(changeAt, maxPathDepth);
}
}
}
    /**
     * Builds a 16-level chain /test/n0/.../n15, then in a second commit
     * modifies the node at position {@code changeAt} (sets a property and
     * switches its primary type) and asserts the resulting ChangeSet, with the
     * collector's reported path depth capped at {@code maxPathDepth}.
     *
     * @param changeAt     0-based index of the chain node to modify
     * @param maxPathDepth maximum path depth configured on the collector
     */
    private void doChangeMaxPathDepth(int changeAt, int maxPathDepth) throws CommitFailedException {
        collectorProvider.setMaxPathDepth(maxPathDepth);
        Root root = session.getLatestRoot();
        Tree rootTree = root.getTree("/test");
        Tree next = rootTree;
        // Build the chain; every i not divisible by 3 also gets a property and
        // an explicit primary type (test:even / test:odd).
        for (int i = 0; i < 16; i++) {
            next = next.addChild("n" + i);
            if (i % 3 != 0) {
                next.setProperty("nextProp" + i, i);
                next.setProperty(JcrConstants.JCR_PRIMARYTYPE, i % 2 == 0 ? "test:even" : "test:odd", Type.NAME);
            }
        }
        root.commit();
        recorder.changes.clear();
        // now do the change
        root = session.getLatestRoot();
        rootTree = root.getTree("/test");
        next = rootTree;
        List<String> expectedParentPaths = new LinkedList<String>();
        List<String> expectedParentNodeNames = new LinkedList<String>();
        Set<String> expectedAllNodeTypes = new HashSet<String>();
        List<String> expectedParentNodeTypes = new LinkedList<String>();
        List<String> expectedPropertyNames = new LinkedList<String>();
        expectedPropertyNames.add(JcrConstants.JCR_PRIMARYTYPE);
        String parent = "/";
        if (maxPathDepth > 0) {
            parent = "/test";
        }
        expectedAllNodeTypes.add("test:parentType");
        // Walk down to the node being changed, truncating the expected parent
        // path at maxPathDepth and collecting the expected node types en route.
        for (int i = 0; i <= changeAt; i++) {
            String childName = "n" + i;
            next = next.getChild(childName);
            if (i < maxPathDepth - 1) {
                parent = concat(parent, childName);
            }
            final String originalNodeTypeName = i % 2 == 0 ? "test:even" : "test:odd";
            if (i % 3 != 0) {
                if (i == changeAt) {
                    expectedParentNodeTypes.add(originalNodeTypeName);
                }
                expectedAllNodeTypes.add(originalNodeTypeName);
            }
            if (i == changeAt) {
                // The actual change: set a property and switch the primary type.
                expectedParentNodeNames.add(next.getName());
                String propertyName = "nextProp" + i;
                next.setProperty(propertyName, i + 1);
                expectedPropertyNames.add(propertyName);
                final String changedNodeTypeName = i % 2 == 0 ? "test:evenChanged" : "test:oddChanged";
                expectedParentNodeTypes.add(changedNodeTypeName);
                expectedAllNodeTypes.add(changedNodeTypeName);
                next.setProperty(JcrConstants.JCR_PRIMARYTYPE, changedNodeTypeName, Type.NAME);
            }
        }
        expectedParentPaths.add(parent);
        root.commit();
        ChangeSet changeSet = getSingleChangeSet();
        assertMatches("parentPaths-" + changeAt + "-" + maxPathDepth, changeSet.getParentPaths(),
                expectedParentPaths.toArray(new String[0]));
        assertMatches("parentNodeNames-" + changeAt + "-" + maxPathDepth, changeSet.getParentNodeNames(),
                expectedParentNodeNames.toArray(new String[0]));
        assertMatches("parentNodeTypes-" + changeAt + "-" + maxPathDepth, changeSet.getParentNodeTypes(),
                expectedParentNodeTypes.toArray(new String[0]));
        assertMatches("allNodeTypes-" + changeAt + "-" + maxPathDepth, changeSet.getAllNodeTypes(),
                expectedAllNodeTypes.toArray(new String[0]));
        assertMatches("propertyNames-" + changeAt + "-" + maxPathDepth, changeSet.getPropertyNames(),
                expectedPropertyNames.toArray(new String[0]));
    }
}
|
googleapis/google-cloud-java | 35,603 | java-analyticshub/proto-google-cloud-analyticshub-v1/src/main/java/com/google/cloud/bigquery/analyticshub/v1/MessageTransform.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/bigquery/analyticshub/v1/pubsub.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.bigquery.analyticshub.v1;
/**
*
*
* <pre>
* All supported message transforms types.
* </pre>
*
* Protobuf type {@code google.cloud.bigquery.analyticshub.v1.MessageTransform}
*/
public final class MessageTransform extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.bigquery.analyticshub.v1.MessageTransform)
MessageTransformOrBuilder {
private static final long serialVersionUID = 0L;
  // Use MessageTransform.newBuilder() to construct.
  private MessageTransform(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  private MessageTransform() {}

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new MessageTransform();
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.bigquery.analyticshub.v1.PubsubProto
        .internal_static_google_cloud_bigquery_analyticshub_v1_MessageTransform_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.bigquery.analyticshub.v1.PubsubProto
        .internal_static_google_cloud_bigquery_analyticshub_v1_MessageTransform_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.bigquery.analyticshub.v1.MessageTransform.class,
            com.google.cloud.bigquery.analyticshub.v1.MessageTransform.Builder.class);
  }

  // State for the `transform` oneof: the field number of the set case (0 =
  // unset) and the currently-set value object.
  private int transformCase_ = 0;

  @SuppressWarnings("serial")
  private java.lang.Object transform_;

  // Enum mirroring the `transform` oneof cases declared in the proto.
  public enum TransformCase
      implements
          com.google.protobuf.Internal.EnumLite,
          com.google.protobuf.AbstractMessage.InternalOneOfEnum {
    JAVASCRIPT_UDF(2),
    TRANSFORM_NOT_SET(0);
    private final int value;

    private TransformCase(int value) {
      this.value = value;
    }
    /**
     * @param value The number of the enum to look for.
     * @return The enum associated with the given number.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static TransformCase valueOf(int value) {
      return forNumber(value);
    }

    // Maps a field number to its oneof case; returns null for unknown numbers.
    public static TransformCase forNumber(int value) {
      switch (value) {
        case 2:
          return JAVASCRIPT_UDF;
        case 0:
          return TRANSFORM_NOT_SET;
        default:
          return null;
      }
    }

    public int getNumber() {
      return this.value;
    }
  };

  public TransformCase getTransformCase() {
    return TransformCase.forNumber(transformCase_);
  }
  public static final int JAVASCRIPT_UDF_FIELD_NUMBER = 2;

  /**
   *
   *
   * <pre>
   * Optional. JavaScript User Defined Function. If multiple JavaScriptUDF's
   * are specified on a resource, each must have a unique `function_name`.
   * </pre>
   *
   * <code>
   * .google.cloud.bigquery.analyticshub.v1.JavaScriptUDF javascript_udf = 2 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return Whether the javascriptUdf field is set.
   */
  @java.lang.Override
  public boolean hasJavascriptUdf() {
    return transformCase_ == 2;
  }

  /**
   *
   *
   * <pre>
   * Optional. JavaScript User Defined Function. If multiple JavaScriptUDF's
   * are specified on a resource, each must have a unique `function_name`.
   * </pre>
   *
   * <code>
   * .google.cloud.bigquery.analyticshub.v1.JavaScriptUDF javascript_udf = 2 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return The javascriptUdf. Returns the default instance when the oneof is
   *     set to a different case or not set at all.
   */
  @java.lang.Override
  public com.google.cloud.bigquery.analyticshub.v1.JavaScriptUDF getJavascriptUdf() {
    if (transformCase_ == 2) {
      return (com.google.cloud.bigquery.analyticshub.v1.JavaScriptUDF) transform_;
    }
    return com.google.cloud.bigquery.analyticshub.v1.JavaScriptUDF.getDefaultInstance();
  }

  /**
   *
   *
   * <pre>
   * Optional. JavaScript User Defined Function. If multiple JavaScriptUDF's
   * are specified on a resource, each must have a unique `function_name`.
   * </pre>
   *
   * <code>
   * .google.cloud.bigquery.analyticshub.v1.JavaScriptUDF javascript_udf = 2 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.bigquery.analyticshub.v1.JavaScriptUDFOrBuilder
      getJavascriptUdfOrBuilder() {
    if (transformCase_ == 2) {
      return (com.google.cloud.bigquery.analyticshub.v1.JavaScriptUDF) transform_;
    }
    return com.google.cloud.bigquery.analyticshub.v1.JavaScriptUDF.getDefaultInstance();
  }
  public static final int ENABLED_FIELD_NUMBER = 3;
  private boolean enabled_ = false;

  /**
   *
   *
   * <pre>
   * Optional. This field is deprecated, use the `disabled` field to disable
   * transforms.
   * </pre>
   *
   * <code>bool enabled = 3 [deprecated = true, (.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @deprecated google.cloud.bigquery.analyticshub.v1.MessageTransform.enabled is deprecated. See
   *     google/cloud/bigquery/analyticshub/v1/pubsub.proto;l=456
   * @return The enabled.
   */
  @java.lang.Override
  @java.lang.Deprecated
  public boolean getEnabled() {
    return enabled_;
  }

  public static final int DISABLED_FIELD_NUMBER = 4;
  private boolean disabled_ = false;

  /**
   *
   *
   * <pre>
   * Optional. If true, the transform is disabled and will not be applied to
   * messages. Defaults to `false`.
   * </pre>
   *
   * <code>bool disabled = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The disabled.
   */
  @java.lang.Override
  public boolean getDisabled() {
    return disabled_;
  }

  // -1 = not computed yet, 1 = initialized, 0 = not initialized.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No required fields in this message, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes the set oneof case and any non-default scalar fields.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (transformCase_ == 2) {
      output.writeMessage(2, (com.google.cloud.bigquery.analyticshub.v1.JavaScriptUDF) transform_);
    }
    if (enabled_ != false) {
      output.writeBool(3, enabled_);
    }
    if (disabled_ != false) {
      output.writeBool(4, disabled_);
    }
    getUnknownFields().writeTo(output);
  }

  // Computes (and memoizes) the wire size; mirrors writeTo field-for-field.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (transformCase_ == 2) {
      size +=
          com.google.protobuf.CodedOutputStream.computeMessageSize(
              2, (com.google.cloud.bigquery.analyticshub.v1.JavaScriptUDF) transform_);
    }
    if (enabled_ != false) {
      size += com.google.protobuf.CodedOutputStream.computeBoolSize(3, enabled_);
    }
    if (disabled_ != false) {
      size += com.google.protobuf.CodedOutputStream.computeBoolSize(4, disabled_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Value equality over all fields, the oneof case/value, and unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.bigquery.analyticshub.v1.MessageTransform)) {
      return super.equals(obj);
    }
    com.google.cloud.bigquery.analyticshub.v1.MessageTransform other =
        (com.google.cloud.bigquery.analyticshub.v1.MessageTransform) obj;

    if (getEnabled() != other.getEnabled()) return false;
    if (getDisabled() != other.getDisabled()) return false;
    if (!getTransformCase().equals(other.getTransformCase())) return false;
    switch (transformCase_) {
      case 2:
        if (!getJavascriptUdf().equals(other.getJavascriptUdf())) return false;
        break;
      case 0:
      default:
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  // Memoized hash consistent with equals: mixes field numbers and values.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + ENABLED_FIELD_NUMBER;
    hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getEnabled());
    hash = (37 * hash) + DISABLED_FIELD_NUMBER;
    hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getDisabled());
    switch (transformCase_) {
      case 2:
        hash = (37 * hash) + JAVASCRIPT_UDF_FIELD_NUMBER;
        hash = (53 * hash) + getJavascriptUdf().hashCode();
        break;
      case 0:
      default:
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points; all delegate to PARSER or to the
  // GeneratedMessageV3 stream helpers.
  public static com.google.cloud.bigquery.analyticshub.v1.MessageTransform parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.bigquery.analyticshub.v1.MessageTransform parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.bigquery.analyticshub.v1.MessageTransform parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.bigquery.analyticshub.v1.MessageTransform parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.bigquery.analyticshub.v1.MessageTransform parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.bigquery.analyticshub.v1.MessageTransform parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.bigquery.analyticshub.v1.MessageTransform parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.bigquery.analyticshub.v1.MessageTransform parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.bigquery.analyticshub.v1.MessageTransform parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.bigquery.analyticshub.v1.MessageTransform parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.bigquery.analyticshub.v1.MessageTransform parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.bigquery.analyticshub.v1.MessageTransform parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // Builder factory methods.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.cloud.bigquery.analyticshub.v1.MessageTransform prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* All supported message transforms types.
* </pre>
*
* Protobuf type {@code google.cloud.bigquery.analyticshub.v1.MessageTransform}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.bigquery.analyticshub.v1.MessageTransform)
com.google.cloud.bigquery.analyticshub.v1.MessageTransformOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.bigquery.analyticshub.v1.PubsubProto
          .internal_static_google_cloud_bigquery_analyticshub_v1_MessageTransform_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.bigquery.analyticshub.v1.PubsubProto
          .internal_static_google_cloud_bigquery_analyticshub_v1_MessageTransform_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.bigquery.analyticshub.v1.MessageTransform.class,
              com.google.cloud.bigquery.analyticshub.v1.MessageTransform.Builder.class);
    }

    // Construct using com.google.cloud.bigquery.analyticshub.v1.MessageTransform.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    // Resets all fields, the field-presence bits, and the oneof state.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (javascriptUdfBuilder_ != null) {
        javascriptUdfBuilder_.clear();
      }
      enabled_ = false;
      disabled_ = false;
      transformCase_ = 0;
      transform_ = null;
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.bigquery.analyticshub.v1.PubsubProto
          .internal_static_google_cloud_bigquery_analyticshub_v1_MessageTransform_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.bigquery.analyticshub.v1.MessageTransform getDefaultInstanceForType() {
      return com.google.cloud.bigquery.analyticshub.v1.MessageTransform.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.bigquery.analyticshub.v1.MessageTransform build() {
      com.google.cloud.bigquery.analyticshub.v1.MessageTransform result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.bigquery.analyticshub.v1.MessageTransform buildPartial() {
      com.google.cloud.bigquery.analyticshub.v1.MessageTransform result =
          new com.google.cloud.bigquery.analyticshub.v1.MessageTransform(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      buildPartialOneofs(result);
      onBuilt();
      return result;
    }

    // Copies the plain (non-oneof) fields whose presence bits are set.
    private void buildPartial0(com.google.cloud.bigquery.analyticshub.v1.MessageTransform result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.enabled_ = enabled_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.disabled_ = disabled_;
      }
    }

    // Copies the oneof case and value, building via the nested builder when
    // one is active.
    private void buildPartialOneofs(
        com.google.cloud.bigquery.analyticshub.v1.MessageTransform result) {
      result.transformCase_ = transformCase_;
      result.transform_ = this.transform_;
      if (transformCase_ == 2 && javascriptUdfBuilder_ != null) {
        result.transform_ = javascriptUdfBuilder_.build();
      }
    }
    // Reflective-field overrides simply delegate to GeneratedMessageV3.Builder.
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.bigquery.analyticshub.v1.MessageTransform) {
        return mergeFrom((com.google.cloud.bigquery.analyticshub.v1.MessageTransform) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Merges a same-type message: scalars only when non-default, then the
    // active oneof case, then unknown fields.
    public Builder mergeFrom(com.google.cloud.bigquery.analyticshub.v1.MessageTransform other) {
      if (other == com.google.cloud.bigquery.analyticshub.v1.MessageTransform.getDefaultInstance())
        return this;
      if (other.getEnabled() != false) {
        setEnabled(other.getEnabled());
      }
      if (other.getDisabled() != false) {
        setDisabled(other.getDisabled());
      }
      switch (other.getTransformCase()) {
        case JAVASCRIPT_UDF:
          {
            mergeJavascriptUdf(other.getJavascriptUdf());
            break;
          }
        case TRANSFORM_NOT_SET:
          {
            break;
          }
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    // Wire-format parse loop: dispatches on field tags (18 = javascript_udf,
    // 24 = enabled, 32 = disabled) until end of stream / endgroup.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 18:
              {
                input.readMessage(getJavascriptUdfFieldBuilder().getBuilder(), extensionRegistry);
                transformCase_ = 2;
                break;
              } // case 18
            case 24:
              {
                enabled_ = input.readBool();
                bitField0_ |= 0x00000002;
                break;
              } // case 24
            case 32:
              {
                disabled_ = input.readBool();
                bitField0_ |= 0x00000004;
                break;
              } // case 32
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Builder-side oneof state, mirroring the message fields.
    private int transformCase_ = 0;
    private java.lang.Object transform_;

    public TransformCase getTransformCase() {
      return TransformCase.forNumber(transformCase_);
    }

    public Builder clearTransform() {
      transformCase_ = 0;
      transform_ = null;
      onChanged();
      return this;
    }

    private int bitField0_;

    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.bigquery.analyticshub.v1.JavaScriptUDF,
            com.google.cloud.bigquery.analyticshub.v1.JavaScriptUDF.Builder,
            com.google.cloud.bigquery.analyticshub.v1.JavaScriptUDFOrBuilder>
        javascriptUdfBuilder_;

    /**
     *
     *
     * <pre>
     * Optional. JavaScript User Defined Function. If multiple JavaScriptUDF's
     * are specified on a resource, each must have a unique `function_name`.
     * </pre>
     *
     * <code>
     * .google.cloud.bigquery.analyticshub.v1.JavaScriptUDF javascript_udf = 2 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @return Whether the javascriptUdf field is set.
     */
    @java.lang.Override
    public boolean hasJavascriptUdf() {
      return transformCase_ == 2;
    }

    /**
     *
     *
     * <pre>
     * Optional. JavaScript User Defined Function. If multiple JavaScriptUDF's
     * are specified on a resource, each must have a unique `function_name`.
     * </pre>
     *
     * <code>
     * .google.cloud.bigquery.analyticshub.v1.JavaScriptUDF javascript_udf = 2 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @return The javascriptUdf.
     */
    @java.lang.Override
    public com.google.cloud.bigquery.analyticshub.v1.JavaScriptUDF getJavascriptUdf() {
      // Read from the nested builder when one is active, else from the raw
      // oneof value; default instance when the case is not set.
      if (javascriptUdfBuilder_ == null) {
        if (transformCase_ == 2) {
          return (com.google.cloud.bigquery.analyticshub.v1.JavaScriptUDF) transform_;
        }
        return com.google.cloud.bigquery.analyticshub.v1.JavaScriptUDF.getDefaultInstance();
      } else {
        if (transformCase_ == 2) {
          return javascriptUdfBuilder_.getMessage();
        }
        return com.google.cloud.bigquery.analyticshub.v1.JavaScriptUDF.getDefaultInstance();
      }
    }
/**
*
*
* <pre>
* Optional. JavaScript User Defined Function. If multiple JavaScriptUDF's
* are specified on a resource, each must have a unique `function_name`.
* </pre>
*
* <code>
* .google.cloud.bigquery.analyticshub.v1.JavaScriptUDF javascript_udf = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public Builder setJavascriptUdf(com.google.cloud.bigquery.analyticshub.v1.JavaScriptUDF value) {
if (javascriptUdfBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
transform_ = value;
onChanged();
} else {
javascriptUdfBuilder_.setMessage(value);
}
transformCase_ = 2;
return this;
}
/**
*
*
* <pre>
* Optional. JavaScript User Defined Function. If multiple JavaScriptUDF's
* are specified on a resource, each must have a unique `function_name`.
* </pre>
*
* <code>
* .google.cloud.bigquery.analyticshub.v1.JavaScriptUDF javascript_udf = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public Builder setJavascriptUdf(
com.google.cloud.bigquery.analyticshub.v1.JavaScriptUDF.Builder builderForValue) {
if (javascriptUdfBuilder_ == null) {
transform_ = builderForValue.build();
onChanged();
} else {
javascriptUdfBuilder_.setMessage(builderForValue.build());
}
transformCase_ = 2;
return this;
}
    /**
     *
     *
     * <pre>
     * Optional. JavaScript User Defined Function. If multiple JavaScriptUDF's
     * are specified on a resource, each must have a unique `function_name`.
     * </pre>
     *
     * <code>
     * .google.cloud.bigquery.analyticshub.v1.JavaScriptUDF javascript_udf = 2 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder mergeJavascriptUdf(
        com.google.cloud.bigquery.analyticshub.v1.JavaScriptUDF value) {
      if (javascriptUdfBuilder_ == null) {
        // Reference (not equals) comparison with the default instance is the
        // standard generated-code check for "this case already holds a value".
        if (transformCase_ == 2
            && transform_
                != com.google.cloud.bigquery.analyticshub.v1.JavaScriptUDF.getDefaultInstance()) {
          // Field-wise merge of 'value' into the existing message.
          transform_ =
              com.google.cloud.bigquery.analyticshub.v1.JavaScriptUDF.newBuilder(
                      (com.google.cloud.bigquery.analyticshub.v1.JavaScriptUDF) transform_)
                  .mergeFrom(value)
                  .buildPartial();
        } else {
          transform_ = value;
        }
        onChanged();
      } else {
        if (transformCase_ == 2) {
          javascriptUdfBuilder_.mergeFrom(value);
        } else {
          javascriptUdfBuilder_.setMessage(value);
        }
      }
      transformCase_ = 2;
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. JavaScript User Defined Function. If multiple JavaScriptUDF's
     * are specified on a resource, each must have a unique `function_name`.
     * </pre>
     *
     * <code>
     * .google.cloud.bigquery.analyticshub.v1.JavaScriptUDF javascript_udf = 2 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder clearJavascriptUdf() {
      if (javascriptUdfBuilder_ == null) {
        // Only reset the oneof if this case is currently selected.
        if (transformCase_ == 2) {
          transformCase_ = 0;
          transform_ = null;
          onChanged();
        }
      } else {
        if (transformCase_ == 2) {
          transformCase_ = 0;
          transform_ = null;
        }
        // The nested builder's clear() triggers onChanged() itself.
        javascriptUdfBuilder_.clear();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. JavaScript User Defined Function. If multiple JavaScriptUDF's
     * are specified on a resource, each must have a unique `function_name`.
     * </pre>
     *
     * <code>
     * .google.cloud.bigquery.analyticshub.v1.JavaScriptUDF javascript_udf = 2 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public com.google.cloud.bigquery.analyticshub.v1.JavaScriptUDF.Builder
        getJavascriptUdfBuilder() {
      // Lazily creates the nested builder and selects oneof case 2 as a side effect.
      return getJavascriptUdfFieldBuilder().getBuilder();
    }

    /**
     *
     *
     * <pre>
     * Optional. JavaScript User Defined Function. If multiple JavaScriptUDF's
     * are specified on a resource, each must have a unique `function_name`.
     * </pre>
     *
     * <code>
     * .google.cloud.bigquery.analyticshub.v1.JavaScriptUDF javascript_udf = 2 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    @java.lang.Override
    public com.google.cloud.bigquery.analyticshub.v1.JavaScriptUDFOrBuilder
        getJavascriptUdfOrBuilder() {
      // Read-only view; unlike getJavascriptUdfBuilder() this never mutates state.
      if ((transformCase_ == 2) && (javascriptUdfBuilder_ != null)) {
        return javascriptUdfBuilder_.getMessageOrBuilder();
      } else {
        if (transformCase_ == 2) {
          return (com.google.cloud.bigquery.analyticshub.v1.JavaScriptUDF) transform_;
        }
        return com.google.cloud.bigquery.analyticshub.v1.JavaScriptUDF.getDefaultInstance();
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. JavaScript User Defined Function. If multiple JavaScriptUDF's
     * are specified on a resource, each must have a unique `function_name`.
     * </pre>
     *
     * <code>
     * .google.cloud.bigquery.analyticshub.v1.JavaScriptUDF javascript_udf = 2 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.bigquery.analyticshub.v1.JavaScriptUDF,
            com.google.cloud.bigquery.analyticshub.v1.JavaScriptUDF.Builder,
            com.google.cloud.bigquery.analyticshub.v1.JavaScriptUDFOrBuilder>
        getJavascriptUdfFieldBuilder() {
      if (javascriptUdfBuilder_ == null) {
        // Seed the builder with the default instance if another oneof case is active.
        if (!(transformCase_ == 2)) {
          transform_ = com.google.cloud.bigquery.analyticshub.v1.JavaScriptUDF.getDefaultInstance();
        }
        javascriptUdfBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.bigquery.analyticshub.v1.JavaScriptUDF,
                com.google.cloud.bigquery.analyticshub.v1.JavaScriptUDF.Builder,
                com.google.cloud.bigquery.analyticshub.v1.JavaScriptUDFOrBuilder>(
                (com.google.cloud.bigquery.analyticshub.v1.JavaScriptUDF) transform_,
                getParentForChildren(),
                isClean());
        // Ownership of the value moves to the builder; drop the direct reference.
        transform_ = null;
      }
      transformCase_ = 2;
      onChanged();
      return javascriptUdfBuilder_;
    }
    // Backing field for the deprecated 'enabled' flag (field number 3).
    private boolean enabled_;
    /**
     *
     *
     * <pre>
     * Optional. This field is deprecated, use the `disabled` field to disable
     * transforms.
     * </pre>
     *
     * <code>bool enabled = 3 [deprecated = true, (.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @deprecated google.cloud.bigquery.analyticshub.v1.MessageTransform.enabled is deprecated. See
     *     google/cloud/bigquery/analyticshub/v1/pubsub.proto;l=456
     * @return The enabled.
     */
    @java.lang.Override
    @java.lang.Deprecated
    public boolean getEnabled() {
      return enabled_;
    }

    /**
     *
     *
     * <pre>
     * Optional. This field is deprecated, use the `disabled` field to disable
     * transforms.
     * </pre>
     *
     * <code>bool enabled = 3 [deprecated = true, (.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @deprecated google.cloud.bigquery.analyticshub.v1.MessageTransform.enabled is deprecated. See
     *     google/cloud/bigquery/analyticshub/v1/pubsub.proto;l=456
     * @param value The enabled to set.
     * @return This builder for chaining.
     */
    @java.lang.Deprecated
    public Builder setEnabled(boolean value) {
      enabled_ = value;
      // Mark the 'enabled' has-bit so buildPartial() copies the value through.
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Optional. This field is deprecated, use the `disabled` field to disable
     * transforms.
     * </pre>
     *
     * <code>bool enabled = 3 [deprecated = true, (.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @deprecated google.cloud.bigquery.analyticshub.v1.MessageTransform.enabled is deprecated. See
     *     google/cloud/bigquery/analyticshub/v1/pubsub.proto;l=456
     * @return This builder for chaining.
     */
    @java.lang.Deprecated
    public Builder clearEnabled() {
      bitField0_ = (bitField0_ & ~0x00000002);
      enabled_ = false;
      onChanged();
      return this;
    }
    // Backing field for the 'disabled' flag (field number 4), which supersedes 'enabled'.
    private boolean disabled_;
    /**
     *
     *
     * <pre>
     * Optional. If true, the transform is disabled and will not be applied to
     * messages. Defaults to `false`.
     * </pre>
     *
     * <code>bool disabled = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The disabled.
     */
    @java.lang.Override
    public boolean getDisabled() {
      return disabled_;
    }

    /**
     *
     *
     * <pre>
     * Optional. If true, the transform is disabled and will not be applied to
     * messages. Defaults to `false`.
     * </pre>
     *
     * <code>bool disabled = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The disabled to set.
     * @return This builder for chaining.
     */
    public Builder setDisabled(boolean value) {
      disabled_ = value;
      // Mark the 'disabled' has-bit so buildPartial() copies the value through.
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Optional. If true, the transform is disabled and will not be applied to
     * messages. Defaults to `false`.
     * </pre>
     *
     * <code>bool disabled = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearDisabled() {
      bitField0_ = (bitField0_ & ~0x00000004);
      disabled_ = false;
      onChanged();
      return this;
    }
    // Standard generated pass-throughs preserving unknown (unrecognized) fields
    // so that re-serialization round-trips data written by newer schema versions.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.bigquery.analyticshub.v1.MessageTransform)
}
// @@protoc_insertion_point(class_scope:google.cloud.bigquery.analyticshub.v1.MessageTransform)
  // Singleton default instance shared by all readers of this message type.
  private static final com.google.cloud.bigquery.analyticshub.v1.MessageTransform DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.bigquery.analyticshub.v1.MessageTransform();
  }

  public static com.google.cloud.bigquery.analyticshub.v1.MessageTransform getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<MessageTransform> PARSER =
      new com.google.protobuf.AbstractParser<MessageTransform>() {
        @java.lang.Override
        public MessageTransform parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            // Attach the partially parsed message so callers can inspect what was read.
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<MessageTransform> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<MessageTransform> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.bigquery.analyticshub.v1.MessageTransform getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/zookeeper | 35,687 | zookeeper-server/src/main/java/org/apache/zookeeper/server/NIOServerCnxnFactory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zookeeper.server;
import java.io.IOException;
import java.io.PrintWriter;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.SocketException;
import java.nio.ByteBuffer;
import java.nio.channels.SelectionKey;
import java.nio.channels.Selector;
import java.nio.channels.ServerSocketChannel;
import java.nio.channels.SocketChannel;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Queue;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.LinkedBlockingQueue;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* NIOServerCnxnFactory implements a multi-threaded ServerCnxnFactory using
* NIO non-blocking socket calls. Communication between threads is handled via
* queues.
*
* - 1 accept thread, which accepts new connections and assigns to a
* selector thread
* - 1-N selector threads, each of which selects on 1/N of the connections.
* The reason the factory supports more than one selector thread is that
* with large numbers of connections, select() itself can become a
* performance bottleneck.
* - 0-M socket I/O worker threads, which perform basic socket reads and
* writes. If configured with 0 worker threads, the selector threads
* do the socket I/O directly.
* - 1 connection expiration thread, which closes idle connections; this is
* necessary to expire connections on which no session is established.
*
* Typical (default) thread counts are: on a 32 core machine, 1 accept thread,
* 1 connection expiration thread, 4 selector threads, and 64 worker threads.
*/
public class NIOServerCnxnFactory extends ServerCnxnFactory {
    private static final Logger LOG = LoggerFactory.getLogger(NIOServerCnxnFactory.class);

    // System-property names for tuning the NIO stack; read once in configure()
    // (or the static initializer, for the direct-buffer size).
    /** Default sessionless connection timeout in ms: 10000 (10s) */
    public static final String ZOOKEEPER_NIO_SESSIONLESS_CNXN_TIMEOUT = "zookeeper.nio.sessionlessCnxnTimeout";
    /**
     * With 500 connections to an observer with watchers firing on each, is
     * unable to exceed 1GigE rates with only 1 selector.
     * Defaults to using 2 selector threads with 8 cores and 4 with 32 cores.
     * Expressed as sqrt(numCores/2). Must have at least 1 selector thread.
     */
    public static final String ZOOKEEPER_NIO_NUM_SELECTOR_THREADS = "zookeeper.nio.numSelectorThreads";
    /** Default: 2 * numCores */
    public static final String ZOOKEEPER_NIO_NUM_WORKER_THREADS = "zookeeper.nio.numWorkerThreads";
    /** Default: 64kB */
    public static final String ZOOKEEPER_NIO_DIRECT_BUFFER_BYTES = "zookeeper.nio.directBufferBytes";
    /** Default worker pool shutdown timeout in ms: 5000 (5s) */
    public static final String ZOOKEEPER_NIO_SHUTDOWN_TIMEOUT = "zookeeper.nio.shutdownTimeout";
    static {
        // NOTE: this installs a JVM-wide default handler, not one scoped to this
        // class — any thread without its own handler will log through it.
        Thread.setDefaultUncaughtExceptionHandler((t, e) -> LOG.error("Thread {} died", t, e));

        /**
         * Value of 0 disables use of direct buffers and instead uses
         * gathered write call.
         *
         * Default to using 64k direct buffers.
         */
        directBufferBytes = Integer.getInteger(ZOOKEEPER_NIO_DIRECT_BUFFER_BYTES, 64 * 1024);
    }
/**
* AbstractSelectThread is an abstract base class containing a few bits
* of code shared by the AcceptThread (which selects on the listen socket)
* and SelectorThread (which selects on client connections) classes.
*/
private abstract class AbstractSelectThread extends ZooKeeperThread {
protected final Selector selector;
public AbstractSelectThread(String name) throws IOException {
super(name);
// Allows the JVM to shutdown even if this thread is still running.
setDaemon(true);
this.selector = Selector.open();
}
public void wakeupSelector() {
selector.wakeup();
}
/**
* Close the selector. This should be called when the thread is about to
* exit and no operation is going to be performed on the Selector or
* SelectionKey
*/
protected void closeSelector() {
try {
selector.close();
} catch (IOException e) {
LOG.warn("ignored exception during selector close.", e);
}
}
protected void cleanupSelectionKey(SelectionKey key) {
if (key != null) {
try {
key.cancel();
} catch (Exception ex) {
LOG.debug("ignoring exception during selectionkey cancel", ex);
}
}
}
protected void fastCloseSock(SocketChannel sc) {
if (sc != null) {
try {
// Hard close immediately, discarding buffers
sc.socket().setSoLinger(true, 0);
} catch (SocketException e) {
LOG.warn("Unable to set socket linger to 0, socket close may stall in CLOSE_WAIT", e);
}
NIOServerCnxn.closeSock(sc);
}
}
}
/**
* There is a single AcceptThread which accepts new connections and assigns
* them to a SelectorThread using a simple round-robin scheme to spread
* them across the SelectorThreads. It enforces maximum number of
* connections per IP and attempts to cope with running out of file
* descriptors by briefly sleeping before retrying.
*/
    private class AcceptThread extends AbstractSelectThread {

        private final ServerSocketChannel acceptSocket;
        private final SelectionKey acceptKey;
        private final RateLogger acceptErrorLogger = new RateLogger(LOG);
        // Immutable snapshot of the selector threads, iterated round-robin.
        private final Collection<SelectorThread> selectorThreads;
        private Iterator<SelectorThread> selectorIterator;
        // Set when the listen socket is being replaced; suppresses full factory stop.
        private volatile boolean reconfiguring = false;

        public AcceptThread(ServerSocketChannel ss, InetSocketAddress addr, Set<SelectorThread> selectorThreads) throws IOException {
            super("NIOServerCxnFactory.AcceptThread:" + addr);
            this.acceptSocket = ss;
            this.acceptKey = acceptSocket.register(selector, SelectionKey.OP_ACCEPT);
            this.selectorThreads = Collections.unmodifiableList(new ArrayList<SelectorThread>(selectorThreads));
            selectorIterator = this.selectorThreads.iterator();
        }

        public void run() {
            try {
                // Loop until either the factory stops or the listen socket is
                // closed out from under us (which reconfigure() does on purpose).
                while (!stopped && !acceptSocket.socket().isClosed()) {
                    try {
                        select();
                    } catch (RuntimeException e) {
                        LOG.warn("Ignoring unexpected runtime exception", e);
                    } catch (Exception e) {
                        LOG.warn("Ignoring unexpected exception", e);
                    }
                }
            } finally {
                closeSelector();
                // This will wake up the selector threads, and tell the
                // worker thread pool to begin shutdown.
                if (!reconfiguring) {
                    NIOServerCnxnFactory.this.stop();
                }
                LOG.info("accept thread exited run method");
            }
        }

        public void setReconfiguring() {
            reconfiguring = true;
        }

        private void select() {
            try {
                selector.select();

                Iterator<SelectionKey> selectedKeys = selector.selectedKeys().iterator();
                while (!stopped && selectedKeys.hasNext()) {
                    SelectionKey key = selectedKeys.next();
                    selectedKeys.remove();

                    if (!key.isValid()) {
                        continue;
                    }
                    if (key.isAcceptable()) {
                        if (!doAccept()) {
                            // If unable to pull a new connection off the accept
                            // queue, pause accepting to give us time to free
                            // up file descriptors and so the accept thread
                            // doesn't spin in a tight loop.
                            pauseAccept(10);
                        }
                    } else {
                        LOG.warn("Unexpected ops in accept select {}", key.readyOps());
                    }
                }
            } catch (IOException e) {
                LOG.warn("Ignoring IOException while selecting", e);
            }
        }

        /**
         * Mask off the listen socket interest ops and use select() to sleep
         * so that other threads can wake us up by calling wakeup() on the
         * selector.
         */
        private void pauseAccept(long millisecs) {
            acceptKey.interestOps(0);
            try {
                selector.select(millisecs);
            } catch (IOException e) {
                // ignore
            } finally {
                acceptKey.interestOps(SelectionKey.OP_ACCEPT);
            }
        }

        /**
         * Accept new socket connections. Enforces maximum number of connections
         * per client IP address. Round-robin assigns to selector thread for
         * handling. Returns whether pulled a connection off the accept queue
         * or not. If encounters an error attempts to fast close the socket.
         *
         * @return whether was able to accept a connection or not
         */
        private boolean doAccept() {
            boolean accepted = false;
            SocketChannel sc = null;
            try {
                sc = acceptSocket.accept();
                accepted = true;
                // Global connection cap first, then the per-IP cap below; either
                // rejection takes the IOException path and fast-closes the socket.
                if (limitTotalNumberOfCnxns()) {
                    throw new IOException("Too many connections max allowed is " + maxCnxns);
                }
                InetAddress ia = sc.socket().getInetAddress();
                int cnxncount = getClientCnxnCount(ia);

                if (maxClientCnxns > 0 && cnxncount >= maxClientCnxns) {
                    throw new IOException("Too many connections from " + ia + " - max is " + maxClientCnxns);
                }

                LOG.debug("Accepted socket connection from {}", sc.socket().getRemoteSocketAddress());

                sc.configureBlocking(false);

                // Round-robin assign this connection to a selector thread
                if (!selectorIterator.hasNext()) {
                    selectorIterator = selectorThreads.iterator();
                }
                SelectorThread selectorThread = selectorIterator.next();
                if (!selectorThread.addAcceptedConnection(sc)) {
                    throw new IOException("Unable to add connection to selector queue"
                                          + (stopped ? " (shutdown in progress)" : ""));
                }
                acceptErrorLogger.flush();
            } catch (IOException e) {
                // accept, maxClientCnxns, configureBlocking
                ServerMetrics.getMetrics().CONNECTION_REJECTED.add(1);
                acceptErrorLogger.rateLimitLog("Error accepting new connection: " + e.getMessage());
                fastCloseSock(sc);
            }
            return accepted;
        }

    }
/**
* The SelectorThread receives newly accepted connections from the
* AcceptThread and is responsible for selecting for I/O readiness
* across the connections. This thread is the only thread that performs
* any non-threadsafe or potentially blocking calls on the selector
* (registering new connections and reading/writing interest ops).
*
* Assignment of a connection to a SelectorThread is permanent and only
* one SelectorThread will ever interact with the connection. There are
* 1-N SelectorThreads, with connections evenly apportioned between the
* SelectorThreads.
*
* If there is a worker thread pool, when a connection has I/O to perform
* the SelectorThread removes it from selection by clearing its interest
* ops and schedules the I/O for processing by a worker thread. When the
* work is complete, the connection is placed on the ready queue to have
* its interest ops restored and resume selection.
*
* If there is no worker thread pool, the SelectorThread performs the I/O
* directly.
*/
public class SelectorThread extends AbstractSelectThread {
private final int id;
private final Queue<SocketChannel> acceptedQueue;
private final Queue<SelectionKey> updateQueue;
public SelectorThread(int id) throws IOException {
super("NIOServerCxnFactory.SelectorThread-" + id);
this.id = id;
acceptedQueue = new LinkedBlockingQueue<>();
updateQueue = new LinkedBlockingQueue<>();
}
/**
* Place new accepted connection onto a queue for adding. Do this
* so only the selector thread modifies what keys are registered
* with the selector.
*/
public boolean addAcceptedConnection(SocketChannel accepted) {
if (stopped || !acceptedQueue.offer(accepted)) {
return false;
}
wakeupSelector();
return true;
}
/**
* Place interest op update requests onto a queue so that only the
* selector thread modifies interest ops, because interest ops
* reads/sets are potentially blocking operations if other select
* operations are happening.
*/
public boolean addInterestOpsUpdateRequest(SelectionKey sk) {
if (stopped || !updateQueue.offer(sk)) {
return false;
}
wakeupSelector();
return true;
}
/**
* The main loop for the thread selects() on the connections and
* dispatches ready I/O work requests, then registers all pending
* newly accepted connections and updates any interest ops on the
* queue.
*/
public void run() {
try {
while (!stopped) {
try {
select();
processAcceptedConnections();
processInterestOpsUpdateRequests();
} catch (RuntimeException e) {
LOG.warn("Ignoring unexpected runtime exception", e);
} catch (Exception e) {
LOG.warn("Ignoring unexpected exception", e);
}
}
// Close connections still pending on the selector. Any others
// with in-flight work, let drain out of the work queue.
for (SelectionKey key : selector.keys()) {
NIOServerCnxn cnxn = (NIOServerCnxn) key.attachment();
if (cnxn.isSelectable()) {
cnxn.close(ServerCnxn.DisconnectReason.SERVER_SHUTDOWN);
}
cleanupSelectionKey(key);
}
SocketChannel accepted;
while ((accepted = acceptedQueue.poll()) != null) {
fastCloseSock(accepted);
}
updateQueue.clear();
} finally {
closeSelector();
// This will wake up the accept thread and the other selector
// threads, and tell the worker thread pool to begin shutdown.
NIOServerCnxnFactory.this.stop();
LOG.info("selector thread exited run method");
}
}
private void select() {
try {
selector.select();
Set<SelectionKey> selected = selector.selectedKeys();
ArrayList<SelectionKey> selectedList = new ArrayList<>(selected);
Collections.shuffle(selectedList);
Iterator<SelectionKey> selectedKeys = selectedList.iterator();
while (!stopped && selectedKeys.hasNext()) {
SelectionKey key = selectedKeys.next();
selected.remove(key);
if (!key.isValid()) {
cleanupSelectionKey(key);
continue;
}
if (key.isReadable() || key.isWritable()) {
handleIO(key);
} else {
LOG.warn("Unexpected ops in select {}", key.readyOps());
}
}
} catch (IOException e) {
LOG.warn("Ignoring IOException while selecting", e);
}
}
/**
* Schedule I/O for processing on the connection associated with
* the given SelectionKey. If a worker thread pool is not being used,
* I/O is run directly by this thread.
*/
private void handleIO(SelectionKey key) {
IOWorkRequest workRequest = new IOWorkRequest(this, key);
NIOServerCnxn cnxn = (NIOServerCnxn) key.attachment();
// Stop selecting this key while processing on its
// connection
cnxn.disableSelectable();
key.interestOps(0);
touchCnxn(cnxn);
workerPool.schedule(workRequest);
}
/**
* Iterate over the queue of accepted connections that have been
* assigned to this thread but not yet placed on the selector.
*/
private void processAcceptedConnections() {
SocketChannel accepted;
while (!stopped && (accepted = acceptedQueue.poll()) != null) {
SelectionKey key = null;
try {
key = accepted.register(selector, SelectionKey.OP_READ);
NIOServerCnxn cnxn = createConnection(accepted, key, this);
key.attach(cnxn);
addCnxn(cnxn);
} catch (IOException e) {
// register, createConnection
cleanupSelectionKey(key);
fastCloseSock(accepted);
}
}
}
/**
* Iterate over the queue of connections ready to resume selection,
* and restore their interest ops selection mask.
*/
private void processInterestOpsUpdateRequests() {
SelectionKey key;
while (!stopped && (key = updateQueue.poll()) != null) {
if (!key.isValid()) {
cleanupSelectionKey(key);
}
NIOServerCnxn cnxn = (NIOServerCnxn) key.attachment();
if (cnxn.isSelectable()) {
key.interestOps(cnxn.getInterestOps());
}
}
}
}
/**
* IOWorkRequest is a small wrapper class to allow doIO() calls to be
* run on a connection using a WorkerService.
*/
    private class IOWorkRequest extends WorkerService.WorkRequest {

        private final SelectorThread selectorThread;
        private final SelectionKey key;
        private final NIOServerCnxn cnxn;

        IOWorkRequest(SelectorThread selectorThread, SelectionKey key) {
            this.selectorThread = selectorThread;
            this.key = key;
            this.cnxn = (NIOServerCnxn) key.attachment();
        }

        // Runs on a worker thread (or inline on the selector thread if there is
        // no pool): performs the actual socket I/O, then re-arms selection.
        public void doWork() throws InterruptedException {
            if (!key.isValid()) {
                selectorThread.cleanupSelectionKey(key);
                return;
            }

            if (key.isReadable() || key.isWritable()) {
                cnxn.doIO(key);

                // Check if we shutdown or doIO() closed this connection
                if (stopped) {
                    cnxn.close(ServerCnxn.DisconnectReason.SERVER_SHUTDOWN);
                    return;
                }
                if (!key.isValid()) {
                    selectorThread.cleanupSelectionKey(key);
                    return;
                }
                // Refresh the idle-expiry deadline since the connection was active.
                touchCnxn(cnxn);
            }

            // Mark this connection as once again ready for selection
            cnxn.enableSelectable();
            // Push an update request on the queue to resume selecting
            // on the current set of interest ops, which may have changed
            // as a result of the I/O operations we just performed.
            if (!selectorThread.addInterestOpsUpdateRequest(key)) {
                cnxn.close(ServerCnxn.DisconnectReason.CONNECTION_MODE_CHANGED);
            }
        }

        // Invoked by WorkerService if the request is discarded (e.g. pool
        // shutdown before execution): make sure the connection is not leaked.
        @Override
        public void cleanup() {
            cnxn.close(ServerCnxn.DisconnectReason.CLEAN_UP);
        }

    }
/**
* This thread is responsible for closing stale connections so that
* connections on which no session is established are properly expired.
*/
private class ConnectionExpirerThread extends ZooKeeperThread {
ConnectionExpirerThread() {
super("ConnectionExpirer");
}
public void run() {
try {
while (!stopped) {
long waitTime = cnxnExpiryQueue.getWaitTime();
if (waitTime > 0) {
Thread.sleep(waitTime);
continue;
}
for (NIOServerCnxn conn : cnxnExpiryQueue.poll()) {
ServerMetrics.getMetrics().SESSIONLESS_CONNECTIONS_EXPIRED.add(1);
conn.close(ServerCnxn.DisconnectReason.CONNECTION_EXPIRED);
}
}
} catch (InterruptedException e) {
LOG.info("ConnectionExpirerThread interrupted");
}
}
}
    // The client listen socket; replaced in place by reconfigure() when the port changes.
    ServerSocketChannel ss;
/**
* We use this buffer to do efficient socket I/O. Because I/O is handled
* by the worker threads (or the selector threads directly, if no worker
* thread pool is created), we can create a fixed set of these to be
* shared by connections.
*/
private static final ThreadLocal<ByteBuffer> directBuffer = new ThreadLocal<ByteBuffer>() {
@Override
protected ByteBuffer initialValue() {
return ByteBuffer.allocateDirect(directBufferBytes);
}
};
public static ByteBuffer getDirectBuffer() {
return directBufferBytes > 0 ? directBuffer.get() : null;
}
    // ipMap is used to limit connections per IP
    private final ConcurrentHashMap<InetAddress, Set<NIOServerCnxn>> ipMap = new ConcurrentHashMap<>();

    // Per-IP connection cap enforced in AcceptThread.doAccept(); <= 0 disables the check.
    protected int maxClientCnxns = 60;
    // Listen-socket backlog; -1 means "use the JDK default" (see configure()).
    int listenBacklog = -1;

    // Idle timeout (ms) for connections without an established session.
    int sessionlessCnxnTimeout;
    private ExpiryQueue<NIOServerCnxn> cnxnExpiryQueue;

    protected WorkerService workerPool;

    // Size of the per-thread direct I/O buffer; 0 disables direct buffers.
    private static int directBufferBytes;
    private int numSelectorThreads;
    private int numWorkerThreads;
    private long workerShutdownTimeoutMS;
    /**
     * Construct a new server connection factory which will accept an unlimited number
     * of concurrent connections from each client (up to the file descriptor
     * limits of the operating system). startup(zks) must be called subsequently.
     */
    public NIOServerCnxnFactory() {
    }

    // Volatile shutdown flag read by every accept/selector/expirer loop.
    private volatile boolean stopped = true;
    private ConnectionExpirerThread expirerThread;
    private AcceptThread acceptThread;
    private final Set<SelectorThread> selectorThreads = new HashSet<>();
    // Reads tuning properties, sizes the thread pools, binds the listen socket,
    // and creates (but does not start) the accept/selector/expirer threads.
    @Override
    public void configure(InetSocketAddress addr, int maxcc, int backlog, boolean secure) throws IOException {
        if (secure) {
            throw new UnsupportedOperationException("SSL isn't supported in NIOServerCnxn");
        }
        configureSaslLogin();

        maxClientCnxns = maxcc;
        initMaxCnxns();
        sessionlessCnxnTimeout = Integer.getInteger(ZOOKEEPER_NIO_SESSIONLESS_CNXN_TIMEOUT, 10000);
        // We also use the sessionlessCnxnTimeout as expiring interval for
        // cnxnExpiryQueue. These don't need to be the same, but the expiring
        // interval passed into the ExpiryQueue() constructor below should be
        // less than or equal to the timeout.
        cnxnExpiryQueue = new ExpiryQueue<>(sessionlessCnxnTimeout);
        expirerThread = new ConnectionExpirerThread();

        int numCores = Runtime.getRuntime().availableProcessors();
        // 32 cores sweet spot seems to be 4 selector threads
        numSelectorThreads = Integer.getInteger(
            ZOOKEEPER_NIO_NUM_SELECTOR_THREADS,
            Math.max((int) Math.sqrt((float) numCores / 2), 1));
        if (numSelectorThreads < 1) {
            throw new IOException("numSelectorThreads must be at least 1");
        }

        numWorkerThreads = Integer.getInteger(ZOOKEEPER_NIO_NUM_WORKER_THREADS, 2 * numCores);
        workerShutdownTimeoutMS = Long.getLong(ZOOKEEPER_NIO_SHUTDOWN_TIMEOUT, 5000);

        String logMsg = "Configuring NIO connection handler with "
                        + (sessionlessCnxnTimeout / 1000) + "s sessionless connection timeout, "
                        + numSelectorThreads + " selector thread(s), "
                        + (numWorkerThreads > 0 ? numWorkerThreads : "no") + " worker threads, and "
                        + (directBufferBytes == 0 ? "gathered writes." : ("" + (directBufferBytes / 1024) + " kB direct buffers."));
        LOG.info(logMsg);

        for (int i = 0; i < numSelectorThreads; ++i) {
            selectorThreads.add(new SelectorThread(i));
        }

        listenBacklog = backlog;
        this.ss = ServerSocketChannel.open();
        // SO_REUSEADDR so a restart can rebind while old sockets sit in TIME_WAIT.
        ss.socket().setReuseAddress(true);
        LOG.info("binding to port {}", addr);
        if (listenBacklog == -1) {
            ss.socket().bind(addr);
        } else {
            ss.socket().bind(addr, listenBacklog);
        }
        if (addr.getPort() == 0) {
            // We're likely bound to a different port than was requested, so log that too
            LOG.info("bound to port {}", ss.getLocalAddress());
        }
        ss.configureBlocking(false);
        acceptThread = new AcceptThread(ss, addr, selectorThreads);
    }
private void tryClose(ServerSocketChannel s) {
try {
s.close();
} catch (IOException sse) {
LOG.error("Error while closing server socket.", sse);
}
}
    // Rebinds the client port: tears down the old accept thread/socket, then
    // binds a fresh socket and starts a new accept thread against the same
    // selector threads. On failure, the old socket is closed and the factory
    // is left without a working listener (error is logged).
    @Override
    public void reconfigure(InetSocketAddress addr) {
        ServerSocketChannel oldSS = ss;
        try {
            // Tell the accept thread not to stop the whole factory when its
            // socket closes; then close the socket and wait for it to exit.
            acceptThread.setReconfiguring();
            tryClose(oldSS);
            acceptThread.wakeupSelector();
            try {
                acceptThread.join();
            } catch (InterruptedException e) {
                LOG.error("Error joining old acceptThread when reconfiguring client port.", e);
                Thread.currentThread().interrupt();
            }
            this.ss = ServerSocketChannel.open();
            ss.socket().setReuseAddress(true);
            LOG.info("binding to port {}", addr);
            ss.socket().bind(addr);
            ss.configureBlocking(false);
            acceptThread = new AcceptThread(ss, addr, selectorThreads);
            acceptThread.start();
        } catch (IOException e) {
            LOG.error("Error reconfiguring client port to {}", addr, e);
            tryClose(oldSS);
        }
    }
    /** {@inheritDoc} */
    public int getMaxClientCnxnsPerHost() {
        return maxClientCnxns;
    }

    /** {@inheritDoc} */
    // Takes effect for subsequent accepts only; existing connections are untouched.
    public void setMaxClientCnxnsPerHost(int max) {
        maxClientCnxns = max;
    }

    /** {@inheritDoc} */
    public int getSocketListenBacklog() {
        return listenBacklog;
    }
    // Starts the worker pool and all factory threads. Idempotent: each thread
    // is only started if still in state NEW, so repeated calls are safe.
    @Override
    public void start() {
        stopped = false;
        if (workerPool == null) {
            workerPool = new WorkerService("NIOWorker", numWorkerThreads, false);
        }
        for (SelectorThread thread : selectorThreads) {
            if (thread.getState() == Thread.State.NEW) {
                thread.start();
            }
        }
        // ensure thread is started once and only once
        if (acceptThread.getState() == Thread.State.NEW) {
            acceptThread.start();
        }
        if (expirerThread.getState() == Thread.State.NEW) {
            expirerThread.start();
        }
    }
    // Starts the connection machinery and attaches the ZooKeeper server;
    // optionally loads its data and brings it fully up.
    @Override
    public void startup(ZooKeeperServer zks, boolean startServer) throws IOException, InterruptedException {
        start();
        setZooKeeperServer(zks);
        if (startServer) {
            zks.startdata();
            zks.startup();
        }
    }
    /** Address the listen socket is actually bound to (useful with ephemeral port 0). */
    @Override
    public InetSocketAddress getLocalAddress() {
        return (InetSocketAddress) ss.socket().getLocalSocketAddress();
    }

    /** Port the listen socket is actually bound to. */
    @Override
    public int getLocalPort() {
        return ss.socket().getLocalPort();
    }
    /**
     * De-registers the connection from the various mappings maintained
     * by the factory.
     *
     * @return true if this call removed the connection, false if it was
     *         already gone (makes concurrent close paths idempotent)
     */
    public boolean removeCnxn(NIOServerCnxn cnxn) {
        // If the connection is not in the master list it's already been closed
        if (!cnxns.remove(cnxn)) {
            return false;
        }

        cnxnExpiryQueue.remove(cnxn);
        removeCnxnFromSessionMap(cnxn);

        InetAddress addr = cnxn.getSocketAddress();
        if (addr != null) {
            Set<NIOServerCnxn> set = ipMap.get(addr);
            if (set != null) {
                set.remove(cnxn);
                // Note that we make no effort here to remove empty mappings
                // from ipMap.
            }
        }

        // unregister from JMX
        unregisterConnection(cnxn);
        return true;
    }
    /**
     * Add or update cnxn in our cnxnExpiryQueue, pushing its idle-expiry
     * deadline forward by the connection's session timeout.
     *
     * @param cnxn the connection that just showed activity
     */
    public void touchCnxn(NIOServerCnxn cnxn) {
        cnxnExpiryQueue.update(cnxn, cnxn.getSessionTimeout());
    }
/**
 * Registers a freshly accepted connection with the factory: files it under
 * its client IP in {@code ipMap}, adds it to the master connection set and
 * schedules it in the expiry queue.
 *
 * @param cnxn the connection to register
 * @throws IOException if the connection's socket has already been closed
 *         (no remote address available)
 */
private void addCnxn(NIOServerCnxn cnxn) throws IOException {
    InetAddress remoteAddr = cnxn.getSocketAddress();
    if (remoteAddr == null) {
        throw new IOException("Socket of " + cnxn + " has been closed");
    }
    // One concurrent set of connections per client IP. Most hosts open a
    // single connection, so an initial capacity of 2 avoids a rehash when
    // that first entry is added while keeping memory use minimal.
    Set<NIOServerCnxn> perIpSet =
        ipMap.computeIfAbsent(
            remoteAddr,
            addr -> Collections.newSetFromMap(new ConcurrentHashMap<>(2)));
    perIpSet.add(cnxn);
    cnxns.add(cnxn);
    touchCnxn(cnxn);
}
/**
 * Factory hook that builds the connection object for a newly accepted
 * socket; protected so subclasses can supply a specialized NIOServerCnxn.
 */
protected NIOServerCnxn createConnection(SocketChannel sock, SelectionKey sk, SelectorThread selectorThread) throws IOException {
    return new NIOServerCnxn(zkServer, sock, sk, this, selectorThread);
}
/**
 * Returns the number of live connections currently tracked for the given
 * client address, or 0 if none are tracked.
 */
private int getClientCnxnCount(InetAddress cl) {
    Set<NIOServerCnxn> open = ipMap.get(cl);
    return (open == null) ? 0 : open.size();
}
/**
 * Closes every connection currently tracked by the factory.
 *
 * <p>Iterating is safe even though {@code cnxn.close(reason)} removes the
 * connection from {@code cnxns} mid-loop: the set is backed by a
 * ConcurrentHashMap (see resetAllConnectionStats' note), whose iterators
 * tolerate concurrent modification. A failure closing one connection is
 * logged and does not stop the remaining connections from being closed.
 *
 * <p>NOTE(review): no unchecked operation is visible in this method — the
 * {@code @SuppressWarnings("unchecked")} may be vestigial; confirm before
 * removing.
 *
 * @param reason the disconnect reason recorded for each closed connection
 */
@Override
@SuppressWarnings("unchecked")
public void closeAll(ServerCnxn.DisconnectReason reason) {
    // clear all the connections on which we are selecting
    for (ServerCnxn cnxn : cnxns) {
        try {
            // This will remove the cnxn from cnxns
            cnxn.close(reason);
        } catch (Exception e) {
            LOG.warn(
                "Ignoring exception closing cnxn session id 0x{}",
                Long.toHexString(cnxn.getSessionId()),
                e);
        }
    }
}
/**
 * Stops the factory without waiting for its threads to exit (see
 * {@code shutdown()} for the blocking variant).
 *
 * <p>Order matters: {@code stopped} is set first and the listen socket is
 * closed so no further connection attempts are queued; then the accept,
 * expirer and selector threads are signalled, and finally the worker pool
 * is stopped. A live thread is woken via its selector so it can observe
 * {@code stopped}; a thread that never started has its selector closed
 * directly to release the descriptor.
 */
public void stop() {
    stopped = true;
    // Stop queuing connection attempts
    try {
        ss.close();
    } catch (IOException e) {
        LOG.warn("Error closing listen socket", e);
    }
    if (acceptThread != null) {
        if (acceptThread.isAlive()) {
            acceptThread.wakeupSelector();
        } else {
            acceptThread.closeSelector();
        }
    }
    if (expirerThread != null) {
        expirerThread.interrupt();
    }
    for (SelectorThread thread : selectorThreads) {
        if (thread.isAlive()) {
            thread.wakeupSelector();
        } else {
            thread.closeSelector();
        }
    }
    if (workerPool != null) {
        workerPool.stop();
    }
}
/**
 * Performs a full, orderly shutdown: stops the factory threads, waits for
 * them to exit, closes all open connections and shuts down the
 * {@code login} helper if one is present.
 *
 * <p>Interruptions and any other exceptions raised during that teardown
 * are logged and swallowed deliberately, so that the attached ZooKeeper
 * server's own shutdown still runs afterwards.
 */
public void shutdown() {
    try {
        // close listen socket and signal selector threads to stop
        stop();
        // wait for selector and worker threads to shutdown
        join();
        // close all open connections
        closeAll(ServerCnxn.DisconnectReason.SERVER_SHUTDOWN);
        if (login != null) {
            login.shutdown();
        }
    } catch (InterruptedException e) {
        LOG.warn("Ignoring interrupted exception during shutdown", e);
    } catch (Exception e) {
        LOG.warn("Ignoring unexpected exception during shutdown", e);
    }
    if (zkServer != null) {
        zkServer.shutdown();
    }
}
/**
 * {@inheritDoc}
 *
 * <p>Blocks until the accept thread and every selector thread have exited,
 * then waits up to {@code workerShutdownTimeoutMS} for the worker pool.
 * NOTE(review): {@code expirerThread} is interrupted in stop() but is not
 * joined here — confirm whether that is intentional.
 */
@Override
public void join() throws InterruptedException {
    if (acceptThread != null) {
        acceptThread.join();
    }
    for (SelectorThread thread : selectorThreads) {
        thread.join();
    }
    if (workerPool != null) {
        workerPool.join(workerShutdownTimeoutMS);
    }
}
/**
 * {@inheritDoc}
 *
 * <p>Returns the factory's live connection set (not a snapshot): changes
 * made by other threads are visible through the returned Iterable.
 */
@Override
public Iterable<ServerCnxn> getConnections() {
    return cnxns;
}
/**
 * Writes a human-readable dump of the tracked connections to the given
 * writer, delegating the formatting to the expiry queue.
 */
public void dumpConnections(PrintWriter pwriter) {
    pwriter.print("Connections ");
    cnxnExpiryQueue.dump(pwriter);
}
/**
 * Resets the statistics counters of every tracked connection.
 *
 * <p>No synchronization is needed: cnxns is backed by a ConcurrentHashMap,
 * so iterating while other threads add or remove connections is safe.
 */
@Override
public void resetAllConnectionStats() {
    cnxns.forEach(c -> c.resetStats());
}
/**
 * Collects a per-connection information map for every tracked connection.
 *
 * <p>No synchronization is needed: cnxns is backed by a ConcurrentHashMap,
 * so iterating while other threads add or remove connections is safe.
 *
 * @param brief whether each connection should report abbreviated info
 * @return a set of connection-info maps, one per connection
 */
@Override
public Iterable<Map<String, Object>> getAllConnectionInfo(boolean brief) {
    Set<Map<String, Object>> details = new HashSet<>();
    cnxns.forEach(c -> details.add(c.getConnectionInfo(brief)));
    return details;
}
}
|
googleapis/google-cloud-java | 35,646 | java-aiplatform/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/CreateModelDeploymentMonitoringJobRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/aiplatform/v1beta1/job_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.aiplatform.v1beta1;
/**
*
*
* <pre>
* Request message for
* [JobService.CreateModelDeploymentMonitoringJob][google.cloud.aiplatform.v1beta1.JobService.CreateModelDeploymentMonitoringJob].
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1beta1.CreateModelDeploymentMonitoringJobRequest}
*/
public final class CreateModelDeploymentMonitoringJobRequest
extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1beta1.CreateModelDeploymentMonitoringJobRequest)
CreateModelDeploymentMonitoringJobRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use CreateModelDeploymentMonitoringJobRequest.newBuilder() to construct.
private CreateModelDeploymentMonitoringJobRequest(
com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private CreateModelDeploymentMonitoringJobRequest() {
parent_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new CreateModelDeploymentMonitoringJobRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.aiplatform.v1beta1.JobServiceProto
.internal_static_google_cloud_aiplatform_v1beta1_CreateModelDeploymentMonitoringJobRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.aiplatform.v1beta1.JobServiceProto
.internal_static_google_cloud_aiplatform_v1beta1_CreateModelDeploymentMonitoringJobRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.aiplatform.v1beta1.CreateModelDeploymentMonitoringJobRequest.class,
com.google.cloud.aiplatform.v1beta1.CreateModelDeploymentMonitoringJobRequest.Builder
.class);
}
private int bitField0_;
public static final int PARENT_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The parent of the ModelDeploymentMonitoringJob.
* Format: `projects/{project}/locations/{location}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
@java.lang.Override
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. The parent of the ModelDeploymentMonitoringJob.
* Format: `projects/{project}/locations/{location}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int MODEL_DEPLOYMENT_MONITORING_JOB_FIELD_NUMBER = 2;
private com.google.cloud.aiplatform.v1beta1.ModelDeploymentMonitoringJob
modelDeploymentMonitoringJob_;
/**
*
*
* <pre>
* Required. The ModelDeploymentMonitoringJob to create
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.ModelDeploymentMonitoringJob model_deployment_monitoring_job = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the modelDeploymentMonitoringJob field is set.
*/
@java.lang.Override
public boolean hasModelDeploymentMonitoringJob() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The ModelDeploymentMonitoringJob to create
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.ModelDeploymentMonitoringJob model_deployment_monitoring_job = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The modelDeploymentMonitoringJob.
*/
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.ModelDeploymentMonitoringJob
getModelDeploymentMonitoringJob() {
return modelDeploymentMonitoringJob_ == null
? com.google.cloud.aiplatform.v1beta1.ModelDeploymentMonitoringJob.getDefaultInstance()
: modelDeploymentMonitoringJob_;
}
/**
*
*
* <pre>
* Required. The ModelDeploymentMonitoringJob to create
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.ModelDeploymentMonitoringJob model_deployment_monitoring_job = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.ModelDeploymentMonitoringJobOrBuilder
getModelDeploymentMonitoringJobOrBuilder() {
return modelDeploymentMonitoringJob_ == null
? com.google.cloud.aiplatform.v1beta1.ModelDeploymentMonitoringJob.getDefaultInstance()
: modelDeploymentMonitoringJob_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
}
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(2, getModelDeploymentMonitoringJob());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
}
if (((bitField0_ & 0x00000001) != 0)) {
size +=
com.google.protobuf.CodedOutputStream.computeMessageSize(
2, getModelDeploymentMonitoringJob());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj
instanceof com.google.cloud.aiplatform.v1beta1.CreateModelDeploymentMonitoringJobRequest)) {
return super.equals(obj);
}
com.google.cloud.aiplatform.v1beta1.CreateModelDeploymentMonitoringJobRequest other =
(com.google.cloud.aiplatform.v1beta1.CreateModelDeploymentMonitoringJobRequest) obj;
if (!getParent().equals(other.getParent())) return false;
if (hasModelDeploymentMonitoringJob() != other.hasModelDeploymentMonitoringJob()) return false;
if (hasModelDeploymentMonitoringJob()) {
if (!getModelDeploymentMonitoringJob().equals(other.getModelDeploymentMonitoringJob()))
return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + PARENT_FIELD_NUMBER;
hash = (53 * hash) + getParent().hashCode();
if (hasModelDeploymentMonitoringJob()) {
hash = (37 * hash) + MODEL_DEPLOYMENT_MONITORING_JOB_FIELD_NUMBER;
hash = (53 * hash) + getModelDeploymentMonitoringJob().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.aiplatform.v1beta1.CreateModelDeploymentMonitoringJobRequest
parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1beta1.CreateModelDeploymentMonitoringJobRequest
parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1beta1.CreateModelDeploymentMonitoringJobRequest
parseFrom(com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1beta1.CreateModelDeploymentMonitoringJobRequest
parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1beta1.CreateModelDeploymentMonitoringJobRequest
parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1beta1.CreateModelDeploymentMonitoringJobRequest
parseFrom(byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1beta1.CreateModelDeploymentMonitoringJobRequest
parseFrom(java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1beta1.CreateModelDeploymentMonitoringJobRequest
parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1beta1.CreateModelDeploymentMonitoringJobRequest
parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1beta1.CreateModelDeploymentMonitoringJobRequest
parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1beta1.CreateModelDeploymentMonitoringJobRequest
parseFrom(com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1beta1.CreateModelDeploymentMonitoringJobRequest
parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.aiplatform.v1beta1.CreateModelDeploymentMonitoringJobRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request message for
* [JobService.CreateModelDeploymentMonitoringJob][google.cloud.aiplatform.v1beta1.JobService.CreateModelDeploymentMonitoringJob].
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1beta1.CreateModelDeploymentMonitoringJobRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1beta1.CreateModelDeploymentMonitoringJobRequest)
com.google.cloud.aiplatform.v1beta1.CreateModelDeploymentMonitoringJobRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.aiplatform.v1beta1.JobServiceProto
.internal_static_google_cloud_aiplatform_v1beta1_CreateModelDeploymentMonitoringJobRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.aiplatform.v1beta1.JobServiceProto
.internal_static_google_cloud_aiplatform_v1beta1_CreateModelDeploymentMonitoringJobRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.aiplatform.v1beta1.CreateModelDeploymentMonitoringJobRequest.class,
com.google.cloud.aiplatform.v1beta1.CreateModelDeploymentMonitoringJobRequest.Builder
.class);
}
// Construct using
// com.google.cloud.aiplatform.v1beta1.CreateModelDeploymentMonitoringJobRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getModelDeploymentMonitoringJobFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
parent_ = "";
modelDeploymentMonitoringJob_ = null;
if (modelDeploymentMonitoringJobBuilder_ != null) {
modelDeploymentMonitoringJobBuilder_.dispose();
modelDeploymentMonitoringJobBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.aiplatform.v1beta1.JobServiceProto
.internal_static_google_cloud_aiplatform_v1beta1_CreateModelDeploymentMonitoringJobRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.CreateModelDeploymentMonitoringJobRequest
getDefaultInstanceForType() {
return com.google.cloud.aiplatform.v1beta1.CreateModelDeploymentMonitoringJobRequest
.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.CreateModelDeploymentMonitoringJobRequest build() {
com.google.cloud.aiplatform.v1beta1.CreateModelDeploymentMonitoringJobRequest result =
buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.CreateModelDeploymentMonitoringJobRequest
buildPartial() {
com.google.cloud.aiplatform.v1beta1.CreateModelDeploymentMonitoringJobRequest result =
new com.google.cloud.aiplatform.v1beta1.CreateModelDeploymentMonitoringJobRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(
com.google.cloud.aiplatform.v1beta1.CreateModelDeploymentMonitoringJobRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.parent_ = parent_;
}
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.modelDeploymentMonitoringJob_ =
modelDeploymentMonitoringJobBuilder_ == null
? modelDeploymentMonitoringJob_
: modelDeploymentMonitoringJobBuilder_.build();
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other
instanceof
com.google.cloud.aiplatform.v1beta1.CreateModelDeploymentMonitoringJobRequest) {
return mergeFrom(
(com.google.cloud.aiplatform.v1beta1.CreateModelDeploymentMonitoringJobRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(
com.google.cloud.aiplatform.v1beta1.CreateModelDeploymentMonitoringJobRequest other) {
if (other
== com.google.cloud.aiplatform.v1beta1.CreateModelDeploymentMonitoringJobRequest
.getDefaultInstance()) return this;
if (!other.getParent().isEmpty()) {
parent_ = other.parent_;
bitField0_ |= 0x00000001;
onChanged();
}
if (other.hasModelDeploymentMonitoringJob()) {
mergeModelDeploymentMonitoringJob(other.getModelDeploymentMonitoringJob());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
parent_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
input.readMessage(
getModelDeploymentMonitoringJobFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The parent of the ModelDeploymentMonitoringJob.
* Format: `projects/{project}/locations/{location}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The parent of the ModelDeploymentMonitoringJob.
* Format: `projects/{project}/locations/{location}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The parent of the ModelDeploymentMonitoringJob.
* Format: `projects/{project}/locations/{location}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
public Builder setParent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The parent of the ModelDeploymentMonitoringJob.
* Format: `projects/{project}/locations/{location}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearParent() {
parent_ = getDefaultInstance().getParent();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The parent of the ModelDeploymentMonitoringJob.
* Format: `projects/{project}/locations/{location}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
*/
public Builder setParentBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private com.google.cloud.aiplatform.v1beta1.ModelDeploymentMonitoringJob
modelDeploymentMonitoringJob_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.aiplatform.v1beta1.ModelDeploymentMonitoringJob,
com.google.cloud.aiplatform.v1beta1.ModelDeploymentMonitoringJob.Builder,
com.google.cloud.aiplatform.v1beta1.ModelDeploymentMonitoringJobOrBuilder>
modelDeploymentMonitoringJobBuilder_;
/**
*
*
* <pre>
* Required. The ModelDeploymentMonitoringJob to create
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.ModelDeploymentMonitoringJob model_deployment_monitoring_job = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the modelDeploymentMonitoringJob field is set.
*/
public boolean hasModelDeploymentMonitoringJob() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. The ModelDeploymentMonitoringJob to create
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.ModelDeploymentMonitoringJob model_deployment_monitoring_job = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The modelDeploymentMonitoringJob.
*/
public com.google.cloud.aiplatform.v1beta1.ModelDeploymentMonitoringJob
getModelDeploymentMonitoringJob() {
if (modelDeploymentMonitoringJobBuilder_ == null) {
return modelDeploymentMonitoringJob_ == null
? com.google.cloud.aiplatform.v1beta1.ModelDeploymentMonitoringJob.getDefaultInstance()
: modelDeploymentMonitoringJob_;
} else {
return modelDeploymentMonitoringJobBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. The ModelDeploymentMonitoringJob to create
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.ModelDeploymentMonitoringJob model_deployment_monitoring_job = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setModelDeploymentMonitoringJob(
com.google.cloud.aiplatform.v1beta1.ModelDeploymentMonitoringJob value) {
if (modelDeploymentMonitoringJobBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
modelDeploymentMonitoringJob_ = value;
} else {
modelDeploymentMonitoringJobBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The ModelDeploymentMonitoringJob to create
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.ModelDeploymentMonitoringJob model_deployment_monitoring_job = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setModelDeploymentMonitoringJob(
com.google.cloud.aiplatform.v1beta1.ModelDeploymentMonitoringJob.Builder builderForValue) {
if (modelDeploymentMonitoringJobBuilder_ == null) {
modelDeploymentMonitoringJob_ = builderForValue.build();
} else {
modelDeploymentMonitoringJobBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The ModelDeploymentMonitoringJob to create
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.ModelDeploymentMonitoringJob model_deployment_monitoring_job = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeModelDeploymentMonitoringJob(
com.google.cloud.aiplatform.v1beta1.ModelDeploymentMonitoringJob value) {
if (modelDeploymentMonitoringJobBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)
&& modelDeploymentMonitoringJob_ != null
&& modelDeploymentMonitoringJob_
!= com.google.cloud.aiplatform.v1beta1.ModelDeploymentMonitoringJob
.getDefaultInstance()) {
getModelDeploymentMonitoringJobBuilder().mergeFrom(value);
} else {
modelDeploymentMonitoringJob_ = value;
}
} else {
modelDeploymentMonitoringJobBuilder_.mergeFrom(value);
}
if (modelDeploymentMonitoringJob_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. The ModelDeploymentMonitoringJob to create
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.ModelDeploymentMonitoringJob model_deployment_monitoring_job = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearModelDeploymentMonitoringJob() {
bitField0_ = (bitField0_ & ~0x00000002);
modelDeploymentMonitoringJob_ = null;
if (modelDeploymentMonitoringJobBuilder_ != null) {
modelDeploymentMonitoringJobBuilder_.dispose();
modelDeploymentMonitoringJobBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The ModelDeploymentMonitoringJob to create
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.ModelDeploymentMonitoringJob model_deployment_monitoring_job = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    // Generated accessor: returns a mutable builder for the required
    // model_deployment_monitoring_job field. Marks the field as set (bit 0x00000002)
    // and fires onChanged() so parent builders are notified.
    public com.google.cloud.aiplatform.v1beta1.ModelDeploymentMonitoringJob.Builder
        getModelDeploymentMonitoringJobBuilder() {
      bitField0_ |= 0x00000002;
      onChanged();
      return getModelDeploymentMonitoringJobFieldBuilder().getBuilder();
    }

    /**
     *
     *
     * <pre>
     * Required. The ModelDeploymentMonitoringJob to create
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1beta1.ModelDeploymentMonitoringJob model_deployment_monitoring_job = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.aiplatform.v1beta1.ModelDeploymentMonitoringJobOrBuilder
        getModelDeploymentMonitoringJobOrBuilder() {
      // Prefer the nested builder's view when one exists; otherwise fall back to the
      // stored message (or the default instance if the field was never set).
      if (modelDeploymentMonitoringJobBuilder_ != null) {
        return modelDeploymentMonitoringJobBuilder_.getMessageOrBuilder();
      } else {
        return modelDeploymentMonitoringJob_ == null
            ? com.google.cloud.aiplatform.v1beta1.ModelDeploymentMonitoringJob.getDefaultInstance()
            : modelDeploymentMonitoringJob_;
      }
    }

    /**
     *
     *
     * <pre>
     * Required. The ModelDeploymentMonitoringJob to create
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1beta1.ModelDeploymentMonitoringJob model_deployment_monitoring_job = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.aiplatform.v1beta1.ModelDeploymentMonitoringJob,
            com.google.cloud.aiplatform.v1beta1.ModelDeploymentMonitoringJob.Builder,
            com.google.cloud.aiplatform.v1beta1.ModelDeploymentMonitoringJobOrBuilder>
        getModelDeploymentMonitoringJobFieldBuilder() {
      // Lazily creates the single-field builder; once created, the plain message
      // reference is cleared because the builder becomes the source of truth.
      if (modelDeploymentMonitoringJobBuilder_ == null) {
        modelDeploymentMonitoringJobBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.aiplatform.v1beta1.ModelDeploymentMonitoringJob,
                com.google.cloud.aiplatform.v1beta1.ModelDeploymentMonitoringJob.Builder,
                com.google.cloud.aiplatform.v1beta1.ModelDeploymentMonitoringJobOrBuilder>(
                getModelDeploymentMonitoringJob(), getParentForChildren(), isClean());
        modelDeploymentMonitoringJob_ = null;
      }
      return modelDeploymentMonitoringJobBuilder_;
    }

    // Generated pass-throughs for unknown-field handling; delegate to the superclass.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1beta1.CreateModelDeploymentMonitoringJobRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1beta1.CreateModelDeploymentMonitoringJobRequest)
  // Generated singleton default instance, created eagerly in the static initializer.
  private static final com.google.cloud.aiplatform.v1beta1.CreateModelDeploymentMonitoringJobRequest
      DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE =
        new com.google.cloud.aiplatform.v1beta1.CreateModelDeploymentMonitoringJobRequest();
  }

  public static com.google.cloud.aiplatform.v1beta1.CreateModelDeploymentMonitoringJobRequest
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Generated parser: delegates to Builder.mergeFrom and, on any failure, attaches the
  // partially-parsed message to the thrown InvalidProtocolBufferException.
  private static final com.google.protobuf.Parser<CreateModelDeploymentMonitoringJobRequest>
      PARSER =
          new com.google.protobuf.AbstractParser<CreateModelDeploymentMonitoringJobRequest>() {
            @java.lang.Override
            public CreateModelDeploymentMonitoringJobRequest parsePartialFrom(
                com.google.protobuf.CodedInputStream input,
                com.google.protobuf.ExtensionRegistryLite extensionRegistry)
                throws com.google.protobuf.InvalidProtocolBufferException {
              Builder builder = newBuilder();
              try {
                builder.mergeFrom(input, extensionRegistry);
              } catch (com.google.protobuf.InvalidProtocolBufferException e) {
                throw e.setUnfinishedMessage(builder.buildPartial());
              } catch (com.google.protobuf.UninitializedMessageException e) {
                throw e.asInvalidProtocolBufferException()
                    .setUnfinishedMessage(builder.buildPartial());
              } catch (java.io.IOException e) {
                throw new com.google.protobuf.InvalidProtocolBufferException(e)
                    .setUnfinishedMessage(builder.buildPartial());
              }
              return builder.buildPartial();
            }
          };

  public static com.google.protobuf.Parser<CreateModelDeploymentMonitoringJobRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<CreateModelDeploymentMonitoringJobRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.aiplatform.v1beta1.CreateModelDeploymentMonitoringJobRequest
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/*
* Copyright 2011 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.api.client.googleapis.media;
import com.google.api.client.googleapis.MethodOverride;
import com.google.api.client.http.AbstractInputStreamContent;
import com.google.api.client.http.ByteArrayContent;
import com.google.api.client.http.EmptyContent;
import com.google.api.client.http.GZipEncoding;
import com.google.api.client.http.GenericUrl;
import com.google.api.client.http.HttpBackOffIOExceptionHandler;
import com.google.api.client.http.HttpBackOffUnsuccessfulResponseHandler;
import com.google.api.client.http.HttpContent;
import com.google.api.client.http.HttpHeaders;
import com.google.api.client.http.HttpMethods;
import com.google.api.client.http.HttpRequest;
import com.google.api.client.http.HttpRequestFactory;
import com.google.api.client.http.HttpRequestInitializer;
import com.google.api.client.http.HttpResponse;
import com.google.api.client.http.HttpTransport;
import com.google.api.client.http.InputStreamContent;
import com.google.api.client.http.MultipartContent;
import com.google.api.client.util.Beta;
import com.google.api.client.util.ByteStreams;
import com.google.api.client.util.Preconditions;
import com.google.api.client.util.Sleeper;
import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Arrays;
/**
* Media HTTP Uploader, with support for both direct and resumable media uploads. Documentation is
* available <a
* href='https://googleapis.github.io/google-api-java-client/media-upload.html'>here</a>.
*
* <p>For resumable uploads, when the media content length is known, if the provided {@link
* InputStream} has {@link InputStream#markSupported} as {@code false} then it is wrapped in an
* {@link BufferedInputStream} to support the {@link InputStream#mark} and {@link InputStream#reset}
* methods required for handling server errors. If the media content length is unknown then each
* chunk is stored temporarily in memory. This is required to determine when the last chunk is
* reached.
*
* <p>See {@link #setDisableGZipContent(boolean)} for information on when content is gzipped and how
* to control that behavior.
*
* <p>Back-off is disabled by default. To enable it for an abnormal HTTP response and an I/O
* exception you should call {@link HttpRequest#setUnsuccessfulResponseHandler} with a new {@link
* HttpBackOffUnsuccessfulResponseHandler} instance and {@link HttpRequest#setIOExceptionHandler}
* with {@link HttpBackOffIOExceptionHandler}.
*
* <p>Upgrade warning: in prior version 1.14 exponential back-off was enabled by default for an
* abnormal HTTP response and there was a regular retry (without back-off) when I/O exception was
* thrown. Starting with version 1.15 back-off is disabled and there is no retry on I/O exception by
* default.
*
* <p>Upgrade warning: in prior version 1.16 {@link #serverErrorCallback} was public but starting
* with version 1.17 it has been removed from the public API, and changed to be package private.
*
* <p>Implementation is not thread-safe.
*
* @since 1.9
* @author rmistry@google.com (Ravi Mistry)
* @author peleyal@google.com (Eyal Peled)
*/
@SuppressWarnings("deprecation")
public final class MediaHttpUploader {
  /**
   * Upload content length header ({@code X-Upload-Content-Length}), sent on the initiation request
   * when the media content length is known.
   *
   * <p>NOTE(review): the original javadoc here said "Upload content type header" — it was swapped
   * with the javadoc of {@link #CONTENT_TYPE_HEADER}; the constant values are correct.
   *
   * @since 1.13
   */
  public static final String CONTENT_LENGTH_HEADER = "X-Upload-Content-Length";

  /**
   * Upload content type header ({@code X-Upload-Content-Type}), sent on the initiation request.
   *
   * @since 1.13
   */
  public static final String CONTENT_TYPE_HEADER = "X-Upload-Content-Type";
  /** Upload state associated with the Media HTTP uploader. */
  public enum UploadState {
    /** The upload process has not started yet. */
    NOT_STARTED,

    /** Set before the initiation request is sent. */
    INITIATION_STARTED,

    /** Set after the initiation request completes. */
    INITIATION_COMPLETE,

    /** Set after a media file chunk is uploaded. */
    MEDIA_IN_PROGRESS,

    /** Set after the complete media file is successfully uploaded. */
    MEDIA_COMPLETE
  }

  /** The current state of the uploader. */
  private UploadState uploadState = UploadState.NOT_STARTED;

  /** Number of bytes in a megabyte (1024 * 1024). */
  static final int MB = 0x100000;

  /** Number of bytes in a kilobyte. */
  private static final int KB = 0x400;

  /** Minimum number of bytes that can be uploaded to the server (set to 256KB). */
  public static final int MINIMUM_CHUNK_SIZE = 256 * KB;

  /**
   * Default maximum number of bytes that will be uploaded to the server in any single HTTP request
   * (set to 10 MB).
   */
  public static final int DEFAULT_CHUNK_SIZE = 10 * MB;

  /** The HTTP content of the media to be uploaded. */
  private final AbstractInputStreamContent mediaContent;

  /** The request factory for connections to the server. */
  private final HttpRequestFactory requestFactory;

  /** The transport to use for requests. */
  private final HttpTransport transport;

  /** HTTP content metadata of the media to be uploaded or {@code null} for none. */
  private HttpContent metadata;

  /**
   * The length of the HTTP media content.
   *
   * <p>{@code 0} before it is lazily initialized in {@link #getMediaContentLength()} after which it
   * could still be {@code 0} for empty media content. Will be {@code < 0} if the media content
   * length has not been specified.
   */
  private long mediaContentLength;

  /**
   * Determines if media content length has been calculated yet in {@link #getMediaContentLength()}.
   */
  private boolean isMediaContentLengthCalculated;

  /**
   * The HTTP method used for the initiation request.
   *
   * <p>Can only be {@link HttpMethods#POST} (for media upload), {@link HttpMethods#PUT} (for media
   * update) or {@link HttpMethods#PATCH} — the setter accepts all three. The default value is
   * {@link HttpMethods#POST}.
   */
  private String initiationRequestMethod = HttpMethods.POST;

  /** The HTTP headers used in the initiation request. */
  private HttpHeaders initiationHeaders = new HttpHeaders();

  /**
   * The HTTP request object that is currently used to send upload requests or {@code null} before
   * {@link #upload}.
   */
  private HttpRequest currentRequest;

  /** An Input stream of the HTTP media content or {@code null} before {@link #upload}. */
  private InputStream contentInputStream;

  /**
   * Determines whether direct media upload is enabled or disabled. If value is set to {@code true}
   * then a direct upload will be done where the whole media content is uploaded in a single request
   * If value is set to {@code false} then the upload uses the resumable media upload protocol to
   * upload in data chunks. Defaults to {@code false}.
   */
  private boolean directUploadEnabled;

  /** Progress listener to send progress notifications to or {@code null} for none. */
  private MediaHttpUploaderProgressListener progressListener;

  /**
   * The media content length is used in the "Content-Range" header. If we reached the end of the
   * stream, this variable will be set with the length of the stream. This value is used only in
   * resumable media upload. "*" means the total length is not yet known.
   */
  String mediaContentLengthStr = "*";

  /**
   * The number of bytes the server received so far. This value will not be calculated for direct
   * uploads when the content length is not known in advance.
   */
  // TODO(rmistry): Figure out a way to compute the content length using CountingInputStream.
  private long totalBytesServerReceived;

  /**
   * Maximum size of individual chunks that will get uploaded by single HTTP requests. The default
   * value is {@link #DEFAULT_CHUNK_SIZE}.
   */
  private int chunkSize = DEFAULT_CHUNK_SIZE;

  /**
   * Used to cache a single byte when the media content length is unknown or {@code null} for none.
   * Reading one byte past the chunk boundary is how end-of-stream is detected.
   */
  private Byte cachedByte;

  /**
   * The number of bytes the client had sent to the server so far or {@code 0} for none. It is used
   * for resumable media upload when the media content length is not specified.
   */
  private long totalBytesClientSent;

  /**
   * The number of bytes of the current chunk which was sent to the server or {@code 0} for none.
   * This value equals to chunk size for each chunk the client send to the server, except for the
   * ending chunk.
   */
  private int currentChunkLength;

  /**
   * The content buffer of the current request or {@code null} for none. It is used for resumable
   * media upload when the media content length is not specified. It is instantiated for every
   * request in {@link #buildContentChunk()} and is set to {@code null} when the request is
   * completed in {@link #upload}.
   */
  private byte currentRequestContentBuffer[];

  /**
   * Whether to disable GZip compression of HTTP content.
   *
   * <p>The default value is {@code false}.
   */
  private boolean disableGZipContent;

  /** Sleeper. */
  Sleeper sleeper = Sleeper.DEFAULT;
/**
* Construct the {@link MediaHttpUploader}.
*
* <p>The input stream received by calling {@link AbstractInputStreamContent#getInputStream} is
* closed when the upload process is successfully completed. For resumable uploads, when the media
* content length is known, if the input stream has {@link InputStream#markSupported} as {@code
* false} then it is wrapped in an {@link BufferedInputStream} to support the {@link
* InputStream#mark} and {@link InputStream#reset} methods required for handling server errors. If
* the media content length is unknown then each chunk is stored temporarily in memory. This is
* required to determine when the last chunk is reached.
*
* @param mediaContent The Input stream content of the media to be uploaded
* @param transport The transport to use for requests
* @param httpRequestInitializer The initializer to use when creating an {@link HttpRequest} or
* {@code null} for none
*/
public MediaHttpUploader(
AbstractInputStreamContent mediaContent,
HttpTransport transport,
HttpRequestInitializer httpRequestInitializer) {
this.mediaContent = Preconditions.checkNotNull(mediaContent);
this.transport = Preconditions.checkNotNull(transport);
this.requestFactory =
httpRequestInitializer == null
? transport.createRequestFactory()
: transport.createRequestFactory(httpRequestInitializer);
}
/**
* Executes a direct media upload or resumable media upload conforming to the specifications
* listed <a
* href='https://developers.google.com/api-client-library/java/google-api-java-client/media-upload'>here.</a>
*
* <p>This method is not reentrant. A new instance of {@link MediaHttpUploader} must be
* instantiated before upload called be called again.
*
* <p>If an error is encountered during the request execution the caller is responsible for
* parsing the response correctly. For example for JSON errors:
*
* <pre>{@code
* if (!response.isSuccessStatusCode()) {
* throw GoogleJsonResponseException.from(jsonFactory, response);
* }
* }</pre>
*
* <p>Callers should call {@link HttpResponse#disconnect} when the returned HTTP response object
* is no longer needed. However, {@link HttpResponse#disconnect} does not have to be called if the
* response stream is properly closed. Example usage:
*
* <pre>{@code
* HttpResponse response = batch.upload(initiationRequestUrl);
* try {
* // process the HTTP response object
* } finally {
* response.disconnect();
* }
* }</pre>
*
* @param initiationRequestUrl The request URL where the initiation request will be sent
* @return HTTP response
*/
public HttpResponse upload(GenericUrl initiationRequestUrl) throws IOException {
Preconditions.checkArgument(uploadState == UploadState.NOT_STARTED);
if (directUploadEnabled) {
return directUpload(initiationRequestUrl);
}
return resumableUpload(initiationRequestUrl);
}
/**
* Direct Uploads the media.
*
* @param initiationRequestUrl The request URL where the initiation request will be sent
* @return HTTP response
*/
private HttpResponse directUpload(GenericUrl initiationRequestUrl) throws IOException {
updateStateAndNotifyListener(UploadState.MEDIA_IN_PROGRESS);
HttpContent content = mediaContent;
if (metadata != null) {
content = new MultipartContent().setContentParts(Arrays.asList(metadata, mediaContent));
initiationRequestUrl.put("uploadType", "multipart");
} else {
initiationRequestUrl.put("uploadType", "media");
}
HttpRequest request =
requestFactory.buildRequest(initiationRequestMethod, initiationRequestUrl, content);
request.getHeaders().putAll(initiationHeaders);
// We do not have to do anything special here if media content length is unspecified because
// direct media upload works even when the media content length == -1.
HttpResponse response = executeCurrentRequest(request);
boolean responseProcessed = false;
try {
if (isMediaLengthKnown()) {
totalBytesServerReceived = getMediaContentLength();
}
updateStateAndNotifyListener(UploadState.MEDIA_COMPLETE);
responseProcessed = true;
} finally {
if (!responseProcessed) {
response.disconnect();
}
}
return response;
}
  /**
   * Uploads the media in a resumable manner.
   *
   * <p>First issues the initiation request to obtain the unique upload URL, then uploads the media
   * in chunks, resuming from the byte index the server reports back (via the "Range" header on 308
   * responses) after each request.
   *
   * @param initiationRequestUrl The request URL where the initiation request will be sent
   * @return HTTP response
   */
  private HttpResponse resumableUpload(GenericUrl initiationRequestUrl) throws IOException {
    // Make initial request to get the unique upload URL.
    HttpResponse initialResponse = executeUploadInitiation(initiationRequestUrl);
    if (!initialResponse.isSuccessStatusCode()) {
      // If the initiation request is not successful return it immediately.
      return initialResponse;
    }
    GenericUrl uploadUrl;
    try {
      // The server communicates the per-session upload URL in the "Location" header.
      uploadUrl = new GenericUrl(initialResponse.getHeaders().getLocation());
    } finally {
      initialResponse.disconnect();
    }
    // Convert media content into a byte stream to upload in chunks.
    contentInputStream = mediaContent.getInputStream();
    if (!contentInputStream.markSupported() && isMediaLengthKnown()) {
      // If we know the media content length then wrap the stream into a Buffered input stream to
      // support the {@link InputStream#mark} and {@link InputStream#reset} methods required for
      // handling server errors.
      contentInputStream = new BufferedInputStream(contentInputStream);
    }
    HttpResponse response;
    // Upload the media content in chunks.
    while (true) {
      ContentChunk contentChunk = buildContentChunk();
      currentRequest = requestFactory.buildPutRequest(uploadUrl, null);
      currentRequest.setContent(contentChunk.getContent());
      currentRequest.getHeaders().setContentRange(contentChunk.getContentRange());
      // set mediaErrorHandler as I/O exception handler and as unsuccessful response handler for
      // calling to serverErrorCallback on an I/O exception or an abnormal HTTP response
      new MediaUploadErrorHandler(this, currentRequest);
      if (isMediaLengthKnown()) {
        // TODO(rmistry): Support gzipping content for the case where media content length is
        // known (https://github.com/googleapis/google-api-java-client/issues/691).
        response = executeCurrentRequestWithoutGZip(currentRequest);
      } else {
        response = executeCurrentRequest(currentRequest);
      }
      boolean returningResponse = false;
      try {
        if (response.isSuccessStatusCode()) {
          // Success: the server has the complete media content.
          totalBytesServerReceived = getMediaContentLength();
          if (mediaContent.getCloseInputStream()) {
            contentInputStream.close();
          }
          updateStateAndNotifyListener(UploadState.MEDIA_COMPLETE);
          returningResponse = true;
          return response;
        }
        // 308 ("Resume Incomplete") means keep uploading; any other status is terminal.
        if (response.getStatusCode() != 308) {
          if (mediaContent.getCloseInputStream()) {
            contentInputStream.close();
          }
          returningResponse = true;
          return response;
        }
        // Check to see if the upload URL has changed on the server.
        String updatedUploadUrl = response.getHeaders().getLocation();
        if (updatedUploadUrl != null) {
          uploadUrl = new GenericUrl(updatedUploadUrl);
        }
        // we check the amount of bytes the server received so far, because the server may process
        // fewer bytes than the amount of bytes the client had sent
        long newBytesServerReceived = getNextByteIndex(response.getHeaders().getRange());
        // the server can receive any amount of bytes from 0 to current chunk length
        long currentBytesServerReceived = newBytesServerReceived - totalBytesServerReceived;
        Preconditions.checkState(
            currentBytesServerReceived >= 0 && currentBytesServerReceived <= currentChunkLength);
        long copyBytes = currentChunkLength - currentBytesServerReceived;
        if (isMediaLengthKnown()) {
          if (copyBytes > 0) {
            // If the server didn't receive all the bytes the client sent the current position of
            // the input stream is incorrect. So we should reset the stream and skip those bytes
            // that the server had already received.
            // Otherwise (the server got all bytes the client sent), the stream is in its right
            // position, and we can continue from there
            contentInputStream.reset();
            long actualSkipValue = contentInputStream.skip(currentBytesServerReceived);
            Preconditions.checkState(currentBytesServerReceived == actualSkipValue);
          }
        } else if (copyBytes == 0) {
          // server got all the bytes, so we don't need to use this buffer. Otherwise, we have to
          // keep the buffer and copy part (or all) of its bytes to the stream we are sending to the
          // server
          currentRequestContentBuffer = null;
        }
        totalBytesServerReceived = newBytesServerReceived;
        updateStateAndNotifyListener(UploadState.MEDIA_IN_PROGRESS);
      } finally {
        if (!returningResponse) {
          response.disconnect();
        }
      }
    }
  }
/** @return {@code true} if the media length is known, otherwise {@code false} */
private boolean isMediaLengthKnown() throws IOException {
return getMediaContentLength() >= 0;
}
/**
* Uses lazy initialization to compute the media content length.
*
* <p>This is done to avoid throwing an {@link IOException} in the constructor.
*/
private long getMediaContentLength() throws IOException {
if (!isMediaContentLengthCalculated) {
mediaContentLength = mediaContent.getLength();
isMediaContentLengthCalculated = true;
}
return mediaContentLength;
}
/**
* This method sends a POST request with empty content to get the unique upload URL.
*
* @param initiationRequestUrl The request URL where the initiation request will be sent
*/
private HttpResponse executeUploadInitiation(GenericUrl initiationRequestUrl) throws IOException {
updateStateAndNotifyListener(UploadState.INITIATION_STARTED);
initiationRequestUrl.put("uploadType", "resumable");
HttpContent content = metadata == null ? new EmptyContent() : metadata;
HttpRequest request =
requestFactory.buildRequest(initiationRequestMethod, initiationRequestUrl, content);
initiationHeaders.set(CONTENT_TYPE_HEADER, mediaContent.getType());
if (isMediaLengthKnown()) {
initiationHeaders.set(CONTENT_LENGTH_HEADER, getMediaContentLength());
}
request.getHeaders().putAll(initiationHeaders);
HttpResponse response = executeCurrentRequest(request);
boolean notificationCompleted = false;
try {
updateStateAndNotifyListener(UploadState.INITIATION_COMPLETE);
notificationCompleted = true;
} finally {
if (!notificationCompleted) {
response.disconnect();
}
}
return response;
}
/**
* Executes the current request with some minimal common code.
*
* @param request current request
* @return HTTP response
*/
private HttpResponse executeCurrentRequestWithoutGZip(HttpRequest request) throws IOException {
// method override for non-POST verbs
new MethodOverride().intercept(request);
// don't throw an exception so we can let a custom Google exception be thrown
request.setThrowExceptionOnExecuteError(false);
// execute the request
HttpResponse response = request.execute();
return response;
}
/**
* Executes the current request with some common code that includes exponential backoff and GZip
* encoding.
*
* @param request current request
* @return HTTP response
*/
private HttpResponse executeCurrentRequest(HttpRequest request) throws IOException {
// enable GZip encoding if necessary
if (!disableGZipContent && !(request.getContent() instanceof EmptyContent)) {
request.setEncoding(new GZipEncoding());
}
// execute request
HttpResponse response = executeCurrentRequestWithoutGZip(request);
return response;
}
  /**
   * Sets the HTTP media content chunk and the required headers that should be used in the upload
   * request.
   *
   * <p>When the media length is known the chunk is a bounded view over the (resettable) content
   * stream; when it is unknown the chunk is buffered in memory and one extra byte is read ahead to
   * detect end-of-stream.
   */
  private ContentChunk buildContentChunk() throws IOException {
    int blockSize;
    if (isMediaLengthKnown()) {
      // We know exactly what the blockSize will be because we know the media content length.
      blockSize = (int) Math.min(chunkSize, getMediaContentLength() - totalBytesServerReceived);
    } else {
      // Use the chunkSize as the blockSize because we do not know what it is yet.
      blockSize = chunkSize;
    }

    AbstractInputStreamContent contentChunk;
    int actualBlockSize = blockSize;
    if (isMediaLengthKnown()) {
      // Mark the current position in case we need to retry the request.
      contentInputStream.mark(blockSize);

      InputStream limitInputStream = ByteStreams.limit(contentInputStream, blockSize);
      contentChunk =
          new InputStreamContent(mediaContent.getType(), limitInputStream)
              .setRetrySupported(true)
              .setLength(blockSize)
              .setCloseInputStream(false);
      mediaContentLengthStr = String.valueOf(getMediaContentLength());
    } else {
      // If the media content length is not known we implement a custom buffered input stream that
      // enables us to detect the length of the media content when the last chunk is sent. We
      // accomplish this by always trying to read an extra byte further than the end of the current
      // chunk.
      int actualBytesRead;
      int bytesAllowedToRead;

      // amount of bytes which need to be copied from last chunk buffer
      int copyBytes = 0;
      if (currentRequestContentBuffer == null) {
        // Fresh buffer: read blockSize + 1 bytes (the +1 is the end-of-stream probe), unless a
        // probe byte is already cached from the previous chunk.
        bytesAllowedToRead = cachedByte == null ? blockSize + 1 : blockSize;
        currentRequestContentBuffer = new byte[blockSize + 1];
        if (cachedByte != null) {
          currentRequestContentBuffer[0] = cachedByte;
        }
      } else {
        // currentRequestContentBuffer is not null that means one of the following:
        // 1. This is a request to recover from a server error (e.g. 503)
        // or
        // 2. The server received less bytes than the amount of bytes the client had sent. For
        // example, the client sends bytes 100-199, but the server returns back status code 308,
        // and its "Range" header is "bytes=0-150".
        // In that case, the new request will be constructed from the previous request's byte buffer
        // plus new bytes from the stream.
        copyBytes = (int) (totalBytesClientSent - totalBytesServerReceived);

        // shift copyBytes bytes to the beginning - those are the bytes which weren't received by
        // the server in the last chunk.
        System.arraycopy(
            currentRequestContentBuffer,
            currentChunkLength - copyBytes,
            currentRequestContentBuffer,
            0,
            copyBytes);

        if (cachedByte != null) {
          // add the last cached byte to the buffer
          currentRequestContentBuffer[copyBytes] = cachedByte;
        }

        bytesAllowedToRead = blockSize - copyBytes;
      }

      actualBytesRead =
          ByteStreams.read(
              contentInputStream,
              currentRequestContentBuffer,
              blockSize + 1 - bytesAllowedToRead,
              bytesAllowedToRead);

      if (actualBytesRead < bytesAllowedToRead) {
        // Short read: the stream is exhausted, so this is the final chunk.
        actualBlockSize = copyBytes + Math.max(0, actualBytesRead);
        if (cachedByte != null) {
          actualBlockSize++;
          cachedByte = null;
        }

        if (mediaContentLengthStr.equals("*")) {
          // At this point we know we reached the media content length because we either read less
          // than the specified chunk size or there is no more data left to be read.
          mediaContentLengthStr = String.valueOf(totalBytesServerReceived + actualBlockSize);
        }
      } else {
        // Full read: stash the probe byte for the next chunk.
        cachedByte = currentRequestContentBuffer[blockSize];
      }

      contentChunk =
          new ByteArrayContent(
              mediaContent.getType(), currentRequestContentBuffer, 0, actualBlockSize);
      totalBytesClientSent = totalBytesServerReceived + actualBlockSize;
    }

    currentChunkLength = actualBlockSize;

    String contentRange;
    if (actualBlockSize == 0) {
      // No bytes to upload. Either zero content media being uploaded, or a server failure on the
      // last write, even though the write actually succeeded. Either way,
      // mediaContentLengthStr will contain the actual media length.
      contentRange = "bytes */" + mediaContentLengthStr;
    } else {
      contentRange =
          "bytes "
              + totalBytesServerReceived
              + "-"
              + (totalBytesServerReceived + actualBlockSize - 1)
              + "/"
              + mediaContentLengthStr;
    }

    return new ContentChunk(contentChunk, contentRange);
  }
  /** Immutable pair of a chunk's HTTP content and the "Content-Range" header value describing it. */
  private static class ContentChunk {
    private final AbstractInputStreamContent content;
    private final String contentRange;

    ContentChunk(AbstractInputStreamContent content, String contentRange) {
      this.content = content;
      this.contentRange = contentRange;
    }

    /** Returns the chunk payload. */
    AbstractInputStreamContent getContent() {
      return content;
    }

    /** Returns the "Content-Range" header value, e.g. {@code "bytes 0-9/100"}. */
    String getContentRange() {
      return contentRange;
    }
  }
/**
* {@link Beta} <br>
* The call back method that will be invoked on a server error or an I/O exception during
* resumable upload inside {@link #upload}.
*
* <p>This method changes the current request to query the current status of the upload to find
* how many bytes were successfully uploaded before the server error occurred.
*/
@Beta
void serverErrorCallback() throws IOException {
Preconditions.checkNotNull(currentRequest, "The current request should not be null");
// Query the current status of the upload by issuing an empty PUT request on the upload URI.
currentRequest.setContent(new EmptyContent());
currentRequest.getHeaders().setContentRange("bytes */" + mediaContentLengthStr);
}
/**
* Returns the next byte index identifying data that the server has not yet received, obtained
* from the HTTP Range header (E.g a header of "Range: 0-55" would cause 56 to be returned).
* <code>null</code> or malformed headers cause 0 to be returned.
*
* @param rangeHeader in the HTTP response
* @return the byte index beginning where the server has yet to receive data
*/
private long getNextByteIndex(String rangeHeader) {
if (rangeHeader == null) {
return 0L;
}
return Long.parseLong(rangeHeader.substring(rangeHeader.indexOf('-') + 1)) + 1;
}
  /** Returns HTTP content metadata for the media request or {@code null} for none. */
  public HttpContent getMetadata() {
    return metadata;
  }

  /**
   * Sets HTTP content metadata for the media request or {@code null} for none.
   *
   * <p>If set, the metadata is uploaded alongside the media: as the initiation request body for
   * resumable uploads, or as the first part of a multipart request for direct uploads.
   */
  public MediaHttpUploader setMetadata(HttpContent metadata) {
    this.metadata = metadata;
    return this;
  }

  /** Returns the HTTP content of the media to be uploaded. */
  public HttpContent getMediaContent() {
    return mediaContent;
  }

  /** Returns the transport to use for requests. */
  public HttpTransport getTransport() {
    return transport;
  }
  /**
   * Sets whether direct media upload is enabled or disabled.
   *
   * <p>If value is set to {@code true} then a direct upload will be done where the whole media
   * content is uploaded in a single request. If value is set to {@code false} then the upload uses
   * the resumable media upload protocol to upload in data chunks.
   *
   * <p>Direct upload is recommended if the content size falls below a certain minimum limit. This
   * is because there's minimum block write size for some Google APIs, so if the resumable request
   * fails in the space of that first block, the client will have to restart from the beginning
   * anyway.
   *
   * <p>Defaults to {@code false}.
   *
   * @since 1.9
   */
  public MediaHttpUploader setDirectUploadEnabled(boolean directUploadEnabled) {
    this.directUploadEnabled = directUploadEnabled;
    return this;
  }

  /**
   * Returns whether direct media upload is enabled or disabled. If value is set to {@code true}
   * then a direct upload will be done where the whole media content is uploaded in a single
   * request. If value is set to {@code false} then the upload uses the resumable media upload
   * protocol to upload in data chunks. Defaults to {@code false}.
   *
   * @since 1.9
   */
  public boolean isDirectUploadEnabled() {
    return directUploadEnabled;
  }

  /** Sets the progress listener to send progress notifications to or {@code null} for none. */
  public MediaHttpUploader setProgressListener(MediaHttpUploaderProgressListener progressListener) {
    this.progressListener = progressListener;
    return this;
  }

  /** Returns the progress listener to send progress notifications to or {@code null} for none. */
  public MediaHttpUploaderProgressListener getProgressListener() {
    return progressListener;
  }
  /**
   * Sets the maximum size of individual chunks that will get uploaded by single HTTP requests. The
   * default value is {@link #DEFAULT_CHUNK_SIZE}.
   *
   * <p>The minimum allowable value is {@link #MINIMUM_CHUNK_SIZE} and the specified chunk size must
   * be a multiple of {@link #MINIMUM_CHUNK_SIZE}.
   *
   * @throws IllegalArgumentException if {@code chunkSize} is not a positive multiple of {@link
   *     #MINIMUM_CHUNK_SIZE}
   */
  public MediaHttpUploader setChunkSize(int chunkSize) {
    Preconditions.checkArgument(
        chunkSize > 0 && chunkSize % MINIMUM_CHUNK_SIZE == 0,
        "chunkSize" + " must be a positive multiple of " + MINIMUM_CHUNK_SIZE + ".");
    this.chunkSize = chunkSize;
    return this;
  }

  /**
   * Returns the maximum size of individual chunks that will get uploaded by single HTTP requests.
   * The default value is {@link #DEFAULT_CHUNK_SIZE}.
   */
  public int getChunkSize() {
    return chunkSize;
  }

  /**
   * Returns whether to disable GZip compression of HTTP content.
   *
   * @since 1.13
   */
  public boolean getDisableGZipContent() {
    return disableGZipContent;
  }
  /**
   * Sets whether to disable GZip compression of HTTP content.
   *
   * <p>By default it is {@code false}.
   *
   * <p>If {@link #setDisableGZipContent(boolean)} is set to false (the default value) then content
   * is gzipped for direct media upload and resumable media uploads when content length is not
   * known. Due to a current limitation, content is not gzipped for resumable media uploads when
   * content length is known; this limitation will be removed in the future.
   *
   * @since 1.13
   */
  public MediaHttpUploader setDisableGZipContent(boolean disableGZipContent) {
    this.disableGZipContent = disableGZipContent;
    return this;
  }

  /**
   * Returns the sleeper.
   *
   * @since 1.15
   */
  public Sleeper getSleeper() {
    return sleeper;
  }

  /**
   * Sets the sleeper. The default value is {@link Sleeper#DEFAULT}.
   *
   * @since 1.15
   */
  public MediaHttpUploader setSleeper(Sleeper sleeper) {
    this.sleeper = sleeper;
    return this;
  }
  /**
   * Returns the HTTP method used for the initiation request.
   *
   * <p>The default value is {@link HttpMethods#POST}.
   *
   * @return the HTTP method for the initiation request
   * @since 1.12
   */
  public String getInitiationRequestMethod() {
    return initiationRequestMethod;
  }
/**
* Sets the HTTP method used for the initiation request.
*
* <p>Can only be {@link HttpMethods#POST} (for media upload) or {@link HttpMethods#PUT} (for
* media update). The default value is {@link HttpMethods#POST}.
*
* @since 1.12
*/
public MediaHttpUploader setInitiationRequestMethod(String initiationRequestMethod) {
Preconditions.checkArgument(
initiationRequestMethod.equals(HttpMethods.POST)
|| initiationRequestMethod.equals(HttpMethods.PUT)
|| initiationRequestMethod.equals(HttpMethods.PATCH));
this.initiationRequestMethod = initiationRequestMethod;
return this;
}
  /**
   * Sets the HTTP headers used for the initiation request.
   *
   * @param initiationHeaders the headers to send on the initiation request
   * @return this, for chaining
   */
  public MediaHttpUploader setInitiationHeaders(HttpHeaders initiationHeaders) {
    this.initiationHeaders = initiationHeaders;
    return this;
  }
  /**
   * Returns the HTTP headers used for the initiation request.
   *
   * @return the initiation request headers
   */
  public HttpHeaders getInitiationHeaders() {
    return initiationHeaders;
  }
  /**
   * Gets the total number of bytes the server received so far or {@code 0} for direct uploads when
   * the content length is not known.
   *
   * @return the number of bytes the server received so far
   */
  public long getNumBytesUploaded() {
    // Tracks server-acknowledged bytes, not bytes merely written to the wire.
    return totalBytesServerReceived;
  }
  /**
   * Sets the upload state and notifies the progress listener.
   *
   * @param uploadState value to set to
   * @throws IOException if thrown by the listener's {@code progressChanged} callback
   */
  private void updateStateAndNotifyListener(UploadState uploadState) throws IOException {
    this.uploadState = uploadState;
    // The listener is optional; only notify when one was registered.
    if (progressListener != null) {
      progressListener.progressChanged(this);
    }
  }
  /**
   * Gets the current upload state of the uploader.
   *
   * @return the upload state
   */
  public UploadState getUploadState() {
    return uploadState;
  }
/**
* Gets the upload progress denoting the percentage of bytes that have been uploaded, represented
* between 0.0 (0%) and 1.0 (100%).
*
* <p>Do not use if the specified {@link AbstractInputStreamContent} has no content length
* specified. Instead, consider using {@link #getNumBytesUploaded} to denote progress.
*
* @throws IllegalArgumentException if the specified {@link AbstractInputStreamContent} has no
* content length
* @return the upload progress
*/
public double getProgress() throws IOException {
Preconditions.checkArgument(
isMediaLengthKnown(),
"Cannot call getProgress() if "
+ "the specified AbstractInputStreamContent has no content length. Use "
+ " getNumBytesUploaded() to denote progress instead.");
return getMediaContentLength() == 0
? 0
: (double) totalBytesServerReceived / getMediaContentLength();
}
}
|
apache/flink-statefun-playground | 35,573 | java/showcase/src/main/java/org/apache/flink/statefun/playground/java/showcase/part1/types/generated/UserProfile.java | // Generated by the protocol buffer compiler. DO NOT EDIT!
// source: my-protobuf-type.proto
package org.apache.flink.statefun.playground.java.showcase.part1.types.generated;
/** Protobuf type {@code org.apache.flink.statefun.playground.java.showcase.UserProfile} */
public final class UserProfile extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:org.apache.flink.statefun.playground.java.showcase.UserProfile)
UserProfileOrBuilder {
private static final long serialVersionUID = 0L;
// Use UserProfile.newBuilder() to construct.
private UserProfile(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private UserProfile() {
name_ = "";
loginLocation_ = "";
friend_ = com.google.protobuf.LazyStringArrayList.EMPTY;
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new UserProfile();
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
return this.unknownFields;
}
private UserProfile(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
java.lang.String s = input.readStringRequireUtf8();
name_ = s;
break;
}
case 16:
{
lastSeenDeltaMs_ = input.readInt64();
break;
}
case 26:
{
java.lang.String s = input.readStringRequireUtf8();
loginLocation_ = s;
break;
}
case 32:
{
seenCount_ = input.readInt32();
break;
}
case 40:
{
age_ = input.readInt32();
break;
}
case 50:
{
java.lang.String s = input.readStringRequireUtf8();
if (!((mutable_bitField0_ & 0x00000001) != 0)) {
friend_ = new com.google.protobuf.LazyStringArrayList();
mutable_bitField0_ |= 0x00000001;
}
friend_.add(s);
break;
}
default:
{
if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
} finally {
if (((mutable_bitField0_ & 0x00000001) != 0)) {
friend_ = friend_.getUnmodifiableView();
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return org.apache.flink.statefun.playground.java.showcase.part1.types.generated.MyProtobufType
.internal_static_org_apache_flink_statefun_playground_java_showcase_UserProfile_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.flink.statefun.playground.java.showcase.part1.types.generated.MyProtobufType
.internal_static_org_apache_flink_statefun_playground_java_showcase_UserProfile_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.flink.statefun.playground.java.showcase.part1.types.generated.UserProfile
.class,
org.apache.flink.statefun.playground.java.showcase.part1.types.generated.UserProfile
.Builder.class);
}
public static final int NAME_FIELD_NUMBER = 1;
private volatile java.lang.Object name_;
/**
* <code>string name = 1;</code>
*
* @return The name.
*/
public java.lang.String getName() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
name_ = s;
return s;
}
}
/**
* <code>string name = 1;</code>
*
* @return The bytes for name.
*/
public com.google.protobuf.ByteString getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int LAST_SEEN_DELTA_MS_FIELD_NUMBER = 2;
private long lastSeenDeltaMs_;
/**
* <code>int64 last_seen_delta_ms = 2;</code>
*
* @return The lastSeenDeltaMs.
*/
public long getLastSeenDeltaMs() {
return lastSeenDeltaMs_;
}
public static final int LOGIN_LOCATION_FIELD_NUMBER = 3;
private volatile java.lang.Object loginLocation_;
/**
* <code>string login_location = 3;</code>
*
* @return The loginLocation.
*/
public java.lang.String getLoginLocation() {
java.lang.Object ref = loginLocation_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
loginLocation_ = s;
return s;
}
}
/**
* <code>string login_location = 3;</code>
*
* @return The bytes for loginLocation.
*/
public com.google.protobuf.ByteString getLoginLocationBytes() {
java.lang.Object ref = loginLocation_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
loginLocation_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int SEEN_COUNT_FIELD_NUMBER = 4;
private int seenCount_;
/**
* <code>int32 seen_count = 4;</code>
*
* @return The seenCount.
*/
public int getSeenCount() {
return seenCount_;
}
public static final int AGE_FIELD_NUMBER = 5;
private int age_;
/**
* <code>int32 age = 5;</code>
*
* @return The age.
*/
public int getAge() {
return age_;
}
public static final int FRIEND_FIELD_NUMBER = 6;
private com.google.protobuf.LazyStringList friend_;
/**
* <code>repeated string friend = 6;</code>
*
* @return A list containing the friend.
*/
public com.google.protobuf.ProtocolStringList getFriendList() {
return friend_;
}
/**
* <code>repeated string friend = 6;</code>
*
* @return The count of friend.
*/
public int getFriendCount() {
return friend_.size();
}
/**
* <code>repeated string friend = 6;</code>
*
* @param index The index of the element to return.
* @return The friend at the given index.
*/
public java.lang.String getFriend(int index) {
return friend_.get(index);
}
/**
* <code>repeated string friend = 6;</code>
*
* @param index The index of the value to return.
* @return The bytes of the friend at the given index.
*/
public com.google.protobuf.ByteString getFriendBytes(int index) {
return friend_.getByteString(index);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!getNameBytes().isEmpty()) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
}
if (lastSeenDeltaMs_ != 0L) {
output.writeInt64(2, lastSeenDeltaMs_);
}
if (!getLoginLocationBytes().isEmpty()) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 3, loginLocation_);
}
if (seenCount_ != 0) {
output.writeInt32(4, seenCount_);
}
if (age_ != 0) {
output.writeInt32(5, age_);
}
for (int i = 0; i < friend_.size(); i++) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 6, friend_.getRaw(i));
}
unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!getNameBytes().isEmpty()) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
}
if (lastSeenDeltaMs_ != 0L) {
size += com.google.protobuf.CodedOutputStream.computeInt64Size(2, lastSeenDeltaMs_);
}
if (!getLoginLocationBytes().isEmpty()) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, loginLocation_);
}
if (seenCount_ != 0) {
size += com.google.protobuf.CodedOutputStream.computeInt32Size(4, seenCount_);
}
if (age_ != 0) {
size += com.google.protobuf.CodedOutputStream.computeInt32Size(5, age_);
}
{
int dataSize = 0;
for (int i = 0; i < friend_.size(); i++) {
dataSize += computeStringSizeNoTag(friend_.getRaw(i));
}
size += dataSize;
size += 1 * getFriendList().size();
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj
instanceof
org.apache.flink.statefun.playground.java.showcase.part1.types.generated.UserProfile)) {
return super.equals(obj);
}
org.apache.flink.statefun.playground.java.showcase.part1.types.generated.UserProfile other =
(org.apache.flink.statefun.playground.java.showcase.part1.types.generated.UserProfile) obj;
if (!getName().equals(other.getName())) return false;
if (getLastSeenDeltaMs() != other.getLastSeenDeltaMs()) return false;
if (!getLoginLocation().equals(other.getLoginLocation())) return false;
if (getSeenCount() != other.getSeenCount()) return false;
if (getAge() != other.getAge()) return false;
if (!getFriendList().equals(other.getFriendList())) return false;
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + NAME_FIELD_NUMBER;
hash = (53 * hash) + getName().hashCode();
hash = (37 * hash) + LAST_SEEN_DELTA_MS_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashLong(getLastSeenDeltaMs());
hash = (37 * hash) + LOGIN_LOCATION_FIELD_NUMBER;
hash = (53 * hash) + getLoginLocation().hashCode();
hash = (37 * hash) + SEEN_COUNT_FIELD_NUMBER;
hash = (53 * hash) + getSeenCount();
hash = (37 * hash) + AGE_FIELD_NUMBER;
hash = (53 * hash) + getAge();
if (getFriendCount() > 0) {
hash = (37 * hash) + FRIEND_FIELD_NUMBER;
hash = (53 * hash) + getFriendList().hashCode();
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.flink.statefun.playground.java.showcase.part1.types.generated.UserProfile
parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.flink.statefun.playground.java.showcase.part1.types.generated.UserProfile
parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.flink.statefun.playground.java.showcase.part1.types.generated.UserProfile
parseFrom(com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.flink.statefun.playground.java.showcase.part1.types.generated.UserProfile
parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.flink.statefun.playground.java.showcase.part1.types.generated.UserProfile
parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.flink.statefun.playground.java.showcase.part1.types.generated.UserProfile
parseFrom(byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.flink.statefun.playground.java.showcase.part1.types.generated.UserProfile
parseFrom(java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static org.apache.flink.statefun.playground.java.showcase.part1.types.generated.UserProfile
parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static org.apache.flink.statefun.playground.java.showcase.part1.types.generated.UserProfile
parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.flink.statefun.playground.java.showcase.part1.types.generated.UserProfile
parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static org.apache.flink.statefun.playground.java.showcase.part1.types.generated.UserProfile
parseFrom(com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static org.apache.flink.statefun.playground.java.showcase.part1.types.generated.UserProfile
parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
org.apache.flink.statefun.playground.java.showcase.part1.types.generated.UserProfile
prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/** Protobuf type {@code org.apache.flink.statefun.playground.java.showcase.UserProfile} */
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:org.apache.flink.statefun.playground.java.showcase.UserProfile)
org.apache.flink.statefun.playground.java.showcase.part1.types.generated
.UserProfileOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return org.apache.flink.statefun.playground.java.showcase.part1.types.generated.MyProtobufType
.internal_static_org_apache_flink_statefun_playground_java_showcase_UserProfile_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.flink.statefun.playground.java.showcase.part1.types.generated.MyProtobufType
.internal_static_org_apache_flink_statefun_playground_java_showcase_UserProfile_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.flink.statefun.playground.java.showcase.part1.types.generated.UserProfile
.class,
org.apache.flink.statefun.playground.java.showcase.part1.types.generated.UserProfile
.Builder.class);
}
// Construct using
// org.apache.flink.statefun.playground.java.showcase.part1.types.generated.UserProfile.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
}
@java.lang.Override
public Builder clear() {
super.clear();
name_ = "";
lastSeenDeltaMs_ = 0L;
loginLocation_ = "";
seenCount_ = 0;
age_ = 0;
friend_ = com.google.protobuf.LazyStringArrayList.EMPTY;
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return org.apache.flink.statefun.playground.java.showcase.part1.types.generated.MyProtobufType
.internal_static_org_apache_flink_statefun_playground_java_showcase_UserProfile_descriptor;
}
@java.lang.Override
public org.apache.flink.statefun.playground.java.showcase.part1.types.generated.UserProfile
getDefaultInstanceForType() {
return org.apache.flink.statefun.playground.java.showcase.part1.types.generated.UserProfile
.getDefaultInstance();
}
@java.lang.Override
public org.apache.flink.statefun.playground.java.showcase.part1.types.generated.UserProfile
build() {
org.apache.flink.statefun.playground.java.showcase.part1.types.generated.UserProfile result =
buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.flink.statefun.playground.java.showcase.part1.types.generated.UserProfile
buildPartial() {
org.apache.flink.statefun.playground.java.showcase.part1.types.generated.UserProfile result =
new org.apache.flink.statefun.playground.java.showcase.part1.types.generated.UserProfile(
this);
int from_bitField0_ = bitField0_;
result.name_ = name_;
result.lastSeenDeltaMs_ = lastSeenDeltaMs_;
result.loginLocation_ = loginLocation_;
result.seenCount_ = seenCount_;
result.age_ = age_;
if (((bitField0_ & 0x00000001) != 0)) {
friend_ = friend_.getUnmodifiableView();
bitField0_ = (bitField0_ & ~0x00000001);
}
result.friend_ = friend_;
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other
instanceof
org.apache.flink.statefun.playground.java.showcase.part1.types.generated.UserProfile) {
return mergeFrom(
(org.apache.flink.statefun.playground.java.showcase.part1.types.generated.UserProfile)
other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(
org.apache.flink.statefun.playground.java.showcase.part1.types.generated.UserProfile
other) {
if (other
== org.apache.flink.statefun.playground.java.showcase.part1.types.generated.UserProfile
.getDefaultInstance()) return this;
if (!other.getName().isEmpty()) {
name_ = other.name_;
onChanged();
}
if (other.getLastSeenDeltaMs() != 0L) {
setLastSeenDeltaMs(other.getLastSeenDeltaMs());
}
if (!other.getLoginLocation().isEmpty()) {
loginLocation_ = other.loginLocation_;
onChanged();
}
if (other.getSeenCount() != 0) {
setSeenCount(other.getSeenCount());
}
if (other.getAge() != 0) {
setAge(other.getAge());
}
if (!other.friend_.isEmpty()) {
if (friend_.isEmpty()) {
friend_ = other.friend_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureFriendIsMutable();
friend_.addAll(other.friend_);
}
onChanged();
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.flink.statefun.playground.java.showcase.part1.types.generated.UserProfile
parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage =
(org.apache.flink.statefun.playground.java.showcase.part1.types.generated.UserProfile)
e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
private java.lang.Object name_ = "";
/**
* <code>string name = 1;</code>
*
* @return The name.
*/
public java.lang.String getName() {
java.lang.Object ref = name_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
name_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <code>string name = 1;</code>
*
* @return The bytes for name.
*/
public com.google.protobuf.ByteString getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <code>string name = 1;</code>
*
* @param value The name to set.
* @return This builder for chaining.
*/
public Builder setName(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
name_ = value;
onChanged();
return this;
}
/**
* <code>string name = 1;</code>
*
* @return This builder for chaining.
*/
public Builder clearName() {
name_ = getDefaultInstance().getName();
onChanged();
return this;
}
/**
* <code>string name = 1;</code>
*
* @param value The bytes for name to set.
* @return This builder for chaining.
*/
public Builder setNameBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
name_ = value;
onChanged();
return this;
}
private long lastSeenDeltaMs_;
/**
* <code>int64 last_seen_delta_ms = 2;</code>
*
* @return The lastSeenDeltaMs.
*/
public long getLastSeenDeltaMs() {
return lastSeenDeltaMs_;
}
/**
* <code>int64 last_seen_delta_ms = 2;</code>
*
* @param value The lastSeenDeltaMs to set.
* @return This builder for chaining.
*/
public Builder setLastSeenDeltaMs(long value) {
lastSeenDeltaMs_ = value;
onChanged();
return this;
}
/**
* <code>int64 last_seen_delta_ms = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearLastSeenDeltaMs() {
lastSeenDeltaMs_ = 0L;
onChanged();
return this;
}
private java.lang.Object loginLocation_ = "";
/**
* <code>string login_location = 3;</code>
*
* @return The loginLocation.
*/
public java.lang.String getLoginLocation() {
java.lang.Object ref = loginLocation_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
loginLocation_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <code>string login_location = 3;</code>
*
* @return The bytes for loginLocation.
*/
public com.google.protobuf.ByteString getLoginLocationBytes() {
java.lang.Object ref = loginLocation_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
loginLocation_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <code>string login_location = 3;</code>
*
* @param value The loginLocation to set.
* @return This builder for chaining.
*/
public Builder setLoginLocation(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
loginLocation_ = value;
onChanged();
return this;
}
/**
* <code>string login_location = 3;</code>
*
* @return This builder for chaining.
*/
public Builder clearLoginLocation() {
loginLocation_ = getDefaultInstance().getLoginLocation();
onChanged();
return this;
}
/**
* <code>string login_location = 3;</code>
*
* @param value The bytes for loginLocation to set.
* @return This builder for chaining.
*/
public Builder setLoginLocationBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
loginLocation_ = value;
onChanged();
return this;
}
private int seenCount_;
/**
* <code>int32 seen_count = 4;</code>
*
* @return The seenCount.
*/
public int getSeenCount() {
return seenCount_;
}
/**
* <code>int32 seen_count = 4;</code>
*
* @param value The seenCount to set.
* @return This builder for chaining.
*/
public Builder setSeenCount(int value) {
seenCount_ = value;
onChanged();
return this;
}
/**
* <code>int32 seen_count = 4;</code>
*
* @return This builder for chaining.
*/
public Builder clearSeenCount() {
seenCount_ = 0;
onChanged();
return this;
}
private int age_;
/**
* <code>int32 age = 5;</code>
*
* @return The age.
*/
public int getAge() {
return age_;
}
/**
* <code>int32 age = 5;</code>
*
* @param value The age to set.
* @return This builder for chaining.
*/
public Builder setAge(int value) {
age_ = value;
onChanged();
return this;
}
/**
* <code>int32 age = 5;</code>
*
* @return This builder for chaining.
*/
public Builder clearAge() {
age_ = 0;
onChanged();
return this;
}
private com.google.protobuf.LazyStringList friend_ =
com.google.protobuf.LazyStringArrayList.EMPTY;
private void ensureFriendIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
friend_ = new com.google.protobuf.LazyStringArrayList(friend_);
bitField0_ |= 0x00000001;
}
}
/**
* <code>repeated string friend = 6;</code>
*
* @return A list containing the friend.
*/
public com.google.protobuf.ProtocolStringList getFriendList() {
return friend_.getUnmodifiableView();
}
/**
* <code>repeated string friend = 6;</code>
*
* @return The count of friend.
*/
public int getFriendCount() {
return friend_.size();
}
/**
* <code>repeated string friend = 6;</code>
*
* @param index The index of the element to return.
* @return The friend at the given index.
*/
public java.lang.String getFriend(int index) {
return friend_.get(index);
}
/**
* <code>repeated string friend = 6;</code>
*
* @param index The index of the value to return.
* @return The bytes of the friend at the given index.
*/
public com.google.protobuf.ByteString getFriendBytes(int index) {
return friend_.getByteString(index);
}
/**
* <code>repeated string friend = 6;</code>
*
* @param index The index to set the value at.
* @param value The friend to set.
* @return This builder for chaining.
*/
public Builder setFriend(int index, java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
ensureFriendIsMutable();
friend_.set(index, value);
onChanged();
return this;
}
/**
* <code>repeated string friend = 6;</code>
*
* @param value The friend to add.
* @return This builder for chaining.
*/
public Builder addFriend(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
ensureFriendIsMutable();
friend_.add(value);
onChanged();
return this;
}
/**
* <code>repeated string friend = 6;</code>
*
* @param values The friend to add.
* @return This builder for chaining.
*/
public Builder addAllFriend(java.lang.Iterable<java.lang.String> values) {
ensureFriendIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, friend_);
onChanged();
return this;
}
/**
* <code>repeated string friend = 6;</code>
*
* @return This builder for chaining.
*/
public Builder clearFriend() {
friend_ = com.google.protobuf.LazyStringArrayList.EMPTY;
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
* <code>repeated string friend = 6;</code>
*
* @param value The bytes of the friend to add.
* @return This builder for chaining.
*/
public Builder addFriendBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
ensureFriendIsMutable();
friend_.add(value);
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:org.apache.flink.statefun.playground.java.showcase.UserProfile)
}
// @@protoc_insertion_point(class_scope:org.apache.flink.statefun.playground.java.showcase.UserProfile)
private static final org.apache.flink.statefun.playground.java.showcase.part1.types.generated
.UserProfile
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE =
new org.apache.flink.statefun.playground.java.showcase.part1.types.generated.UserProfile();
}
public static org.apache.flink.statefun.playground.java.showcase.part1.types.generated.UserProfile
getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<UserProfile> PARSER =
new com.google.protobuf.AbstractParser<UserProfile>() {
@java.lang.Override
public UserProfile parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new UserProfile(input, extensionRegistry);
}
};
public static com.google.protobuf.Parser<UserProfile> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<UserProfile> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.flink.statefun.playground.java.showcase.part1.types.generated.UserProfile
getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 35,604 | java-securitycenter/proto-google-cloud-securitycenter-v2/src/main/java/com/google/cloud/securitycenter/v2/UpdateNotificationConfigRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/securitycenter/v2/securitycenter_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.securitycenter.v2;
/**
 *
 *
 * <pre>
 * Request message for updating a notification config.
 * </pre>
 *
 * Protobuf type {@code google.cloud.securitycenter.v2.UpdateNotificationConfigRequest}
 */
public final class UpdateNotificationConfigRequest extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.securitycenter.v2.UpdateNotificationConfigRequest)
    UpdateNotificationConfigRequestOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use UpdateNotificationConfigRequest.newBuilder() to construct.
  private UpdateNotificationConfigRequest(
      com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private UpdateNotificationConfigRequest() {}
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new UpdateNotificationConfigRequest();
  }
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.securitycenter.v2.SecuritycenterServiceProto
        .internal_static_google_cloud_securitycenter_v2_UpdateNotificationConfigRequest_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.securitycenter.v2.SecuritycenterServiceProto
        .internal_static_google_cloud_securitycenter_v2_UpdateNotificationConfigRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.securitycenter.v2.UpdateNotificationConfigRequest.class,
            com.google.cloud.securitycenter.v2.UpdateNotificationConfigRequest.Builder.class);
  }
  // Presence bits: bit 0x00000001 = notification_config set, bit 0x00000002 = update_mask set
  // (see hasNotificationConfig()/hasUpdateMask()).
  private int bitField0_;
  public static final int NOTIFICATION_CONFIG_FIELD_NUMBER = 1;
  private com.google.cloud.securitycenter.v2.NotificationConfig notificationConfig_;
  /**
   *
   *
   * <pre>
   * Required. The notification config to update.
   * </pre>
   *
   * <code>
   * .google.cloud.securitycenter.v2.NotificationConfig notification_config = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return Whether the notificationConfig field is set.
   */
  @java.lang.Override
  public boolean hasNotificationConfig() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   *
   *
   * <pre>
   * Required. The notification config to update.
   * </pre>
   *
   * <code>
   * .google.cloud.securitycenter.v2.NotificationConfig notification_config = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The notificationConfig.
   */
  @java.lang.Override
  public com.google.cloud.securitycenter.v2.NotificationConfig getNotificationConfig() {
    // Unset message fields are represented as null internally; callers always see the
    // default instance instead of null.
    return notificationConfig_ == null
        ? com.google.cloud.securitycenter.v2.NotificationConfig.getDefaultInstance()
        : notificationConfig_;
  }
  /**
   *
   *
   * <pre>
   * Required. The notification config to update.
   * </pre>
   *
   * <code>
   * .google.cloud.securitycenter.v2.NotificationConfig notification_config = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.securitycenter.v2.NotificationConfigOrBuilder
      getNotificationConfigOrBuilder() {
    return notificationConfig_ == null
        ? com.google.cloud.securitycenter.v2.NotificationConfig.getDefaultInstance()
        : notificationConfig_;
  }
  public static final int UPDATE_MASK_FIELD_NUMBER = 2;
  private com.google.protobuf.FieldMask updateMask_;
  /**
   *
   *
   * <pre>
   * The FieldMask to use when updating the notification config.
   *
   * If empty all mutable fields will be updated.
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 2;</code>
   *
   * @return Whether the updateMask field is set.
   */
  @java.lang.Override
  public boolean hasUpdateMask() {
    return ((bitField0_ & 0x00000002) != 0);
  }
  /**
   *
   *
   * <pre>
   * The FieldMask to use when updating the notification config.
   *
   * If empty all mutable fields will be updated.
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 2;</code>
   *
   * @return The updateMask.
   */
  @java.lang.Override
  public com.google.protobuf.FieldMask getUpdateMask() {
    return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
  }
  /**
   *
   *
   * <pre>
   * The FieldMask to use when updating the notification config.
   *
   * If empty all mutable fields will be updated.
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 2;</code>
   */
  @java.lang.Override
  public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
    return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
  }
  // Cache for isInitialized(): -1 = not yet computed, 1 = initialized, 0 = not initialized.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Only fields whose presence bit is set are serialized.
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(1, getNotificationConfig());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      output.writeMessage(2, getUpdateMask());
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // memoizedSize (inherited) caches the result; -1 means not yet computed.
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getNotificationConfig());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.securitycenter.v2.UpdateNotificationConfigRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.securitycenter.v2.UpdateNotificationConfigRequest other =
        (com.google.cloud.securitycenter.v2.UpdateNotificationConfigRequest) obj;
    // Fields compare equal only when both have the same presence and equal values.
    if (hasNotificationConfig() != other.hasNotificationConfig()) return false;
    if (hasNotificationConfig()) {
      if (!getNotificationConfig().equals(other.getNotificationConfig())) return false;
    }
    if (hasUpdateMask() != other.hasUpdateMask()) return false;
    if (hasUpdateMask()) {
      if (!getUpdateMask().equals(other.getUpdateMask())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // memoizedHashCode (inherited) caches the result; 0 means not yet computed.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasNotificationConfig()) {
      hash = (37 * hash) + NOTIFICATION_CONFIG_FIELD_NUMBER;
      hash = (53 * hash) + getNotificationConfig().hashCode();
    }
    if (hasUpdateMask()) {
      hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
      hash = (53 * hash) + getUpdateMask().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  public static com.google.cloud.securitycenter.v2.UpdateNotificationConfigRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.securitycenter.v2.UpdateNotificationConfigRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.securitycenter.v2.UpdateNotificationConfigRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.securitycenter.v2.UpdateNotificationConfigRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.securitycenter.v2.UpdateNotificationConfigRequest parseFrom(
      byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.securitycenter.v2.UpdateNotificationConfigRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.securitycenter.v2.UpdateNotificationConfigRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.securitycenter.v2.UpdateNotificationConfigRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.securitycenter.v2.UpdateNotificationConfigRequest
      parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.securitycenter.v2.UpdateNotificationConfigRequest
      parseDelimitedFrom(
          java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.securitycenter.v2.UpdateNotificationConfigRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.securitycenter.v2.UpdateNotificationConfigRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(
      com.google.cloud.securitycenter.v2.UpdateNotificationConfigRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Request message for updating a notification config.
   * </pre>
   *
   * Protobuf type {@code google.cloud.securitycenter.v2.UpdateNotificationConfigRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.securitycenter.v2.UpdateNotificationConfigRequest)
      com.google.cloud.securitycenter.v2.UpdateNotificationConfigRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.securitycenter.v2.SecuritycenterServiceProto
          .internal_static_google_cloud_securitycenter_v2_UpdateNotificationConfigRequest_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.securitycenter.v2.SecuritycenterServiceProto
          .internal_static_google_cloud_securitycenter_v2_UpdateNotificationConfigRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.securitycenter.v2.UpdateNotificationConfigRequest.class,
              com.google.cloud.securitycenter.v2.UpdateNotificationConfigRequest.Builder.class);
    }
    // Construct using
    // com.google.cloud.securitycenter.v2.UpdateNotificationConfigRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      // Eagerly create nested field builders only when the runtime requests it
      // (alwaysUseFieldBuilders is a protobuf-runtime debugging/consistency flag).
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getNotificationConfigFieldBuilder();
        getUpdateMaskFieldBuilder();
      }
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      notificationConfig_ = null;
      if (notificationConfigBuilder_ != null) {
        notificationConfigBuilder_.dispose();
        notificationConfigBuilder_ = null;
      }
      updateMask_ = null;
      if (updateMaskBuilder_ != null) {
        updateMaskBuilder_.dispose();
        updateMaskBuilder_ = null;
      }
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.securitycenter.v2.SecuritycenterServiceProto
          .internal_static_google_cloud_securitycenter_v2_UpdateNotificationConfigRequest_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.securitycenter.v2.UpdateNotificationConfigRequest
        getDefaultInstanceForType() {
      return com.google.cloud.securitycenter.v2.UpdateNotificationConfigRequest
          .getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.securitycenter.v2.UpdateNotificationConfigRequest build() {
      com.google.cloud.securitycenter.v2.UpdateNotificationConfigRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.securitycenter.v2.UpdateNotificationConfigRequest buildPartial() {
      com.google.cloud.securitycenter.v2.UpdateNotificationConfigRequest result =
          new com.google.cloud.securitycenter.v2.UpdateNotificationConfigRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Copies the set fields (and their presence bits) from this builder into result.
    private void buildPartial0(
        com.google.cloud.securitycenter.v2.UpdateNotificationConfigRequest result) {
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.notificationConfig_ =
            notificationConfigBuilder_ == null
                ? notificationConfig_
                : notificationConfigBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
        to_bitField0_ |= 0x00000002;
      }
      result.bitField0_ |= to_bitField0_;
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.securitycenter.v2.UpdateNotificationConfigRequest) {
        return mergeFrom(
            (com.google.cloud.securitycenter.v2.UpdateNotificationConfigRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    public Builder mergeFrom(
        com.google.cloud.securitycenter.v2.UpdateNotificationConfigRequest other) {
      // Merging the default instance is a no-op.
      if (other
          == com.google.cloud.securitycenter.v2.UpdateNotificationConfigRequest
              .getDefaultInstance()) return this;
      if (other.hasNotificationConfig()) {
        mergeNotificationConfig(other.getNotificationConfig());
      }
      if (other.hasUpdateMask()) {
        mergeUpdateMask(other.getUpdateMask());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                input.readMessage(
                    getNotificationConfigFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Builder-side presence bits; same layout as the message's bitField0_.
    private int bitField0_;
    private com.google.cloud.securitycenter.v2.NotificationConfig notificationConfig_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.securitycenter.v2.NotificationConfig,
            com.google.cloud.securitycenter.v2.NotificationConfig.Builder,
            com.google.cloud.securitycenter.v2.NotificationConfigOrBuilder>
        notificationConfigBuilder_;
    /**
     *
     *
     * <pre>
     * Required. The notification config to update.
     * </pre>
     *
     * <code>
     * .google.cloud.securitycenter.v2.NotificationConfig notification_config = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return Whether the notificationConfig field is set.
     */
    public boolean hasNotificationConfig() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     *
     *
     * <pre>
     * Required. The notification config to update.
     * </pre>
     *
     * <code>
     * .google.cloud.securitycenter.v2.NotificationConfig notification_config = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return The notificationConfig.
     */
    public com.google.cloud.securitycenter.v2.NotificationConfig getNotificationConfig() {
      if (notificationConfigBuilder_ == null) {
        return notificationConfig_ == null
            ? com.google.cloud.securitycenter.v2.NotificationConfig.getDefaultInstance()
            : notificationConfig_;
      } else {
        return notificationConfigBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The notification config to update.
     * </pre>
     *
     * <code>
     * .google.cloud.securitycenter.v2.NotificationConfig notification_config = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setNotificationConfig(
        com.google.cloud.securitycenter.v2.NotificationConfig value) {
      if (notificationConfigBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        notificationConfig_ = value;
      } else {
        notificationConfigBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The notification config to update.
     * </pre>
     *
     * <code>
     * .google.cloud.securitycenter.v2.NotificationConfig notification_config = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setNotificationConfig(
        com.google.cloud.securitycenter.v2.NotificationConfig.Builder builderForValue) {
      if (notificationConfigBuilder_ == null) {
        notificationConfig_ = builderForValue.build();
      } else {
        notificationConfigBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The notification config to update.
     * </pre>
     *
     * <code>
     * .google.cloud.securitycenter.v2.NotificationConfig notification_config = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder mergeNotificationConfig(
        com.google.cloud.securitycenter.v2.NotificationConfig value) {
      if (notificationConfigBuilder_ == null) {
        // Merge into the existing value only if one was already set and is non-default;
        // otherwise replace outright.
        if (((bitField0_ & 0x00000001) != 0)
            && notificationConfig_ != null
            && notificationConfig_
                != com.google.cloud.securitycenter.v2.NotificationConfig.getDefaultInstance()) {
          getNotificationConfigBuilder().mergeFrom(value);
        } else {
          notificationConfig_ = value;
        }
      } else {
        notificationConfigBuilder_.mergeFrom(value);
      }
      if (notificationConfig_ != null) {
        bitField0_ |= 0x00000001;
        onChanged();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The notification config to update.
     * </pre>
     *
     * <code>
     * .google.cloud.securitycenter.v2.NotificationConfig notification_config = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder clearNotificationConfig() {
      bitField0_ = (bitField0_ & ~0x00000001);
      notificationConfig_ = null;
      if (notificationConfigBuilder_ != null) {
        notificationConfigBuilder_.dispose();
        notificationConfigBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The notification config to update.
     * </pre>
     *
     * <code>
     * .google.cloud.securitycenter.v2.NotificationConfig notification_config = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.securitycenter.v2.NotificationConfig.Builder
        getNotificationConfigBuilder() {
      bitField0_ |= 0x00000001;
      onChanged();
      return getNotificationConfigFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Required. The notification config to update.
     * </pre>
     *
     * <code>
     * .google.cloud.securitycenter.v2.NotificationConfig notification_config = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.securitycenter.v2.NotificationConfigOrBuilder
        getNotificationConfigOrBuilder() {
      if (notificationConfigBuilder_ != null) {
        return notificationConfigBuilder_.getMessageOrBuilder();
      } else {
        return notificationConfig_ == null
            ? com.google.cloud.securitycenter.v2.NotificationConfig.getDefaultInstance()
            : notificationConfig_;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The notification config to update.
     * </pre>
     *
     * <code>
     * .google.cloud.securitycenter.v2.NotificationConfig notification_config = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.securitycenter.v2.NotificationConfig,
            com.google.cloud.securitycenter.v2.NotificationConfig.Builder,
            com.google.cloud.securitycenter.v2.NotificationConfigOrBuilder>
        getNotificationConfigFieldBuilder() {
      // Lazily creates the nested builder; after creation the plain field is nulled and
      // the builder becomes the single source of truth for this field.
      if (notificationConfigBuilder_ == null) {
        notificationConfigBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.securitycenter.v2.NotificationConfig,
                com.google.cloud.securitycenter.v2.NotificationConfig.Builder,
                com.google.cloud.securitycenter.v2.NotificationConfigOrBuilder>(
                getNotificationConfig(), getParentForChildren(), isClean());
        notificationConfig_ = null;
      }
      return notificationConfigBuilder_;
    }
    private com.google.protobuf.FieldMask updateMask_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>
        updateMaskBuilder_;
    /**
     *
     *
     * <pre>
     * The FieldMask to use when updating the notification config.
     *
     * If empty all mutable fields will be updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     *
     * @return Whether the updateMask field is set.
     */
    public boolean hasUpdateMask() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     *
     *
     * <pre>
     * The FieldMask to use when updating the notification config.
     *
     * If empty all mutable fields will be updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     *
     * @return The updateMask.
     */
    public com.google.protobuf.FieldMask getUpdateMask() {
      if (updateMaskBuilder_ == null) {
        return updateMask_ == null
            ? com.google.protobuf.FieldMask.getDefaultInstance()
            : updateMask_;
      } else {
        return updateMaskBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * The FieldMask to use when updating the notification config.
     *
     * If empty all mutable fields will be updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     */
    public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
      if (updateMaskBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        updateMask_ = value;
      } else {
        updateMaskBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The FieldMask to use when updating the notification config.
     *
     * If empty all mutable fields will be updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     */
    public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
      if (updateMaskBuilder_ == null) {
        updateMask_ = builderForValue.build();
      } else {
        updateMaskBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The FieldMask to use when updating the notification config.
     *
     * If empty all mutable fields will be updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     */
    public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
      if (updateMaskBuilder_ == null) {
        if (((bitField0_ & 0x00000002) != 0)
            && updateMask_ != null
            && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
          getUpdateMaskBuilder().mergeFrom(value);
        } else {
          updateMask_ = value;
        }
      } else {
        updateMaskBuilder_.mergeFrom(value);
      }
      if (updateMask_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The FieldMask to use when updating the notification config.
     *
     * If empty all mutable fields will be updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     */
    public Builder clearUpdateMask() {
      bitField0_ = (bitField0_ & ~0x00000002);
      updateMask_ = null;
      if (updateMaskBuilder_ != null) {
        updateMaskBuilder_.dispose();
        updateMaskBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The FieldMask to use when updating the notification config.
     *
     * If empty all mutable fields will be updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     */
    public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
      bitField0_ |= 0x00000002;
      onChanged();
      return getUpdateMaskFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * The FieldMask to use when updating the notification config.
     *
     * If empty all mutable fields will be updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     */
    public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
      if (updateMaskBuilder_ != null) {
        return updateMaskBuilder_.getMessageOrBuilder();
      } else {
        return updateMask_ == null
            ? com.google.protobuf.FieldMask.getDefaultInstance()
            : updateMask_;
      }
    }
    /**
     *
     *
     * <pre>
     * The FieldMask to use when updating the notification config.
     *
     * If empty all mutable fields will be updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>
        getUpdateMaskFieldBuilder() {
      if (updateMaskBuilder_ == null) {
        updateMaskBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.protobuf.FieldMask,
                com.google.protobuf.FieldMask.Builder,
                com.google.protobuf.FieldMaskOrBuilder>(
                getUpdateMask(), getParentForChildren(), isClean());
        updateMask_ = null;
      }
      return updateMaskBuilder_;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.cloud.securitycenter.v2.UpdateNotificationConfigRequest)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.securitycenter.v2.UpdateNotificationConfigRequest)
  private static final com.google.cloud.securitycenter.v2.UpdateNotificationConfigRequest
      DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.securitycenter.v2.UpdateNotificationConfigRequest();
  }
  public static com.google.cloud.securitycenter.v2.UpdateNotificationConfigRequest
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser delegates to Builder.mergeFrom; on failure it attaches the partially-built
  // message to the thrown InvalidProtocolBufferException via setUnfinishedMessage.
  private static final com.google.protobuf.Parser<UpdateNotificationConfigRequest> PARSER =
      new com.google.protobuf.AbstractParser<UpdateNotificationConfigRequest>() {
        @java.lang.Override
        public UpdateNotificationConfigRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<UpdateNotificationConfigRequest> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<UpdateNotificationConfigRequest> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.securitycenter.v2.UpdateNotificationConfigRequest
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
openjdk/jdk8 | 34,225 | langtools/test/tools/javac/lambda/bridge/template_tests/BridgeMethodsTemplateTest.java | /*
* Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*
*/
import java.io.IOException;
import org.testng.annotations.Test;
/**
 * BridgeMethodsTemplateTest
 *
 * <p>Each test compiles a small class hierarchy described by a compact spec string,
 * asserts how calls link before separate recompilation, recompiles the listed types
 * against an evolved spec, and asserts the linkage again (exercising bridge methods
 * under separate compilation). NOTE(review): the spec letters appear to mean
 * c = concrete method, a = abstract method, d = default method, with the digit
 * identifying the declaration site -- confirm against BridgeMethodTestCase.
 *
 * @author Brian Goetz
 */
@Test
public class BridgeMethodsTemplateTest extends BridgeMethodTestCase {
    /*
     * Cc1(A) -> Cc1(Ac0)
     *
     * 0*: Inherited from A
     * 1: Declared in C
     */
    public void test1() throws IOException, ReflectiveOperationException {
        compileSpec("Cc1(A)");
        assertLinkage("C", LINKAGE_ERROR, "C1");
        recompileSpec("Cc1(Ac0)", "A");
        assertLinkage("C", "A0", "C1");
    }
    /*
     * Cc1(I) -> Cc1(Id0)
     *
     * 0*: Inherited default from I
     * 1: Declared in C
     */
    public void test2() throws IOException, ReflectiveOperationException {
        compileSpec("Cc1(I)");
        assertLinkage("C", LINKAGE_ERROR, "C1");
        recompileSpec("Cc1(Id0)", "I");
        assertLinkage("C", "I0", "C1");
    }
    /*
     * C(Bc1(A)) -> C(Bc1(Ac0))
     *
     * 0*: Inherited from A
     * 1: Inherited from B
     */
    public void test3() throws IOException, ReflectiveOperationException {
        compileSpec("C(Bc1(A))");
        assertLinkage("C", LINKAGE_ERROR, "B1");
        recompileSpec("C(Bc1(Ac0))", "A");
        assertLinkage("C", "A0", "B1");
    }
    /*
     * C(B(Ac0)) -> C(Bc1(Ac0))
     *
     * 0: Inherited from B (through bridge)
     * 1: Inherited from B
     */
    public void test4() throws IOException, ReflectiveOperationException {
        compileSpec("C(B(Ac0))");
        assertLinkage("C", "A0", LINKAGE_ERROR);
        recompileSpec("C(Bc1(Ac0))", "B");
        assertLinkage("C", "B1", "B1");
    }
    /*
     * C(B(A)) -> C(Bc1(Ac0))
     *
     * 0: Inherited from B (through bridge)
     * 1: Inherited from B
     */
    public void test5() throws IOException, ReflectiveOperationException {
        compileSpec("C(B(A))");
        assertLinkage("C", LINKAGE_ERROR, LINKAGE_ERROR);
        recompileSpec("C(Bc1(Ac0))", "A", "B");
        assertLinkage("C", "B1", "B1");
    }
    /*
     * C(Ac1(I)) -> C(Ac1(Id0))
     *
     * 0*: Inherited default from I
     * 1: Inherited from A
     */
    public void test6() throws IOException, ReflectiveOperationException {
        compileSpec("C(Ac1(I))");
        assertLinkage("C", LINKAGE_ERROR, "A1");
        recompileSpec("C(Ac1(Id0))", "I");
        assertLinkage("C", "I0", "A1");
    }
    /*
     * C(A(Id0)) -> C(Ac1(Id0))
     *
     * 0: Inherited from A (through bridge)
     * 1: Inherited from A
     */
    public void test7() throws IOException, ReflectiveOperationException {
        compileSpec("C(A(Id0))");
        assertLinkage("C", "I0", LINKAGE_ERROR);
        recompileSpec("C(Ac1(Id0))", "A");
        assertLinkage("C", "A1", "A1");
    }
    /*
     * C(A(I)) -> C(Ac1(Id0))
     *
     * 0*: Inherited from A (through bridge)
     * 1*: Inherited from A
     */
    public void test8() throws IOException, ReflectiveOperationException {
        compileSpec("C(A(I))");
        assertLinkage("C", LINKAGE_ERROR, LINKAGE_ERROR);
        recompileSpec("C(Ac1(Id0))", "A", "I");
        assertLinkage("C", "A1", "A1");
    }
    /*
     * C(Id1(J)) -> C(Id1(Jd0))
     *
     * 0*: Inherited default from J
     * 1: Inherited default from I
     */
    public void test9() throws IOException, ReflectiveOperationException {
        compileSpec("C(Id1(J))");
        assertLinkage("C", LINKAGE_ERROR, "I1");
        recompileSpec("C(Id1(Jd0))", "J");
        assertLinkage("C", "J0", "I1");
    }
    /*
     * C(I(Jd0)) -> C(Id1(Jd0))
     *
     * 0: Inherited default from I (through bridge)
     * 1: Inherited default from I
     */
    public void test10() throws IOException, ReflectiveOperationException {
        compileSpec("C(I(Jd0))");
        assertLinkage("C", "J0", LINKAGE_ERROR);
        recompileSpec("C(Id1(Jd0))", "I");
        assertLinkage("C", "I1", "I1");
    }
    /*
     * C(I(J)) -> C(Id1(Jd0))
     *
     * 0: Inherited default from I (through bridge)
     * 1: Inherited default from I
     */
    public void test11() throws IOException, ReflectiveOperationException {
        compileSpec("C(I(J))");
        assertLinkage("C", LINKAGE_ERROR, LINKAGE_ERROR);
        recompileSpec("C(Id1(Jd0))", "I", "J");
        assertLinkage("C", "I1", "I1");
    }
    /*
     * Cc2(B(Ac0)) -> Cc2(Bc1(Ac0))
     *
     * 0: Declared in C (through bridge)
     * 1*: Inherited from B
     * 2: Declared in C
     */
    public void test12() throws IOException, ReflectiveOperationException {
        compileSpec("Cc2(B(Ac0))");
        assertLinkage("C", "C2", LINKAGE_ERROR, "C2");
        recompileSpec("Cc2(Bc1(Ac0))", "B");
        assertLinkage("C", "C2", "B1", "C2");
    }
    /*
     * Cc2(B(Aa0)) -> Cc2(Bc1(Aa0))
     *
     * 0: Bridge in C (through bridge)
     * 1*: Inherited from B
     * 2: Declared in C
     */
    public void test13() throws IOException, ReflectiveOperationException {
        compileSpec("Cc2(B(Aa0))");
        assertLinkage("C", "C2", LINKAGE_ERROR, "C2");
        recompileSpec("Cc2(Bc1(Aa0))", "B");
        assertLinkage("C", "C2", "B1", "C2");
    }
    /*
     * Cc2(Bc1(A)) -> Cc2(Bc1(Ac0))
     *
     * 0*: Inherited from A
     * 1: Declared in C (through bridge)
     * 2: Declared in C
     */
    public void test14() throws IOException, ReflectiveOperationException {
        compileSpec("Cc2(Bc1(A))");
        assertLinkage("C", LINKAGE_ERROR, "C2", "C2");
        recompileSpec("Cc2(Bc1(Ac0))", "A");
        assertLinkage("C", "A0", "C2", "C2");
    }
    /*
     * Cc2(Ba1(A)) -> Cc2(Ba1(Ac0))
     *
     * 0*: Inherited from A
     * 1: Declared in C (through bridge)
     * 2: Declared in C
     */
    public void test15() throws IOException, ReflectiveOperationException {
        compileSpec("Cc2(Ba1(A))");
        assertLinkage("C", LINKAGE_ERROR, "C2", "C2");
        recompileSpec("Cc2(Ba1(Ac0))", "A");
        assertLinkage("C", "A0", "C2", "C2");
    }
    /*
     * Cc2(B(A)) -> Cc2(Bc1(Ac0))
     *
     * 0*: Inherited from B (through bridge)
     * 1*: Inherited from B
     * 2: Declared in C
     */
    public void test16() throws IOException, ReflectiveOperationException {
        compileSpec("Cc2(B(A))");
        assertLinkage("C", LINKAGE_ERROR, LINKAGE_ERROR, "C2");
        recompileSpec("Cc2(Bc1(Ac0))", "B", "A");
        assertLinkage("C", "B1", "B1", "C2");
    }
    /*
     * Cc2(A(Id0)) -> Cc2(Ac1(Id0))
     *
     * 0: Declared in C (through bridge)
     * 1*: Inherited from A
     * 2: Declared in C
     */
    public void test17() throws IOException, ReflectiveOperationException {
        compileSpec("Cc2(A(Id0))");
        assertLinkage("C", "C2", LINKAGE_ERROR, "C2");
        recompileSpec("Cc2(Ac1(Id0))", "A");
        assertLinkage("C", "C2", "A1", "C2");
    }
    /*
     * Cc2(A(Ia0)) -> Cc2(Ac1(Ia0))
     *
     * 0: Declared in C (through bridge)
     * 1*: Inherited from A
     * 2: Declared in C
     */
    public void test18() throws IOException, ReflectiveOperationException {
        compileSpec("Cc2(A(Ia0))");
        assertLinkage("C", "C2", LINKAGE_ERROR, "C2");
        recompileSpec("Cc2(Ac1(Ia0))", "A");
        assertLinkage("C", "C2", "A1", "C2");
    }
    /*
     * Cc2(Ac1(I)) -> Cc2(Ac1(Id0))
     *
     * 0*: Inherited from I
     * 1: Declared in C (through bridge)
     * 2: Declared in C
     */
    public void test19() throws IOException, ReflectiveOperationException {
        compileSpec("Cc2(Ac1(I))");
        assertLinkage("C", LINKAGE_ERROR, "C2", "C2");
        recompileSpec("Cc2(Ac1(Id0))", "I");
        assertLinkage("C", "I0", "C2", "C2");
    }
    /*
     * Cc2(Aa1(I)) -> Cc2(Aa1(Id0))
     *
     * 0*: Inherited from I
     * 1: Declared in C (through bridge)
     * 2: Declared in C
     */
    public void test20() throws IOException, ReflectiveOperationException {
        compileSpec("Cc2(Aa1(I))");
        assertLinkage("C", LINKAGE_ERROR, "C2", "C2");
        recompileSpec("Cc2(Aa1(Id0))", "I");
        assertLinkage("C", "I0", "C2", "C2");
    }
    /*
     * Cc2(A(I)) -> Cc2(Ac1(Id0))
     *
     * 0*: Inherited from A (through bridge)
     * 1*: Inherited from A
     * 2: Declared in C
     */
    public void test21() throws IOException, ReflectiveOperationException {
        compileSpec("Cc2(A(I))");
        assertLinkage("C", LINKAGE_ERROR, LINKAGE_ERROR, "C2");
        recompileSpec("Cc2(Ac1(Id0))", "A", "I");
        assertLinkage("C", "A1", "A1", "C2");
    }
    /*
     * Cc2(J(Id0)) -> Cc2(Jd1(Id0))
     *
     * 0: Declared in C (through bridge)
     * 1*: Inherited default from J
     * 2: Declared in C
     */
    public void test22() throws IOException, ReflectiveOperationException {
        compileSpec("Cc2(J(Id0))");
        assertLinkage("C", "C2", LINKAGE_ERROR, "C2");
        recompileSpec("Cc2(Jd1(Id0))", "J");
        assertLinkage("C", "C2", "J1", "C2");
    }
    /*
     * Cc2(J(Ia0)) -> Cc2(Jd1(Ia0))
     *
     * 0: Declared in C (through bridge)
     * 1*: Inherited default from J
     * 2: Declared in C
     */
    public void test23() throws IOException, ReflectiveOperationException {
        compileSpec("Cc2(J(Ia0))");
        assertLinkage("C", "C2", LINKAGE_ERROR, "C2");
        recompileSpec("Cc2(Jd1(Ia0))", "J");
        assertLinkage("C", "C2", "J1", "C2");
    }
    /*
     * Cc2(Jd1(I)) -> Cc2(Jd1(Id0))
     *
     * 0*: Inherited default from I
     * 1: Declared in C (through bridge)
     * 2: Declared in C
     */
    public void test24() throws IOException, ReflectiveOperationException {
        compileSpec("Cc2(Jd1(I))");
        assertLinkage("C", LINKAGE_ERROR, "C2", "C2");
        recompileSpec("Cc2(Jd1(Id0))", "I");
        assertLinkage("C", "I0", "C2", "C2");
    }
    /*
     * Cc2(Ja1(I)) -> Cc2(Ja1(Id0))
     *
     * 0*: Inherited default from I
     * 1: Declared in C (through bridge)
     * 2: Declared in C
     */
    public void test25() throws IOException, ReflectiveOperationException {
        compileSpec("Cc2(Ja1(I))");
        assertLinkage("C", LINKAGE_ERROR, "C2", "C2");
        recompileSpec("Cc2(Ja1(Id0))", "I");
        assertLinkage("C", "I0", "C2", "C2");
    }
    /*
     * Cc2(J(I)) -> Cc2(Jd1(Id0))
     *
     * 0*: Inherited default from J (through bridge)
     * 1*: Inherited default from J
     * 2: Declared in C
     */
    public void test26() throws IOException, ReflectiveOperationException {
        compileSpec("Cc2(J(I))");
        assertLinkage("C", LINKAGE_ERROR, LINKAGE_ERROR, "C2");
        recompileSpec("Cc2(Jd1(Id0))", "J", "I");
        assertLinkage("C", "J1", "J1", "C2");
    }
    /*
     * C(Ac1, I) -> C(Ac1, Id0)
     *
     * 0*: Inherited default from I
     * 1: Inherited from A
     */
    public void test27() throws IOException, ReflectiveOperationException {
        compileSpec("C(Ac1,I)");
        assertLinkage("C", LINKAGE_ERROR, "A1");
        recompileSpec("C(Ac1,Id0)", "I");
        assertLinkage("C", "I0", "A1");
    }
    /*
     * C(A, Id0) -> C(Ac1, Id0)
     *
     * 0*: Inherited default from I
     * 1: Inherited from A
     */
    public void test28() throws IOException, ReflectiveOperationException {
        compileSpec("C(A,Id0)");
        assertLinkage("C", "I0", LINKAGE_ERROR);
        recompileSpec("C(Ac1,Id0)", "A");
        assertLinkage("C", "I0", "A1");
    }
    /*
     * C(A, I) -> C(Ac1, Id0)
     *
     * 0*: Inherited default from I
     * 1: Inherited from A
     */
    public void test29() throws IOException, ReflectiveOperationException {
        compileSpec("C(A,I)");
        assertLinkage("C", LINKAGE_ERROR, LINKAGE_ERROR);
        recompileSpec("C(Ac1,Id0)", "A", "I");
        assertLinkage("C", "I0", "A1");
    }
    /*
     * Cc2(Ac1, I) -> Cc2(Ac1, Id0)
     *
     * 0*: Inherited default from I
     * 1: Declared in C (through bridge)
     * 2: Declared in C
     */
    public void test30() throws IOException, ReflectiveOperationException {
        compileSpec("Cc2(Ac1,I)");
        assertLinkage("C", LINKAGE_ERROR, "C2", "C2");
        recompileSpec("Cc2(Ac1,Id0)", "I");
        assertLinkage("C", "I0", "C2", "C2");
    }
    /*
     * Cc2(Aa1, I) -> Cc2(Aa1, Id0)
     *
     * 0*: Inherited default from I
     * 1: Declared in C (through bridge)
     * 2: Declared in C
     */
    public void test31() throws IOException, ReflectiveOperationException {
        compileSpec("Cc2(Aa1,I)");
        assertLinkage("C", LINKAGE_ERROR, "C2", "C2");
        recompileSpec("Cc2(Aa1,Id0)", "I");
        assertLinkage("C", "I0", "C2", "C2");
    }
    /*
     * Cc2(A, Id0) -> Cc2(Ac1, Id0)
     *
     * 0: Declared in C (through bridge)
     * 1*: Inherited from A
     * 2: Declared in C
     */
    public void test32() throws IOException, ReflectiveOperationException {
        compileSpec("Cc2(A,Id0)");
        assertLinkage("C", "C2", LINKAGE_ERROR, "C2");
        recompileSpec("Cc2(Ac1,Id0)", "A");
        assertLinkage("C", "C2", "A1", "C2");
    }
    /*
     * Cc2(A, Ia0) -> Cc2(Ac1, Ia0)
     *
     * 0: Declared in C (through bridge)
     * 1*: Inherited from A
     * 2: Declared in C
     */
    public void test33() throws IOException, ReflectiveOperationException {
        compileSpec("Cc2(A,Ia0)");
        assertLinkage("C", "C2", LINKAGE_ERROR, "C2");
        recompileSpec("Cc2(Ac1,Ia0)", "A");
        assertLinkage("C", "C2", "A1", "C2");
    }
    /*
     * Cc2(A, I) -> Cc2(Ac1, Id0)
     *
     * (comment fixed: slots 0 and 1 were listed swapped relative to the
     * asserted linkage below, which resolves slot 0 to I0 and slot 1 to A1)
     * 0*: Inherited default from I
     * 1*: Inherited from A
     * 2: Declared in C
     */
    public void test34() throws IOException, ReflectiveOperationException {
        compileSpec("Cc2(A,I)");
        assertLinkage("C", LINKAGE_ERROR, LINKAGE_ERROR, "C2");
        recompileSpec("Cc2(Ac1,Id0)", "A", "I");
        assertLinkage("C", "I0", "A1", "C2");
    }
    /*
     * Cc2(Id0, J) -> Cc2(Id0, Jd1)
     *
     * 0: Declared in C (through bridge)
     * 1*: Inherited default from J
     * 2: Declared in C
     */
    public void test35() throws IOException, ReflectiveOperationException {
        compileSpec("Cc2(Id0,J)");
        assertLinkage("C", "C2", LINKAGE_ERROR, "C2");
        recompileSpec("Cc2(Id0,Jd1)", "J");
        assertLinkage("C", "C2", "J1", "C2");
    }
    /*
     * Cc2(Ia0, J) -> Cc2(Ia0, Jd1)
     *
     * 0: Declared in C (through bridge)
     * 1*: Inherited default from J
     * 2: Declared in C
     */
    public void test36() throws IOException, ReflectiveOperationException {
        compileSpec("Cc2(Ia0,J)");
        assertLinkage("C", "C2", LINKAGE_ERROR, "C2");
        recompileSpec("Cc2(Ia0,Jd1)", "J");
        assertLinkage("C", "C2", "J1", "C2");
    }
    /*
     * Cc2(I, J) -> Cc2(Id0, Jd1)
     *
     * 0*: Inherited default from I
     * 1*: Inherited default from J
     * 2: Declared in C
     */
    public void test37() throws IOException, ReflectiveOperationException {
        compileSpec("Cc2(I,J)");
        assertLinkage("C", LINKAGE_ERROR, LINKAGE_ERROR, "C2");
        recompileSpec("Cc2(Id0,Jd1)", "I", "J");
        assertLinkage("C", "I0", "J1", "C2");
    }
    /*
     * C(A(Id0), J(Id0)) -> C(Ac1(Id0), J(Id0))
     *
     * 0: Inherited default from I
     * 0*: Inherited from A (through bridge)
     * 1*: Inherited from A
     */
    public void test38() throws IOException, ReflectiveOperationException {
        compileSpec("C(A(Id0),J(Id0))");
        assertLinkage("C", "I0", LINKAGE_ERROR);
        recompileSpec("C(Ac1(Id0),J(Id0))", "A");
        assertLinkage("C", "A1", "A1");
    }
    /*
     * C(A(Id0), J(Id0)) -> C(A(Id0), Jd1(Id0))
     *
     * 0: Inherited default from I
     * 0: Inherited default from J (through bridge)
     * 1*: Inherited default from J
     */
    public void test39() throws IOException, ReflectiveOperationException {
        compileSpec("C(A(Id0),J(Id0))");
        assertLinkage("C", "I0", LINKAGE_ERROR);
        recompileSpec("C(A(Id0),Jd1(Id0))", "J");
        assertLinkage("C", "J1", "J1");
    }
    /*
     * C(A(Id0), J(Id0)) -> C(Ac2(Id0), Jd1(Id0))
     *
     * 0: Inherited default from I
     * 0*: Inherited from A (new bridge in A beats new bridge in J)
     * 1*: Inherited default from J
     * 2: Inherited from A
     */
    public void test40() throws IOException, ReflectiveOperationException {
        compileSpec("C(A(Id0),J(Id0))");
        assertLinkage("C", "I0", LINKAGE_ERROR);
        recompileSpec("C(Ac2(Id0),Jd1(Id0))", "A", "J");
        assertLinkage("C", "A2", "J1", "A2");
    }
    /*
     * C(J(Id0), K(Id0)) -> C(Jd1(Id0), K(Id0))
     *
     * 0: Inherited from I
     * 0*: Inherited default from J (through bridge)
     * 1: Inherited default from J
     */
    public void test41() throws IOException, ReflectiveOperationException {
        compileSpec("C(J(Id0),K(Id0))");
        assertLinkage("C", "I0", LINKAGE_ERROR);
        recompileSpec("C(Jd1(Id0),K(Id0))", "J");
        assertLinkage("C", "J1", "J1");
    }
    /*
     * C(Ac2(Id0), J(Id0)) -> C(Ac2(Id0), Jd1(Id0))
     *
     * 0: Inherited from A (bridge in A beats new bridge in J)
     * 1*: Inherited default from J
     * 2: Inherited from A
     */
    public void test42() throws IOException, ReflectiveOperationException {
        compileSpec("C(Ac2(Id0),J(Id0))");
        assertLinkage("C", "A2", LINKAGE_ERROR, "A2");
        recompileSpec("C(Ac2(Id0),Jd1(Id0))", "J");
        assertLinkage("C", "A2", "J1", "A2");
    }
    /*
     * C(Ac2(Ia0), J(Ia0)) -> C(Ac2(Ia0), Jd1(Ia0))
     *
     * 0: Inherited from A (bridge in A beats new bridge in J)
     * 1*: Inherited default from J
     * 2: Inherited from A
     */
    public void test43() throws IOException, ReflectiveOperationException {
        compileSpec("C(Ac2(Ia0),J(Ia0))");
        assertLinkage("C", "A2", LINKAGE_ERROR, "A2");
        recompileSpec("C(Ac2(Ia0),Jd1(Ia0))", "J");
        assertLinkage("C", "A2", "J1", "A2");
    }
    /*
     * C(A(Id0), Jd1(Id0)) -> C(Ac2(Id0), Jd1(Id0))
     *
     * 0: Inherited from J
     * 0*: Inherited from A (new bridge in A beats bridge in J)
     * 1: Inherited default from J
     * 2*: Inherited from A
     */
    public void test44() throws IOException, ReflectiveOperationException {
        compileSpec("C(A(Id0),Jd1(Id0))");
        assertLinkage("C", "J1", "J1", LINKAGE_ERROR);
        recompileSpec("C(Ac2(Id0),Jd1(Id0))", "A");
        assertLinkage("C", "A2", "J1", "A2");
    }
    /*
     * C(A(Ia0), Jd1(Ia0)) -> C(Ac2(Ia0), Jd1(Ia0))
     *
     * 0: Inherited from J
     * 0*: Inherited from A (new bridge in A beats bridge in J)
     * 1: Inherited default from J
     * 2*: Inherited from A
     */
    public void test45() throws IOException, ReflectiveOperationException {
        compileSpec("C(A(Ia0),Jd1(Ia0))");
        assertLinkage("C", "J1", "J1", LINKAGE_ERROR);
        recompileSpec("C(Ac2(Ia0),Jd1(Ia0))", "A");
        assertLinkage("C", "A2", "J1", "A2");
    }
    /*
     * Cc2(A(Id0), J(Id0)) -> Cc2(Ac1(Id0), J(Id0))
     *
     * 0: Declared in C (through bridge)
     * 1*: Inherited from A
     * 2: Declared in C
     */
    public void test46() throws IOException, ReflectiveOperationException {
        compileSpec("Cc2(A(Id0),J(Id0))");
        assertLinkage("C", "C2", LINKAGE_ERROR, "C2");
        recompileSpec("Cc2(Ac1(Id0),J(Id0))", "A");
        assertLinkage("C", "C2", "A1", "C2");
    }
    /*
     * Cc2(A(Ia0), J(Ia0)) -> Cc2(Ac1(Ia0), J(Ia0))
     *
     * 0: Declared in C (through bridge)
     * 1*: Inherited from A
     * 2: Declared in C
     */
    public void test47() throws IOException, ReflectiveOperationException {
        compileSpec("Cc2(A(Ia0),J(Ia0))");
        assertLinkage("C", "C2", LINKAGE_ERROR, "C2");
        recompileSpec("Cc2(Ac1(Ia0),J(Ia0))", "A");
        assertLinkage("C", "C2", "A1", "C2");
    }
    /*
     * Cc2(A(Id0), J(Id0)) -> Cc2(A(Id0), Jd1(Id0))
     *
     * 0: Declared in C (through bridge)
     * 1*: Inherited default from J
     * 2: Declared in C
     */
    public void test48() throws IOException, ReflectiveOperationException {
        compileSpec("Cc2(A(Id0),J(Id0))");
        assertLinkage("C", "C2", LINKAGE_ERROR, "C2");
        recompileSpec("Cc2(A(Id0),Jd1(Id0))", "J");
        assertLinkage("C", "C2", "J1", "C2");
    }
    /*
     * Cc2(A(Ia0), J(Ia0)) -> Cc2(A(Ia0), Jd1(Ia0))
     *
     * 0: Declared in C (through bridge)
     * 1*: Inherited default from J
     * 2: Declared in C
     */
    public void test49() throws IOException, ReflectiveOperationException {
        compileSpec("Cc2(A(Ia0),J(Ia0))");
        assertLinkage("C", "C2", LINKAGE_ERROR, "C2");
        recompileSpec("Cc2(A(Ia0),Jd1(Ia0))", "J");
        assertLinkage("C", "C2", "J1", "C2");
    }
    /*
     * Cc3(A(Id0), J(Id0)) -> Cc3(Ac1(Id0), Jd2(Id0))
     *
     * 0: Bridge in C
     * 1*: Inherited from A
     * 2*: Inherited default from J
     * 3: Declared in C
     */
    public void test50() throws IOException, ReflectiveOperationException {
        compileSpec("Cc3(A(Id0),J(Id0))");
        assertLinkage("C", "C3", LINKAGE_ERROR, LINKAGE_ERROR, "C3");
        recompileSpec("Cc3(Ac1(Id0),Jd2(Id0))", "A", "J");
        assertLinkage("C", "C3", "A1", "J2", "C3");
    }
    /*
     * Cc3(A(Ia0), J(Ia0)) -> Cc3(Ac1(Ia0), Jd2(Ia0))
     *
     * 0: Bridge in C
     * 1*: Inherited from A
     * 2*: Inherited default from J
     * 3: Declared in C
     */
    public void test51() throws IOException, ReflectiveOperationException {
        compileSpec("Cc3(A(Ia0),J(Ia0))");
        assertLinkage("C", "C3", LINKAGE_ERROR, LINKAGE_ERROR, "C3");
        recompileSpec("Cc3(Ac1(Ia0),Jd2(Ia0))", "A", "J");
        assertLinkage("C", "C3", "A1", "J2", "C3");
    }
    /*
     * Cc2(J(Id0), K(Id0)) -> Cc2(Jd1(Id0), K(Id0))
     *
     * 0: Declared in C (through bridge)
     * 1*: Inherited default from J
     * 2: Declared in C
     */
    public void test52() throws IOException, ReflectiveOperationException {
        compileSpec("Cc2(J(Id0),K(Id0))");
        assertLinkage("C", "C2", LINKAGE_ERROR, "C2");
        recompileSpec("Cc2(Jd1(Id0),K(Id0))", "J");
        assertLinkage("C", "C2", "J1", "C2");
    }
    /*
     * Cc2(J(Ia0), K(Ia0)) -> Cc2(Jd1(Ia0), K(Ia0))
     *
     * 0: Declared in C (through bridge)
     * 1*: Inherited default from J
     * 2: Declared in C
     */
    public void test53() throws IOException, ReflectiveOperationException {
        compileSpec("Cc2(J(Ia0),K(Ia0))");
        assertLinkage("C", "C2", LINKAGE_ERROR, "C2");
        recompileSpec("Cc2(Jd1(Ia0),K(Ia0))", "J");
        assertLinkage("C", "C2", "J1", "C2");
    }
    /*
     * Cc3(J(Id0), K(Id0)) -> Cc3(Jd1(Id0), Kd2(Id0))
     *
     * 0: Declared in C (through bridge)
     * 1*: Inherited default from J
     * 2*: Inherited default from K
     * 3: Declared in C
     */
    public void test54() throws IOException, ReflectiveOperationException {
        compileSpec("Cc3(J(Id0),K(Id0))");
        assertLinkage("C", "C3", LINKAGE_ERROR, LINKAGE_ERROR, "C3");
        recompileSpec("Cc3(Jd1(Id0),Kd2(Id0))", "J", "K");
        assertLinkage("C", "C3", "J1", "K2", "C3");
    }
    /*
     * Cc3(J(Ia0), K(Ia0)) -> Cc3(Jd1(Ia0), Kd2(Ia0))
     *
     * 0: Declared in C (through bridge)
     * 1*: Inherited default from J
     * 2*: Inherited default from K
     * 3: Declared in C
     */
    public void test55() throws IOException, ReflectiveOperationException {
        compileSpec("Cc3(J(Ia0),K(Ia0))");
        assertLinkage("C", "C3", LINKAGE_ERROR, LINKAGE_ERROR, "C3");
        recompileSpec("Cc3(Jd1(Ia0),Kd2(Ia0))", "J", "K");
        assertLinkage("C", "C3", "J1", "K2", "C3");
    }
    /*
     * Cc3(Ac1(Id0), J(Id0)) -> Cc3(Ac1(Id0), Jd2(Id0))
     *
     * 0: Declared in C (through bridge)
     * 1: Declared in C (through bridge)
     * 2*: Inherited default from J
     * 3: Declared in C
     */
    public void test56() throws IOException, ReflectiveOperationException {
        compileSpec("Cc3(Ac1(Id0),J(Id0))");
        assertLinkage("C", "C3", "C3", LINKAGE_ERROR, "C3");
        recompileSpec("Cc3(Ac1(Id0),Jd2(Id0))", "J");
        assertLinkage("C", "C3", "C3", "J2", "C3");
    }
    /*
     * Cc3(Ac1(Ia0), J(Ia0)) -> Cc3(Ac1(Ia0), Jd2(Ia0))
     *
     * 0: Declared in C (through bridge)
     * 1: Declared in C (through bridge)
     * 2*: Inherited default from J
     * 3: Declared in C
     */
    public void test57() throws IOException, ReflectiveOperationException {
        compileSpec("Cc3(Ac1(Ia0),J(Ia0))");
        assertLinkage("C", "C3", "C3", LINKAGE_ERROR, "C3");
        recompileSpec("Cc3(Ac1(Ia0),Jd2(Ia0))", "J");
        assertLinkage("C", "C3", "C3", "J2", "C3");
    }
    /*
     * Cc3(Aa1(Id0), J(Id0)) -> Cc3(Aa1(Id0), Jd2(Id0))
     *
     * 0: Declared in C (through bridge)
     * 1: Declared in C (through bridge)
     * 2*: Inherited default from J
     * 3: Declared in C
     */
    public void test58() throws IOException, ReflectiveOperationException {
        compileSpec("Cc3(Aa1(Id0),J(Id0))");
        assertLinkage("C", "C3", "C3", LINKAGE_ERROR, "C3");
        recompileSpec("Cc3(Aa1(Id0),Jd2(Id0))", "J");
        assertLinkage("C", "C3", "C3", "J2", "C3");
    }
    /*
     * Cc3(Aa1(Ia0), J(Ia0)) -> Cc3(Aa1(Ia0), Jd2(Ia0))
     *
     * 0: Declared in C (through bridge)
     * 1: Declared in C (through bridge)
     * 2*: Inherited default from J
     * 3: Declared in C
     */
    public void test59() throws IOException, ReflectiveOperationException {
        compileSpec("Cc3(Aa1(Ia0),J(Ia0))");
        assertLinkage("C", "C3", "C3", LINKAGE_ERROR, "C3");
        recompileSpec("Cc3(Aa1(Ia0),Jd2(Ia0))", "J");
        assertLinkage("C", "C3", "C3", "J2", "C3");
    }
    /*
     * Cc3(A(Id0), Jd2(Id0)) -> Cc3(Ac1(Id0), Jd2(Id0))
     *
     * 0: Declared in C (through bridge)
     * 1*: Inherited from A
     * 2: Declared in C (through bridge)
     * 3: Declared in C
     */
    public void test60() throws IOException, ReflectiveOperationException {
        compileSpec("Cc3(A(Id0),Jd2(Id0))");
        assertLinkage("C", "C3", LINKAGE_ERROR, "C3", "C3");
        recompileSpec("Cc3(Ac1(Id0),Jd2(Id0))", "A");
        assertLinkage("C", "C3", "A1", "C3", "C3");
    }
    /*
     * Cc3(A(Ia0), Jd2(Ia0)) -> Cc3(Ac1(Ia0), Jd2(Ia0))
     * (comment fixed: previously said Im0, but the spec strings below use Ia0)
     *
     * 0: Declared in C (through bridge)
     * 1*: Inherited from A
     * 2: Declared in C (through bridge)
     * 3: Declared in C
     */
    public void test61() throws IOException, ReflectiveOperationException {
        compileSpec("Cc3(A(Ia0),Jd2(Ia0))");
        assertLinkage("C", "C3", LINKAGE_ERROR, "C3", "C3");
        recompileSpec("Cc3(Ac1(Ia0),Jd2(Ia0))", "A");
        assertLinkage("C", "C3", "A1", "C3", "C3");
    }
    /*
     * Cc3(A(Id0), Ja2(Id0)) -> Cc3(Ac1(Id0), Ja2(Id0))
     * (comment fixed: previously said Im0, but the spec strings below use Id0)
     *
     * 0: Declared in C (through bridge)
     * 1*: Inherited from A
     * 2: Declared in C (through bridge)
     * 3: Declared in C
     */
    public void test62() throws IOException, ReflectiveOperationException {
        compileSpec("Cc3(A(Id0),Ja2(Id0))");
        assertLinkage("C", "C3", LINKAGE_ERROR, "C3", "C3");
        recompileSpec("Cc3(Ac1(Id0),Ja2(Id0))", "A");
        assertLinkage("C", "C3", "A1", "C3", "C3");
    }
    /*
     * Cc3(A(Ia0), Ja2(Ia0)) -> Cc3(Ac1(Ia0), Ja2(Ia0))
     * (comment fixed: previously said Im0, but the spec strings below use Ia0)
     *
     * 0: Declared in C (through bridge)
     * 1*: Inherited from A
     * 2: Declared in C (through bridge)
     * 3: Declared in C
     */
    public void test63() throws IOException, ReflectiveOperationException {
        compileSpec("Cc3(A(Ia0),Ja2(Ia0))");
        assertLinkage("C", "C3", LINKAGE_ERROR, "C3", "C3");
        recompileSpec("Cc3(Ac1(Ia0),Ja2(Ia0))", "A");
        assertLinkage("C", "C3", "A1", "C3", "C3");
    }
    /*
     * Cc3(Jd1(Id0), K(Id0)) -> Cc3(Jd1(Id0), Kd2(Id0))
     *
     * 0: Declared in C (through bridge)
     * 1: Declared in C (through bridge)
     * 2*: Inherited default from K
     * 3: Declared in C
     */
    public void test64() throws IOException, ReflectiveOperationException {
        compileSpec("Cc3(Jd1(Id0),K(Id0))");
        assertLinkage("C", "C3", "C3", LINKAGE_ERROR, "C3");
        recompileSpec("Cc3(Jd1(Id0),Kd2(Id0))", "K");
        assertLinkage("C", "C3", "C3", "K2", "C3");
    }
    /*
     * Cc3(Jd1(Ia0), K(Ia0)) -> Cc3(Jd1(Ia0), Kd2(Ia0))
     *
     * 0: Declared in C (through bridge)
     * 1: Declared in C (through bridge)
     * 2*: Inherited default from K
     * 3: Declared in C
     */
    public void test65() throws IOException, ReflectiveOperationException {
        compileSpec("Cc3(Jd1(Ia0),K(Ia0))");
        assertLinkage("C", "C3", "C3", LINKAGE_ERROR, "C3");
        recompileSpec("Cc3(Jd1(Ia0),Kd2(Ia0))", "K");
        assertLinkage("C", "C3", "C3", "K2", "C3");
    }
    /*
     * Cc3(Jd1(Id0), K(Id0)) -> Cc3(Jd1(Id0), Kd2(Id0))
     *
     * NOTE(review): the original header said Ja1, but the spec strings below
     * use Jd1, which makes this test identical to test64. Possibly the spec
     * (not the comment) was meant to say Ja1 -- confirm intent before changing.
     *
     * 0: Declared in C (through bridge)
     * 1: Declared in C (through bridge)
     * 2*: Inherited default from K
     * 3: Declared in C
     */
    public void test66() throws IOException, ReflectiveOperationException {
        compileSpec("Cc3(Jd1(Id0),K(Id0))");
        assertLinkage("C", "C3", "C3", LINKAGE_ERROR, "C3");
        recompileSpec("Cc3(Jd1(Id0),Kd2(Id0))", "K");
        assertLinkage("C", "C3", "C3", "K2", "C3");
    }
    /*
     * Cc3(Jd1(Ia0), K(Ia0)) -> Cc3(Jd1(Ia0), Kd2(Ia0))
     *
     * NOTE(review): the original header said Ja1, but the spec strings below
     * use Jd1, which makes this test identical to test65. Possibly the spec
     * (not the comment) was meant to say Ja1 -- confirm intent before changing.
     *
     * 0: Declared in C (through bridge)
     * 1: Declared in C (through bridge)
     * 2*: Inherited default from K
     * 3: Declared in C
     */
    public void test67() throws IOException, ReflectiveOperationException {
        compileSpec("Cc3(Jd1(Ia0),K(Ia0))");
        assertLinkage("C", "C3", "C3", LINKAGE_ERROR, "C3");
        recompileSpec("Cc3(Jd1(Ia0),Kd2(Ia0))", "K");
        assertLinkage("C", "C3", "C3", "K2", "C3");
    }
    // Dan's set A
    // (single-compilation linkage checks; the tests with diagnostic-key
    // arguments assert that the spec is expected to fail to compile)
    public void testA1() throws IOException, ReflectiveOperationException {
        compileSpec("C(Id0)");
        assertLinkage("C", "I0");
    }
    public void testA2() throws IOException, ReflectiveOperationException {
        compileSpec("C(A(Id0))");
        assertLinkage("C", "I0");
    }
    public void testA3() throws IOException, ReflectiveOperationException {
        compileSpec("C(A(Id0),J)");
        assertLinkage("C", "I0");
    }
    public void testA4() throws IOException, ReflectiveOperationException {
        compileSpec("D(C(Id0),Jd0(Id0))");
        assertLinkage("D", "J0");
        assertLinkage("C", "I0");
    }
    public void testA5() throws IOException, ReflectiveOperationException {
        compileSpec("C(A(Id0),Jd0)",
                    "compiler.err.types.incompatible.unrelated.defaults");
    }
    public void testA6() throws IOException, ReflectiveOperationException {
        compileSpec("C(A(Ia0,Jd0))",
                    "compiler.err.does.not.override.abstract",
                    "compiler.err.types.incompatible.abstract.default");
    }
    public void testA7() throws IOException, ReflectiveOperationException {
        compileSpec("C(A(Id0,Jd0))",
                    "compiler.err.types.incompatible.unrelated.defaults");
    }
    public void testA8() throws IOException, ReflectiveOperationException {
        compileSpec("C(A(Ia0),J)", "compiler.err.does.not.override.abstract");
    }
    public void testA9() throws IOException, ReflectiveOperationException {
        compileSpec("C(Ac0(Id0))");
        assertLinkage("C", "A0");
    }
    public void testA10() throws IOException, ReflectiveOperationException {
        compileSpec("C(Aa0,I)", "compiler.err.does.not.override.abstract");
    }
    public void testA11() throws IOException, ReflectiveOperationException {
        compileSpec("C(Ac0,Id0)");
        assertLinkage("C", "A0");
    }
    // Dan's set B
    /* B1 can't be done, needs a second concrete class D
    public void testB1() throws IOException, ReflectiveOperationException {
        compileSpec("Cc1(Dc0)");
        assertLinkage("C", "C1", "C1");
        assertLinkage("D", "A0", LINKAGE_ERROR);
    }
    */
    public void testB2() throws IOException, ReflectiveOperationException {
        compileSpec("Cc1(Ac0)");
        assertLinkage("C", "C1", "C1");
    }
    //??? B3 seems to suggest that we should create an abstract class
    //public void testB3() throws IOException, ReflectiveOperationException {
    //    compileSpec("Ba1(Cc0)");
    //    assertLinkage("B", "C0", "A1");
    //}
    // B4 needs too many classes
    public void testB5() throws IOException, ReflectiveOperationException {
        compileSpec("Cc1(Aa1(Id0))");
        assertLinkage("C", "C1", "C1");
    }
    public void testB6() throws IOException, ReflectiveOperationException {
        compileSpec("C(Ac1(Id0))");
        assertLinkage("C", "A1", "A1");
    }
    public void testB7() throws IOException, ReflectiveOperationException {
        compileSpec("Cc1(Id0)");
        assertLinkage("C", "C1", "C1");
    }
    public void testB8() throws IOException, ReflectiveOperationException {
        compileSpec("C(Jd1(Id0))");
        assertLinkage("C", "J1", "J1");
    }
    // B9 needs too many classes
    // The rest of Dan's tests need generics
}
|
apache/hudi | 35,770 | hudi-hadoop-common/src/test/java/org/apache/hudi/common/table/TestTimelineUtils.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hudi.common.table;
import org.apache.hudi.avro.model.HoodieCleanMetadata;
import org.apache.hudi.avro.model.HoodieCleanPartitionMetadata;
import org.apache.hudi.avro.model.HoodieRestoreMetadata;
import org.apache.hudi.avro.model.HoodieRollbackMetadata;
import org.apache.hudi.common.HoodieRollbackStat;
import org.apache.hudi.common.fs.FSUtils;
import org.apache.hudi.common.model.HoodieCleaningPolicy;
import org.apache.hudi.common.model.HoodieReplaceCommitMetadata;
import org.apache.hudi.common.model.HoodieWriteStat;
import org.apache.hudi.common.model.WriteOperationType;
import org.apache.hudi.common.table.timeline.HoodieActiveTimeline;
import org.apache.hudi.common.table.timeline.HoodieArchivedTimeline;
import org.apache.hudi.common.table.timeline.HoodieInstant;
import org.apache.hudi.common.table.timeline.HoodieTimeline;
import org.apache.hudi.common.table.timeline.TimelineMetadataUtils;
import org.apache.hudi.common.table.timeline.TimelineUtils;
import org.apache.hudi.common.table.timeline.versioning.v2.ActiveTimelineV2;
import org.apache.hudi.common.table.timeline.versioning.v2.BaseTimelineV2;
import org.apache.hudi.common.table.timeline.versioning.v2.InstantComparatorV2;
import org.apache.hudi.common.testutils.HoodieCommonTestHarness;
import org.apache.hudi.common.util.CollectionUtils;
import org.apache.hudi.common.util.Option;
import org.apache.hudi.storage.StoragePath;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;
import java.io.IOException;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import static org.apache.hudi.common.table.timeline.HoodieInstant.State.COMPLETED;
import static org.apache.hudi.common.table.timeline.HoodieInstant.State.INFLIGHT;
import static org.apache.hudi.common.table.timeline.HoodieInstant.State.REQUESTED;
import static org.apache.hudi.common.table.timeline.HoodieTimeline.CLEAN_ACTION;
import static org.apache.hudi.common.table.timeline.HoodieTimeline.CLUSTERING_ACTION;
import static org.apache.hudi.common.table.timeline.HoodieTimeline.COMMIT_ACTION;
import static org.apache.hudi.common.table.timeline.HoodieTimeline.COMPACTION_ACTION;
import static org.apache.hudi.common.table.timeline.HoodieTimeline.DELTA_COMMIT_ACTION;
import static org.apache.hudi.common.table.timeline.HoodieTimeline.LOG_COMPACTION_ACTION;
import static org.apache.hudi.common.table.timeline.HoodieTimeline.REPLACE_COMMIT_ACTION;
import static org.apache.hudi.common.table.timeline.HoodieTimeline.ROLLBACK_ACTION;
import static org.apache.hudi.common.table.timeline.HoodieTimeline.SAVEPOINT_ACTION;
import static org.apache.hudi.common.testutils.HoodieTestUtils.INSTANT_GENERATOR;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.argThat;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
/**
* Tests {@link TimelineUtils}.
*/
class TestTimelineUtils extends HoodieCommonTestHarness {
  /** Creates a fresh meta client (and backing temp table directory) before each test. */
  @BeforeEach
  void setUp() throws Exception {
    initMetaClient();
  }
  /** Releases the meta client and removes the temporary table created for the test. */
  @AfterEach
  void tearDown() throws Exception {
    cleanMetaClient();
  }
  /**
   * Verifies that {@code TimelineUtils.getAffectedPartitions} reports both partitions whose file
   * groups were replaced and partitions that received new files, for replace-commit and
   * clustering actions alike ({@code withReplace} selects the action type).
   */
  @ParameterizedTest
  @ValueSource(booleans = {true, false})
  void testGetPartitionsWithReplaceOrClusterCommits(boolean withReplace) throws IOException {
    HoodieActiveTimeline activeTimeline = metaClient.getActiveTimeline();
    HoodieTimeline activeCommitTimeline = activeTimeline.getCommitAndReplaceTimeline();
    assertTrue(activeCommitTimeline.empty());
    String ts1 = "1";
    String replacePartition = "2021/01/01";
    String newFilePartition = "2021/01/02";
    HoodieInstant instant1 = new HoodieInstant(INFLIGHT, withReplace ? HoodieTimeline.REPLACE_COMMIT_ACTION : HoodieTimeline.CLUSTERING_ACTION, ts1,
        InstantComparatorV2.REQUESTED_TIME_BASED_COMPARATOR);
    activeTimeline.createNewInstant(instant1);
    // create replace metadata only with replaced file Ids (no new files created)
    if (withReplace) {
      activeTimeline.saveAsComplete(instant1,
          Option.of(getReplaceCommitMetadata(basePath, ts1, replacePartition, 2,
              newFilePartition, 0, Collections.emptyMap(), WriteOperationType.CLUSTER)));
    } else {
      // clustering instants complete through their dedicated transition API
      activeTimeline.transitionClusterInflightToComplete(true, instant1,
          getReplaceCommitMetadata(basePath, ts1, replacePartition, 2,
              newFilePartition, 0, Collections.emptyMap(), WriteOperationType.CLUSTER));
    }
    metaClient.reloadActiveTimeline();
    List<String> partitions = TimelineUtils.getAffectedPartitions(metaClient.getActiveTimeline().findInstantsAfter("0", 10));
    // only replaced file ids were recorded, so only the replaced partition is affected
    assertEquals(1, partitions.size());
    assertEquals(replacePartition, partitions.get(0));
    String ts2 = "2";
    HoodieInstant instant2 = new HoodieInstant(INFLIGHT, withReplace ? HoodieTimeline.REPLACE_COMMIT_ACTION : HoodieTimeline.CLUSTERING_ACTION, ts2,
        InstantComparatorV2.REQUESTED_TIME_BASED_COMPARATOR);
    activeTimeline.createNewInstant(instant2);
    // second commit: create replace metadata with new files written but no replaced file ids
    if (withReplace) {
      activeTimeline.saveAsComplete(instant2,
          Option.of(getReplaceCommitMetadata(basePath, ts2, replacePartition, 0,
              newFilePartition, 3, Collections.emptyMap(), WriteOperationType.CLUSTER)));
    } else {
      activeTimeline.transitionClusterInflightToComplete(true, instant2,
          getReplaceCommitMetadata(basePath, ts2, replacePartition, 0,
              newFilePartition, 3, Collections.emptyMap(), WriteOperationType.CLUSTER));
    }
    metaClient.reloadActiveTimeline();
    partitions = TimelineUtils.getAffectedPartitions(metaClient.getActiveTimeline().findInstantsAfter("1", 10));
    // only the second commit is after "1"; it touched just the new-file partition
    assertEquals(1, partitions.size());
    assertEquals(newFilePartition, partitions.get(0));
    partitions = TimelineUtils.getAffectedPartitions(metaClient.getActiveTimeline().findInstantsAfter("0", 10));
    // both commits considered: replaced partition plus new-file partition
    assertEquals(2, partitions.size());
    assertTrue(partitions.contains(replacePartition));
    assertTrue(partitions.contains(newFilePartition));
  }
  /**
   * Creates five commits (writing to partitions "1".."5"), each followed by a clean of
   * partition "0", then checks that affected partitions include the cleaned partition while
   * written partitions only include partitions targeted by commit actions.
   */
  @Test
  void testGetPartitions() throws IOException {
    HoodieActiveTimeline activeTimeline = metaClient.getActiveTimeline();
    HoodieTimeline activeCommitTimeline = activeTimeline.getCommitAndReplaceTimeline();
    assertTrue(activeCommitTimeline.empty());
    String olderPartition = "0"; // older partitions that is modified by all cleans
    for (int i = 1; i <= 5; i++) {
      String ts = i + "";
      HoodieInstant instant = new HoodieInstant(INFLIGHT, COMMIT_ACTION, ts, InstantComparatorV2.REQUESTED_TIME_BASED_COMPARATOR);
      activeTimeline.createNewInstant(instant);
      activeTimeline.saveAsComplete(instant, getCommitMetadata(basePath, ts, ts, 2, Collections.emptyMap()));
      // NOTE(review): uses INSTANT_GENERATOR here while sibling tests construct
      // `new HoodieInstant(...)` directly; presumably equivalent — confirm.
      HoodieInstant cleanInstant = INSTANT_GENERATOR.createNewInstant(INFLIGHT, CLEAN_ACTION, ts);
      activeTimeline.createNewInstant(cleanInstant);
      activeTimeline.saveAsComplete(cleanInstant, getCleanMetadata(olderPartition, ts, false));
    }
    metaClient.reloadActiveTimeline();
    // verify modified partitions included cleaned data
    List<String> partitions = TimelineUtils.getAffectedPartitions(metaClient.getActiveTimeline().findInstantsAfter("1", 10));
    assertEquals(5, partitions.size());
    assertEquals(partitions, Arrays.asList("0", "2", "3", "4", "5"));
    partitions = TimelineUtils.getAffectedPartitions(metaClient.getActiveTimeline().findInstantsInRange("1", "4"));
    assertEquals(4, partitions.size());
    assertEquals(partitions, Arrays.asList("0", "2", "3", "4"));
    // verify only commit actions count as written partitions (cleans on "0" excluded)
    partitions = TimelineUtils.getWrittenPartitions(metaClient.getActiveTimeline().findInstantsAfter("1", 10));
    assertEquals(4, partitions.size());
    assertEquals(partitions, Arrays.asList("2", "3", "4", "5"));
    partitions = TimelineUtils.getWrittenPartitions(metaClient.getActiveTimeline().findInstantsInRange("1", "4"));
    assertEquals(3, partitions.size());
    assertEquals(partitions, Arrays.asList("2", "3", "4"));
  }
  /**
   * For a non-partitioned table (empty relative partition path), neither commits nor cleans
   * should surface any partition names as affected.
   */
  @Test
  void testGetPartitionsUnPartitioned() throws IOException {
    HoodieActiveTimeline activeTimeline = metaClient.getActiveTimeline();
    HoodieTimeline activeCommitTimeline = activeTimeline.getCommitAndReplaceTimeline();
    assertTrue(activeCommitTimeline.empty());
    String partitionPath = "";
    for (int i = 1; i <= 5; i++) {
      String ts = i + "";
      HoodieInstant instant = new HoodieInstant(INFLIGHT, COMMIT_ACTION, ts, InstantComparatorV2.REQUESTED_TIME_BASED_COMPARATOR);
      activeTimeline.createNewInstant(instant);
      activeTimeline.saveAsComplete(instant, getCommitMetadata(basePath, partitionPath, ts, 2, Collections.emptyMap()));
      HoodieInstant cleanInstant = new HoodieInstant(INFLIGHT, CLEAN_ACTION, ts, InstantComparatorV2.REQUESTED_TIME_BASED_COMPARATOR);
      activeTimeline.createNewInstant(cleanInstant);
      activeTimeline.saveAsComplete(cleanInstant, getCleanMetadata(partitionPath, ts, false));
    }
    metaClient.reloadActiveTimeline();
    // the empty partition path must not show up as an affected partition
    List<String> partitions = TimelineUtils.getAffectedPartitions(metaClient.getActiveTimeline().findInstantsAfter("1", 10));
    assertTrue(partitions.isEmpty());
    partitions = TimelineUtils.getAffectedPartitions(metaClient.getActiveTimeline().findInstantsInRange("1", "4"));
    assertTrue(partitions.isEmpty());
  }
  /**
   * Verifies that partitions touched by the rollbacks recorded inside restore instants are
   * reported by {@code TimelineUtils.getAffectedPartitions}.
   */
  @Test
  void testRestoreInstants() throws Exception {
    HoodieActiveTimeline activeTimeline = metaClient.getActiveTimeline();
    HoodieTimeline activeCommitTimeline = activeTimeline.getCommitAndReplaceTimeline();
    assertTrue(activeCommitTimeline.empty());
    for (int i = 1; i <= 5; i++) {
      String ts = i + "";
      HoodieInstant instant = new HoodieInstant(INFLIGHT, HoodieTimeline.RESTORE_ACTION, ts, InstantComparatorV2.REQUESTED_TIME_BASED_COMPARATOR);
      activeTimeline.createNewInstant(instant);
      // each restore instant rolls back partition `ts` (see getRestoreMetadata)
      activeTimeline.saveAsComplete(instant, Option.of(getRestoreMetadata(basePath, ts, ts, 2, COMMIT_ACTION)));
    }
    metaClient.reloadActiveTimeline();
    // verify partitions rolled back by restores after the given instant are reported
    List<String> partitions = TimelineUtils.getAffectedPartitions(metaClient.getActiveTimeline().findInstantsAfter("1", 10));
    assertEquals(partitions, Arrays.asList("2", "3", "4", "5"));
    partitions = TimelineUtils.getAffectedPartitions(metaClient.getActiveTimeline().findInstantsInRange("1", "4"));
    assertEquals(partitions, Arrays.asList("2", "3", "4"));
  }
  /**
   * Exercises extra-metadata lookups: the latest value from commits, the clustering-aware
   * variant, and the per-instant listing via {@code getAllExtraMetadataForKey}.
   */
  @Test
  void testGetExtraMetadata() throws Exception {
    String extraMetadataKey = "test_key";
    String extraMetadataValue1 = "test_value1";
    HoodieActiveTimeline activeTimeline = metaClient.getActiveTimeline();
    HoodieTimeline activeCommitTimeline = activeTimeline.getCommitAndReplaceTimeline();
    assertTrue(activeCommitTimeline.empty());
    // no commits yet, so the key resolves to empty
    assertFalse(TimelineUtils.getExtraMetadataFromLatest(metaClient, extraMetadataKey).isPresent());
    String ts = "0";
    HoodieInstant instant = new HoodieInstant(INFLIGHT, COMMIT_ACTION, ts, InstantComparatorV2.REQUESTED_TIME_BASED_COMPARATOR);
    activeTimeline.createNewInstant(instant);
    // commit "0" carries no extra metadata
    activeTimeline.saveAsComplete(instant, getCommitMetadata(basePath, ts, ts, 2, Collections.emptyMap()));
    ts = "1";
    instant = new HoodieInstant(INFLIGHT, COMMIT_ACTION, ts, InstantComparatorV2.REQUESTED_TIME_BASED_COMPARATOR);
    activeTimeline.createNewInstant(instant);
    Map<String, String> extraMetadata = new HashMap<>();
    extraMetadata.put(extraMetadataKey, extraMetadataValue1);
    // commit "1" carries the key with its first value
    activeTimeline.saveAsComplete(instant, getCommitMetadata(basePath, ts, ts, 2, extraMetadata));
    metaClient.reloadActiveTimeline();
    // latest commit ("1") supplies the value; unknown keys resolve to empty
    verifyExtraMetadataLatestValue(extraMetadataKey, extraMetadataValue1, false);
    assertFalse(TimelineUtils.getExtraMetadataFromLatest(metaClient, "unknownKey").isPresent());
    // verify adding clustering commit doesn't change behavior of getExtraMetadataFromLatest
    String ts2 = "2";
    HoodieInstant instant2 = new HoodieInstant(INFLIGHT, HoodieTimeline.CLUSTERING_ACTION, ts2, InstantComparatorV2.REQUESTED_TIME_BASED_COMPARATOR);
    activeTimeline.createNewInstant(instant2);
    String newValueForMetadata = "newValue2";
    extraMetadata.put(extraMetadataKey, newValueForMetadata);
    activeTimeline.transitionClusterInflightToComplete(true, instant2,
        getReplaceCommitMetadata(basePath, ts2, "p2", 0,
            "p2", 3, extraMetadata, WriteOperationType.CLUSTER));
    metaClient.reloadActiveTimeline();
    // plain lookup still sees commit "1"; clustering-aware lookup sees clustering commit "2"
    verifyExtraMetadataLatestValue(extraMetadataKey, extraMetadataValue1, false);
    verifyExtraMetadataLatestValue(extraMetadataKey, newValueForMetadata, true);
    assertFalse(TimelineUtils.getExtraMetadataFromLatest(metaClient, "unknownKey").isPresent());
    // per-instant view: "0" has no value, "1" and "2" carry their respective values
    Map<String, Option<String>> extraMetadataEntries = TimelineUtils.getAllExtraMetadataForKey(metaClient, extraMetadataKey);
    assertEquals(3, extraMetadataEntries.size());
    assertFalse(extraMetadataEntries.get("0").isPresent());
    assertTrue(extraMetadataEntries.get("1").isPresent());
    assertEquals(extraMetadataValue1, extraMetadataEntries.get("1").get());
    assertTrue(extraMetadataEntries.get("2").isPresent());
    assertEquals(newValueForMetadata, extraMetadataEntries.get("2").get());
  }
  /**
   * Verifies {@code getCommitsTimelineAfter} only loads the archived timeline when
   * {@code startTs} precedes the active timeline, and that completion-time ordering can pull in
   * commits that started before {@code startTs} but completed later.
   */
  @Test
  void testGetCommitsTimelineAfter() throws IOException {
    // Should only load active timeline
    String startTs = "010";
    HoodieTableMetaClient mockMetaClient = prepareMetaClient(
        Arrays.asList(
            // inflight instant (no completion time) must be excluded from results
            new HoodieInstant(INFLIGHT, COMMIT_ACTION, "008", null, InstantComparatorV2.REQUESTED_TIME_BASED_COMPARATOR),
            new HoodieInstant(COMPLETED, COMMIT_ACTION, "009", "013", InstantComparatorV2.REQUESTED_TIME_BASED_COMPARATOR),
            new HoodieInstant(COMPLETED, COMMIT_ACTION, "010", "010", InstantComparatorV2.REQUESTED_TIME_BASED_COMPARATOR),
            new HoodieInstant(COMPLETED, COMMIT_ACTION, "011", "011", InstantComparatorV2.REQUESTED_TIME_BASED_COMPARATOR),
            new HoodieInstant(COMPLETED, COMMIT_ACTION, "012", "012", InstantComparatorV2.REQUESTED_TIME_BASED_COMPARATOR)),
        Arrays.asList(new HoodieInstant(COMPLETED, COMMIT_ACTION, "001", "001", InstantComparatorV2.REQUESTED_TIME_BASED_COMPARATOR),
            new HoodieInstant(COMPLETED, COMMIT_ACTION, "002", "002", InstantComparatorV2.REQUESTED_TIME_BASED_COMPARATOR)),
        startTs
    );
    // Commit 009 will be included in result because it has greater commit completion than 010
    verifyTimeline(
        Arrays.asList(
            new HoodieInstant(COMPLETED, COMMIT_ACTION, "009", InstantComparatorV2.REQUESTED_TIME_BASED_COMPARATOR),
            new HoodieInstant(COMPLETED, COMMIT_ACTION, "011", InstantComparatorV2.REQUESTED_TIME_BASED_COMPARATOR),
            new HoodieInstant(COMPLETED, COMMIT_ACTION, "012", InstantComparatorV2.REQUESTED_TIME_BASED_COMPARATOR)),
        TimelineUtils.getCommitsTimelineAfter(mockMetaClient, startTs, Option.of(startTs)));
    // startTs lies within the active timeline, so the archive must not be touched
    verify(mockMetaClient, never()).getArchivedTimeline(any());
    // Should load both archived and active timeline
    startTs = "001";
    mockMetaClient = prepareMetaClient(
        Arrays.asList(
            new HoodieInstant(COMPLETED, ROLLBACK_ACTION, "009", "009", InstantComparatorV2.REQUESTED_TIME_BASED_COMPARATOR),
            new HoodieInstant(COMPLETED, COMMIT_ACTION, "010", "010", InstantComparatorV2.REQUESTED_TIME_BASED_COMPARATOR),
            new HoodieInstant(COMPLETED, COMMIT_ACTION, "011", "011", InstantComparatorV2.REQUESTED_TIME_BASED_COMPARATOR),
            new HoodieInstant(COMPLETED, COMMIT_ACTION, "012", "012", InstantComparatorV2.REQUESTED_TIME_BASED_COMPARATOR)),
        Arrays.asList(new HoodieInstant(COMPLETED, COMMIT_ACTION, "001", "001", InstantComparatorV2.REQUESTED_TIME_BASED_COMPARATOR),
            new HoodieInstant(COMPLETED, COMMIT_ACTION, "002", "002", InstantComparatorV2.REQUESTED_TIME_BASED_COMPARATOR)),
        startTs
    );
    // archived commit "002" (after startTs) appears; the rollback at "009" does not
    verifyTimeline(
        Arrays.asList(
            new HoodieInstant(COMPLETED, COMMIT_ACTION, "002", "002", InstantComparatorV2.REQUESTED_TIME_BASED_COMPARATOR),
            new HoodieInstant(COMPLETED, COMMIT_ACTION, "010", "010", InstantComparatorV2.REQUESTED_TIME_BASED_COMPARATOR),
            new HoodieInstant(COMPLETED, COMMIT_ACTION, "011", "011", InstantComparatorV2.REQUESTED_TIME_BASED_COMPARATOR),
            new HoodieInstant(COMPLETED, COMMIT_ACTION, "012", "012", InstantComparatorV2.REQUESTED_TIME_BASED_COMPARATOR)),
        TimelineUtils.getCommitsTimelineAfter(mockMetaClient, startTs, Option.of(startTs)));
    verify(mockMetaClient, times(1)).getArchivedTimeline(any());
    // Should load both archived and active timeline
    startTs = "005";
    mockMetaClient = prepareMetaClient(
        Arrays.asList(
            new HoodieInstant(COMPLETED, ROLLBACK_ACTION, "003", "003", InstantComparatorV2.REQUESTED_TIME_BASED_COMPARATOR),
            new HoodieInstant(COMPLETED, ROLLBACK_ACTION, "007", "007", InstantComparatorV2.REQUESTED_TIME_BASED_COMPARATOR),
            new HoodieInstant(COMPLETED, ROLLBACK_ACTION, "009", "009", InstantComparatorV2.REQUESTED_TIME_BASED_COMPARATOR),
            new HoodieInstant(COMPLETED, COMMIT_ACTION, "010", "010", InstantComparatorV2.REQUESTED_TIME_BASED_COMPARATOR),
            new HoodieInstant(COMPLETED, COMMIT_ACTION, "011", "011", InstantComparatorV2.REQUESTED_TIME_BASED_COMPARATOR),
            new HoodieInstant(COMPLETED, COMMIT_ACTION, "012", "012", InstantComparatorV2.REQUESTED_TIME_BASED_COMPARATOR)),
        Arrays.asList(new HoodieInstant(COMPLETED, COMMIT_ACTION, "001", "001", InstantComparatorV2.REQUESTED_TIME_BASED_COMPARATOR),
            new HoodieInstant(COMPLETED, COMMIT_ACTION, "002", "002", InstantComparatorV2.REQUESTED_TIME_BASED_COMPARATOR),
            new HoodieInstant(COMPLETED, COMMIT_ACTION, "005", "005", InstantComparatorV2.REQUESTED_TIME_BASED_COMPARATOR),
            new HoodieInstant(COMPLETED, COMMIT_ACTION, "006", "006", InstantComparatorV2.REQUESTED_TIME_BASED_COMPARATOR),
            new HoodieInstant(COMPLETED, COMMIT_ACTION, "008", "008", InstantComparatorV2.REQUESTED_TIME_BASED_COMPARATOR)),
        startTs
    );
    // archived commits strictly after "005" plus all active commits
    verifyTimeline(
        Arrays.asList(
            new HoodieInstant(COMPLETED, COMMIT_ACTION, "006", "006", InstantComparatorV2.REQUESTED_TIME_BASED_COMPARATOR),
            new HoodieInstant(COMPLETED, COMMIT_ACTION, "008", "008", InstantComparatorV2.REQUESTED_TIME_BASED_COMPARATOR),
            new HoodieInstant(COMPLETED, COMMIT_ACTION, "010", "010", InstantComparatorV2.REQUESTED_TIME_BASED_COMPARATOR),
            new HoodieInstant(COMPLETED, COMMIT_ACTION, "011", "011", InstantComparatorV2.REQUESTED_TIME_BASED_COMPARATOR),
            new HoodieInstant(COMPLETED, COMMIT_ACTION, "012", "012", InstantComparatorV2.REQUESTED_TIME_BASED_COMPARATOR)),
        TimelineUtils.getCommitsTimelineAfter(mockMetaClient, startTs, Option.of(startTs)));
    verify(mockMetaClient, times(1)).getArchivedTimeline(any());
  }
  /**
   * Builds a mocked {@link HoodieTableMetaClient} whose active timeline contains
   * {@code activeInstants} and whose archived timeline, when merged, contributes the
   * {@code archivedInstants} at or after {@code startTs}.
   *
   * <p>NOTE(review): the two {@code mergeTimeline} stubs below use overlapping matchers
   * ({@code eq(activeTimeline)} vs. an {@code argThat} matching rollback-free timelines); with
   * Mockito the last matching stub wins, so their order is significant — do not reorder.
   *
   * @param activeInstants   instants returned by the mocked file-system scan
   * @param archivedInstants instants the mocked archived timeline can contribute
   * @param startTs          lower bound used to filter archived instants
   * @return the mocked meta client
   * @throws IOException declared for the mocked scan call
   */
  private HoodieTableMetaClient prepareMetaClient(
      List<HoodieInstant> activeInstants,
      List<HoodieInstant> archivedInstants,
      String startTs) throws IOException {
    HoodieTableMetaClient mockMetaClient = mock(HoodieTableMetaClient.class);
    HoodieArchivedTimeline mockArchivedTimeline = mock(HoodieArchivedTimeline.class);
    HoodieTableConfig mockTableConfig = mock(HoodieTableConfig.class);
    when(mockMetaClient.getBasePath()).thenReturn(new StoragePath("file://dummy/path"));
    when(mockMetaClient.scanHoodieInstantsFromFileSystem(any(), any(), eq(true)))
        .thenReturn(activeInstants);
    when(mockMetaClient.getMetaPath()).thenReturn(new StoragePath("file://dummy/path/.hoodie"));
    when(mockMetaClient.getTableConfig()).thenReturn(mockTableConfig);
    when(mockMetaClient.getTableConfig().getTimelinePath()).thenReturn("timeline");
    HoodieActiveTimeline activeTimeline = new ActiveTimelineV2(mockMetaClient);
    when(mockMetaClient.getActiveTimeline())
        .thenReturn(activeTimeline);
    // NOTE(review): computed but never used below — presumably leftover; confirm before removing.
    Set<String> validWriteActions = CollectionUtils.createSet(
        COMMIT_ACTION, DELTA_COMMIT_ACTION, COMPACTION_ACTION, LOG_COMPACTION_ACTION, REPLACE_COMMIT_ACTION);
    when(mockMetaClient.getArchivedTimeline(any()))
        .thenReturn(mockArchivedTimeline);
    // archived instants earlier than startTs are dropped, mimicking a load-from-startTs archive read
    HoodieTimeline mergedTimeline = new BaseTimelineV2(
        archivedInstants.stream()
            .filter(instant -> instant.requestedTime().compareTo(startTs) >= 0), null)
        .mergeTimeline(activeTimeline);
    when(mockArchivedTimeline.mergeTimeline(eq(activeTimeline)))
        .thenReturn(mergedTimeline);
    HoodieTimeline mergedWriteTimeline = new BaseTimelineV2(
        archivedInstants.stream()
            .filter(instant -> instant.requestedTime().compareTo(startTs) >= 0), null)
        .mergeTimeline(activeTimeline.getWriteTimeline());
    // any rollback-free timeline argument (i.e. the write timeline) gets the write-merged result
    when(mockArchivedTimeline.mergeTimeline(argThat(timeline -> timeline.filter(
        instant -> instant.getAction().equals(ROLLBACK_ACTION)).countInstants() == 0)))
        .thenReturn(mergedWriteTimeline);
    return mockMetaClient;
  }
void verifyTimeline(List<HoodieInstant> expectedInstants, HoodieTimeline timeline) {
assertEquals(
expectedInstants.stream().sorted().collect(Collectors.toList()),
timeline.getInstants().stream().sorted().collect(Collectors.toList())
);
}
  /**
   * Covers {@code getEarliestInstantForMetadataArchival} across empty timelines, pending cleans,
   * rollback-only timelines, and savepointed commits (with and without savepoint exclusion).
   */
  @Test
  void testGetEarliestInstantForMetadataArchival() throws IOException {
    // Empty timeline
    assertEquals(
        Option.empty(),
        TimelineUtils.getEarliestInstantForMetadataArchival(
            prepareActiveTimeline(new ArrayList<>()), false));
    // Earlier request clean action before commits: the pending clean caps archival
    assertEquals(
        Option.of(new HoodieInstant(REQUESTED, CLEAN_ACTION, "003", InstantComparatorV2.REQUESTED_TIME_BASED_COMPARATOR)),
        TimelineUtils.getEarliestInstantForMetadataArchival(
            prepareActiveTimeline(
                Arrays.asList(
                    new HoodieInstant(COMPLETED, ROLLBACK_ACTION, "001", InstantComparatorV2.REQUESTED_TIME_BASED_COMPARATOR),
                    new HoodieInstant(COMPLETED, CLEAN_ACTION, "002", InstantComparatorV2.REQUESTED_TIME_BASED_COMPARATOR),
                    new HoodieInstant(REQUESTED, CLEAN_ACTION, "003", InstantComparatorV2.REQUESTED_TIME_BASED_COMPARATOR),
                    new HoodieInstant(COMPLETED, COMMIT_ACTION, "010", InstantComparatorV2.REQUESTED_TIME_BASED_COMPARATOR),
                    new HoodieInstant(COMPLETED, REPLACE_COMMIT_ACTION, "011", InstantComparatorV2.REQUESTED_TIME_BASED_COMPARATOR),
                    new HoodieInstant(REQUESTED, CLUSTERING_ACTION, "012", InstantComparatorV2.REQUESTED_TIME_BASED_COMPARATOR))), false));
    // No inflight instants: the earliest commit is the answer
    assertEquals(
        Option.of(new HoodieInstant(COMPLETED, COMMIT_ACTION, "010", InstantComparatorV2.REQUESTED_TIME_BASED_COMPARATOR)),
        TimelineUtils.getEarliestInstantForMetadataArchival(
            prepareActiveTimeline(
                Arrays.asList(
                    new HoodieInstant(COMPLETED, ROLLBACK_ACTION, "001", InstantComparatorV2.REQUESTED_TIME_BASED_COMPARATOR),
                    new HoodieInstant(COMPLETED, CLEAN_ACTION, "002", InstantComparatorV2.REQUESTED_TIME_BASED_COMPARATOR),
                    new HoodieInstant(COMPLETED, CLEAN_ACTION, "003", InstantComparatorV2.REQUESTED_TIME_BASED_COMPARATOR),
                    new HoodieInstant(COMPLETED, COMMIT_ACTION, "010", InstantComparatorV2.REQUESTED_TIME_BASED_COMPARATOR),
                    new HoodieInstant(COMPLETED, REPLACE_COMMIT_ACTION, "011", InstantComparatorV2.REQUESTED_TIME_BASED_COMPARATOR),
                    new HoodieInstant(REQUESTED, CLUSTERING_ACTION, "012", InstantComparatorV2.REQUESTED_TIME_BASED_COMPARATOR))), false));
    // Rollbacks only: an inflight rollback caps archival; all-completed yields empty
    assertEquals(
        Option.of(new HoodieInstant(INFLIGHT, ROLLBACK_ACTION, "003", InstantComparatorV2.REQUESTED_TIME_BASED_COMPARATOR)),
        TimelineUtils.getEarliestInstantForMetadataArchival(
            prepareActiveTimeline(
                Arrays.asList(
                    new HoodieInstant(COMPLETED, ROLLBACK_ACTION, "001", InstantComparatorV2.REQUESTED_TIME_BASED_COMPARATOR),
                    new HoodieInstant(COMPLETED, ROLLBACK_ACTION, "002", InstantComparatorV2.REQUESTED_TIME_BASED_COMPARATOR),
                    new HoodieInstant(INFLIGHT, ROLLBACK_ACTION, "003", InstantComparatorV2.REQUESTED_TIME_BASED_COMPARATOR))), false));
    assertEquals(
        Option.empty(),
        TimelineUtils.getEarliestInstantForMetadataArchival(
            prepareActiveTimeline(
                Arrays.asList(
                    new HoodieInstant(COMPLETED, ROLLBACK_ACTION, "001", InstantComparatorV2.REQUESTED_TIME_BASED_COMPARATOR),
                    new HoodieInstant(COMPLETED, ROLLBACK_ACTION, "002", InstantComparatorV2.REQUESTED_TIME_BASED_COMPARATOR),
                    new HoodieInstant(COMPLETED, ROLLBACK_ACTION, "003", InstantComparatorV2.REQUESTED_TIME_BASED_COMPARATOR))), false));
    // With savepoints: the savepointed commit "003" is retained unless savepoints are excluded
    HoodieActiveTimeline timeline = prepareActiveTimeline(
        Arrays.asList(
            new HoodieInstant(COMPLETED, ROLLBACK_ACTION, "001", InstantComparatorV2.REQUESTED_TIME_BASED_COMPARATOR),
            new HoodieInstant(COMPLETED, COMMIT_ACTION, "003", InstantComparatorV2.REQUESTED_TIME_BASED_COMPARATOR),
            new HoodieInstant(COMPLETED, SAVEPOINT_ACTION, "003", InstantComparatorV2.REQUESTED_TIME_BASED_COMPARATOR),
            new HoodieInstant(COMPLETED, COMMIT_ACTION, "010", InstantComparatorV2.REQUESTED_TIME_BASED_COMPARATOR),
            new HoodieInstant(COMPLETED, COMMIT_ACTION, "011", InstantComparatorV2.REQUESTED_TIME_BASED_COMPARATOR)));
    assertEquals(
        Option.of(new HoodieInstant(COMPLETED, COMMIT_ACTION, "003", InstantComparatorV2.REQUESTED_TIME_BASED_COMPARATOR)),
        TimelineUtils.getEarliestInstantForMetadataArchival(timeline, false));
    assertEquals(
        Option.of(new HoodieInstant(COMPLETED, COMMIT_ACTION, "010", InstantComparatorV2.REQUESTED_TIME_BASED_COMPARATOR)),
        TimelineUtils.getEarliestInstantForMetadataArchival(timeline, true));
  }
private HoodieActiveTimeline prepareActiveTimeline(
List<HoodieInstant> activeInstants) throws IOException {
HoodieTableMetaClient mockMetaClient = mock(HoodieTableMetaClient.class);
HoodieTableConfig mockTableConfig = mock(HoodieTableConfig.class);
when(mockMetaClient.getBasePath()).thenReturn(new StoragePath("file://dummy/path"));
when(mockMetaClient.scanHoodieInstantsFromFileSystem(any(), any(), eq(true)))
.thenReturn(activeInstants);
when(mockMetaClient.getMetaPath()).thenReturn(new StoragePath("file://dummy/path/.hoodie"));
when(mockMetaClient.getTableConfig()).thenReturn(mockTableConfig);
when(mockMetaClient.getTableConfig().getTimelinePath()).thenReturn("timeline");
return new ActiveTimelineV2(mockMetaClient);
}
private void verifyExtraMetadataLatestValue(String extraMetadataKey, String expected, boolean includeClustering) {
final Option<String> extraLatestValue;
if (includeClustering) {
extraLatestValue = TimelineUtils.getExtraMetadataFromLatestIncludeClustering(metaClient, extraMetadataKey);
} else {
extraLatestValue = TimelineUtils.getExtraMetadataFromLatest(metaClient, extraMetadataKey);
}
assertTrue(extraLatestValue.isPresent());
assertEquals(expected, extraLatestValue.get());
}
private HoodieRestoreMetadata getRestoreMetadata(String basePath, String partition, String commitTs, int count, String actionType) throws IOException {
List<HoodieRollbackMetadata> rollbackM = new ArrayList<>();
rollbackM.add(getRollbackMetadataInstance(basePath, partition, commitTs, count, actionType));
List<HoodieInstant> rollbackInstants = new ArrayList<>();
rollbackInstants.add(new HoodieInstant(COMPLETED, commitTs, actionType, InstantComparatorV2.REQUESTED_TIME_BASED_COMPARATOR));
return TimelineMetadataUtils.convertRestoreMetadata(commitTs, 200, rollbackInstants,
Collections.singletonMap(commitTs, rollbackM));
}
private HoodieRollbackMetadata getRollbackMetadataInstance(String basePath, String partition, String commitTs, int count, String actionType) {
List<String> deletedFiles = new ArrayList<>();
for (int i = 1; i <= count; i++) {
deletedFiles.add("file-" + i);
}
List<HoodieInstant> rollbacks = new ArrayList<>();
rollbacks.add(new HoodieInstant(COMPLETED, actionType, commitTs, InstantComparatorV2.REQUESTED_TIME_BASED_COMPARATOR));
HoodieRollbackStat rollbackStat = new HoodieRollbackStat(partition, deletedFiles, Collections.emptyList(),
Collections.emptyMap(), Collections.emptyMap());
List<HoodieRollbackStat> rollbackStats = new ArrayList<>();
rollbackStats.add(rollbackStat);
return TimelineMetadataUtils.convertRollbackMetadata(commitTs, Option.empty(), rollbacks, rollbackStats);
}
private HoodieReplaceCommitMetadata getReplaceCommitMetadata(String basePath, String commitTs, String replacePartition, int replaceCount,
String newFilePartition, int newFileCount, Map<String, String> extraMetadata,
WriteOperationType operationType) {
HoodieReplaceCommitMetadata commit = new HoodieReplaceCommitMetadata();
commit.setOperationType(operationType);
for (int i = 1; i <= newFileCount; i++) {
HoodieWriteStat stat = new HoodieWriteStat();
stat.setFileId(i + "");
stat.setPartitionPath(Paths.get(basePath, newFilePartition).toString());
stat.setPath(commitTs + "." + i + metaClient.getTableConfig().getBaseFileFormat().getFileExtension());
commit.addWriteStat(newFilePartition, stat);
}
Map<String, List<String>> partitionToReplaceFileIds = new HashMap<>();
if (replaceCount > 0) {
partitionToReplaceFileIds.put(replacePartition, new ArrayList<>());
}
for (int i = 1; i <= replaceCount; i++) {
partitionToReplaceFileIds.get(replacePartition).add(FSUtils.createNewFileIdPfx());
}
commit.setPartitionToReplaceFileIds(partitionToReplaceFileIds);
for (Map.Entry<String, String> extraEntries : extraMetadata.entrySet()) {
commit.addMetadata(extraEntries.getKey(), extraEntries.getValue());
}
return commit;
}
private Option<HoodieCleanMetadata> getCleanMetadata(String partition, String time, boolean isPartitionDeleted) {
Map<String, HoodieCleanPartitionMetadata> partitionToFilesCleaned = new HashMap<>();
List<String> filesDeleted = new ArrayList<>();
filesDeleted.add("file-" + partition + "-" + time + "1");
filesDeleted.add("file-" + partition + "-" + time + "2");
HoodieCleanPartitionMetadata partitionMetadata = HoodieCleanPartitionMetadata.newBuilder()
.setPartitionPath(partition)
.setPolicy(HoodieCleaningPolicy.KEEP_LATEST_COMMITS.name())
.setFailedDeleteFiles(Collections.emptyList())
.setDeletePathPatterns(Collections.emptyList())
.setSuccessDeleteFiles(filesDeleted)
.setIsPartitionDeleted(isPartitionDeleted)
.build();
partitionToFilesCleaned.putIfAbsent(partition, partitionMetadata);
return Option.of(HoodieCleanMetadata.newBuilder()
.setVersion(1)
.setTimeTakenInMillis(100)
.setTotalFilesDeleted(1)
.setStartCleanTime(time)
.setEarliestCommitToRetain(time)
.setLastCompletedCommitTimestamp("")
.setPartitionMetadata(partitionToFilesCleaned).build());
}
  /**
   * Verifies {@code getDroppedPartitions}: empty until a clean marks a partition as deleted,
   * and empty again once the recording clean instant disappears from the active timeline.
   */
  @Test
  void testGetDroppedPartitions() throws Exception {
    HoodieActiveTimeline activeTimeline = metaClient.getActiveTimeline();
    HoodieTimeline activeCommitTimeline = activeTimeline.getCommitAndReplaceTimeline();
    assertTrue(activeCommitTimeline.empty());
    String olderPartition = "p1"; // older partition that will be deleted by clean commit
    // first insert to the older partition
    HoodieInstant instant1 = new HoodieInstant(INFLIGHT, COMMIT_ACTION, "00001", InstantComparatorV2.REQUESTED_TIME_BASED_COMPARATOR);
    activeTimeline.createNewInstant(instant1);
    activeTimeline.saveAsComplete(instant1, getCommitMetadata(basePath, olderPartition, "00001", 2, Collections.emptyMap()));
    metaClient.reloadActiveTimeline();
    List<String> droppedPartitions = TimelineUtils.getDroppedPartitions(metaClient, Option.empty(), Option.empty());
    // no dropped partitions
    assertEquals(0, droppedPartitions.size());
    // another commit inserts to new partition
    HoodieInstant instant2 = new HoodieInstant(INFLIGHT, COMMIT_ACTION, "00002", InstantComparatorV2.REQUESTED_TIME_BASED_COMPARATOR);
    activeTimeline.createNewInstant(instant2);
    activeTimeline.saveAsComplete(instant2, getCommitMetadata(basePath, "p2", "00002", 2, Collections.emptyMap()));
    metaClient.reloadActiveTimeline();
    droppedPartitions = TimelineUtils.getDroppedPartitions(metaClient, Option.empty(), Option.empty());
    // no dropped partitions
    assertEquals(0, droppedPartitions.size());
    // clean commit deletes older partition (isPartitionDeleted = true)
    HoodieInstant cleanInstant = new HoodieInstant(INFLIGHT, CLEAN_ACTION, "00003", InstantComparatorV2.REQUESTED_TIME_BASED_COMPARATOR);
    activeTimeline.createNewInstant(cleanInstant);
    activeTimeline.saveAsComplete(cleanInstant, getCleanMetadata(olderPartition, "00003", true));
    metaClient.reloadActiveTimeline();
    droppedPartitions = TimelineUtils.getDroppedPartitions(metaClient, Option.empty(), Option.empty());
    // older partition is in the list dropped partitions
    assertEquals(1, droppedPartitions.size());
    assertEquals(olderPartition, droppedPartitions.get(0));
    // simulate archival by deleting the completed clean instant file from the active timeline;
    // with no clean metadata left, no partition is reported as dropped
    activeTimeline.deleteInstantFileIfExists(metaClient.getActiveTimeline().getCleanerTimeline().filterCompletedInstants().lastInstant().get());
    droppedPartitions = TimelineUtils.getDroppedPartitions(metaClient, Option.empty(), Option.empty());
    assertTrue(droppedPartitions.isEmpty());
  }
}
|
apache/olingo-odata4 | 35,824 | lib/server-tecsvc/src/main/java/org/apache/olingo/server/tecsvc/provider/EntityTypeProvider.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.olingo.server.tecsvc.provider;
import java.util.Arrays;
import org.apache.olingo.commons.api.ex.ODataException;
import org.apache.olingo.commons.api.edm.FullQualifiedName;
import org.apache.olingo.commons.api.edm.provider.CsdlEntityType;
import org.apache.olingo.commons.api.edm.provider.CsdlNavigationProperty;
import org.apache.olingo.commons.api.edm.provider.CsdlProperty;
import org.apache.olingo.commons.api.edm.provider.CsdlPropertyRef;
import org.apache.olingo.commons.api.edm.provider.CsdlReferentialConstraint;
public class EntityTypeProvider {
public static final FullQualifiedName nameETAllKey = new FullQualifiedName(SchemaProvider.NAMESPACE, "ETAllKey");
public static final FullQualifiedName nameETAllNullable = new FullQualifiedName(SchemaProvider.NAMESPACE,
"ETAllNullable");
public static final FullQualifiedName nameETDeriveCollComp = new FullQualifiedName(SchemaProvider.NAMESPACE,
"ETDeriveCollComp");
public static final FullQualifiedName nameETAllPrim = new FullQualifiedName(SchemaProvider.NAMESPACE, "ETAllPrim");
public static final FullQualifiedName nameETAllPrimDefaultValues = new FullQualifiedName(SchemaProvider.NAMESPACE,
"ETAllPrimDefaultValues");
public static final FullQualifiedName nameETBase = new FullQualifiedName(SchemaProvider.NAMESPACE, "ETBase");
public static final FullQualifiedName nameETBaseTwoKeyNav = new FullQualifiedName(SchemaProvider.NAMESPACE,
"ETBaseTwoKeyNav");
public static final FullQualifiedName nameETBaseTwoKeyTwoPrim =
new FullQualifiedName(SchemaProvider.NAMESPACE, "ETBaseTwoKeyTwoPrim");
public static final FullQualifiedName nameETCollAllPrim = new FullQualifiedName(SchemaProvider.NAMESPACE,
"ETCollAllPrim");
public static final FullQualifiedName nameETCompAllPrim = new FullQualifiedName(SchemaProvider.NAMESPACE,
"ETCompAllPrim");
public static final FullQualifiedName nameETCompCollAllPrim = new FullQualifiedName(SchemaProvider.NAMESPACE,
"ETCompCollAllPrim");
public static final FullQualifiedName nameETCompCollComp = new FullQualifiedName(SchemaProvider.NAMESPACE,
"ETCompCollComp");
public static final FullQualifiedName nameETCompComp = new FullQualifiedName(SchemaProvider.NAMESPACE, "ETCompComp");
public static final FullQualifiedName nameETCompMixPrimCollComp =
new FullQualifiedName(SchemaProvider.NAMESPACE, "ETCompMixPrimCollComp");
public static final FullQualifiedName nameETFourKeyAlias = new FullQualifiedName(SchemaProvider.NAMESPACE,
"ETFourKeyAlias");
public static final FullQualifiedName nameETKeyNav = new FullQualifiedName(SchemaProvider.NAMESPACE, "ETKeyNav");
public static final FullQualifiedName nameETKeyPrimNav = new FullQualifiedName(SchemaProvider.NAMESPACE,
"ETKeyPrimNav");
public static final FullQualifiedName nameETKeyPrimNavDerived = new FullQualifiedName(SchemaProvider.NAMESPACE,
"ETKeyPrimNavDerived");
public static final FullQualifiedName nameETKeyNavCont = new FullQualifiedName(SchemaProvider.NAMESPACE,
"ETKeyNavCont");
public static final FullQualifiedName nameETTwoKeyNavCont = new FullQualifiedName(SchemaProvider.NAMESPACE,
"ETTwoKeyNavCont");
public static final FullQualifiedName nameETMedia = new FullQualifiedName(SchemaProvider.NAMESPACE, "ETMedia");
public static final FullQualifiedName nameETMixPrimCollComp = new FullQualifiedName(SchemaProvider.NAMESPACE,
"ETMixPrimCollComp");
public static final FullQualifiedName nameETServerSidePaging =
new FullQualifiedName(SchemaProvider.NAMESPACE, "ETServerSidePaging");
public static final FullQualifiedName nameETStreamServerSidePaging =
new FullQualifiedName(SchemaProvider.NAMESPACE, "ETStreamServerSidePaging");
public static final FullQualifiedName nameETTwoBase = new FullQualifiedName(SchemaProvider.NAMESPACE, "ETTwoBase");
public static final FullQualifiedName nameETTwoBaseTwoKeyNav =
new FullQualifiedName(SchemaProvider.NAMESPACE, "ETTwoBaseTwoKeyNav");
public static final FullQualifiedName nameETTwoBaseTwoKeyTwoPrim =
new FullQualifiedName(SchemaProvider.NAMESPACE, "ETTwoBaseTwoKeyTwoPrim");
public static final FullQualifiedName nameETTwoKeyNav =
new FullQualifiedName(SchemaProvider.NAMESPACE, "ETTwoKeyNav");
public static final FullQualifiedName nameETTwoKeyTwoPrim = new FullQualifiedName(SchemaProvider.NAMESPACE,
"ETTwoKeyTwoPrim");
public static final FullQualifiedName nameETTwoPrim = new FullQualifiedName(SchemaProvider.NAMESPACE, "ETTwoPrim");
public static final FullQualifiedName nameETMixEnumDefCollComp = new FullQualifiedName(SchemaProvider.NAMESPACE,
"ETMixEnumDefCollComp");
public static final FullQualifiedName nameETStream = new FullQualifiedName(SchemaProvider.NAMESPACE,
"ETWithStream");
public static final FullQualifiedName nameETDelta =
new FullQualifiedName(SchemaProvider.NAMESPACE, "ETDelta");
public static final FullQualifiedName nameETPeople = new FullQualifiedName(SchemaProvider.NAMESPACE, "ETPeople");
public static final FullQualifiedName nameETCont = new FullQualifiedName(SchemaProvider.NAMESPACE, "ETCont");
public static final FullQualifiedName nameETBaseCont = new FullQualifiedName(SchemaProvider.NAMESPACE, "ETBaseCont");
public static final FullQualifiedName nameETTwoCont = new FullQualifiedName(SchemaProvider.NAMESPACE, "ETTwoCont");
public static final FullQualifiedName nameETStreamOnComplexProp =
new FullQualifiedName(SchemaProvider.NAMESPACE, "ETStreamOnComplexProp");
public static final FullQualifiedName nameETKeyAsSegmentString =
new FullQualifiedName(SchemaProvider.NAMESPACE, "ETKeyAsSegmentString");
public static final FullQualifiedName nameETKeyAsSegmentInt =
new FullQualifiedName(SchemaProvider.NAMESPACE, "ETKeyAsSegmentInt");
public static final FullQualifiedName nameETComplexKeyAsSegment =
new FullQualifiedName(SchemaProvider.NAMESPACE, "ETComplexKeyAsSegment");
public static final FullQualifiedName nameETKeyAsSegmentStringNavKeyAsSegment =
new FullQualifiedName(SchemaProvider.NAMESPACE, "ETKeyAsSegmentStringNavKeyAsSegment");
public CsdlEntityType getEntityType(final FullQualifiedName entityTypeName) throws ODataException {
if(entityTypeName.equals(nameETAllPrimDefaultValues)){
return new CsdlEntityType()
.setName("ETAllPrimDefaultValues")
.setKey(Arrays.asList(
new CsdlPropertyRef().setName("PropertyInt16")))
.setProperties(Arrays.asList(
PropertyProvider.propertyInt16_NotNullable_WithDefaultValue,
PropertyProvider.propertyString_NotNullable_WithDefaultValue,
PropertyProvider.propertyBoolean_NotNullable_WithDefaultValue,
PropertyProvider.propertyByte_NotNullable_WithDefaultValue,
PropertyProvider.propertySByte_NotNullable_WithDefaultValue,
PropertyProvider.propertyInt32_NotNullable_WithDefaultValue,
PropertyProvider.propertyInt64_NotNullable_WithDefaultValue,
PropertyProvider.propertySingle_NotNullable_WithDefaultValue,
PropertyProvider.propertyDouble_NotNullable_WithDefaultValue,
PropertyProvider.propertyDecimal_Scale_NotNullable_WithDefaultValue,
PropertyProvider.propertyBinary_NotNullable_WithDefaultValue,
PropertyProvider.propertyDate_NotNullable_WithDefaultValue,
PropertyProvider.propertyDateTimeOffset_NotNullable_WithDefaultValue,
PropertyProvider.propertyDuration_NotNullable_WithDefaultValue,
PropertyProvider.propertyGuid_NotNullable_WithDefaultValue,
PropertyProvider.propertyTimeOfDay_NotNullable_WithDefaultValue,
PropertyProvider.propertyEnumString_ENString_NonNullable_WithDefaultValue,
PropertyProvider.propertyTypeDefinition_TDString_NonNullable_WithDefaultValue
));
}else if (entityTypeName.equals(nameETAllPrim)) {
return new CsdlEntityType()
.setName("ETAllPrim")
.setKey(Arrays.asList(
new CsdlPropertyRef().setName("PropertyInt16")))
.setProperties(Arrays.asList(
PropertyProvider.propertyInt16_NotNullable, PropertyProvider.propertyString,
PropertyProvider.propertyBoolean, PropertyProvider.propertyByte, PropertyProvider.propertySByte,
PropertyProvider.propertyInt32, PropertyProvider.propertyInt64,
PropertyProvider.propertySingle, PropertyProvider.propertyDouble, PropertyProvider.propertyDecimal_Scale,
PropertyProvider.propertyBinary, PropertyProvider.propertyDate, PropertyProvider.propertyDateTimeOffset,
PropertyProvider.propertyDuration, PropertyProvider.propertyGuid, PropertyProvider.propertyTimeOfDay
))
.setNavigationProperties(Arrays.asList(PropertyProvider.navPropertyETTwoPrimOne_ETTwoPrim,
PropertyProvider.collectionNavPropertyETTwoPrimMany_ETTwoPrim));
} else if (entityTypeName.equals(nameETCollAllPrim)) {
return new CsdlEntityType()
.setName("ETCollAllPrim")
.setKey(Arrays.asList(new CsdlPropertyRef().setName("PropertyInt16")))
.setProperties(
Arrays.asList(
PropertyProvider.propertyInt16_NotNullable,
PropertyProvider.collPropertyString_NotNullable,
PropertyProvider.collPropertyBoolean, PropertyProvider.collPropertyByte,
PropertyProvider.collPropertySByte,
PropertyProvider.collPropertyInt16_NotNullable,
PropertyProvider.collPropertyInt32, PropertyProvider.collPropertyInt64,
PropertyProvider.collPropertySingle, PropertyProvider.collPropertyDouble,
PropertyProvider.collPropertyDecimal, PropertyProvider.collPropertyBinary,
PropertyProvider.collPropertyDate_NotNullable,
PropertyProvider.collPropertyDateTimeOffset_NotNullable,
PropertyProvider.collPropertyDuration_NotNullable,
PropertyProvider.collPropertyGuid, PropertyProvider.collPropertyTimeOfDay
));
} else if (entityTypeName.equals(nameETTwoPrim)) {
return new CsdlEntityType()
.setName("ETTwoPrim")
.setKey(Arrays.asList(new CsdlPropertyRef().setName("PropertyInt16")))
.setProperties(Arrays.asList(
PropertyProvider.propertyInt16_NotNullable, PropertyProvider.propertyString))
.setNavigationProperties(
Arrays.asList(PropertyProvider.navPropertyETAllPrimOne_ETAllPrim,
PropertyProvider.collectionNavPropertyETAllPrimMany_ETAllPrim));
} else if (entityTypeName.equals(nameETMixPrimCollComp)) {
return new CsdlEntityType()
.setName("ETMixPrimCollComp")
.setKey(Arrays.asList(new CsdlPropertyRef().setName("PropertyInt16")))
.setProperties(Arrays.asList(
PropertyProvider.propertyInt16_NotNullable, PropertyProvider.collPropertyString,
PropertyProvider.propertyComp_CTTwoPrim, PropertyProvider.collPropertyComp_CTTwoPrim));
} else if (entityTypeName.equals(nameETDeriveCollComp)) {
return new CsdlEntityType()
.setName("ETDeriveCollComp")
.setKey(Arrays.asList(new CsdlPropertyRef().setName("PropertyInt16")))
.setProperties(Arrays.asList(
PropertyProvider.propertyInt16_NotNullable,
PropertyProvider.propertyComp_CTTwoPrim_Ano,
PropertyProvider.collPropertyComp_CTTwoPrim_Ano));
} else if (entityTypeName.equals(nameETTwoKeyTwoPrim)) {
return new CsdlEntityType()
.setName("ETTwoKeyTwoPrim")
.setKey(Arrays.asList(
new CsdlPropertyRef().setName("PropertyInt16"),
new CsdlPropertyRef().setName("PropertyString")))
.setProperties(Arrays.asList(
PropertyProvider.propertyInt16_NotNullable, PropertyProvider.propertyString_NotNullable));
} else if (entityTypeName.equals(nameETBaseTwoKeyTwoPrim)) {
return new CsdlEntityType()
.setName("ETBaseTwoKeyTwoPrim")
.setBaseType(nameETTwoKeyTwoPrim);
} else if (entityTypeName.equals(nameETTwoBaseTwoKeyTwoPrim)) {
return new CsdlEntityType()
.setName("ETTwoBaseTwoKeyTwoPrim")
.setBaseType(nameETTwoKeyTwoPrim);
} else if (entityTypeName.equals(nameETBase)) {
return new CsdlEntityType()
.setName("ETBase")
.setBaseType(nameETTwoPrim)
.setProperties(Arrays.asList(new CsdlProperty()
.setName("AdditionalPropertyString_5")
.setType(PropertyProvider.nameString)));
} else if (entityTypeName.equals(nameETTwoBase)) {
return new CsdlEntityType()
.setName("ETTwoBase")
.setBaseType(nameETBase)
.setProperties(Arrays.asList(new CsdlProperty()
.setName("AdditionalPropertyString_6")
.setType(PropertyProvider.nameString))
);
} else if (entityTypeName.equals(nameETAllKey)) {
return new CsdlEntityType()
.setName("ETAllKey")
.setKey(Arrays.asList(
new CsdlPropertyRef().setName("PropertyString"),
new CsdlPropertyRef().setName("PropertyBoolean"),
new CsdlPropertyRef().setName("PropertyByte"),
new CsdlPropertyRef().setName("PropertySByte"),
new CsdlPropertyRef().setName("PropertyInt16"),
new CsdlPropertyRef().setName("PropertyInt32"),
new CsdlPropertyRef().setName("PropertyInt64"),
new CsdlPropertyRef().setName("PropertyDecimal"),
new CsdlPropertyRef().setName("PropertyDate"),
new CsdlPropertyRef().setName("PropertyDateTimeOffset"),
new CsdlPropertyRef().setName("PropertyDuration"),
new CsdlPropertyRef().setName("PropertyGuid"),
new CsdlPropertyRef().setName("PropertyTimeOfDay")))
.setProperties(
Arrays.asList(
PropertyProvider.propertyString_NotNullable, PropertyProvider.propertyBoolean_NotNullable,
PropertyProvider.propertyByte_NotNullable, PropertyProvider.propertySByte_NotNullable,
PropertyProvider.propertyInt16_NotNullable, PropertyProvider.propertyInt32_NotNullable,
PropertyProvider.propertyInt64_NotNullable,
PropertyProvider.propertyDecimal_NotNullable, PropertyProvider.propertyDate_NotNullable,
PropertyProvider.propertyDateTimeOffset_NotNullable,
PropertyProvider.propertyDuration_NotNullable, PropertyProvider.propertyGuid_NotNullable,
PropertyProvider.propertyTimeOfDay_NotNullable));
} else if (entityTypeName.equals(nameETCompAllPrim)) {
return new CsdlEntityType()
.setName("ETCompAllPrim")
.setKey(Arrays.asList(new CsdlPropertyRef().setName("PropertyInt16")))
.setProperties(
Arrays.asList(PropertyProvider.propertyInt16_NotNullable, PropertyProvider.propertyComp_CTAllPrim));
} else if (entityTypeName.equals(nameETCompCollAllPrim)) {
return new CsdlEntityType()
.setName("ETCompCollAllPrim")
.setKey(Arrays.asList(new CsdlPropertyRef().setName("PropertyInt16")))
.setProperties(
Arrays.asList(PropertyProvider.propertyInt16_NotNullable,
PropertyProvider.propertyComp_CTCollAllPrim));
} else if (entityTypeName.equals(nameETCompComp)) {
return new CsdlEntityType()
.setName("ETCompComp")
.setKey(Arrays.asList(new CsdlPropertyRef().setName("PropertyInt16")))
.setProperties(
Arrays.asList(PropertyProvider.propertyInt16_NotNullable, PropertyProvider.propertyComp_CTCompComp));
} else if (entityTypeName.equals(nameETCompCollComp)) {
return new CsdlEntityType()
.setName("ETCompCollComp")
.setKey(Arrays.asList(new CsdlPropertyRef().setName("PropertyInt16")))
.setProperties(Arrays.asList(
PropertyProvider.propertyInt16_NotNullable,
PropertyProvider.propertyComp_CTCompCollComp));
} else if (entityTypeName.equals(nameETMedia)) {
return new CsdlEntityType()
.setName("ETMedia")
.setKey(Arrays.asList(new CsdlPropertyRef().setName("PropertyInt16")))
.setProperties(Arrays.asList(PropertyProvider.propertyInt16_NotNullable))
.setNavigationProperties(Arrays.asList(PropertyProvider.navPropertyETMediaOne_ETKeyNavMany))
.setHasStream(true);
} else if (entityTypeName.equals(nameETServerSidePaging)) {
return new CsdlEntityType()
.setName(nameETServerSidePaging.getName())
.setKey(Arrays.asList(new CsdlPropertyRef().setName("PropertyInt16")))
.setProperties(Arrays.asList(PropertyProvider.propertyInt16_NotNullable,
PropertyProvider.propertyString_NotNullable));
}else if (entityTypeName.equals(nameETStreamServerSidePaging)) {
return new CsdlEntityType()
.setName(nameETStreamServerSidePaging.getName())
.setKey(Arrays.asList(
new CsdlPropertyRef()
.setName("PropertyInt16")))
.setProperties(Arrays.asList(
PropertyProvider.propertyInt16_NotNullable,
PropertyProvider.propertyStream));
} else if (entityTypeName.equals(nameETAllNullable)) {
return new CsdlEntityType()
.setName("ETAllNullable")
.setKey(Arrays.asList(new CsdlPropertyRef().setName("PropertyKey")))
.setProperties(
Arrays.asList(
new CsdlProperty()
.setName("PropertyKey").setType(PropertyProvider.nameInt16).setNullable(false),
PropertyProvider.propertyInt16_ExplicitNullable, PropertyProvider.propertyString_ExplicitNullable,
PropertyProvider.propertyBoolean_ExplicitNullable, PropertyProvider.propertyByte_ExplicitNullable,
PropertyProvider.propertySByte_ExplicitNullable, PropertyProvider.propertyInt32_ExplicitNullable,
PropertyProvider.propertyInt64_ExplicitNullable, PropertyProvider.propertySingle_ExplicitNullable,
PropertyProvider.propertyDouble_ExplicitNullable, PropertyProvider.propertyDecimal_ExplicitNullable,
PropertyProvider.propertyBinary_ExplicitNullable, PropertyProvider.propertyDate_ExplicitNullable,
PropertyProvider.propertyDateTimeOffset_ExplicitNullable,
PropertyProvider.propertyDuration_ExplicitNullable, PropertyProvider.propertyGuid_ExplicitNullable,
PropertyProvider.propertyTimeOfDay_ExplicitNullable,
PropertyProvider.collPropertyString_ExplicitNullable,
PropertyProvider.collPropertyBoolean_ExplicitNullable,
PropertyProvider.collPropertyByte_ExplicitNullable,
PropertyProvider.collPropertySByte_ExplicitNullable,
PropertyProvider.collPropertyInt16_ExplicitNullable,
PropertyProvider.collPropertyInt32_ExplicitNullable,
PropertyProvider.collPropertyInt64_ExplicitNullable,
PropertyProvider.collPropertySingle_ExplicitNullable,
PropertyProvider.collPropertyDouble_ExplicitNullable,
PropertyProvider.collPropertyDecimal_ExplicitNullable,
PropertyProvider.collPropertyBinary_ExplicitNullable,
PropertyProvider.collPropertyDate_ExplicitNullable,
PropertyProvider.collPropertyDateTimeOffset_ExplicitNullable,
PropertyProvider.collPropertyDuration_ExplicitNullable,
PropertyProvider.collPropertyGuid_ExplicitNullable,
PropertyProvider.collPropertyTimeOfDay_ExplicitNullable));
} else if (entityTypeName.equals(nameETKeyNav)) {
return new CsdlEntityType()
.setName("ETKeyNav")
.setKey(Arrays.asList(new CsdlPropertyRef().setName("PropertyInt16")))
.setProperties(
Arrays.asList(
PropertyProvider.propertyInt16_NotNullable, PropertyProvider.propertyString_NotNullable,
PropertyProvider.propertyCompNav_CTNavFiveProp,
PropertyProvider.propertyCompAllPrim_CTAllPrim, PropertyProvider.propertyCompTwoPrim_CTTwoPrim,
PropertyProvider.collPropertyString, PropertyProvider.collPropertyInt16,
PropertyProvider.collPropertyComp_CTPrimComp,
new CsdlProperty()
.setName("PropertyCompCompNav").setType(ComplexTypeProvider.nameCTCompNav)
))
.setNavigationProperties(
Arrays.asList(
PropertyProvider.navPropertyETTwoKeyNavOne_ETTwoKeyNav_NotNullable,
PropertyProvider.collectionNavPropertyETTwoKeyNavMany_ETTwoKeyNav_WithPartnerERKeyNavOne,
PropertyProvider.navPropertyETKeyNavOne_ETKeyNav,
PropertyProvider.collectionNavPropertyETKeyNavMany_ETKeyNav,
PropertyProvider.navPropertyETMediaOne_ETMedia,
PropertyProvider.collectionNavPropertyETMediaMany_ETMedia));
} else if (entityTypeName.equals(nameETKeyPrimNav)) {
return new CsdlEntityType()
.setName("ETKeyPrimNav")
.setKey(Arrays.asList(new CsdlPropertyRef().setName("PropertyInt16")))
.setProperties(Arrays.asList(
PropertyProvider.propertyInt16_NotNullable, PropertyProvider.propertyString_ExplicitNullable))
.setNavigationProperties(
Arrays.asList(
PropertyProvider.navPropertyETKeyPrimNavOne_ETKeyPrimNav));
} else if (entityTypeName.equals(nameETKeyPrimNavDerived)) {
return new CsdlEntityType()
.setName("ETKeyPrimNavDerived")
.setBaseType(nameETKeyPrimNav)
.setProperties(Arrays.asList(PropertyProvider.propertyBoolean_NotNullable));
} else if (entityTypeName.equals(nameETKeyNavCont)) {
return new CsdlEntityType()
.setName("ETKeyNavCont")
.setKey(Arrays.asList(new CsdlPropertyRef().setName("PropertyInt16")))
.setProperties(Arrays.asList(
PropertyProvider.propertyInt16_NotNullable, PropertyProvider.propertyString_NotNullable,
PropertyProvider.propertyCompNavCont))
.setNavigationProperties(Arrays.asList(
PropertyProvider.navPropertyETTwoKeyNavOneCT_ETTwoKeyNav,
PropertyProvider.collectionNavPropertyETTwoKeyNavMany_CT_ETTwoKeyNav,
PropertyProvider.navPropertyETTwoKeyNavContOne_ETTwoKeyNav,
PropertyProvider.collectionNavPropertyETTwoKeyNavContMany_ETTwoKeyNav,
PropertyProvider.navPropertyETTwoKeyNavContOne_ETCont,
PropertyProvider.collectionNavPropertyETTwoKeyNavContMany_ETCont,
PropertyProvider.collectionNavPropertyETTwoKeyNavContMany_ETBaseCont
));
} else if (entityTypeName.equals(nameETTwoKeyNavCont)) {
return new CsdlEntityType()
.setName("ETTwoKeyNavCont")
.setKey(Arrays.asList(
new CsdlPropertyRef().setName("PropertyInt16"),
new CsdlPropertyRef().setName("PropertyString")))
.setProperties(Arrays.asList(
PropertyProvider.propertyInt16_NotNullable, PropertyProvider.propertyString_NotNullable))
.setNavigationProperties(Arrays.asList(
PropertyProvider.navPropertyETTwoKeyNavContOneCT_ETTwoKeyNav,
PropertyProvider.collectionNavPropertyETTwoKeyNavContMany_CT_ETTwoKeyNav
));
} else if (entityTypeName.equals(nameETTwoKeyNav)) {
return new CsdlEntityType()
.setName("ETTwoKeyNav")
.setKey(Arrays.asList(
new CsdlPropertyRef().setName("PropertyInt16"),
new CsdlPropertyRef().setName("PropertyString")))
.setProperties(
Arrays.asList(
PropertyProvider.propertyInt16_NotNullable, PropertyProvider.propertyString_NotNullable,
PropertyProvider.propertyComp_CTPrimComp_NotNullable,
new CsdlProperty().setName("PropertyCompNav").setType(ComplexTypeProvider.nameCTBasePrimCompNav)
.setNullable(false),
PropertyProvider.collPropertyComp_CTPrimComp,
new CsdlProperty().setName("CollPropertyCompNav").setType(ComplexTypeProvider.nameCTNavFiveProp)
.setCollection(true),
PropertyProvider.collPropertyString, PropertyProvider.propertyCompTwoPrim_CTTwoPrim
))
.setNavigationProperties(Arrays.asList(
new CsdlNavigationProperty()
.setName("NavPropertyETKeyNavOne")
.setType(nameETKeyNav)
.setPartner("NavPropertyETTwoKeyNavMany")
.setReferentialConstraints(Arrays.asList(
new CsdlReferentialConstraint()
.setProperty("PropertyInt16")
.setReferencedProperty("PropertyInt16"))),
PropertyProvider.collectionNavPropertyETKeyNavMany_ETKeyNav,
PropertyProvider.navPropertyETTwoKeyNavOne_ETTwoKeyNav,
PropertyProvider.collectionNavPropertyETTwoKeyNavMany_ETTwoKeyNav,
PropertyProvider.navPropertySINav));
} else if (entityTypeName.equals(nameETBaseTwoKeyNav)) {
return new CsdlEntityType()
.setName("ETBaseTwoKeyNav")
.setBaseType(nameETTwoKeyNav)
.setProperties(Arrays.asList(PropertyProvider.propertyDate_ExplicitNullable))
.setNavigationProperties(Arrays.asList(
new CsdlNavigationProperty()
.setName("NavPropertyETBaseTwoKeyNavOne")
.setType(nameETBaseTwoKeyNav),
new CsdlNavigationProperty()
.setName("NavPropertyETTwoBaseTwoKeyNavOne")
.setType(nameETTwoBaseTwoKeyNav)));
} else if (entityTypeName.equals(nameETTwoBaseTwoKeyNav)) {
return new CsdlEntityType()
.setName("ETTwoBaseTwoKeyNav")
.setBaseType(nameETBaseTwoKeyNav)
.setKey(Arrays.asList(new CsdlPropertyRef().setName("PropertyInt16")))
.setProperties(Arrays.asList(PropertyProvider.propertyGuid_ExplicitNullable))
.setNavigationProperties(Arrays.asList(
new CsdlNavigationProperty()
.setName("NavPropertyETBaseTwoKeyNavMany")
.setType(nameETBaseTwoKeyNav)
.setCollection(true)
));
} else if (entityTypeName.equals(nameETFourKeyAlias)) {
return new CsdlEntityType()
.setName("ETFourKeyAlias")
.setKey(
Arrays.asList(
new CsdlPropertyRef().setName("PropertyInt16"),
new CsdlPropertyRef().setName("PropertyComp/PropertyInt16").setAlias("KeyAlias1"),
new CsdlPropertyRef().setName("PropertyComp/PropertyString").setAlias("KeyAlias2"),
new CsdlPropertyRef().setName("PropertyCompComp/PropertyComp/PropertyString").setAlias("KeyAlias3")))
.setProperties(
Arrays.asList(PropertyProvider.propertyInt16_NotNullable,
PropertyProvider.propertyComp_CTTwoPrim_NotNullable,
PropertyProvider.propertyCompComp_CTCompComp_NotNullable));
} else if (entityTypeName.equals(nameETCompMixPrimCollComp)) {
return new CsdlEntityType()
.setName("ETCompMixPrimCollComp")
.setKey(Arrays.asList(
new CsdlPropertyRef()
.setName("PropertyInt16")))
.setProperties(
Arrays.asList(PropertyProvider.propertyInt16_NotNullable,
PropertyProvider.propertyMixedPrimCollComp_CTMixPrimCollComp));
} else if (entityTypeName.equals(nameETMixEnumDefCollComp)) {
return new CsdlEntityType()
.setName(nameETMixEnumDefCollComp.getName())
.setKey(Arrays.asList(new CsdlPropertyRef().setName("PropertyEnumString"),
new CsdlPropertyRef().setName("PropertyDefString")))
.setProperties(Arrays.asList(
PropertyProvider.propertyEnumString_ENString_NonNullable,
PropertyProvider.collPropertyEnumString_ENString,
PropertyProvider.propertyTypeDefinition_TDString_NonNullable,
PropertyProvider.collPropertyTypeDefinition_TDString,
PropertyProvider.propertyComp_CTMixEnumTypeDefColl,
PropertyProvider.propertyCompColl_CTMixEnumTypeDefColl));
} else if (entityTypeName.equals(nameETStream)) {
return new CsdlEntityType()
.setName(nameETStream.getName())
.setKey(Arrays.asList(
new CsdlPropertyRef()
.setName("PropertyInt16")))
.setProperties(Arrays.asList(
PropertyProvider.propertyInt16_NotNullable,
PropertyProvider.propertyStream));
} else if (entityTypeName.equals(nameETPeople)) {
return new CsdlEntityType()
.setName(nameETPeople.getName())
.setKey(Arrays.asList(new CsdlPropertyRef().setName("id")))
.setProperties(Arrays.asList(
PropertyProvider.propertyId,
PropertyProvider.propertyName))
.setNavigationProperties(Arrays.asList(PropertyProvider.navPropertyFriends));
} else if (entityTypeName.equals(nameETDelta)) {
return new CsdlEntityType()
.setName("ETDelta")
.setBaseType(nameETTwoPrim)
.setNavigationProperties(
Arrays.asList(
PropertyProvider.navPropertyETBaseCont_ETTwoCont,
PropertyProvider.collectionNavPropertyETBaseContMany_ETTwoCont));
} else if (entityTypeName.equals(nameETCont)) {
return new CsdlEntityType()
.setName("ETCont").setBaseType(nameETBaseCont)
.setProperties(Arrays.asList(
PropertyProvider.propertyBoolean, PropertyProvider.propertyByte, PropertyProvider.propertySByte
))
.setNavigationProperties(Arrays.asList(PropertyProvider.navPropertyETCont_ETTwoPrim,
PropertyProvider.collectionNavPropertyETContMany_ETTwoPrim));
} else if (entityTypeName.equals(nameETBaseCont)) {
return new CsdlEntityType()
.setName("ETBaseCont")
.setKey(Arrays.asList(
new CsdlPropertyRef().setName("PropertyInt16")))
.setProperties(Arrays.asList(
PropertyProvider.propertyInt16_NotNullable, PropertyProvider.propertyString,
PropertyProvider.propertyInt32, PropertyProvider.propertyInt64,
PropertyProvider.propertySingle, PropertyProvider.propertyDouble, PropertyProvider.propertyDecimal_Scale,
PropertyProvider.propertyBinary, PropertyProvider.propertyDate, PropertyProvider.propertyDateTimeOffset,
PropertyProvider.propertyDuration, PropertyProvider.propertyGuid, PropertyProvider.propertyTimeOfDay
))
.setNavigationProperties(Arrays.asList(PropertyProvider.navPropertyETBaseCont_ETTwoPrim,
PropertyProvider.collectionNavPropertyETBaseContMany_ETTwoPrim,
PropertyProvider.collectionNavPropertyETBaseContMany_ETTwoCont,
PropertyProvider.navPropertyETBaseCont_ETTwoCont));
} else if (entityTypeName.equals(nameETTwoCont)) {
return new CsdlEntityType()
.setName("ETTwoCont")
.setKey(Arrays.asList(
new CsdlPropertyRef().setName("PropertyInt16"),
new CsdlPropertyRef().setName("PropertyString")))
.setProperties(Arrays.asList(
PropertyProvider.propertyInt16_NotNullable, PropertyProvider.propertyString_NotNullable,
PropertyProvider.propertyInt32, PropertyProvider.propertyInt64,
PropertyProvider.propertySingle, PropertyProvider.propertyDouble, PropertyProvider.propertyDecimal_Scale,
PropertyProvider.propertyBinary, PropertyProvider.propertyDate, PropertyProvider.propertyDateTimeOffset,
PropertyProvider.propertyDuration, PropertyProvider.propertyGuid, PropertyProvider.propertyTimeOfDay
));
} else if (entityTypeName.equals(nameETStreamOnComplexProp)) {
return new CsdlEntityType()
.setName("ETStreamOnComplexProp")
.setKey(Arrays.asList(
new CsdlPropertyRef().setName("PropertyInt16")))
.setProperties(Arrays.asList(
PropertyProvider.propertyInt16_NotNullable,
PropertyProvider.propertyInt32, PropertyProvider.propertyEntityStream,
PropertyProvider.propertyCompWithStream_CTWithStreamProp
));
} else if (entityTypeName.equals(nameETKeyAsSegmentString)) {
return new CsdlEntityType()
.setName("ETKeyAsSegmentString")
.setKey(Arrays.asList(
new CsdlPropertyRef().setName("PropertyString")))
.setProperties(Arrays.asList(
PropertyProvider.propertyString_NotNullable));
} else if (entityTypeName.equals(nameETKeyAsSegmentInt)) {
return new CsdlEntityType()
.setName("ETKeyAsSegmentInt")
.setKey(Arrays.asList(
new CsdlPropertyRef().setName("PropertyInt16")))
.setProperties(Arrays.asList(
PropertyProvider.propertyInt16_NotNullable));
} else if (entityTypeName.equals(nameETComplexKeyAsSegment)) {
return new CsdlEntityType()
.setName("ETComplexKeyAsSegment")
.setKey(Arrays.asList(
new CsdlPropertyRef().setName("PropertyString"),
new CsdlPropertyRef().setName("PropertyInt16")))
.setProperties(Arrays.asList(
PropertyProvider.propertyString_NotNullable,
PropertyProvider.propertyInt16_NotNullable));
} else if(entityTypeName.equals(nameETKeyAsSegmentStringNavKeyAsSegment)) {
return new CsdlEntityType()
.setName(nameETKeyAsSegmentStringNavKeyAsSegment.getName())
.setKey(Arrays.asList(
new CsdlPropertyRef().setName("PropertyString")))
.setProperties(Arrays.asList(
PropertyProvider.propertyString_NotNullable))
.setNavigationProperties(Arrays.asList(PropertyProvider.navPropertyKeyAsSegment));
}
return null;
}
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/translate/v3/automl_translation.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.translate.v3;
/**
*
*
* <pre>
* Response message for ListDatasets.
* </pre>
*
* Protobuf type {@code google.cloud.translation.v3.ListDatasetsResponse}
*/
public final class ListDatasetsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.translation.v3.ListDatasetsResponse)
ListDatasetsResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListDatasetsResponse.newBuilder() to construct.
private ListDatasetsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListDatasetsResponse() {
datasets_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListDatasetsResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.translate.v3.AutoMLTranslationProto
.internal_static_google_cloud_translation_v3_ListDatasetsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.translate.v3.AutoMLTranslationProto
.internal_static_google_cloud_translation_v3_ListDatasetsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.translate.v3.ListDatasetsResponse.class,
com.google.cloud.translate.v3.ListDatasetsResponse.Builder.class);
}
public static final int DATASETS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.translate.v3.Dataset> datasets_;
/**
*
*
* <pre>
* The datasets read.
* </pre>
*
* <code>repeated .google.cloud.translation.v3.Dataset datasets = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.translate.v3.Dataset> getDatasetsList() {
return datasets_;
}
/**
*
*
* <pre>
* The datasets read.
* </pre>
*
* <code>repeated .google.cloud.translation.v3.Dataset datasets = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.translate.v3.DatasetOrBuilder>
getDatasetsOrBuilderList() {
return datasets_;
}
/**
*
*
* <pre>
* The datasets read.
* </pre>
*
* <code>repeated .google.cloud.translation.v3.Dataset datasets = 1;</code>
*/
@java.lang.Override
public int getDatasetsCount() {
return datasets_.size();
}
/**
*
*
* <pre>
* The datasets read.
* </pre>
*
* <code>repeated .google.cloud.translation.v3.Dataset datasets = 1;</code>
*/
@java.lang.Override
public com.google.cloud.translate.v3.Dataset getDatasets(int index) {
return datasets_.get(index);
}
/**
*
*
* <pre>
* The datasets read.
* </pre>
*
* <code>repeated .google.cloud.translation.v3.Dataset datasets = 1;</code>
*/
@java.lang.Override
public com.google.cloud.translate.v3.DatasetOrBuilder getDatasetsOrBuilder(int index) {
return datasets_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token to retrieve next page of results.
* Pass this token to the page_token field in the ListDatasetsRequest to
* obtain the corresponding page.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* A token to retrieve next page of results.
* Pass this token to the page_token field in the ListDatasetsRequest to
* obtain the corresponding page.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < datasets_.size(); i++) {
output.writeMessage(1, datasets_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < datasets_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, datasets_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.translate.v3.ListDatasetsResponse)) {
return super.equals(obj);
}
com.google.cloud.translate.v3.ListDatasetsResponse other =
(com.google.cloud.translate.v3.ListDatasetsResponse) obj;
if (!getDatasetsList().equals(other.getDatasetsList())) return false;
if (!getNextPageToken().equals(other.getNextPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getDatasetsCount() > 0) {
hash = (37 * hash) + DATASETS_FIELD_NUMBER;
hash = (53 * hash) + getDatasetsList().hashCode();
}
hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getNextPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
  // ---------------------------------------------------------------------------
  // Static parse entry points. Every overload delegates to PARSER; the stream
  // overloads route through GeneratedMessageV3 helpers so java.io.IOException
  // is surfaced directly rather than wrapped.
  // ---------------------------------------------------------------------------
  public static com.google.cloud.translate.v3.ListDatasetsResponse parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.translate.v3.ListDatasetsResponse parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.translate.v3.ListDatasetsResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.translate.v3.ListDatasetsResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.translate.v3.ListDatasetsResponse parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.translate.v3.ListDatasetsResponse parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  // Stream variants: may throw IOException from the underlying stream.
  public static com.google.cloud.translate.v3.ListDatasetsResponse parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.translate.v3.ListDatasetsResponse parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Delimited variants: read a varint length prefix before the message body.
  public static com.google.cloud.translate.v3.ListDatasetsResponse parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.translate.v3.ListDatasetsResponse parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.translate.v3.ListDatasetsResponse parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.translate.v3.ListDatasetsResponse parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.translate.v3.ListDatasetsResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Response message for ListDatasets.
* </pre>
*
* Protobuf type {@code google.cloud.translation.v3.ListDatasetsResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.translation.v3.ListDatasetsResponse)
com.google.cloud.translate.v3.ListDatasetsResponseOrBuilder {
    // Descriptor for google.cloud.translation.v3.ListDatasetsResponse (builder view).
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.translate.v3.AutoMLTranslationProto
          .internal_static_google_cloud_translation_v3_ListDatasetsResponse_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      // Maps descriptor fields onto the generated message/builder accessors.
      return com.google.cloud.translate.v3.AutoMLTranslationProto
          .internal_static_google_cloud_translation_v3_ListDatasetsResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.translate.v3.ListDatasetsResponse.class,
              com.google.cloud.translate.v3.ListDatasetsResponse.Builder.class);
    }
    // Construct using com.google.cloud.translate.v3.ListDatasetsResponse.newBuilder()
    private Builder() {}
    // Parented builders propagate onChanged() notifications upward.
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      // Reset all has-bits before clearing the individual fields.
      bitField0_ = 0;
      if (datasetsBuilder_ == null) {
        datasets_ = java.util.Collections.emptyList();
      } else {
        // The field builder owns the list; null the local reference and clear it.
        datasets_ = null;
        datasetsBuilder_.clear();
      }
      // Drop the "datasets list is locally mutable" bit (0x1) explicitly.
      bitField0_ = (bitField0_ & ~0x00000001);
      nextPageToken_ = "";
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.translate.v3.AutoMLTranslationProto
          .internal_static_google_cloud_translation_v3_ListDatasetsResponse_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.translate.v3.ListDatasetsResponse getDefaultInstanceForType() {
      return com.google.cloud.translate.v3.ListDatasetsResponse.getDefaultInstance();
    }
    // Builds and verifies initialization (always true for proto3; kept for API parity).
    @java.lang.Override
    public com.google.cloud.translate.v3.ListDatasetsResponse build() {
      com.google.cloud.translate.v3.ListDatasetsResponse result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    // Builds without the initialization check; repeated fields first, then scalars.
    @java.lang.Override
    public com.google.cloud.translate.v3.ListDatasetsResponse buildPartial() {
      com.google.cloud.translate.v3.ListDatasetsResponse result =
          new com.google.cloud.translate.v3.ListDatasetsResponse(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    private void buildPartialRepeatedFields(
        com.google.cloud.translate.v3.ListDatasetsResponse result) {
      if (datasetsBuilder_ == null) {
        // Freeze the locally-owned list so the built message is immutable;
        // clearing bit 0x1 means a later mutation will copy-on-write.
        if (((bitField0_ & 0x00000001) != 0)) {
          datasets_ = java.util.Collections.unmodifiableList(datasets_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.datasets_ = datasets_;
      } else {
        result.datasets_ = datasetsBuilder_.build();
      }
    }
    // Copies scalar fields whose has-bit (0x2 = next_page_token) is set.
    private void buildPartial0(com.google.cloud.translate.v3.ListDatasetsResponse result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.nextPageToken_ = nextPageToken_;
      }
    }
    // ----- Generic reflective mutators: straight delegation to the base builder,
    // overridden only to narrow the fluent return type to this Builder. -----
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.translate.v3.ListDatasetsResponse) {
return mergeFrom((com.google.cloud.translate.v3.ListDatasetsResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
    // Typed merge: appends other's datasets and overwrites next_page_token if set.
    public Builder mergeFrom(com.google.cloud.translate.v3.ListDatasetsResponse other) {
      if (other == com.google.cloud.translate.v3.ListDatasetsResponse.getDefaultInstance())
        return this;
      if (datasetsBuilder_ == null) {
        if (!other.datasets_.isEmpty()) {
          if (datasets_.isEmpty()) {
            // Adopt other's (immutable) list directly; clear bit 0x1 so a later
            // local mutation copies it first.
            datasets_ = other.datasets_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureDatasetsIsMutable();
            datasets_.addAll(other.datasets_);
          }
          onChanged();
        }
      } else {
        if (!other.datasets_.isEmpty()) {
          if (datasetsBuilder_.isEmpty()) {
            // Cheaper to drop the empty field builder and share other's list;
            // recreate the builder only if the runtime forces field builders.
            datasetsBuilder_.dispose();
            datasetsBuilder_ = null;
            datasets_ = other.datasets_;
            bitField0_ = (bitField0_ & ~0x00000001);
            datasetsBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getDatasetsFieldBuilder()
                    : null;
          } else {
            datasetsBuilder_.addAllMessages(other.datasets_);
          }
        }
      }
      // Proto3 string: merge only when non-empty (non-default) on the source.
      if (!other.getNextPageToken().isEmpty()) {
        nextPageToken_ = other.nextPageToken_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      // Proto3 message with no required fields: always initialized.
      return true;
    }
    // Wire-format merge: reads tag/value pairs until EOF or an end-group tag.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // Tag 0 marks end of input.
              done = true;
              break;
            case 10:
              {
                // Field 1 (datasets): length-delimited Dataset message.
                com.google.cloud.translate.v3.Dataset m =
                    input.readMessage(
                        com.google.cloud.translate.v3.Dataset.parser(), extensionRegistry);
                if (datasetsBuilder_ == null) {
                  ensureDatasetsIsMutable();
                  datasets_.add(m);
                } else {
                  datasetsBuilder_.addMessage(m);
                }
                break;
              } // case 10
            case 18:
              {
                // Field 2 (next_page_token): UTF-8 validated string.
                nextPageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                // Unknown fields are preserved; false means an end-group tag.
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        // Notify parents even on failure: fields may already be partially set.
        onChanged();
      } // finally
      return this;
    }
    // Bit 0x1 = datasets list is locally owned/mutable; bit 0x2 = next_page_token set.
    private int bitField0_;
    private java.util.List<com.google.cloud.translate.v3.Dataset> datasets_ =
        java.util.Collections.emptyList();
    // Copy-on-write guard: clones a shared/adopted list before the first local mutation.
    private void ensureDatasetsIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        datasets_ = new java.util.ArrayList<com.google.cloud.translate.v3.Dataset>(datasets_);
        bitField0_ |= 0x00000001;
      }
    }
    // Non-null once any builder-based access has occurred; then owns the list.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.translate.v3.Dataset,
            com.google.cloud.translate.v3.Dataset.Builder,
            com.google.cloud.translate.v3.DatasetOrBuilder>
        datasetsBuilder_;
/**
*
*
* <pre>
* The datasets read.
* </pre>
*
* <code>repeated .google.cloud.translation.v3.Dataset datasets = 1;</code>
*/
public java.util.List<com.google.cloud.translate.v3.Dataset> getDatasetsList() {
if (datasetsBuilder_ == null) {
return java.util.Collections.unmodifiableList(datasets_);
} else {
return datasetsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* The datasets read.
* </pre>
*
* <code>repeated .google.cloud.translation.v3.Dataset datasets = 1;</code>
*/
public int getDatasetsCount() {
if (datasetsBuilder_ == null) {
return datasets_.size();
} else {
return datasetsBuilder_.getCount();
}
}
/**
*
*
* <pre>
* The datasets read.
* </pre>
*
* <code>repeated .google.cloud.translation.v3.Dataset datasets = 1;</code>
*/
public com.google.cloud.translate.v3.Dataset getDatasets(int index) {
if (datasetsBuilder_ == null) {
return datasets_.get(index);
} else {
return datasetsBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* The datasets read.
* </pre>
*
* <code>repeated .google.cloud.translation.v3.Dataset datasets = 1;</code>
*/
public Builder setDatasets(int index, com.google.cloud.translate.v3.Dataset value) {
if (datasetsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureDatasetsIsMutable();
datasets_.set(index, value);
onChanged();
} else {
datasetsBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The datasets read.
* </pre>
*
* <code>repeated .google.cloud.translation.v3.Dataset datasets = 1;</code>
*/
public Builder setDatasets(
int index, com.google.cloud.translate.v3.Dataset.Builder builderForValue) {
if (datasetsBuilder_ == null) {
ensureDatasetsIsMutable();
datasets_.set(index, builderForValue.build());
onChanged();
} else {
datasetsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The datasets read.
* </pre>
*
* <code>repeated .google.cloud.translation.v3.Dataset datasets = 1;</code>
*/
public Builder addDatasets(com.google.cloud.translate.v3.Dataset value) {
if (datasetsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureDatasetsIsMutable();
datasets_.add(value);
onChanged();
} else {
datasetsBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* The datasets read.
* </pre>
*
* <code>repeated .google.cloud.translation.v3.Dataset datasets = 1;</code>
*/
public Builder addDatasets(int index, com.google.cloud.translate.v3.Dataset value) {
if (datasetsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureDatasetsIsMutable();
datasets_.add(index, value);
onChanged();
} else {
datasetsBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The datasets read.
* </pre>
*
* <code>repeated .google.cloud.translation.v3.Dataset datasets = 1;</code>
*/
public Builder addDatasets(com.google.cloud.translate.v3.Dataset.Builder builderForValue) {
if (datasetsBuilder_ == null) {
ensureDatasetsIsMutable();
datasets_.add(builderForValue.build());
onChanged();
} else {
datasetsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The datasets read.
* </pre>
*
* <code>repeated .google.cloud.translation.v3.Dataset datasets = 1;</code>
*/
public Builder addDatasets(
int index, com.google.cloud.translate.v3.Dataset.Builder builderForValue) {
if (datasetsBuilder_ == null) {
ensureDatasetsIsMutable();
datasets_.add(index, builderForValue.build());
onChanged();
} else {
datasetsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The datasets read.
* </pre>
*
* <code>repeated .google.cloud.translation.v3.Dataset datasets = 1;</code>
*/
public Builder addAllDatasets(
java.lang.Iterable<? extends com.google.cloud.translate.v3.Dataset> values) {
if (datasetsBuilder_ == null) {
ensureDatasetsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, datasets_);
onChanged();
} else {
datasetsBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* The datasets read.
* </pre>
*
* <code>repeated .google.cloud.translation.v3.Dataset datasets = 1;</code>
*/
public Builder clearDatasets() {
if (datasetsBuilder_ == null) {
datasets_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
datasetsBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* The datasets read.
* </pre>
*
* <code>repeated .google.cloud.translation.v3.Dataset datasets = 1;</code>
*/
public Builder removeDatasets(int index) {
if (datasetsBuilder_ == null) {
ensureDatasetsIsMutable();
datasets_.remove(index);
onChanged();
} else {
datasetsBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* The datasets read.
* </pre>
*
* <code>repeated .google.cloud.translation.v3.Dataset datasets = 1;</code>
*/
public com.google.cloud.translate.v3.Dataset.Builder getDatasetsBuilder(int index) {
return getDatasetsFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* The datasets read.
* </pre>
*
* <code>repeated .google.cloud.translation.v3.Dataset datasets = 1;</code>
*/
public com.google.cloud.translate.v3.DatasetOrBuilder getDatasetsOrBuilder(int index) {
if (datasetsBuilder_ == null) {
return datasets_.get(index);
} else {
return datasetsBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* The datasets read.
* </pre>
*
* <code>repeated .google.cloud.translation.v3.Dataset datasets = 1;</code>
*/
public java.util.List<? extends com.google.cloud.translate.v3.DatasetOrBuilder>
getDatasetsOrBuilderList() {
if (datasetsBuilder_ != null) {
return datasetsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(datasets_);
}
}
/**
*
*
* <pre>
* The datasets read.
* </pre>
*
* <code>repeated .google.cloud.translation.v3.Dataset datasets = 1;</code>
*/
public com.google.cloud.translate.v3.Dataset.Builder addDatasetsBuilder() {
return getDatasetsFieldBuilder()
.addBuilder(com.google.cloud.translate.v3.Dataset.getDefaultInstance());
}
/**
*
*
* <pre>
* The datasets read.
* </pre>
*
* <code>repeated .google.cloud.translation.v3.Dataset datasets = 1;</code>
*/
public com.google.cloud.translate.v3.Dataset.Builder addDatasetsBuilder(int index) {
return getDatasetsFieldBuilder()
.addBuilder(index, com.google.cloud.translate.v3.Dataset.getDefaultInstance());
}
/**
*
*
* <pre>
* The datasets read.
* </pre>
*
* <code>repeated .google.cloud.translation.v3.Dataset datasets = 1;</code>
*/
public java.util.List<com.google.cloud.translate.v3.Dataset.Builder> getDatasetsBuilderList() {
return getDatasetsFieldBuilder().getBuilderList();
}
    // Lazily creates the repeated-field builder and transfers list ownership to it;
    // datasets_ is nulled so all further access goes through datasetsBuilder_.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.translate.v3.Dataset,
            com.google.cloud.translate.v3.Dataset.Builder,
            com.google.cloud.translate.v3.DatasetOrBuilder>
        getDatasetsFieldBuilder() {
      if (datasetsBuilder_ == null) {
        datasetsBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloud.translate.v3.Dataset,
                com.google.cloud.translate.v3.Dataset.Builder,
                com.google.cloud.translate.v3.DatasetOrBuilder>(
                datasets_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
        datasets_ = null;
      }
      return datasetsBuilder_;
    }
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token to retrieve next page of results.
* Pass this token to the page_token field in the ListDatasetsRequest to
* obtain the corresponding page.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* A token to retrieve next page of results.
* Pass this token to the page_token field in the ListDatasetsRequest to
* obtain the corresponding page.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* A token to retrieve next page of results.
* Pass this token to the page_token field in the ListDatasetsRequest to
* obtain the corresponding page.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* A token to retrieve next page of results.
* Pass this token to the page_token field in the ListDatasetsRequest to
* obtain the corresponding page.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* A token to retrieve next page of results.
* Pass this token to the page_token field in the ListDatasetsRequest to
* obtain the corresponding page.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.translation.v3.ListDatasetsResponse)
}
  // @@protoc_insertion_point(class_scope:google.cloud.translation.v3.ListDatasetsResponse)
  // Singleton default instance; eagerly created in a static initializer.
  private static final com.google.cloud.translate.v3.ListDatasetsResponse DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.translate.v3.ListDatasetsResponse();
  }
  public static com.google.cloud.translate.v3.ListDatasetsResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Wire parser: builds via the Builder, attaching the partially-built message
  // to any parse failure so callers can inspect what was read before the error.
  private static final com.google.protobuf.Parser<ListDatasetsResponse> PARSER =
      new com.google.protobuf.AbstractParser<ListDatasetsResponse>() {
        @java.lang.Override
        public ListDatasetsResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<ListDatasetsResponse> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<ListDatasetsResponse> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.translate.v3.ListDatasetsResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/dialogflow/cx/v3/version.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.dialogflow.cx.v3;
/**
*
*
* <pre>
* The response message for
* [Versions.CompareVersions][google.cloud.dialogflow.cx.v3.Versions.CompareVersions].
* </pre>
*
* Protobuf type {@code google.cloud.dialogflow.cx.v3.CompareVersionsResponse}
*/
public final class CompareVersionsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.dialogflow.cx.v3.CompareVersionsResponse)
CompareVersionsResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use CompareVersionsResponse.newBuilder() to construct.
private CompareVersionsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private CompareVersionsResponse() {
baseVersionContentJson_ = "";
targetVersionContentJson_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new CompareVersionsResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.dialogflow.cx.v3.VersionProto
.internal_static_google_cloud_dialogflow_cx_v3_CompareVersionsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.dialogflow.cx.v3.VersionProto
.internal_static_google_cloud_dialogflow_cx_v3_CompareVersionsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.dialogflow.cx.v3.CompareVersionsResponse.class,
com.google.cloud.dialogflow.cx.v3.CompareVersionsResponse.Builder.class);
}
private int bitField0_;
public static final int BASE_VERSION_CONTENT_JSON_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object baseVersionContentJson_ = "";
/**
*
*
* <pre>
* JSON representation of the base version content.
* </pre>
*
* <code>string base_version_content_json = 1;</code>
*
* @return The baseVersionContentJson.
*/
@java.lang.Override
public java.lang.String getBaseVersionContentJson() {
java.lang.Object ref = baseVersionContentJson_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
baseVersionContentJson_ = s;
return s;
}
}
/**
*
*
* <pre>
* JSON representation of the base version content.
* </pre>
*
* <code>string base_version_content_json = 1;</code>
*
* @return The bytes for baseVersionContentJson.
*/
@java.lang.Override
public com.google.protobuf.ByteString getBaseVersionContentJsonBytes() {
java.lang.Object ref = baseVersionContentJson_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
baseVersionContentJson_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int TARGET_VERSION_CONTENT_JSON_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object targetVersionContentJson_ = "";
/**
*
*
* <pre>
* JSON representation of the target version content.
* </pre>
*
* <code>string target_version_content_json = 2;</code>
*
* @return The targetVersionContentJson.
*/
@java.lang.Override
public java.lang.String getTargetVersionContentJson() {
java.lang.Object ref = targetVersionContentJson_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
targetVersionContentJson_ = s;
return s;
}
}
/**
*
*
* <pre>
* JSON representation of the target version content.
* </pre>
*
* <code>string target_version_content_json = 2;</code>
*
* @return The bytes for targetVersionContentJson.
*/
@java.lang.Override
public com.google.protobuf.ByteString getTargetVersionContentJsonBytes() {
java.lang.Object ref = targetVersionContentJson_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
targetVersionContentJson_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int COMPARE_TIME_FIELD_NUMBER = 3;
private com.google.protobuf.Timestamp compareTime_;
/**
*
*
* <pre>
* The timestamp when the two version compares.
* </pre>
*
* <code>.google.protobuf.Timestamp compare_time = 3;</code>
*
* @return Whether the compareTime field is set.
*/
@java.lang.Override
public boolean hasCompareTime() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* The timestamp when the two version compares.
* </pre>
*
* <code>.google.protobuf.Timestamp compare_time = 3;</code>
*
* @return The compareTime.
*/
@java.lang.Override
public com.google.protobuf.Timestamp getCompareTime() {
return compareTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : compareTime_;
}
/**
*
*
* <pre>
* The timestamp when the two version compares.
* </pre>
*
* <code>.google.protobuf.Timestamp compare_time = 3;</code>
*/
@java.lang.Override
public com.google.protobuf.TimestampOrBuilder getCompareTimeOrBuilder() {
return compareTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : compareTime_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Proto3 strings are emitted only when non-empty (non-default).
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(baseVersionContentJson_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, baseVersionContentJson_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(targetVersionContentJson_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, targetVersionContentJson_);
    }
    // compare_time is emitted only when its has-bit (0x1) is set.
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(3, getCompareTime());
    }
    getUnknownFields().writeTo(output);
  }
  // Computes the serialized byte size once and memoizes it in memoizedSize.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(baseVersionContentJson_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, baseVersionContentJson_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(targetVersionContentJson_)) {
      size +=
          com.google.protobuf.GeneratedMessageV3.computeStringSize(2, targetVersionContentJson_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getCompareTime());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  /**
   * Value equality: two messages are equal when every declared field matches
   * (including compare_time presence) and their unknown-field sets are identical.
   */
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.dialogflow.cx.v3.CompareVersionsResponse)) {
      return super.equals(obj);
    }
    com.google.cloud.dialogflow.cx.v3.CompareVersionsResponse other =
        (com.google.cloud.dialogflow.cx.v3.CompareVersionsResponse) obj;
    if (!getBaseVersionContentJson().equals(other.getBaseVersionContentJson())) return false;
    if (!getTargetVersionContentJson().equals(other.getTargetVersionContentJson())) return false;
    // compare_time participates only when set on both sides; presence itself must agree.
    if (hasCompareTime() != other.hasCompareTime()) return false;
    if (hasCompareTime()) {
      if (!getCompareTime().equals(other.getCompareTime())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  /**
   * Hash code consistent with {@link #equals}, folding in the descriptor, each set field
   * tagged by its field number, and the unknown fields; memoized after first computation.
   */
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + BASE_VERSION_CONTENT_JSON_FIELD_NUMBER;
    hash = (53 * hash) + getBaseVersionContentJson().hashCode();
    hash = (37 * hash) + TARGET_VERSION_CONTENT_JSON_FIELD_NUMBER;
    hash = (53 * hash) + getTargetVersionContentJson().hashCode();
    if (hasCompareTime()) {
      hash = (37 * hash) + COMPARE_TIME_FIELD_NUMBER;
      hash = (53 * hash) + getCompareTime().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parseFrom overloads: each decodes a CompareVersionsResponse from the
  // given input (ByteBuffer, ByteString, byte[], InputStream, or CodedInputStream), with or
  // without an ExtensionRegistry. Delimited variants read a length-prefixed message.
  public static com.google.cloud.dialogflow.cx.v3.CompareVersionsResponse parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.dialogflow.cx.v3.CompareVersionsResponse parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.dialogflow.cx.v3.CompareVersionsResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.dialogflow.cx.v3.CompareVersionsResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.dialogflow.cx.v3.CompareVersionsResponse parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.dialogflow.cx.v3.CompareVersionsResponse parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.dialogflow.cx.v3.CompareVersionsResponse parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.dialogflow.cx.v3.CompareVersionsResponse parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.dialogflow.cx.v3.CompareVersionsResponse parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.dialogflow.cx.v3.CompareVersionsResponse parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.dialogflow.cx.v3.CompareVersionsResponse parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.dialogflow.cx.v3.CompareVersionsResponse parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Builder factory methods: all builders ultimately clone the shared default instance.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(
      com.google.cloud.dialogflow.cx.v3.CompareVersionsResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * The response message for
   * [Versions.CompareVersions][google.cloud.dialogflow.cx.v3.Versions.CompareVersions].
   * </pre>
   *
   * Protobuf type {@code google.cloud.dialogflow.cx.v3.CompareVersionsResponse}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.dialogflow.cx.v3.CompareVersionsResponse)
      com.google.cloud.dialogflow.cx.v3.CompareVersionsResponseOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.dialogflow.cx.v3.VersionProto
          .internal_static_google_cloud_dialogflow_cx_v3_CompareVersionsResponse_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.dialogflow.cx.v3.VersionProto
          .internal_static_google_cloud_dialogflow_cx_v3_CompareVersionsResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.dialogflow.cx.v3.CompareVersionsResponse.class,
              com.google.cloud.dialogflow.cx.v3.CompareVersionsResponse.Builder.class);
    }
    // Construct using com.google.cloud.dialogflow.cx.v3.CompareVersionsResponse.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    // Eagerly creates the nested compare_time field builder when the runtime requires it
    // (alwaysUseFieldBuilders is true in some protobuf runtime configurations).
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getCompareTimeFieldBuilder();
      }
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      baseVersionContentJson_ = "";
      targetVersionContentJson_ = "";
      compareTime_ = null;
      if (compareTimeBuilder_ != null) {
        compareTimeBuilder_.dispose();
        compareTimeBuilder_ = null;
      }
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.dialogflow.cx.v3.VersionProto
          .internal_static_google_cloud_dialogflow_cx_v3_CompareVersionsResponse_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.dialogflow.cx.v3.CompareVersionsResponse getDefaultInstanceForType() {
      return com.google.cloud.dialogflow.cx.v3.CompareVersionsResponse.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.dialogflow.cx.v3.CompareVersionsResponse build() {
      com.google.cloud.dialogflow.cx.v3.CompareVersionsResponse result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.dialogflow.cx.v3.CompareVersionsResponse buildPartial() {
      com.google.cloud.dialogflow.cx.v3.CompareVersionsResponse result =
          new com.google.cloud.dialogflow.cx.v3.CompareVersionsResponse(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Copies set fields into the message. Note the bit remap: builder bit 0x4 (compare_time)
    // becomes message bit 0x1, since the message only tracks presence of the timestamp.
    private void buildPartial0(com.google.cloud.dialogflow.cx.v3.CompareVersionsResponse result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.baseVersionContentJson_ = baseVersionContentJson_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.targetVersionContentJson_ = targetVersionContentJson_;
      }
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.compareTime_ =
            compareTimeBuilder_ == null ? compareTime_ : compareTimeBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      result.bitField0_ |= to_bitField0_;
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.dialogflow.cx.v3.CompareVersionsResponse) {
        return mergeFrom((com.google.cloud.dialogflow.cx.v3.CompareVersionsResponse) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Field-wise merge: non-empty strings overwrite, the timestamp is message-merged.
    public Builder mergeFrom(com.google.cloud.dialogflow.cx.v3.CompareVersionsResponse other) {
      if (other == com.google.cloud.dialogflow.cx.v3.CompareVersionsResponse.getDefaultInstance())
        return this;
      if (!other.getBaseVersionContentJson().isEmpty()) {
        baseVersionContentJson_ = other.baseVersionContentJson_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (!other.getTargetVersionContentJson().isEmpty()) {
        targetVersionContentJson_ = other.targetVersionContentJson_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (other.hasCompareTime()) {
        mergeCompareTime(other.getCompareTime());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Wire-format merge loop: dispatches on each tag (field number << 3 | wire type);
    // tag 0 means end of stream, unrecognized tags are preserved as unknown fields.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                baseVersionContentJson_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                targetVersionContentJson_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 26:
              {
                input.readMessage(getCompareTimeFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Presence bits: 0x1 = base_version_content_json, 0x2 = target_version_content_json,
    // 0x4 = compare_time.
    private int bitField0_;
    private java.lang.Object baseVersionContentJson_ = "";
    /**
     *
     *
     * <pre>
     * JSON representation of the base version content.
     * </pre>
     *
     * <code>string base_version_content_json = 1;</code>
     *
     * @return The baseVersionContentJson.
     */
    public java.lang.String getBaseVersionContentJson() {
      java.lang.Object ref = baseVersionContentJson_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        baseVersionContentJson_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * JSON representation of the base version content.
     * </pre>
     *
     * <code>string base_version_content_json = 1;</code>
     *
     * @return The bytes for baseVersionContentJson.
     */
    public com.google.protobuf.ByteString getBaseVersionContentJsonBytes() {
      java.lang.Object ref = baseVersionContentJson_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        baseVersionContentJson_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * JSON representation of the base version content.
     * </pre>
     *
     * <code>string base_version_content_json = 1;</code>
     *
     * @param value The baseVersionContentJson to set.
     * @return This builder for chaining.
     */
    public Builder setBaseVersionContentJson(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      baseVersionContentJson_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * JSON representation of the base version content.
     * </pre>
     *
     * <code>string base_version_content_json = 1;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearBaseVersionContentJson() {
      baseVersionContentJson_ = getDefaultInstance().getBaseVersionContentJson();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * JSON representation of the base version content.
     * </pre>
     *
     * <code>string base_version_content_json = 1;</code>
     *
     * @param value The bytes for baseVersionContentJson to set.
     * @return This builder for chaining.
     */
    public Builder setBaseVersionContentJsonBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      baseVersionContentJson_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    private java.lang.Object targetVersionContentJson_ = "";
    /**
     *
     *
     * <pre>
     * JSON representation of the target version content.
     * </pre>
     *
     * <code>string target_version_content_json = 2;</code>
     *
     * @return The targetVersionContentJson.
     */
    public java.lang.String getTargetVersionContentJson() {
      java.lang.Object ref = targetVersionContentJson_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        targetVersionContentJson_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * JSON representation of the target version content.
     * </pre>
     *
     * <code>string target_version_content_json = 2;</code>
     *
     * @return The bytes for targetVersionContentJson.
     */
    public com.google.protobuf.ByteString getTargetVersionContentJsonBytes() {
      java.lang.Object ref = targetVersionContentJson_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        targetVersionContentJson_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * JSON representation of the target version content.
     * </pre>
     *
     * <code>string target_version_content_json = 2;</code>
     *
     * @param value The targetVersionContentJson to set.
     * @return This builder for chaining.
     */
    public Builder setTargetVersionContentJson(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      targetVersionContentJson_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * JSON representation of the target version content.
     * </pre>
     *
     * <code>string target_version_content_json = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearTargetVersionContentJson() {
      targetVersionContentJson_ = getDefaultInstance().getTargetVersionContentJson();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * JSON representation of the target version content.
     * </pre>
     *
     * <code>string target_version_content_json = 2;</code>
     *
     * @param value The bytes for targetVersionContentJson to set.
     * @return This builder for chaining.
     */
    public Builder setTargetVersionContentJsonBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      targetVersionContentJson_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    private com.google.protobuf.Timestamp compareTime_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.Timestamp,
            com.google.protobuf.Timestamp.Builder,
            com.google.protobuf.TimestampOrBuilder>
        compareTimeBuilder_;
    /**
     *
     *
     * <pre>
     * The timestamp when the two version compares.
     * </pre>
     *
     * <code>.google.protobuf.Timestamp compare_time = 3;</code>
     *
     * @return Whether the compareTime field is set.
     */
    public boolean hasCompareTime() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     *
     *
     * <pre>
     * The timestamp when the two version compares.
     * </pre>
     *
     * <code>.google.protobuf.Timestamp compare_time = 3;</code>
     *
     * @return The compareTime.
     */
    public com.google.protobuf.Timestamp getCompareTime() {
      if (compareTimeBuilder_ == null) {
        return compareTime_ == null
            ? com.google.protobuf.Timestamp.getDefaultInstance()
            : compareTime_;
      } else {
        return compareTimeBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * The timestamp when the two version compares.
     * </pre>
     *
     * <code>.google.protobuf.Timestamp compare_time = 3;</code>
     */
    public Builder setCompareTime(com.google.protobuf.Timestamp value) {
      if (compareTimeBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        compareTime_ = value;
      } else {
        compareTimeBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The timestamp when the two version compares.
     * </pre>
     *
     * <code>.google.protobuf.Timestamp compare_time = 3;</code>
     */
    public Builder setCompareTime(com.google.protobuf.Timestamp.Builder builderForValue) {
      if (compareTimeBuilder_ == null) {
        compareTime_ = builderForValue.build();
      } else {
        compareTimeBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The timestamp when the two version compares.
     * </pre>
     *
     * <code>.google.protobuf.Timestamp compare_time = 3;</code>
     */
    public Builder mergeCompareTime(com.google.protobuf.Timestamp value) {
      if (compareTimeBuilder_ == null) {
        // Merge only when a non-default value is already present; otherwise overwrite.
        if (((bitField0_ & 0x00000004) != 0)
            && compareTime_ != null
            && compareTime_ != com.google.protobuf.Timestamp.getDefaultInstance()) {
          getCompareTimeBuilder().mergeFrom(value);
        } else {
          compareTime_ = value;
        }
      } else {
        compareTimeBuilder_.mergeFrom(value);
      }
      if (compareTime_ != null) {
        bitField0_ |= 0x00000004;
        onChanged();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The timestamp when the two version compares.
     * </pre>
     *
     * <code>.google.protobuf.Timestamp compare_time = 3;</code>
     */
    public Builder clearCompareTime() {
      bitField0_ = (bitField0_ & ~0x00000004);
      compareTime_ = null;
      if (compareTimeBuilder_ != null) {
        compareTimeBuilder_.dispose();
        compareTimeBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The timestamp when the two version compares.
     * </pre>
     *
     * <code>.google.protobuf.Timestamp compare_time = 3;</code>
     */
    public com.google.protobuf.Timestamp.Builder getCompareTimeBuilder() {
      bitField0_ |= 0x00000004;
      onChanged();
      return getCompareTimeFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * The timestamp when the two version compares.
     * </pre>
     *
     * <code>.google.protobuf.Timestamp compare_time = 3;</code>
     */
    public com.google.protobuf.TimestampOrBuilder getCompareTimeOrBuilder() {
      if (compareTimeBuilder_ != null) {
        return compareTimeBuilder_.getMessageOrBuilder();
      } else {
        return compareTime_ == null
            ? com.google.protobuf.Timestamp.getDefaultInstance()
            : compareTime_;
      }
    }
    /**
     * Lazily creates the nested field builder; after creation the plain field reference is
     * cleared because the builder becomes the single source of truth.
     *
     * <pre>
     * The timestamp when the two version compares.
     * </pre>
     *
     * <code>.google.protobuf.Timestamp compare_time = 3;</code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.Timestamp,
            com.google.protobuf.Timestamp.Builder,
            com.google.protobuf.TimestampOrBuilder>
        getCompareTimeFieldBuilder() {
      if (compareTimeBuilder_ == null) {
        compareTimeBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.protobuf.Timestamp,
                com.google.protobuf.Timestamp.Builder,
                com.google.protobuf.TimestampOrBuilder>(
                getCompareTime(), getParentForChildren(), isClean());
        compareTime_ = null;
      }
      return compareTimeBuilder_;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.cloud.dialogflow.cx.v3.CompareVersionsResponse)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.dialogflow.cx.v3.CompareVersionsResponse)
  // Shared immutable default instance; also used as the prototype for new builders.
  private static final com.google.cloud.dialogflow.cx.v3.CompareVersionsResponse DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.dialogflow.cx.v3.CompareVersionsResponse();
  }
  public static com.google.cloud.dialogflow.cx.v3.CompareVersionsResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser delegating to Builder.mergeFrom; any parse failure is rethrown as
  // InvalidProtocolBufferException carrying the partially-built message.
  private static final com.google.protobuf.Parser<CompareVersionsResponse> PARSER =
      new com.google.protobuf.AbstractParser<CompareVersionsResponse>() {
        @java.lang.Override
        public CompareVersionsResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<CompareVersionsResponse> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<CompareVersionsResponse> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.dialogflow.cx.v3.CompareVersionsResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/devtools/cloudbuild/v2/repositories.proto
// Protobuf Java Version: 3.25.8
package com.google.cloudbuild.v2;
/**
*
*
* <pre>
* Message for response to listing Connections.
* </pre>
*
* Protobuf type {@code google.devtools.cloudbuild.v2.ListConnectionsResponse}
*/
public final class ListConnectionsResponse extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.devtools.cloudbuild.v2.ListConnectionsResponse)
    ListConnectionsResponseOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use ListConnectionsResponse.newBuilder() to construct.
  private ListConnectionsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private ListConnectionsResponse() {
    connections_ = java.util.Collections.emptyList();
    nextPageToken_ = "";
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListConnectionsResponse();
  }
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloudbuild.v2.RepositoryManagerProto
        .internal_static_google_devtools_cloudbuild_v2_ListConnectionsResponse_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloudbuild.v2.RepositoryManagerProto
        .internal_static_google_devtools_cloudbuild_v2_ListConnectionsResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloudbuild.v2.ListConnectionsResponse.class,
            com.google.cloudbuild.v2.ListConnectionsResponse.Builder.class);
  }
  public static final int CONNECTIONS_FIELD_NUMBER = 1;
  @SuppressWarnings("serial")
  private java.util.List<com.google.cloudbuild.v2.Connection> connections_;
  /**
   *
   *
   * <pre>
   * The list of Connections.
   * </pre>
   *
   * <code>repeated .google.devtools.cloudbuild.v2.Connection connections = 1;</code>
   */
  @java.lang.Override
  public java.util.List<com.google.cloudbuild.v2.Connection> getConnectionsList() {
    return connections_;
  }
  /**
   *
   *
   * <pre>
   * The list of Connections.
   * </pre>
   *
   * <code>repeated .google.devtools.cloudbuild.v2.Connection connections = 1;</code>
   */
  @java.lang.Override
  public java.util.List<? extends com.google.cloudbuild.v2.ConnectionOrBuilder>
      getConnectionsOrBuilderList() {
    return connections_;
  }
  /**
   *
   *
   * <pre>
   * The list of Connections.
   * </pre>
   *
   * <code>repeated .google.devtools.cloudbuild.v2.Connection connections = 1;</code>
   */
  @java.lang.Override
  public int getConnectionsCount() {
    return connections_.size();
  }
  /**
   *
   *
   * <pre>
   * The list of Connections.
   * </pre>
   *
   * <code>repeated .google.devtools.cloudbuild.v2.Connection connections = 1;</code>
   */
  @java.lang.Override
  public com.google.cloudbuild.v2.Connection getConnections(int index) {
    return connections_.get(index);
  }
  /**
   *
   *
   * <pre>
   * The list of Connections.
   * </pre>
   *
   * <code>repeated .google.devtools.cloudbuild.v2.Connection connections = 1;</code>
   */
  @java.lang.Override
  public com.google.cloudbuild.v2.ConnectionOrBuilder getConnectionsOrBuilder(int index) {
    return connections_.get(index);
  }
  public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
  // Stored as either a String or a ByteString; converted lazily and cached on first access.
  @SuppressWarnings("serial")
  private volatile java.lang.Object nextPageToken_ = "";
  /**
   *
   *
   * <pre>
   * A token identifying a page of results the server should return.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The nextPageToken.
   */
  @java.lang.Override
  public java.lang.String getNextPageToken() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      nextPageToken_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * A token identifying a page of results the server should return.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The bytes for nextPageToken.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getNextPageTokenBytes() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      nextPageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Memoized result of isInitialized(): -1 = not yet computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  /**
   * Always {@code true}: proto3 messages have no required fields. Cached after first call.
   */
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  /**
   * Serializes fields in field-number order: each connection as field 1, then the page
   * token as field 2 (only when non-empty), then any preserved unknown fields.
   */
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    for (int i = 0; i < connections_.size(); i++) {
      output.writeMessage(1, connections_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
    }
    getUnknownFields().writeTo(output);
  }
  /**
   * Computes (and memoizes) the serialized size, mirroring {@link #writeTo} exactly.
   */
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    for (int i = 0; i < connections_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, connections_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  /**
   * Value equality over the connection list, the page token, and the unknown fields.
   */
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloudbuild.v2.ListConnectionsResponse)) {
      return super.equals(obj);
    }
    com.google.cloudbuild.v2.ListConnectionsResponse other =
        (com.google.cloudbuild.v2.ListConnectionsResponse) obj;
    if (!getConnectionsList().equals(other.getConnectionsList())) return false;
    if (!getNextPageToken().equals(other.getNextPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  /**
   * Hash code consistent with {@link #equals}; the repeated field contributes only when
   * non-empty. Memoized after the first computation.
   */
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getConnectionsCount() > 0) {
      hash = (37 * hash) + CONNECTIONS_FIELD_NUMBER;
      hash = (53 * hash) + getConnectionsList().hashCode();
    }
    hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getNextPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
public static com.google.cloudbuild.v2.ListConnectionsResponse parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloudbuild.v2.ListConnectionsResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloudbuild.v2.ListConnectionsResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloudbuild.v2.ListConnectionsResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloudbuild.v2.ListConnectionsResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloudbuild.v2.ListConnectionsResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloudbuild.v2.ListConnectionsResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloudbuild.v2.ListConnectionsResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloudbuild.v2.ListConnectionsResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
  // Length-prefixed stream parse, resolving extensions via extensionRegistry.
  public static com.google.cloudbuild.v2.ListConnectionsResponse parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Parses a ListConnectionsResponse from an already-open CodedInputStream.
  public static com.google.cloudbuild.v2.ListConnectionsResponse parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  // CodedInputStream parse with extension resolution via extensionRegistry.
  public static com.google.cloudbuild.v2.ListConnectionsResponse parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Instance-level builder factory required by Message; delegates to the static newBuilder().
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  // Creates an empty builder (implemented as a builder seeded from the default instance).
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  // Creates a builder pre-populated with a copy of prototype's fields.
  public static Builder newBuilder(com.google.cloudbuild.v2.ListConnectionsResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  // Returns a builder with this message's fields; the default instance yields a fresh empty builder.
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  // Framework hook: creates a builder whose onChanged() notifications propagate to parent.
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Message for response to listing Connections.
   * </pre>
   *
   * Protobuf type {@code google.devtools.cloudbuild.v2.ListConnectionsResponse}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.devtools.cloudbuild.v2.ListConnectionsResponse)
      com.google.cloudbuild.v2.ListConnectionsResponseOrBuilder {
    // Returns the message-type descriptor generated from the .proto definition.
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloudbuild.v2.RepositoryManagerProto
          .internal_static_google_devtools_cloudbuild_v2_ListConnectionsResponse_descriptor;
    }
    // Supplies the reflective field-accessor table used by the GeneratedMessageV3 machinery.
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloudbuild.v2.RepositoryManagerProto
          .internal_static_google_devtools_cloudbuild_v2_ListConnectionsResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloudbuild.v2.ListConnectionsResponse.class,
              com.google.cloudbuild.v2.ListConnectionsResponse.Builder.class);
    }
    // Construct using com.google.cloudbuild.v2.ListConnectionsResponse.newBuilder()
    private Builder() {}
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    // Resets every field to its default and clears the presence bits.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (connectionsBuilder_ == null) {
        connections_ = java.util.Collections.emptyList();
      } else {
        connections_ = null;
        connectionsBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      nextPageToken_ = "";
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloudbuild.v2.RepositoryManagerProto
          .internal_static_google_devtools_cloudbuild_v2_ListConnectionsResponse_descriptor;
    }
    @java.lang.Override
    public com.google.cloudbuild.v2.ListConnectionsResponse getDefaultInstanceForType() {
      return com.google.cloudbuild.v2.ListConnectionsResponse.getDefaultInstance();
    }
    // Builds the message; proto3 has no required fields, so isInitialized() is always true here.
    @java.lang.Override
    public com.google.cloudbuild.v2.ListConnectionsResponse build() {
      com.google.cloudbuild.v2.ListConnectionsResponse result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    // Builds without the initialization check; copies repeated fields first, then scalar fields.
    @java.lang.Override
    public com.google.cloudbuild.v2.ListConnectionsResponse buildPartial() {
      com.google.cloudbuild.v2.ListConnectionsResponse result =
          new com.google.cloudbuild.v2.ListConnectionsResponse(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Transfers connections_ into the result; freezes the builder's list (clears the mutable bit)
    // so the built message and the builder can safely share the now-unmodifiable list.
    private void buildPartialRepeatedFields(
        com.google.cloudbuild.v2.ListConnectionsResponse result) {
      if (connectionsBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)) {
          connections_ = java.util.Collections.unmodifiableList(connections_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.connections_ = connections_;
      } else {
        result.connections_ = connectionsBuilder_.build();
      }
    }
    // Copies set scalar fields (only next_page_token here) into the result.
    private void buildPartial0(com.google.cloudbuild.v2.ListConnectionsResponse result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.nextPageToken_ = nextPageToken_;
      }
    }
    // The reflective mutators below delegate straight to the GeneratedMessageV3.Builder base class.
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    // Dynamic-dispatch merge: routes to the typed overload when possible.
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloudbuild.v2.ListConnectionsResponse) {
        return mergeFrom((com.google.cloudbuild.v2.ListConnectionsResponse) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Merges other into this builder: repeated connections are appended (or the other list is
    // adopted wholesale when this builder's list is empty), and a non-empty next_page_token wins.
    public Builder mergeFrom(com.google.cloudbuild.v2.ListConnectionsResponse other) {
      if (other == com.google.cloudbuild.v2.ListConnectionsResponse.getDefaultInstance())
        return this;
      if (connectionsBuilder_ == null) {
        if (!other.connections_.isEmpty()) {
          if (connections_.isEmpty()) {
            connections_ = other.connections_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureConnectionsIsMutable();
            connections_.addAll(other.connections_);
          }
          onChanged();
        }
      } else {
        if (!other.connections_.isEmpty()) {
          if (connectionsBuilder_.isEmpty()) {
            connectionsBuilder_.dispose();
            connectionsBuilder_ = null;
            connections_ = other.connections_;
            bitField0_ = (bitField0_ & ~0x00000001);
            connectionsBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getConnectionsFieldBuilder()
                    : null;
          } else {
            connectionsBuilder_.addAllMessages(other.connections_);
          }
        }
      }
      if (!other.getNextPageToken().isEmpty()) {
        nextPageToken_ = other.nextPageToken_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Wire-format merge: reads tag/value pairs until EOF (tag 0) or an end-group tag;
    // field 1 (tag 10) = connections, field 2 (tag 18) = next_page_token.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                com.google.cloudbuild.v2.Connection m =
                    input.readMessage(
                        com.google.cloudbuild.v2.Connection.parser(), extensionRegistry);
                if (connectionsBuilder_ == null) {
                  ensureConnectionsIsMutable();
                  connections_.add(m);
                } else {
                  connectionsBuilder_.addMessage(m);
                }
                break;
              } // case 10
            case 18:
              {
                nextPageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Bit 0x00000001: connections_ list is builder-owned/mutable; bit 0x00000002: nextPageToken_ set.
    private int bitField0_;
    private java.util.List<com.google.cloudbuild.v2.Connection> connections_ =
        java.util.Collections.emptyList();
    // Copy-on-write: replaces the (possibly shared/immutable) list with a private ArrayList.
    private void ensureConnectionsIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        connections_ = new java.util.ArrayList<com.google.cloudbuild.v2.Connection>(connections_);
        bitField0_ |= 0x00000001;
      }
    }
    // Lazily created; once non-null it owns the list and connections_ is nulled out.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloudbuild.v2.Connection,
            com.google.cloudbuild.v2.Connection.Builder,
            com.google.cloudbuild.v2.ConnectionOrBuilder>
        connectionsBuilder_;
    /**
     *
     *
     * <pre>
     * The list of Connections.
     * </pre>
     *
     * <code>repeated .google.devtools.cloudbuild.v2.Connection connections = 1;</code>
     */
    public java.util.List<com.google.cloudbuild.v2.Connection> getConnectionsList() {
      if (connectionsBuilder_ == null) {
        return java.util.Collections.unmodifiableList(connections_);
      } else {
        return connectionsBuilder_.getMessageList();
      }
    }
    /**
     *
     *
     * <pre>
     * The list of Connections.
     * </pre>
     *
     * <code>repeated .google.devtools.cloudbuild.v2.Connection connections = 1;</code>
     */
    public int getConnectionsCount() {
      if (connectionsBuilder_ == null) {
        return connections_.size();
      } else {
        return connectionsBuilder_.getCount();
      }
    }
    /**
     *
     *
     * <pre>
     * The list of Connections.
     * </pre>
     *
     * <code>repeated .google.devtools.cloudbuild.v2.Connection connections = 1;</code>
     */
    public com.google.cloudbuild.v2.Connection getConnections(int index) {
      if (connectionsBuilder_ == null) {
        return connections_.get(index);
      } else {
        return connectionsBuilder_.getMessage(index);
      }
    }
    /**
     *
     *
     * <pre>
     * The list of Connections.
     * </pre>
     *
     * <code>repeated .google.devtools.cloudbuild.v2.Connection connections = 1;</code>
     */
    public Builder setConnections(int index, com.google.cloudbuild.v2.Connection value) {
      if (connectionsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureConnectionsIsMutable();
        connections_.set(index, value);
        onChanged();
      } else {
        connectionsBuilder_.setMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of Connections.
     * </pre>
     *
     * <code>repeated .google.devtools.cloudbuild.v2.Connection connections = 1;</code>
     */
    public Builder setConnections(
        int index, com.google.cloudbuild.v2.Connection.Builder builderForValue) {
      if (connectionsBuilder_ == null) {
        ensureConnectionsIsMutable();
        connections_.set(index, builderForValue.build());
        onChanged();
      } else {
        connectionsBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of Connections.
     * </pre>
     *
     * <code>repeated .google.devtools.cloudbuild.v2.Connection connections = 1;</code>
     */
    public Builder addConnections(com.google.cloudbuild.v2.Connection value) {
      if (connectionsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureConnectionsIsMutable();
        connections_.add(value);
        onChanged();
      } else {
        connectionsBuilder_.addMessage(value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of Connections.
     * </pre>
     *
     * <code>repeated .google.devtools.cloudbuild.v2.Connection connections = 1;</code>
     */
    public Builder addConnections(int index, com.google.cloudbuild.v2.Connection value) {
      if (connectionsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureConnectionsIsMutable();
        connections_.add(index, value);
        onChanged();
      } else {
        connectionsBuilder_.addMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of Connections.
     * </pre>
     *
     * <code>repeated .google.devtools.cloudbuild.v2.Connection connections = 1;</code>
     */
    public Builder addConnections(com.google.cloudbuild.v2.Connection.Builder builderForValue) {
      if (connectionsBuilder_ == null) {
        ensureConnectionsIsMutable();
        connections_.add(builderForValue.build());
        onChanged();
      } else {
        connectionsBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of Connections.
     * </pre>
     *
     * <code>repeated .google.devtools.cloudbuild.v2.Connection connections = 1;</code>
     */
    public Builder addConnections(
        int index, com.google.cloudbuild.v2.Connection.Builder builderForValue) {
      if (connectionsBuilder_ == null) {
        ensureConnectionsIsMutable();
        connections_.add(index, builderForValue.build());
        onChanged();
      } else {
        connectionsBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of Connections.
     * </pre>
     *
     * <code>repeated .google.devtools.cloudbuild.v2.Connection connections = 1;</code>
     */
    public Builder addAllConnections(
        java.lang.Iterable<? extends com.google.cloudbuild.v2.Connection> values) {
      if (connectionsBuilder_ == null) {
        ensureConnectionsIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, connections_);
        onChanged();
      } else {
        connectionsBuilder_.addAllMessages(values);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of Connections.
     * </pre>
     *
     * <code>repeated .google.devtools.cloudbuild.v2.Connection connections = 1;</code>
     */
    public Builder clearConnections() {
      if (connectionsBuilder_ == null) {
        connections_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        connectionsBuilder_.clear();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of Connections.
     * </pre>
     *
     * <code>repeated .google.devtools.cloudbuild.v2.Connection connections = 1;</code>
     */
    public Builder removeConnections(int index) {
      if (connectionsBuilder_ == null) {
        ensureConnectionsIsMutable();
        connections_.remove(index);
        onChanged();
      } else {
        connectionsBuilder_.remove(index);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of Connections.
     * </pre>
     *
     * <code>repeated .google.devtools.cloudbuild.v2.Connection connections = 1;</code>
     */
    public com.google.cloudbuild.v2.Connection.Builder getConnectionsBuilder(int index) {
      return getConnectionsFieldBuilder().getBuilder(index);
    }
    /**
     *
     *
     * <pre>
     * The list of Connections.
     * </pre>
     *
     * <code>repeated .google.devtools.cloudbuild.v2.Connection connections = 1;</code>
     */
    public com.google.cloudbuild.v2.ConnectionOrBuilder getConnectionsOrBuilder(int index) {
      if (connectionsBuilder_ == null) {
        return connections_.get(index);
      } else {
        return connectionsBuilder_.getMessageOrBuilder(index);
      }
    }
    /**
     *
     *
     * <pre>
     * The list of Connections.
     * </pre>
     *
     * <code>repeated .google.devtools.cloudbuild.v2.Connection connections = 1;</code>
     */
    public java.util.List<? extends com.google.cloudbuild.v2.ConnectionOrBuilder>
        getConnectionsOrBuilderList() {
      if (connectionsBuilder_ != null) {
        return connectionsBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(connections_);
      }
    }
    /**
     *
     *
     * <pre>
     * The list of Connections.
     * </pre>
     *
     * <code>repeated .google.devtools.cloudbuild.v2.Connection connections = 1;</code>
     */
    public com.google.cloudbuild.v2.Connection.Builder addConnectionsBuilder() {
      return getConnectionsFieldBuilder()
          .addBuilder(com.google.cloudbuild.v2.Connection.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * The list of Connections.
     * </pre>
     *
     * <code>repeated .google.devtools.cloudbuild.v2.Connection connections = 1;</code>
     */
    public com.google.cloudbuild.v2.Connection.Builder addConnectionsBuilder(int index) {
      return getConnectionsFieldBuilder()
          .addBuilder(index, com.google.cloudbuild.v2.Connection.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * The list of Connections.
     * </pre>
     *
     * <code>repeated .google.devtools.cloudbuild.v2.Connection connections = 1;</code>
     */
    public java.util.List<com.google.cloudbuild.v2.Connection.Builder> getConnectionsBuilderList() {
      return getConnectionsFieldBuilder().getBuilderList();
    }
    // Lazily creates the field builder and hands it ownership of connections_ (which is then nulled).
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloudbuild.v2.Connection,
            com.google.cloudbuild.v2.Connection.Builder,
            com.google.cloudbuild.v2.ConnectionOrBuilder>
        getConnectionsFieldBuilder() {
      if (connectionsBuilder_ == null) {
        connectionsBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloudbuild.v2.Connection,
                com.google.cloudbuild.v2.Connection.Builder,
                com.google.cloudbuild.v2.ConnectionOrBuilder>(
                connections_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
        connections_ = null;
      }
      return connectionsBuilder_;
    }
    // Stored as String or ByteString; lazily converted (and cached) on access.
    private java.lang.Object nextPageToken_ = "";
    /**
     *
     *
     * <pre>
     * A token identifying a page of results the server should return.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The nextPageToken.
     */
    public java.lang.String getNextPageToken() {
      java.lang.Object ref = nextPageToken_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        nextPageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * A token identifying a page of results the server should return.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The bytes for nextPageToken.
     */
    public com.google.protobuf.ByteString getNextPageTokenBytes() {
      java.lang.Object ref = nextPageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        nextPageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * A token identifying a page of results the server should return.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * A token identifying a page of results the server should return.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearNextPageToken() {
      nextPageToken_ = getDefaultInstance().getNextPageToken();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * A token identifying a page of results the server should return.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The bytes for nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.devtools.cloudbuild.v2.ListConnectionsResponse)
  }
// @@protoc_insertion_point(class_scope:google.devtools.cloudbuild.v2.ListConnectionsResponse)
  // Singleton default (all-fields-default) instance, created eagerly at class load.
  private static final com.google.cloudbuild.v2.ListConnectionsResponse DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloudbuild.v2.ListConnectionsResponse();
  }
  // Returns the shared immutable default instance.
  public static com.google.cloudbuild.v2.ListConnectionsResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Wire-format parser; wraps any parse failure so the partially-built message is attached
  // to the thrown InvalidProtocolBufferException via setUnfinishedMessage.
  private static final com.google.protobuf.Parser<ListConnectionsResponse> PARSER =
      new com.google.protobuf.AbstractParser<ListConnectionsResponse>() {
        @java.lang.Override
        public ListConnectionsResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  // Static accessor for the singleton parser.
  public static com.google.protobuf.Parser<ListConnectionsResponse> parser() {
    return PARSER;
  }
  // Instance-level parser accessor required by the Message interface.
  @java.lang.Override
  public com.google.protobuf.Parser<ListConnectionsResponse> getParserForType() {
    return PARSER;
  }
  // Instance-level default accessor required by the Message interface.
  @java.lang.Override
  public com.google.cloudbuild.v2.ListConnectionsResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
// NOTE(review): extraction artifact — the following dataset-viewer UI text was appended after the
// class's closing brace and is not part of the generated Java source; commented out so the file compiles:
// | Subsets and Splits | No community queries yet |
// The top public SQL queries from the community will appear here once available.