repo_id stringclasses 875 values | size int64 974 38.9k | file_path stringlengths 10 308 | content stringlengths 974 38.9k |
|---|---|---|---|
googleapis/google-cloud-java | 37,903 | java-datastream/proto-google-cloud-datastream-v1alpha1/src/main/java/com/google/cloud/datastream/v1alpha1/MysqlTable.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/datastream/v1alpha1/datastream_resources.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.datastream.v1alpha1;
/**
*
*
* <pre>
* MySQL table.
* </pre>
*
* Protobuf type {@code google.cloud.datastream.v1alpha1.MysqlTable}
*/
public final class MysqlTable extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.datastream.v1alpha1.MysqlTable)
MysqlTableOrBuilder {
private static final long serialVersionUID = 0L;
// Use MysqlTable.newBuilder() to construct.
private MysqlTable(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private MysqlTable() {
tableName_ = "";
mysqlColumns_ = java.util.Collections.emptyList();
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new MysqlTable();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.datastream.v1alpha1.CloudDatastreamResourcesProto
.internal_static_google_cloud_datastream_v1alpha1_MysqlTable_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.datastream.v1alpha1.CloudDatastreamResourcesProto
.internal_static_google_cloud_datastream_v1alpha1_MysqlTable_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.datastream.v1alpha1.MysqlTable.class,
com.google.cloud.datastream.v1alpha1.MysqlTable.Builder.class);
}
public static final int TABLE_NAME_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object tableName_ = "";
/**
*
*
* <pre>
* Table name.
* </pre>
*
* <code>string table_name = 1;</code>
*
* @return The tableName.
*/
@java.lang.Override
public java.lang.String getTableName() {
java.lang.Object ref = tableName_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
tableName_ = s;
return s;
}
}
/**
*
*
* <pre>
* Table name.
* </pre>
*
* <code>string table_name = 1;</code>
*
* @return The bytes for tableName.
*/
@java.lang.Override
public com.google.protobuf.ByteString getTableNameBytes() {
java.lang.Object ref = tableName_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
tableName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int MYSQL_COLUMNS_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.datastream.v1alpha1.MysqlColumn> mysqlColumns_;
/**
*
*
* <pre>
* MySQL columns in the database.
* When unspecified as part of include/exclude lists, includes/excludes
* everything.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1alpha1.MysqlColumn mysql_columns = 2;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.datastream.v1alpha1.MysqlColumn> getMysqlColumnsList() {
return mysqlColumns_;
}
/**
*
*
* <pre>
* MySQL columns in the database.
* When unspecified as part of include/exclude lists, includes/excludes
* everything.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1alpha1.MysqlColumn mysql_columns = 2;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.datastream.v1alpha1.MysqlColumnOrBuilder>
getMysqlColumnsOrBuilderList() {
return mysqlColumns_;
}
/**
*
*
* <pre>
* MySQL columns in the database.
* When unspecified as part of include/exclude lists, includes/excludes
* everything.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1alpha1.MysqlColumn mysql_columns = 2;</code>
*/
@java.lang.Override
public int getMysqlColumnsCount() {
return mysqlColumns_.size();
}
/**
*
*
* <pre>
* MySQL columns in the database.
* When unspecified as part of include/exclude lists, includes/excludes
* everything.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1alpha1.MysqlColumn mysql_columns = 2;</code>
*/
@java.lang.Override
public com.google.cloud.datastream.v1alpha1.MysqlColumn getMysqlColumns(int index) {
return mysqlColumns_.get(index);
}
/**
*
*
* <pre>
* MySQL columns in the database.
* When unspecified as part of include/exclude lists, includes/excludes
* everything.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1alpha1.MysqlColumn mysql_columns = 2;</code>
*/
@java.lang.Override
public com.google.cloud.datastream.v1alpha1.MysqlColumnOrBuilder getMysqlColumnsOrBuilder(
int index) {
return mysqlColumns_.get(index);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(tableName_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, tableName_);
}
for (int i = 0; i < mysqlColumns_.size(); i++) {
output.writeMessage(2, mysqlColumns_.get(i));
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(tableName_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, tableName_);
}
for (int i = 0; i < mysqlColumns_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, mysqlColumns_.get(i));
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.datastream.v1alpha1.MysqlTable)) {
return super.equals(obj);
}
com.google.cloud.datastream.v1alpha1.MysqlTable other =
(com.google.cloud.datastream.v1alpha1.MysqlTable) obj;
if (!getTableName().equals(other.getTableName())) return false;
if (!getMysqlColumnsList().equals(other.getMysqlColumnsList())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER;
hash = (53 * hash) + getTableName().hashCode();
if (getMysqlColumnsCount() > 0) {
hash = (37 * hash) + MYSQL_COLUMNS_FIELD_NUMBER;
hash = (53 * hash) + getMysqlColumnsList().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.datastream.v1alpha1.MysqlTable parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.datastream.v1alpha1.MysqlTable parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.datastream.v1alpha1.MysqlTable parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.datastream.v1alpha1.MysqlTable parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.datastream.v1alpha1.MysqlTable parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.datastream.v1alpha1.MysqlTable parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.datastream.v1alpha1.MysqlTable parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.datastream.v1alpha1.MysqlTable parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.datastream.v1alpha1.MysqlTable parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.datastream.v1alpha1.MysqlTable parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.datastream.v1alpha1.MysqlTable parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.datastream.v1alpha1.MysqlTable parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.datastream.v1alpha1.MysqlTable prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* MySQL table.
* </pre>
*
* Protobuf type {@code google.cloud.datastream.v1alpha1.MysqlTable}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.datastream.v1alpha1.MysqlTable)
com.google.cloud.datastream.v1alpha1.MysqlTableOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.datastream.v1alpha1.CloudDatastreamResourcesProto
.internal_static_google_cloud_datastream_v1alpha1_MysqlTable_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.datastream.v1alpha1.CloudDatastreamResourcesProto
.internal_static_google_cloud_datastream_v1alpha1_MysqlTable_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.datastream.v1alpha1.MysqlTable.class,
com.google.cloud.datastream.v1alpha1.MysqlTable.Builder.class);
}
// Construct using com.google.cloud.datastream.v1alpha1.MysqlTable.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
tableName_ = "";
if (mysqlColumnsBuilder_ == null) {
mysqlColumns_ = java.util.Collections.emptyList();
} else {
mysqlColumns_ = null;
mysqlColumnsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000002);
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.datastream.v1alpha1.CloudDatastreamResourcesProto
.internal_static_google_cloud_datastream_v1alpha1_MysqlTable_descriptor;
}
@java.lang.Override
public com.google.cloud.datastream.v1alpha1.MysqlTable getDefaultInstanceForType() {
return com.google.cloud.datastream.v1alpha1.MysqlTable.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.datastream.v1alpha1.MysqlTable build() {
com.google.cloud.datastream.v1alpha1.MysqlTable result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.datastream.v1alpha1.MysqlTable buildPartial() {
com.google.cloud.datastream.v1alpha1.MysqlTable result =
new com.google.cloud.datastream.v1alpha1.MysqlTable(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.cloud.datastream.v1alpha1.MysqlTable result) {
if (mysqlColumnsBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)) {
mysqlColumns_ = java.util.Collections.unmodifiableList(mysqlColumns_);
bitField0_ = (bitField0_ & ~0x00000002);
}
result.mysqlColumns_ = mysqlColumns_;
} else {
result.mysqlColumns_ = mysqlColumnsBuilder_.build();
}
}
private void buildPartial0(com.google.cloud.datastream.v1alpha1.MysqlTable result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.tableName_ = tableName_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.datastream.v1alpha1.MysqlTable) {
return mergeFrom((com.google.cloud.datastream.v1alpha1.MysqlTable) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.datastream.v1alpha1.MysqlTable other) {
if (other == com.google.cloud.datastream.v1alpha1.MysqlTable.getDefaultInstance())
return this;
if (!other.getTableName().isEmpty()) {
tableName_ = other.tableName_;
bitField0_ |= 0x00000001;
onChanged();
}
if (mysqlColumnsBuilder_ == null) {
if (!other.mysqlColumns_.isEmpty()) {
if (mysqlColumns_.isEmpty()) {
mysqlColumns_ = other.mysqlColumns_;
bitField0_ = (bitField0_ & ~0x00000002);
} else {
ensureMysqlColumnsIsMutable();
mysqlColumns_.addAll(other.mysqlColumns_);
}
onChanged();
}
} else {
if (!other.mysqlColumns_.isEmpty()) {
if (mysqlColumnsBuilder_.isEmpty()) {
mysqlColumnsBuilder_.dispose();
mysqlColumnsBuilder_ = null;
mysqlColumns_ = other.mysqlColumns_;
bitField0_ = (bitField0_ & ~0x00000002);
mysqlColumnsBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getMysqlColumnsFieldBuilder()
: null;
} else {
mysqlColumnsBuilder_.addAllMessages(other.mysqlColumns_);
}
}
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
tableName_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
com.google.cloud.datastream.v1alpha1.MysqlColumn m =
input.readMessage(
com.google.cloud.datastream.v1alpha1.MysqlColumn.parser(),
extensionRegistry);
if (mysqlColumnsBuilder_ == null) {
ensureMysqlColumnsIsMutable();
mysqlColumns_.add(m);
} else {
mysqlColumnsBuilder_.addMessage(m);
}
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object tableName_ = "";
/**
*
*
* <pre>
* Table name.
* </pre>
*
* <code>string table_name = 1;</code>
*
* @return The tableName.
*/
public java.lang.String getTableName() {
java.lang.Object ref = tableName_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
tableName_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Table name.
* </pre>
*
* <code>string table_name = 1;</code>
*
* @return The bytes for tableName.
*/
public com.google.protobuf.ByteString getTableNameBytes() {
java.lang.Object ref = tableName_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
tableName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Table name.
* </pre>
*
* <code>string table_name = 1;</code>
*
* @param value The tableName to set.
* @return This builder for chaining.
*/
public Builder setTableName(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
tableName_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Table name.
* </pre>
*
* <code>string table_name = 1;</code>
*
* @return This builder for chaining.
*/
public Builder clearTableName() {
tableName_ = getDefaultInstance().getTableName();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Table name.
* </pre>
*
* <code>string table_name = 1;</code>
*
* @param value The bytes for tableName to set.
* @return This builder for chaining.
*/
public Builder setTableNameBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
tableName_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private java.util.List<com.google.cloud.datastream.v1alpha1.MysqlColumn> mysqlColumns_ =
java.util.Collections.emptyList();
private void ensureMysqlColumnsIsMutable() {
if (!((bitField0_ & 0x00000002) != 0)) {
mysqlColumns_ =
new java.util.ArrayList<com.google.cloud.datastream.v1alpha1.MysqlColumn>(
mysqlColumns_);
bitField0_ |= 0x00000002;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.datastream.v1alpha1.MysqlColumn,
com.google.cloud.datastream.v1alpha1.MysqlColumn.Builder,
com.google.cloud.datastream.v1alpha1.MysqlColumnOrBuilder>
mysqlColumnsBuilder_;
/**
*
*
* <pre>
* MySQL columns in the database.
* When unspecified as part of include/exclude lists, includes/excludes
* everything.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1alpha1.MysqlColumn mysql_columns = 2;</code>
*/
public java.util.List<com.google.cloud.datastream.v1alpha1.MysqlColumn> getMysqlColumnsList() {
if (mysqlColumnsBuilder_ == null) {
return java.util.Collections.unmodifiableList(mysqlColumns_);
} else {
return mysqlColumnsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* MySQL columns in the database.
* When unspecified as part of include/exclude lists, includes/excludes
* everything.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1alpha1.MysqlColumn mysql_columns = 2;</code>
*/
public int getMysqlColumnsCount() {
if (mysqlColumnsBuilder_ == null) {
return mysqlColumns_.size();
} else {
return mysqlColumnsBuilder_.getCount();
}
}
/**
*
*
* <pre>
* MySQL columns in the database.
* When unspecified as part of include/exclude lists, includes/excludes
* everything.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1alpha1.MysqlColumn mysql_columns = 2;</code>
*/
public com.google.cloud.datastream.v1alpha1.MysqlColumn getMysqlColumns(int index) {
if (mysqlColumnsBuilder_ == null) {
return mysqlColumns_.get(index);
} else {
return mysqlColumnsBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* MySQL columns in the database.
* When unspecified as part of include/exclude lists, includes/excludes
* everything.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1alpha1.MysqlColumn mysql_columns = 2;</code>
*/
public Builder setMysqlColumns(
int index, com.google.cloud.datastream.v1alpha1.MysqlColumn value) {
if (mysqlColumnsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureMysqlColumnsIsMutable();
mysqlColumns_.set(index, value);
onChanged();
} else {
mysqlColumnsBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* MySQL columns in the database.
* When unspecified as part of include/exclude lists, includes/excludes
* everything.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1alpha1.MysqlColumn mysql_columns = 2;</code>
*/
public Builder setMysqlColumns(
int index, com.google.cloud.datastream.v1alpha1.MysqlColumn.Builder builderForValue) {
if (mysqlColumnsBuilder_ == null) {
ensureMysqlColumnsIsMutable();
mysqlColumns_.set(index, builderForValue.build());
onChanged();
} else {
mysqlColumnsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* MySQL columns in the database.
* When unspecified as part of include/exclude lists, includes/excludes
* everything.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1alpha1.MysqlColumn mysql_columns = 2;</code>
*/
public Builder addMysqlColumns(com.google.cloud.datastream.v1alpha1.MysqlColumn value) {
if (mysqlColumnsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureMysqlColumnsIsMutable();
mysqlColumns_.add(value);
onChanged();
} else {
mysqlColumnsBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* MySQL columns in the database.
* When unspecified as part of include/exclude lists, includes/excludes
* everything.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1alpha1.MysqlColumn mysql_columns = 2;</code>
*/
public Builder addMysqlColumns(
int index, com.google.cloud.datastream.v1alpha1.MysqlColumn value) {
if (mysqlColumnsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureMysqlColumnsIsMutable();
mysqlColumns_.add(index, value);
onChanged();
} else {
mysqlColumnsBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* MySQL columns in the database.
* When unspecified as part of include/exclude lists, includes/excludes
* everything.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1alpha1.MysqlColumn mysql_columns = 2;</code>
*/
public Builder addMysqlColumns(
com.google.cloud.datastream.v1alpha1.MysqlColumn.Builder builderForValue) {
if (mysqlColumnsBuilder_ == null) {
ensureMysqlColumnsIsMutable();
mysqlColumns_.add(builderForValue.build());
onChanged();
} else {
mysqlColumnsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* MySQL columns in the database.
* When unspecified as part of include/exclude lists, includes/excludes
* everything.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1alpha1.MysqlColumn mysql_columns = 2;</code>
*/
public Builder addMysqlColumns(
int index, com.google.cloud.datastream.v1alpha1.MysqlColumn.Builder builderForValue) {
if (mysqlColumnsBuilder_ == null) {
ensureMysqlColumnsIsMutable();
mysqlColumns_.add(index, builderForValue.build());
onChanged();
} else {
mysqlColumnsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* MySQL columns in the database.
* When unspecified as part of include/exclude lists, includes/excludes
* everything.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1alpha1.MysqlColumn mysql_columns = 2;</code>
*/
public Builder addAllMysqlColumns(
java.lang.Iterable<? extends com.google.cloud.datastream.v1alpha1.MysqlColumn> values) {
if (mysqlColumnsBuilder_ == null) {
ensureMysqlColumnsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, mysqlColumns_);
onChanged();
} else {
mysqlColumnsBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* MySQL columns in the database.
* When unspecified as part of include/exclude lists, includes/excludes
* everything.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1alpha1.MysqlColumn mysql_columns = 2;</code>
*/
public Builder clearMysqlColumns() {
if (mysqlColumnsBuilder_ == null) {
mysqlColumns_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
} else {
mysqlColumnsBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* MySQL columns in the database.
* When unspecified as part of include/exclude lists, includes/excludes
* everything.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1alpha1.MysqlColumn mysql_columns = 2;</code>
*/
public Builder removeMysqlColumns(int index) {
if (mysqlColumnsBuilder_ == null) {
ensureMysqlColumnsIsMutable();
mysqlColumns_.remove(index);
onChanged();
} else {
mysqlColumnsBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* MySQL columns in the database.
* When unspecified as part of include/exclude lists, includes/excludes
* everything.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1alpha1.MysqlColumn mysql_columns = 2;</code>
*/
public com.google.cloud.datastream.v1alpha1.MysqlColumn.Builder getMysqlColumnsBuilder(
int index) {
return getMysqlColumnsFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* MySQL columns in the database.
* When unspecified as part of include/exclude lists, includes/excludes
* everything.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1alpha1.MysqlColumn mysql_columns = 2;</code>
*/
public com.google.cloud.datastream.v1alpha1.MysqlColumnOrBuilder getMysqlColumnsOrBuilder(
int index) {
if (mysqlColumnsBuilder_ == null) {
return mysqlColumns_.get(index);
} else {
return mysqlColumnsBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* MySQL columns in the database.
* When unspecified as part of include/exclude lists, includes/excludes
* everything.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1alpha1.MysqlColumn mysql_columns = 2;</code>
*/
public java.util.List<? extends com.google.cloud.datastream.v1alpha1.MysqlColumnOrBuilder>
getMysqlColumnsOrBuilderList() {
if (mysqlColumnsBuilder_ != null) {
return mysqlColumnsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(mysqlColumns_);
}
}
/**
*
*
* <pre>
* MySQL columns in the database.
* When unspecified as part of include/exclude lists, includes/excludes
* everything.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1alpha1.MysqlColumn mysql_columns = 2;</code>
*/
public com.google.cloud.datastream.v1alpha1.MysqlColumn.Builder addMysqlColumnsBuilder() {
return getMysqlColumnsFieldBuilder()
.addBuilder(com.google.cloud.datastream.v1alpha1.MysqlColumn.getDefaultInstance());
}
/**
*
*
* <pre>
* MySQL columns in the database.
* When unspecified as part of include/exclude lists, includes/excludes
* everything.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1alpha1.MysqlColumn mysql_columns = 2;</code>
*/
public com.google.cloud.datastream.v1alpha1.MysqlColumn.Builder addMysqlColumnsBuilder(
int index) {
return getMysqlColumnsFieldBuilder()
.addBuilder(index, com.google.cloud.datastream.v1alpha1.MysqlColumn.getDefaultInstance());
}
/**
*
*
* <pre>
* MySQL columns in the database.
* When unspecified as part of include/exclude lists, includes/excludes
* everything.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1alpha1.MysqlColumn mysql_columns = 2;</code>
*/
public java.util.List<com.google.cloud.datastream.v1alpha1.MysqlColumn.Builder>
getMysqlColumnsBuilderList() {
return getMysqlColumnsFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.datastream.v1alpha1.MysqlColumn,
com.google.cloud.datastream.v1alpha1.MysqlColumn.Builder,
com.google.cloud.datastream.v1alpha1.MysqlColumnOrBuilder>
getMysqlColumnsFieldBuilder() {
if (mysqlColumnsBuilder_ == null) {
mysqlColumnsBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.datastream.v1alpha1.MysqlColumn,
com.google.cloud.datastream.v1alpha1.MysqlColumn.Builder,
com.google.cloud.datastream.v1alpha1.MysqlColumnOrBuilder>(
mysqlColumns_, ((bitField0_ & 0x00000002) != 0), getParentForChildren(), isClean());
mysqlColumns_ = null;
}
return mysqlColumnsBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.datastream.v1alpha1.MysqlTable)
}
// @@protoc_insertion_point(class_scope:google.cloud.datastream.v1alpha1.MysqlTable)
private static final com.google.cloud.datastream.v1alpha1.MysqlTable DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.datastream.v1alpha1.MysqlTable();
}
public static com.google.cloud.datastream.v1alpha1.MysqlTable getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<MysqlTable> PARSER =
new com.google.protobuf.AbstractParser<MysqlTable>() {
@java.lang.Override
public MysqlTable parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<MysqlTable> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<MysqlTable> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.datastream.v1alpha1.MysqlTable getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 37,915 | java-redis/proto-google-cloud-redis-v1/src/main/java/com/google/cloud/redis/v1/CreateInstanceRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/redis/v1/cloud_redis.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.redis.v1;
/**
*
*
* <pre>
* Request for
* [CreateInstance][google.cloud.redis.v1.CloudRedis.CreateInstance].
* </pre>
*
* Protobuf type {@code google.cloud.redis.v1.CreateInstanceRequest}
*/
public final class CreateInstanceRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.redis.v1.CreateInstanceRequest)
CreateInstanceRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use CreateInstanceRequest.newBuilder() to construct.
private CreateInstanceRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private CreateInstanceRequest() {
parent_ = "";
instanceId_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new CreateInstanceRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.redis.v1.CloudRedisServiceV1Proto
.internal_static_google_cloud_redis_v1_CreateInstanceRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.redis.v1.CloudRedisServiceV1Proto
.internal_static_google_cloud_redis_v1_CreateInstanceRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.redis.v1.CreateInstanceRequest.class,
com.google.cloud.redis.v1.CreateInstanceRequest.Builder.class);
}
private int bitField0_;
public static final int PARENT_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The resource name of the instance location using the form:
* `projects/{project_id}/locations/{location_id}`
* where `location_id` refers to a GCP region.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
@java.lang.Override
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. The resource name of the instance location using the form:
* `projects/{project_id}/locations/{location_id}`
* where `location_id` refers to a GCP region.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int INSTANCE_ID_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object instanceId_ = "";
/**
*
*
* <pre>
* Required. The logical name of the Redis instance in the customer project
* with the following restrictions:
*
* * Must contain only lowercase letters, numbers, and hyphens.
* * Must start with a letter.
* * Must be between 1-40 characters.
* * Must end with a number or a letter.
* * Must be unique within the customer project / location
* </pre>
*
* <code>string instance_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The instanceId.
*/
@java.lang.Override
public java.lang.String getInstanceId() {
java.lang.Object ref = instanceId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
instanceId_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. The logical name of the Redis instance in the customer project
* with the following restrictions:
*
* * Must contain only lowercase letters, numbers, and hyphens.
* * Must start with a letter.
* * Must be between 1-40 characters.
* * Must end with a number or a letter.
* * Must be unique within the customer project / location
* </pre>
*
* <code>string instance_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for instanceId.
*/
@java.lang.Override
public com.google.protobuf.ByteString getInstanceIdBytes() {
java.lang.Object ref = instanceId_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
instanceId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int INSTANCE_FIELD_NUMBER = 3;
private com.google.cloud.redis.v1.Instance instance_;
/**
*
*
* <pre>
* Required. A Redis [Instance] resource
* </pre>
*
* <code>.google.cloud.redis.v1.Instance instance = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the instance field is set.
*/
@java.lang.Override
public boolean hasInstance() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. A Redis [Instance] resource
* </pre>
*
* <code>.google.cloud.redis.v1.Instance instance = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The instance.
*/
@java.lang.Override
public com.google.cloud.redis.v1.Instance getInstance() {
return instance_ == null ? com.google.cloud.redis.v1.Instance.getDefaultInstance() : instance_;
}
/**
*
*
* <pre>
* Required. A Redis [Instance] resource
* </pre>
*
* <code>.google.cloud.redis.v1.Instance instance = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.redis.v1.InstanceOrBuilder getInstanceOrBuilder() {
return instance_ == null ? com.google.cloud.redis.v1.Instance.getDefaultInstance() : instance_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(instanceId_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, instanceId_);
}
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(3, getInstance());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(instanceId_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, instanceId_);
}
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getInstance());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.redis.v1.CreateInstanceRequest)) {
return super.equals(obj);
}
com.google.cloud.redis.v1.CreateInstanceRequest other =
(com.google.cloud.redis.v1.CreateInstanceRequest) obj;
if (!getParent().equals(other.getParent())) return false;
if (!getInstanceId().equals(other.getInstanceId())) return false;
if (hasInstance() != other.hasInstance()) return false;
if (hasInstance()) {
if (!getInstance().equals(other.getInstance())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + PARENT_FIELD_NUMBER;
hash = (53 * hash) + getParent().hashCode();
hash = (37 * hash) + INSTANCE_ID_FIELD_NUMBER;
hash = (53 * hash) + getInstanceId().hashCode();
if (hasInstance()) {
hash = (37 * hash) + INSTANCE_FIELD_NUMBER;
hash = (53 * hash) + getInstance().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.redis.v1.CreateInstanceRequest parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.redis.v1.CreateInstanceRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.redis.v1.CreateInstanceRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.redis.v1.CreateInstanceRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.redis.v1.CreateInstanceRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.redis.v1.CreateInstanceRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.redis.v1.CreateInstanceRequest parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.redis.v1.CreateInstanceRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.redis.v1.CreateInstanceRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.redis.v1.CreateInstanceRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.redis.v1.CreateInstanceRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.redis.v1.CreateInstanceRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.redis.v1.CreateInstanceRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request for
* [CreateInstance][google.cloud.redis.v1.CloudRedis.CreateInstance].
* </pre>
*
* Protobuf type {@code google.cloud.redis.v1.CreateInstanceRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.redis.v1.CreateInstanceRequest)
com.google.cloud.redis.v1.CreateInstanceRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.redis.v1.CloudRedisServiceV1Proto
.internal_static_google_cloud_redis_v1_CreateInstanceRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.redis.v1.CloudRedisServiceV1Proto
.internal_static_google_cloud_redis_v1_CreateInstanceRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.redis.v1.CreateInstanceRequest.class,
com.google.cloud.redis.v1.CreateInstanceRequest.Builder.class);
}
// Construct using com.google.cloud.redis.v1.CreateInstanceRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getInstanceFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
parent_ = "";
instanceId_ = "";
instance_ = null;
if (instanceBuilder_ != null) {
instanceBuilder_.dispose();
instanceBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.redis.v1.CloudRedisServiceV1Proto
.internal_static_google_cloud_redis_v1_CreateInstanceRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.redis.v1.CreateInstanceRequest getDefaultInstanceForType() {
return com.google.cloud.redis.v1.CreateInstanceRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.redis.v1.CreateInstanceRequest build() {
com.google.cloud.redis.v1.CreateInstanceRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.redis.v1.CreateInstanceRequest buildPartial() {
com.google.cloud.redis.v1.CreateInstanceRequest result =
new com.google.cloud.redis.v1.CreateInstanceRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.redis.v1.CreateInstanceRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.parent_ = parent_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.instanceId_ = instanceId_;
}
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000004) != 0)) {
result.instance_ = instanceBuilder_ == null ? instance_ : instanceBuilder_.build();
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.redis.v1.CreateInstanceRequest) {
return mergeFrom((com.google.cloud.redis.v1.CreateInstanceRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.redis.v1.CreateInstanceRequest other) {
if (other == com.google.cloud.redis.v1.CreateInstanceRequest.getDefaultInstance())
return this;
if (!other.getParent().isEmpty()) {
parent_ = other.parent_;
bitField0_ |= 0x00000001;
onChanged();
}
if (!other.getInstanceId().isEmpty()) {
instanceId_ = other.instanceId_;
bitField0_ |= 0x00000002;
onChanged();
}
if (other.hasInstance()) {
mergeInstance(other.getInstance());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
parent_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
instanceId_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
case 26:
{
input.readMessage(getInstanceFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000004;
break;
} // case 26
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The resource name of the instance location using the form:
* `projects/{project_id}/locations/{location_id}`
* where `location_id` refers to a GCP region.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The resource name of the instance location using the form:
* `projects/{project_id}/locations/{location_id}`
* where `location_id` refers to a GCP region.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The resource name of the instance location using the form:
* `projects/{project_id}/locations/{location_id}`
* where `location_id` refers to a GCP region.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
public Builder setParent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The resource name of the instance location using the form:
* `projects/{project_id}/locations/{location_id}`
* where `location_id` refers to a GCP region.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearParent() {
parent_ = getDefaultInstance().getParent();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The resource name of the instance location using the form:
* `projects/{project_id}/locations/{location_id}`
* where `location_id` refers to a GCP region.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
*/
public Builder setParentBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private java.lang.Object instanceId_ = "";
/**
*
*
* <pre>
* Required. The logical name of the Redis instance in the customer project
* with the following restrictions:
*
* * Must contain only lowercase letters, numbers, and hyphens.
* * Must start with a letter.
* * Must be between 1-40 characters.
* * Must end with a number or a letter.
* * Must be unique within the customer project / location
* </pre>
*
* <code>string instance_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The instanceId.
*/
public java.lang.String getInstanceId() {
java.lang.Object ref = instanceId_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
instanceId_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The logical name of the Redis instance in the customer project
* with the following restrictions:
*
* * Must contain only lowercase letters, numbers, and hyphens.
* * Must start with a letter.
* * Must be between 1-40 characters.
* * Must end with a number or a letter.
* * Must be unique within the customer project / location
* </pre>
*
* <code>string instance_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for instanceId.
*/
public com.google.protobuf.ByteString getInstanceIdBytes() {
java.lang.Object ref = instanceId_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
instanceId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The logical name of the Redis instance in the customer project
* with the following restrictions:
*
* * Must contain only lowercase letters, numbers, and hyphens.
* * Must start with a letter.
* * Must be between 1-40 characters.
* * Must end with a number or a letter.
* * Must be unique within the customer project / location
* </pre>
*
* <code>string instance_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The instanceId to set.
* @return This builder for chaining.
*/
public Builder setInstanceId(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
instanceId_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The logical name of the Redis instance in the customer project
* with the following restrictions:
*
* * Must contain only lowercase letters, numbers, and hyphens.
* * Must start with a letter.
* * Must be between 1-40 characters.
* * Must end with a number or a letter.
* * Must be unique within the customer project / location
* </pre>
*
* <code>string instance_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return This builder for chaining.
*/
public Builder clearInstanceId() {
instanceId_ = getDefaultInstance().getInstanceId();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The logical name of the Redis instance in the customer project
* with the following restrictions:
*
* * Must contain only lowercase letters, numbers, and hyphens.
* * Must start with a letter.
* * Must be between 1-40 characters.
* * Must end with a number or a letter.
* * Must be unique within the customer project / location
* </pre>
*
* <code>string instance_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The bytes for instanceId to set.
* @return This builder for chaining.
*/
public Builder setInstanceIdBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
instanceId_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private com.google.cloud.redis.v1.Instance instance_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.redis.v1.Instance,
com.google.cloud.redis.v1.Instance.Builder,
com.google.cloud.redis.v1.InstanceOrBuilder>
instanceBuilder_;
/**
*
*
* <pre>
* Required. A Redis [Instance] resource
* </pre>
*
* <code>.google.cloud.redis.v1.Instance instance = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the instance field is set.
*/
public boolean hasInstance() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
*
*
* <pre>
* Required. A Redis [Instance] resource
* </pre>
*
* <code>.google.cloud.redis.v1.Instance instance = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The instance.
*/
public com.google.cloud.redis.v1.Instance getInstance() {
if (instanceBuilder_ == null) {
return instance_ == null
? com.google.cloud.redis.v1.Instance.getDefaultInstance()
: instance_;
} else {
return instanceBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. A Redis [Instance] resource
* </pre>
*
* <code>.google.cloud.redis.v1.Instance instance = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setInstance(com.google.cloud.redis.v1.Instance value) {
if (instanceBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
instance_ = value;
} else {
instanceBuilder_.setMessage(value);
}
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. A Redis [Instance] resource
* </pre>
*
* <code>.google.cloud.redis.v1.Instance instance = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setInstance(com.google.cloud.redis.v1.Instance.Builder builderForValue) {
if (instanceBuilder_ == null) {
instance_ = builderForValue.build();
} else {
instanceBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. A Redis [Instance] resource
* </pre>
*
* <code>.google.cloud.redis.v1.Instance instance = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeInstance(com.google.cloud.redis.v1.Instance value) {
if (instanceBuilder_ == null) {
if (((bitField0_ & 0x00000004) != 0)
&& instance_ != null
&& instance_ != com.google.cloud.redis.v1.Instance.getDefaultInstance()) {
getInstanceBuilder().mergeFrom(value);
} else {
instance_ = value;
}
} else {
instanceBuilder_.mergeFrom(value);
}
if (instance_ != null) {
bitField0_ |= 0x00000004;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. A Redis [Instance] resource
* </pre>
*
* <code>.google.cloud.redis.v1.Instance instance = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearInstance() {
bitField0_ = (bitField0_ & ~0x00000004);
instance_ = null;
if (instanceBuilder_ != null) {
instanceBuilder_.dispose();
instanceBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. A Redis [Instance] resource
* </pre>
*
* <code>.google.cloud.redis.v1.Instance instance = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.redis.v1.Instance.Builder getInstanceBuilder() {
bitField0_ |= 0x00000004;
onChanged();
return getInstanceFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. A Redis [Instance] resource
* </pre>
*
* <code>.google.cloud.redis.v1.Instance instance = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.redis.v1.InstanceOrBuilder getInstanceOrBuilder() {
if (instanceBuilder_ != null) {
return instanceBuilder_.getMessageOrBuilder();
} else {
return instance_ == null
? com.google.cloud.redis.v1.Instance.getDefaultInstance()
: instance_;
}
}
/**
*
*
* <pre>
* Required. A Redis [Instance] resource
* </pre>
*
* <code>.google.cloud.redis.v1.Instance instance = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.redis.v1.Instance,
com.google.cloud.redis.v1.Instance.Builder,
com.google.cloud.redis.v1.InstanceOrBuilder>
getInstanceFieldBuilder() {
if (instanceBuilder_ == null) {
instanceBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.redis.v1.Instance,
com.google.cloud.redis.v1.Instance.Builder,
com.google.cloud.redis.v1.InstanceOrBuilder>(
getInstance(), getParentForChildren(), isClean());
instance_ = null;
}
return instanceBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.redis.v1.CreateInstanceRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.redis.v1.CreateInstanceRequest)
private static final com.google.cloud.redis.v1.CreateInstanceRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.redis.v1.CreateInstanceRequest();
}
public static com.google.cloud.redis.v1.CreateInstanceRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<CreateInstanceRequest> PARSER =
new com.google.protobuf.AbstractParser<CreateInstanceRequest>() {
@java.lang.Override
public CreateInstanceRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
// Static accessor for the shared parser instance.
public static com.google.protobuf.Parser<CreateInstanceRequest> parser() {
    return PARSER;
}
// Instance-level accessor required by MessageLite; returns the same shared parser.
@java.lang.Override
public com.google.protobuf.Parser<CreateInstanceRequest> getParserForType() {
    return PARSER;
}
// Instance-level accessor required by MessageLite; returns the shared default instance.
@java.lang.Override
public com.google.cloud.redis.v1.CreateInstanceRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
}
}
|
apache/ofbiz-framework | 37,850 | applications/accounting/src/main/java/org/apache/ofbiz/accounting/thirdparty/orbital/OrbitalPaymentServices.java | /*******************************************************************************
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*******************************************************************************/
package org.apache.ofbiz.accounting.thirdparty.orbital;
import java.math.BigDecimal;
import java.math.RoundingMode;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import org.apache.ofbiz.accounting.payment.PaymentGatewayServices;
import org.apache.ofbiz.base.util.Debug;
import org.apache.ofbiz.base.util.UtilFormatOut;
import org.apache.ofbiz.base.util.UtilNumber;
import org.apache.ofbiz.base.util.UtilProperties;
import org.apache.ofbiz.base.util.UtilValidate;
import org.apache.ofbiz.entity.Delegator;
import org.apache.ofbiz.entity.GenericEntityException;
import org.apache.ofbiz.entity.GenericValue;
import org.apache.ofbiz.entity.util.EntityQuery;
import org.apache.ofbiz.service.DispatchContext;
import org.apache.ofbiz.service.ModelService;
import org.apache.ofbiz.service.ServiceUtil;
import com.paymentech.orbital.sdk.configurator.Configurator;
import com.paymentech.orbital.sdk.interfaces.RequestIF;
import com.paymentech.orbital.sdk.interfaces.ResponseIF;
import com.paymentech.orbital.sdk.interfaces.TransactionProcessorIF;
import com.paymentech.orbital.sdk.request.FieldNotFoundException;
import com.paymentech.orbital.sdk.request.Request;
import com.paymentech.orbital.sdk.transactionProcessor.TransactionException;
import com.paymentech.orbital.sdk.transactionProcessor.TransactionProcessor;
import com.paymentech.orbital.sdk.util.exceptions.InitializationException;
/**
 * Payment gateway services for the Chase Paymentech Orbital gateway
 * (authorize, authorize+capture, capture, refund, release), implemented
 * on top of the Orbital Java SDK.
 */
public class OrbitalPaymentServices {

    private static final String MODULE = OrbitalPaymentServices.class.getName();
    // UI-label resource bundle used for localized error messages.
    private static final String RESOURCE = "AccountingUiLabels";
    private static final String ERROR = "Error";
    // Scale/rounding applied to amounts before converting to Orbital's implied-decimal strings.
    private static final int DECIMALS = UtilNumber.getBigDecimalScale("invoice.decimals");
    private static final RoundingMode ROUNDING = UtilNumber.getRoundingMode("invoice.rounding");
    // Fixed BIN value sent on every request.
    public static final String BIN_VALUE = "000002";
    // NOTE(review): these three fields are shared mutable static state; concurrent
    // service invocations would clobber each other's request/response/processor.
    // Confirm whether calls are serialized upstream — otherwise this is a
    // thread-safety hazard that should be refactored to method-local state.
    public static TransactionProcessorIF tp = null;
    public static ResponseIF response = null;
    public static RequestIF request = null;
/**
 * Service: authorize a credit card (AUTH_ONLY) through the Orbital gateway.
 * Builds a NewOrder request from the context, validates it, runs it through
 * the transaction processor and maps the gateway response into the standard
 * CC-auth result fields.
 *
 * @param ctx     service dispatch context (supplies the delegator)
 * @param context service context; must carry the fields consumed by
 *                buildAuthOrAuthCaptureTransaction (creditCard, processAmount, orderId, ...)
 * @return service result map with authResult/authCode/authRefNum/etc.
 */
public static Map<String, Object> ccAuth(DispatchContext ctx, Map<String, Object> context) {
    Delegator delegator = ctx.getDelegator();
    Map<String, Object> results = ServiceUtil.returnSuccess();
    Map<String, Object> props = buildOrbitalProperties(context, delegator);
    props.put("transType", "AUTH_ONLY");
    // Tell the request object which template to use (see RequestIF.java)
    try {
        request = new Request(RequestIF.NEW_ORDER_TRANSACTION);
    } catch (InitializationException e) {
        Debug.logError(e, "Error in request initialization", MODULE);
    }
    buildAuthOrAuthCaptureTransaction(context, delegator, props, request, results);
    Map<String, Object> validateResults = validateRequest(context, props, request);
    String respMsg = (String) validateResults.get(ModelService.RESPONSE_MESSAGE);
    if (ModelService.RESPOND_ERROR.equals(respMsg)) {
        results.put(ModelService.ERROR_MESSAGE, "Validation Failed - invalid values");
        return results;
    }
    initializeTransactionProcessor();
    Map<String, Object> processCardResponseContext = processCard(request);
    // For debugging purposes: dump the raw gateway response to the log.
    printTransResult((ResponseIF) processCardResponseContext.get("processCardResponse"));
    processAuthTransResult(processCardResponseContext, results);
    return results;
}
/**
 * Service: authorize and immediately capture (AUTH_CAPTURE) through Orbital.
 * Same flow as ccAuth but sends message type "AC" and maps both auth and
 * capture result fields from the single gateway response.
 *
 * @param ctx     service dispatch context (supplies the delegator)
 * @param context service context consumed by buildAuthOrAuthCaptureTransaction
 * @return service result map with auth* and capture* fields populated
 */
public static Map<String, Object> ccAuthCapture(DispatchContext ctx, Map<String, Object> context) {
    Delegator delegator = ctx.getDelegator();
    Map<String, Object> results = ServiceUtil.returnSuccess();
    Map<String, Object> props = buildOrbitalProperties(context, delegator);
    props.put("transType", "AUTH_CAPTURE");
    // Tell the request object which template to use (see RequestIF.java)
    try {
        request = new Request(RequestIF.NEW_ORDER_TRANSACTION);
    } catch (InitializationException e) {
        Debug.logError(e, "Error in request initialization", MODULE);
    }
    buildAuthOrAuthCaptureTransaction(context, delegator, props, request, results);
    Map<String, Object> validateResults = validateRequest(context, props, request);
    String respMsg = (String) validateResults.get(ModelService.RESPONSE_MESSAGE);
    if (ModelService.RESPOND_ERROR.equals(respMsg)) {
        results.put(ModelService.ERROR_MESSAGE, "Validation Failed - invalid values");
        return results;
    }
    initializeTransactionProcessor();
    Map<String, Object> processCardResponseContext = processCard(request);
    // For debugging purposes: dump the raw gateway response to the log.
    printTransResult((ResponseIF) processCardResponseContext.get("processCardResponse"));
    processAuthCaptureTransResult(processCardResponseContext, results);
    return results;
}
/**
 * Service: capture funds for a previously authorized payment (Orbital
 * "Mark For Capture", PRIOR_AUTH_CAPTURE). Requires an existing auth
 * transaction on the order payment preference; fails with a localized
 * error if the credit card or auth transaction cannot be resolved.
 *
 * @param ctx     service dispatch context (supplies the delegator)
 * @param context service context with orderPaymentPreference, captureAmount, locale
 * @return service result map with capture* fields populated
 */
public static Map<String, Object> ccCapture(DispatchContext ctx, Map<String, Object> context) {
    Delegator delegator = ctx.getDelegator();
    Map<String, Object> results = ServiceUtil.returnSuccess();
    Map<String, Object> props = buildOrbitalProperties(context, delegator);
    Locale locale = (Locale) context.get("locale");
    GenericValue orderPaymentPreference = (GenericValue) context.get("orderPaymentPreference");
    GenericValue creditCard = null;
    try {
        creditCard = orderPaymentPreference.getRelatedOne("CreditCard", false);
    } catch (GenericEntityException e) {
        Debug.logError(e, MODULE);
        return ServiceUtil.returnError(UtilProperties.getMessage(RESOURCE,
                "AccountingPaymentUnableToGetCCInfo", locale));
    }
    GenericValue authTransaction = PaymentGatewayServices.getAuthTransaction(orderPaymentPreference);
    if (authTransaction == null) {
        return ServiceUtil.returnError(UtilProperties.getMessage(RESOURCE,
                "AccountingPaymentTransactionAuthorizationNotFoundCannotCapture", locale));
    }
    // Hand the resolved entities to the request builder through the context map.
    context.put("creditCard", creditCard);
    context.put("authTransaction", authTransaction);
    context.put("orderId", orderPaymentPreference.getString("orderId"));
    props.put("transType", "PRIOR_AUTH_CAPTURE");
    // Tell the request object which template to use (see RequestIF.java)
    try {
        request = new Request(RequestIF.MARK_FOR_CAPTURE_TRANSACTION);
    } catch (InitializationException e) {
        Debug.logError(e, "Error in request initialization", MODULE);
    }
    buildCaptureTransaction(context, delegator, props, request, results);
    Map<String, Object> validateResults = validateRequest(context, props, request);
    String respMsg = (String) validateResults.get(ModelService.RESPONSE_MESSAGE);
    if (ModelService.RESPOND_ERROR.equals(respMsg)) {
        results.put(ModelService.ERROR_MESSAGE, "Validation Failed - invalid values");
        return results;
    }
    initializeTransactionProcessor();
    Map<String, Object> processCardResponseContext = processCard(request);
    // For debugging purposes: dump the raw gateway response to the log.
    printTransResult((ResponseIF) processCardResponseContext.get("processCardResponse"));
    processCaptureTransResult(processCardResponseContext, results);
    return results;
}
/**
 * Service: refund a previously captured payment. Sends a NewOrder request
 * with message type "R" (see buildRefundTransaction). Requires both the
 * credit card and an existing auth transaction to be resolvable from the
 * order payment preference.
 *
 * @param ctx     service dispatch context (supplies the delegator)
 * @param context service context with orderPaymentPreference, refundAmount, locale
 * @return service result map with refund* fields populated
 */
public static Map<String, Object> ccRefund(DispatchContext ctx, Map<String, Object> context) {
    Locale locale = (Locale) context.get("locale");
    Delegator delegator = ctx.getDelegator();
    Map<String, Object> results = ServiceUtil.returnSuccess();
    Map<String, Object> props = buildOrbitalProperties(context, delegator);
    GenericValue orderPaymentPreference = (GenericValue) context.get("orderPaymentPreference");
    GenericValue creditCard = null;
    try {
        creditCard = orderPaymentPreference.getRelatedOne("CreditCard", false);
    } catch (GenericEntityException e) {
        Debug.logError(e, MODULE);
        return ServiceUtil.returnError(UtilProperties.getMessage(RESOURCE,
                "AccountingPaymentUnableToGetCCInfo", locale));
    }
    GenericValue authTransaction = PaymentGatewayServices.getAuthTransaction(orderPaymentPreference);
    if (authTransaction == null) {
        return ServiceUtil.returnError(UtilProperties.getMessage(RESOURCE,
                "AccountingPaymentTransactionAuthorizationNotFoundCannotRefund", locale));
    }
    // Hand the resolved entities to the request builder through the context map.
    context.put("creditCard", creditCard);
    context.put("authTransaction", authTransaction);
    context.put("orderId", orderPaymentPreference.getString("orderId"));
    // Tell the request object which template to use (see RequestIF.java)
    try {
        request = new Request(RequestIF.NEW_ORDER_TRANSACTION);
    } catch (InitializationException e) {
        Debug.logError(e, "Error in request initialization", MODULE);
    }
    buildRefundTransaction(context, props, request, results);
    Map<String, Object> validateResults = validateRequest(context, props, request);
    String respMsg = (String) validateResults.get(ModelService.RESPONSE_MESSAGE);
    if (ModelService.RESPOND_ERROR.equals(respMsg)) {
        results.put(ModelService.ERROR_MESSAGE, "Validation Failed - invalid values");
        return results;
    }
    initializeTransactionProcessor();
    Map<String, Object> processCardResponseContext = processCard(request);
    // For debugging purposes: dump the raw gateway response to the log.
    printTransResult((ResponseIF) processCardResponseContext.get("processCardResponse"));
    processRefundTransResult(processCardResponseContext, results);
    return results;
}
/**
 * Service: release (void/reverse) a previously authorized payment using the
 * Orbital REVERSE transaction, identified by the auth transaction's TxRefNum.
 *
 * @param ctx     service dispatch context (supplies the delegator)
 * @param context service context with orderPaymentPreference, releaseAmount, locale
 * @return service result map with release* fields populated
 */
public static Map<String, Object> ccRelease(DispatchContext ctx, Map<String, Object> context) {
    Locale locale = (Locale) context.get("locale");
    Delegator delegator = ctx.getDelegator();
    Map<String, Object> results = ServiceUtil.returnSuccess();
    Map<String, Object> props = buildOrbitalProperties(context, delegator);
    GenericValue orderPaymentPreference = (GenericValue) context.get("orderPaymentPreference");
    // Lookup only verifies the credit card exists; its value is not used further here.
    try {
        orderPaymentPreference.getRelatedOne("CreditCard", false);
    } catch (GenericEntityException e) {
        Debug.logError(e, MODULE);
        return ServiceUtil.returnError(UtilProperties.getMessage(RESOURCE,
                "AccountingPaymentUnableToGetCCInfo", locale));
    }
    GenericValue authTransaction = PaymentGatewayServices.getAuthTransaction(orderPaymentPreference);
    if (authTransaction == null) {
        return ServiceUtil.returnError(UtilProperties.getMessage(RESOURCE,
                "AccountingPaymentTransactionAuthorizationNotFoundCannotRelease", locale));
    }
    context.put("authTransaction", authTransaction);
    context.put("orderId", orderPaymentPreference.getString("orderId"));
    // Tell the request object which template to use (see RequestIF.java)
    try {
        request = new Request(RequestIF.REVERSE_TRANSACTION);
    } catch (InitializationException e) {
        Debug.logError(e, "Error in request initialization", MODULE);
    }
    buildReleaseTransaction(context, delegator, props, request, results);
    Map<String, Object> validateResults = validateRequest(context, props, request);
    String respMsg = (String) validateResults.get(ModelService.RESPONSE_MESSAGE);
    if (ModelService.RESPOND_ERROR.equals(respMsg)) {
        results.put(ModelService.ERROR_MESSAGE, "Validation Failed - invalid values");
        return results;
    }
    initializeTransactionProcessor();
    Map<String, Object> processCardResponseContext = processCard(request);
    // For debugging purposes: dump the raw gateway response to the log.
    printTransResult((ResponseIF) processCardResponseContext.get("processCardResponse"));
    processReleaseTransResult(processCardResponseContext, results);
    return results;
}
/**
 * Assembles the Orbital SDK configuration from the PaymentGatewayOrbital
 * entity row identified by context.paymentGatewayConfigId, merges it with
 * the linehandler.properties file and installs the result on the SDK
 * Configurator singleton.
 *
 * @return the configuration map (also read later for merchantId / transType)
 */
private static Map<String, Object> buildOrbitalProperties(Map<String, Object> context, Delegator delegator) {
    // TODO: Will move this to property file and then will read it from there.
    String configFile = "/applications/accounting/config/linehandler.properties";
    String paymentGatewayConfigId = (String) context.get("paymentGatewayConfigId");
    Map<String, Object> buildConfiguratorContext = new HashMap<>();
    try {
        buildConfiguratorContext.put("OrbitalConnectionUsername", getPaymentGatewayConfigValue(delegator, paymentGatewayConfigId, "username"));
        buildConfiguratorContext.put("OrbitalConnectionPassword", getPaymentGatewayConfigValue(delegator, paymentGatewayConfigId, "connectionPassword"));
        buildConfiguratorContext.put("merchantId", getPaymentGatewayConfigValue(delegator, paymentGatewayConfigId, "merchantId"));
        buildConfiguratorContext.put("engine.class", getPaymentGatewayConfigValue(delegator, paymentGatewayConfigId, "engineClass"));
        buildConfiguratorContext.put("engine.hostname", getPaymentGatewayConfigValue(delegator, paymentGatewayConfigId, "hostName"));
        buildConfiguratorContext.put("engine.port", getPaymentGatewayConfigValue(delegator, paymentGatewayConfigId, "port"));
        buildConfiguratorContext.put("engine.hostname.failover", getPaymentGatewayConfigValue(delegator, paymentGatewayConfigId, "hostNameFailover"));
        buildConfiguratorContext.put("engine.port.failover", getPaymentGatewayConfigValue(delegator, paymentGatewayConfigId, "portFailover"));
        buildConfiguratorContext.put("engine.connection_timeout_seconds", getPaymentGatewayConfigValue(delegator, paymentGatewayConfigId, "connectionTimeoutSeconds"));
        buildConfiguratorContext.put("engine.read_timeout_seconds", getPaymentGatewayConfigValue(delegator, paymentGatewayConfigId, "readTimeoutSeconds"));
        buildConfiguratorContext.put("engine.authorizationURI", getPaymentGatewayConfigValue(delegator, paymentGatewayConfigId, "authorizationURI"));
        buildConfiguratorContext.put("engine.sdk_version", getPaymentGatewayConfigValue(delegator, paymentGatewayConfigId, "sdkVersion"));
        buildConfiguratorContext.put("engine.ssl.socketfactory", getPaymentGatewayConfigValue(delegator, paymentGatewayConfigId, "sslSocketFactory"));
        buildConfiguratorContext.put("Response.response_type", getPaymentGatewayConfigValue(delegator, paymentGatewayConfigId, "responseType"));
        String configFileLocation = System.getProperty("ofbiz.home") + configFile;
        Configurator config = Configurator.getInstance(configFileLocation);
        // NOTE(review): putAll AFTER the entity-derived puts means values from the
        // properties file override the entity configuration — confirm this
        // precedence is intended (the reverse would be more conventional).
        buildConfiguratorContext.putAll(config.getConfigurations());
        config.setConfigurations(buildConfiguratorContext);
    } catch (InitializationException e) {
        Debug.logError(e, "Orbital Configurator Initialization Error: " + e.getMessage(), MODULE);
    }
    return buildConfiguratorContext;
}
/**
 * Looks up a single configuration value from the PaymentGatewayOrbital entity.
 *
 * @param delegator entity delegator
 * @param paymentGatewayConfigId id of the gateway configuration row; may be empty
 * @param paymentGatewayConfigParameterName entity field name to read
 * @return the trimmed field value, or an empty string when the id is empty,
 *         the row or field is missing, or the lookup fails (failures are logged)
 */
private static String getPaymentGatewayConfigValue(Delegator delegator, String paymentGatewayConfigId,
        String paymentGatewayConfigParameterName) {
    if (UtilValidate.isNotEmpty(paymentGatewayConfigId)) {
        try {
            GenericValue paymentGatewayOrbital = EntityQuery.use(delegator)
                    .from("PaymentGatewayOrbital")
                    .where("paymentGatewayConfigId", paymentGatewayConfigId)
                    .queryOne();
            if (paymentGatewayOrbital != null) {
                Object fieldValue = paymentGatewayOrbital.get(paymentGatewayConfigParameterName);
                if (fieldValue != null) {
                    // Trim: entity string fields may carry padding whitespace.
                    return fieldValue.toString().trim();
                }
            }
        } catch (GenericEntityException e) {
            Debug.logError(e, MODULE);
        }
    }
    return "";
}
/**
 * Populates a NewOrder request for either an auth-only ("A") or an
 * auth-plus-capture ("AC") transaction: card data, implied-decimal amount,
 * AVS address fields, CVV and a shipping reference derived from the order's
 * tracking code. On success the original amount is echoed into
 * results.processAmount.
 *
 * Fix: guard the AVS block against a null creditCard — when the payment
 * preference's paymentMethodTypeId is not "CREDIT_CARD", creditCard was
 * never assigned and the unconditional dereference threw an NPE that was
 * silently swallowed by the broad catch, dropping all AVS fields.
 */
private static void buildAuthOrAuthCaptureTransaction(Map<String, Object> params, Delegator delegator, Map<String, Object> props, RequestIF request, Map<String, Object> results) {
    GenericValue cc = (GenericValue) params.get("creditCard");
    BigDecimal amount = (BigDecimal) params.get("processAmount");
    // Orbital expects an implied-decimal amount string, e.g. 10.50 -> "1050".
    String amountValue = amount.setScale(DECIMALS, ROUNDING).movePointRight(2).toPlainString();
    String number = UtilFormatOut.checkNull(cc.getString("cardNumber"));
    String expDate = UtilFormatOut.checkNull(cc.getString("expireDate"));
    expDate = formatExpDateForOrbital(expDate);
    String cardSecurityCode = (String) params.get("cardSecurityCode");
    String orderId = UtilFormatOut.checkNull((String) params.get("orderId"));
    String transType = props.get("transType").toString();
    String messageType = null;
    if ("AUTH_ONLY".equals(transType)) {
        messageType = "A";
    } else if ("AUTH_CAPTURE".equals(transType)) {
        messageType = "AC";
    }
    try {
        request.setFieldValue("IndustryType", "EC");
        request.setFieldValue("MessageType", UtilFormatOut.checkNull(messageType));
        request.setFieldValue("MerchantID", UtilFormatOut.checkNull(props.get("merchantId").toString()));
        request.setFieldValue("BIN", BIN_VALUE);
        request.setFieldValue("OrderID", UtilFormatOut.checkNull(orderId));
        request.setFieldValue("AccountNum", UtilFormatOut.checkNull(number));
        request.setFieldValue("Amount", UtilFormatOut.checkNull(amountValue));
        request.setFieldValue("Exp", UtilFormatOut.checkNull(expDate));
        // AVS Information
        GenericValue creditCard = null;
        if (params.get("orderPaymentPreference") != null) {
            GenericValue opp = (GenericValue) params.get("orderPaymentPreference");
            if ("CREDIT_CARD".equals(opp.getString("paymentMethodTypeId"))) {
                // sometimes the ccAuthCapture interface is used, in which case the creditCard is passed directly
                creditCard = (GenericValue) params.get("creditCard");
                if (creditCard == null || !(opp.get("paymentMethodId").equals(creditCard.get("paymentMethodId")))) {
                    creditCard = opp.getRelatedOne("CreditCard", false);
                }
            }
            // NOTE(review): AVSname is hard-coded; confirm whether the cardholder
            // name should be sent here instead.
            request.setFieldValue("AVSname", "Demo Customer");
            // creditCard stays null for non-credit-card payment method types; guard the dereference.
            if (creditCard != null && UtilValidate.isNotEmpty(creditCard.getString("contactMechId"))) {
                GenericValue address = creditCard.getRelatedOne("PostalAddress", false);
                if (address != null) {
                    request.setFieldValue("AVSaddress1", UtilFormatOut.checkNull(address.getString("address1")));
                    request.setFieldValue("AVScity", UtilFormatOut.checkNull(address.getString("city")));
                    request.setFieldValue("AVSstate", UtilFormatOut.checkNull(address.getString("stateProvinceGeoId")));
                    request.setFieldValue("AVSzip", UtilFormatOut.checkNull(address.getString("postalCode")));
                }
            }
        } else {
            // this would be the case for an authorization: take AVS data from the bill-to party
            GenericValue cp = (GenericValue) params.get("billToParty");
            GenericValue ba = (GenericValue) params.get("billingAddress");
            request.setFieldValue("AVSname", UtilFormatOut.checkNull(cp.getString("firstName")) + UtilFormatOut.checkNull(cp.getString("lastName")));
            request.setFieldValue("AVSaddress1", UtilFormatOut.checkNull(ba.getString("address1")));
            request.setFieldValue("AVScity", UtilFormatOut.checkNull(ba.getString("city")));
            request.setFieldValue("AVSstate", UtilFormatOut.checkNull(ba.getString("stateProvinceGeoId")));
            request.setFieldValue("AVSzip", UtilFormatOut.checkNull(ba.getString("postalCode")));
            request.setFieldValue("AVSCountryCode", UtilFormatOut.checkNull(ba.getString("countryGeoId")));
        }
        // Additional Information
        request.setFieldValue("Comments", "This is building of request object");
        String shippingRef = getShippingRefForOrder(orderId, delegator);
        request.setFieldValue("ShippingRef", shippingRef);
        request.setFieldValue("CardSecVal", UtilFormatOut.checkNull(cardSecurityCode));
        // Display the request
        if ("AUTH_ONLY".equals(transType)) {
            Debug.logInfo("\nAuth Request:\n ======== " + request.getXML());
        } else if ("AUTH_CAPTURE".equals(transType)) {
            Debug.logInfo("\nAuth Capture Request:\n ======== " + request.getXML());
        }
        results.put("processAmount", amount);
    } catch (InitializationException ie) {
        Debug.logInfo("Unable to initialize request object", MODULE);
    } catch (FieldNotFoundException fnfe) {
        Debug.logError("Unable to find XML field in template", MODULE);
    } catch (Exception e) {
        Debug.logError(e, MODULE);
    }
}
/**
 * Populates a Mark-For-Capture request referencing an earlier auth by
 * TxRefNum, with the implied-decimal capture amount and the cardholder's
 * purchase-card destination address. Echoes the (unscaled) amount into
 * results.captureAmount on success.
 */
private static void buildCaptureTransaction(Map<String, Object> params, Delegator delegator, Map<String, Object> props, RequestIF request, Map<String, Object> results) {
    GenericValue authTransaction = (GenericValue) params.get("authTransaction");
    GenericValue creditCard = (GenericValue) params.get("creditCard");
    BigDecimal amount = (BigDecimal) params.get("captureAmount");
    // Orbital expects an implied-decimal amount string, e.g. 10.50 -> "1050".
    String amountValue = amount.setScale(DECIMALS, ROUNDING).movePointRight(2).toPlainString();
    String orderId = UtilFormatOut.checkNull((String) params.get("orderId"));
    try {
        // If there were no errors preparing the template, we can now specify the data
        // Basic Auth Fields
        request.setFieldValue("MerchantID", UtilFormatOut.checkNull(props.get("merchantId").toString()));
        request.setFieldValue("BIN", BIN_VALUE);
        request.setFieldValue("TxRefNum", UtilFormatOut.checkNull(authTransaction.get("referenceNum").toString()));
        request.setFieldValue("OrderID", UtilFormatOut.checkNull(orderId));
        request.setFieldValue("Amount", UtilFormatOut.checkNull(amountValue));
        // NOTE(review): name parts are concatenated with no separator and a missing
        // part would render as the literal "null" — confirm whether that matters downstream.
        request.setFieldValue("PCDestName", UtilFormatOut.checkNull(creditCard.getString("firstNameOnCard") + creditCard.getString("lastNameOnCard")));
        if (UtilValidate.isNotEmpty(creditCard.getString("contactMechId"))) {
            GenericValue address = creditCard.getRelatedOne("PostalAddress", false);
            if (address != null) {
                request.setFieldValue("PCOrderNum", UtilFormatOut.checkNull(orderId));
                request.setFieldValue("PCDestAddress1", UtilFormatOut.checkNull(address.getString("address1")));
                request.setFieldValue("PCDestAddress2", UtilFormatOut.checkNull(address.getString("address2")));
                request.setFieldValue("PCDestCity", UtilFormatOut.checkNull(address.getString("city")));
                request.setFieldValue("PCDestState", UtilFormatOut.checkNull(address.getString("stateProvinceGeoId")));
                request.setFieldValue("PCDestZip", UtilFormatOut.checkNull(address.getString("postalCode")));
            }
        }
        // Display the request
        Debug.logInfo("\nCapture Request:\n ======== " + request.getXML());
        results.put("captureAmount", amount);
    } catch (InitializationException ie) {
        Debug.logInfo("Unable to initialize request object", MODULE);
    } catch (FieldNotFoundException fnfe) {
        Debug.logError("Unable to find XML field in template" + fnfe.getMessage(), MODULE);
    } catch (Exception e) {
        Debug.logError(e, MODULE);
    }
}
/**
 * Populates a NewOrder refund request (MessageType "R") from the card on
 * file and the implied-decimal refund amount. Echoes the (unscaled) amount
 * into results.refundAmount on success.
 */
private static void buildRefundTransaction(Map<String, Object> params, Map<String, Object> props, RequestIF request, Map<String, Object> results) {
    GenericValue cc = (GenericValue) params.get("creditCard");
    BigDecimal amount = (BigDecimal) params.get("refundAmount");
    // Orbital expects an implied-decimal amount string, e.g. 10.50 -> "1050".
    String amountValue = amount.setScale(DECIMALS, ROUNDING).movePointRight(2).toPlainString();
    String number = UtilFormatOut.checkNull(cc.getString("cardNumber"));
    String expDate = UtilFormatOut.checkNull(cc.getString("expireDate"));
    expDate = formatExpDateForOrbital(expDate);
    String orderId = UtilFormatOut.checkNull((String) params.get("orderId"));
    try {
        // If there were no errors preparing the template, we can now specify the data
        // Basic Auth Fields
        request.setFieldValue("IndustryType", "EC");
        request.setFieldValue("MessageType", "R");
        request.setFieldValue("MerchantID", UtilFormatOut.checkNull(props.get("merchantId").toString()));
        request.setFieldValue("BIN", BIN_VALUE);
        request.setFieldValue("OrderID", UtilFormatOut.checkNull(orderId));
        request.setFieldValue("AccountNum", UtilFormatOut.checkNull(number));
        request.setFieldValue("Amount", UtilFormatOut.checkNull(amountValue));
        request.setFieldValue("Exp", UtilFormatOut.checkNull(expDate));
        request.setFieldValue("Comments", "This is a credit card refund");
        Debug.logInfo("\nRefund Request:\n ======== " + request.getXML());
        results.put("refundAmount", amount);
    } catch (InitializationException ie) {
        Debug.logInfo("Unable to initialize request object", MODULE);
    } catch (FieldNotFoundException fnfe) {
        Debug.logError("Unable to find XML field in template", MODULE);
    } catch (Exception e) {
        Debug.logError(e, MODULE);
    }
}
/**
 * Populates a Reverse (void) request referencing the earlier auth by
 * TxRefNum. Echoes the release amount into results.releaseAmount.
 * The delegator parameter is currently unused but kept for signature
 * parity with the other build* helpers.
 */
private static void buildReleaseTransaction(Map<String, Object> params, Delegator delegator, Map<String, Object> props, RequestIF request, Map<String, Object> results) {
    BigDecimal amount = (BigDecimal) params.get("releaseAmount");
    GenericValue authTransaction = (GenericValue) params.get("authTransaction");
    String orderId = UtilFormatOut.checkNull((String) params.get("orderId"));
    try {
        // If there were no errors preparing the template, we can now specify the data
        // Basic Auth Fields
        request.setFieldValue("MerchantID", UtilFormatOut.checkNull(props.get("merchantId").toString()));
        request.setFieldValue("BIN", BIN_VALUE);
        request.setFieldValue("TxRefNum", UtilFormatOut.checkNull(authTransaction.get("referenceNum").toString()));
        request.setFieldValue("OrderID", UtilFormatOut.checkNull(orderId));
        // Display the request
        Debug.logInfo("\nRelease Request:\n ======== " + request.getXML());
        results.put("releaseAmount", amount);
    } catch (InitializationException ie) {
        Debug.logInfo("Unable to initialize request object", MODULE);
    } catch (FieldNotFoundException fnfe) {
        Debug.logError("Unable to find XML field in template" + fnfe.getMessage(), MODULE);
    } catch (Exception e) {
        Debug.logError(e, MODULE);
    }
}
/**
 * Creates the (static, shared) Orbital transaction processor.
 * The processor acquires and releases resources and executes transactions,
 * configuring a pool of protocol engines under the hood.
 * On failure the error is logged and {@code tp} is left unchanged.
 */
private static void initializeTransactionProcessor() {
    try {
        tp = new TransactionProcessor();
    } catch (InitializationException iex) {
        // Route through the framework logger (with the throwable for the stack
        // trace) instead of printStackTrace(), which bypasses log configuration.
        Debug.logError(iex, "TransactionProcessor failed to initialize: " + iex.getMessage(), MODULE);
    }
}
/**
 * Executes the prepared request through the shared transaction processor.
 *
 * @param request fully built Orbital request
 * @return map containing "authResult" (Boolean approval flag) and
 *         "processCardResponse" (the raw ResponseIF) when processing succeeded,
 *         plus the standard service response message; on TransactionException
 *         the error is logged and those two keys are absent (matching the
 *         original behavior).
 */
private static Map<String, Object> processCard(RequestIF request) {
    Map<String, Object> processCardResult = new HashMap<>();
    try {
        response = tp.process(request);
        processCardResult.put("authResult", response.isApproved());
        processCardResult.put("processCardResponse", response);
    } catch (TransactionException tex) {
        // Fixed log message: this is a processing failure, not an initialization
        // failure; also log the throwable instead of printStackTrace().
        Debug.logError(tex, "Orbital transaction processing failed: " + tex.getMessage(), MODULE);
    }
    processCardResult.put(ModelService.RESPONSE_MESSAGE, ModelService.RESPOND_SUCCESS);
    return processCardResult;
}
/**
 * Maps an Orbital auth response onto the standard ccAuth service result
 * fields. On decline the processAmount is zeroed and authRefNum is set to
 * the "Error" marker.
 */
private static void processAuthTransResult(Map<String, Object> processCardResponseContext, Map<String, Object> results) {
    ResponseIF response = (ResponseIF) processCardResponseContext.get("processCardResponse");
    Boolean authResult = (Boolean) processCardResponseContext.get("authResult");
    results.put("authResult", authResult);
    results.put("authFlag", response.getResponseCode());
    results.put("authMessage", response.getMessage());
    if (authResult) { // passed
        results.put("authCode", response.getAuthCode());
        results.put("authRefNum", response.getTxRefNum());
        results.put("cvCode", UtilFormatOut.checkNull(response.getCVV2RespCode()));
        results.put("avsCode", response.getAVSResponseCode());
        // Normalize the previously stored amount back into a BigDecimal.
        results.put("processAmount", new BigDecimal(results.get("processAmount").toString()));
    } else {
        results.put("authCode", response.getAuthCode());
        results.put("processAmount", BigDecimal.ZERO);
        results.put("authRefNum", OrbitalPaymentServices.ERROR);
    }
    Debug.logInfo("processAuthTransResult: " + results.toString(), MODULE);
}
/**
 * Maps a combined auth+capture gateway response onto both the auth* and
 * capture* service result fields (the single Orbital response covers both
 * phases). On decline the processAmount is zeroed and authRefNum is set to
 * the "Error" marker.
 */
private static void processAuthCaptureTransResult(Map<String, Object> processCardResponseContext, Map<String, Object> results) {
    ResponseIF response = (ResponseIF) processCardResponseContext.get("processCardResponse");
    Boolean authResult = (Boolean) processCardResponseContext.get("authResult");
    results.put("authResult", authResult);
    results.put("authFlag", response.getResponseCode());
    results.put("authMessage", response.getMessage());
    results.put("captureResult", authResult);
    results.put("captureFlag", response.getResponseCode());
    results.put("captureMessage", response.getMessage());
    results.put("captureRefNum", response.getTxRefNum());
    if (authResult) { // passed
        results.put("authCode", response.getAuthCode());
        results.put("authRefNum", response.getTxRefNum());
        results.put("cvCode", UtilFormatOut.checkNull(response.getCVV2RespCode()));
        results.put("avsCode", response.getAVSResponseCode());
        // Normalize the previously stored amount back into a BigDecimal.
        results.put("processAmount", new BigDecimal(results.get("processAmount").toString()));
    } else {
        results.put("authCode", response.getAuthCode());
        results.put("processAmount", BigDecimal.ZERO);
        results.put("authRefNum", OrbitalPaymentServices.ERROR);
    }
    Debug.logInfo("processAuthCaptureTransResult: " + results.toString(), MODULE);
}
/**
 * Copies a capture gateway response into the standard capture* service
 * result fields; a declined capture zeroes the captureAmount.
 */
private static void processCaptureTransResult(Map<String, Object> processCardResponseContext, Map<String, Object> results) {
    ResponseIF gatewayResponse = (ResponseIF) processCardResponseContext.get("processCardResponse");
    Boolean approved = (Boolean) processCardResponseContext.get("authResult");
    results.put("captureResult", approved);
    results.put("captureFlag", gatewayResponse.getResponseCode());
    results.put("captureMessage", gatewayResponse.getMessage());
    results.put("captureRefNum", gatewayResponse.getTxRefNum());
    if (approved) {
        // Approved: echo the auth code and normalize the captured amount.
        results.put("captureCode", gatewayResponse.getAuthCode());
        results.put("captureAmount", new BigDecimal(results.get("captureAmount").toString()));
    } else {
        // Declined: no funds were captured.
        results.put("captureAmount", BigDecimal.ZERO);
    }
    Debug.logInfo("processCaptureTransResult: " + results.toString(), MODULE);
}
/**
 * Copies a refund gateway response into the standard refund* service result
 * fields; a declined refund zeroes the refundAmount.
 */
private static void processRefundTransResult(Map<String, Object> processCardResponseContext, Map<String, Object> results) {
    ResponseIF response = (ResponseIF) processCardResponseContext.get("processCardResponse");
    Boolean refundResult = (Boolean) processCardResponseContext.get("authResult");
    results.put("refundResult", refundResult);
    results.put("refundFlag", response.getResponseCode());
    results.put("refundMessage", response.getMessage());
    results.put("refundRefNum", response.getTxRefNum());
    if (refundResult) { // passed
        results.put("refundCode", response.getAuthCode());
        // Normalize the previously stored amount back into a BigDecimal.
        results.put("refundAmount", new BigDecimal(results.get("refundAmount").toString()));
    } else {
        results.put("refundAmount", BigDecimal.ZERO);
    }
    Debug.logInfo("processRefundTransResult: " + results.toString(), MODULE);
}
/**
 * Copies a release (reverse/void) gateway response into the standard
 * release* service result fields; a declined release zeroes the releaseAmount.
 */
private static void processReleaseTransResult(Map<String, Object> processCardResponseContext, Map<String, Object> results) {
    ResponseIF response = (ResponseIF) processCardResponseContext.get("processCardResponse");
    Boolean releaseResult = (Boolean) processCardResponseContext.get("authResult");
    results.put("releaseResult", releaseResult);
    results.put("releaseFlag", response.getResponseCode());
    results.put("releaseMessage", response.getMessage());
    results.put("releaseRefNum", response.getTxRefNum());
    if (releaseResult) { // passed
        results.put("releaseCode", response.getAuthCode());
        // Normalize the previously stored amount back into a BigDecimal.
        results.put("releaseAmount", new BigDecimal(results.get("releaseAmount").toString()));
    } else {
        results.put("releaseAmount", BigDecimal.ZERO);
    }
    Debug.logInfo("processReleaseTransResult: " + results.toString(), MODULE);
}
/**
 * Logs a keyed summary of an Orbital gateway response — debugging aid only,
 * has no effect on the service result.
 */
private static void printTransResult(ResponseIF response) {
    Map<String, Object> summary = new HashMap<>();
    summary.put("isGood", response.isGood());
    summary.put("isError", response.isError());
    summary.put("isQuickResponse", response.isQuickResponse());
    summary.put("isApproved", response.isApproved());
    summary.put("isDeclined", response.isDeclined());
    summary.put("AuthCode", response.getAuthCode());
    summary.put("TxRefNum", response.getTxRefNum());
    summary.put("ResponseCode", response.getResponseCode());
    summary.put("Status", response.getStatus());
    summary.put("Message", response.getMessage());
    summary.put("AVSCode", response.getAVSResponseCode());
    summary.put("CVV2ResponseCode", response.getCVV2RespCode());
    Debug.logInfo("printTransResult === " + summary.toString(), MODULE);
}
/**
 * Converts an OFBiz expiration date of the form "MM/YYYY" (e.g. "01/2025")
 * into the "MMYY" form expected by the Orbital gateway (e.g. "0125").
 *
 * @param expDate expiration date, expected as "MM/YYYY"
 * @return month digits followed by the last two year digits
 * @throws IllegalArgumentException if expDate is null or too short — the
 *         original code failed here with an uninformative
 *         StringIndexOutOfBoundsException
 */
private static String formatExpDateForOrbital(String expDate) {
    if (expDate == null || expDate.length() < 7) {
        throw new IllegalArgumentException("Expiration date must be in MM/YYYY format: " + expDate);
    }
    return expDate.substring(0, 2) + expDate.substring(5);
}
/**
 * Builds the Orbital "ShippingRef" for an order from its first
 * TrackingCodeOrder record, formatted as
 * "&lt;trackingCodeId&gt;====&lt;description&gt;". Falls back to a fixed
 * placeholder when no usable tracking code exists, and to an empty string
 * when the entity lookup fails (the failure is logged).
 */
private static String getShippingRefForOrder(String orderId, Delegator delegator) {
    String shippingRef = "";
    try {
        GenericValue trackingCodeOrder = EntityQuery.use(delegator).from("TrackingCodeOrder").where("orderId", orderId).queryFirst();
        GenericValue trackingCode = null;
        if (trackingCodeOrder != null) {
            trackingCode = trackingCodeOrder.getRelatedOne("TrackingCode", false);
        }
        if (trackingCode != null && UtilValidate.isNotEmpty(trackingCode.getString("description"))) {
            // get tracking code description and provide it into shipping reference.
            shippingRef = trackingCode.getString("trackingCodeId") + "====" + trackingCode.getString("description");
        } else {
            shippingRef = "No Tracking Info processed in order";
        }
    } catch (GenericEntityException e) {
        Debug.logError("Shipping Ref not found returning empty string", MODULE);
        Debug.logError(e, MODULE);
    }
    return shippingRef;
}
/**
 * Validation hook for an outgoing Orbital request.
 * Currently a placeholder: it ignores all three arguments and always reports
 * success. NOTE(review): the raw {@code Map props} parameter is untyped —
 * presumably gateway configuration properties; confirm before tightening.
 *
 * @param params service input parameters (unused)
 * @param props gateway configuration properties (unused)
 * @param request the Orbital request being validated (unused)
 * @return a service result map containing only a success response message
 */
private static Map<String, Object> validateRequest(Map<String, Object> params, Map props, RequestIF request) {
    Map<String, Object> result = new HashMap<>();
    result.put(ModelService.RESPONSE_MESSAGE, ModelService.RESPOND_SUCCESS);
    return result;
}
}
|
apache/hive | 37,843 | itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.ql;
import static org.apache.hadoop.hive.metastore.Warehouse.DEFAULT_DATABASE_NAME;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.OutputStream;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Deque;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.cli.CliDriver;
import org.apache.hadoop.hive.cli.CliSessionState;
import org.apache.hadoop.hive.cli.control.AbstractCliConfig;
import org.apache.hadoop.hive.common.io.CachingPrintStream;
import org.apache.hadoop.hive.common.io.SessionStream;
import org.apache.hadoop.hive.common.io.QTestFetchConverter;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hadoop.hive.ql.metadata.HiveMetaStoreClientWithLocalCache;
import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
import org.apache.hadoop.hive.ql.QTestMiniClusters.FsType;
import org.apache.hadoop.hive.ql.cache.results.QueryResultsCache;
import org.apache.hadoop.hive.ql.dataset.QTestDatasetHandler;
import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.HiveMaterializedViewsRegistry;
import org.apache.hadoop.hive.ql.metadata.InvalidTableException;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.metadata.events.NotificationEventPoll;
import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
import org.apache.hadoop.hive.ql.parse.ParseDriver;
import org.apache.hadoop.hive.ql.parse.ParseException;
import org.apache.hadoop.hive.ql.parse.SemanticAnalyzer;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.plan.mapper.StatsSources;
import org.apache.hadoop.hive.ql.processors.CommandProcessor;
import org.apache.hadoop.hive.ql.processors.CommandProcessorException;
import org.apache.hadoop.hive.ql.processors.CommandProcessorFactory;
import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
import org.apache.hadoop.hive.ql.processors.HiveCommand;
import org.apache.hadoop.hive.ql.qoption.QTestAuthorizerHandler;
import org.apache.hadoop.hive.ql.qoption.QTestDisabledHandler;
import org.apache.hadoop.hive.ql.qoption.QTestDatabaseHandler;
import org.apache.hadoop.hive.ql.qoption.QTestOptionDispatcher;
import org.apache.hadoop.hive.ql.qoption.QTestQueryHistoryHandler;
import org.apache.hadoop.hive.ql.qoption.QTestReplaceHandler;
import org.apache.hadoop.hive.ql.qoption.QTestSysDbHandler;
import org.apache.hadoop.hive.ql.qoption.QTestTimezoneHandler;
import org.apache.hadoop.hive.ql.qoption.QTestTransactional;
import org.apache.hadoop.hive.ql.scheduled.QTestScheduledQueryCleaner;
import org.apache.hadoop.hive.ql.scheduled.QTestScheduledQueryServiceProvider;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hive.common.util.ProcessUtils;
import org.junit.Assert;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Preconditions;
import com.google.common.base.Throwables;
/**
* QTestUtil.
*/
public class QTestUtil {
// NOTE: logger is keyed by the literal name "QTestUtil" rather than the class object.
private static final Logger LOG = LoggerFactory.getLogger("QTestUtil");
// Env var: when set, test artifacts (tables, warehouse files) are left behind for debugging.
public static final String QTEST_LEAVE_FILES = "QTEST_LEAVE_FILES";
// Commands that exist only for tests; appended to the security command whitelist on use.
private final String[] testOnlyCommands = new String[]{ "crypto", "erasure" };
public static String DEBUG_HINT =
    "\nSee ./ql/target/tmp/log/hive.log or ./itests/qtest/target/tmp/log/hive.log, "
        + "or check ./ql/target/surefire-reports or ./itests/qtest/target/surefire-reports/ for specific test cases logs.";
// Fixed operation id / user pushed into DriverContext so QueryInfo objects are non-null.
private static final String QTEST_DRIVER_OPERATION_ID = "qtest_operation_id";
private static final String QTEST_DRIVER_USER = "qtest_driver_user";
private String testWarehouse; // warehouse directory, resolved in postInit()
@Deprecated private final String testFiles; // test data dir; prefer datasetHandler.getDataDir(conf)
private final String outDir; // directory holding expected .q.out files
protected final String logDir; // directory receiving actual test output
private File inputFile; // the .q file currently under test
private String inputContent; // raw text of inputFile
private final Set<String> srcUDFs; // UDF names preserved when resetting the test database
private final FsType fsType;
private ParseDriver pd;
protected Hive db;
protected HiveConf conf;
protected HiveConf savedConf; // pristine copy of conf; newSession() rebuilds conf from it
private BaseSemanticAnalyzer sem;
private CliDriver cliDriver;
private final QTestMiniClusters miniClusters = new QTestMiniClusters();
private final QOutProcessor qOutProcessor;
private static QTestResultProcessor qTestResultProcessor = new QTestResultProcessor();
protected QTestDatasetHandler datasetHandler;
protected QTestReplaceHandler replaceHandler;
private final String initScript; // absolute path of the per-config init script
private final String cleanupScript; // absolute path of the per-config cleanup script
QTestOptionDispatcher dispatcher = new QTestOptionDispatcher();
private boolean isSessionStateStarted = false;
/**
 * Returns the CLI driver for the current test session.
 *
 * @return the active {@link CliDriver}
 * @throws IllegalStateException if no driver has been created yet (no session started);
 *         a subclass of RuntimeException, so existing callers are unaffected
 */
public CliDriver getCliDriver() {
  if (cliDriver == null) {
    // IllegalStateException is the idiomatic type for "called before initialization".
    throw new IllegalStateException("no clidriver");
  }
  return cliDriver;
}
/**
 * Returns the default UDF names which should not be removed when resetting the test database.
 *
 * @return the set of UDF names to keep; never empty
 * @throws RuntimeException if the resolved set ends up empty
 */
private Set<String> getSrcUDFs() {
  // FIXME: moved default value to here...for now
  // i think this features is never really used from the command line
  String defaultTestSrcUDFs = "qtest_get_java_boolean";
  Set<String> keep = new HashSet<>();
  for (String name : QTestSystemProperties.getSourceUdfs(defaultTestSrcUDFs)) {
    String trimmed = name.trim();
    if (!trimmed.isEmpty()) {
      keep.add(trimmed);
    }
  }
  if (keep.isEmpty()) {
    throw new RuntimeException("Source UDFs cannot be empty");
  }
  return keep;
}
/** @return the Hive configuration currently used by this test utility */
public HiveConf getConf() {
  return conf;
}
/**
 * Applies QTest-specific settings on top of the current HiveConf: optional
 * vectorization, the verifying metastore, mini-cluster settings, disabled
 * query history, and fixed driver identity properties. Invoked from the
 * constructor and again from {@link #newSession(boolean)}.
 *
 * @throws Exception if mini-cluster configuration fails
 */
public void initConf() throws Exception {
  if (QTestSystemProperties.isVectorizationEnabled()) {
    conf.setBoolVar(ConfVars.HIVE_VECTORIZATION_ENABLED, true);
  }
  // Plug verifying metastore in for testing DirectSQL.
  conf.setVar(ConfVars.METASTORE_RAW_STORE_IMPL, "org.apache.hadoop.hive.metastore.VerifyingObjectStore");
  miniClusters.initConf(conf);
  // disable query history altogether
  HiveConf.setBoolVar(conf, HiveConf.ConfVars.HIVE_QUERY_HISTORY_ENABLED, false);
  // make DriverFactory able to create non-null QueryInfo objects
  conf.set(DriverContext.DEFAULT_USER_NAME_PROP, QTestUtil.QTEST_DRIVER_USER);
  conf.set(DriverContext.DEFAULT_OPERATION_ID_PROP, QTestUtil.QTEST_DRIVER_OPERATION_ID);
}
/**
 * Builds a QTestUtil: resolves directories and scripts from the arguments,
 * creates the HiveConf (honoring an optional conf dir and metastore system
 * properties), boots the requested mini cluster, and registers all .q-file
 * option handlers on the dispatcher. Statement order matters here: hive-site
 * location must be set before the HiveConf is created, and the mini cluster
 * must be set up before {@link #initConf()} runs.
 *
 * @param testArgs bundle of output/log dirs, cluster type, conf dir, scripts, etc.
 * @throws Exception if configuration or cluster setup fails
 */
public QTestUtil(QTestArguments testArgs) throws Exception {
  LOG.info("Setting up QTestUtil with outDir={}, logDir={}, clusterType={}, confDir={},"
      + " initScript={}, cleanupScript={}, withLlapIo={}, fsType={}",
      testArgs.getOutDir(),
      testArgs.getLogDir(),
      testArgs.getClusterType(),
      testArgs.getConfDir(),
      testArgs.getInitScript(),
      testArgs.getCleanupScript(),
      testArgs.isWithLlapIo(),
      testArgs.getFsType());
  logClassPath();
  Preconditions.checkNotNull(testArgs.getClusterType(), "ClusterType cannot be null");
  this.fsType = testArgs.getFsType();
  this.outDir = testArgs.getOutDir();
  this.logDir = testArgs.getLogDir();
  this.srcUDFs = getSrcUDFs();
  this.replaceHandler = new QTestReplaceHandler();
  this.qOutProcessor = new QOutProcessor(fsType, replaceHandler);
  // HIVE-14443 move this fall-back logic to CliConfigs
  if (testArgs.getConfDir() != null && !testArgs.getConfDir().isEmpty()) {
    // Point both Hive and the metastore at the test-specific hive-site.xml.
    HiveConf.setHiveSiteLocation(new URL("file://"
        + new File(testArgs.getConfDir()).toURI().getPath()
        + "/hive-site.xml"));
    MetastoreConf.setHiveSiteLocation(HiveConf.getHiveSiteLocation());
    System.out.println("Setting hive-site: " + HiveConf.getHiveSiteLocation());
  }
  // For testing configurations set by System.setProperties
  System.setProperty("hive.query.max.length", "100Mb");
  conf = new HiveConf(IDriver.class);
  setCustomConfs(conf, testArgs.getCustomConfs());
  setMetaStoreProperties();
  final String scriptsDir = getScriptsDir(conf);
  this.miniClusters.setup(testArgs, conf, scriptsDir, logDir);
  initConf();
  datasetHandler = new QTestDatasetHandler(conf);
  testFiles = datasetHandler.getDataDir(conf);
  conf.set("test.data.dir", datasetHandler.getDataDir(conf));
  // Per-process results cache dir so parallel test JVMs do not collide.
  conf.setVar(ConfVars.HIVE_QUERY_RESULTS_CACHE_DIRECTORY, "/tmp/hive/_resultscache_" + ProcessUtils.getPid());
  // Handlers for the .q-file "--! qt:<option>" directives.
  dispatcher.register("dataset", datasetHandler);
  dispatcher.register("replace", replaceHandler);
  dispatcher.register("sysdb", new QTestSysDbHandler());
  dispatcher.register("transactional", new QTestTransactional());
  dispatcher.register("scheduledqueryservice", new QTestScheduledQueryServiceProvider(conf));
  dispatcher.register("scheduledquerycleaner", new QTestScheduledQueryCleaner());
  dispatcher.register("timezone", new QTestTimezoneHandler());
  dispatcher.register("authorizer", new QTestAuthorizerHandler());
  dispatcher.register("disabled", new QTestDisabledHandler());
  dispatcher.register("database", new QTestDatabaseHandler(scriptsDir));
  dispatcher.register("queryhistory", new QTestQueryHistoryHandler());
  this.initScript = scriptsDir + File.separator + testArgs.getInitScript();
  this.cleanupScript = scriptsDir + File.separator + testArgs.getCleanupScript();
  // Keep a pristine copy so newSession() can rebuild conf from scratch.
  savedConf = new HiveConf(conf);
}
/** Applies test-specific configuration overrides on top of the given conf. */
private void setCustomConfs(HiveConf conf, Map<ConfVars, String> customConfigValueMap) {
  for (Map.Entry<ConfVars, String> override : customConfigValueMap.entrySet()) {
    conf.set(override.getKey().varname, override.getValue());
  }
}
/** Logs the JVM classpath, one entry per line, to help debug dependency issues. */
private void logClassPath() {
  String classpath = System.getProperty("java.class.path");
  String[] classpathEntries = classpath.split(File.pathSeparator);
  // Parameterized logging defers message construction until INFO is known to be enabled.
  LOG.info("QTestUtil classpath: {}", String.join("\n", classpathEntries));
}
/** Propagates metastore connection settings from JVM system properties into the conf. */
private void setMetaStoreProperties() {
  MetastoreConf.ConfVars[] propagated = {
      MetastoreConf.ConfVars.CONNECT_URL_KEY,
      MetastoreConf.ConfVars.CONNECTION_DRIVER,
      MetastoreConf.ConfVars.CONNECTION_USER_NAME,
      MetastoreConf.ConfVars.PWD,
      MetastoreConf.ConfVars.AUTO_CREATE_ALL
  };
  for (MetastoreConf.ConfVars var : propagated) {
    setMetastoreConfPropertyFromSystemProperty(var);
  }
}
/**
 * Copies one metastore setting from a JVM system property (if present) into the conf,
 * coercing to boolean when the variable's default value is a Boolean.
 *
 * @param var the metastore configuration variable to propagate
 */
private void setMetastoreConfPropertyFromSystemProperty(MetastoreConf.ConfVars var) {
  String value = System.getProperty(var.getVarname());
  if (value != null) {
    if (var.getDefaultVal().getClass() == Boolean.class) {
      // BUG FIX: Boolean.getBoolean(s) looks up a *system property named* s, so the
      // previous Boolean.getBoolean(value) always returned false here. parseBoolean
      // parses the value string itself.
      MetastoreConf.setBoolVar(conf, var, Boolean.parseBoolean(value));
    } else {
      MetastoreConf.setVar(conf, var, value);
    }
  }
}
/**
 * Resolves the directory holding test data scripts, defaulting to
 * ./data/scripts under the current working directory when unconfigured.
 */
public static String getScriptsDir(HiveConf conf) {
  // Use the current directory if it is not specified
  String configured = conf.get("test.data.scripts");
  if (configured != null) {
    return configured;
  }
  return new File(".").getAbsolutePath() + "/data/scripts";
}
/**
 * Tears down the test environment: cleans up test artifacts (unless
 * QTEST_LEAVE_FILES is set), stops the mini clusters, and closes the
 * thread-local Hive instance.
 *
 * @throws Exception if cleanup or cluster shutdown fails
 */
public void shutdown() throws Exception {
  if (System.getenv(QTEST_LEAVE_FILES) == null) {
    cleanUp();
  }
  miniClusters.shutDown();
  Hive.closeCurrent();
}
/**
 * Convenience overload: loads the .q file at the given path.
 *
 * @param queryFile path of the query file to load
 * @throws IOException if the file cannot be read
 */
public void setInputFile(String queryFile) throws IOException {
  setInputFile(new File(queryFile));
}
/**
 * Loads the given .q file as UTF-8 and primes the output-masking and
 * result-processing helpers with its content.
 *
 * @param qf the query file to load
 * @throws IOException if the file cannot be read
 */
public void setInputFile(File qf) throws IOException {
  String content = FileUtils.readFileToString(qf, StandardCharsets.UTF_8);
  this.inputFile = qf;
  this.inputContent = content;
  qOutProcessor.initMasks(content);
  qTestResultProcessor.init(content);
}
/** @return the .q file most recently loaded via {@link #setInputFile} */
public final File getInputFile() {
  return inputFile;
}
/**
 * Clear out any side effects of running tests: notifies the option handlers
 * that the test finished and lets the mini clusters do their post-test work.
 */
public void clearPostTestEffects() throws Exception {
  dispatcher.afterTest(this);
  miniClusters.postTest(conf);
}
/**
 * Deletes every HDFS encryption key created during tests.
 * No-op when the cluster has no encryption shim; deletion failures are
 * logged rather than propagated (best effort).
 */
public void clearKeysCreatedInTests() {
  if (miniClusters.getHdfsEncryptionShim() == null) {
    return;
  }
  try {
    for (String keyAlias : miniClusters.getHdfsEncryptionShim().getKeys()) {
      miniClusters.getHdfsEncryptionShim().deleteKey(keyAlias);
    }
  } catch (IOException e) {
    LOG.error("Fail to clean the keys created in test due to the error", e);
  }
}
/**
 * Drops functions created by tests from the default database, keeping the
 * source UDFs. Other databases are dropped wholesale elsewhere, so only the
 * default database needs scrubbing. Skipped when QTEST_LEAVE_FILES is set.
 */
public void clearUDFsCreatedDuringTests() throws Exception {
  if (System.getenv(QTEST_LEAVE_FILES) != null) {
    return;
  }
  // Delete functions created by the tests
  // It is enough to remove functions from the default database, other databases are dropped
  for (String udfName : db.getFunctions(DEFAULT_DATABASE_NAME, ".*")) {
    if (srcUDFs.contains(udfName)) {
      continue; // keep the UDFs that the test harness itself registered
    }
    db.dropFunction(DEFAULT_DATABASE_NAME, udfName);
  }
}
/**
 * Clear out any side effects of running tests: drops materialized views first
 * (to avoid race conditions), then all non-source tables and all non-default
 * databases, deletes leftover warehouse directories, and removes non-built-in
 * roles. Skipped entirely when QTEST_LEAVE_FILES is set.
 */
public void clearTablesCreatedDuringTests() throws Exception {
  if (System.getenv(QTEST_LEAVE_FILES) != null) {
    return;
  }
  // Use the default filter hook so every table/database is visible for cleanup.
  conf.set("hive.metastore.filter.hook", "org.apache.hadoop.hive.metastore.DefaultMetaStoreFilterHookImpl");
  db = Hive.get(conf);
  // First delete any MVs to avoid race conditions
  for (String dbName : db.getAllDatabases()) {
    SessionState.get().setCurrentDatabase(dbName);
    for (String tblName : db.getAllTables()) {
      Table tblObj = null;
      try {
        tblObj = db.getTable(tblName);
      } catch (InvalidTableException e) {
        LOG.warn("Trying to drop table " + e.getTableName() + ". But it does not exist.");
        continue;
      }
      // only remove MVs first
      if (!tblObj.isMaterializedView()) {
        continue;
      }
      db.dropTable(dbName, tblName, true, true, fsType == FsType.ENCRYPTED_HDFS);
      HiveMaterializedViewsRegistry.get().dropMaterializedView(tblObj.getDbName(), tblObj.getTableName());
    }
  }
  // Delete any tables other than the source tables
  // and any databases other than the default database.
  for (String dbName : db.getAllDatabases()) {
    SessionState.get().setCurrentDatabase(dbName);
    // FIXME: HIVE-24130 should remove this
    if (dbName.equalsIgnoreCase("tpch_0_001")) {
      continue;
    }
    for (String tblName : db.getAllTables()) {
      if (!DEFAULT_DATABASE_NAME.equals(dbName) || !QTestDatasetHandler.isSourceTable(tblName)) {
        try {
          db.getTable(tblName);
        } catch (InvalidTableException e) {
          LOG.warn("Trying to drop table " + e.getTableName() + ". But it does not exist.");
          continue;
        }
        db.dropTable(dbName, tblName, true, true, miniClusters.fsNeedsPurge(fsType));
      }
    }
    if (!DEFAULT_DATABASE_NAME.equals(dbName)) {
      // Drop cascade, functions dropped by cascade
      db.dropDatabase(dbName, true, true, true);
    }
  }
  // delete remaining directories for external tables (can affect stats for following tests)
  try {
    Path p = new Path(testWarehouse);
    FileSystem fileSystem = p.getFileSystem(conf);
    if (fileSystem.exists(p)) {
      for (FileStatus status : fileSystem.listStatus(p)) {
        if (status.isDirectory() && !QTestDatasetHandler.isSourceTable(status.getPath().getName())) {
          fileSystem.delete(status.getPath(), true);
        }
      }
    }
  } catch (IllegalArgumentException e) {
    // ignore.. provides invalid url sometimes intentionally
  }
  SessionState.get().setCurrentDatabase(DEFAULT_DATABASE_NAME);
  // Drop roles created by tests; PUBLIC and ADMIN are built-in and must remain.
  List<String> roleNames = db.getAllRoleNames();
  for (String roleName : roleNames) {
    if (!"PUBLIC".equalsIgnoreCase(roleName) && !"ADMIN".equalsIgnoreCase(roleName)) {
      db.dropRole(roleName);
    }
  }
}
/** Starts a fresh session, allowing the mini clusters to reuse an existing one. */
public void newSession() throws Exception {
  newSession(true);
}
/**
 * Starts a fresh CLI session on a conf rebuilt from the saved pristine copy,
 * closing the previous session first.
 *
 * @param canReuseSession whether the mini clusters may keep session-scoped state
 * @throws Exception if configuration or session startup fails
 */
public void newSession(boolean canReuseSession) throws Exception {
  // allocate and initialize a new conf since a test can
  // modify conf by using 'set' commands
  conf = new HiveConf(savedConf);
  initConf();
  initConfFromSetup();
  // renew the metastore since the cluster type is unencrypted
  db = Hive.get(conf); // propagate new conf to meta store
  HiveConf.setVar(conf,
      HiveConf.ConfVars.HIVE_AUTHENTICATOR_MANAGER,
      "org.apache.hadoop.hive.ql.security.DummyAuthenticator");
  CliSessionState ss = new CliSessionState(conf);
  ss.in = System.in;
  SessionState oldSs = SessionState.get();
  miniClusters.restartSessions(canReuseSession, ss, oldSs);
  closeSession(oldSs);
  SessionState.start(ss);
  cliDriver = new CliDriver();
  // Session output goes to initialize.log until cliInit() redirects it per test.
  File outf = new File(logDir, "initialize.log");
  setSessionOutputs(ss, outf);
}
/**
 * Clear out any side effects of running tests: opens a fresh session (to drop
 * any security settings a test configured), clears caches, and removes tables,
 * UDFs, encryption keys, and stats created by the test. Skipped when
 * QTEST_LEAVE_FILES is set.
 */
public void clearTestSideEffects() throws Exception {
  if (System.getenv(QTEST_LEAVE_FILES) != null) {
    return;
  }
  // the test might have configured security/etc; open a new session to get rid of that
  newSession();
  // Remove any cached results from the previous test.
  Utilities.clearWorkMap(conf);
  NotificationEventPoll.shutdown();
  QueryResultsCache.cleanupInstance();
  clearTablesCreatedDuringTests();
  clearUDFsCreatedDuringTests();
  clearKeysCreatedInTests();
  StatsSources.clearGlobalStats();
  dispatcher.afterTest(this);
}
/** Hook for per-test conf tweaks; by default lets the mini clusters prepare for the test. */
protected void initConfFromSetup() throws Exception {
  miniClusters.preTest(conf);
}
/**
 * Final cleanup at shutdown: runs the cleanup script (derby only — other
 * metastore containers are already gone), empties the warehouse directory,
 * and unregisters temporary test UDFs. Skipped when QTEST_LEAVE_FILES is set.
 *
 * @throws Exception if session startup or cleanup fails
 */
public void cleanUp() throws Exception {
  if (!isSessionStateStarted) {
    startSessionState(qTestResultProcessor.canReuseSession());
  }
  if (System.getenv(QTEST_LEAVE_FILES) != null) {
    return;
  }
  conf.setBoolean("hive.test.shutdown.phase", true);
  clearKeysCreatedInTests();
  String metastoreDb = QTestSystemProperties.getMetaStoreDb();
  if (metastoreDb == null || "derby".equalsIgnoreCase(metastoreDb)) {
    // otherwise, the docker container is already destroyed by this time
    cleanupFromFile();
  }
  // delete any contents in the warehouse dir
  Path p = new Path(testWarehouse);
  FileSystem fs = p.getFileSystem(conf);
  try {
    FileStatus[] ls = fs.listStatus(p);
    for (int i = 0; (ls != null) && (i < ls.length); i++) {
      fs.delete(ls[i].getPath(), true);
    }
  } catch (FileNotFoundException e) {
    // Best effort
  }
  // TODO: Clean up all the other paths that are created.
  FunctionRegistry.unregisterTemporaryUDF("test_udaf");
  FunctionRegistry.unregisterTemporaryUDF("test_error");
}
/**
 * Runs the configured cleanup script (if one exists) through the CLI driver.
 * Processing failures are logged and swallowed for now (see HIVE-14682).
 *
 * @throws IOException if the cleanup script cannot be read
 */
private void cleanupFromFile() throws IOException {
  File cleanupFile = new File(cleanupScript);
  if (!cleanupFile.isFile()) {
    LOG.info("No cleanup script detected. Skipping.");
    return;
  }
  String cleanupCommands = FileUtils.readFileToString(cleanupFile, StandardCharsets.UTF_8);
  // Parameterized logging; message text is unchanged.
  LOG.info("Cleanup ({}):\n{}", cleanupScript, cleanupCommands);
  try {
    getCliDriver().processLine(cleanupCommands);
  } catch (CommandProcessorException e) {
    LOG.error("Failed during cleanup processLine with code={}. Ignoring", e.getResponseCode());
    // TODO Convert this to an Assert.fail once HIVE-14682 is fixed
  }
}
/**
 * Creates the source tables by running the init script, with the
 * "hive.test.init.phase" flag raised for the duration of the script.
 *
 * @throws Exception if the session cannot be started
 */
public void createSources() throws Exception {
  if (!isSessionStateStarted) {
    startSessionState(qTestResultProcessor.canReuseSession());
  }
  getCliDriver().processLine("set test.data.dir=" + testFiles + ";");
  conf.setBoolean("hive.test.init.phase", true);
  initFromScript();
  conf.setBoolean("hive.test.init.phase", false);
}
/**
 * Executes the configured init script through the CLI driver; a processing
 * failure fails the test immediately.
 *
 * @throws IOException if the init script cannot be read
 */
private void initFromScript() throws IOException {
  File scriptFile = new File(this.initScript);
  if (!scriptFile.isFile()) {
    LOG.info("No init script detected. Skipping");
    return;
  }
  String initCommands = FileUtils.readFileToString(scriptFile, StandardCharsets.UTF_8);
  LOG.info("Initial setup ({}):\n{}", initScript, initCommands);
  try {
    // getCliDriver() for consistency with cleanupFromFile(); also fails fast if unset.
    getCliDriver().processLine(initCommands);
    // Fixed log typo: "cliDrriver" -> "cliDriver".
    LOG.info("Result from cliDriver.processLine in createSources=0");
  } catch (CommandProcessorException e) {
    Assert.fail("Failed during createSources processLine with code=" + e.getResponseCode());
  }
}
/**
 * Completes initialization after the mini clusters are up: creates the
 * semantic analyzer and parse driver, resolves the warehouse directory,
 * initializes the materialized-views registry, starts the first session,
 * and (when enabled) sets up the metastore client cache.
 *
 * @throws Exception if any initialization step fails
 */
public void postInit() throws Exception {
  miniClusters.postInit(conf);
  sem = new SemanticAnalyzer(new QueryState.Builder().withHiveConf(conf).build());
  testWarehouse = conf.getVar(HiveConf.ConfVars.METASTORE_WAREHOUSE);
  db = Hive.get(conf);
  pd = new ParseDriver();
  initMaterializedViews(); // Create views registry
  firstStartSessionState();
  // setup metastore client cache
  if (conf.getBoolVar(ConfVars.MSC_CACHE_ENABLED)) {
    HiveMetaStoreClientWithLocalCache.init(conf);
  }
}
/**
 * Initializes the materialized-views registry with the in-memory DUMMY
 * implementation, then restores the previously configured implementation.
 */
private void initMaterializedViews() {
  String registryImpl = db.getConf().get("hive.server2.materializedviews.registry.impl");
  db.getConf().set("hive.server2.materializedviews.registry.impl", "DUMMY");
  try {
    HiveMaterializedViewsRegistry.get().init(db);
  } finally {
    // Restore even if init throws, so a failure does not leave DUMMY configured.
    db.getConf().set("hive.server2.materializedviews.registry.impl", registryImpl);
  }
}
//FIXME: check why mr is needed for starting a session state from conf
/**
 * Starts the very first SessionState, temporarily forcing the execution
 * engine to "mr" for the duration of the start call.
 */
private void firstStartSessionState() {
  String execEngine = conf.get("hive.execution.engine");
  conf.set("hive.execution.engine", "mr");
  SessionState.start(conf);
  conf.set("hive.execution.engine", execEngine);
}
/**
 * Prepares the CLI session for the currently loaded .q file: runs the
 * option dispatcher, opens a fresh session if required, redirects session
 * output to a per-test log file, and processes any init files.
 *
 * @return absolute path of the file that will receive the test's output
 * @throws Exception if session or output setup fails
 */
public String cliInit() throws Exception {
  File file = Objects.requireNonNull(inputFile);
  String fileName = inputFile.getName();
  dispatcher.process(file);
  dispatcher.beforeTest(this);
  if (!qTestResultProcessor.canReuseSession()) {
    newSession(false);
  }
  CliSessionState ss = (CliSessionState) SessionState.get();
  String outFileExtension = getOutFileExtension();
  String stdoutName = null;
  if (outDir != null) {
    // TODO: why is this needed?
    File qf = new File(outDir, fileName);
    stdoutName = qf.getName().concat(outFileExtension);
  } else {
    stdoutName = fileName + outFileExtension;
  }
  File outf = new File(logDir, stdoutName);
  setSessionOutputs(ss, outf);
  ss.setIsQtestLogging(true);
  // init_file.q exercises the init-file mechanism itself, so seed it explicitly.
  if (fileName.equals("init_file.q")) {
    ss.initFiles.add(AbstractCliConfig.HIVE_ROOT + "/data/scripts/test_init_file.sql");
  }
  cliDriver.processInitFiles(ss);
  return outf.getAbsolutePath();
}
/**
 * Redirects the session's stdout/stderr to the given file, wrapping stdout
 * in a converter that masks each line before any result sorting happens.
 *
 * @param ss the session whose streams are replaced
 * @param outf the file receiving session output
 * @throws Exception if the streams cannot be created
 */
private void setSessionOutputs(CliSessionState ss, File outf) throws Exception {
  OutputStream fo = new BufferedOutputStream(new FileOutputStream(outf));
  // Flush anything pending on the previous streams before swapping them out.
  if (ss.out != null) {
    ss.out.flush();
  }
  if (ss.err != null) {
    ss.err.flush();
  }
  qTestResultProcessor.setOutputs(ss, fo);
  ss.out = new QTestFetchConverter(ss.out, false, "UTF-8", line -> {
    notifyOutputLine(line);
    if (qOutProcessor != null) {
      // ensure that the masking is done before the sorting of the query results
      return qOutProcessor.processLine(line).get();
    }
    return line;
  });
  ss.err = new CachingPrintStream(fo, true, "UTF-8");
  ss.setIsSilent(true);
  ss.setIsQtestLogging(true);
}
/**
 * Lets the implementor know that a new line has been produced in the output.
 * Subclasses may override to observe test output; the default does nothing.
 *
 * @param line the output line just produced
 */
protected void notifyOutputLine(String line) {
  // by default do nothing
}
/**
 * Starts a CLI session wired to System.in/System.out, replacing any existing
 * session. The execution engine is temporarily forced to "mr" while the
 * session starts (see FIXME below).
 *
 * @param canReuseSession whether the mini clusters may keep session-scoped state
 * @return the newly started session
 * @throws IOException if closing the previous session fails
 */
public CliSessionState startSessionState(boolean canReuseSession) throws IOException {
  HiveConf.setVar(conf,
      HiveConf.ConfVars.HIVE_AUTHENTICATOR_MANAGER,
      "org.apache.hadoop.hive.ql.security.DummyAuthenticator");
  //FIXME: check why mr is needed for starting a session state from conf
  String execEngine = conf.get("hive.execution.engine");
  conf.set("hive.execution.engine", "mr");
  CliSessionState ss = new CliSessionState(conf);
  ss.in = System.in;
  ss.out = new SessionStream(System.out);
  // NOTE: err also points at System.out (not System.err) — appears intentional here.
  ss.err = new SessionStream(System.out);
  SessionState oldSs = SessionState.get();
  miniClusters.restartSessions(canReuseSession, ss, oldSs);
  closeSession(oldSs);
  SessionState.start(ss);
  isSessionStateStarted = true;
  conf.set("hive.execution.engine", execEngine);
  return ss;
}
/**
 * Closes the previous session, if any, first closing its redirected stdout
 * stream (but never the real System.out).
 */
private void closeSession(SessionState oldSs) throws IOException {
  if (oldSs == null) {
    return;
  }
  if (oldSs.out != null && oldSs.out != System.out) {
    oldSs.out.close();
  }
  oldSs.close();
}
/**
 * Executes the first semicolon-terminated statement of the given string.
 *
 * @param q command text; only the part up to the first ';' is executed
 * @return 0 when a statement was run, -1 when the input contains no ';'
 * @throws CommandProcessorException if the driver rejects the statement
 */
public int executeAdHocCommand(String q) throws CommandProcessorException {
  int firstSemi = q.indexOf(';');
  if (firstSemi < 0) {
    return -1;
  }
  // indexOf/substring avoids the ArrayIndexOutOfBounds the old split(";")[0]
  // threw on inputs consisting only of semicolons.
  String q1 = q.substring(0, firstSemi) + ";";
  LOG.debug("Executing {}", q1);
  cliDriver.processLine(q1);
  return 0;
}
/**
 * Runs the currently loaded .q file through the CLI driver.
 *
 * @return response of the last executed command
 * @throws CommandProcessorException on failure, unless errors are configured to be ignored
 */
public CommandProcessorResponse executeClient() throws CommandProcessorException {
  return executeClientInternal(getCommand());
}
/**
 * Splits the query text into statements and executes them one by one,
 * routing test-only commands to their dedicated processors. Statements
 * ending in a backslash are joined with the next one. Failures propagate
 * unless CLI_IGNORE_ERRORS is set.
 *
 * @param commands full comment-stripped text of the .q file
 * @return response of the last executed statement
 * @throws CommandProcessorException if a statement fails and errors are not ignored
 */
private CommandProcessorResponse executeClientInternal(String commands) throws CommandProcessorException {
  List<String> cmds = CliDriver.splitSemiColon(commands);
  CommandProcessorResponse response = new CommandProcessorResponse();
  StringBuilder command = new StringBuilder();
  QTestSyntaxUtil qtsu = new QTestSyntaxUtil(this, conf, pd);
  qtsu.checkQFileSyntax(cmds);
  for (String oneCmd : cmds) {
    // A trailing backslash means the statement continues in the next fragment.
    if (StringUtils.endsWith(oneCmd, "\\")) {
      command.append(StringUtils.chop(oneCmd) + "\\;");
      continue;
    } else {
      // A recognized Hive command starts a new statement, dropping any partial buffer.
      if (isHiveCommand(oneCmd)) {
        command.setLength(0);
      }
      command.append(oneCmd);
    }
    if (StringUtils.isBlank(command.toString())) {
      continue;
    }
    String strCommand = command.toString();
    try {
      if (isCommandUsedForTesting(strCommand)) {
        response = executeTestCommand(strCommand);
      } else {
        response = cliDriver.processLine(strCommand);
      }
    } catch (CommandProcessorException e) {
      if (!ignoreErrors()) {
        throw e;
      }
    }
    command.setLength(0);
  }
  if (SessionState.get() != null) {
    SessionState.get().setLastCommand(null); // reset
  }
  return response;
}
/**
 * This allows a .q file to continue executing after a statement runs into an error which is convenient
 * if you want to use another hive cmd after the failure to sanity check the state of the system.
 *
 * @return true when CLI_IGNORE_ERRORS is enabled in the current conf
 */
private boolean ignoreErrors() {
  return conf.getBoolVar(HiveConf.ConfVars.CLI_IGNORE_ERRORS);
}
/**
 * Whether the given text starts with a recognized Hive CLI command,
 * including commands that exist only for testing.
 *
 * @param command candidate command text
 * @return true when the first tokens match a known Hive command
 */
boolean isHiveCommand(String command) {
  String[] cmd = command.trim().split("\\s+");
  // Single boolean expression replaces the redundant if/else-return-true/false chain.
  return HiveCommand.find(cmd) != null
      || HiveCommand.find(cmd, HiveCommand.ONLY_FOR_TESTING) != null;
}
/**
 * Executes a test-only command (e.g. "crypto", "erasure") through its
 * dedicated processor, after substituting the warehouse directory into the
 * arguments and whitelisting the test-only commands.
 *
 * @param command full command text including the command name
 * @return the processor's response
 * @throws CommandProcessorException if the processor reports a failure
 */
private CommandProcessorResponse executeTestCommand(String command) throws CommandProcessorException {
  String commandName = command.trim().split("\\s+")[0];
  String commandArgs = command.trim().substring(commandName.length());
  if (commandArgs.endsWith(";")) {
    commandArgs = StringUtils.chop(commandArgs);
  }
  //replace ${hiveconf:hive.metastore.warehouse.dir} with actual dir if existed.
  //we only want the absolute path, so remove the header, such as hdfs://localhost:57145
  String wareHouseDir =
      SessionState.get().getConf().getVar(ConfVars.METASTORE_WAREHOUSE).replaceAll("^[a-zA-Z]+://.*?:\\d+", "");
  commandArgs = commandArgs.replaceAll("\\$\\{hiveconf:hive\\.metastore\\.warehouse\\.dir\\}", wareHouseDir);
  if (SessionState.get() != null) {
    SessionState.get().setLastCommand(commandName + " " + commandArgs.trim());
  }
  enableTestOnlyCmd(SessionState.get().getConf());
  try {
    CommandProcessor proc = getTestCommand(commandName);
    if (proc != null) {
      try {
        CommandProcessorResponse response = proc.run(commandArgs.trim());
        SessionState.get().out.flush();
        return response;
      } catch (CommandProcessorException e) {
        SessionState.getConsole().printError(e.toString(),
            e.getCause() != null ? Throwables.getStackTraceAsString(e.getCause()) : "");
        throw e;
      }
    } else {
      throw new RuntimeException("Could not get CommandProcessor for command: " + commandName);
    }
  } catch (Exception e) {
    throw new RuntimeException("Could not execute test command", e);
  }
}
/**
 * Looks up the processor for a test-only command, or null when the name is
 * not a recognized test command.
 */
private CommandProcessor getTestCommand(final String commandName) throws SQLException {
  String[] tokens = new String[]{ commandName };
  HiveCommand testCommand = HiveCommand.find(tokens, HiveCommand.ONLY_FOR_TESTING);
  if (testCommand == null) {
    return null;
  }
  return CommandProcessorFactory.getForHiveCommandInternal(
      tokens, SessionState.get().getConf(), testCommand.isOnlyForTesting());
}
/** Appends the test-only commands to the security command whitelist of the given conf. */
private void enableTestOnlyCmd(HiveConf conf) {
  String whitelist = conf.getVar(HiveConf.ConfVars.HIVE_SECURITY_COMMAND_WHITELIST)
      + "," + String.join(",", testOnlyCommands);
  conf.set(HiveConf.ConfVars.HIVE_SECURITY_COMMAND_WHITELIST.toString(), whitelist);
}
/** Whether the command's first token names a test-only Hive command. */
private boolean isCommandUsedForTesting(final String command) {
  String firstToken = command.trim().split("\\s+")[0];
  return HiveCommand.find(new String[]{ firstToken }, HiveCommand.ONLY_FOR_TESTING) != null;
}
/**
 * Returns the current input file's text with full-line "--" comments removed.
 * Each comment line's trailing newline is preserved (the regex '$' and '.'
 * both stop before the line terminator), keeping line numbering stable.
 *
 * @return the comment-stripped query text
 */
private String getCommand() {
  // (?m) == Pattern.MULTILINE. This single replaceAll is equivalent to the old
  // manual find/append loop that copied everything outside the matched regions.
  return inputContent.replaceAll("(?m)^--.*$", "");
}
/** @return the extension used for expected/actual result files */
private String getOutFileExtension() {
  return ".out";
}
/**
 * Writes the expected-failure output for a negative test (parse or semantic
 * error) and diffs it against the reference file. Any other exception type
 * is rethrown unchanged.
 *
 * @param tname the test name
 * @param e the exception the test produced
 * @return diff result (always success when overwriting results)
 * @throws Exception the original exception when it is neither parse nor semantic
 */
public QTestProcessExecResult checkNegativeResults(String tname, Exception e) throws Exception {
  String outFileExtension = getOutFileExtension();
  File qf = new File(outDir, tname);
  String expf = outPath(outDir.toString(), tname.concat(outFileExtension));
  File outf = new File(new File(logDir), qf.getName().concat(outFileExtension));
  // try-with-resources guarantees the writer is closed even when a write fails
  // or when the unexpected-exception branch rethrows (the old code leaked on
  // a write failure).
  try (FileWriter outfd = new FileWriter(outf)) {
    if (e instanceof ParseException) {
      outfd.write("Parse Error: ");
    } else if (e instanceof SemanticException) {
      outfd.write("Semantic Exception: \n");
    } else {
      throw e;
    }
    outfd.write(e.getMessage());
  }
  QTestProcessExecResult result = qTestResultProcessor.executeDiffCommand(outf.getPath(), expf, false);
  if (QTestSystemProperties.shouldOverwriteResults()) {
    qTestResultProcessor.overwriteResults(outf.getPath(), expf);
    return QTestProcessExecResult.createWithoutOutput(0);
  }
  return result;
}
/**
 * Appends a FAILED line describing the given Error to the test's output file
 * and diffs it against the reference file.
 *
 * @param tname the test name
 * @param e the error the test produced
 * @return diff result (always success when overwriting results)
 * @throws Exception if writing or diffing fails
 */
public QTestProcessExecResult checkNegativeResults(String tname, Error e) throws Exception {
  String outFileExtension = getOutFileExtension();
  File qf = new File(outDir, tname);
  String expf = outPath(outDir.toString(), tname.concat(outFileExtension));
  File outf = new File(new File(logDir), qf.getName().concat(outFileExtension));
  // Append mode (second arg true): the driver may already have written partial output.
  // try-with-resources guarantees the writer is closed even when the write fails.
  try (FileWriter outfd = new FileWriter(outf, true)) {
    outfd.write("FAILED: "
        + e.getClass().getSimpleName()
        + " "
        + e.getClass().getName()
        + ": "
        + e.getMessage()
        + "\n");
  }
  QTestProcessExecResult result = qTestResultProcessor.executeDiffCommand(outf.getPath(), expf, false);
  if (QTestSystemProperties.shouldOverwriteResults()) {
    qTestResultProcessor.overwriteResults(outf.getPath(), expf);
    return QTestProcessExecResult.createWithoutOutput(0);
  }
  return result;
}
/**
 * Given the current configurations (e.g., hadoop version and execution mode), return
 * the correct file name to compare with the current test run output.
 *
 * @param outDir The directory where the reference log files are stored.
 * @param testName The test file name (terminated by ".out").
 * @return The file name appended with the configuration values if it exists.
 */
public String outPath(String outDir, String testName) {
  // List of configurations. Currently the list consists of hadoop version and execution mode only
  List<String> configs = new ArrayList<String>();
  configs.add(miniClusters.getClusterType().getQOutFileExtensionPostfix());
  // Build candidates least-specific first: testName, then testName_<config>...
  // example file names are input1.q.out_mr_0.17 or input2.q.out_0.17
  List<String> candidates = new ArrayList<String>();
  StringBuilder name = new StringBuilder(testName);
  candidates.add(name.toString());
  for (String config : configs) {
    name.append('_').append(config);
    candidates.add(name.toString());
  }
  // Walk from most specific to least, returning the first existing file.
  for (int i = candidates.size() - 1; i >= 0; i--) {
    File candidate = new File(outDir, candidates.get(i));
    if (candidate.exists()) {
      return candidate.getPath();
    }
  }
  // Nothing exists: fall back to the plain name's path, same as the original default.
  return new File(outDir, testName).getPath();
}
/**
 * Masks nondeterministic patterns in the CLI driver output and either diffs it
 * against the expected file or overwrites the expected file, depending on the
 * overwrite system property.
 */
public QTestProcessExecResult checkCliDriverResults() throws Exception {
  String testName = inputFile.getName();
  String extension = getOutFileExtension();
  String expectedFile = outPath(outDir, testName + extension);
  File actualFile = new File(logDir, testName + extension);
  qOutProcessor.maskPatterns(actualFile.getPath());
  if (!QTestSystemProperties.shouldOverwriteResults()) {
    return qTestResultProcessor.executeDiffCommand(actualFile.getPath(), expectedFile, false);
  }
  qTestResultProcessor.overwriteResults(actualFile.getPath(), expectedFile);
  return QTestProcessExecResult.createWithoutOutput(0);
}
/** Parses the current input query text and returns the resulting AST. */
public ASTNode parseQuery() throws Exception {
  ASTNode tree = pd.parse(inputContent).getTree();
  return tree;
}
/**
 * Runs semantic analysis and plan generation over the given AST.
 *
 * @param ast the parsed query tree (possibly wrapped in token-less nodes)
 * @return the root tasks produced by the semantic analyzer
 */
public List<Task<?>> analyzeAST(ASTNode ast) throws Exception {
  Context analysisCtx = new Context(conf);
  // Descend through token-less wrapper nodes to reach the first real token.
  ASTNode node = ast;
  while (node.getToken() == null && node.getChildCount() > 0) {
    node = (ASTNode) node.getChild(0);
  }
  // Reset the analyzer's read/write entity sets left over from a previous run.
  sem.getOutputs().clear();
  sem.getInputs().clear();
  sem.analyze(node, analysisCtx);
  analysisCtx.clear();
  return sem.getRootTasks();
}
// Negative test unexpectedly succeeded; no need to print the query string.
public void failed(String fname, String debugHint) {
  String hint = (debugHint == null) ? "" : " " + debugHint;
  Assert.fail("Client Execution was expected to fail, but succeeded with error code 0 for fname="
      + fname + hint);
}
/** Logs and fails the test when execution succeeded but the output diff did not match. */
public void failedDiff(int ecode, String fname, String debugHint) {
  StringBuilder msg = new StringBuilder()
      .append("Client Execution succeeded but contained differences ")
      .append("(error code = ")
      .append(ecode)
      .append(") after executing ")
      .append(fname);
  if (debugHint != null) {
    msg.append(' ').append(debugHint);
  }
  String message = msg.toString();
  LOG.error(message);
  Assert.fail(message);
}
/** Logs and fails the test when query execution returned a non-zero error code. */
public void failedQuery(Throwable e, int ecode, String fname, String debugHint) {
  SessionState ss = SessionState.get();
  String command = (ss == null) ? null : ss.getLastCommand();
  String stackTrace = (e == null) ? "" : org.apache.hadoop.util.StringUtils.stringifyException(e);
  String message = String.format(
      "Client execution failed with error code = %d %nrunning %s %nfname=%s%n%s%n %s",
      ecode,
      (command == null) ? "" : command,
      fname,
      (debugHint == null) ? "" : debugHint,
      stackTrace);
  LOG.error(message);
  Assert.fail(message);
}
/** Fails the test with the unexpected exception's stack trace and last command context. */
public void failedWithException(Exception e, String fname, String debugHint) {
  SessionState ss = SessionState.get();
  String command = (ss == null) ? null : ss.getLastCommand();
  System.err.println("Failed query: " + fname);
  System.err.flush();
  StringBuilder msg = new StringBuilder("Unexpected exception ")
      .append(org.apache.hadoop.util.StringUtils.stringifyException(e))
      .append("\n");
  if (command != null) {
    msg.append(" running ").append(command);
  }
  if (debugHint != null) {
    msg.append(debugHint);
  }
  Assert.fail(msg.toString());
}
/** Returns the processor used to mask nondeterministic patterns in query output files. */
public QOutProcessor getQOutProcessor() {
  return qOutProcessor;
}
/**
 * Initializes {@link NotificationEventPoll} with the current session's configuration.
 * Assumes an active {@code SessionState}; NPEs otherwise — TODO confirm callers guarantee this.
 */
public static void initEventNotificationPoll() throws Exception {
  NotificationEventPoll.initialize(SessionState.get().getConf());
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.distributedlog.service;
import static com.google.common.base.Charsets.UTF_8;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import com.google.common.collect.Lists;
import io.netty.buffer.Unpooled;
import org.apache.distributedlog.DLSN;
import org.apache.distributedlog.DistributedLogConfiguration;
import org.apache.distributedlog.TestDistributedLogBase;
import org.apache.distributedlog.acl.DefaultAccessControlManager;
import org.apache.distributedlog.client.routing.LocalRoutingService;
import org.apache.distributedlog.exceptions.OwnershipAcquireFailedException;
import org.apache.distributedlog.exceptions.StreamUnavailableException;
import org.apache.distributedlog.protocol.util.ProtocolUtils;
import org.apache.distributedlog.service.config.NullStreamConfigProvider;
import org.apache.distributedlog.service.config.ServerConfiguration;
import org.apache.distributedlog.service.placement.EqualLoadAppraiser;
import org.apache.distributedlog.service.stream.Stream;
import org.apache.distributedlog.service.stream.StreamImpl;
import org.apache.distributedlog.service.stream.StreamImpl.StreamStatus;
import org.apache.distributedlog.service.stream.StreamManagerImpl;
import org.apache.distributedlog.service.stream.WriteOp;
import org.apache.distributedlog.service.streamset.DelimiterStreamPartitionConverter;
import org.apache.distributedlog.service.streamset.IdentityStreamPartitionConverter;
import org.apache.distributedlog.service.streamset.StreamPartitionConverter;
import org.apache.distributedlog.thrift.service.HeartbeatOptions;
import org.apache.distributedlog.thrift.service.StatusCode;
import org.apache.distributedlog.thrift.service.WriteContext;
import org.apache.distributedlog.thrift.service.WriteResponse;
import org.apache.distributedlog.util.ConfUtils;
import com.twitter.util.Await;
import com.twitter.util.Future;
import java.net.URI;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import org.apache.bookkeeper.feature.SettableFeature;
import org.apache.bookkeeper.stats.NullStatsLogger;
import org.apache.bookkeeper.util.ReflectionUtils;
import org.apache.commons.configuration.ConfigurationException;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestName;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Test Case for DistributedLog Service.
*/
public class TestDistributedLogService extends TestDistributedLogBase {
private static final Logger logger = LoggerFactory.getLogger(TestDistributedLogService.class);
@Rule
public TestName testName = new TestName();
// Per-test configuration and service under test, rebuilt in setup().
private ServerConfiguration serverConf;
private DistributedLogConfiguration dlConf;
private URI uri;
// Latch handed to the service under test; see createService().
private final CountDownLatch latch = new CountDownLatch(1);
private DistributedLogServiceImpl service;
/** Builds a fresh configuration, URI and service instance before each test. */
@Before
@Override
public void setup() throws Exception {
  super.setup();
  // Base DL config: no lock wait, immediate buffer flush, fast scheduler shutdown.
  DistributedLogConfiguration baseConf = new DistributedLogConfiguration();
  baseConf.addConfiguration(conf);
  baseConf.setLockTimeout(0);
  baseConf.setOutputBufferSize(0);
  baseConf.setPeriodicFlushFrequencyMilliSeconds(10);
  baseConf.setSchedulerShutdownTimeoutMs(100);
  dlConf = baseConf;
  serverConf = newLocalServerConf();
  uri = createDLMURI("/" + testName.getMethodName());
  ensureURICreated(uri);
  service = createService(serverConf, dlConf, latch);
}
/** Shuts the service under test down (if created) and tears down the base fixture. */
@After
@Override
public void teardown() throws Exception {
  if (service != null) {
    service.shutdown();
  }
  super.teardown();
}
/** Returns a fresh DL configuration seeded from this test's base config. */
private DistributedLogConfiguration newLocalConf() {
  DistributedLogConfiguration localConf = new DistributedLogConfiguration();
  localConf.addConfiguration(dlConf);
  return localConf;
}
/** Returns a single-threaded server configuration loaded from the base DL config. */
private ServerConfiguration newLocalServerConf() {
  ServerConfiguration localServerConf = new ServerConfiguration();
  localServerConf.loadConf(dlConf);
  localServerConf.setServerThreads(1);
  return localServerConf;
}
/** Creates a service with a throwaway latch for callers that do not need one. */
private DistributedLogServiceImpl createService(
    ServerConfiguration serverConf,
    DistributedLogConfiguration dlConf) throws Exception {
  CountDownLatch unusedLatch = new CountDownLatch(1);
  return createService(serverConf, dlConf, unusedLatch);
}
/**
 * Creates the service under test, resolving the configured stream-to-partition
 * converter and falling back to the identity converter when it cannot be loaded.
 */
private DistributedLogServiceImpl createService(
    ServerConfiguration serverConf,
    DistributedLogConfiguration dlConf,
    CountDownLatch latch) throws Exception {
  StreamPartitionConverter partitionConverter;
  try {
    partitionConverter = ReflectionUtils.newInstance(serverConf.getStreamPartitionConverterClass());
  } catch (ConfigurationException e) {
    logger.warn("Failed to load configured stream-to-partition converter. Fallback to use {}",
        IdentityStreamPartitionConverter.class.getName());
    partitionConverter = new IdentityStreamPartitionConverter();
  }
  return new DistributedLogServiceImpl(
      serverConf,
      dlConf,
      ConfUtils.getConstDynConf(dlConf),
      new NullStreamConfigProvider(),
      uri,
      partitionConverter,
      new LocalRoutingService(),
      NullStatsLogger.INSTANCE,
      NullStatsLogger.INSTANCE,
      latch,
      new EqualLoadAppraiser());
}
/** Creates and initializes (but does not start) a stream on the given service. */
private StreamImpl createUnstartedStream(DistributedLogServiceImpl service,
                                         String name) throws Exception {
  StreamImpl newStream = (StreamImpl) service.newStream(name);
  newStream.initialize();
  return newStream;
}
/** Builds a UTF-8 payload buffer of the form {@code "record-<txid>"}. */
private ByteBuffer createRecord(long txid) {
  String payload = "record-" + txid;
  return ByteBuffer.wrap(payload.getBytes(UTF_8));
}
/** Builds a write op on the given service carrying a {@code "record-<txid>"} payload. */
private WriteOp createWriteOp(DistributedLogServiceImpl service,
                              String streamName,
                              long txid) {
  return service.newWriteOp(streamName, createRecord(txid), null);
}
/**
 * Two proxies race to own the same stream: the first to start wins ownership
 * and its pending write succeeds; the second's write fails with FOUND
 * (ownership acquire failure) and its stream becomes unavailable.
 */
@Test(timeout = 60000)
public void testAcquireStreams() throws Exception {
  String streamName = testName.getMethodName();
  StreamImpl s0 = createUnstartedStream(service, streamName);
  ServerConfiguration serverConf1 = new ServerConfiguration();
  serverConf1.addConfiguration(serverConf);
  serverConf1.setServerPort(9999);
  DistributedLogServiceImpl service1 = createService(serverConf1, dlConf);
  StreamImpl s1 = createUnstartedStream(service1, streamName);
  // create write ops; both stay pending because neither stream has started
  WriteOp op0 = createWriteOp(service, streamName, 0L);
  s0.submit(op0);
  WriteOp op1 = createWriteOp(service1, streamName, 1L);
  s1.submit(op1);
  // check pending size
  assertEquals("Write Op 0 should be pending in service 0",
      1, s0.numPendingOps());
  assertEquals("Write Op 1 should be pending in service 1",
      1, s1.numPendingOps());
  // start acquiring s0 — this proxy wins ownership
  s0.start();
  WriteResponse wr0 = Await.result(op0.result());
  assertEquals("Op 0 should succeed",
      StatusCode.SUCCESS, wr0.getHeader().getCode());
  assertEquals("Service 0 should acquire stream",
      StreamStatus.INITIALIZED, s0.getStatus());
  assertNotNull(s0.getManager());
  assertNotNull(s0.getWriter());
  assertNull(s0.getLastException());
  // start acquiring s1 — ownership is already taken, so the op fails
  s1.start();
  WriteResponse wr1 = Await.result(op1.result());
  assertEquals("Op 1 should fail",
      StatusCode.FOUND, wr1.getHeader().getCode());
  // the stream will be set to ERROR and then be closed.
  assertTrue("Service 1 should be in unavailable state",
      StreamStatus.isUnavailable(s1.getStatus()));
  assertNotNull(s1.getManager());
  assertNull(s1.getWriter());
  assertNotNull(s1.getLastException());
  assertTrue(s1.getLastException() instanceof OwnershipAcquireFailedException);
  service1.shutdown();
}
/**
 * With maxCachedPartitionsPerProxy=1, a proxy must refuse to cache a second
 * partition of the same stream, while partitions of other streams can still
 * be cached and acquired.
 */
@Test(timeout = 60000)
public void testAcquireStreamsWhenExceedMaxCachedPartitions() throws Exception {
  String streamName = testName.getMethodName() + "_0000";
  DistributedLogConfiguration confLocal = new DistributedLogConfiguration();
  confLocal.addConfiguration(dlConf);
  confLocal.setMaxCachedPartitionsPerProxy(1);
  ServerConfiguration serverConfLocal = new ServerConfiguration();
  serverConfLocal.addConfiguration(serverConf);
  serverConfLocal.setStreamPartitionConverterClass(DelimiterStreamPartitionConverter.class);
  DistributedLogServiceImpl serviceLocal = createService(serverConfLocal, confLocal);
  Stream stream = serviceLocal.getLogWriter(streamName);
  // stream is cached
  assertNotNull(stream);
  assertEquals(1, serviceLocal.getStreamManager().numCached());
  // create write ops
  // NOTE(review): the op is built via the outer `service` but submitted to
  // `serviceLocal`'s stream, mirroring the original test — confirm intended.
  WriteOp op0 = createWriteOp(service, streamName, 0L);
  stream.submit(op0);
  WriteResponse wr0 = Await.result(op0.result());
  assertEquals("Op 0 should succeed",
      StatusCode.SUCCESS, wr0.getHeader().getCode());
  assertEquals(1, serviceLocal.getStreamManager().numAcquired());
  // should fail to acquire another partition of the same stream
  try {
    serviceLocal.getLogWriter(testName.getMethodName() + "_0001");
    fail("Should fail to acquire new streams");
  } catch (StreamUnavailableException sue) {
    // expected
  }
  assertEquals(1, serviceLocal.getStreamManager().numCached());
  assertEquals(1, serviceLocal.getStreamManager().numAcquired());
  // should be able to acquire partitions from other streams
  String anotherStreamName = testName.getMethodName() + "-another_0001";
  Stream anotherStream = serviceLocal.getLogWriter(anotherStreamName);
  assertNotNull(anotherStream);
  assertEquals(2, serviceLocal.getStreamManager().numCached());
  // create write ops
  WriteOp op1 = createWriteOp(service, anotherStreamName, 0L);
  anotherStream.submit(op1);
  WriteResponse wr1 = Await.result(op1.result());
  assertEquals("Op 1 should succeed",
      StatusCode.SUCCESS, wr1.getHeader().getCode());
  assertEquals(2, serviceLocal.getStreamManager().numAcquired());
  // shut the local service down (was leaked; sibling tests shut theirs down)
  serviceLocal.shutdown();
}
/**
 * With maxAcquiredPartitionsPerProxy=1 (and unbounded caching), a proxy may
 * cache a second partition of the same stream but must fail to acquire it:
 * the write to the second partition gets STREAM_UNAVAILABLE.
 */
@Test(timeout = 60000)
public void testAcquireStreamsWhenExceedMaxAcquiredPartitions() throws Exception {
  String streamName = testName.getMethodName() + "_0000";
  DistributedLogConfiguration confLocal = new DistributedLogConfiguration();
  confLocal.addConfiguration(dlConf);
  confLocal.setMaxCachedPartitionsPerProxy(-1);
  confLocal.setMaxAcquiredPartitionsPerProxy(1);
  ServerConfiguration serverConfLocal = new ServerConfiguration();
  serverConfLocal.addConfiguration(serverConf);
  serverConfLocal.setStreamPartitionConverterClass(DelimiterStreamPartitionConverter.class);
  DistributedLogServiceImpl serviceLocal = createService(serverConfLocal, confLocal);
  Stream stream = serviceLocal.getLogWriter(streamName);
  // stream is cached
  assertNotNull(stream);
  assertEquals(1, serviceLocal.getStreamManager().numCached());
  // create write ops
  // NOTE(review): ops are built via the outer `service` but submitted to
  // `serviceLocal`'s streams, mirroring the original test — confirm intended.
  WriteOp op0 = createWriteOp(service, streamName, 0L);
  stream.submit(op0);
  WriteResponse wr0 = Await.result(op0.result());
  assertEquals("Op 0 should succeed",
      StatusCode.SUCCESS, wr0.getHeader().getCode());
  assertEquals(1, serviceLocal.getStreamManager().numAcquired());
  // should be able to cache partitions from same stream
  String anotherStreamName = testName.getMethodName() + "_0001";
  Stream anotherStream = serviceLocal.getLogWriter(anotherStreamName);
  assertNotNull(anotherStream);
  assertEquals(2, serviceLocal.getStreamManager().numCached());
  // create write ops
  WriteOp op1 = createWriteOp(service, anotherStreamName, 0L);
  anotherStream.submit(op1);
  WriteResponse wr1 = Await.result(op1.result());
  assertEquals("Op 1 should fail",
      StatusCode.STREAM_UNAVAILABLE, wr1.getHeader().getCode());
  assertEquals(1, serviceLocal.getStreamManager().numAcquired());
  // shut the local service down (was leaked; sibling tests shut theirs down)
  serviceLocal.shutdown();
}
/**
 * Closing a never-started stream must fail every queued write with
 * STREAM_UNAVAILABLE and leave the stream CLOSED.
 */
@Test(timeout = 60000)
public void testCloseShouldErrorOutPendingOps() throws Exception {
  String streamName = testName.getMethodName();
  StreamImpl stream = createUnstartedStream(service, streamName);
  // Queue writes against a stream that never starts, so they stay pending.
  int numWrites = 10;
  List<Future<WriteResponse>> responses = new ArrayList<Future<WriteResponse>>(numWrites);
  for (int txid = 0; txid < numWrites; txid++) {
    WriteOp writeOp = createWriteOp(service, streamName, txid);
    stream.submit(writeOp);
    responses.add(writeOp.result());
  }
  assertEquals(numWrites, stream.numPendingOps());
  Await.result(stream.requestClose("close stream"));
  assertEquals("Stream " + streamName + " is set to " + StreamStatus.CLOSED,
      StreamStatus.CLOSED, stream.getStatus());
  for (Future<WriteResponse> response : responses) {
    WriteResponse wr = Await.result(response);
    assertEquals("Pending op should fail with " + StatusCode.STREAM_UNAVAILABLE,
        StatusCode.STREAM_UNAVAILABLE, wr.getHeader().getCode());
  }
}
/**
 * Two close requests against the same stream must both complete, leave the
 * stream CLOSED, and fail every pending write with STREAM_UNAVAILABLE.
 */
@Test(timeout = 60000)
public void testCloseTwice() throws Exception {
  String streamName = testName.getMethodName();
  StreamImpl stream = createUnstartedStream(service, streamName);
  int numWrites = 10;
  List<Future<WriteResponse>> responses = new ArrayList<Future<WriteResponse>>(numWrites);
  for (int txid = 0; txid < numWrites; txid++) {
    WriteOp writeOp = createWriteOp(service, streamName, txid);
    stream.submit(writeOp);
    responses.add(writeOp.result());
  }
  assertEquals(numWrites, stream.numPendingOps());
  // First close request; the stream transitions through CLOSING to CLOSED.
  Future<Void> firstClose = stream.requestClose("close 0");
  assertTrue("Stream " + streamName + " should be set to " + StreamStatus.CLOSING,
      StreamStatus.CLOSING == stream.getStatus()
          || StreamStatus.CLOSED == stream.getStatus());
  // Second close request while the first may still be in flight.
  Future<Void> secondClose = stream.requestClose("close 1");
  assertTrue("Stream " + streamName + " should be set to " + StreamStatus.CLOSING,
      StreamStatus.CLOSING == stream.getStatus()
          || StreamStatus.CLOSED == stream.getStatus());
  Await.result(firstClose);
  assertEquals("Stream " + streamName + " should be set to " + StreamStatus.CLOSED,
      StreamStatus.CLOSED, stream.getStatus());
  Await.result(secondClose);
  assertEquals("Stream " + streamName + " should be set to " + StreamStatus.CLOSED,
      StreamStatus.CLOSED, stream.getStatus());
  for (Future<WriteResponse> response : responses) {
    WriteResponse wr = Await.result(response);
    assertEquals("Pending op should fail with " + StatusCode.STREAM_UNAVAILABLE,
        StatusCode.STREAM_UNAVAILABLE, wr.getHeader().getCode());
  }
}
/**
 * Writes submitted while a stream is closing, and after it has closed,
 * must both be rejected with STREAM_UNAVAILABLE.
 */
@Test(timeout = 60000)
public void testFailRequestsDuringClosing() throws Exception {
  String streamName = testName.getMethodName();
  StreamImpl stream = createUnstartedStream(service, streamName);
  Future<Void> closeFuture = stream.requestClose("close");
  assertTrue("Stream " + streamName + " should be set to " + StreamStatus.CLOSING,
      StreamStatus.CLOSING == stream.getStatus()
          || StreamStatus.CLOSED == stream.getStatus());
  // Submitted while the stream is closing.
  WriteOp opWhileClosing = createWriteOp(service, streamName, 0L);
  stream.submit(opWhileClosing);
  WriteResponse respWhileClosing = Await.result(opWhileClosing.result());
  assertEquals("Op should fail with " + StatusCode.STREAM_UNAVAILABLE + " if it is closing",
      StatusCode.STREAM_UNAVAILABLE, respWhileClosing.getHeader().getCode());
  Await.result(closeFuture);
  assertEquals("Stream " + streamName + " should be set to " + StreamStatus.CLOSED,
      StreamStatus.CLOSED, stream.getStatus());
  // Submitted after the stream has fully closed.
  WriteOp opAfterClose = createWriteOp(service, streamName, 1L);
  stream.submit(opAfterClose);
  WriteResponse respAfterClose = Await.result(opAfterClose.result());
  assertEquals("Op should fail with " + StatusCode.STREAM_UNAVAILABLE + " if it is closed",
      StatusCode.STREAM_UNAVAILABLE, respAfterClose.getHeader().getCode());
}
/**
 * With a 200ms service timeout and flushing effectively disabled, queued
 * writes should time the stream out: the stream is closed, pending writes
 * fail, and the stream is eventually evicted from both caches.
 */
@Test(timeout = 60000)
public void testServiceTimeout() throws Exception {
  DistributedLogConfiguration confLocal = newLocalConf();
  // huge buffer + no periodic flush, so writes never complete on their own
  confLocal.setOutputBufferSize(Integer.MAX_VALUE)
      .setImmediateFlushEnabled(false)
      .setPeriodicFlushFrequencyMilliSeconds(0);
  ServerConfiguration serverConfLocal = newLocalServerConf();
  serverConfLocal.addConfiguration(serverConf);
  serverConfLocal.setServiceTimeoutMs(200)
      .setStreamProbationTimeoutMs(100);
  String streamName = testName.getMethodName();
  // create a new service with 200ms timeout
  DistributedLogServiceImpl localService = createService(serverConfLocal, confLocal);
  StreamManagerImpl streamManager = (StreamManagerImpl) localService.getStreamManager();
  int numWrites = 10;
  List<Future<WriteResponse>> futureList = new ArrayList<Future<WriteResponse>>(numWrites);
  for (int i = 0; i < numWrites; i++) {
    futureList.add(localService.write(streamName, createRecord(i)));
  }
  assertTrue("Stream " + streamName + " should be cached",
      streamManager.getCachedStreams().containsKey(streamName));
  StreamImpl s = (StreamImpl) streamManager.getCachedStreams().get(streamName);
  // the stream should be set CLOSING once the service timeout fires
  while (StreamStatus.CLOSING != s.getStatus()
      && StreamStatus.CLOSED != s.getStatus()) {
    TimeUnit.MILLISECONDS.sleep(20);
  }
  assertNotNull("Writer should be initialized", s.getWriter());
  assertNull("No exception should be thrown", s.getLastException());
  Future<Void> closeFuture = s.getCloseFuture();
  Await.result(closeFuture);
  for (int i = 0; i < numWrites; i++) {
    assertTrue("Write should not fail before closing",
        futureList.get(i).isDefined());
    WriteResponse response = Await.result(futureList.get(i));
    // the exact failure code depends on where the abort lands — any of these
    // three indicates the write was cancelled by the close
    assertTrue("Op should fail with " + StatusCode.WRITE_CANCELLED_EXCEPTION
            + " but " + response.getHeader().getCode() + " is received.",
        StatusCode.BK_TRANSMIT_ERROR == response.getHeader().getCode()
            || StatusCode.WRITE_EXCEPTION == response.getHeader().getCode()
            || StatusCode.WRITE_CANCELLED_EXCEPTION == response.getHeader().getCode());
  }
  // cache eviction is asynchronous (probation timeout); poll until gone
  while (streamManager.getCachedStreams().containsKey(streamName)) {
    TimeUnit.MILLISECONDS.sleep(20);
  }
  assertFalse("Stream should be removed from cache",
      streamManager.getCachedStreams().containsKey(streamName));
  assertFalse("Stream should be removed from acquired cache",
      streamManager.getAcquiredStreams().containsKey(streamName));
  localService.shutdown();
}
/** Creates a service configured for immediate flushing, used by the checksum tests. */
private DistributedLogServiceImpl createConfiguredLocalService() throws Exception {
  DistributedLogConfiguration localConf = newLocalConf();
  localConf.setOutputBufferSize(0);
  localConf.setImmediateFlushEnabled(true);
  localConf.setPeriodicFlushFrequencyMilliSeconds(0);
  return createService(serverConf, localConf);
}
/**
 * Returns a buffer containing the fixed payload {@code "test-data"}.
 * Encodes with UTF-8 explicitly so the bytes (and thus checksums computed over
 * them) do not depend on the platform default charset.
 */
private ByteBuffer getTestDataBuffer() {
  return ByteBuffer.wrap("test-data".getBytes(UTF_8));
}
/**
 * With durable writes disabled, writes must still complete but report the
 * invalid DLSN instead of a real log position.
 */
@Test(timeout = 60000)
public void testNonDurableWrite() throws Exception {
  DistributedLogConfiguration localConf = newLocalConf();
  localConf.setOutputBufferSize(Integer.MAX_VALUE);
  localConf.setImmediateFlushEnabled(false);
  localConf.setPeriodicFlushFrequencyMilliSeconds(0);
  localConf.setDurableWriteEnabled(false);
  ServerConfiguration localServerConf = new ServerConfiguration();
  localServerConf.addConfiguration(serverConf);
  localServerConf.enableDurableWrite(false);
  localServerConf.setServiceTimeoutMs(Integer.MAX_VALUE);
  localServerConf.setStreamProbationTimeoutMs(Integer.MAX_VALUE);
  String streamName = testName.getMethodName();
  DistributedLogServiceImpl localService = createService(localServerConf, localConf);
  StreamManagerImpl streamManager = (StreamManagerImpl) localService.getStreamManager();
  int numWrites = 10;
  List<Future<WriteResponse>> futures = new ArrayList<Future<WriteResponse>>();
  for (int txid = 0; txid < numWrites; txid++) {
    futures.add(localService.write(streamName, createRecord(txid)));
  }
  assertTrue("Stream " + streamName + " should be cached",
      streamManager.getCachedStreams().containsKey(streamName));
  // Every response carries the invalid DLSN since nothing was made durable.
  for (WriteResponse wr : Await.result(Future.collect(futures))) {
    assertEquals(DLSN.InvalidDLSN, DLSN.deserialize(wr.getDlsn()));
  }
  localService.shutdown();
}
/** A write whose context carries no checksum should be accepted. */
@Test(timeout = 60000)
public void testWriteOpNoChecksum() throws Exception {
  DistributedLogServiceImpl svc = createConfiguredLocalService();
  WriteContext emptyCtx = new WriteContext();
  Future<WriteResponse> future = svc.writeWithContext("test", getTestDataBuffer(), emptyCtx);
  assertEquals(StatusCode.SUCCESS, Await.result(future).getHeader().getCode());
  svc.shutdown();
}
/** A truncate whose context carries no checksum should be accepted. */
@Test(timeout = 60000)
public void testTruncateOpNoChecksum() throws Exception {
  DistributedLogServiceImpl svc = createConfiguredLocalService();
  WriteContext emptyCtx = new WriteContext();
  Future<WriteResponse> future = svc.truncate("test", new DLSN(1, 2, 3).serialize(), emptyCtx);
  assertEquals(StatusCode.SUCCESS, Await.result(future).getHeader().getCode());
  svc.shutdown();
}
/** Stream ops (heartbeat/release/delete) without checksums should all be accepted. */
@Test(timeout = 60000)
public void testStreamOpNoChecksum() throws Exception {
  DistributedLogServiceImpl svc = createConfiguredLocalService();
  WriteContext ctx = new WriteContext();
  HeartbeatOptions options = new HeartbeatOptions();
  options.setSendHeartBeatToReader(true);
  // Heartbeat to acquire the stream, then release it.
  WriteResponse resp = Await.result(svc.heartbeatWithOptions("test", ctx, options));
  assertEquals(StatusCode.SUCCESS, resp.getHeader().getCode());
  resp = Await.result(svc.release("test", ctx));
  assertEquals(StatusCode.SUCCESS, resp.getHeader().getCode());
  // Heartbeat to re-acquire the stream, then delete it.
  resp = Await.result(svc.heartbeatWithOptions("test", ctx, options));
  assertEquals(StatusCode.SUCCESS, resp.getHeader().getCode());
  resp = Await.result(svc.delete("test", ctx));
  assertEquals(StatusCode.SUCCESS, resp.getHeader().getCode());
  // shutdown the local service
  svc.shutdown();
}
/** A write carrying a wrong checksum must be rejected with CHECKSUM_FAILED. */
@Test(timeout = 60000)
public void testWriteOpChecksumBadChecksum() throws Exception {
  DistributedLogServiceImpl svc = createConfiguredLocalService();
  WriteContext badCtx = new WriteContext().setCrc32(999);
  WriteResponse resp = Await.result(svc.writeWithContext("test", getTestDataBuffer(), badCtx));
  assertEquals(StatusCode.CHECKSUM_FAILED, resp.getHeader().getCode());
  svc.shutdown();
}
/** A checksum computed for one stream must not validate a write to another. */
@Test(timeout = 60000)
public void testWriteOpChecksumBadStream() throws Exception {
  DistributedLogServiceImpl svc = createConfiguredLocalService();
  // Checksum is computed for stream "test" but the write targets "test1".
  WriteContext ctx = new WriteContext().setCrc32(
      ProtocolUtils.writeOpCRC32("test", getTestDataBuffer()));
  WriteResponse resp = Await.result(svc.writeWithContext("test1", getTestDataBuffer(), ctx));
  assertEquals(StatusCode.CHECKSUM_FAILED, resp.getHeader().getCode());
  svc.shutdown();
}
/** Corrupting the payload after checksum computation must fail validation. */
@Test(timeout = 60000)
public void testWriteOpChecksumBadData() throws Exception {
  DistributedLogServiceImpl svc = createConfiguredLocalService();
  ByteBuffer payload = getTestDataBuffer();
  WriteContext ctx = new WriteContext().setCrc32(
      ProtocolUtils.writeOpCRC32("test", payload));
  // Flip one byte after the checksum was computed.
  payload.put(1, (byte) 0xab);
  WriteResponse resp = Await.result(svc.writeWithContext("test", payload, ctx));
  assertEquals(StatusCode.CHECKSUM_FAILED, resp.getHeader().getCode());
  svc.shutdown();
}
/** Every stream op type must reject a mismatched checksum. */
@Test(timeout = 60000)
public void testStreamOpChecksumBadChecksum() throws Exception {
  DistributedLogServiceImpl svc = createConfiguredLocalService();
  WriteContext badCtx = new WriteContext().setCrc32(999);
  assertEquals(StatusCode.CHECKSUM_FAILED,
      Await.result(svc.heartbeat("test", badCtx)).getHeader().getCode());
  assertEquals(StatusCode.CHECKSUM_FAILED,
      Await.result(svc.release("test", badCtx)).getHeader().getCode());
  assertEquals(StatusCode.CHECKSUM_FAILED,
      Await.result(svc.delete("test", badCtx)).getHeader().getCode());
  svc.shutdown();
}
/** A truncate carrying a wrong checksum must be rejected with CHECKSUM_FAILED. */
@Test(timeout = 60000)
public void testTruncateOpChecksumBadChecksum() throws Exception {
  DistributedLogServiceImpl svc = createConfiguredLocalService();
  WriteContext badCtx = new WriteContext().setCrc32(999);
  WriteResponse resp = Await.result(svc.truncate("test", new DLSN(1, 2, 3).serialize(), badCtx));
  assertEquals(StatusCode.CHECKSUM_FAILED, resp.getHeader().getCode());
  svc.shutdown();
}
/** Builds a WriteOp carrying the given checksum and checksum-disable feature flag. */
private WriteOp getWriteOp(String name, SettableFeature disabledFeature, Long checksum) {
  ByteBuffer payload = ByteBuffer.wrap("test".getBytes());
  return new WriteOp(name,
      payload,
      new NullStatsLogger(),
      new NullStatsLogger(),
      new IdentityStreamPartitionConverter(),
      new ServerConfiguration(),
      (byte) 0,
      checksum,
      false,
      disabledFeature,
      DefaultAccessControlManager.INSTANCE);
}
/**
 * A bad checksum fails preExecute() while checksum validation is active,
 * but passes once the "disable checksum" feature is switched on.
 */
@Test(timeout = 60000)
public void testStreamOpBadChecksumWithChecksumDisabled() throws Exception {
  String streamName = testName.getMethodName();
  SettableFeature disabledFeature = new SettableFeature("", 0);
  WriteOp writeOp0 = getWriteOp(streamName, disabledFeature, 919191L);
  WriteOp writeOp1 = getWriteOp(streamName, disabledFeature, 919191L);
  try {
    writeOp0.preExecute();
    fail("should have thrown");
  } catch (Exception ex) {
    // expected: checksum mismatch while validation is still enabled
  }
  disabledFeature.set(1);
  writeOp1.preExecute();
}
/** A correct checksum passes preExecute() whether validation is disabled or enabled. */
@Test(timeout = 60000)
public void testStreamOpGoodChecksumWithChecksumDisabled() throws Exception {
  String streamName = testName.getMethodName();
  // Feature starts at 1, i.e. checksum validation disabled.
  SettableFeature disabledFeature = new SettableFeature("", 1);
  WriteOp opWithValidationOff = getWriteOp(
      streamName,
      disabledFeature,
      ProtocolUtils.writeOpCRC32(streamName, Unpooled.wrappedBuffer("test".getBytes())));
  WriteOp opWithValidationOn = getWriteOp(
      streamName,
      disabledFeature,
      ProtocolUtils.writeOpCRC32(streamName, Unpooled.wrappedBuffer("test".getBytes())));
  opWithValidationOff.preExecute();
  disabledFeature.set(0);
  opWithValidationOn.preExecute();
}
/**
 * closeStreams() must flush buffered writes: after closing all streams,
 * every queued write completes (success or rejection) and both the cached
 * and acquired stream caches end up empty.
 */
@Test(timeout = 60000)
public void testCloseStreamsShouldFlush() throws Exception {
  DistributedLogConfiguration confLocal = newLocalConf();
  // buffer everything so the writes can only complete via the close-time flush
  confLocal.setOutputBufferSize(Integer.MAX_VALUE)
      .setImmediateFlushEnabled(false)
      .setPeriodicFlushFrequencyMilliSeconds(0);
  String streamNamePrefix = testName.getMethodName();
  DistributedLogServiceImpl localService = createService(serverConf, confLocal);
  StreamManagerImpl streamManager = (StreamManagerImpl) localService.getStreamManager();
  int numStreams = 10;
  int numWrites = 10;
  List<Future<WriteResponse>> futureList =
      Lists.newArrayListWithExpectedSize(numStreams * numWrites);
  for (int i = 0; i < numStreams; i++) {
    String streamName = streamNamePrefix + "-" + i;
    HeartbeatOptions hbOptions = new HeartbeatOptions();
    hbOptions.setSendHeartBeatToReader(true);
    // make sure the first log segment of each stream created
    Await.result(localService.heartbeatWithOptions(streamName, new WriteContext(), hbOptions));
    for (int j = 0; j < numWrites; j++) {
      futureList.add(localService.write(streamName, createRecord(i * numWrites + j)));
    }
  }
  assertEquals("There should be " + numStreams + " streams in cache",
      numStreams, streamManager.getCachedStreams().size());
  // acquisition is asynchronous; wait until all streams are owned
  while (streamManager.getAcquiredStreams().size() < numStreams) {
    TimeUnit.MILLISECONDS.sleep(20);
  }
  Future<List<Void>> closeResult = localService.closeStreams();
  List<Void> closedStreams = Await.result(closeResult);
  assertEquals("There should be " + numStreams + " streams closed",
      numStreams, closedStreams.size());
  // all writes should be flushed
  for (Future<WriteResponse> future : futureList) {
    WriteResponse response = Await.result(future);
    assertTrue("Op should succeed or be rejected : " + response.getHeader().getCode(),
        StatusCode.SUCCESS == response.getHeader().getCode()
            || StatusCode.WRITE_EXCEPTION == response.getHeader().getCode()
            || StatusCode.STREAM_UNAVAILABLE == response.getHeader().getCode());
  }
  assertTrue("There should be no streams in the cache",
      streamManager.getCachedStreams().isEmpty());
  assertTrue("There should be no streams in the acquired cache",
      streamManager.getAcquiredStreams().isEmpty());
  localService.shutdown();
}
/**
 * closeStreams() on streams forced into ERROR must abort rather than flush:
 * pending writes fail with transmit/cancelled/write errors, the acquired
 * cache empties on close, and the stream cache empties after shutdown.
 */
@Test(timeout = 60000)
public void testCloseStreamsShouldAbort() throws Exception {
  DistributedLogConfiguration confLocal = newLocalConf();
  // buffer everything so nothing is flushed before the abort
  confLocal.setOutputBufferSize(Integer.MAX_VALUE)
      .setImmediateFlushEnabled(false)
      .setPeriodicFlushFrequencyMilliSeconds(0);
  String streamNamePrefix = testName.getMethodName();
  DistributedLogServiceImpl localService = createService(serverConf, confLocal);
  StreamManagerImpl streamManager = (StreamManagerImpl) localService.getStreamManager();
  int numStreams = 10;
  int numWrites = 10;
  List<Future<WriteResponse>> futureList =
      Lists.newArrayListWithExpectedSize(numStreams * numWrites);
  for (int i = 0; i < numStreams; i++) {
    String streamName = streamNamePrefix + "-" + i;
    HeartbeatOptions hbOptions = new HeartbeatOptions();
    hbOptions.setSendHeartBeatToReader(true);
    // make sure the first log segment of each stream created
    Await.result(localService.heartbeatWithOptions(streamName, new WriteContext(), hbOptions));
    for (int j = 0; j < numWrites; j++) {
      futureList.add(localService.write(streamName, createRecord(i * numWrites + j)));
    }
  }
  assertEquals("There should be " + numStreams + " streams in cache",
      numStreams, streamManager.getCachedStreams().size());
  // acquisition is asynchronous; wait until all streams are owned
  while (streamManager.getAcquiredStreams().size() < numStreams) {
    TimeUnit.MILLISECONDS.sleep(20);
  }
  // force every stream into ERROR so close aborts instead of flushing
  for (Stream s : streamManager.getAcquiredStreams().values()) {
    StreamImpl stream = (StreamImpl) s;
    stream.setStatus(StreamStatus.ERROR);
  }
  Future<List<Void>> closeResult = localService.closeStreams();
  List<Void> closedStreams = Await.result(closeResult);
  assertEquals("There should be " + numStreams + " streams closed",
      numStreams, closedStreams.size());
  // all writes should be aborted with one of the failure codes below
  for (Future<WriteResponse> future : futureList) {
    WriteResponse response = Await.result(future);
    assertTrue("Op should fail with " + StatusCode.BK_TRANSMIT_ERROR + " or be rejected : "
            + response.getHeader().getCode(),
        StatusCode.BK_TRANSMIT_ERROR == response.getHeader().getCode()
            || StatusCode.WRITE_EXCEPTION == response.getHeader().getCode()
            || StatusCode.WRITE_CANCELLED_EXCEPTION == response.getHeader().getCode());
  }
  // acquired streams should all been removed after we close them
  assertTrue("There should be no streams in the acquired cache",
      streamManager.getAcquiredStreams().isEmpty());
  localService.shutdown();
  // cached streams wouldn't be removed immediately after streams are closed
  // but they should be removed after we shutdown the service
  assertTrue("There should be no streams in the cache after shutting down the service",
      streamManager.getCachedStreams().isEmpty());
}
    @Test(timeout = 60000)
    public void testShutdown() throws Exception {
        service.shutdown();
        StreamManagerImpl streamManager = (StreamManagerImpl) service.getStreamManager();
        // A write issued after shutdown must be rejected with SERVICE_UNAVAILABLE
        // and must not create or acquire any stream as a side effect.
        WriteResponse response =
                Await.result(service.write(testName.getMethodName(), createRecord(0L)));
        assertEquals("Write should fail with " + StatusCode.SERVICE_UNAVAILABLE,
                StatusCode.SERVICE_UNAVAILABLE, response.getHeader().getCode());
        assertTrue("There should be no streams created after shutdown",
                streamManager.getCachedStreams().isEmpty());
        assertTrue("There should be no streams acquired after shutdown",
                streamManager.getAcquiredStreams().isEmpty());
    }
    @Test(timeout = 60000)
    public void testGetOwner() throws Exception {
        // Route "stream-0" to this server; disable retrying the same host so ownership
        // lookups resolve deterministically.
        ((LocalRoutingService) service.getRoutingService())
                .addHost("stream-0", service.getServiceAddress().getSocketAddress())
                .setAllowRetrySameHost(false);
        service.startPlacementPolicy();
        // "stream-1" is not acquired locally; the placement policy should still report this
        // service as the owner (FOUND + its own address as the location).
        WriteResponse response = Await.result(service.getOwner("stream-1", new WriteContext()));
        assertEquals(StatusCode.FOUND, response.getHeader().getCode());
        assertEquals(service.getServiceAddress().toString(),
                response.getHeader().getLocation());
        // service cache "stream-2"
        StreamImpl stream = (StreamImpl) service.getStreamManager().getOrCreateStream("stream-2", false);
        // create write ops to stream-2 to make service acquire the stream
        WriteOp op = createWriteOp(service, "stream-2", 0L);
        stream.submit(op);
        stream.start();
        WriteResponse wr = Await.result(op.result());
        assertEquals("Op should succeed",
                StatusCode.SUCCESS, wr.getHeader().getCode());
        assertEquals("Service should acquire stream",
                StreamStatus.INITIALIZED, stream.getStatus());
        assertNotNull(stream.getManager());
        assertNotNull(stream.getWriter());
        assertNull(stream.getLastException());
        // the stream is acquired
        response = Await.result(service.getOwner("stream-2", new WriteContext()));
        assertEquals(StatusCode.FOUND, response.getHeader().getCode());
        assertEquals(service.getServiceAddress().toString(),
                response.getHeader().getLocation());
    }
}
|
apache/ignite-3 | 38,174 | modules/sql-engine/src/main/java/org/apache/ignite/internal/sql/engine/util/TypeUtils.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.sql.engine.util;
import static org.apache.calcite.sql.type.SqlTypeName.BINARY_TYPES;
import static org.apache.calcite.sql.type.SqlTypeName.CHAR_TYPES;
import static org.apache.calcite.sql.type.SqlTypeName.STRING_TYPES;
import static org.apache.ignite.internal.sql.engine.util.IgniteMath.convertToIntExact;
import static org.apache.ignite.lang.ErrorGroups.Sql.STMT_VALIDATION_ERR;
import java.lang.reflect.Type;
import java.math.BigDecimal;
import java.nio.charset.StandardCharsets;
import java.time.Duration;
import java.time.Instant;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.time.Period;
import java.time.ZoneOffset;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.EnumSet;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.TimeUnit;
import java.util.function.Function;
import java.util.function.Supplier;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import org.apache.calcite.avatica.util.ByteString;
import org.apache.calcite.plan.RelOptUtil;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rel.type.RelDataTypeFactory;
import org.apache.calcite.rel.type.RelDataTypeField;
import org.apache.calcite.sql.type.BasicSqlType;
import org.apache.calcite.sql.type.IntervalSqlType;
import org.apache.calcite.sql.type.SqlTypeName;
import org.apache.calcite.sql.type.SqlTypeName.Limit;
import org.apache.calcite.sql.type.SqlTypeUtil;
import org.apache.ignite.internal.sql.engine.SchemaAwareConverter;
import org.apache.ignite.internal.sql.engine.exec.ExecutionContext;
import org.apache.ignite.internal.sql.engine.exec.RowHandler;
import org.apache.ignite.internal.sql.engine.exec.RowHandler.RowBuilder;
import org.apache.ignite.internal.sql.engine.exec.RowHandler.RowFactory;
import org.apache.ignite.internal.sql.engine.exec.row.BaseTypeSpec;
import org.apache.ignite.internal.sql.engine.exec.row.RowSchema;
import org.apache.ignite.internal.sql.engine.exec.row.RowSchemaTypes;
import org.apache.ignite.internal.sql.engine.exec.row.RowType;
import org.apache.ignite.internal.sql.engine.exec.row.TypeSpec;
import org.apache.ignite.internal.sql.engine.prepare.ParameterType;
import org.apache.ignite.internal.sql.engine.type.IgniteTypeFactory;
import org.apache.ignite.internal.type.DecimalNativeType;
import org.apache.ignite.internal.type.NativeType;
import org.apache.ignite.internal.type.NativeTypes;
import org.apache.ignite.internal.type.TemporalNativeType;
import org.apache.ignite.internal.type.VarlenNativeType;
import org.apache.ignite.sql.ColumnMetadata;
import org.apache.ignite.sql.ColumnType;
import org.apache.ignite.sql.SqlException;
import org.jetbrains.annotations.Nullable;
/**
 * TypeUtils.
 * TODO Documentation https://issues.apache.org/jira/browse/IGNITE-15859
 */
public class TypeUtils {
    /** Converter that returns the row value unchanged; used when no per-column conversion is needed. */
    public static final SchemaAwareConverter<Object, Object> IDENTITY_ROW_CONVERTER = (idx, r) -> r;
    // SQL types whose internal (execution engine) representation differs from the external
    // one and therefore requires conversion (see toInternal/fromInternal below).
    private static final Set<SqlTypeName> CONVERTABLE_TYPES = EnumSet.of(
            SqlTypeName.DATE,
            SqlTypeName.TIME,
            SqlTypeName.BINARY,
            SqlTypeName.VARBINARY,
            SqlTypeName.TIME_WITH_LOCAL_TIME_ZONE,
            SqlTypeName.TIMESTAMP,
            SqlTypeName.TIMESTAMP_WITH_LOCAL_TIME_ZONE,
            SqlTypeName.INTERVAL_SECOND,
            SqlTypeName.INTERVAL_MINUTE,
            SqlTypeName.INTERVAL_MINUTE_SECOND,
            SqlTypeName.INTERVAL_HOUR,
            SqlTypeName.INTERVAL_HOUR_MINUTE,
            SqlTypeName.INTERVAL_HOUR_SECOND,
            SqlTypeName.INTERVAL_DAY,
            SqlTypeName.INTERVAL_DAY_HOUR,
            SqlTypeName.INTERVAL_DAY_MINUTE,
            SqlTypeName.INTERVAL_DAY_SECOND,
            SqlTypeName.INTERVAL_MONTH,
            SqlTypeName.INTERVAL_YEAR,
            SqlTypeName.INTERVAL_YEAR_MONTH
    );
/**
* Returns the upper bound value for the given SQL type as a {@link BigDecimal}, if applicable.
*
* <p>If the type is not a numeric type, this method returns {@code null}.</p>
*
* @param type The {@link RelDataType} representing the SQL type
* @return A {@link BigDecimal} representing the maximum value for the given type, or {@code null} if the type is not a numeric.
*/
public static @Nullable BigDecimal upperBoundFor(RelDataType type) {
switch (type.getSqlTypeName()) {
case TINYINT: return BigDecimal.valueOf(Byte.MAX_VALUE);
case SMALLINT: return BigDecimal.valueOf(Short.MAX_VALUE);
case INTEGER: return BigDecimal.valueOf(Integer.MAX_VALUE);
case BIGINT: return BigDecimal.valueOf(Long.MAX_VALUE);
case REAL: return BigDecimal.valueOf(Float.MAX_VALUE);
case DOUBLE: return BigDecimal.valueOf(Double.MAX_VALUE);
case DECIMAL: return (BigDecimal) type.getSqlTypeName()
.getLimit(true, Limit.OVERFLOW, false, type.getPrecision(), type.getScale());
default: return null;
}
}
/**
* Returns the lower bound value for the given SQL type as a {@link BigDecimal}, if applicable.
*
* <p>If the type is not a numeric type, this method returns {@code null}.</p>
*
* @param type The {@link RelDataType} representing the SQL type
* @return A {@link BigDecimal} representing the minimum value for the given type, or {@code null} if the type is not a numeric.
*/
public static @Nullable BigDecimal lowerBoundFor(RelDataType type) {
switch (type.getSqlTypeName()) {
case TINYINT: return BigDecimal.valueOf(Byte.MIN_VALUE);
case SMALLINT: return BigDecimal.valueOf(Short.MIN_VALUE);
case INTEGER: return BigDecimal.valueOf(Integer.MIN_VALUE);
case BIGINT: return BigDecimal.valueOf(Long.MIN_VALUE);
case REAL: return BigDecimal.valueOf(-Float.MAX_VALUE);
case DOUBLE: return BigDecimal.valueOf(-Double.MAX_VALUE);
case DECIMAL: return (BigDecimal) type.getSqlTypeName()
.getLimit(false, Limit.OVERFLOW, false, type.getPrecision(), type.getScale());
default: return null;
}
}
/** Creates parameter metadata from the given logical type. */
public static ParameterType fromRelDataType(RelDataType type) {
ColumnType columnType = columnType(type);
assert columnType != null : "No column type for " + type;
int precision = columnType.lengthAllowed() || columnType.precisionAllowed()
? type.getPrecision()
: ColumnMetadata.UNDEFINED_PRECISION;
int scale = columnType.scaleAllowed() ? type.getScale() : ColumnMetadata.UNDEFINED_SCALE;
return new ParameterType(columnType, precision, scale, type.isNullable());
}
    /** Lazy-initialization holder for the set of Java classes accepted as dynamic parameter values. */
    private static class SupportedParamClassesHolder {
        // TODO: https://issues.apache.org/jira/browse/IGNITE-17373
        static final Set<ColumnType> UNSUPPORTED_COLUMN_TYPES_AS_PARAMETERS = Set.of(ColumnType.PERIOD, ColumnType.DURATION);
        // Java classes of every column type except the unsupported ones above.
        static final Set<Class<?>> SUPPORTED_PARAM_CLASSES;
        static {
            SUPPORTED_PARAM_CLASSES = Arrays.stream(ColumnType.values())
                    .filter(t -> !UNSUPPORTED_COLUMN_TYPES_AS_PARAMETERS.contains(t))
                    .map(ColumnType::javaClass).collect(Collectors.toUnmodifiableSet());
        }
    }
    /** Returns the set of Java classes that may be passed as dynamic parameter values. */
    private static Set<Class<?>> supportedParamClasses() {
        return SupportedParamClassesHolder.SUPPORTED_PARAM_CLASSES;
    }
/** Return {@code true} if supplied object is suitable as dynamic parameter. */
public static boolean supportParamInstance(@Nullable Object param) {
return param == null || supportedParamClasses().contains(param.getClass());
}
/**
* CombinedRowType.
* TODO Documentation https://issues.apache.org/jira/browse/IGNITE-15859
*/
public static RelDataType combinedRowType(IgniteTypeFactory typeFactory, RelDataType... types) {
RelDataTypeFactory.Builder builder = new RelDataTypeFactory.Builder(typeFactory);
Set<String> names = new HashSet<>();
for (RelDataType type : types) {
for (RelDataTypeField field : type.getFieldList()) {
int idx = 0;
String fieldName = field.getName();
while (!names.add(fieldName)) {
fieldName = field.getName() + idx++;
}
builder.add(fieldName, field.getType());
}
}
return builder.build();
}
    /** Assembly output type from input types. */
    public static RelDataType createRowType(IgniteTypeFactory typeFactory, List<RelDataType> fields) {
        // Generated column names look like $F0, $F1, ...
        return createRowType(typeFactory, fields, "$F");
    }
private static RelDataType createRowType(IgniteTypeFactory typeFactory, List<RelDataType> fields, String namePreffix) {
List<String> names = IntStream.range(0, fields.size())
.mapToObj(ord -> namePreffix + ord)
.collect(Collectors.toList());
return typeFactory.createStructType(fields, names);
}
    /**
     * Provide a function to convert internal representation of sql results into external types.
     *
     * @param ectx SQL execution context.
     * @param resultType Type of result.
     * @return Schema-aware converting function.
     */
    public static SchemaAwareConverter<Object, Object> resultTypeConverter(ExecutionContext<?> ectx, RelDataType resultType) {
        assert resultType.isStruct();
        if (hasConvertableFields(resultType)) {
            List<RelDataType> types = RelOptUtil.getFieldTypeList(resultType);
            // Precompute one converter per column; columns that need no conversion get identity.
            Function<Object, Object>[] converters = (Function<Object, Object>[]) new Function[types.size()];
            for (int i = 0; i < types.size(); i++) {
                converters[i] = fieldConverter(ectx, types.get(i));
            }
            return (idx, r) -> {
                assert idx >= 0 && idx < converters.length;
                return converters[idx].apply(r);
            };
        }
        // No column requires conversion - avoid any per-row dispatch.
        return IDENTITY_ROW_CONVERTER;
    }
private static Function<Object, Object> fieldConverter(ExecutionContext<?> ectx, RelDataType fieldType) {
Type storageType = ectx.getTypeFactory().getResultClass(fieldType);
if (isConvertableType(fieldType)) {
return v -> fromInternal(v, storageType);
}
return Function.identity();
}
    /**
     * Returns {@code true} if values of the given type have a distinct internal representation
     * and therefore require conversion (see {@code CONVERTABLE_TYPES}).
     * TODO Documentation https://issues.apache.org/jira/browse/IGNITE-15859
     */
    public static boolean isConvertableType(RelDataType type) {
        return CONVERTABLE_TYPES.contains(type.getSqlTypeName());
    }
private static boolean hasConvertableFields(RelDataType resultType) {
for (RelDataTypeField field : resultType.getFieldList()) {
if (isConvertableType(field.getType())) {
return true;
}
}
return false;
}
    /**
     * Converts the given value to its presentation used by the execution engine.
     */
    public static Object toInternal(Object val, ColumnType spec) {
        switch (spec) {
            case INT8: {
                assert val instanceof Byte : val.getClass();
                return val;
            }
            case INT16: {
                assert val instanceof Short : val.getClass();
                return val;
            }
            case INT32: {
                assert val instanceof Integer : val.getClass();
                return val;
            }
            case INT64: {
                assert val instanceof Long : val.getClass();
                return val;
            }
            case FLOAT: {
                assert val instanceof Float : val.getClass();
                return val;
            }
            case DOUBLE: {
                assert val instanceof Double : val.getClass();
                return val;
            }
            case DECIMAL: {
                assert val instanceof BigDecimal : val.getClass();
                return val;
            }
            case UUID: {
                assert val instanceof UUID : val.getClass();
                return val;
            }
            case STRING: {
                assert val instanceof String : val.getClass();
                return val;
            }
            case BYTE_ARRAY: {
                // Binary values are normalized to ByteString; strings are encoded as UTF-8 bytes.
                if (val instanceof String) {
                    return new ByteString(((String) val).getBytes(StandardCharsets.UTF_8));
                } else if (val instanceof byte[]) {
                    return new ByteString((byte[]) val);
                } else {
                    assert val instanceof ByteString : val.getClass();
                    return val;
                }
            }
            case DATE: {
                // Stored internally as the number of days since the epoch (int).
                assert val instanceof LocalDate : val.getClass();
                return (int) ((LocalDate) val).toEpochDay();
            }
            case TIME: {
                // Stored internally as milliseconds of the day (int); sub-millisecond precision is dropped.
                assert val instanceof LocalTime : val.getClass();
                return (int) (TimeUnit.NANOSECONDS.toMillis(((LocalTime) val).toNanoOfDay()));
            }
            case DATETIME: {
                // Stored internally as epoch milliseconds, computed at UTC offset (long).
                assert val instanceof LocalDateTime : val.getClass();
                var dt = (LocalDateTime) val;
                return TimeUnit.SECONDS.toMillis(dt.toEpochSecond(ZoneOffset.UTC)) + TimeUnit.NANOSECONDS.toMillis(dt.getNano());
            }
            case TIMESTAMP: {
                // Stored internally as epoch milliseconds (long).
                assert val instanceof Instant : val.getClass();
                return ((Instant) val).toEpochMilli();
            }
            case BOOLEAN:
                assert val instanceof Boolean : val.getClass();
                return val;
            case DURATION:
                // Stored internally as total milliseconds (long).
                return ((Duration) val).toMillis();
            case PERIOD:
                // Stored internally as total months (int); overflow raises an exception.
                return convertToIntExact(((Period) val).toTotalMonths());
            default: {
                throw new AssertionError("Type is not supported: " + spec);
            }
        }
    }
    /**
     * Converts the value from its presentation used by the execution engine.
     *
     * <p>Counterpart of {@link #toInternal(Object, ColumnType)}, dispatching on the requested
     * storage class instead of a column type. Values whose internal and external
     * representations coincide are passed through unchanged.
     */
    // TODO: https://issues.apache.org/jira/browse/IGNITE-23295 Remove this method.
    @Deprecated(forRemoval = true)
    public static @Nullable Object fromInternal(@Nullable Object val, Type storageType) {
        if (val == null) {
            return null;
        } else if (storageType == LocalDate.class && val instanceof Integer) {
            // Internal DATE representation: days since the epoch.
            return LocalDate.ofEpochDay((Integer) val);
        } else if (storageType == LocalTime.class && val instanceof Integer) {
            // Internal TIME representation: milliseconds of the day.
            return LocalTime.ofNanoOfDay(TimeUnit.MILLISECONDS.toNanos(Long.valueOf((Integer) val)));
        } else if (storageType == LocalDateTime.class && (val instanceof Long)) {
            // Internal DATETIME representation: epoch milliseconds at UTC.
            return LocalDateTime.ofInstant(Instant.ofEpochMilli((long) val), ZoneOffset.UTC);
        } else if (storageType == Instant.class && val instanceof Long) {
            return Instant.ofEpochMilli((long) val);
        } else if (storageType == Duration.class && val instanceof Long) {
            return Duration.ofMillis((Long) val);
        } else if (storageType == Period.class && val instanceof Integer) {
            // Internal PERIOD representation: total months.
            return Period.of((Integer) val / 12, (Integer) val % 12, 0);
        } else if (storageType == byte[].class && val instanceof ByteString) {
            return ((ByteString) val).getBytes();
        } else {
            return val;
        }
    }
    /**
     * Converts the value from its presentation used by the execution engine.
     */
    public static Object fromInternal(Object val, ColumnType spec) {
        switch (spec) {
            // These types share the same internal and external representation.
            case INT8:
            case INT16:
            case INT32:
            case INT64:
            case FLOAT:
            case DOUBLE:
            case DECIMAL:
            case UUID:
            case STRING:
            case BOOLEAN:
                return val;
            case BYTE_ARRAY:
                return ((ByteString) val).getBytes();
            case DATE:
                // Internal representation: days since the epoch.
                return LocalDate.ofEpochDay((Integer) val);
            case TIME:
                // Internal representation: milliseconds of the day.
                return LocalTime.ofNanoOfDay(TimeUnit.MILLISECONDS.toNanos(Long.valueOf((Integer) val)));
            case DATETIME:
                // Internal representation: epoch milliseconds at UTC.
                return LocalDateTime.ofInstant(Instant.ofEpochMilli((Long) val), ZoneOffset.UTC);
            case TIMESTAMP:
                return Instant.ofEpochMilli((Long) val);
            case DURATION: {
                assert val instanceof Long;
                return Duration.ofMillis((Long) val);
            }
            case PERIOD: {
                // Internal representation: total months.
                assert val instanceof Integer;
                return Period.of((Integer) val / 12, (Integer) val % 12, 0);
            }
            default: {
                throw new AssertionError("Type is not supported: " + spec);
            }
        }
    }
    /**
     * Convert calcite date type to Ignite native type.
     */
    public static ColumnType columnType(RelDataType type) {
        switch (type.getSqlTypeName()) {
            case VARCHAR:
            case CHAR:
                return ColumnType.STRING;
            case DATE:
                return ColumnType.DATE;
            case TIME:
            case TIME_WITH_LOCAL_TIME_ZONE:
                return ColumnType.TIME;
            case INTEGER:
                return ColumnType.INT32;
            case TIMESTAMP:
                // Calcite TIMESTAMP (without time zone) maps to Ignite DATETIME.
                return ColumnType.DATETIME;
            case TIMESTAMP_WITH_LOCAL_TIME_ZONE:
                return ColumnType.TIMESTAMP;
            case BIGINT:
                return ColumnType.INT64;
            case SMALLINT:
                return ColumnType.INT16;
            case TINYINT:
                return ColumnType.INT8;
            case BOOLEAN:
                return ColumnType.BOOLEAN;
            case DECIMAL:
                return ColumnType.DECIMAL;
            case DOUBLE:
                return ColumnType.DOUBLE;
            case REAL:
            case FLOAT:
                return ColumnType.FLOAT;
            case BINARY:
            case VARBINARY:
            case ANY:
            case OTHER:
                // ANY/OTHER have no dedicated column type and fall back to a byte array.
                return ColumnType.BYTE_ARRAY;
            case INTERVAL_YEAR:
            case INTERVAL_YEAR_MONTH:
            case INTERVAL_MONTH:
                // Year-month intervals map to PERIOD.
                return ColumnType.PERIOD;
            case INTERVAL_DAY_HOUR:
            case INTERVAL_DAY_MINUTE:
            case INTERVAL_DAY_SECOND:
            case INTERVAL_HOUR:
            case INTERVAL_HOUR_MINUTE:
            case INTERVAL_HOUR_SECOND:
            case INTERVAL_MINUTE:
            case INTERVAL_MINUTE_SECOND:
            case INTERVAL_SECOND:
            case INTERVAL_DAY:
                // Day-time intervals map to DURATION.
                return ColumnType.DURATION;
            case NULL:
                return ColumnType.NULL;
            case UUID:
                return ColumnType.UUID;
            default:
                throw new IllegalArgumentException("Unexpected type: " + type.getSqlTypeName());
        }
    }
    /**
     * Converts a {@link NativeType native type} to {@link RelDataType relational type}.
     *
     * @param factory Type factory.
     * @param nativeType A native type to convert.
     * @return Relational type.
     */
    public static RelDataType native2relationalType(RelDataTypeFactory factory, NativeType nativeType) {
        switch (nativeType.spec()) {
            case BOOLEAN:
                return factory.createSqlType(SqlTypeName.BOOLEAN);
            case INT8:
                return factory.createSqlType(SqlTypeName.TINYINT);
            case INT16:
                return factory.createSqlType(SqlTypeName.SMALLINT);
            case INT32:
                return factory.createSqlType(SqlTypeName.INTEGER);
            case INT64:
                return factory.createSqlType(SqlTypeName.BIGINT);
            case FLOAT:
                // Native FLOAT maps to SQL REAL (mirrors the REAL/FLOAT -> FLOAT mapping in columnType()).
                return factory.createSqlType(SqlTypeName.REAL);
            case DOUBLE:
                return factory.createSqlType(SqlTypeName.DOUBLE);
            case DECIMAL:
                assert nativeType instanceof DecimalNativeType;
                var decimal = (DecimalNativeType) nativeType;
                return factory.createSqlType(SqlTypeName.DECIMAL, decimal.precision(), decimal.scale());
            case UUID:
                return factory.createSqlType(SqlTypeName.UUID);
            case STRING: {
                assert nativeType instanceof VarlenNativeType;
                var varlen = (VarlenNativeType) nativeType;
                return factory.createSqlType(SqlTypeName.VARCHAR, varlen.length());
            }
            case BYTE_ARRAY: {
                assert nativeType instanceof VarlenNativeType;
                var varlen = (VarlenNativeType) nativeType;
                return factory.createSqlType(SqlTypeName.VARBINARY, varlen.length());
            }
            case DATE:
                return factory.createSqlType(SqlTypeName.DATE);
            case TIME:
                assert nativeType instanceof TemporalNativeType;
                var time = (TemporalNativeType) nativeType;
                return factory.createSqlType(SqlTypeName.TIME, time.precision());
            case TIMESTAMP:
                // Mirrors columnType(): native TIMESTAMP corresponds to TIMESTAMP WITH LOCAL TIME ZONE.
                assert nativeType instanceof TemporalNativeType;
                var ts = (TemporalNativeType) nativeType;
                return factory.createSqlType(SqlTypeName.TIMESTAMP_WITH_LOCAL_TIME_ZONE, ts.precision());
            case DATETIME:
                // Mirrors columnType(): native DATETIME corresponds to plain TIMESTAMP.
                assert nativeType instanceof TemporalNativeType;
                var dt = (TemporalNativeType) nativeType;
                return factory.createSqlType(SqlTypeName.TIMESTAMP, dt.precision());
            default:
                throw new IllegalStateException("Unexpected native type " + nativeType);
        }
    }
/**
* Converts a {@link NativeType native type} to {@link RelDataType relational type} with respect to the nullability flag.
*
* @param factory Type factory.
* @param nativeType A native type to convert.
* @param nullable A flag that specify whether the resulting type should be nullable or not.
* @return Relational type.
*/
public static RelDataType native2relationalType(RelDataTypeFactory factory, NativeType nativeType, boolean nullable) {
return factory.createTypeWithNullability(native2relationalType(factory, nativeType), nullable);
}
/**
* Converts a {@link NativeType native types} to {@link RelDataType relational types}.
*
* @param factory Type factory.
* @param nativeTypes A native types to convert.
* @return Relational types.
*/
public static List<RelDataType> native2relationalTypes(RelDataTypeFactory factory, NativeType... nativeTypes) {
return Arrays.stream(nativeTypes).map(t -> native2relationalType(factory, t)).collect(Collectors.toList());
}
    /**
     * Converts {@link ColumnType} to corresponding {@link NativeType}.
     *
     * @param columnType Column type to convert.
     * @param precision Precision for DECIMAL and temporal types; ignored otherwise.
     * @param scale Scale for DECIMAL; ignored otherwise.
     * @param length Length for STRING and BYTE_ARRAY; ignored otherwise.
     */
    public static NativeType columnType2NativeType(ColumnType columnType, int precision, int scale, int length) {
        switch (columnType) {
            case BOOLEAN:
                return NativeTypes.BOOLEAN;
            case INT8:
                return NativeTypes.INT8;
            case INT16:
                return NativeTypes.INT16;
            case INT32:
                return NativeTypes.INT32;
            case INT64:
                return NativeTypes.INT64;
            case FLOAT:
                return NativeTypes.FLOAT;
            case DOUBLE:
                return NativeTypes.DOUBLE;
            case DECIMAL:
                return NativeTypes.decimalOf(precision, scale);
            case DATE:
                return NativeTypes.DATE;
            case TIME:
                return NativeTypes.time(precision);
            case DATETIME:
                return NativeTypes.datetime(precision);
            case TIMESTAMP:
                return NativeTypes.timestamp(precision);
            case UUID:
                return NativeTypes.UUID;
            case STRING:
                return NativeTypes.stringOf(length);
            case BYTE_ARRAY:
                return NativeTypes.blobOf(length);
            // fallthrough
            case PERIOD:
            case DURATION:
            case NULL:
            default:
                // Interval and NULL column types have no native-type counterpart.
                throw new IllegalArgumentException("No NativeType for type: " + columnType);
        }
    }
    /**
     * Checks whether cast operation is necessary in {@code SearchBound}.
     *
     * @param typeFactory Type factory.
     * @param fromType Type of the bound expression.
     * @param toType Target (column) type.
     * @return {@code true} if an explicit cast must be kept, {@code false} if it can be omitted.
     */
    public static boolean needCastInSearchBounds(IgniteTypeFactory typeFactory, RelDataType fromType, RelDataType toType) {
        // Checks for character and binary types should allow comparison
        // between types with precision, types w/o precision, and varying non-varying length variants.
        // Otherwise the optimizer wouldn't pick an index for conditions such as
        // col (VARCHAR(M)) = CAST(s AS VARCHAR(N) (M != N) , col (VARCHAR) = CAST(s AS VARCHAR(N))
        // No need to cast between char and varchar.
        if (SqlTypeUtil.isCharacter(toType) && SqlTypeUtil.isCharacter(fromType)) {
            return false;
        }
        // No need to cast if the source type precedence list
        // contains target type. i.e. do not cast from
        // tinyint to int or int to bigint.
        if (fromType.getPrecedenceList().containsType(toType)
                && SqlTypeUtil.isIntType(fromType)
                && SqlTypeUtil.isIntType(toType)) {
            return false;
        }
        // TIME, TIMESTAMP and TIMESTAMP_WLTZ can use index, ignoring precision.
        if (fromType.getSqlTypeName() == toType.getSqlTypeName() && SqlTypeUtil.isDatetime(fromType)) {
            return false;
        }
        // Implicit type coercion does not handle nullability.
        if (SqlTypeUtil.equalSansNullability(typeFactory, fromType, toType)) {
            return false;
        }
        // Should keep sync with rules in SqlTypeCoercionRule.
        assert SqlTypeUtil.canCastFrom(toType, fromType, true);
        return true;
    }
    /**
     * Checks that {@code toType} and {@code fromType} have compatible type families taking into account custom data types. Types {@code T1}
     * and {@code T2} have compatible type families if {@code T1} can be assigned to {@code T2} and vice-versa.
     *
     * @see SqlTypeUtil#canAssignFrom(RelDataType, RelDataType)
     */
    public static boolean typeFamiliesAreCompatible(RelDataTypeFactory typeFactory, RelDataType toType, RelDataType fromType) {
        // Same types are always compatible.
        if (SqlTypeUtil.equalSansNullability(typeFactory, toType, fromType)) {
            return true;
        }
        // NULL is compatible with all types.
        if (fromType.getSqlTypeName() == SqlTypeName.NULL || toType.getSqlTypeName() == SqlTypeName.NULL) {
            return true;
        }
        // Struct (row) types are compared positionally, field by field.
        if (toType.isStruct() && fromType.isStruct()) {
            if (toType.getFieldCount() != fromType.getFieldCount()) {
                return false;
            }
            for (int i = 0; i < toType.getFieldCount(); i++) {
                RelDataType type1 = toType.getFieldList().get(i).getType();
                RelDataType type2 = fromType.getFieldList().get(i).getType();
                if (!typeFamiliesAreCompatible(typeFactory, type1, type2)) {
                    return false;
                }
            }
            return true;
        }
        // Scalar case: compatible only if assignment works in both directions.
        return SqlTypeUtil.canAssignFrom(toType, fromType)
                && SqlTypeUtil.canAssignFrom(fromType, toType);
    }
    /**
     * Checks that given types have compatible type families taking into account custom data types. Types {@code T1}
     * and {@code T2} have compatible type families if {@code T1} can be assigned to {@code T2} and vice-versa.
     *
     * @see SqlTypeUtil#canAssignFrom(RelDataType, RelDataType)
     */
    @SuppressWarnings("BooleanMethodIsAlwaysInverted")
    public static boolean typeFamiliesAreCompatible(RelDataTypeFactory typeFactory, RelDataType... types) {
        // Delegates to the list-based overload.
        return typeFamiliesAreCompatible(typeFactory, List.of(types));
    }
/**
* Checks that given types have compatible type families taking into account custom data types. Types {@code T1}
* and {@code T2} have compatible type families if {@code T1} can be assigned to {@code T2} and vice-versa.
*
* @see SqlTypeUtil#canAssignFrom(RelDataType, RelDataType)
*/
@SuppressWarnings("BooleanMethodIsAlwaysInverted")
public static boolean typeFamiliesAreCompatible(RelDataTypeFactory typeFactory, List<RelDataType> types) {
if (types.size() < 2) {
return true;
}
RelDataType firstType = null;
for (RelDataType type : types) {
if (firstType == null) {
if (SqlTypeUtil.isNull(type)) {
// null is compatible with any other type, therefore we need to find
// first type that is not NULL to make check valid
continue;
}
firstType = type;
} else if (!typeFamiliesAreCompatible(typeFactory, firstType, type)) {
return false;
}
}
return true;
}
/** Creates an instance of {@link RowSchema} from a list of the given {@link RelDataType}s. */
public static RowSchema rowSchemaFromRelTypes(List<RelDataType> types) {
RowSchema.Builder fieldTypes = RowSchema.builder();
for (RelDataType relType : types) {
TypeSpec typeSpec = convertToTypeSpec(relType);
fieldTypes.addField(typeSpec);
}
return fieldTypes.build();
}
    /** Converts a relational type to the row-schema {@link TypeSpec} used by the execution engine. */
    private static TypeSpec convertToTypeSpec(RelDataType type) {
        boolean simpleType = type instanceof BasicSqlType;
        boolean nullable = type.isNullable();
        if (SqlTypeName.ANY == type.getSqlTypeName()) {
            // TODO Some JSON functions that return ANY as well : https://issues.apache.org/jira/browse/IGNITE-20163
            return new BaseTypeSpec(null, nullable);
        } else if (SqlTypeUtil.isNull(type)) {
            return RowSchemaTypes.NULL;
        } else if (simpleType) {
            NativeType nativeType = IgniteTypeFactory.relDataTypeToNative(type);
            return RowSchemaTypes.nativeTypeWithNullability(nativeType, nullable);
        } else if (type instanceof IntervalSqlType) {
            IntervalSqlType intervalType = (IntervalSqlType) type;
            boolean yearMonth = intervalType.getIntervalQualifier().isYearMonth();
            if (yearMonth) {
                // Year-month intervals map to PERIOD (stored internally as total months in an int,
                // see toInternal/fromInternal).
                return RowSchemaTypes.nativeTypeWithNullability(NativeTypes.PERIOD, nullable);
            } else {
                // Day-time intervals map to DURATION (stored internally as milliseconds in a long).
                return RowSchemaTypes.nativeTypeWithNullability(NativeTypes.DURATION, nullable);
            }
        } else if (SqlTypeUtil.isRow(type)) {
            // Row types are converted recursively, field by field.
            List<TypeSpec> fields = new ArrayList<>();
            for (RelDataTypeField field : type.getFieldList()) {
                TypeSpec fieldTypeSpec = convertToTypeSpec(field.getType());
                fields.add(fieldTypeSpec);
            }
            return new RowType(fields, type.isNullable());
        } else if (SqlTypeUtil.isMap(type) || SqlTypeUtil.isMultiset(type) || SqlTypeUtil.isArray(type)) {
            // TODO https://issues.apache.org/jira/browse/IGNITE-20162
            // Add collection types support
            throw new IllegalArgumentException("Collection types is not supported: " + type);
        } else {
            throw new IllegalArgumentException("Unexpected type: " + type);
        }
    }
    /** Check limitation for character and binary types and throws exception if row does not fit into type defined.
     * <br>
     * Store assignment section defines:
     * If the declared type of T is fixed-length character string with length in characters L and
     * the length in characters M of V is larger than L, then:
     * <br>
     * 1) If the rightmost M-L characters of V are all space(s), then the value of T is set to
     * the first L characters of V.
     * <br>
     * 2) If one or more of the rightmost M-L characters of V are not space(s), then an
     * exception condition is raised: data exception — string data, right truncation.
     * <br><br>
     * If the declared type of T is binary string and the length in octets M of V is greater than
     * the maximum length in octets L of T, then:
     * <br>
     * 1) If the rightmost M-L octets of V are all equal to X’00’, then the value of T is set to
     * the first L octets of V and the length in octets of T is set to L.
     * <br>
     * 2) If one or more of the rightmost M-L octets of V are not equal to X’00’, then an
     * exception condition is raised: data exception — string data, right truncation.
     */
    public static <RowT> RowT validateStringTypesOverflowAndTrimIfPossible(
            RelDataType rowType,
            RowHandler<RowT> rowHandler,
            RowT row,
            Supplier<RowSchema> schema
    ) {
        // Fast path: skip rows whose type has no character/binary columns at all.
        boolean containValidatedType =
                rowType.getFieldList().stream().anyMatch(t -> STRING_TYPES.contains(t.getType().getSqlTypeName()));
        if (!containValidatedType) {
            return row;
        }
        int colCount = rowType.getFieldList().size();
        // The builder is created lazily, only once the first value actually needs trimming;
        // until then the original row is returned untouched.
        RowBuilder<RowT> rowBldr = null;
        for (int i = 0; i < colCount; ++i) {
            RelDataType colType = rowType.getFieldList().get(i).getType();
            SqlTypeName typeName = colType.getSqlTypeName();
            Object data = rowHandler.get(i, row);
            if (data == null || (!BINARY_TYPES.contains(typeName) && !CHAR_TYPES.contains(typeName))) {
                // Non-validated column: copy it through only if a partial row is being rebuilt.
                if (rowBldr != null) {
                    rowBldr.addField(data);
                }
                continue;
            }
            int colPrecision = colType.getPrecision();
            assert colPrecision != RelDataType.PRECISION_NOT_SPECIFIED;
            // Validate and trim if needed.
            if (BINARY_TYPES.contains(typeName)) {
                assert data instanceof ByteString;
                ByteString byteString = (ByteString) data;
                if (byteString.length() > colPrecision) {
                    // Only trailing zero octets may be truncated; anything else is an error.
                    for (int pos = byteString.length(); pos > colPrecision; --pos) {
                        if (byteString.byteAt(pos - 1) != 0) {
                            throw new SqlException(STMT_VALIDATION_ERR, "Value too long for type: " + colType);
                        }
                    }
                    data = byteString.substring(0, colPrecision);
                    if (rowBldr == null) {
                        rowBldr = buildPartialRow(rowHandler, schema, i, row);
                    }
                }
            }
            if (CHAR_TYPES.contains(typeName)) {
                assert data instanceof String;
                String str = (String) data;
                if (str.length() > colPrecision) {
                    // Only trailing spaces may be truncated; anything else is an error.
                    for (int pos = str.length(); pos > colPrecision; --pos) {
                        if (str.charAt(pos - 1) != ' ') {
                            throw new SqlException(STMT_VALIDATION_ERR, "Value too long for type: " + colType);
                        }
                    }
                    data = str.substring(0, colPrecision);
                    if (rowBldr == null) {
                        rowBldr = buildPartialRow(rowHandler, schema, i, row);
                    }
                }
            }
            if (rowBldr != null) {
                rowBldr.addField(data);
            }
        }
        if (rowBldr != null) {
            return rowBldr.build();
        } else {
            return row;
        }
    }
private static <RowT> RowBuilder<RowT> buildPartialRow(RowHandler<RowT> rowHandler, Supplier<RowSchema> schema, int endPos, RowT row) {
RowFactory<RowT> factory = rowHandler.factory(schema.get());
RowBuilder<RowT> bldr = factory.rowBuilder();
for (int i = 0; i < endPos; ++i) {
Object data = rowHandler.get(i, row);
bldr.addField(data);
}
return bldr;
}
/**
* Checks whether or not the given types represent the same column types.
*
* @param lhs Left type.
* @param rhs Right type.
* @return {@code true} if types represent the same {@link ColumnType} after conversion.
*/
// TODO this method can be removed after https://issues.apache.org/jira/browse/IGNITE-22295
public static boolean typesRepresentTheSameColumnTypes(RelDataType lhs, RelDataType rhs) {
ColumnType col1 = columnType(lhs);
ColumnType col2 = columnType(rhs);
return col1 == col2;
}
/**
* Returns {@code true} if the specified type name is
* {@link SqlTypeName#TIMESTAMP} or {@link SqlTypeName#TIMESTAMP_WITH_LOCAL_TIME_ZONE}.
*/
public static boolean isTimestamp(SqlTypeName typeName) {
return typeName == SqlTypeName.TIMESTAMP || typeName == SqlTypeName.TIMESTAMP_WITH_LOCAL_TIME_ZONE;
}
}
|
apache/qpid-broker-j | 37,798 | systests/qpid-systests-spawn-admin/src/main/java/org/apache/qpid/systests/admin/SpawnBrokerAdmin.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.qpid.systests.admin;
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.apache.qpid.systests.Utils.getAmqpManagementFacade;
import static org.apache.qpid.systests.Utils.getJmsProvider;
import java.io.BufferedReader;
import java.io.ByteArrayOutputStream;
import java.io.Closeable;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.lang.reflect.Method;
import java.net.InetSocketAddress;
import java.nio.file.Files;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.UUID;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import javax.jms.Connection;
import javax.jms.JMSException;
import javax.jms.Session;
import javax.naming.NamingException;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.qpid.server.plugin.PluggableService;
import org.apache.qpid.server.util.FileUtils;
import org.apache.qpid.server.util.SystemUtils;
import org.apache.qpid.systests.AmqpManagementFacade;
import org.apache.qpid.tests.utils.BrokerAdmin;
import org.apache.qpid.tests.utils.ConfigItem;
@SuppressWarnings("unused")
@PluggableService
public class SpawnBrokerAdmin implements BrokerAdmin, Closeable
{
private static final Logger LOGGER = LoggerFactory.getLogger(SpawnBrokerAdmin.class);
public static final String SYSTEST_PROPERTY_SPAWN_BROKER_STARTUP_TIME = "qpid.systests.broker_startup_time";
private static final String SYSTEST_PROPERTY_VIRTUALHOSTNODE_TYPE = "virtualhostnode.type";
private static final String SYSTEST_PROPERTY_VIRTUALHOST_BLUEPRINT = "virtualhostnode.context.blueprint";
private static final String SYSTEST_PROPERTY_INITIAL_CONFIGURATION_LOCATION = "qpid.initialConfigurationLocation";
static final String SYSTEST_PROPERTY_BUILD_CLASSPATH_FILE = "qpid.systests.build.classpath.file";
private static final String AMQP_QUEUE_TYPE = "org.apache.qpid.Queue";
private static final String AMQP_NODE_TYPE = "org.apache.qpid.VirtualHostNode";
private static final String AMQP_VIRTUAL_HOST_TYPE = "org.apache.qpid.VirtualHost";
private static final AtomicLong COUNTER = new AtomicLong();
private String _currentWorkDirectory;
private ExecutorService _executorService;
private Process _process;
private Integer _pid;
private List<ListeningPort> _ports;
private boolean _isPersistentStore;
private String _virtualHostNodeName;
private final long _id = COUNTER.incrementAndGet();
    /** Spawns a fresh broker process once per test class. */
    @Override
    public void beforeTestClass(final Class testClass)
    {
        startBroker(testClass);
    }
    /**
     * Creates a per-test virtual host node named {@code <TestClass>_<method>} on the running broker.
     * The node type defaults to JSON and can be overridden via the
     * {@code virtualhostnode.type} system property.
     */
    @Override
    public void beforeTestMethod(final Class testClass, final Method method)
    {
        final String nodeType = System.getProperty(SYSTEST_PROPERTY_VIRTUALHOSTNODE_TYPE, "JSON");
        final String virtualHostNodeName = testClass.getSimpleName() + "_" + method.getName();
        final Map<String, Object> attributes = getNodeAttributes(virtualHostNodeName, nodeType);
        beforeTestMethod(virtualHostNodeName, nodeType, attributes);
    }
    /** Deletes the per-test virtual host node, if one was created for this method. */
    @Override
    public void afterTestMethod(final Class testClass, final Method method)
    {
        if (_virtualHostNodeName != null)
        {
            deleteVirtualHostNode(_virtualHostNodeName);
        }
    }
    /** Tears the spawned broker process down after the last test of the class. */
    @Override
    public void afterTestClass(final Class testClass)
    {
        shutdownBroker();
    }
@Override
public InetSocketAddress getBrokerAddress(final PortType portType)
{
if (_ports == null)
{
throw new IllegalArgumentException("Port information not present");
}
Integer port = null;
switch (portType)
{
case AMQP:
for (ListeningPort p : _ports)
{
if (p.getTransport().contains("TCP"))
{
port = p.getPort();
break;
}
}
break;
default:
throw new IllegalArgumentException(String.format("Unknown port type '%s'", portType));
}
if (port == null)
{
throw new IllegalArgumentException(String.format("Cannot find port of type '%s'", portType));
}
return new InetSocketAddress(port);
}
    /**
     * Creates a queue on the current virtual host and binds it to the
     * {@code amq.direct} exchange using the queue name as the binding key.
     */
    @Override
    public void createQueue(final String queueName)
    {
        invokeManagementOperation(false, (amqpManagementFacade, session) -> {
            amqpManagementFacade.createEntityAndAssertResponse(queueName,
                                                               AMQP_QUEUE_TYPE,
                                                               Collections.emptyMap(),
                                                               session);
            return null;
        });
        invokeManagementOperation(false, (amqpManagementFacade, session) -> {
            // bind queue to direct exchange automatically
            Map<String, Object> arguments = new HashMap<>();
            arguments.put("destination", queueName);
            arguments.put("bindingKey", queueName);
            amqpManagementFacade.performOperationUsingAmqpManagement("amq.direct",
                                                                     "bind",
                                                                     session,
                                                                     "org.apache.qpid.DirectExchange",
                                                                     arguments);
            return null;
        });
    }
    /** Deletes the named queue from the current virtual host via AMQP management. */
    @Override
    public void deleteQueue(final String queueName)
    {
        invokeManagementOperation(false, (amqpManagementFacade, session) -> {
            amqpManagementFacade.deleteEntityUsingAmqpManagement(queueName,
                                                                 session,
                                                                 AMQP_QUEUE_TYPE);
            return null;
        });
    }
    /**
     * Publishes each given string as a {@code text/plain} message onto the named
     * queue, using the virtual host's {@code publishMessage} management operation
     * (one management call per message).
     */
    @Override
    public void putMessageOnQueue(final String queueName, final String... messages)
    {
        for (String content : messages)
        {
            final Map<String, Object> message = new HashMap<>();
            message.put("content", content);
            message.put("address", queueName);
            message.put("mimeType", "text/plain");
            invokeManagementOperation(false, (amqpManagementFacade, session) -> {
                amqpManagementFacade.performOperationUsingAmqpManagement(_virtualHostNodeName,
                                                                         "publishMessage",
                                                                         session,
                                                                         AMQP_VIRTUAL_HOST_TYPE,
                                                                         Collections.singletonMap("message",
                                                                                                  message));
                return null;
            });
        }
    }
    /**
     * Returns the current message depth of the named queue, read via the
     * queue's {@code getStatistics} management operation.
     */
    @Override
    public int getQueueDepthMessages(final String testQueueName)
    {
        return invokeManagementOperation(false, (amqpManagementFacade, session) -> {
            Map<String, Object> arguments = Collections.singletonMap("statistics",
                                                                     Collections.singletonList("queueDepthMessages"));
            Object statistics = amqpManagementFacade.performOperationUsingAmqpManagement(testQueueName,
                                                                                         "getStatistics",
                                                                                         session,
                                                                                         AMQP_QUEUE_TYPE,
                                                                                         arguments);
            @SuppressWarnings("unchecked")
            Map<String, Object> stats = (Map<String, Object>) statistics;
            return ((Number) stats.get("queueDepthMessages")).intValue();
        });
    }
    /** Restart is only meaningful when the virtual host store is persistent (non-Memory). */
    @Override
    public boolean supportsRestart()
    {
        return _isPersistentStore;
    }
    /** Restarts the virtual host node synchronously; the returned future is already completed. */
    @Override
    public CompletableFuture<Void> restart()
    {
        stop();
        start();
        return CompletableFuture.completedFuture(null);
    }
    /** The spawned broker accepts anonymous connections. */
    @Override
    public boolean isAnonymousSupported()
    {
        return true;
    }
    /** SASL authentication is available. */
    @Override
    public boolean isSASLSupported()
    {
        return true;
    }
    /** All SASL mechanisms are reported as supported; no per-mechanism check is performed. */
    @Override
    public boolean isSASLMechanismSupported(final String mechanismName)
    {
        return true;
    }
    /** The spawned broker configuration exposes no WebSocket port. */
    @Override
    public boolean isWebSocketSupported()
    {
        return false;
    }
    /** Queue depth can be queried via management ({@link #getQueueDepthMessages}). */
    @Override
    public boolean isQueueDepthSupported()
    {
        return true;
    }
    /** AMQP management is available on the spawned broker. */
    @Override
    public boolean isManagementSupported()
    {
        return true;
    }
    /** Messages can be injected via the {@code publishMessage} management operation. */
    @Override
    public boolean isPutMessageOnQueueSupported()
    {
        return true;
    }
    /** Queues can be deleted via management. */
    @Override
    public boolean isDeleteQueueSupported()
    {
        return true;
    }
    /** Username accepted by the spawned broker's initial configuration. */
    @Override
    public String getValidUsername()
    {
        return "admin";
    }
    /** Password matching {@link #getValidUsername()}. */
    @Override
    public String getValidPassword()
    {
        return "admin";
    }
    /** Identifies this admin as managing a Broker-J instance. */
    @Override
    public String getKind()
    {
        return KIND_BROKER_J;
    }
    /** Admin type label: one spawned broker process per test class. */
    @Override
    public String getType()
    {
        return "SPAWN_BROKER_PER_CLASS";
    }
    /** {@link Closeable} hook: shuts the spawned broker down. */
    @Override
    public void close()
    {
        shutdownBroker();
    }
    /**
     * Creates a virtual host node with the given name, type and attributes.
     * Note: the supplied {@code attributes} map is mutated if it lacks a
     * {@code qpid-type} entry.
     */
    public void beforeTestMethod(final String virtualHostNodeName,
                                 final String nodeType,
                                 final Map<String, Object> attributes)
    {
        // Only the in-memory store type is non-persistent.
        _isPersistentStore = !"Memory".equals(nodeType);
        _virtualHostNodeName = virtualHostNodeName;
        if (!attributes.containsKey("qpid-type"))
        {
            attributes.put("qpid-type", nodeType);
        }
        invokeManagementOperation(true, ((amqpManagementFacade, session) -> {
            amqpManagementFacade.createEntityAndAssertResponse(virtualHostNodeName,
                                                               AMQP_NODE_TYPE,
                                                               attributes,
                                                               session);
            return null;
        }));
    }
    /**
     * Activates the current virtual host node by setting its desired state to ACTIVE.
     *
     * @throws BrokerAdminException if no virtual host node has been created yet.
     */
    public void start()
    {
        if (_virtualHostNodeName == null)
        {
            throw new BrokerAdminException("Virtual host is not created");
        }
        invokeManagementOperation(true, (amqpManagementFacade, session) -> {
            amqpManagementFacade.updateEntityUsingAmqpManagementAndReceiveResponse(_virtualHostNodeName,
                                                                                   AMQP_NODE_TYPE,
                                                                                   Collections.singletonMap(
                                                                                           "desiredState",
                                                                                           "ACTIVE"), session);
            return null;
        });
    }
    /**
     * Stops the current virtual host node by setting its desired state to STOPPED.
     *
     * @throws BrokerAdminException if no virtual host node has been created yet.
     */
    public void stop()
    {
        if (_virtualHostNodeName == null)
        {
            throw new BrokerAdminException("Virtual host is not created");
        }
        invokeManagementOperation(true, (amqpManagementFacade, session) -> {
            amqpManagementFacade.updateEntityUsingAmqpManagementAndReceiveResponse(_virtualHostNodeName,
                                                                                   AMQP_NODE_TYPE,
                                                                                   Collections.singletonMap(
                                                                                           "desiredState",
                                                                                           "STOPPED"),
                                                                                   session);
            return null;
        });
    }
    /**
     * Polls the named entity's attribute (every 50 ms, up to {@code timeLimitMilliseconds})
     * until it matches any of the expected {@code attributeValue}s.
     * A 404 from management is treated as "entity not there yet", not an error.
     *
     * @return the last observed attribute value — which may not match any expected
     *         value if the time limit elapsed or the thread was interrupted.
     */
    public Object awaitAttributeValue(long timeLimitMilliseconds,
                                      boolean isBrokerManagement,
                                      String name,
                                      String type,
                                      String attributeName,
                                      Object... attributeValue)
    {
        Object value;
        long limit = System.currentTimeMillis() + timeLimitMilliseconds;
        do
        {
            value = invokeManagementOperation(isBrokerManagement, ((amqpManagementFacade, session) -> {
                Map<String, Object> object = null;
                try
                {
                    object = amqpManagementFacade.readEntityUsingAmqpManagement(session, type, name, false);
                }
                catch (AmqpManagementFacade.OperationUnsuccessfulException e)
                {
                    // 404 means the entity does not exist (yet); keep polling.
                    if (e.getStatusCode() != 404)
                    {
                        throw e;
                    }
                }
                return object == null ? null : object.get(attributeName);
            }));
            final Object lookup = value;
            if (Arrays.stream(attributeValue).anyMatch(v -> v.equals(lookup)))
            {
                break;
            }
            else
            {
                try
                {
                    Thread.sleep(50);
                }
                catch (InterruptedException e)
                {
                    // Restore the interrupt flag and stop waiting.
                    Thread.currentThread().interrupt();
                    break;
                }
            }
        }
        while (System.currentTimeMillis() < limit);
        return value;
    }
    /** Name of the current per-test virtual host node, or {@code null} if none exists. */
    public String getVirtualHostName()
    {
        return _virtualHostNodeName;
    }
    /**
     * Updates attributes of a named entity via broker-level or virtual-host-level
     * AMQP management.
     */
    public void update(final boolean brokerManagement,
                       final String name,
                       final String type,
                       final Map<String, Object> attributes)
    {
        invokeManagementOperation(brokerManagement, (amqpManagementFacade, session) -> {
            amqpManagementFacade.updateEntityUsingAmqpManagement(name,
                                                                 session,
                                                                 type,
                                                                 attributes);
            return null;
        });
    }
    /** Reads the full attribute map of a named entity via AMQP management. */
    public Map<String, Object> getAttributes(final boolean brokerManagement, final String name, final String type)
    {
        return invokeManagementOperation(brokerManagement,
                                         (amqpManagementFacade, session) -> amqpManagementFacade.readEntityUsingAmqpManagement(
                                                 session,
                                                 type,
                                                 name,
                                                 false));
    }
public String dumpThreads()
{
ByteArrayOutputStream baos = new ByteArrayOutputStream();
try
{
Process process = Runtime.getRuntime().exec("jstack " + _pid);
InputStream is = process.getInputStream();
byte[] buffer = new byte[1024];
int length;
while ((length = is.read(buffer)) != -1)
{
baos.write(buffer, 0, length);
}
return new String(baos.toByteArray());
}
catch (Exception e)
{
LOGGER.error("Error whilst collecting thread dump for " + _pid, e);
return "";
}
finally
{
try
{
baos.close();
}
catch (IOException e)
{
// ignore
}
}
}
    /**
     * Builds the attribute map used to create a virtual host node: name, type,
     * store path, default-node flag and a JSON "context" carrying the virtual
     * host blueprint.
     */
    private Map<String, Object> getNodeAttributes(final String virtualHostNodeName, final String nodeType)
    {
        String storeDir;
        // NOTE(review): presumably the "java-dby-mem" profile is an in-memory Derby
        // build where ":memory:" is the store URL — confirm against the build profiles.
        if (System.getProperty("profile", "").startsWith("java-dby-mem"))
        {
            storeDir = ":memory:";
        }
        else
        {
            storeDir = "${qpid.work_dir}" + File.separator + virtualHostNodeName;
        }
        Map<String, Object> attributes = new HashMap<>();
        String blueprint =
                System.getProperty(SYSTEST_PROPERTY_VIRTUALHOST_BLUEPRINT, "{\"type\":\"ProvidedStore\"}");
        LOGGER.debug("Creating Virtual host {} from blueprint: {}", virtualHostNodeName, blueprint);
        attributes.put("name", virtualHostNodeName);
        attributes.put("type", nodeType);
        attributes.put("qpid-type", nodeType);
        String contextAsString;
        try
        {
            // The context attribute is itself a JSON document embedding the blueprint.
            contextAsString =
                    new ObjectMapper().writeValueAsString(Collections.singletonMap("virtualhostBlueprint",
                                                                                   blueprint));
        }
        catch (JsonProcessingException e)
        {
            throw new BrokerAdminException("Cannot create virtual host as context serialization failed", e);
        }
        attributes.put("context", contextAsString);
        attributes.put("defaultVirtualHostNode", true);
        attributes.put("virtualHostInitialConfiguration", blueprint);
        attributes.put("storePath", storeDir);
        return attributes;
    }
    /** Deletes the named virtual host node and clears the cached node name. */
    private void deleteVirtualHostNode(final String virtualHostNodeName)
    {
        invokeManagementOperation(true,
                                  (amqpManagementFacade, session) -> {
                                      amqpManagementFacade.deleteEntityUsingAmqpManagement(virtualHostNodeName,
                                                                                          session,
                                                                                          AMQP_NODE_TYPE);
                                      _virtualHostNodeName = null;
                                      return null;
                                  });
    }
private <T> T invokeManagementOperation(boolean isBrokerOperation, AmqpManagementOperation<T> operation)
{
try
{
InetSocketAddress brokerAddress = getBrokerAddress(BrokerAdmin.PortType.AMQP);
final Connection connection = getJmsProvider().getConnectionBuilder()
.setVirtualHost(isBrokerOperation
? "$management"
: getVirtualHostName())
.setClientId("admin-" + UUID.randomUUID().toString())
.setHost(brokerAddress.getHostName())
.setPort(brokerAddress.getPort())
.setFailover(false)
.build();
try
{
connection.start();
final Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
try
{
return operation.invoke(getAmqpManagementFacade(), session);
}
catch (AmqpManagementFacade.OperationUnsuccessfulException | JMSException e)
{
throw new BrokerAdminException("Cannot perform operation", e);
}
finally
{
session.close();
}
}
finally
{
connection.close();
}
}
catch (JMSException | NamingException e)
{
throw new BrokerAdminException("Cannot create connection to broker", e);
}
}
    /**
     * Spawns a broker JVM in a fresh temporary work directory, waits for the
     * "Qpid Broker Ready" log line (bounded by {@code qpid.systests.broker_startup_time},
     * default 30 s) and records the PID and listening ports parsed from the output.
     * On any failure the process and helper executor are torn down.
     */
    private void startBroker(final Class testClass)
    {
        String timestamp = new SimpleDateFormat("yyyyMMddHHmmss").format(new Date(System.currentTimeMillis()));
        boolean brokerStarted = false;
        try
        {
            _currentWorkDirectory =
                    Files.createTempDirectory(String.format("qpid-work-%d-%s-%s-",
                                                            _id,
                                                            testClass.getSimpleName(),
                                                            timestamp))
                         .toString();
            String readyLogPattern = "BRK-1004 : Qpid Broker Ready";
            LOGGER.debug("Spawning broker working folder: {}", _currentWorkDirectory);
            int startUpTime = Integer.getInteger(SYSTEST_PROPERTY_SPAWN_BROKER_STARTUP_TIME, 30000);
            LOGGER.debug("Spawning broker permitted start-up time: {}", startUpTime);
            ProcessBuilder processBuilder = createBrokerProcessBuilder(_currentWorkDirectory, testClass);
            // stderr is merged into stdout so a single reader sees all broker output.
            processBuilder.redirectErrorStream(true);
            Map<String, String> processEnvironment = processBuilder.environment();
            processEnvironment.put("QPID_PNAME", String.format("-DPNAME=QPBRKR -DTNAME=\"%s\"", testClass.getName()));
            CountDownLatch readyLatch = new CountDownLatch(1);
            long startTime = System.currentTimeMillis();
            LOGGER.debug("Starting broker process");
            _process = processBuilder.start();
            BrokerSystemOutputHandler brokerSystemOutputHandler =
                    new BrokerSystemOutputHandler(_process.getInputStream(),
                                                  readyLatch
            );
            _executorService = Executors.newFixedThreadPool(1, r -> {
                Thread t = new Thread(r, "SPAWN-" + _id);
                t.setDaemon(false);
                return t;
            });
            _executorService.submit(brokerSystemOutputHandler);
            if (!readyLatch.await(startUpTime, TimeUnit.MILLISECONDS))
            {
                LOGGER.warn("Spawned broker failed to become ready within {} ms. Ready line '{}'",
                            startUpTime, readyLogPattern);
                throw new BrokerAdminException(String.format(
                        "Broker failed to become ready within %d ms. Stop line : %s",
                        startUpTime,
                        readyLogPattern));
            }
            _pid = brokerSystemOutputHandler.getPID();
            _ports = brokerSystemOutputHandler.getAmqpPorts();
            // NOTE(review): this relies on the handler reporting -1 when no PID line was
            // seen — confirm the handler initialises its pid field accordingly.
            if (_pid == -1)
            {
                throw new BrokerAdminException("Broker PID is not detected");
            }
            if (_ports.size() == 0)
            {
                throw new BrokerAdminException("Broker port is not detected");
            }
            try
            {
                // exitValue() only succeeds if the process already died; a live process
                // throws IllegalThreadStateException, which is the success path here.
                int exit = _process.exitValue();
                LOGGER.info("broker aborted: {}", exit);
                throw new BrokerAdminException("broker aborted: " + exit);
            }
            catch (IllegalThreadStateException e)
            {
                // ignore
            }
            LOGGER.info("Broker was started successfully within {} milliseconds, broker PID {}",
                        System.currentTimeMillis() - startTime,
                        _pid);
            LOGGER.info("Broker ports: {}", _ports);
            brokerStarted = true;
        }
        catch (RuntimeException e)
        {
            throw e;
        }
        catch (InterruptedException e)
        {
            Thread.currentThread().interrupt();
        }
        catch (Exception e)
        {
            throw new BrokerAdminException(String.format("Unexpected exception on broker startup: %s", e), e);
        }
        finally
        {
            // Clean up the half-started process/executor on any failure path.
            if (!brokerStarted)
            {
                LOGGER.warn("Broker failed to start");
                if (_process != null)
                {
                    _process.destroy();
                    _process = null;
                }
                if (_executorService != null)
                {
                    _executorService.shutdown();
                    _executorService = null;
                }
                _ports = null;
                _pid = null;
            }
        }
    }
private ProcessBuilder createBrokerProcessBuilder(String currentWorkDirectory, Class testClass) throws IOException
{
String initialConfiguration = System.getProperty(SYSTEST_PROPERTY_INITIAL_CONFIGURATION_LOCATION);
if (initialConfiguration == null)
{
throw new BrokerAdminException(
String.format("No initial configuration is found: JVM property '%s' is not set.",
SYSTEST_PROPERTY_INITIAL_CONFIGURATION_LOCATION));
}
File testInitialConfiguration = new File(currentWorkDirectory, "initial-configuration.json");
if (!testInitialConfiguration.createNewFile())
{
throw new BrokerAdminException("Failed to create a file for a copy of initial configuration");
}
if (initialConfiguration.startsWith("classpath:"))
{
String config = initialConfiguration.substring("classpath:".length());
try (InputStream is = getClass().getClassLoader().getResourceAsStream(config);
OutputStream os = new FileOutputStream(testInitialConfiguration))
{
is.transferTo(os);
}
}
else
{
Files.copy(new File(initialConfiguration).toPath(), testInitialConfiguration.toPath());
}
String classpath;
File file = new File(System.getProperty(SYSTEST_PROPERTY_BUILD_CLASSPATH_FILE));
if (!file.exists())
{
throw new BrokerAdminException(String.format("Cannot find file with classpath: %s",
file.getAbsoluteFile()));
}
else
{
classpath = new String(Files.readAllBytes(file.toPath()), UTF_8);
}
final ConfigItem[] configItems = (ConfigItem[]) testClass.getAnnotationsByType(ConfigItem.class);
List<String> jvmArguments = new ArrayList<>();
jvmArguments.add("java");
jvmArguments.add("-Djava.io.tmpdir=" + escape(System.getProperty("java.io.tmpdir")));
jvmArguments.add("-Dlogback.configurationFile=default-broker-logback.xml");
jvmArguments.add("-Dqpid.tests.mms.messagestore.persistence=true");
jvmArguments.addAll(Arrays.stream(configItems)
.filter(ConfigItem::jvm)
.map(ci -> String.format("-D%s=%s", ci.name(), ci.value()))
.collect(Collectors.toList()));
jvmArguments.add("org.apache.qpid.server.Main");
jvmArguments.add("--store-type");
jvmArguments.add("JSON");
jvmArguments.add("--initial-config-path");
jvmArguments.add(escape(testInitialConfiguration.toString()));
Map<String, String> context = new HashMap<>();
context.put("qpid.work_dir", escape(currentWorkDirectory));
context.put("qpid.port.protocol_handshake_timeout", "1000000");
context.put("qpid.amqp_port", "0");
System.getProperties()
.stringPropertyNames()
.stream()
.filter(n -> n.startsWith("qpid."))
.forEach(n -> context.put(n, System.getProperty(n)));
context.putAll(Arrays.stream(configItems)
.filter(i -> !i.jvm())
.collect(Collectors.toMap(ConfigItem::name,
ConfigItem::value,
(name, value) -> value)));
context.forEach((key, value) -> jvmArguments.addAll(Arrays.asList("-prop",
String.format("%s=%s", key, value))));
LOGGER.debug("Spawning broker JVM :", jvmArguments);
String[] cmd = jvmArguments.toArray(new String[jvmArguments.size()]);
LOGGER.debug("command line:" + String.join(" ", jvmArguments));
ProcessBuilder ps = new ProcessBuilder(cmd);
ps.environment().put("CLASSPATH", classpath);
return ps;
}
    /**
     * On Windows, wraps the value in double quotes (doubling any embedded quotes)
     * when it contains a quote and is not already quoted; otherwise returns the
     * value unchanged.
     */
    // NOTE(review): quoting is triggered only by embedded quote characters, not by
    // spaces — looks intentional given ProcessBuilder handles argument boundaries,
    // but confirm against Windows command-line escaping requirements.
    private String escape(String value)
    {
        if (SystemUtils.isWindows() && value.contains("\"") && !value.startsWith("\""))
        {
            return "\"" + value.replaceAll("\"", "\"\"") + "\"";
        }
        else
        {
            return value;
        }
    }
    /**
     * Destroys the spawned broker process (taskkill on Windows first, since
     * {@code Process.destroy} does not reliably kill child processes there),
     * then releases the executor, ports, PID and — when
     * {@code broker.clean.between.tests} is set — the work directory.
     */
    private void shutdownBroker()
    {
        try
        {
            if (SystemUtils.isWindows())
            {
                doWindowsKill();
            }
            if (_process != null)
            {
                LOGGER.info("Destroying broker process");
                _process.destroy();
                reapChildProcess();
            }
        }
        finally
        {
            if (_executorService != null)
            {
                _executorService.shutdown();
                _executorService = null;
            }
            if (_ports != null)
            {
                _ports.clear();
                _ports = null;
            }
            _pid = null;
            _process = null;
            if (_currentWorkDirectory != null && Boolean.getBoolean("broker.clean.between.tests"))
            {
                if (FileUtils.delete(new File(_currentWorkDirectory), true))
                {
                    _currentWorkDirectory = null;
                }
            }
        }
    }
    /**
     * Forcibly kills the broker process tree on Windows via
     * {@code taskkill /PID <pid> /T /F}; failures are logged, not rethrown.
     */
    private void doWindowsKill()
    {
        if (_pid != null)
        {
            try
            {
                Process p;
                p = Runtime.getRuntime().exec(new String[]{"taskkill", "/PID", Integer.toString(_pid), "/T", "/F"});
                consumeAllOutput(p);
            }
            catch (IOException e)
            {
                LOGGER.error("Error whilst killing process " + _pid, e);
            }
        }
    }
private static void consumeAllOutput(Process p) throws IOException
{
try (InputStreamReader inputStreamReader = new InputStreamReader(p.getInputStream()))
{
try (BufferedReader reader = new BufferedReader(inputStreamReader))
{
String line;
while ((line = reader.readLine()) != null)
{
LOGGER.debug("Consuming output: {}", line);
}
}
}
}
    /**
     * Blocks until the broker process exits, then closes all of its streams so
     * the OS pipe handles are released even if waiting was interrupted.
     */
    private void reapChildProcess()
    {
        try
        {
            _process.waitFor();
            LOGGER.info("broker exited: " + _process.exitValue());
        }
        catch (InterruptedException e)
        {
            LOGGER.error("Interrupted whilst waiting for process shutdown");
            // Restore the interrupt flag for callers further up the stack.
            Thread.currentThread().interrupt();
        }
        finally
        {
            try
            {
                _process.getInputStream().close();
                _process.getErrorStream().close();
                _process.getOutputStream().close();
            }
            catch (IOException ignored)
            {
            }
        }
    }
private static final class BrokerSystemOutputHandler implements Runnable
{
private final Logger LOGGER = LoggerFactory.getLogger(BrokerSystemOutputHandler.class);
private final BufferedReader _in;
private final List<ListeningPort> _amqpPorts;
private final Pattern _readyPattern;
private final Pattern _stoppedPattern;
private final Pattern _pidPattern;
private final Pattern _amqpPortPattern;
private final CountDownLatch _readyLatch;
private volatile boolean _seenReady;
private volatile int _pid;
private BrokerSystemOutputHandler(InputStream in, CountDownLatch readyLatch)
{
_amqpPorts = new ArrayList<>();
_seenReady = false;
_in = new BufferedReader(new InputStreamReader(in));
_readyPattern = Pattern.compile("BRK-1004 : Qpid Broker Ready");
_stoppedPattern = Pattern.compile("BRK-1005 : Stopped");
_amqpPortPattern = Pattern.compile("BRK-1002 : Starting : Listening on (\\w*) port ([0-9]+)");
_pidPattern = Pattern.compile("BRK-1017 : Process : PID : ([0-9]+)");
_readyLatch = readyLatch;
}
@Override
public void run()
{
try
{
String line;
while ((line = _in.readLine()) != null)
{
LOGGER.info(line);
if (!_seenReady)
{
checkPortListeningLog(line, _amqpPortPattern, _amqpPorts);
Matcher pidMatcher = _pidPattern.matcher(line);
if (pidMatcher.find())
{
if (pidMatcher.groupCount() > 0)
{
_pid = Integer.parseInt(pidMatcher.group(1));
}
}
Matcher readyMatcher = _readyPattern.matcher(line);
if (readyMatcher.find())
{
_seenReady = true;
_readyLatch.countDown();
}
}
Matcher stopMatcher = _stoppedPattern.matcher(line);
if (stopMatcher.find())
{
break;
}
if (line.contains("Error:"))
{
break;
}
}
}
catch (IOException e)
{
LOGGER.warn(e.getMessage()
+ " : Broker stream from unexpectedly closed; last log lines written by Broker may be lost.");
}
}
private void checkPortListeningLog(final String line,
final Pattern portPattern,
final List<ListeningPort> ports)
{
Matcher portMatcher = portPattern.matcher(line);
if (portMatcher.find())
{
ports.add(new ListeningPort(portMatcher.group(1),
Integer.parseInt(portMatcher.group(2))));
}
}
int getPID()
{
return _pid;
}
List<ListeningPort> getAmqpPorts()
{
return _amqpPorts;
}
}
private static class ListeningPort
{
private final String _transport;
private final int _port;
ListeningPort(final String transport, final int port)
{
_transport = transport;
_port = port;
}
String getTransport()
{
return _transport;
}
int getPort()
{
return _port;
}
@Override
public String toString()
{
return "ListeningPort{" +
", _transport='" + _transport + '\'' +
", _port=" + _port +
'}';
}
}
    /**
     * Callback executed against an established AMQP management session.
     *
     * @param <T> result type produced by the operation
     */
    private interface AmqpManagementOperation<T>
    {
        T invoke(AmqpManagementFacade amqpManagementFacade, Session session) throws JMSException;
        /** Chains two operations on the same session, returning the second operation's result. */
        default <V> AmqpManagementOperation<V> andThen(AmqpManagementOperation<V> after)
        {
            Objects.requireNonNull(after);
            return (amqpManagementFacade, session) -> {
                invoke(amqpManagementFacade, session);
                return after.invoke(amqpManagementFacade, session);
            };
        }
    }
}
|
apache/cxf | 37,311 | rt/transports/http/src/test/java/org/apache/cxf/transport/https/httpclient/CertificatesToPlayWith.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* This software consists of voluntary contributions made by many
* individuals on behalf of the Apache Software Foundation. For more
* information on the Apache Software Foundation, please see
* <http://www.apache.org/>.
*
*/
package org.apache.cxf.transport.https.httpclient;
/**
* Some X509 certificates to test against.
* <p>
* Note: some of these certificates have Japanese Kanji in the "subjectAlt"
* field (UTF8). Not sure how realistic that is since international characters
* in DNS names usually get translated into ASCII using "xn--" style DNS
* entries. "xn--i8s592g.co.jp" is what FireFox actually uses when trying to
* find 花子.co.jp. So would the CN in the certificate contain
* "xn--i8s592g.co.jp" in ASCII, or "花子.co.jp" in UTF8? (Both?)
* </p>
*
* Copied from httpclient
*/
public final class CertificatesToPlayWith {
/**
* CN=foo.com
*/
public static final byte[] X509_FOO = (
"-----BEGIN CERTIFICATE-----\n"
+ "MIIERjCCAy6gAwIBAgIJAIz+EYMBU6aQMA0GCSqGSIb3DQEBBQUAMIGiMQswCQYD\n"
+ "VQQGEwJDQTELMAkGA1UECBMCQkMxEjAQBgNVBAcTCVZhbmNvdXZlcjEWMBQGA1UE\n"
+ "ChMNd3d3LmN1Y2JjLmNvbTEUMBIGA1UECxQLY29tbW9uc19zc2wxHTAbBgNVBAMU\n"
+ "FGRlbW9faW50ZXJtZWRpYXRlX2NhMSUwIwYJKoZIhvcNAQkBFhZqdWxpdXNkYXZp\n"
+ "ZXNAZ21haWwuY29tMB4XDTA2MTIxMTE1MzE0MVoXDTI4MTEwNTE1MzE0MVowgaQx\n"
+ "CzAJBgNVBAYTAlVTMREwDwYDVQQIEwhNYXJ5bGFuZDEUMBIGA1UEBxMLRm9yZXN0\n"
+ "IEhpbGwxFzAVBgNVBAoTDmh0dHBjb21wb25lbnRzMRowGAYDVQQLExF0ZXN0IGNl\n"
+ "cnRpZmljYXRlczEQMA4GA1UEAxMHZm9vLmNvbTElMCMGCSqGSIb3DQEJARYWanVs\n"
+ "aXVzZGF2aWVzQGdtYWlsLmNvbTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC\n"
+ "ggEBAMhjr5aCPoyp0R1iroWAfnEyBMGYWoCidH96yGPFjYLowez5aYKY1IOKTY2B\n"
+ "lYho4O84X244QrZTRl8kQbYtxnGh4gSCD+Z8gjZ/gMvLUlhqOb+WXPAUHMB39GRy\n"
+ "zerA/ZtrlUqf+lKo0uWcocxeRc771KN8cPH3nHZ0rV0Hx4ZAZy6U4xxObe4rtSVY\n"
+ "07hNKXAb2odnVqgzcYiDkLV8ilvEmoNWMWrp8UBqkTcpEhYhCYp3cTkgJwMSuqv8\n"
+ "BqnGd87xQU3FVZI4tbtkB+KzjD9zz8QCDJAfDjZHR03KNQ5mxOgXwxwKw6lGMaiV\n"
+ "JTxpTKqym93whYk93l3ocEe55c0CAwEAAaN7MHkwCQYDVR0TBAIwADAsBglghkgB\n"
+ "hvhCAQ0EHxYdT3BlblNTTCBHZW5lcmF0ZWQgQ2VydGlmaWNhdGUwHQYDVR0OBBYE\n"
+ "FJ8Ud78/OrbKOIJCSBYs2tDLXofYMB8GA1UdIwQYMBaAFHua2o+QmU5S0qzbswNS\n"
+ "yoemDT4NMA0GCSqGSIb3DQEBBQUAA4IBAQC3jRmEya6sQCkmieULcvx8zz1euCk9\n"
+ "fSez7BEtki8+dmfMXe3K7sH0lI8f4jJR0rbSCjpmCQLYmzC3NxBKeJOW0RcjNBpO\n"
+ "c2JlGO9auXv2GDP4IYiXElLJ6VSqc8WvDikv0JmCCWm0Zga+bZbR/EWN5DeEtFdF\n"
+ "815CLpJZNcYwiYwGy/CVQ7w2TnXlG+mraZOz+owr+cL6J/ZesbdEWfjoS1+cUEhE\n"
+ "HwlNrAu8jlZ2UqSgskSWlhYdMTAP9CPHiUv9N7FcT58Itv/I4fKREINQYjDpvQcx\n"
+ "SaTYb9dr5sB4WLNglk7zxDtM80H518VvihTcP7FHL+Gn6g4j5fkI98+S\n"
+ "-----END CERTIFICATE-----\n").getBytes();
/**
* CN=花子.co.jp
*/
public static final byte[] X509_HANAKO = (
"-----BEGIN CERTIFICATE-----\n"
+ "MIIESzCCAzOgAwIBAgIJAIz+EYMBU6aTMA0GCSqGSIb3DQEBBQUAMIGiMQswCQYD\n"
+ "VQQGEwJDQTELMAkGA1UECBMCQkMxEjAQBgNVBAcTCVZhbmNvdXZlcjEWMBQGA1UE\n"
+ "ChMNd3d3LmN1Y2JjLmNvbTEUMBIGA1UECxQLY29tbW9uc19zc2wxHTAbBgNVBAMU\n"
+ "FGRlbW9faW50ZXJtZWRpYXRlX2NhMSUwIwYJKoZIhvcNAQkBFhZqdWxpdXNkYXZp\n"
+ "ZXNAZ21haWwuY29tMB4XDTA2MTIxMTE1NDIxNVoXDTI4MTEwNTE1NDIxNVowgakx\n"
+ "CzAJBgNVBAYTAlVTMREwDwYDVQQIDAhNYXJ5bGFuZDEUMBIGA1UEBwwLRm9yZXN0\n"
+ "IEhpbGwxFzAVBgNVBAoMDmh0dHBjb21wb25lbnRzMRowGAYDVQQLDBF0ZXN0IGNl\n"
+ "cnRpZmljYXRlczEVMBMGA1UEAwwM6Iqx5a2QLmNvLmpwMSUwIwYJKoZIhvcNAQkB\n"
+ "FhZqdWxpdXNkYXZpZXNAZ21haWwuY29tMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A\n"
+ "MIIBCgKCAQEAyGOvloI+jKnRHWKuhYB+cTIEwZhagKJ0f3rIY8WNgujB7PlpgpjU\n"
+ "g4pNjYGViGjg7zhfbjhCtlNGXyRBti3GcaHiBIIP5nyCNn+Ay8tSWGo5v5Zc8BQc\n"
+ "wHf0ZHLN6sD9m2uVSp/6UqjS5ZyhzF5FzvvUo3xw8fecdnStXQfHhkBnLpTjHE5t\n"
+ "7iu1JVjTuE0pcBvah2dWqDNxiIOQtXyKW8Sag1YxaunxQGqRNykSFiEJindxOSAn\n"
+ "AxK6q/wGqcZ3zvFBTcVVkji1u2QH4rOMP3PPxAIMkB8ONkdHTco1DmbE6BfDHArD\n"
+ "qUYxqJUlPGlMqrKb3fCFiT3eXehwR7nlzQIDAQABo3sweTAJBgNVHRMEAjAAMCwG\n"
+ "CWCGSAGG+EIBDQQfFh1PcGVuU1NMIEdlbmVyYXRlZCBDZXJ0aWZpY2F0ZTAdBgNV\n"
+ "HQ4EFgQUnxR3vz86tso4gkJIFiza0Mteh9gwHwYDVR0jBBgwFoAUe5raj5CZTlLS\n"
+ "rNuzA1LKh6YNPg0wDQYJKoZIhvcNAQEFBQADggEBALJ27i3okV/KvlDp6KMID3gd\n"
+ "ITl68PyItzzx+SquF8gahMh016NX73z/oVZoVUNdftla8wPUB1GwIkAnGkhQ9LHK\n"
+ "spBdbRiCj0gMmLCsX8SrjFvr7cYb2cK6J/fJe92l1tg/7Y4o7V/s4JBe/cy9U9w8\n"
+ "a0ctuDmEBCgC784JMDtT67klRfr/2LlqWhlOEq7pUFxRLbhpquaAHSOjmIcWnVpw\n"
+ "9BsO7qe46hidgn39hKh1WjKK2VcL/3YRsC4wUi0PBtFW6ScMCuMhgIRXSPU55Rae\n"
+ "UIlOdPjjr1SUNWGId1rD7W16Scpwnknn310FNxFMHVI0GTGFkNdkilNCFJcIoRA=\n"
+ "-----END CERTIFICATE-----\n").getBytes();
/**
* CN=foo.com, subjectAlt=bar.com
*/
public static final byte[] X509_FOO_BAR = (
"-----BEGIN CERTIFICATE-----\n"
+ "MIIEXDCCA0SgAwIBAgIJAIz+EYMBU6aRMA0GCSqGSIb3DQEBBQUAMIGiMQswCQYD\n"
+ "VQQGEwJDQTELMAkGA1UECBMCQkMxEjAQBgNVBAcTCVZhbmNvdXZlcjEWMBQGA1UE\n"
+ "ChMNd3d3LmN1Y2JjLmNvbTEUMBIGA1UECxQLY29tbW9uc19zc2wxHTAbBgNVBAMU\n"
+ "FGRlbW9faW50ZXJtZWRpYXRlX2NhMSUwIwYJKoZIhvcNAQkBFhZqdWxpdXNkYXZp\n"
+ "ZXNAZ21haWwuY29tMB4XDTA2MTIxMTE1MzYyOVoXDTI4MTEwNTE1MzYyOVowgaQx\n"
+ "CzAJBgNVBAYTAlVTMREwDwYDVQQIEwhNYXJ5bGFuZDEUMBIGA1UEBxMLRm9yZXN0\n"
+ "IEhpbGwxFzAVBgNVBAoTDmh0dHBjb21wb25lbnRzMRowGAYDVQQLExF0ZXN0IGNl\n"
+ "cnRpZmljYXRlczEQMA4GA1UEAxMHZm9vLmNvbTElMCMGCSqGSIb3DQEJARYWanVs\n"
+ "aXVzZGF2aWVzQGdtYWlsLmNvbTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC\n"
+ "ggEBAMhjr5aCPoyp0R1iroWAfnEyBMGYWoCidH96yGPFjYLowez5aYKY1IOKTY2B\n"
+ "lYho4O84X244QrZTRl8kQbYtxnGh4gSCD+Z8gjZ/gMvLUlhqOb+WXPAUHMB39GRy\n"
+ "zerA/ZtrlUqf+lKo0uWcocxeRc771KN8cPH3nHZ0rV0Hx4ZAZy6U4xxObe4rtSVY\n"
+ "07hNKXAb2odnVqgzcYiDkLV8ilvEmoNWMWrp8UBqkTcpEhYhCYp3cTkgJwMSuqv8\n"
+ "BqnGd87xQU3FVZI4tbtkB+KzjD9zz8QCDJAfDjZHR03KNQ5mxOgXwxwKw6lGMaiV\n"
+ "JTxpTKqym93whYk93l3ocEe55c0CAwEAAaOBkDCBjTAJBgNVHRMEAjAAMCwGCWCG\n"
+ "SAGG+EIBDQQfFh1PcGVuU1NMIEdlbmVyYXRlZCBDZXJ0aWZpY2F0ZTAdBgNVHQ4E\n"
+ "FgQUnxR3vz86tso4gkJIFiza0Mteh9gwHwYDVR0jBBgwFoAUe5raj5CZTlLSrNuz\n"
+ "A1LKh6YNPg0wEgYDVR0RBAswCYIHYmFyLmNvbTANBgkqhkiG9w0BAQUFAAOCAQEA\n"
+ "dQyprNZBmVnvuVWjV42sey/PTfkYShJwy1j0/jcFZR/ypZUovpiHGDO1DgL3Y3IP\n"
+ "zVQ26uhUsSw6G0gGRiaBDe/0LUclXZoJzXX1qpS55OadxW73brziS0sxRgGrZE/d\n"
+ "3g5kkio6IED47OP6wYnlmZ7EKP9cqjWwlnvHnnUcZ2SscoLNYs9rN9ccp8tuq2by\n"
+ "88OyhKwGjJfhOudqfTNZcDzRHx4Fzm7UsVaycVw4uDmhEHJrAsmMPpj/+XRK9/42\n"
+ "2xq+8bc6HojdtbCyug/fvBZvZqQXSmU8m8IVcMmWMz0ZQO8ee3QkBHMZfCy7P/kr\n"
+ "VbWx/uETImUu+NZg22ewEw==\n"
+ "-----END CERTIFICATE-----\n").getBytes();
/**
* CN=foo.com, subjectAlt=bar.com, subjectAlt=花子.co.jp
* (hanako.co.jp in kanji)
*/
public static final byte[] X509_FOO_BAR_HANAKO = (
"-----BEGIN CERTIFICATE-----\n"
+ "MIIEajCCA1KgAwIBAgIJAIz+EYMBU6aSMA0GCSqGSIb3DQEBBQUAMIGiMQswCQYD\n"
+ "VQQGEwJDQTELMAkGA1UECBMCQkMxEjAQBgNVBAcTCVZhbmNvdXZlcjEWMBQGA1UE\n"
+ "ChMNd3d3LmN1Y2JjLmNvbTEUMBIGA1UECxQLY29tbW9uc19zc2wxHTAbBgNVBAMU\n"
+ "FGRlbW9faW50ZXJtZWRpYXRlX2NhMSUwIwYJKoZIhvcNAQkBFhZqdWxpdXNkYXZp\n"
+ "ZXNAZ21haWwuY29tMB4XDTA2MTIxMTE1MzgxM1oXDTI4MTEwNTE1MzgxM1owgaQx\n"
+ "CzAJBgNVBAYTAlVTMREwDwYDVQQIEwhNYXJ5bGFuZDEUMBIGA1UEBxMLRm9yZXN0\n"
+ "IEhpbGwxFzAVBgNVBAoTDmh0dHBjb21wb25lbnRzMRowGAYDVQQLExF0ZXN0IGNl\n"
+ "cnRpZmljYXRlczEQMA4GA1UEAxMHZm9vLmNvbTElMCMGCSqGSIb3DQEJARYWanVs\n"
+ "aXVzZGF2aWVzQGdtYWlsLmNvbTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC\n"
+ "ggEBAMhjr5aCPoyp0R1iroWAfnEyBMGYWoCidH96yGPFjYLowez5aYKY1IOKTY2B\n"
+ "lYho4O84X244QrZTRl8kQbYtxnGh4gSCD+Z8gjZ/gMvLUlhqOb+WXPAUHMB39GRy\n"
+ "zerA/ZtrlUqf+lKo0uWcocxeRc771KN8cPH3nHZ0rV0Hx4ZAZy6U4xxObe4rtSVY\n"
+ "07hNKXAb2odnVqgzcYiDkLV8ilvEmoNWMWrp8UBqkTcpEhYhCYp3cTkgJwMSuqv8\n"
+ "BqnGd87xQU3FVZI4tbtkB+KzjD9zz8QCDJAfDjZHR03KNQ5mxOgXwxwKw6lGMaiV\n"
+ "JTxpTKqym93whYk93l3ocEe55c0CAwEAAaOBnjCBmzAJBgNVHRMEAjAAMCwGCWCG\n"
+ "SAGG+EIBDQQfFh1PcGVuU1NMIEdlbmVyYXRlZCBDZXJ0aWZpY2F0ZTAdBgNVHQ4E\n"
+ "FgQUnxR3vz86tso4gkJIFiza0Mteh9gwHwYDVR0jBBgwFoAUe5raj5CZTlLSrNuz\n"
+ "A1LKh6YNPg0wIAYDVR0RBBkwF4IHYmFyLmNvbYIM6Iqx5a2QLmNvLmpwMA0GCSqG\n"
+ "SIb3DQEBBQUAA4IBAQBeZs7ZIYyKtdnVxVvdLgwySEPOE4pBSXii7XYv0Q9QUvG/\n"
+ "++gFGQh89HhABzA1mVUjH5dJTQqSLFvRfqTHqLpxSxSWqMHnvRM4cPBkIRp/XlMK\n"
+ "PlXadYtJLPTgpbgvulA1ickC9EwlNYWnowZ4uxnfsMghW4HskBqaV+PnQ8Zvy3L0\n"
+ "12c7Cg4mKKS5pb1HdRuiD2opZ+Hc77gRQLvtWNS8jQvd/iTbh6fuvTKfAOFoXw22\n"
+ "sWIKHYrmhCIRshUNohGXv50m2o+1w9oWmQ6Dkq7lCjfXfUB4wIbggJjpyEtbNqBt\n"
+ "j4MC2x5rfsLKKqToKmNE7pFEgqwe8//Aar1b+Qj+\n"
+ "-----END CERTIFICATE-----\n").getBytes();
/**
* CN=*.foo.com
*/
public static final byte[] X509_WILD_FOO = (
"-----BEGIN CERTIFICATE-----\n"
+ "MIIESDCCAzCgAwIBAgIJAIz+EYMBU6aUMA0GCSqGSIb3DQEBBQUAMIGiMQswCQYD\n"
+ "VQQGEwJDQTELMAkGA1UECBMCQkMxEjAQBgNVBAcTCVZhbmNvdXZlcjEWMBQGA1UE\n"
+ "ChMNd3d3LmN1Y2JjLmNvbTEUMBIGA1UECxQLY29tbW9uc19zc2wxHTAbBgNVBAMU\n"
+ "FGRlbW9faW50ZXJtZWRpYXRlX2NhMSUwIwYJKoZIhvcNAQkBFhZqdWxpdXNkYXZp\n"
+ "ZXNAZ21haWwuY29tMB4XDTA2MTIxMTE2MTU1NVoXDTI4MTEwNTE2MTU1NVowgaYx\n"
+ "CzAJBgNVBAYTAlVTMREwDwYDVQQIEwhNYXJ5bGFuZDEUMBIGA1UEBxMLRm9yZXN0\n"
+ "IEhpbGwxFzAVBgNVBAoTDmh0dHBjb21wb25lbnRzMRowGAYDVQQLExF0ZXN0IGNl\n"
+ "cnRpZmljYXRlczESMBAGA1UEAxQJKi5mb28uY29tMSUwIwYJKoZIhvcNAQkBFhZq\n"
+ "dWxpdXNkYXZpZXNAZ21haWwuY29tMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB\n"
+ "CgKCAQEAyGOvloI+jKnRHWKuhYB+cTIEwZhagKJ0f3rIY8WNgujB7PlpgpjUg4pN\n"
+ "jYGViGjg7zhfbjhCtlNGXyRBti3GcaHiBIIP5nyCNn+Ay8tSWGo5v5Zc8BQcwHf0\n"
+ "ZHLN6sD9m2uVSp/6UqjS5ZyhzF5FzvvUo3xw8fecdnStXQfHhkBnLpTjHE5t7iu1\n"
+ "JVjTuE0pcBvah2dWqDNxiIOQtXyKW8Sag1YxaunxQGqRNykSFiEJindxOSAnAxK6\n"
+ "q/wGqcZ3zvFBTcVVkji1u2QH4rOMP3PPxAIMkB8ONkdHTco1DmbE6BfDHArDqUYx\n"
+ "qJUlPGlMqrKb3fCFiT3eXehwR7nlzQIDAQABo3sweTAJBgNVHRMEAjAAMCwGCWCG\n"
+ "SAGG+EIBDQQfFh1PcGVuU1NMIEdlbmVyYXRlZCBDZXJ0aWZpY2F0ZTAdBgNVHQ4E\n"
+ "FgQUnxR3vz86tso4gkJIFiza0Mteh9gwHwYDVR0jBBgwFoAUe5raj5CZTlLSrNuz\n"
+ "A1LKh6YNPg0wDQYJKoZIhvcNAQEFBQADggEBAH0ipG6J561UKUfgkeW7GvYwW98B\n"
+ "N1ZooWX+JEEZK7+Pf/96d3Ij0rw9ACfN4bpfnCq0VUNZVSYB+GthQ2zYuz7tf/UY\n"
+ "A6nxVgR/IjG69BmsBl92uFO7JTNtHztuiPqBn59pt+vNx4yPvno7zmxsfI7jv0ww\n"
+ "yfs+0FNm7FwdsC1k47GBSOaGw38kuIVWqXSAbL4EX9GkryGGOKGNh0qvAENCdRSB\n"
+ "G9Z6tyMbmfRY+dLSh3a9JwoEcBUso6EWYBakLbq4nG/nvYdYvG9ehrnLVwZFL82e\n"
+ "l3Q/RK95bnA6cuRClGusLad0e6bjkBzx/VQ3VarDEpAkTLUGVAa0CLXtnyc=\n"
+ "-----END CERTIFICATE-----\n").getBytes();
/**
* CN=*.co.jp
*/
public static final byte[] X509_WILD_CO_JP = (
"-----BEGIN CERTIFICATE-----\n"
+ "MIIERjCCAy6gAwIBAgIJAIz+EYMBU6aVMA0GCSqGSIb3DQEBBQUAMIGiMQswCQYD\n"
+ "VQQGEwJDQTELMAkGA1UECBMCQkMxEjAQBgNVBAcTCVZhbmNvdXZlcjEWMBQGA1UE\n"
+ "ChMNd3d3LmN1Y2JjLmNvbTEUMBIGA1UECxQLY29tbW9uc19zc2wxHTAbBgNVBAMU\n"
+ "FGRlbW9faW50ZXJtZWRpYXRlX2NhMSUwIwYJKoZIhvcNAQkBFhZqdWxpdXNkYXZp\n"
+ "ZXNAZ21haWwuY29tMB4XDTA2MTIxMTE2MTYzMFoXDTI4MTEwNTE2MTYzMFowgaQx\n"
+ "CzAJBgNVBAYTAlVTMREwDwYDVQQIEwhNYXJ5bGFuZDEUMBIGA1UEBxMLRm9yZXN0\n"
+ "IEhpbGwxFzAVBgNVBAoTDmh0dHBjb21wb25lbnRzMRowGAYDVQQLExF0ZXN0IGNl\n"
+ "cnRpZmljYXRlczEQMA4GA1UEAxQHKi5jby5qcDElMCMGCSqGSIb3DQEJARYWanVs\n"
+ "aXVzZGF2aWVzQGdtYWlsLmNvbTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC\n"
+ "ggEBAMhjr5aCPoyp0R1iroWAfnEyBMGYWoCidH96yGPFjYLowez5aYKY1IOKTY2B\n"
+ "lYho4O84X244QrZTRl8kQbYtxnGh4gSCD+Z8gjZ/gMvLUlhqOb+WXPAUHMB39GRy\n"
+ "zerA/ZtrlUqf+lKo0uWcocxeRc771KN8cPH3nHZ0rV0Hx4ZAZy6U4xxObe4rtSVY\n"
+ "07hNKXAb2odnVqgzcYiDkLV8ilvEmoNWMWrp8UBqkTcpEhYhCYp3cTkgJwMSuqv8\n"
+ "BqnGd87xQU3FVZI4tbtkB+KzjD9zz8QCDJAfDjZHR03KNQ5mxOgXwxwKw6lGMaiV\n"
+ "JTxpTKqym93whYk93l3ocEe55c0CAwEAAaN7MHkwCQYDVR0TBAIwADAsBglghkgB\n"
+ "hvhCAQ0EHxYdT3BlblNTTCBHZW5lcmF0ZWQgQ2VydGlmaWNhdGUwHQYDVR0OBBYE\n"
+ "FJ8Ud78/OrbKOIJCSBYs2tDLXofYMB8GA1UdIwQYMBaAFHua2o+QmU5S0qzbswNS\n"
+ "yoemDT4NMA0GCSqGSIb3DQEBBQUAA4IBAQA0sWglVlMx2zNGvUqFC73XtREwii53\n"
+ "CfMM6mtf2+f3k/d8KXhLNySrg8RRlN11zgmpPaLtbdTLrmG4UdAHHYr8O4y2BBmE\n"
+ "1cxNfGxxechgF8HX10QV4dkyzp6Z1cfwvCeMrT5G/V1pejago0ayXx+GPLbWlNeZ\n"
+ "S+Kl0m3p+QplXujtwG5fYcIpaGpiYraBLx3Tadih39QN65CnAh/zRDhLCUzKyt9l\n"
+ "UGPLEUDzRHMPHLnSqT1n5UU5UDRytbjJPXzF+l/+WZIsanefWLsxnkgAuZe/oMMF\n"
+ "EJMryEzOjg4Tfuc5qM0EXoPcQ/JlheaxZ40p2IyHqbsWV4MRYuFH4bkM\n"
+ "-----END CERTIFICATE-----\n").getBytes();
/**
* CN=*.foo.com, subjectAlt=*.bar.com, subjectAlt=*.花子.co.jp
* (*.hanako.co.jp in kanji)
*/
public static final byte[] X509_WILD_FOO_BAR_HANAKO = (
"-----BEGIN CERTIFICATE-----\n"
+ "MIIEcDCCA1igAwIBAgIJAIz+EYMBU6aWMA0GCSqGSIb3DQEBBQUAMIGiMQswCQYD\n"
+ "VQQGEwJDQTELMAkGA1UECBMCQkMxEjAQBgNVBAcTCVZhbmNvdXZlcjEWMBQGA1UE\n"
+ "ChMNd3d3LmN1Y2JjLmNvbTEUMBIGA1UECxQLY29tbW9uc19zc2wxHTAbBgNVBAMU\n"
+ "FGRlbW9faW50ZXJtZWRpYXRlX2NhMSUwIwYJKoZIhvcNAQkBFhZqdWxpdXNkYXZp\n"
+ "ZXNAZ21haWwuY29tMB4XDTA2MTIxMTE2MTczMVoXDTI4MTEwNTE2MTczMVowgaYx\n"
+ "CzAJBgNVBAYTAlVTMREwDwYDVQQIEwhNYXJ5bGFuZDEUMBIGA1UEBxMLRm9yZXN0\n"
+ "IEhpbGwxFzAVBgNVBAoTDmh0dHBjb21wb25lbnRzMRowGAYDVQQLExF0ZXN0IGNl\n"
+ "cnRpZmljYXRlczESMBAGA1UEAxQJKi5mb28uY29tMSUwIwYJKoZIhvcNAQkBFhZq\n"
+ "dWxpdXNkYXZpZXNAZ21haWwuY29tMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB\n"
+ "CgKCAQEAyGOvloI+jKnRHWKuhYB+cTIEwZhagKJ0f3rIY8WNgujB7PlpgpjUg4pN\n"
+ "jYGViGjg7zhfbjhCtlNGXyRBti3GcaHiBIIP5nyCNn+Ay8tSWGo5v5Zc8BQcwHf0\n"
+ "ZHLN6sD9m2uVSp/6UqjS5ZyhzF5FzvvUo3xw8fecdnStXQfHhkBnLpTjHE5t7iu1\n"
+ "JVjTuE0pcBvah2dWqDNxiIOQtXyKW8Sag1YxaunxQGqRNykSFiEJindxOSAnAxK6\n"
+ "q/wGqcZ3zvFBTcVVkji1u2QH4rOMP3PPxAIMkB8ONkdHTco1DmbE6BfDHArDqUYx\n"
+ "qJUlPGlMqrKb3fCFiT3eXehwR7nlzQIDAQABo4GiMIGfMAkGA1UdEwQCMAAwLAYJ\n"
+ "YIZIAYb4QgENBB8WHU9wZW5TU0wgR2VuZXJhdGVkIENlcnRpZmljYXRlMB0GA1Ud\n"
+ "DgQWBBSfFHe/Pzq2yjiCQkgWLNrQy16H2DAfBgNVHSMEGDAWgBR7mtqPkJlOUtKs\n"
+ "27MDUsqHpg0+DTAkBgNVHREEHTAbggkqLmJhci5jb22CDiou6Iqx5a2QLmNvLmpw\n"
+ "MA0GCSqGSIb3DQEBBQUAA4IBAQBobWC+D5/lx6YhX64CwZ26XLjxaE0S415ajbBq\n"
+ "DK7lz+Rg7zOE3GsTAMi+ldUYnhyz0wDiXB8UwKXl0SDToB2Z4GOgqQjAqoMmrP0u\n"
+ "WB6Y6dpkfd1qDRUzI120zPYgSdsXjHW9q2H77iV238hqIU7qCvEz+lfqqWEY504z\n"
+ "hYNlknbUnR525ItosEVwXFBJTkZ3Yw8gg02c19yi8TAh5Li3Ad8XQmmSJMWBV4XK\n"
+ "qFr0AIZKBlg6NZZFf/0dP9zcKhzSriW27bY0XfzA6GSiRDXrDjgXq6baRT6YwgIg\n"
+ "pgJsDbJtZfHnV1nd3M6zOtQPm1TIQpNmMMMd/DPrGcUQerD3\n"
+ "-----END CERTIFICATE-----\n").getBytes();
/**
* CN=foo.com, CN=bar.com, CN=花子.co.jp
*/
public static final byte[] X509_THREE_CNS_FOO_BAR_HANAKO = (
"-----BEGIN CERTIFICATE-----\n"
+ "MIIEbzCCA1egAwIBAgIJAIz+EYMBU6aXMA0GCSqGSIb3DQEBBQUAMIGiMQswCQYD\n"
+ "VQQGEwJDQTELMAkGA1UECBMCQkMxEjAQBgNVBAcTCVZhbmNvdXZlcjEWMBQGA1UE\n"
+ "ChMNd3d3LmN1Y2JjLmNvbTEUMBIGA1UECxQLY29tbW9uc19zc2wxHTAbBgNVBAMU\n"
+ "FGRlbW9faW50ZXJtZWRpYXRlX2NhMSUwIwYJKoZIhvcNAQkBFhZqdWxpdXNkYXZp\n"
+ "ZXNAZ21haWwuY29tMB4XDTA2MTIxMTE2MTk0NVoXDTI4MTEwNTE2MTk0NVowgc0x\n"
+ "CzAJBgNVBAYTAlVTMREwDwYDVQQIDAhNYXJ5bGFuZDEUMBIGA1UEBwwLRm9yZXN0\n"
+ "IEhpbGwxFzAVBgNVBAoMDmh0dHBjb21wb25lbnRzMRowGAYDVQQLDBF0ZXN0IGNl\n"
+ "cnRpZmljYXRlczEQMA4GA1UEAwwHZm9vLmNvbTEQMA4GA1UEAwwHYmFyLmNvbTEV\n"
+ "MBMGA1UEAwwM6Iqx5a2QLmNvLmpwMSUwIwYJKoZIhvcNAQkBFhZqdWxpdXNkYXZp\n"
+ "ZXNAZ21haWwuY29tMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAyGOv\n"
+ "loI+jKnRHWKuhYB+cTIEwZhagKJ0f3rIY8WNgujB7PlpgpjUg4pNjYGViGjg7zhf\n"
+ "bjhCtlNGXyRBti3GcaHiBIIP5nyCNn+Ay8tSWGo5v5Zc8BQcwHf0ZHLN6sD9m2uV\n"
+ "Sp/6UqjS5ZyhzF5FzvvUo3xw8fecdnStXQfHhkBnLpTjHE5t7iu1JVjTuE0pcBva\n"
+ "h2dWqDNxiIOQtXyKW8Sag1YxaunxQGqRNykSFiEJindxOSAnAxK6q/wGqcZ3zvFB\n"
+ "TcVVkji1u2QH4rOMP3PPxAIMkB8ONkdHTco1DmbE6BfDHArDqUYxqJUlPGlMqrKb\n"
+ "3fCFiT3eXehwR7nlzQIDAQABo3sweTAJBgNVHRMEAjAAMCwGCWCGSAGG+EIBDQQf\n"
+ "Fh1PcGVuU1NMIEdlbmVyYXRlZCBDZXJ0aWZpY2F0ZTAdBgNVHQ4EFgQUnxR3vz86\n"
+ "tso4gkJIFiza0Mteh9gwHwYDVR0jBBgwFoAUe5raj5CZTlLSrNuzA1LKh6YNPg0w\n"
+ "DQYJKoZIhvcNAQEFBQADggEBAGuZb8ai1NO2j4v3y9TLZvd5s0vh5/TE7n7RX+8U\n"
+ "y37OL5k7x9nt0mM1TyAKxlCcY+9h6frue8MemZIILSIvMrtzccqNz0V1WKgA+Orf\n"
+ "uUrabmn+CxHF5gpy6g1Qs2IjVYWA5f7FROn/J+Ad8gJYc1azOWCLQqSyfpNRLSvY\n"
+ "EriQFEV63XvkJ8JrG62b+2OT2lqT4OO07gSPetppdlSa8NBSKP6Aro9RIX1ZjUZQ\n"
+ "SpQFCfo02NO0uNRDPUdJx2huycdNb+AXHaO7eXevDLJ+QnqImIzxWiY6zLOdzjjI\n"
+ "VBMkLHmnP7SjGSQ3XA4ByrQOxfOUTyLyE7NuemhHppuQPxE=\n"
+ "-----END CERTIFICATE-----\n").getBytes();
/**
* subjectAlt=foo.com
*/
public static final byte[] X509_NO_CNS_FOO = (
"-----BEGIN CERTIFICATE-----\n"
+ "MIIESjCCAzKgAwIBAgIJAIz+EYMBU6aYMA0GCSqGSIb3DQEBBQUAMIGiMQswCQYD\n"
+ "VQQGEwJDQTELMAkGA1UECBMCQkMxEjAQBgNVBAcTCVZhbmNvdXZlcjEWMBQGA1UE\n"
+ "ChMNd3d3LmN1Y2JjLmNvbTEUMBIGA1UECxQLY29tbW9uc19zc2wxHTAbBgNVBAMU\n"
+ "FGRlbW9faW50ZXJtZWRpYXRlX2NhMSUwIwYJKoZIhvcNAQkBFhZqdWxpdXNkYXZp\n"
+ "ZXNAZ21haWwuY29tMB4XDTA2MTIxMTE2MjYxMFoXDTI4MTEwNTE2MjYxMFowgZIx\n"
+ "CzAJBgNVBAYTAlVTMREwDwYDVQQIDAhNYXJ5bGFuZDEUMBIGA1UEBwwLRm9yZXN0\n"
+ "IEhpbGwxFzAVBgNVBAoMDmh0dHBjb21wb25lbnRzMRowGAYDVQQLDBF0ZXN0IGNl\n"
+ "cnRpZmljYXRlczElMCMGCSqGSIb3DQEJARYWanVsaXVzZGF2aWVzQGdtYWlsLmNv\n"
+ "bTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMhjr5aCPoyp0R1iroWA\n"
+ "fnEyBMGYWoCidH96yGPFjYLowez5aYKY1IOKTY2BlYho4O84X244QrZTRl8kQbYt\n"
+ "xnGh4gSCD+Z8gjZ/gMvLUlhqOb+WXPAUHMB39GRyzerA/ZtrlUqf+lKo0uWcocxe\n"
+ "Rc771KN8cPH3nHZ0rV0Hx4ZAZy6U4xxObe4rtSVY07hNKXAb2odnVqgzcYiDkLV8\n"
+ "ilvEmoNWMWrp8UBqkTcpEhYhCYp3cTkgJwMSuqv8BqnGd87xQU3FVZI4tbtkB+Kz\n"
+ "jD9zz8QCDJAfDjZHR03KNQ5mxOgXwxwKw6lGMaiVJTxpTKqym93whYk93l3ocEe5\n"
+ "5c0CAwEAAaOBkDCBjTAJBgNVHRMEAjAAMCwGCWCGSAGG+EIBDQQfFh1PcGVuU1NM\n"
+ "IEdlbmVyYXRlZCBDZXJ0aWZpY2F0ZTAdBgNVHQ4EFgQUnxR3vz86tso4gkJIFiza\n"
+ "0Mteh9gwHwYDVR0jBBgwFoAUe5raj5CZTlLSrNuzA1LKh6YNPg0wEgYDVR0RBAsw\n"
+ "CYIHZm9vLmNvbTANBgkqhkiG9w0BAQUFAAOCAQEAjl78oMjzFdsMy6F1sGg/IkO8\n"
+ "tF5yUgPgFYrs41yzAca7IQu6G9qtFDJz/7ehh/9HoG+oqCCIHPuIOmS7Sd0wnkyJ\n"
+ "Y7Y04jVXIb3a6f6AgBkEFP1nOT0z6kjT7vkA5LJ2y3MiDcXuRNMSta5PYVnrX8aZ\n"
+ "yiqVUNi40peuZ2R8mAUSBvWgD7z2qWhF8YgDb7wWaFjg53I36vWKn90ZEti3wNCw\n"
+ "qAVqixM+J0qJmQStgAc53i2aTMvAQu3A3snvH/PHTBo+5UL72n9S1kZyNCsVf1Qo\n"
+ "n8jKTiRriEM+fMFlcgQP284EBFzYHyCXFb9O/hMjK2+6mY9euMB1U1aFFzM/Bg==\n"
+ "-----END CERTIFICATE-----\n").getBytes();
/**
* Intermediate CA for all of these.
*/
public static final byte[] X509_INTERMEDIATE_CA = (
"-----BEGIN CERTIFICATE-----\n"
+ "MIIEnDCCA4SgAwIBAgIJAJTNwZ6yNa5cMA0GCSqGSIb3DQEBBQUAMIGGMQswCQYD\n"
+ "VQQGEwJDQTELMAkGA1UECBMCQkMxFjAUBgNVBAoTDXd3dy5jdWNiYy5jb20xFDAS\n"
+ "BgNVBAsUC2NvbW1vbnNfc3NsMRUwEwYDVQQDFAxkZW1vX3Jvb3RfY2ExJTAjBgkq\n"
+ "hkiG9w0BCQEWFmp1bGl1c2Rhdmllc0BnbWFpbC5jb20wHhcNMDYxMTA1MjE0OTMx\n"
+ "WhcNMDcxMTA1MjE0OTMxWjCBojELMAkGA1UEBhMCQ0ExCzAJBgNVBAgTAkJDMRIw\n"
+ "EAYDVQQHEwlWYW5jb3V2ZXIxFjAUBgNVBAoTDXd3dy5jdWNiYy5jb20xFDASBgNV\n"
+ "BAsUC2NvbW1vbnNfc3NsMR0wGwYDVQQDFBRkZW1vX2ludGVybWVkaWF0ZV9jYTEl\n"
+ "MCMGCSqGSIb3DQEJARYWanVsaXVzZGF2aWVzQGdtYWlsLmNvbTCCASIwDQYJKoZI\n"
+ "hvcNAQEBBQADggEPADCCAQoCggEBAL0S4y3vUO0EM6lwqOEfK8fvrUprIbsikXaG\n"
+ "XzejcZ+T3l2Dc7t8WtBfRf78i4JypMqJQSijrUicj3H6mOMIReKaXm6ls4hA5d8w\n"
+ "Lhmgiqsz/kW+gA8SeWGWRN683BD/RbQmzOls6ynBvap9jZlthXWBrSIlPCQoBLXY\n"
+ "KVaxGzbL4ezaq+XFMKMQSm2uKwVmHHQNbfmZlPsuendBVomb/ked53Ab9IH6dwwN\n"
+ "qJH9WIrvIzIVEXWlpvQ5MCqozM7u1akU+G8cazr8theGPCaYkzoXnigWua4OjdpV\n"
+ "9z5ZDknhfBzG1AjapdG07FIirwWWgIyZXqZSD96ikmLtwT29qnsCAwEAAaOB7jCB\n"
+ "6zAdBgNVHQ4EFgQUe5raj5CZTlLSrNuzA1LKh6YNPg0wgbsGA1UdIwSBszCBsIAU\n"
+ "rN8eFIvMiRFXXgDqKumS0/W2AhOhgYykgYkwgYYxCzAJBgNVBAYTAkNBMQswCQYD\n"
+ "VQQIEwJCQzEWMBQGA1UEChMNd3d3LmN1Y2JjLmNvbTEUMBIGA1UECxQLY29tbW9u\n"
+ "c19zc2wxFTATBgNVBAMUDGRlbW9fcm9vdF9jYTElMCMGCSqGSIb3DQEJARYWanVs\n"
+ "aXVzZGF2aWVzQGdtYWlsLmNvbYIJAJTNwZ6yNa5bMAwGA1UdEwQFMAMBAf8wDQYJ\n"
+ "KoZIhvcNAQEFBQADggEBAIB4KMZvHD20pdKajFtMBpL7X4W4soq6EeTtjml3NYa9\n"
+ "Qc52bsQEGNccKY9afYSBIndaQvFdtmz6HdoN+B8TjYShw2KhyjtKimGLpWYoi1YF\n"
+ "e4aHdmA/Gp5xk8pZzR18FmooxC9RqBux+NAM2iTFSLgDtGIIj4sg2rbn6Bb6ZlQT\n"
+ "1rg6VucXCA1629lNfMeNcu7CBNmUKIdaxHR/YJQallE0KfGRiOIWPrPj/VNk0YA6\n"
+ "XFg0ocjqXJ2/N0N9rWVshMUaXgOh7m4D/5zga5/nuxDU+PoToA6mQ4bV6eCYqZbh\n"
+ "aa1kQYtR9B4ZiG6pB82qVc2dCqStOH2FAEWos2gAVkQ=\n"
+ "-----END CERTIFICATE-----\n").getBytes();
/**
* Root CA for all of these.
*/
public static final byte[] X509_ROOT_CA = (
"-----BEGIN CERTIFICATE-----\n"
+ "MIIEgDCCA2igAwIBAgIJAJTNwZ6yNa5bMA0GCSqGSIb3DQEBBQUAMIGGMQswCQYD\n"
+ "VQQGEwJDQTELMAkGA1UECBMCQkMxFjAUBgNVBAoTDXd3dy5jdWNiYy5jb20xFDAS\n"
+ "BgNVBAsUC2NvbW1vbnNfc3NsMRUwEwYDVQQDFAxkZW1vX3Jvb3RfY2ExJTAjBgkq\n"
+ "hkiG9w0BCQEWFmp1bGl1c2Rhdmllc0BnbWFpbC5jb20wHhcNMDYxMTA1MjEzNjQz\n"
+ "WhcNMjYxMTA1MjEzNjQzWjCBhjELMAkGA1UEBhMCQ0ExCzAJBgNVBAgTAkJDMRYw\n"
+ "FAYDVQQKEw13d3cuY3VjYmMuY29tMRQwEgYDVQQLFAtjb21tb25zX3NzbDEVMBMG\n"
+ "A1UEAxQMZGVtb19yb290X2NhMSUwIwYJKoZIhvcNAQkBFhZqdWxpdXNkYXZpZXNA\n"
+ "Z21haWwuY29tMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAv+OnocmJ\n"
+ "79UeO2hlCwK+Cle5uZWnU6uwJl+08z5cvebb5tT64WL9+psDbfgUH/Gm9JsuxKTg\n"
+ "w1tZO/4duIgnaLNSx4HoqaTjwigd/hR3TsoGEPXTCkz1ikgTCOEDvl+iMid6aOrd\n"
+ "mViE8HhscxKZ+h5FE7oHZyuT6gFoiaIXhFq+xK2w4ZwDz9L+paiwqywyUJJMnh9U\n"
+ "jKorY+nua81N0oxpIhHPspCanDU4neMzCzYOZyLR/LqV5xORvHcFY84GWMz5hI25\n"
+ "JbgaWJsYKuCAvNsnQwVoqKPGa7x1fn7x6oGsXJaCVt8weUwIj2xwg1lxMhrNaisH\n"
+ "EvKpEAEnGGwWKQIDAQABo4HuMIHrMB0GA1UdDgQWBBSs3x4Ui8yJEVdeAOoq6ZLT\n"
+ "9bYCEzCBuwYDVR0jBIGzMIGwgBSs3x4Ui8yJEVdeAOoq6ZLT9bYCE6GBjKSBiTCB\n"
+ "hjELMAkGA1UEBhMCQ0ExCzAJBgNVBAgTAkJDMRYwFAYDVQQKEw13d3cuY3VjYmMu\n"
+ "Y29tMRQwEgYDVQQLFAtjb21tb25zX3NzbDEVMBMGA1UEAxQMZGVtb19yb290X2Nh\n"
+ "MSUwIwYJKoZIhvcNAQkBFhZqdWxpdXNkYXZpZXNAZ21haWwuY29tggkAlM3BnrI1\n"
+ "rlswDAYDVR0TBAUwAwEB/zANBgkqhkiG9w0BAQUFAAOCAQEAlPl3/8h1LttR1svC\n"
+ "S8RXbHpAWIT2BEDhGHUNjSmgDQNkE/itf/FCEXh0tlU4bYdtBSOHzflbnzOyIPId\n"
+ "VZeSWs33V38xDFy6KoVg1gT8JxkLmE5S1vWkpsHIlpw/U6r7KD0Kx9FYx5AiXjw0\n"
+ "lzz/zlVNuO2U09KIDwDPVG1mBzQiMiSWj1U1pM4KxINkWQwDy/fvu/I983s8lW5z\n"
+ "hf2WuFNzQN3fcMK5dpBE9NVIu27oYuGYh2sak34v+7T700W2ooBB71qFXtm9P5rl\n"
+ "Yp9RCEsg3KEEPNTtCBs8fROeXvLDrP0cmBIqwGYDuRNCxFDTOdjv6YGdA8nLOjaH\n"
+ "2dDk0g==\n"
+ "-----END CERTIFICATE-----\n").getBytes();
    /**
     * Below is the private key for all the server certificates above (but
     * not the intermediate CA or the root CA). All of those server certs
     * came from the same private key.
     *
     * <p>Each component is a hex-encoded (modulus and CRT parameters) or
     * decimal (public exponent) big integer, suitable for reconstructing the
     * RSA key pair via {@code java.security.spec.RSAPrivateCrtKeySpec}.
     * NOTE(review): the CRT-parameter roles below follow standard RSA naming
     * (p, q, dP, dQ, qInv) — confirm against the consuming test code.
     */
    // n — the RSA modulus (hex).
    public static final String RSA_PUBLIC_MODULUS =
        "00c863af96823e8ca9d11d62ae85807e713204c1985a80a2747f7ac863c5"
        + "8d82e8c1ecf9698298d4838a4d8d81958868e0ef385f6e3842b653465f24"
        + "41b62dc671a1e204820fe67c82367f80cbcb52586a39bf965cf0141cc077"
        + "f46472cdeac0fd9b6b954a9ffa52a8d2e59ca1cc5e45cefbd4a37c70f1f7"
        + "9c7674ad5d07c78640672e94e31c4e6dee2bb52558d3b84d29701bda8767"
        + "56a83371888390b57c8a5bc49a8356316ae9f1406a913729121621098a77"
        + "713920270312baabfc06a9c677cef1414dc5559238b5bb6407e2b38c3f73"
        + "cfc4020c901f0e3647474dca350e66c4e817c31c0ac3a94631a895253c69"
        + "4caab29bddf085893dde5de87047b9e5cd";
    // e — the public exponent (decimal); the common value 2^16 + 1.
    public static final String RSA_PUBLIC_EXPONENT = "65537";
    // d — the private exponent (hex).
    public static final String RSA_PRIVATE_EXPONENT =
        "577abd3295553d0efd4d38c13b62a6d03fa7b7e40cce4f1d5071877d96c6"
        + "7a39a63f0f7ab21a89db8acae45587b3ef251309a70f74dc1ac02bde68f3"
        + "8ed658e54e685ed370a18c054449512ea66a2252ed36e82b565b5159ec83"
        + "f23df40ae189550a183865b25fd77789e960f0d8cedcd72f32d7a66edb4b"
        + "a0a2baf3fbeb6c7d75f56ef0af9a7cff1c8c7f297d72eae7982164e50a89"
        + "d450698cf598d39343201094241d2d180a95882a7111e58f4a5bdbc5c125"
        + "a967dd6ed9ec614c5853e88e4c71e8b682a7cf89cb1d82b6fe78cc865084"
        + "c8c5dfbb50c939df2b839c977b0245bfa3615e0592b527b1013d5b675ecb"
        + "44e6b355c1df581f50997175166eef39";
    // p — first prime factor of the modulus (hex).
    public static final String RSA_PRIME1 =
        "00fe759c4f0ce8b763880215e82767e7a937297668f4e4b1e119c6b22a3c"
        + "a2c7b06c547d88d0aa45f645d7d3aeadaf7f8bc594deae0978529592977c"
        + "b1ff890f05033a9e9e15551cad9fbf9c41d12139ccd99c1c3ac7b2197eff"
        + "350d236bb900c1440953b64956e0a058ef824a2e16894af175177c77dbe1"
        + "fef7d8b532608d2513";
    // q — second prime factor of the modulus (hex).
    public static final String RSA_PRIME2 =
        "00c99a45878737a4cf73f9896680b75487f1b669b7686a6ba07103856f31"
        + "db668c2c440c44cdd116f708f631c37a9adf119f5b5cb58ffe3dc62e20af"
        + "af72693d936dc6bb3c5194996468389c1f094079b81522e94572b4ad7d39"
        + "529178e9b8ebaeb1f0fdd83b8731c5223f1dea125341d1d64917f6b1a6ae"
        + "c18d320510d79f859f";
    // dP — CRT exponent d mod (p - 1) (hex).
    public static final String RSA_EXPONENT1 =
        "029febf0d4cd41b7011c2465b4a259bd6118486464c247236f44a169d61e"
        + "47b9062508f674508d5031003ceabc57e714e600d71b2c75d5443db2da52"
        + "6bb45a374f0537c5a1aab3150764ce93cf386c84346a6bd01f6732e42075"
        + "c7a0e9e78a9e73b934e7d871d0f75673820089e129a1604438edcbbeb4e2"
        + "106467da112ce389";
    // dQ — CRT exponent d mod (q - 1) (hex).
    public static final String RSA_EXPONENT2 =
        "00827e76650c946afcd170038d32e1f8386ab00d6be78d830efe382e45d4"
        + "7ad4bd04e6231ee22e66740efbf52838134932c9f8c460cdccdec58a1424"
        + "4427859192fd6ab6c58b74e97941b0eaf577f2a11713af5e5952af3ae124"
        + "9a9a892e98410dfa2628d9af668a43b5302fb7d496c9b2fec69f595292b6"
        + "e997f079b0f6314eb7";
    // qInv — CRT coefficient q^-1 mod p (hex).
    public static final String RSA_COEFFICIENT =
        "00e6b62add350f1a2a8968903ff76c31cf703b0d7326c4a620aef01225b7"
        + "1640b3f2ec375208c5f7299863f6005b7799b6e529bb1133c8435bf5fdb5"
        + "a786f6cd8a19ee7094a384e6557c600a38845a0960ddbfd1df18d0af5740"
        + "001853788f1b5ccbf9affb4c52c9d2efdb8aab0183d86735b32737fb4e79"
        + "2b8a9c7d91c7d175ae";
/**
* subjectAlt=IP Address:127.0.0.1, email:oleg@ural.ru, DNS:localhost.localdomain
*/
public static final byte[] X509_MULTIPLE_SUBJECT_ALT = (
"-----BEGIN CERTIFICATE-----\n"
+ "MIIDcTCCAtqgAwIBAgIBATANBgkqhkiG9w0BAQUFADBAMQswCQYDVQQGEwJDSDEL\n"
+ "MAkGA1UECBMCWkgxDzANBgNVBAcTBlp1cmljaDETMBEGA1UEAxMKTXkgVGVzdCBD\n"
+ "QTAeFw0wODEwMzExMTU3NDVaFw0wOTEwMzExMTU3NDVaMGkxCzAJBgNVBAYTAkNI\n"
+ "MRAwDgYDVQQIEwdVbmtub3duMRAwDgYDVQQHEwdVbmtub3duMRAwDgYDVQQKEwdV\n"
+ "bmtub3duMRAwDgYDVQQLEwdVbmtub3duMRIwEAYDVQQDEwlsb2NhbGhvc3QwggG4\n"
+ "MIIBLAYHKoZIzjgEATCCAR8CgYEA/X9TgR11EilS30qcLuzk5/YRt1I870QAwx4/\n"
+ "gLZRJmlFXUAiUftZPY1Y+r/F9bow9subVWzXgTuAHTRv8mZgt2uZUKWkn5/oBHsQ\n"
+ "IsJPu6nX/rfGG/g7V+fGqKYVDwT7g/bTxR7DAjVUE1oWkTL2dfOuK2HXKu/yIgMZ\n"
+ "ndFIAccCFQCXYFCPFSMLzLKSuYKi64QL8Fgc9QKBgQD34aCF1ps93su8q1w2uFe5\n"
+ "eZSvu/o66oL5V0wLPQeCZ1FZV4661FlP5nEHEIGAtEkWcSPoTCgWE7fPCTKMyKbh\n"
+ "PBZ6i1R8jSjgo64eK7OmdZFuo38L+iE1YvH7YnoBJDvMpPG+qFGQiaiD3+Fa5Z8G\n"
+ "kotmXoB7VSVkAUw7/s9JKgOBhQACgYEA6ogAb/YLM1Rz9AoXKW4LA70VtFf7Mqqp\n"
+ "divdu9f72WQc1vMKo1YMf3dQadkMfBYRvAAa1IXDnoiFCHhXnVRkWkoUBJyNebLB\n"
+ "N92CZc0RVFZiMFgQMEh8UldnvAIi4cBk0/YuN3BGl4MzmquVIGrFovdWGqeaveOu\n"
+ "Xcu4lKGJNiqjODA2MDQGA1UdEQQtMCuHBH8AAAGBDG9sZWdAdXJhbC5ydYIVbG9j\n"
+ "YWxob3N0LmxvY2FsZG9tYWluMA0GCSqGSIb3DQEBBQUAA4GBAIgEwIoCSRkU3O7K\n"
+ "USYaOYyfJB9hsvs6YpClvYXiQ/5kPGARP60pM62v4wC7wI9shEizokIAxY2+O3cC\n"
+ "vwuJhNYaa2FJMELIwRN3XES8X8R6JHWbPaRjaAAPhczuEd8SZYy8yiVLmJTgw0gH\n"
+ "BSW775NHlkjsscFVgXkNf0PobqJ9\n"
+ "-----END CERTIFICATE-----").getBytes();
/**
* subject CN=repository.infonotary.com (Multiple AVA in RDN).
*/
public static final byte[] X509_MULTIPLE_VALUE_AVA = (
"-----BEGIN CERTIFICATE-----\n"
+ "MIIFxzCCBK+gAwIBAgIIRO/2+/XA7z4wDQYJKoZIhvcNAQEFBQAwgZwxgZkwCQYD\n"
+ "VQQGDAJCRzAVBgNVBAoMDkluZm9Ob3RhcnkgUExDMBcGCgmSJomT8ixkARkWCWRv\n"
+ "bWFpbi1jYTAtBgNVBAMMJmktTm90YXJ5IFRydXN0UGF0aCBWYWxpZGF0ZWQgRG9t\n"
+ "YWluIENBMC0GA1UECwwmaS1Ob3RhcnkgVHJ1c3RQYXRoIFZhbGlkYXRlZCBEb21h\n"
+ "aW4gQ0EwHhcNMTIwNjE4MDg1MzIyWhcNMTMwNjE4MDg1MzIyWjCBxjGBwzAJBgNV\n"
+ "BAYTAkJHMBUGA1UEChMOSW5mb05vdGFyeSBQTEMwFwYDVQQLExBGaWxlcyBSZXBv\n"
+ "c2l0b3J5MBcGCgmSJomT8ixkARkWCWRvbWFpbi1jYTAgBgNVBAMTGXJlcG9zaXRv\n"
+ "cnkuaW5mb25vdGFyeS5jb20wIwYJKoZIhvcNAQkBFhZzdXBwb3J0QGluZm9ub3Rh\n"
+ "cnkuY29tMCYGCSqGSIb3DQEJAhMZcmVwb3NpdG9yeS5pbmZvbm90YXJ5LmNvbTCC\n"
+ "ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALKWjGpgsuz103xVEW/GSg5I\n"
+ "tBoLbXPxockabOTHnOh0VO2sImycyhBH78nMj+VMexn4y+kdCOuJqAA5LApxyhTA\n"
+ "KgKlRN7TfoC90IYHjB1dqLMIseg4YM7Oe0e4Z2nL50bHoqXg7OUHaILUQn7ufpYp\n"
+ "+VCWxyI43KvaR4+HnST3x47wqeArg/rULGV1a16X+46cxq2eoMAcDfostXHaemvz\n"
+ "vg/Wd5xcWfPbF/oY1/sBXH+AK+peVBMen82+3GtAWtNWbyPE3bT4RG+WgKUyfLZ1\n"
+ "7A67rX9DkUEVMPQpa50MpLnrRveiM9w6R3mrMHMHbNnwID0Tqfds5zzOi/7cLD0C\n"
+ "AwEAAaOCAd8wggHbMA4GA1UdDwEB/wQEAwIDuDATBgNVHSUEDDAKBggrBgEFBQcD\n"
+ "ATBEBggrBgEFBQcBAQQ4MDYwNAYIKwYBBQUHMAGGKGh0dHA6Ly9vY3NwLmluZm9u\n"
+ "b3RhcnkuY29tL3Jlc3BvbmRlci5jZ2kwgZAGA1UdIASBiDCBhTCBggYMKwYBBAGB\n"
+ "rQABAgMBMHIwOAYIKwYBBQUHAgEWLGh0dHA6Ly9yZXBvc2l0b3J5LmluZm9ub3Rh\n"
+ "cnkuY29tL2RvbWFpbi5odG1sMDYGCCsGAQUFBwICMCoaKGktTm90YXJ5IFZhbGlk\n"
+ "YXRlZCBEb21haW4gQ2VydGlmaWNhdGUgQ1AwgYkGA1UdHwSBgTB/MDWgL6Athito\n"
+ "dHRwOi8vY3JsLmluZm9ub3RhcnkuY29tL2NybC9kb21haW4tY2EuY3JsgQIBVjBG\n"
+ "oECgPoY8bGRhcDovL2xkYXAuaW5mb25vdGFyeS5jb20vZGM9ZG9tYWluLWNhLGRj\n"
+ "PWluZm9ub3RhcnksZGM9Y29tgQIBVjAPBgNVHRMBAf8EBTADAQEAMB0GA1UdDgQW\n"
+ "BBTImKJZrgV/8n7mHrA0U5EeGsBvbzAfBgNVHSMEGDAWgBTbkorEK+bPdVPpvyVI\n"
+ "PTxGFnuOoDANBgkqhkiG9w0BAQUFAAOCAQEAhsMbqsqvkbfVaKZ+wDY9rX3EtuDS\n"
+ "isdAo4AjmWgTtj/aBGiEiXcIGP312x+0JF+mEEQ75ZOKN+WsM8eLB0F4aqylklk7\n"
+ "6yRYauRXp8dfbXrT3ozxekt0cpSMqbzze456krI12nL+C00V2Iwq96k5J/yZboNW\n"
+ "Q+ibCaEAHNiL4tGVHSHm6znkWvIuUTbDgDEsm5RdafO27suz5H6zMnV+VE6onN1J\n"
+ "I1mQmUs44cg2HZAqnFBpDyJQhNYy8M7yGVaRkbfuVaMqiPa+xDPR5v7NFB3kxRq2\n"
+ "Za2Snopi52eUxDEhJ0MNqFi3Jfj/ZSmJ+XHra5lU4R8lijCAq8SVLZCmIQ==\n"
+ "-----END CERTIFICATE-----").getBytes();
public static final byte[] S_GOOGLE_COM = (
"-----BEGIN CERTIFICATE-----\n"
+ "MIICpzCCAY+gAwIBAgIBATANBgkqhkiG9w0BAQUFADAXMRUwEwYDVQQDDAwqLmdv\n"
+ "b2dsZS5jb20wHhcNMTcwMTEzMjI0OTAzWhcNMTgwMTEzMjI0OTAzWjAXMRUwEwYD\n"
+ "VQQDDAwqLmdvb2dsZS5jb20wggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB\n"
+ "AQDHuzznuHdJ5PH344xCyGYnUnIRhyLGBKN3WDLLrXWtr/5Sf3Q1qkiMiJ4BINsh\n"
+ "3Xy0z7VvHmMFlntgHXtkofBUPvTihxsVIypRkCZb5hpsWLotR10AW2JpVl/oxLP2\n"
+ "227/36X1zKh33fjImLJl9KzGWHLsbCBleQQJOn7YRsNR/QBZO0XGGkN/R2rRfLF3\n"
+ "rseRfI5gJjZkO0WDxocnf/iieOe0XNR0NAZaY1aozzPmZ/pRrOKYB8OFH7F73WOC\n"
+ "lPIUGai/byJ9SpbXdLUcMlGhml/4XzcnV/WVRD2P/mlY+xEFG3UEy3ufhNnKFJul\n"
+ "yjZrOaKbagamqtOyktzkjnerAgMBAAEwDQYJKoZIhvcNAQEFBQADggEBADaMcwVs\n"
+ "w5kbnoDJzMBJ01H16T4u8k78i/ybwz7u7krgkU0tABXCRj7S/4Dt3jqQ/rV6evj4\n"
+ "gIJ/2kZUp/PHKkV7CxWI48XBTAQUu9LEpxj0Hut3AtNMD9y/J6cFn2978tWsHFHI\n"
+ "mYgvclKUDE4WFMvuxfQVuX3RcGQ5i8khEMczY/KVhZYDcLU1PU0GTTJqqrQm59Z4\n"
+ "T4UyI3OPBR7Nb/kaU1fcgQ083uxRXcNYRMMZnU6c2oFnR+c6pO6aGoXo0C6rgC4R\n"
+ "pOj4hPvHCfZO2xg6HAdQ7UPALLX8pu5KGot7GRc8yiJ/Q1nBEuiPKKu0MIwQoFgP\n"
+ "WUux/APTsgLR7Vc=\n"
+ "-----END CERTIFICATE-----"
).getBytes();
public static final byte[] IP_1_1_1_1 = (
"-----BEGIN CERTIFICATE-----\n"
+ "MIICwjCCAaqgAwIBAgIBATANBgkqhkiG9w0BAQUFADAaMRgwFgYDVQQDEw9kdW1t\n"
+ "eS12YWx1ZS5jb20wHhcNMTcwMTEzMjI1MTQ2WhcNMTgwMTEzMjI1MTQ2WjAaMRgw\n"
+ "FgYDVQQDEw9kdW1teS12YWx1ZS5jb20wggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAw\n"
+ "ggEKAoIBAQDfrapp3jHLp1RlElzpR/4sF9AcTYwMF1N+adkHRoVtmTlJV2lTIAjn\n"
+ "QLauy0Kkzv8uxmbID3uROgrFNDQ5RxTTCe+kW/vE6Pyzr5Z5ayjSTKeycTE7mAC4\n"
+ "6ntoCeEWiD593zlfqVo5PuRSp9Kusd+kexNVjC/BETDPa3yXctcH1ouW9GyGItgQ\n"
+ "u4GhCE8cipKMuTltgfK+Gh/5e9lFG9/F2fD+wHUVBULLR3JOQoqwgk2zAwKDwLuS\n"
+ "sEd1CBi35+W3apCKN0SEdTKIAxc/R+O/1j2hpOl9yXCCYyveGwJdFXVZtDcx+9/H\n"
+ "7NXhOdmw/mTXC5fOQGKciEo2SXt8Wp89AgMBAAGjEzARMA8GA1UdEQQIMAaHBAEB\n"
+ "AQEwDQYJKoZIhvcNAQEFBQADggEBAEAO6CE8twpcfdjk9oMjI5nX9GdC5Wt6+ujd\n"
+ "tLj0SbXvMKzCLLkveT0xTEzXfyEo8KW2qYYvPP1h83BIxsbR/J3Swt35UQVofv+4\n"
+ "JgO0FIdgB+iLEcjUh5+60xslylqWE+9bSWm4f06OXuv78tq5NYPZKku/3i4tqLRp\n"
+ "gH2rTtjX7Q4olSS7GdAgfiA2AnDZAbMtxtsnTt/QFpYQqhlkqHVDwgkGP7C8aMBD\n"
+ "RH0UIQCPxUkhwhtNmVyHO42r6oHXselZoVU6XRHuhogrGxPf/pzDUvrKBiJhsZQQ\n"
+ "oEu+pZCwkFLiNwUoq1G2oDpkkdBWB0JcBXB2Txa536ezFFWZYc0=\n"
+ "-----END CERTIFICATE-----"
).getBytes();
public static final byte[] EMAIL_ALT_SUBJECT_NAME = (
"-----BEGIN CERTIFICATE-----\n"
+ "MIIDpTCCAo2gAwIBAgIJANqkMEtlkelbMA0GCSqGSIb3DQEBCwUAMHAxCzAJBgNV\n"
+ "BAYTAlVTMQswCQYDVQQIDAJWQTERMA8GA1UEBwwIU29tZUNpdHkxEjAQBgNVBAoM\n"
+ "CU15Q29tcGFueTETMBEGA1UECwwKTXlEaXZpc2lvbjEYMBYGA1UEAwwPd3d3LmNv\n"
+ "bXBhbnkuY29tMB4XDTE4MDIxNTA3MjkzMFoXDTIwMDIxNTA3MjkzMFowcDELMAkG\n"
+ "A1UEBhMCVVMxCzAJBgNVBAgMAlZBMREwDwYDVQQHDAhTb21lQ2l0eTESMBAGA1UE\n"
+ "CgwJTXlDb21wYW55MRMwEQYDVQQLDApNeURpdmlzaW9uMRgwFgYDVQQDDA93d3cu\n"
+ "Y29tcGFueS5jb20wggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC4v6Oq\n"
+ "Ua0goRVn1cmT7MOpJhXFm3A70bTpvJIRpEjtGIz99hb34/9r5AYyf1VhKyWmBq24\n"
+ "XNcOJ59XOlyjjbm2Tl811ufTOdcNbPadoVBmMt4039OSUFpVb4wAw2XPWLTCG2h1\n"
+ "HNj9GuFHmwcDsg5EiIRrhDGQm2LLLAGoe5PdReoMZCeeWzNWvKTCV14pyRzwQhJL\n"
+ "F1OmzLYzovbPfB8LZVhQgDbLsh034FScivf2oKDB+NEzAEagNpnrFR0MFLWGYsu1\n"
+ "nWD5RiZi78HFGiibmhH7QrEPfGlo2eofuUga6naoBUROqkmMCIL8n1HZ/Ur0oGny\n"
+ "vQCj1AyrfOhuVC53AgMBAAGjQjBAMAsGA1UdDwQEAwIEMDATBgNVHSUEDDAKBggr\n"
+ "BgEFBQcDATAcBgNVHREEFTATgRFlbWFpbEBleGFtcGxlLmNvbTANBgkqhkiG9w0B\n"
+ "AQsFAAOCAQEAZ0IsqRrsEmJ6Fa9Yo6PQtrKJrejN2TTDddVgyLQdokzWh/25JFad\n"
+ "NCMYPH5KjTUyKf96hJDlDayjbKk1PMMhSZMU5OG9NOuGMH/dQttruG1ojse7KIKg\n"
+ "yHDQrfq5Exxgfa7CMHRKAoTCY7JZhSLyVbTMVhmGfuUDad/RA86ZisXycp0ZmS97\n"
+ "qDkAmzFL0sL0ZUWNNUh4ZUWvCUZwiuN08z70NjGqXMTDCf68p3SYxbII0xTfScgf\n"
+ "aQ/A/hD7IbGGTexeoTwpEj01DNvefbQV6//neo32/R5XD0D5jn3TCgZcMThA6H3a\n"
+ "VkEghVg+s7uMfL/UEebOBQWXQJ/uVoknMA==\n"
+ "-----END CERTIFICATE-----"
).getBytes();
    /**
     * Private constructor: this class is a static holder of test-certificate
     * constants and is never instantiated.
     */
    private CertificatesToPlayWith() {
        // Intentionally empty.
    }
}
|
googleapis/google-cloud-java | 37,749 | java-networkconnectivity/proto-google-cloud-networkconnectivity-v1/src/main/java/com/google/cloud/networkconnectivity/v1/ListHubsRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/networkconnectivity/v1/hub.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.networkconnectivity.v1;
/**
*
*
* <pre>
* Request for
* [HubService.ListHubs][google.cloud.networkconnectivity.v1.HubService.ListHubs]
* method.
* </pre>
*
* Protobuf type {@code google.cloud.networkconnectivity.v1.ListHubsRequest}
*/
public final class ListHubsRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.networkconnectivity.v1.ListHubsRequest)
ListHubsRequestOrBuilder {
  // Serialization version pinned at 0; protobuf messages rely on their own
  // wire format rather than Java serialization versioning.
  private static final long serialVersionUID = 0L;

  // Use ListHubsRequest.newBuilder() to construct.
  private ListHubsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // Default instance constructor: initializes all string fields to the
  // proto3 default (empty string).
  private ListHubsRequest() {
    parent_ = "";
    pageToken_ = "";
    filter_ = "";
    orderBy_ = "";
  }

  // Invoked reflectively by the protobuf runtime to create new instances;
  // the UnusedPrivateParameter disambiguates this overload from public APIs.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListHubsRequest();
  }

  // Returns the message's type descriptor from the generated file descriptor.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.networkconnectivity.v1.HubProto
        .internal_static_google_cloud_networkconnectivity_v1_ListHubsRequest_descriptor;
  }

  // Wires the reflective field accessors used by the protobuf runtime to the
  // ListHubsRequest message and its Builder.
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.networkconnectivity.v1.HubProto
        .internal_static_google_cloud_networkconnectivity_v1_ListHubsRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.networkconnectivity.v1.ListHubsRequest.class,
            com.google.cloud.networkconnectivity.v1.ListHubsRequest.Builder.class);
  }
public static final int PARENT_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The parent resource's name.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
@java.lang.Override
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. The parent resource's name.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
  public static final int PAGE_SIZE_FIELD_NUMBER = 2;

  // proto3 int32 default: 0 means "server chooses the page size".
  private int pageSize_ = 0;

  /**
   *
   *
   * <pre>
   * The maximum number of results per page to return.
   * </pre>
   *
   * <code>int32 page_size = 2;</code>
   *
   * @return The pageSize.
   */
  @java.lang.Override
  public int getPageSize() {
    return pageSize_;
  }
  public static final int PAGE_TOKEN_FIELD_NUMBER = 3;
  // Holds either a String or its cached ByteString form; see getPageToken.
  @SuppressWarnings("serial")
  private volatile java.lang.Object pageToken_ = "";
  /**
   *
   *
   * <pre>
   * The page token.
   * </pre>
   *
   * <code>string page_token = 3;</code>
   *
   * @return The pageToken.
   */
  @java.lang.Override
  public java.lang.String getPageToken() {
    java.lang.Object ref = pageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Lazily decode the wire bytes and cache the String in place.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      pageToken_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * The page token.
   * </pre>
   *
   * <code>string page_token = 3;</code>
   *
   * @return The bytes for pageToken.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getPageTokenBytes() {
    java.lang.Object ref = pageToken_;
    if (ref instanceof java.lang.String) {
      // Lazily encode to UTF-8 and cache the ByteString in place.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      pageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int FILTER_FIELD_NUMBER = 4;
  // Holds either a String or its cached ByteString form; see getFilter.
  @SuppressWarnings("serial")
  private volatile java.lang.Object filter_ = "";
  /**
   *
   *
   * <pre>
   * An expression that filters the list of results.
   * </pre>
   *
   * <code>string filter = 4;</code>
   *
   * @return The filter.
   */
  @java.lang.Override
  public java.lang.String getFilter() {
    java.lang.Object ref = filter_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Lazily decode the wire bytes and cache the String in place.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      filter_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * An expression that filters the list of results.
   * </pre>
   *
   * <code>string filter = 4;</code>
   *
   * @return The bytes for filter.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getFilterBytes() {
    java.lang.Object ref = filter_;
    if (ref instanceof java.lang.String) {
      // Lazily encode to UTF-8 and cache the ByteString in place.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      filter_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int ORDER_BY_FIELD_NUMBER = 5;
  // Holds either a String or its cached ByteString form; see getOrderBy.
  @SuppressWarnings("serial")
  private volatile java.lang.Object orderBy_ = "";
  /**
   *
   *
   * <pre>
   * Sort the results by a certain order.
   * </pre>
   *
   * <code>string order_by = 5;</code>
   *
   * @return The orderBy.
   */
  @java.lang.Override
  public java.lang.String getOrderBy() {
    java.lang.Object ref = orderBy_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Lazily decode the wire bytes and cache the String in place.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      orderBy_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Sort the results by a certain order.
   * </pre>
   *
   * <code>string order_by = 5;</code>
   *
   * @return The bytes for orderBy.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getOrderByBytes() {
    java.lang.Object ref = orderBy_;
    if (ref instanceof java.lang.String) {
      // Lazily encode to UTF-8 and cache the ByteString in place.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      orderBy_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Memo for isInitialized(): -1 = not yet computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // Proto3 message with no required fields: always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Proto3 wire format: fields at their default value ("" / 0) are omitted.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    if (pageSize_ != 0) {
      output.writeInt32(2, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 4, filter_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(orderBy_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 5, orderBy_);
    }
    // Preserve any fields this binary does not know about.
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // memoizedSize caches the result; -1 (inherited sentinel) means "unset".
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    // Mirrors writeTo: only non-default fields contribute to the size.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (pageSize_ != 0) {
      size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, filter_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(orderBy_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, orderBy_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.networkconnectivity.v1.ListHubsRequest)) {
return super.equals(obj);
}
com.google.cloud.networkconnectivity.v1.ListHubsRequest other =
(com.google.cloud.networkconnectivity.v1.ListHubsRequest) obj;
if (!getParent().equals(other.getParent())) return false;
if (getPageSize() != other.getPageSize()) return false;
if (!getPageToken().equals(other.getPageToken())) return false;
if (!getFilter().equals(other.getFilter())) return false;
if (!getOrderBy().equals(other.getOrderBy())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
  @java.lang.Override
  public int hashCode() {
    // 0 doubles as the "not yet computed" sentinel for the memo; the mixing
    // below cannot produce 0 for this message, so recomputation is at worst
    // a benign repeat.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    // Standard protobuf-generated mixing: 37 * hash + field number,
    // then 53 * hash + field value hash, in field-number order.
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
    hash = (53 * hash) + getPageSize();
    hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getPageToken().hashCode();
    hash = (37 * hash) + FILTER_FIELD_NUMBER;
    hash = (53 * hash) + getFilter().hashCode();
    hash = (37 * hash) + ORDER_BY_FIELD_NUMBER;
    hash = (53 * hash) + getOrderBy().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Static parse entry points for every supported input source. The
  // byte-oriented overloads delegate to PARSER directly; the stream-based
  // ones go through GeneratedMessageV3 helpers that convert IO failures to
  // java.io.IOException.
  public static com.google.cloud.networkconnectivity.v1.ListHubsRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.networkconnectivity.v1.ListHubsRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.networkconnectivity.v1.ListHubsRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.networkconnectivity.v1.ListHubsRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.networkconnectivity.v1.ListHubsRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.networkconnectivity.v1.ListHubsRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.networkconnectivity.v1.ListHubsRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.networkconnectivity.v1.ListHubsRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Delimited variants read a varint length prefix before the message body.
  public static com.google.cloud.networkconnectivity.v1.ListHubsRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.networkconnectivity.v1.ListHubsRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.networkconnectivity.v1.ListHubsRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.networkconnectivity.v1.ListHubsRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  // Creates a builder pre-populated with the given message's field values.
  public static Builder newBuilder(
      com.google.cloud.networkconnectivity.v1.ListHubsRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // The default instance yields an empty builder; any other instance seeds
    // the builder with its current field values via mergeFrom.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Request for
   * [HubService.ListHubs][google.cloud.networkconnectivity.v1.HubService.ListHubs]
   * method.
   * </pre>
   *
   * Protobuf type {@code google.cloud.networkconnectivity.v1.ListHubsRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.networkconnectivity.v1.ListHubsRequest)
      com.google.cloud.networkconnectivity.v1.ListHubsRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.networkconnectivity.v1.HubProto
          .internal_static_google_cloud_networkconnectivity_v1_ListHubsRequest_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.networkconnectivity.v1.HubProto
          .internal_static_google_cloud_networkconnectivity_v1_ListHubsRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.networkconnectivity.v1.ListHubsRequest.class,
              com.google.cloud.networkconnectivity.v1.ListHubsRequest.Builder.class);
    }
    // Construct using com.google.cloud.networkconnectivity.v1.ListHubsRequest.newBuilder()
    private Builder() {}
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      // Reset every field to its proto3 default and mark all as unset.
      bitField0_ = 0;
      parent_ = "";
      pageSize_ = 0;
      pageToken_ = "";
      filter_ = "";
      orderBy_ = "";
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.networkconnectivity.v1.HubProto
          .internal_static_google_cloud_networkconnectivity_v1_ListHubsRequest_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.networkconnectivity.v1.ListHubsRequest getDefaultInstanceForType() {
      return com.google.cloud.networkconnectivity.v1.ListHubsRequest.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.networkconnectivity.v1.ListHubsRequest build() {
      com.google.cloud.networkconnectivity.v1.ListHubsRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.networkconnectivity.v1.ListHubsRequest buildPartial() {
      com.google.cloud.networkconnectivity.v1.ListHubsRequest result =
          new com.google.cloud.networkconnectivity.v1.ListHubsRequest(this);
      // Copy fields only if at least one was explicitly set.
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Copies each explicitly-set field (per its bit in bitField0_) into result.
    private void buildPartial0(com.google.cloud.networkconnectivity.v1.ListHubsRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.parent_ = parent_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.pageSize_ = pageSize_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.pageToken_ = pageToken_;
      }
      if (((from_bitField0_ & 0x00000008) != 0)) {
        result.filter_ = filter_;
      }
      if (((from_bitField0_ & 0x00000010) != 0)) {
        result.orderBy_ = orderBy_;
      }
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      // Dispatch to the typed merge when possible; otherwise fall back to the
      // reflective merge in the superclass.
      if (other instanceof com.google.cloud.networkconnectivity.v1.ListHubsRequest) {
        return mergeFrom((com.google.cloud.networkconnectivity.v1.ListHubsRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    public Builder mergeFrom(com.google.cloud.networkconnectivity.v1.ListHubsRequest other) {
      if (other == com.google.cloud.networkconnectivity.v1.ListHubsRequest.getDefaultInstance())
        return this;
      // Proto3 merge semantics: only non-default values from `other` overwrite.
      if (!other.getParent().isEmpty()) {
        parent_ = other.parent_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (other.getPageSize() != 0) {
        setPageSize(other.getPageSize());
      }
      if (!other.getPageToken().isEmpty()) {
        pageToken_ = other.pageToken_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      if (!other.getFilter().isEmpty()) {
        filter_ = other.filter_;
        bitField0_ |= 0x00000008;
        onChanged();
      }
      if (!other.getOrderBy().isEmpty()) {
        orderBy_ = other.orderBy_;
        bitField0_ |= 0x00000010;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          // Each case value is (field_number << 3) | wire_type; e.g. 10 is
          // field 1 / length-delimited, 16 is field 2 / varint.
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                parent_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 16:
              {
                pageSize_ = input.readInt32();
                bitField0_ |= 0x00000002;
                break;
              } // case 16
            case 26:
              {
                pageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            case 34:
              {
                filter_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000008;
                break;
              } // case 34
            case 42:
              {
                orderBy_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000010;
                break;
              } // case 42
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Bit i of bitField0_ records that the field with number i+1 has been
    // explicitly set on this builder.
    private int bitField0_;
    private java.lang.Object parent_ = "";
    /**
     *
     *
     * <pre>
     * Required. The parent resource's name.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The parent.
     */
    public java.lang.String getParent() {
      java.lang.Object ref = parent_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        parent_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The parent resource's name.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The bytes for parent.
     */
    public com.google.protobuf.ByteString getParentBytes() {
      java.lang.Object ref = parent_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        parent_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The parent resource's name.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The parent to set.
     * @return This builder for chaining.
     */
    public Builder setParent(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The parent resource's name.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearParent() {
      parent_ = getDefaultInstance().getParent();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The parent resource's name.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The bytes for parent to set.
     * @return This builder for chaining.
     */
    public Builder setParentBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    private int pageSize_;
    /**
     *
     *
     * <pre>
     * The maximum number of results per page to return.
     * </pre>
     *
     * <code>int32 page_size = 2;</code>
     *
     * @return The pageSize.
     */
    @java.lang.Override
    public int getPageSize() {
      return pageSize_;
    }
    /**
     *
     *
     * <pre>
     * The maximum number of results per page to return.
     * </pre>
     *
     * <code>int32 page_size = 2;</code>
     *
     * @param value The pageSize to set.
     * @return This builder for chaining.
     */
    public Builder setPageSize(int value) {
      pageSize_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The maximum number of results per page to return.
     * </pre>
     *
     * <code>int32 page_size = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearPageSize() {
      bitField0_ = (bitField0_ & ~0x00000002);
      pageSize_ = 0;
      onChanged();
      return this;
    }
    private java.lang.Object pageToken_ = "";
    /**
     *
     *
     * <pre>
     * The page token.
     * </pre>
     *
     * <code>string page_token = 3;</code>
     *
     * @return The pageToken.
     */
    public java.lang.String getPageToken() {
      java.lang.Object ref = pageToken_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        pageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * The page token.
     * </pre>
     *
     * <code>string page_token = 3;</code>
     *
     * @return The bytes for pageToken.
     */
    public com.google.protobuf.ByteString getPageTokenBytes() {
      java.lang.Object ref = pageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        pageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * The page token.
     * </pre>
     *
     * <code>string page_token = 3;</code>
     *
     * @param value The pageToken to set.
     * @return This builder for chaining.
     */
    public Builder setPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      pageToken_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The page token.
     * </pre>
     *
     * <code>string page_token = 3;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearPageToken() {
      pageToken_ = getDefaultInstance().getPageToken();
      bitField0_ = (bitField0_ & ~0x00000004);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The page token.
     * </pre>
     *
     * <code>string page_token = 3;</code>
     *
     * @param value The bytes for pageToken to set.
     * @return This builder for chaining.
     */
    public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      pageToken_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    private java.lang.Object filter_ = "";
    /**
     *
     *
     * <pre>
     * An expression that filters the list of results.
     * </pre>
     *
     * <code>string filter = 4;</code>
     *
     * @return The filter.
     */
    public java.lang.String getFilter() {
      java.lang.Object ref = filter_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        filter_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * An expression that filters the list of results.
     * </pre>
     *
     * <code>string filter = 4;</code>
     *
     * @return The bytes for filter.
     */
    public com.google.protobuf.ByteString getFilterBytes() {
      java.lang.Object ref = filter_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        filter_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * An expression that filters the list of results.
     * </pre>
     *
     * <code>string filter = 4;</code>
     *
     * @param value The filter to set.
     * @return This builder for chaining.
     */
    public Builder setFilter(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      filter_ = value;
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * An expression that filters the list of results.
     * </pre>
     *
     * <code>string filter = 4;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearFilter() {
      filter_ = getDefaultInstance().getFilter();
      bitField0_ = (bitField0_ & ~0x00000008);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * An expression that filters the list of results.
     * </pre>
     *
     * <code>string filter = 4;</code>
     *
     * @param value The bytes for filter to set.
     * @return This builder for chaining.
     */
    public Builder setFilterBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      filter_ = value;
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }
    private java.lang.Object orderBy_ = "";
    /**
     *
     *
     * <pre>
     * Sort the results by a certain order.
     * </pre>
     *
     * <code>string order_by = 5;</code>
     *
     * @return The orderBy.
     */
    public java.lang.String getOrderBy() {
      java.lang.Object ref = orderBy_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        orderBy_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Sort the results by a certain order.
     * </pre>
     *
     * <code>string order_by = 5;</code>
     *
     * @return The bytes for orderBy.
     */
    public com.google.protobuf.ByteString getOrderByBytes() {
      java.lang.Object ref = orderBy_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        orderBy_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Sort the results by a certain order.
     * </pre>
     *
     * <code>string order_by = 5;</code>
     *
     * @param value The orderBy to set.
     * @return This builder for chaining.
     */
    public Builder setOrderBy(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      orderBy_ = value;
      bitField0_ |= 0x00000010;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Sort the results by a certain order.
     * </pre>
     *
     * <code>string order_by = 5;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearOrderBy() {
      orderBy_ = getDefaultInstance().getOrderBy();
      bitField0_ = (bitField0_ & ~0x00000010);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Sort the results by a certain order.
     * </pre>
     *
     * <code>string order_by = 5;</code>
     *
     * @param value The bytes for orderBy to set.
     * @return This builder for chaining.
     */
    public Builder setOrderByBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      orderBy_ = value;
      bitField0_ |= 0x00000010;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.cloud.networkconnectivity.v1.ListHubsRequest)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.networkconnectivity.v1.ListHubsRequest)
  // Singleton empty instance shared by all callers; also the prototype used
  // by newBuilder().
  private static final com.google.cloud.networkconnectivity.v1.ListHubsRequest DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.networkconnectivity.v1.ListHubsRequest();
  }
  public static com.google.cloud.networkconnectivity.v1.ListHubsRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Wire-format parser: delegates to Builder.mergeFrom and converts any
  // failure into InvalidProtocolBufferException carrying the partial message.
  private static final com.google.protobuf.Parser<ListHubsRequest> PARSER =
      new com.google.protobuf.AbstractParser<ListHubsRequest>() {
        @java.lang.Override
        public ListHubsRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  // Static and instance accessors for the shared PARSER singleton.
  public static com.google.protobuf.Parser<ListHubsRequest> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<ListHubsRequest> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.networkconnectivity.v1.ListHubsRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
// ===== googleapis/google-cloud-java: java-monitoring/proto-google-cloud-monitoring-v3/src/main/java/com/google/monitoring/v3/ListUptimeCheckConfigsRequest.java =====
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/monitoring/v3/uptime_service.proto
// Protobuf Java Version: 3.25.8
package com.google.monitoring.v3;
/**
*
*
* <pre>
* The protocol for the `ListUptimeCheckConfigs` request.
* </pre>
*
* Protobuf type {@code google.monitoring.v3.ListUptimeCheckConfigsRequest}
*/
public final class ListUptimeCheckConfigsRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.monitoring.v3.ListUptimeCheckConfigsRequest)
ListUptimeCheckConfigsRequestOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use ListUptimeCheckConfigsRequest.newBuilder() to construct.
  private ListUptimeCheckConfigsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // No-arg constructor used only for the default instance; initializes the
  // string fields to their proto3 defaults.
  private ListUptimeCheckConfigsRequest() {
    parent_ = "";
    filter_ = "";
    pageToken_ = "";
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  // Reflection hook used by the protobuf runtime to allocate fresh instances.
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListUptimeCheckConfigsRequest();
  }
  // Descriptor and field-accessor plumbing generated from
  // google/monitoring/v3/uptime_service.proto.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.monitoring.v3.UptimeServiceProto
        .internal_static_google_monitoring_v3_ListUptimeCheckConfigsRequest_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.monitoring.v3.UptimeServiceProto
        .internal_static_google_monitoring_v3_ListUptimeCheckConfigsRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.monitoring.v3.ListUptimeCheckConfigsRequest.class,
            com.google.monitoring.v3.ListUptimeCheckConfigsRequest.Builder.class);
  }
  public static final int PARENT_FIELD_NUMBER = 1;
  // Holds either the decoded String or its cached UTF-8 ByteString form;
  // the accessors below lazily convert and cache in place.
  @SuppressWarnings("serial")
  private volatile java.lang.Object parent_ = "";
  /**
   *
   *
   * <pre>
   * Required. The
   * [project](https://cloud.google.com/monitoring/api/v3#project_name) whose
   * Uptime check configurations are listed. The format is:
   *
   *     projects/[PROJECT_ID_OR_NUMBER]
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The parent.
   */
  @java.lang.Override
  public java.lang.String getParent() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Decode once and cache the String back into the field.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      parent_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Required. The
   * [project](https://cloud.google.com/monitoring/api/v3#project_name) whose
   * Uptime check configurations are listed. The format is:
   *
   *     projects/[PROJECT_ID_OR_NUMBER]
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The bytes for parent.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getParentBytes() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      // Encode once and cache the ByteString back into the field.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      parent_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int FILTER_FIELD_NUMBER = 2;
  // Holds either a String or its cached ByteString form; see getFilter.
  @SuppressWarnings("serial")
  private volatile java.lang.Object filter_ = "";
  /**
   *
   *
   * <pre>
   * If provided, this field specifies the criteria that must be met by
   * uptime checks to be included in the response.
   *
   * For more details, see [Filtering
   * syntax](https://cloud.google.com/monitoring/api/v3/sorting-and-filtering#filter_syntax).
   * </pre>
   *
   * <code>string filter = 2;</code>
   *
   * @return The filter.
   */
  @java.lang.Override
  public java.lang.String getFilter() {
    java.lang.Object ref = filter_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Lazily decode the wire bytes and cache the String in place.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      filter_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * If provided, this field specifies the criteria that must be met by
   * uptime checks to be included in the response.
   *
   * For more details, see [Filtering
   * syntax](https://cloud.google.com/monitoring/api/v3/sorting-and-filtering#filter_syntax).
   * </pre>
   *
   * <code>string filter = 2;</code>
   *
   * @return The bytes for filter.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getFilterBytes() {
    java.lang.Object ref = filter_;
    if (ref instanceof java.lang.String) {
      // Lazily encode to UTF-8 and cache the ByteString in place.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      filter_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
public static final int PAGE_SIZE_FIELD_NUMBER = 3;

// proto3 scalar: 0 is the default/unset value (and is not serialized; see writeTo).
private int pageSize_ = 0;

/**
 *
 *
 * <pre>
 * The maximum number of results to return in a single response. The server
 * may further constrain the maximum number of results returned in a single
 * page. If the page_size is <=0, the server will decide the number of results
 * to be returned.
 * </pre>
 *
 * <code>int32 page_size = 3;</code>
 *
 * @return The pageSize.
 */
@java.lang.Override
public int getPageSize() {
  return pageSize_;
}
public static final int PAGE_TOKEN_FIELD_NUMBER = 4;

// Lazily holds either a String or a ByteString; see the accessors below.
@SuppressWarnings("serial")
private volatile java.lang.Object pageToken_ = "";

/**
 *
 *
 * <pre>
 * If this field is not empty then it must contain the `nextPageToken` value
 * returned by a previous call to this method. Using this field causes the
 * method to return more results from the previous method call.
 * </pre>
 *
 * <code>string page_token = 4;</code>
 *
 * @return The pageToken.
 */
@java.lang.Override
public java.lang.String getPageToken() {
  java.lang.Object ref = pageToken_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    // Decode the wire-format ByteString once and cache the String.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    pageToken_ = s;
    return s;
  }
}

/**
 *
 *
 * <pre>
 * If this field is not empty then it must contain the `nextPageToken` value
 * returned by a previous call to this method. Using this field causes the
 * method to return more results from the previous method call.
 * </pre>
 *
 * <code>string page_token = 4;</code>
 *
 * @return The bytes for pageToken.
 */
@java.lang.Override
public com.google.protobuf.ByteString getPageTokenBytes() {
  java.lang.Object ref = pageToken_;
  if (ref instanceof java.lang.String) {
    // Encode once and cache the ByteString form.
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    pageToken_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
// Memoized initialization state: -1 = not yet computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;

/**
 * Always returns {@code true}: this message performs no required-field checks,
 * so it is unconditionally initialized (the result is memoized on first call).
 */
@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  memoizedIsInitialized = 1;
  return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  // proto3 semantics: fields equal to their default value (empty string, 0)
  // are skipped entirely. Fields are written in ascending field-number order.
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 2, filter_);
  }
  if (pageSize_ != 0) {
    output.writeInt32(3, pageSize_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 4, pageToken_);
  }
  // Preserve any fields that were unknown at parse time.
  getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
  // memoizedSize caches the computed size; -1 means "not yet computed".
  // Mirrors writeTo(): default-valued fields contribute no bytes.
  int size = memoizedSize;
  if (size != -1) return size;
  size = 0;
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, filter_);
  }
  if (pageSize_ != 0) {
    size += com.google.protobuf.CodedOutputStream.computeInt32Size(3, pageSize_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, pageToken_);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}
/**
 * Field-by-field equality over parent, filter, pageSize, pageToken, and the
 * unknown field set. Non-message types fall back to {@code super.equals}.
 */
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.monitoring.v3.ListUptimeCheckConfigsRequest)) {
    return super.equals(obj);
  }
  com.google.monitoring.v3.ListUptimeCheckConfigsRequest other =
      (com.google.monitoring.v3.ListUptimeCheckConfigsRequest) obj;
  if (!getParent().equals(other.getParent())) return false;
  if (!getFilter().equals(other.getFilter())) return false;
  if (getPageSize() != other.getPageSize()) return false;
  if (!getPageToken().equals(other.getPageToken())) return false;
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}
/**
 * Hash over the descriptor, each field (tagged by its field number), and the
 * unknown field set. The result is memoized; 0 is treated as "not computed".
 */
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  hash = (37 * hash) + PARENT_FIELD_NUMBER;
  hash = (53 * hash) + getParent().hashCode();
  hash = (37 * hash) + FILTER_FIELD_NUMBER;
  hash = (53 * hash) + getFilter().hashCode();
  hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
  hash = (53 * hash) + getPageSize();
  hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
  hash = (53 * hash) + getPageToken().hashCode();
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
// Standard generated parse entry points. All overloads delegate to PARSER
// (directly, or via GeneratedMessageV3 helpers that translate I/O errors).
public static com.google.monitoring.v3.ListUptimeCheckConfigsRequest parseFrom(
    java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.monitoring.v3.ListUptimeCheckConfigsRequest parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.monitoring.v3.ListUptimeCheckConfigsRequest parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.monitoring.v3.ListUptimeCheckConfigsRequest parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.monitoring.v3.ListUptimeCheckConfigsRequest parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.monitoring.v3.ListUptimeCheckConfigsRequest parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.monitoring.v3.ListUptimeCheckConfigsRequest parseFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.monitoring.v3.ListUptimeCheckConfigsRequest parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

// "Delimited" variants read a varint length prefix before the message body.
public static com.google.monitoring.v3.ListUptimeCheckConfigsRequest parseDelimitedFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}

public static com.google.monitoring.v3.ListUptimeCheckConfigsRequest parseDelimitedFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.monitoring.v3.ListUptimeCheckConfigsRequest parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.monitoring.v3.ListUptimeCheckConfigsRequest parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}

// New builders start from the default instance's state.
public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}

public static Builder newBuilder(
    com.google.monitoring.v3.ListUptimeCheckConfigsRequest prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}

@java.lang.Override
public Builder toBuilder() {
  // The default instance yields an empty builder; otherwise copy this message in.
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}

@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
*
*
* <pre>
* The protocol for the `ListUptimeCheckConfigs` request.
* </pre>
*
* Protobuf type {@code google.monitoring.v3.ListUptimeCheckConfigsRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.monitoring.v3.ListUptimeCheckConfigsRequest)
com.google.monitoring.v3.ListUptimeCheckConfigsRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.monitoring.v3.UptimeServiceProto
.internal_static_google_monitoring_v3_ListUptimeCheckConfigsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.monitoring.v3.UptimeServiceProto
.internal_static_google_monitoring_v3_ListUptimeCheckConfigsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.monitoring.v3.ListUptimeCheckConfigsRequest.class,
com.google.monitoring.v3.ListUptimeCheckConfigsRequest.Builder.class);
}
// Construct using com.google.monitoring.v3.ListUptimeCheckConfigsRequest.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
  super.clear();
  // Reset the "field set" bits and restore every field to its proto3 default.
  bitField0_ = 0;
  parent_ = "";
  filter_ = "";
  pageSize_ = 0;
  pageToken_ = "";
  return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.monitoring.v3.UptimeServiceProto
.internal_static_google_monitoring_v3_ListUptimeCheckConfigsRequest_descriptor;
}
@java.lang.Override
public com.google.monitoring.v3.ListUptimeCheckConfigsRequest getDefaultInstanceForType() {
return com.google.monitoring.v3.ListUptimeCheckConfigsRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.monitoring.v3.ListUptimeCheckConfigsRequest build() {
com.google.monitoring.v3.ListUptimeCheckConfigsRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.protobuf.Parser buildPartial-disambiguator() { return null; } // (placeholder removed)
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.monitoring.v3.ListUptimeCheckConfigsRequest) {
return mergeFrom((com.google.monitoring.v3.ListUptimeCheckConfigsRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
/**
 * Merges {@code other} into this builder. Per proto3 merge semantics, only
 * fields set to a non-default value in {@code other} (non-empty strings,
 * non-zero page size) overwrite this builder's values; unknown fields are
 * merged unconditionally.
 */
public Builder mergeFrom(com.google.monitoring.v3.ListUptimeCheckConfigsRequest other) {
  if (other == com.google.monitoring.v3.ListUptimeCheckConfigsRequest.getDefaultInstance())
    return this;
  if (!other.getParent().isEmpty()) {
    parent_ = other.parent_;
    bitField0_ |= 0x00000001;
    onChanged();
  }
  if (!other.getFilter().isEmpty()) {
    filter_ = other.filter_;
    bitField0_ |= 0x00000002;
    onChanged();
  }
  if (other.getPageSize() != 0) {
    setPageSize(other.getPageSize());
  }
  if (!other.getPageToken().isEmpty()) {
    pageToken_ = other.pageToken_;
    bitField0_ |= 0x00000008;
    onChanged();
  }
  this.mergeUnknownFields(other.getUnknownFields());
  onChanged();
  return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
/**
 * Parses fields from the wire and merges them into this builder.
 * Dispatch is on the raw tag: tag = (field_number &lt;&lt; 3) | wire_type,
 * so 10 = field 1 (length-delimited), 18 = field 2, 24 = field 3 (varint),
 * 34 = field 4. Unrecognized tags are preserved as unknown fields.
 */
@java.lang.Override
public Builder mergeFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          // Tag 0 marks end of input.
          done = true;
          break;
        case 10:
          {
            parent_ = input.readStringRequireUtf8();
            bitField0_ |= 0x00000001;
            break;
          } // case 10
        case 18:
          {
            filter_ = input.readStringRequireUtf8();
            bitField0_ |= 0x00000002;
            break;
          } // case 18
        case 24:
          {
            pageSize_ = input.readInt32();
            bitField0_ |= 0x00000004;
            break;
          } // case 24
        case 34:
          {
            pageToken_ = input.readStringRequireUtf8();
            bitField0_ |= 0x00000008;
            break;
          } // case 34
        default:
          {
            if (!super.parseUnknownField(input, extensionRegistry, tag)) {
              done = true; // was an endgroup tag
            }
            break;
          } // default:
      } // switch (tag)
    } // while (!done)
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.unwrapIOException();
  } finally {
    // Notify parents even on failure so partially-merged state is observed.
    onChanged();
  } // finally
  return this;
}
private int bitField0_;
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The
* [project](https://cloud.google.com/monitoring/api/v3#project_name) whose
* Uptime check configurations are listed. The format is:
*
* projects/[PROJECT_ID_OR_NUMBER]
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The
* [project](https://cloud.google.com/monitoring/api/v3#project_name) whose
* Uptime check configurations are listed. The format is:
*
* projects/[PROJECT_ID_OR_NUMBER]
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The
* [project](https://cloud.google.com/monitoring/api/v3#project_name) whose
* Uptime check configurations are listed. The format is:
*
* projects/[PROJECT_ID_OR_NUMBER]
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
public Builder setParent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The
* [project](https://cloud.google.com/monitoring/api/v3#project_name) whose
* Uptime check configurations are listed. The format is:
*
* projects/[PROJECT_ID_OR_NUMBER]
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearParent() {
parent_ = getDefaultInstance().getParent();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The
* [project](https://cloud.google.com/monitoring/api/v3#project_name) whose
* Uptime check configurations are listed. The format is:
*
* projects/[PROJECT_ID_OR_NUMBER]
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
*/
public Builder setParentBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private java.lang.Object filter_ = "";
/**
*
*
* <pre>
* If provided, this field specifies the criteria that must be met by
* uptime checks to be included in the response.
*
* For more details, see [Filtering
* syntax](https://cloud.google.com/monitoring/api/v3/sorting-and-filtering#filter_syntax).
* </pre>
*
* <code>string filter = 2;</code>
*
* @return The filter.
*/
public java.lang.String getFilter() {
java.lang.Object ref = filter_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
filter_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* If provided, this field specifies the criteria that must be met by
* uptime checks to be included in the response.
*
* For more details, see [Filtering
* syntax](https://cloud.google.com/monitoring/api/v3/sorting-and-filtering#filter_syntax).
* </pre>
*
* <code>string filter = 2;</code>
*
* @return The bytes for filter.
*/
public com.google.protobuf.ByteString getFilterBytes() {
java.lang.Object ref = filter_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
filter_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* If provided, this field specifies the criteria that must be met by
* uptime checks to be included in the response.
*
* For more details, see [Filtering
* syntax](https://cloud.google.com/monitoring/api/v3/sorting-and-filtering#filter_syntax).
* </pre>
*
* <code>string filter = 2;</code>
*
* @param value The filter to set.
* @return This builder for chaining.
*/
public Builder setFilter(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
filter_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* If provided, this field specifies the criteria that must be met by
* uptime checks to be included in the response.
*
* For more details, see [Filtering
* syntax](https://cloud.google.com/monitoring/api/v3/sorting-and-filtering#filter_syntax).
* </pre>
*
* <code>string filter = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearFilter() {
filter_ = getDefaultInstance().getFilter();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* If provided, this field specifies the criteria that must be met by
* uptime checks to be included in the response.
*
* For more details, see [Filtering
* syntax](https://cloud.google.com/monitoring/api/v3/sorting-and-filtering#filter_syntax).
* </pre>
*
* <code>string filter = 2;</code>
*
* @param value The bytes for filter to set.
* @return This builder for chaining.
*/
public Builder setFilterBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
filter_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private int pageSize_;
/**
*
*
* <pre>
* The maximum number of results to return in a single response. The server
* may further constrain the maximum number of results returned in a single
* page. If the page_size is <=0, the server will decide the number of results
* to be returned.
* </pre>
*
* <code>int32 page_size = 3;</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
/**
*
*
* <pre>
* The maximum number of results to return in a single response. The server
* may further constrain the maximum number of results returned in a single
* page. If the page_size is <=0, the server will decide the number of results
* to be returned.
* </pre>
*
* <code>int32 page_size = 3;</code>
*
* @param value The pageSize to set.
* @return This builder for chaining.
*/
public Builder setPageSize(int value) {
pageSize_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* The maximum number of results to return in a single response. The server
* may further constrain the maximum number of results returned in a single
* page. If the page_size is <=0, the server will decide the number of results
* to be returned.
* </pre>
*
* <code>int32 page_size = 3;</code>
*
* @return This builder for chaining.
*/
public Builder clearPageSize() {
bitField0_ = (bitField0_ & ~0x00000004);
pageSize_ = 0;
onChanged();
return this;
}
private java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* If this field is not empty then it must contain the `nextPageToken` value
* returned by a previous call to this method. Using this field causes the
* method to return more results from the previous method call.
* </pre>
*
* <code>string page_token = 4;</code>
*
* @return The pageToken.
*/
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* If this field is not empty then it must contain the `nextPageToken` value
* returned by a previous call to this method. Using this field causes the
* method to return more results from the previous method call.
* </pre>
*
* <code>string page_token = 4;</code>
*
* @return The bytes for pageToken.
*/
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* If this field is not empty then it must contain the `nextPageToken` value
* returned by a previous call to this method. Using this field causes the
* method to return more results from the previous method call.
* </pre>
*
* <code>string page_token = 4;</code>
*
* @param value The pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
pageToken_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
*
*
* <pre>
* If this field is not empty then it must contain the `nextPageToken` value
* returned by a previous call to this method. Using this field causes the
* method to return more results from the previous method call.
* </pre>
*
* <code>string page_token = 4;</code>
*
* @return This builder for chaining.
*/
public Builder clearPageToken() {
pageToken_ = getDefaultInstance().getPageToken();
bitField0_ = (bitField0_ & ~0x00000008);
onChanged();
return this;
}
/**
*
*
* <pre>
* If this field is not empty then it must contain the `nextPageToken` value
* returned by a previous call to this method. Using this field causes the
* method to return more results from the previous method call.
* </pre>
*
* <code>string page_token = 4;</code>
*
* @param value The bytes for pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
pageToken_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.monitoring.v3.ListUptimeCheckConfigsRequest)
}
// @@protoc_insertion_point(class_scope:google.monitoring.v3.ListUptimeCheckConfigsRequest)
// Singleton default (all-fields-default) instance, created at class load.
private static final com.google.monitoring.v3.ListUptimeCheckConfigsRequest DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.monitoring.v3.ListUptimeCheckConfigsRequest();
}

public static com.google.monitoring.v3.ListUptimeCheckConfigsRequest getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Shared parser used by all parseFrom overloads. On failure it attaches the
// partially-built message to the thrown exception so callers can inspect
// whatever was successfully parsed.
private static final com.google.protobuf.Parser<ListUptimeCheckConfigsRequest> PARSER =
    new com.google.protobuf.AbstractParser<ListUptimeCheckConfigsRequest>() {
      @java.lang.Override
      public ListUptimeCheckConfigsRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          // Wrap plain I/O errors so the parser contract (IPBE only) holds.
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

public static com.google.protobuf.Parser<ListUptimeCheckConfigsRequest> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<ListUptimeCheckConfigsRequest> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.monitoring.v3.ListUptimeCheckConfigsRequest getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 37,961 | java-policysimulator/proto-google-cloud-policysimulator-v1/src/main/java/com/google/cloud/policysimulator/v1/AccessTuple.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/policysimulator/v1/explanations.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.policysimulator.v1;
/**
*
*
* <pre>
* Information about the principal, resource, and permission to check.
* </pre>
*
* Protobuf type {@code google.cloud.policysimulator.v1.AccessTuple}
*/
public final class AccessTuple extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.policysimulator.v1.AccessTuple)
AccessTupleOrBuilder {
private static final long serialVersionUID = 0L;

// Use AccessTuple.newBuilder() to construct.
private AccessTuple(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}

// Initializes every string field to its proto3 default (empty string).
private AccessTuple() {
  principal_ = "";
  fullResourceName_ = "";
  permission_ = "";
}

@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  return new AccessTuple();
}
// Descriptor and field-accessor table are owned by the generated
// ExplanationsProto outer class for explanations.proto.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.policysimulator.v1.ExplanationsProto
      .internal_static_google_cloud_policysimulator_v1_AccessTuple_descriptor;
}

@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.policysimulator.v1.ExplanationsProto
      .internal_static_google_cloud_policysimulator_v1_AccessTuple_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.policysimulator.v1.AccessTuple.class,
          com.google.cloud.policysimulator.v1.AccessTuple.Builder.class);
}
public static final int PRINCIPAL_FIELD_NUMBER = 1;

// Lazily holds either a String or a ByteString; the accessors below convert
// and cache the requested representation (generated lazy-string pattern).
@SuppressWarnings("serial")
private volatile java.lang.Object principal_ = "";

/**
 *
 *
 * <pre>
 * Required. The principal whose access you want to check, in the form of
 * the email address that represents that principal. For example,
 * `alice&#64;example.com` or
 * `my-service-account&#64;my-project.iam.gserviceaccount.com`.
 *
 * The principal must be a Google Account or a service account. Other types of
 * principals are not supported.
 * </pre>
 *
 * <code>string principal = 1 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @return The principal.
 */
@java.lang.Override
public java.lang.String getPrincipal() {
  java.lang.Object ref = principal_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    // Decode the wire-format ByteString once and cache the String.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    principal_ = s;
    return s;
  }
}

/**
 *
 *
 * <pre>
 * Required. The principal whose access you want to check, in the form of
 * the email address that represents that principal. For example,
 * `alice&#64;example.com` or
 * `my-service-account&#64;my-project.iam.gserviceaccount.com`.
 *
 * The principal must be a Google Account or a service account. Other types of
 * principals are not supported.
 * </pre>
 *
 * <code>string principal = 1 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @return The bytes for principal.
 */
@java.lang.Override
public com.google.protobuf.ByteString getPrincipalBytes() {
  java.lang.Object ref = principal_;
  if (ref instanceof java.lang.String) {
    // Encode once and cache the ByteString form.
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    principal_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
  // Wire-format field number for `full_resource_name`.
  public static final int FULL_RESOURCE_NAME_FIELD_NUMBER = 2;
  // Lazy String/ByteString holder; see principal_ for the caching idiom.
  @SuppressWarnings("serial")
  private volatile java.lang.Object fullResourceName_ = "";
/**
*
*
* <pre>
* Required. The full resource name that identifies the resource. For example,
* `//compute.googleapis.com/projects/my-project/zones/us-central1-a/instances/my-instance`.
*
* For examples of full resource names for Google Cloud services, see
* https://cloud.google.com/iam/help/troubleshooter/full-resource-names.
* </pre>
*
* <code>string full_resource_name = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The fullResourceName.
*/
  @java.lang.Override
  public java.lang.String getFullResourceName() {
    java.lang.Object ref = fullResourceName_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Decode once and cache the immutable String form.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      fullResourceName_ = s;
      return s;
    }
  }
/**
*
*
* <pre>
* Required. The full resource name that identifies the resource. For example,
* `//compute.googleapis.com/projects/my-project/zones/us-central1-a/instances/my-instance`.
*
* For examples of full resource names for Google Cloud services, see
* https://cloud.google.com/iam/help/troubleshooter/full-resource-names.
* </pre>
*
* <code>string full_resource_name = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for fullResourceName.
*/
  @java.lang.Override
  public com.google.protobuf.ByteString getFullResourceNameBytes() {
    java.lang.Object ref = fullResourceName_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      // Cache the encoded ByteString form for subsequent calls.
      fullResourceName_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Wire-format field number for `permission`.
  public static final int PERMISSION_FIELD_NUMBER = 3;
  // Lazy String/ByteString holder; see principal_ for the caching idiom.
  @SuppressWarnings("serial")
  private volatile java.lang.Object permission_ = "";
/**
*
*
* <pre>
* Required. The IAM permission to check for the specified principal and
* resource.
*
* For a complete list of IAM permissions, see
* https://cloud.google.com/iam/help/permissions/reference.
*
* For a complete list of predefined IAM roles and the permissions in each
* role, see https://cloud.google.com/iam/help/roles/reference.
* </pre>
*
* <code>string permission = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The permission.
*/
  @java.lang.Override
  public java.lang.String getPermission() {
    java.lang.Object ref = permission_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Decode once and cache the immutable String form.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      permission_ = s;
      return s;
    }
  }
/**
*
*
* <pre>
* Required. The IAM permission to check for the specified principal and
* resource.
*
* For a complete list of IAM permissions, see
* https://cloud.google.com/iam/help/permissions/reference.
*
* For a complete list of predefined IAM roles and the permissions in each
* role, see https://cloud.google.com/iam/help/roles/reference.
* </pre>
*
* <code>string permission = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for permission.
*/
  @java.lang.Override
  public com.google.protobuf.ByteString getPermissionBytes() {
    java.lang.Object ref = permission_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      // Cache the encoded ByteString form for subsequent calls.
      permission_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Memoized result of isInitialized(): -1 = not yet computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // proto3 message with no required fields: always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes the message in field-number order, skipping fields that hold
  // their proto3 default (empty string), then appends any unknown fields.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(principal_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, principal_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(fullResourceName_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, fullResourceName_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(permission_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, permission_);
    }
    getUnknownFields().writeTo(output);
  }
  // Computes (and memoizes in `memoizedSize`, inherited from the base class)
  // the exact wire size, mirroring the field-skipping logic in writeTo.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(principal_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, principal_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(fullResourceName_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, fullResourceName_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(permission_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, permission_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Value equality: field-by-field comparison, including unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.policysimulator.v1.AccessTuple)) {
      return super.equals(obj);
    }
    com.google.cloud.policysimulator.v1.AccessTuple other =
        (com.google.cloud.policysimulator.v1.AccessTuple) obj;
    if (!getPrincipal().equals(other.getPrincipal())) return false;
    if (!getFullResourceName().equals(other.getFullResourceName())) return false;
    if (!getPermission().equals(other.getPermission())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Hash consistent with equals(); memoized in `memoizedHashCode` (inherited).
  // Seeded with the descriptor hash, then mixes each field number and value.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PRINCIPAL_FIELD_NUMBER;
    hash = (53 * hash) + getPrincipal().hashCode();
    hash = (37 * hash) + FULL_RESOURCE_NAME_FIELD_NUMBER;
    hash = (53 * hash) + getFullResourceName().hashCode();
    hash = (37 * hash) + PERMISSION_FIELD_NUMBER;
    hash = (53 * hash) + getPermission().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Static parsing entry points. All overloads delegate to PARSER; the
  // stream-based variants go through GeneratedMessageV3 helpers so that
  // IOExceptions are propagated rather than wrapped.
  public static com.google.cloud.policysimulator.v1.AccessTuple parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.policysimulator.v1.AccessTuple parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.policysimulator.v1.AccessTuple parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.policysimulator.v1.AccessTuple parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.policysimulator.v1.AccessTuple parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.policysimulator.v1.AccessTuple parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.policysimulator.v1.AccessTuple parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.policysimulator.v1.AccessTuple parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // Delimited variants read a leading varint length before the message body.
  public static com.google.cloud.policysimulator.v1.AccessTuple parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.policysimulator.v1.AccessTuple parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.policysimulator.v1.AccessTuple parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.policysimulator.v1.AccessTuple parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Builder factory methods. Builders created from the default instance are
  // fresh; newBuilder(prototype) pre-populates the builder from an existing
  // message via mergeFrom.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(com.google.cloud.policysimulator.v1.AccessTuple prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    // Avoid a redundant merge when converting the (all-defaults) default instance.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Information about the principal, resource, and permission to check.
* </pre>
*
* Protobuf type {@code google.cloud.policysimulator.v1.AccessTuple}
*/
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.policysimulator.v1.AccessTuple)
      com.google.cloud.policysimulator.v1.AccessTupleOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.policysimulator.v1.ExplanationsProto
          .internal_static_google_cloud_policysimulator_v1_AccessTuple_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.policysimulator.v1.ExplanationsProto
          .internal_static_google_cloud_policysimulator_v1_AccessTuple_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.policysimulator.v1.AccessTuple.class,
              com.google.cloud.policysimulator.v1.AccessTuple.Builder.class);
    }

    // Construct using com.google.cloud.policysimulator.v1.AccessTuple.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    // Resets every field to its proto3 default and clears the has-bits.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      principal_ = "";
      fullResourceName_ = "";
      permission_ = "";
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.policysimulator.v1.ExplanationsProto
          .internal_static_google_cloud_policysimulator_v1_AccessTuple_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.policysimulator.v1.AccessTuple getDefaultInstanceForType() {
      return com.google.cloud.policysimulator.v1.AccessTuple.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.policysimulator.v1.AccessTuple build() {
      com.google.cloud.policysimulator.v1.AccessTuple result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.policysimulator.v1.AccessTuple buildPartial() {
      com.google.cloud.policysimulator.v1.AccessTuple result =
          new com.google.cloud.policysimulator.v1.AccessTuple(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies only the fields whose bitField0_ bit is set into the new message.
    private void buildPartial0(com.google.cloud.policysimulator.v1.AccessTuple result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.principal_ = principal_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.fullResourceName_ = fullResourceName_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.permission_ = permission_;
      }
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.policysimulator.v1.AccessTuple) {
        return mergeFrom((com.google.cloud.policysimulator.v1.AccessTuple) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // proto3 merge semantics: only non-empty string fields from `other`
    // overwrite this builder's values; unknown fields are merged as well.
    public Builder mergeFrom(com.google.cloud.policysimulator.v1.AccessTuple other) {
      if (other == com.google.cloud.policysimulator.v1.AccessTuple.getDefaultInstance())
        return this;
      if (!other.getPrincipal().isEmpty()) {
        principal_ = other.principal_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (!other.getFullResourceName().isEmpty()) {
        fullResourceName_ = other.fullResourceName_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (!other.getPermission().isEmpty()) {
        permission_ = other.permission_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    // Wire-format parse loop. Tag values are (field_number << 3) | wire_type,
    // so 10/18/26 are fields 1/2/3 with length-delimited (string) payloads;
    // tag 0 or an end-group marker terminates the loop.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                principal_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                fullResourceName_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 26:
              {
                permission_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    // Has-bits: bit 0 = principal, bit 1 = full_resource_name, bit 2 = permission.
    private int bitField0_;

    private java.lang.Object principal_ = "";

    /**
     *
     *
     * <pre>
     * Required. The principal whose access you want to check, in the form of
     * the email address that represents that principal. For example,
     * `alice&#64;example.com` or
     * `my-service-account&#64;my-project.iam.gserviceaccount.com`.
     *
     * The principal must be a Google Account or a service account. Other types of
     * principals are not supported.
     * </pre>
     *
     * <code>string principal = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return The principal.
     */
    public java.lang.String getPrincipal() {
      java.lang.Object ref = principal_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        principal_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Required. The principal whose access you want to check, in the form of
     * the email address that represents that principal. For example,
     * `alice&#64;example.com` or
     * `my-service-account&#64;my-project.iam.gserviceaccount.com`.
     *
     * The principal must be a Google Account or a service account. Other types of
     * principals are not supported.
     * </pre>
     *
     * <code>string principal = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return The bytes for principal.
     */
    public com.google.protobuf.ByteString getPrincipalBytes() {
      java.lang.Object ref = principal_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        principal_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Required. The principal whose access you want to check, in the form of
     * the email address that represents that principal. For example,
     * `alice&#64;example.com` or
     * `my-service-account&#64;my-project.iam.gserviceaccount.com`.
     *
     * The principal must be a Google Account or a service account. Other types of
     * principals are not supported.
     * </pre>
     *
     * <code>string principal = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @param value The principal to set.
     * @return This builder for chaining.
     */
    public Builder setPrincipal(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      principal_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. The principal whose access you want to check, in the form of
     * the email address that represents that principal. For example,
     * `alice&#64;example.com` or
     * `my-service-account&#64;my-project.iam.gserviceaccount.com`.
     *
     * The principal must be a Google Account or a service account. Other types of
     * principals are not supported.
     * </pre>
     *
     * <code>string principal = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearPrincipal() {
      principal_ = getDefaultInstance().getPrincipal();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. The principal whose access you want to check, in the form of
     * the email address that represents that principal. For example,
     * `alice&#64;example.com` or
     * `my-service-account&#64;my-project.iam.gserviceaccount.com`.
     *
     * The principal must be a Google Account or a service account. Other types of
     * principals are not supported.
     * </pre>
     *
     * <code>string principal = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @param value The bytes for principal to set.
     * @return This builder for chaining.
     */
    public Builder setPrincipalBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      principal_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    private java.lang.Object fullResourceName_ = "";

    /**
     *
     *
     * <pre>
     * Required. The full resource name that identifies the resource. For example,
     * `//compute.googleapis.com/projects/my-project/zones/us-central1-a/instances/my-instance`.
     *
     * For examples of full resource names for Google Cloud services, see
     * https://cloud.google.com/iam/help/troubleshooter/full-resource-names.
     * </pre>
     *
     * <code>string full_resource_name = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return The fullResourceName.
     */
    public java.lang.String getFullResourceName() {
      java.lang.Object ref = fullResourceName_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        fullResourceName_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Required. The full resource name that identifies the resource. For example,
     * `//compute.googleapis.com/projects/my-project/zones/us-central1-a/instances/my-instance`.
     *
     * For examples of full resource names for Google Cloud services, see
     * https://cloud.google.com/iam/help/troubleshooter/full-resource-names.
     * </pre>
     *
     * <code>string full_resource_name = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return The bytes for fullResourceName.
     */
    public com.google.protobuf.ByteString getFullResourceNameBytes() {
      java.lang.Object ref = fullResourceName_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        fullResourceName_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Required. The full resource name that identifies the resource. For example,
     * `//compute.googleapis.com/projects/my-project/zones/us-central1-a/instances/my-instance`.
     *
     * For examples of full resource names for Google Cloud services, see
     * https://cloud.google.com/iam/help/troubleshooter/full-resource-names.
     * </pre>
     *
     * <code>string full_resource_name = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @param value The fullResourceName to set.
     * @return This builder for chaining.
     */
    public Builder setFullResourceName(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      fullResourceName_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. The full resource name that identifies the resource. For example,
     * `//compute.googleapis.com/projects/my-project/zones/us-central1-a/instances/my-instance`.
     *
     * For examples of full resource names for Google Cloud services, see
     * https://cloud.google.com/iam/help/troubleshooter/full-resource-names.
     * </pre>
     *
     * <code>string full_resource_name = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearFullResourceName() {
      fullResourceName_ = getDefaultInstance().getFullResourceName();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. The full resource name that identifies the resource. For example,
     * `//compute.googleapis.com/projects/my-project/zones/us-central1-a/instances/my-instance`.
     *
     * For examples of full resource names for Google Cloud services, see
     * https://cloud.google.com/iam/help/troubleshooter/full-resource-names.
     * </pre>
     *
     * <code>string full_resource_name = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @param value The bytes for fullResourceName to set.
     * @return This builder for chaining.
     */
    public Builder setFullResourceNameBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      fullResourceName_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    private java.lang.Object permission_ = "";

    /**
     *
     *
     * <pre>
     * Required. The IAM permission to check for the specified principal and
     * resource.
     *
     * For a complete list of IAM permissions, see
     * https://cloud.google.com/iam/help/permissions/reference.
     *
     * For a complete list of predefined IAM roles and the permissions in each
     * role, see https://cloud.google.com/iam/help/roles/reference.
     * </pre>
     *
     * <code>string permission = 3 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return The permission.
     */
    public java.lang.String getPermission() {
      java.lang.Object ref = permission_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        permission_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Required. The IAM permission to check for the specified principal and
     * resource.
     *
     * For a complete list of IAM permissions, see
     * https://cloud.google.com/iam/help/permissions/reference.
     *
     * For a complete list of predefined IAM roles and the permissions in each
     * role, see https://cloud.google.com/iam/help/roles/reference.
     * </pre>
     *
     * <code>string permission = 3 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return The bytes for permission.
     */
    public com.google.protobuf.ByteString getPermissionBytes() {
      java.lang.Object ref = permission_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        permission_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Required. The IAM permission to check for the specified principal and
     * resource.
     *
     * For a complete list of IAM permissions, see
     * https://cloud.google.com/iam/help/permissions/reference.
     *
     * For a complete list of predefined IAM roles and the permissions in each
     * role, see https://cloud.google.com/iam/help/roles/reference.
     * </pre>
     *
     * <code>string permission = 3 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @param value The permission to set.
     * @return This builder for chaining.
     */
    public Builder setPermission(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      permission_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. The IAM permission to check for the specified principal and
     * resource.
     *
     * For a complete list of IAM permissions, see
     * https://cloud.google.com/iam/help/permissions/reference.
     *
     * For a complete list of predefined IAM roles and the permissions in each
     * role, see https://cloud.google.com/iam/help/roles/reference.
     * </pre>
     *
     * <code>string permission = 3 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearPermission() {
      permission_ = getDefaultInstance().getPermission();
      bitField0_ = (bitField0_ & ~0x00000004);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. The IAM permission to check for the specified principal and
     * resource.
     *
     * For a complete list of IAM permissions, see
     * https://cloud.google.com/iam/help/permissions/reference.
     *
     * For a complete list of predefined IAM roles and the permissions in each
     * role, see https://cloud.google.com/iam/help/roles/reference.
     * </pre>
     *
     * <code>string permission = 3 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @param value The bytes for permission to set.
     * @return This builder for chaining.
     */
    public Builder setPermissionBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      permission_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.policysimulator.v1.AccessTuple)
  }
// @@protoc_insertion_point(class_scope:google.cloud.policysimulator.v1.AccessTuple)
  // Shared immutable singleton with all fields at their proto3 defaults.
  private static final com.google.cloud.policysimulator.v1.AccessTuple DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.policysimulator.v1.AccessTuple();
  }

  public static com.google.cloud.policysimulator.v1.AccessTuple getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Single shared parser instance backing all parseFrom overloads. On parse
  // failure the partially-built message is attached to the thrown exception.
  private static final com.google.protobuf.Parser<AccessTuple> PARSER =
      new com.google.protobuf.AbstractParser<AccessTuple>() {
        @java.lang.Override
        public AccessTuple parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<AccessTuple> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<AccessTuple> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.policysimulator.v1.AccessTuple getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 37,904 | java-automl/proto-google-cloud-automl-v1beta1/src/main/java/com/google/cloud/automl/v1beta1/ListModelsRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/automl/v1beta1/service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.automl.v1beta1;
/**
*
*
* <pre>
* Request message for [AutoMl.ListModels][google.cloud.automl.v1beta1.AutoMl.ListModels].
* </pre>
*
* Protobuf type {@code google.cloud.automl.v1beta1.ListModelsRequest}
*/
public final class ListModelsRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.automl.v1beta1.ListModelsRequest)
ListModelsRequestOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use ListModelsRequest.newBuilder() to construct.
  private ListModelsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // No-arg constructor used only for the default instance; initializes
  // string fields to their proto3 defaults (empty string).
  private ListModelsRequest() {
    parent_ = "";
    filter_ = "";
    pageToken_ = "";
  }
  // Used by the protobuf runtime to create instances reflectively.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListModelsRequest();
  }
  // Returns the message descriptor generated from google/cloud/automl/v1beta1/service.proto.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.automl.v1beta1.AutoMlProto
        .internal_static_google_cloud_automl_v1beta1_ListModelsRequest_descriptor;
  }
  // Wires the descriptor to this class's reflective field accessors.
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.automl.v1beta1.AutoMlProto
        .internal_static_google_cloud_automl_v1beta1_ListModelsRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.automl.v1beta1.ListModelsRequest.class,
            com.google.cloud.automl.v1beta1.ListModelsRequest.Builder.class);
  }
  // Wire-format field number for `parent`.
  public static final int PARENT_FIELD_NUMBER = 1;
  // Lazy String/ByteString holder (protobuf lazy-string idiom); `volatile`
  // publishes the cached conversion safely across threads.
  @SuppressWarnings("serial")
  private volatile java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. Resource name of the project, from which to list the models.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
  @java.lang.Override
  public java.lang.String getParent() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Decode once and cache the immutable String form.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      parent_ = s;
      return s;
    }
  }
/**
*
*
* <pre>
* Required. Resource name of the project, from which to list the models.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int FILTER_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object filter_ = "";
/**
*
*
* <pre>
* An expression for filtering the results of the request.
*
* * `model_metadata` - for existence of the case (e.g.
* `video_classification_model_metadata:*`).
* * `dataset_id` - for = or !=. Some examples of using the filter are:
*
* * `image_classification_model_metadata:*` --> The model has
* `image_classification_model_metadata`.
* * `dataset_id=5` --> The model was created from a dataset with ID 5.
* </pre>
*
* <code>string filter = 3;</code>
*
* @return The filter.
*/
@java.lang.Override
public java.lang.String getFilter() {
java.lang.Object ref = filter_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
filter_ = s;
return s;
}
}
/**
*
*
* <pre>
* An expression for filtering the results of the request.
*
* * `model_metadata` - for existence of the case (e.g.
* `video_classification_model_metadata:*`).
* * `dataset_id` - for = or !=. Some examples of using the filter are:
*
* * `image_classification_model_metadata:*` --> The model has
* `image_classification_model_metadata`.
* * `dataset_id=5` --> The model was created from a dataset with ID 5.
* </pre>
*
* <code>string filter = 3;</code>
*
* @return The bytes for filter.
*/
@java.lang.Override
public com.google.protobuf.ByteString getFilterBytes() {
java.lang.Object ref = filter_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
filter_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int PAGE_SIZE_FIELD_NUMBER = 4;
private int pageSize_ = 0;
/**
*
*
* <pre>
* Requested page size.
* </pre>
*
* <code>int32 page_size = 4;</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
public static final int PAGE_TOKEN_FIELD_NUMBER = 6;
@SuppressWarnings("serial")
private volatile java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* A token identifying a page of results for the server to return
* Typically obtained via
* [ListModelsResponse.next_page_token][google.cloud.automl.v1beta1.ListModelsResponse.next_page_token] of the previous
* [AutoMl.ListModels][google.cloud.automl.v1beta1.AutoMl.ListModels] call.
* </pre>
*
* <code>string page_token = 6;</code>
*
* @return The pageToken.
*/
@java.lang.Override
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* A token identifying a page of results for the server to return
* Typically obtained via
* [ListModelsResponse.next_page_token][google.cloud.automl.v1beta1.ListModelsResponse.next_page_token] of the previous
* [AutoMl.ListModels][google.cloud.automl.v1beta1.AutoMl.ListModels] call.
* </pre>
*
* <code>string page_token = 6;</code>
*
* @return The bytes for pageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
  // Memoized result of isInitialized(): -1 = not yet computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // This message has no required fields, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Proto3 semantics: fields holding their default value (empty string, 0)
    // are skipped entirely and never appear on the wire.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, filter_);
    }
    if (pageSize_ != 0) {
      output.writeInt32(4, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 6, pageToken_);
    }
    // Preserve fields that were unknown to this generated version at parse time.
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // memoizedSize caches the computed size; -1 means "not yet computed".
    // Safe to memoize because the message is immutable once built.
    int size = memoizedSize;
    if (size != -1) return size;

    // Mirrors writeTo(): default-valued fields contribute no bytes.
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, filter_);
    }
    if (pageSize_ != 0) {
      size += com.google.protobuf.CodedOutputStream.computeInt32Size(4, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(6, pageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.automl.v1beta1.ListModelsRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.automl.v1beta1.ListModelsRequest other =
        (com.google.cloud.automl.v1beta1.ListModelsRequest) obj;

    // Field-by-field comparison; unknown fields participate so that two
    // messages parsed from different wire data never compare equal.
    if (!getParent().equals(other.getParent())) return false;
    if (!getFilter().equals(other.getFilter())) return false;
    if (getPageSize() != other.getPageSize()) return false;
    if (!getPageToken().equals(other.getPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // 0 is the "not yet computed" sentinel; safe to memoize because the
    // message is immutable once built.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    // The descriptor distinguishes message types that share field values.
    hash = (19 * hash) + getDescriptor().hashCode();
    // Each field is mixed in together with its field number, consistent
    // with the field set compared by equals().
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    hash = (37 * hash) + FILTER_FIELD_NUMBER;
    hash = (53 * hash) + getFilter().hashCode();
    hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
    hash = (53 * hash) + getPageSize();
    hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
public static com.google.cloud.automl.v1beta1.ListModelsRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.automl.v1beta1.ListModelsRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.automl.v1beta1.ListModelsRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.automl.v1beta1.ListModelsRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.automl.v1beta1.ListModelsRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.automl.v1beta1.ListModelsRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.automl.v1beta1.ListModelsRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.automl.v1beta1.ListModelsRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.automl.v1beta1.ListModelsRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.automl.v1beta1.ListModelsRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.automl.v1beta1.ListModelsRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.automl.v1beta1.ListModelsRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.automl.v1beta1.ListModelsRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request message for [AutoMl.ListModels][google.cloud.automl.v1beta1.AutoMl.ListModels].
* </pre>
*
* Protobuf type {@code google.cloud.automl.v1beta1.ListModelsRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.automl.v1beta1.ListModelsRequest)
com.google.cloud.automl.v1beta1.ListModelsRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.automl.v1beta1.AutoMlProto
.internal_static_google_cloud_automl_v1beta1_ListModelsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.automl.v1beta1.AutoMlProto
.internal_static_google_cloud_automl_v1beta1_ListModelsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.automl.v1beta1.ListModelsRequest.class,
com.google.cloud.automl.v1beta1.ListModelsRequest.Builder.class);
}
// Construct using com.google.cloud.automl.v1beta1.ListModelsRequest.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
parent_ = "";
filter_ = "";
pageSize_ = 0;
pageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.automl.v1beta1.AutoMlProto
.internal_static_google_cloud_automl_v1beta1_ListModelsRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.automl.v1beta1.ListModelsRequest getDefaultInstanceForType() {
return com.google.cloud.automl.v1beta1.ListModelsRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.automl.v1beta1.ListModelsRequest build() {
com.google.cloud.automl.v1beta1.ListModelsRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.automl.v1beta1.ListModelsRequest buildPartial() {
com.google.cloud.automl.v1beta1.ListModelsRequest result =
new com.google.cloud.automl.v1beta1.ListModelsRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
    // Copies only the fields that were explicitly set on the builder (tracked
    // bit-per-field in bitField0_) into the message being built.
    private void buildPartial0(com.google.cloud.automl.v1beta1.ListModelsRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.parent_ = parent_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.filter_ = filter_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.pageSize_ = pageSize_;
      }
      if (((from_bitField0_ & 0x00000008) != 0)) {
        result.pageToken_ = pageToken_;
      }
    }
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.automl.v1beta1.ListModelsRequest) {
return mergeFrom((com.google.cloud.automl.v1beta1.ListModelsRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.automl.v1beta1.ListModelsRequest other) {
if (other == com.google.cloud.automl.v1beta1.ListModelsRequest.getDefaultInstance())
return this;
if (!other.getParent().isEmpty()) {
parent_ = other.parent_;
bitField0_ |= 0x00000001;
onChanged();
}
if (!other.getFilter().isEmpty()) {
filter_ = other.filter_;
bitField0_ |= 0x00000002;
onChanged();
}
if (other.getPageSize() != 0) {
setPageSize(other.getPageSize());
}
if (!other.getPageToken().isEmpty()) {
pageToken_ = other.pageToken_;
bitField0_ |= 0x00000008;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          // Each wire tag is (field_number << 3) | wire_type; tag 0 marks
          // end of input.
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                // Field 1 (parent), wire type 2 (length-delimited string).
                parent_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 26:
              {
                // Field 3 (filter), wire type 2 (length-delimited string).
                filter_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 26
            case 32:
              {
                // Field 4 (page_size), wire type 0 (varint).
                pageSize_ = input.readInt32();
                bitField0_ |= 0x00000004;
                break;
              } // case 32
            case 50:
              {
                // Field 6 (page_token), wire type 2 (length-delimited string).
                pageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000008;
                break;
              } // case 50
            default:
              {
                // Unrecognized fields are preserved in the unknown-field set.
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        // Notify parents of the partial state even when parsing failed.
        onChanged();
      } // finally
      return this;
    }
private int bitField0_;
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. Resource name of the project, from which to list the models.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. Resource name of the project, from which to list the models.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. Resource name of the project, from which to list the models.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
public Builder setParent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Resource name of the project, from which to list the models.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearParent() {
parent_ = getDefaultInstance().getParent();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Resource name of the project, from which to list the models.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
*/
public Builder setParentBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private java.lang.Object filter_ = "";
/**
*
*
* <pre>
* An expression for filtering the results of the request.
*
* * `model_metadata` - for existence of the case (e.g.
* `video_classification_model_metadata:*`).
* * `dataset_id` - for = or !=. Some examples of using the filter are:
*
* * `image_classification_model_metadata:*` --> The model has
* `image_classification_model_metadata`.
* * `dataset_id=5` --> The model was created from a dataset with ID 5.
* </pre>
*
* <code>string filter = 3;</code>
*
* @return The filter.
*/
public java.lang.String getFilter() {
java.lang.Object ref = filter_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
filter_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* An expression for filtering the results of the request.
*
* * `model_metadata` - for existence of the case (e.g.
* `video_classification_model_metadata:*`).
* * `dataset_id` - for = or !=. Some examples of using the filter are:
*
* * `image_classification_model_metadata:*` --> The model has
* `image_classification_model_metadata`.
* * `dataset_id=5` --> The model was created from a dataset with ID 5.
* </pre>
*
* <code>string filter = 3;</code>
*
* @return The bytes for filter.
*/
public com.google.protobuf.ByteString getFilterBytes() {
java.lang.Object ref = filter_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
filter_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* An expression for filtering the results of the request.
*
* * `model_metadata` - for existence of the case (e.g.
* `video_classification_model_metadata:*`).
* * `dataset_id` - for = or !=. Some examples of using the filter are:
*
* * `image_classification_model_metadata:*` --> The model has
* `image_classification_model_metadata`.
* * `dataset_id=5` --> The model was created from a dataset with ID 5.
* </pre>
*
* <code>string filter = 3;</code>
*
* @param value The filter to set.
* @return This builder for chaining.
*/
public Builder setFilter(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
filter_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* An expression for filtering the results of the request.
*
* * `model_metadata` - for existence of the case (e.g.
* `video_classification_model_metadata:*`).
* * `dataset_id` - for = or !=. Some examples of using the filter are:
*
* * `image_classification_model_metadata:*` --> The model has
* `image_classification_model_metadata`.
* * `dataset_id=5` --> The model was created from a dataset with ID 5.
* </pre>
*
* <code>string filter = 3;</code>
*
* @return This builder for chaining.
*/
public Builder clearFilter() {
filter_ = getDefaultInstance().getFilter();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* An expression for filtering the results of the request.
*
* * `model_metadata` - for existence of the case (e.g.
* `video_classification_model_metadata:*`).
* * `dataset_id` - for = or !=. Some examples of using the filter are:
*
* * `image_classification_model_metadata:*` --> The model has
* `image_classification_model_metadata`.
* * `dataset_id=5` --> The model was created from a dataset with ID 5.
* </pre>
*
* <code>string filter = 3;</code>
*
* @param value The bytes for filter to set.
* @return This builder for chaining.
*/
public Builder setFilterBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
filter_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private int pageSize_;
/**
*
*
* <pre>
* Requested page size.
* </pre>
*
* <code>int32 page_size = 4;</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
/**
*
*
* <pre>
* Requested page size.
* </pre>
*
* <code>int32 page_size = 4;</code>
*
* @param value The pageSize to set.
* @return This builder for chaining.
*/
public Builder setPageSize(int value) {
pageSize_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Requested page size.
* </pre>
*
* <code>int32 page_size = 4;</code>
*
* @return This builder for chaining.
*/
public Builder clearPageSize() {
bitField0_ = (bitField0_ & ~0x00000004);
pageSize_ = 0;
onChanged();
return this;
}
private java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* A token identifying a page of results for the server to return
* Typically obtained via
* [ListModelsResponse.next_page_token][google.cloud.automl.v1beta1.ListModelsResponse.next_page_token] of the previous
* [AutoMl.ListModels][google.cloud.automl.v1beta1.AutoMl.ListModels] call.
* </pre>
*
* <code>string page_token = 6;</code>
*
* @return The pageToken.
*/
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* A token identifying a page of results for the server to return
* Typically obtained via
* [ListModelsResponse.next_page_token][google.cloud.automl.v1beta1.ListModelsResponse.next_page_token] of the previous
* [AutoMl.ListModels][google.cloud.automl.v1beta1.AutoMl.ListModels] call.
* </pre>
*
* <code>string page_token = 6;</code>
*
* @return The bytes for pageToken.
*/
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* A token identifying a page of results for the server to return
* Typically obtained via
* [ListModelsResponse.next_page_token][google.cloud.automl.v1beta1.ListModelsResponse.next_page_token] of the previous
* [AutoMl.ListModels][google.cloud.automl.v1beta1.AutoMl.ListModels] call.
* </pre>
*
* <code>string page_token = 6;</code>
*
* @param value The pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
pageToken_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
*
*
* <pre>
* A token identifying a page of results for the server to return
* Typically obtained via
* [ListModelsResponse.next_page_token][google.cloud.automl.v1beta1.ListModelsResponse.next_page_token] of the previous
* [AutoMl.ListModels][google.cloud.automl.v1beta1.AutoMl.ListModels] call.
* </pre>
*
* <code>string page_token = 6;</code>
*
* @return This builder for chaining.
*/
public Builder clearPageToken() {
pageToken_ = getDefaultInstance().getPageToken();
bitField0_ = (bitField0_ & ~0x00000008);
onChanged();
return this;
}
/**
*
*
* <pre>
* A token identifying a page of results for the server to return
* Typically obtained via
* [ListModelsResponse.next_page_token][google.cloud.automl.v1beta1.ListModelsResponse.next_page_token] of the previous
* [AutoMl.ListModels][google.cloud.automl.v1beta1.AutoMl.ListModels] call.
* </pre>
*
* <code>string page_token = 6;</code>
*
* @param value The bytes for pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
pageToken_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.automl.v1beta1.ListModelsRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.automl.v1beta1.ListModelsRequest)
private static final com.google.cloud.automl.v1beta1.ListModelsRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.automl.v1beta1.ListModelsRequest();
}
public static com.google.cloud.automl.v1beta1.ListModelsRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
  // Parser used by the parseFrom(...) factory methods; on failure it attaches
  // the partially-parsed message to the thrown exception so callers can
  // inspect whatever was successfully read.
  private static final com.google.protobuf.Parser<ListModelsRequest> PARSER =
      new com.google.protobuf.AbstractParser<ListModelsRequest>() {
        @java.lang.Override
        public ListModelsRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O errors so the parser API surfaces a single
            // checked exception type.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<ListModelsRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<ListModelsRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.automl.v1beta1.ListModelsRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 37,910 | java-securesourcemanager/proto-google-cloud-securesourcemanager-v1/src/main/java/com/google/cloud/securesourcemanager/v1/ListIssueCommentsResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/securesourcemanager/v1/secure_source_manager.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.securesourcemanager.v1;
/**
*
*
* <pre>
* The response to list issue comments.
* </pre>
*
* Protobuf type {@code google.cloud.securesourcemanager.v1.ListIssueCommentsResponse}
*/
public final class ListIssueCommentsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.securesourcemanager.v1.ListIssueCommentsResponse)
ListIssueCommentsResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListIssueCommentsResponse.newBuilder() to construct.
private ListIssueCommentsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListIssueCommentsResponse() {
issueComments_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListIssueCommentsResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.securesourcemanager.v1.SecureSourceManagerProto
.internal_static_google_cloud_securesourcemanager_v1_ListIssueCommentsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.securesourcemanager.v1.SecureSourceManagerProto
.internal_static_google_cloud_securesourcemanager_v1_ListIssueCommentsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.securesourcemanager.v1.ListIssueCommentsResponse.class,
com.google.cloud.securesourcemanager.v1.ListIssueCommentsResponse.Builder.class);
}
  public static final int ISSUE_COMMENTS_FIELD_NUMBER = 1;
  // Backing list for repeated field issue_comments = 1. On built messages this is
  // an unmodifiable list (see Builder.buildPartialRepeatedFields).
  @SuppressWarnings("serial")
  private java.util.List<com.google.cloud.securesourcemanager.v1.IssueComment> issueComments_;
  /**
   *
   *
   * <pre>
   * The list of issue comments.
   * </pre>
   *
   * <code>repeated .google.cloud.securesourcemanager.v1.IssueComment issue_comments = 1;</code>
   */
  @java.lang.Override
  public java.util.List<com.google.cloud.securesourcemanager.v1.IssueComment>
      getIssueCommentsList() {
    return issueComments_;
  }
  /**
   *
   *
   * <pre>
   * The list of issue comments.
   * </pre>
   *
   * <code>repeated .google.cloud.securesourcemanager.v1.IssueComment issue_comments = 1;</code>
   */
  @java.lang.Override
  public java.util.List<? extends com.google.cloud.securesourcemanager.v1.IssueCommentOrBuilder>
      getIssueCommentsOrBuilderList() {
    return issueComments_;
  }
  /**
   *
   *
   * <pre>
   * The list of issue comments.
   * </pre>
   *
   * <code>repeated .google.cloud.securesourcemanager.v1.IssueComment issue_comments = 1;</code>
   */
  @java.lang.Override
  public int getIssueCommentsCount() {
    return issueComments_.size();
  }
  /**
   *
   *
   * <pre>
   * The list of issue comments.
   * </pre>
   *
   * <code>repeated .google.cloud.securesourcemanager.v1.IssueComment issue_comments = 1;</code>
   */
  @java.lang.Override
  public com.google.cloud.securesourcemanager.v1.IssueComment getIssueComments(int index) {
    return issueComments_.get(index);
  }
  /**
   *
   *
   * <pre>
   * The list of issue comments.
   * </pre>
   *
   * <code>repeated .google.cloud.securesourcemanager.v1.IssueComment issue_comments = 1;</code>
   */
  @java.lang.Override
  public com.google.cloud.securesourcemanager.v1.IssueCommentOrBuilder getIssueCommentsOrBuilder(
      int index) {
    return issueComments_.get(index);
  }
  public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
  // Stored as Object so the field can lazily hold either a String or its
  // ByteString UTF-8 form; each getter caches the converted representation.
  @SuppressWarnings("serial")
  private volatile java.lang.Object nextPageToken_ = "";
  /**
   *
   *
   * <pre>
   * A token identifying a page of results the server should return.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The nextPageToken.
   */
  @java.lang.Override
  public java.lang.String getNextPageToken() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      // Cache the decoded String so subsequent calls skip the UTF-8 decode.
      nextPageToken_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * A token identifying a page of results the server should return.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The bytes for nextPageToken.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getNextPageTokenBytes() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      // Cache the encoded ByteString for later serialization.
      nextPageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // -1 = not computed yet, 0 = not initialized, 1 = initialized.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  // This message has no required fields, so it is always initialized; the
  // result is memoized per the generated-code convention.
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  // Serializes the message in field-number order: repeated issue_comments (1),
  // then next_page_token (2) if non-empty, then any unknown fields.
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    for (int i = 0; i < issueComments_.size(); i++) {
      output.writeMessage(1, issueComments_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  // Computes (and memoizes in memoizedSize) the wire size; mirrors the field
  // order and emptiness checks used by writeTo.
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    for (int i = 0; i < issueComments_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, issueComments_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  // Value equality over both declared fields plus unknown fields, per the
  // protobuf message contract.
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.securesourcemanager.v1.ListIssueCommentsResponse)) {
      return super.equals(obj);
    }
    com.google.cloud.securesourcemanager.v1.ListIssueCommentsResponse other =
        (com.google.cloud.securesourcemanager.v1.ListIssueCommentsResponse) obj;
    if (!getIssueCommentsList().equals(other.getIssueCommentsList())) return false;
    if (!getNextPageToken().equals(other.getNextPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  // Memoized hash consistent with equals(): mixes the descriptor, each set
  // field (tagged by its field number), and unknown fields.
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getIssueCommentsCount() > 0) {
      hash = (37 * hash) + ISSUE_COMMENTS_FIELD_NUMBER;
      hash = (53 * hash) + getIssueCommentsList().hashCode();
    }
    hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getNextPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points; all delegate to the shared PARSER
  // (byte sources) or to GeneratedMessageV3's IOException-wrapping helpers
  // (stream sources).
  public static com.google.cloud.securesourcemanager.v1.ListIssueCommentsResponse parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.securesourcemanager.v1.ListIssueCommentsResponse parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.securesourcemanager.v1.ListIssueCommentsResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.securesourcemanager.v1.ListIssueCommentsResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.securesourcemanager.v1.ListIssueCommentsResponse parseFrom(
      byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.securesourcemanager.v1.ListIssueCommentsResponse parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.securesourcemanager.v1.ListIssueCommentsResponse parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.securesourcemanager.v1.ListIssueCommentsResponse parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.securesourcemanager.v1.ListIssueCommentsResponse
      parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.securesourcemanager.v1.ListIssueCommentsResponse
      parseDelimitedFrom(
          java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.securesourcemanager.v1.ListIssueCommentsResponse parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.securesourcemanager.v1.ListIssueCommentsResponse parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  // Builders are obtained from the default instance so they start from the
  // canonical empty state.
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(
      com.google.cloud.securesourcemanager.v1.ListIssueCommentsResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // Avoid an unnecessary mergeFrom when called on the default instance.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * The response to list issue comments.
   * </pre>
   *
   * Protobuf type {@code google.cloud.securesourcemanager.v1.ListIssueCommentsResponse}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.securesourcemanager.v1.ListIssueCommentsResponse)
      com.google.cloud.securesourcemanager.v1.ListIssueCommentsResponseOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.securesourcemanager.v1.SecureSourceManagerProto
          .internal_static_google_cloud_securesourcemanager_v1_ListIssueCommentsResponse_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.securesourcemanager.v1.SecureSourceManagerProto
          .internal_static_google_cloud_securesourcemanager_v1_ListIssueCommentsResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.securesourcemanager.v1.ListIssueCommentsResponse.class,
              com.google.cloud.securesourcemanager.v1.ListIssueCommentsResponse.Builder.class);
    }

    // Construct using
    // com.google.cloud.securesourcemanager.v1.ListIssueCommentsResponse.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    @java.lang.Override
    // Resets all fields to their proto3 defaults and clears the presence bits.
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (issueCommentsBuilder_ == null) {
        issueComments_ = java.util.Collections.emptyList();
      } else {
        issueComments_ = null;
        issueCommentsBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      nextPageToken_ = "";
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.securesourcemanager.v1.SecureSourceManagerProto
          .internal_static_google_cloud_securesourcemanager_v1_ListIssueCommentsResponse_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.securesourcemanager.v1.ListIssueCommentsResponse
        getDefaultInstanceForType() {
      return com.google.cloud.securesourcemanager.v1.ListIssueCommentsResponse.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.securesourcemanager.v1.ListIssueCommentsResponse build() {
      com.google.cloud.securesourcemanager.v1.ListIssueCommentsResponse result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.securesourcemanager.v1.ListIssueCommentsResponse buildPartial() {
      com.google.cloud.securesourcemanager.v1.ListIssueCommentsResponse result =
          new com.google.cloud.securesourcemanager.v1.ListIssueCommentsResponse(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Transfers the repeated issue_comments field into the built message. When no
    // sub-builder is in use, ownership of the list moves to the message: it is
    // wrapped unmodifiable and the presence bit cleared so the builder will copy
    // before any further mutation.
    private void buildPartialRepeatedFields(
        com.google.cloud.securesourcemanager.v1.ListIssueCommentsResponse result) {
      if (issueCommentsBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)) {
          issueComments_ = java.util.Collections.unmodifiableList(issueComments_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.issueComments_ = issueComments_;
      } else {
        result.issueComments_ = issueCommentsBuilder_.build();
      }
    }

    // Copies scalar fields that have their presence bit set.
    private void buildPartial0(
        com.google.cloud.securesourcemanager.v1.ListIssueCommentsResponse result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.nextPageToken_ = nextPageToken_;
      }
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.securesourcemanager.v1.ListIssueCommentsResponse) {
        return mergeFrom((com.google.cloud.securesourcemanager.v1.ListIssueCommentsResponse) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Field-wise merge: repeated issue_comments entries are appended (sharing the
    // other message's list when this builder is empty), next_page_token is
    // overwritten only when non-empty in the source.
    public Builder mergeFrom(
        com.google.cloud.securesourcemanager.v1.ListIssueCommentsResponse other) {
      if (other
          == com.google.cloud.securesourcemanager.v1.ListIssueCommentsResponse.getDefaultInstance())
        return this;
      if (issueCommentsBuilder_ == null) {
        if (!other.issueComments_.isEmpty()) {
          if (issueComments_.isEmpty()) {
            issueComments_ = other.issueComments_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureIssueCommentsIsMutable();
            issueComments_.addAll(other.issueComments_);
          }
          onChanged();
        }
      } else {
        if (!other.issueComments_.isEmpty()) {
          if (issueCommentsBuilder_.isEmpty()) {
            issueCommentsBuilder_.dispose();
            issueCommentsBuilder_ = null;
            issueComments_ = other.issueComments_;
            bitField0_ = (bitField0_ & ~0x00000001);
            issueCommentsBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getIssueCommentsFieldBuilder()
                    : null;
          } else {
            issueCommentsBuilder_.addAllMessages(other.issueComments_);
          }
        }
      }
      if (!other.getNextPageToken().isEmpty()) {
        nextPageToken_ = other.nextPageToken_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    // Streaming parse loop: dispatches on the wire tag (field number << 3 | wire
    // type); tag 0 means end of input, unrecognized tags go to unknown fields.
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                com.google.cloud.securesourcemanager.v1.IssueComment m =
                    input.readMessage(
                        com.google.cloud.securesourcemanager.v1.IssueComment.parser(),
                        extensionRegistry);
                if (issueCommentsBuilder_ == null) {
                  ensureIssueCommentsIsMutable();
                  issueComments_.add(m);
                } else {
                  issueCommentsBuilder_.addMessage(m);
                }
                break;
              } // case 10
            case 18:
              {
                nextPageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    // Presence/ownership bits: 0x1 = issueComments_ is a mutable list owned by
    // this builder, 0x2 = nextPageToken_ has been set.
    private int bitField0_;

    private java.util.List<com.google.cloud.securesourcemanager.v1.IssueComment> issueComments_ =
        java.util.Collections.emptyList();

    // Copy-on-write guard: clones the (possibly shared/immutable) list before the
    // first in-place mutation.
    private void ensureIssueCommentsIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        issueComments_ =
            new java.util.ArrayList<com.google.cloud.securesourcemanager.v1.IssueComment>(
                issueComments_);
        bitField0_ |= 0x00000001;
      }
    }

    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.securesourcemanager.v1.IssueComment,
            com.google.cloud.securesourcemanager.v1.IssueComment.Builder,
            com.google.cloud.securesourcemanager.v1.IssueCommentOrBuilder>
        issueCommentsBuilder_;

    /**
     *
     *
     * <pre>
     * The list of issue comments.
     * </pre>
     *
     * <code>repeated .google.cloud.securesourcemanager.v1.IssueComment issue_comments = 1;</code>
     */
    public java.util.List<com.google.cloud.securesourcemanager.v1.IssueComment>
        getIssueCommentsList() {
      if (issueCommentsBuilder_ == null) {
        return java.util.Collections.unmodifiableList(issueComments_);
      } else {
        return issueCommentsBuilder_.getMessageList();
      }
    }
    /**
     *
     *
     * <pre>
     * The list of issue comments.
     * </pre>
     *
     * <code>repeated .google.cloud.securesourcemanager.v1.IssueComment issue_comments = 1;</code>
     */
    public int getIssueCommentsCount() {
      if (issueCommentsBuilder_ == null) {
        return issueComments_.size();
      } else {
        return issueCommentsBuilder_.getCount();
      }
    }
    /**
     *
     *
     * <pre>
     * The list of issue comments.
     * </pre>
     *
     * <code>repeated .google.cloud.securesourcemanager.v1.IssueComment issue_comments = 1;</code>
     */
    public com.google.cloud.securesourcemanager.v1.IssueComment getIssueComments(int index) {
      if (issueCommentsBuilder_ == null) {
        return issueComments_.get(index);
      } else {
        return issueCommentsBuilder_.getMessage(index);
      }
    }
    /**
     *
     *
     * <pre>
     * The list of issue comments.
     * </pre>
     *
     * <code>repeated .google.cloud.securesourcemanager.v1.IssueComment issue_comments = 1;</code>
     */
    public Builder setIssueComments(
        int index, com.google.cloud.securesourcemanager.v1.IssueComment value) {
      if (issueCommentsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureIssueCommentsIsMutable();
        issueComments_.set(index, value);
        onChanged();
      } else {
        issueCommentsBuilder_.setMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of issue comments.
     * </pre>
     *
     * <code>repeated .google.cloud.securesourcemanager.v1.IssueComment issue_comments = 1;</code>
     */
    public Builder setIssueComments(
        int index, com.google.cloud.securesourcemanager.v1.IssueComment.Builder builderForValue) {
      if (issueCommentsBuilder_ == null) {
        ensureIssueCommentsIsMutable();
        issueComments_.set(index, builderForValue.build());
        onChanged();
      } else {
        issueCommentsBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of issue comments.
     * </pre>
     *
     * <code>repeated .google.cloud.securesourcemanager.v1.IssueComment issue_comments = 1;</code>
     */
    public Builder addIssueComments(com.google.cloud.securesourcemanager.v1.IssueComment value) {
      if (issueCommentsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureIssueCommentsIsMutable();
        issueComments_.add(value);
        onChanged();
      } else {
        issueCommentsBuilder_.addMessage(value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of issue comments.
     * </pre>
     *
     * <code>repeated .google.cloud.securesourcemanager.v1.IssueComment issue_comments = 1;</code>
     */
    public Builder addIssueComments(
        int index, com.google.cloud.securesourcemanager.v1.IssueComment value) {
      if (issueCommentsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureIssueCommentsIsMutable();
        issueComments_.add(index, value);
        onChanged();
      } else {
        issueCommentsBuilder_.addMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of issue comments.
     * </pre>
     *
     * <code>repeated .google.cloud.securesourcemanager.v1.IssueComment issue_comments = 1;</code>
     */
    public Builder addIssueComments(
        com.google.cloud.securesourcemanager.v1.IssueComment.Builder builderForValue) {
      if (issueCommentsBuilder_ == null) {
        ensureIssueCommentsIsMutable();
        issueComments_.add(builderForValue.build());
        onChanged();
      } else {
        issueCommentsBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of issue comments.
     * </pre>
     *
     * <code>repeated .google.cloud.securesourcemanager.v1.IssueComment issue_comments = 1;</code>
     */
    public Builder addIssueComments(
        int index, com.google.cloud.securesourcemanager.v1.IssueComment.Builder builderForValue) {
      if (issueCommentsBuilder_ == null) {
        ensureIssueCommentsIsMutable();
        issueComments_.add(index, builderForValue.build());
        onChanged();
      } else {
        issueCommentsBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of issue comments.
     * </pre>
     *
     * <code>repeated .google.cloud.securesourcemanager.v1.IssueComment issue_comments = 1;</code>
     */
    public Builder addAllIssueComments(
        java.lang.Iterable<? extends com.google.cloud.securesourcemanager.v1.IssueComment> values) {
      if (issueCommentsBuilder_ == null) {
        ensureIssueCommentsIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, issueComments_);
        onChanged();
      } else {
        issueCommentsBuilder_.addAllMessages(values);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of issue comments.
     * </pre>
     *
     * <code>repeated .google.cloud.securesourcemanager.v1.IssueComment issue_comments = 1;</code>
     */
    public Builder clearIssueComments() {
      if (issueCommentsBuilder_ == null) {
        issueComments_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        issueCommentsBuilder_.clear();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of issue comments.
     * </pre>
     *
     * <code>repeated .google.cloud.securesourcemanager.v1.IssueComment issue_comments = 1;</code>
     */
    public Builder removeIssueComments(int index) {
      if (issueCommentsBuilder_ == null) {
        ensureIssueCommentsIsMutable();
        issueComments_.remove(index);
        onChanged();
      } else {
        issueCommentsBuilder_.remove(index);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of issue comments.
     * </pre>
     *
     * <code>repeated .google.cloud.securesourcemanager.v1.IssueComment issue_comments = 1;</code>
     */
    public com.google.cloud.securesourcemanager.v1.IssueComment.Builder getIssueCommentsBuilder(
        int index) {
      return getIssueCommentsFieldBuilder().getBuilder(index);
    }
    /**
     *
     *
     * <pre>
     * The list of issue comments.
     * </pre>
     *
     * <code>repeated .google.cloud.securesourcemanager.v1.IssueComment issue_comments = 1;</code>
     */
    public com.google.cloud.securesourcemanager.v1.IssueCommentOrBuilder getIssueCommentsOrBuilder(
        int index) {
      if (issueCommentsBuilder_ == null) {
        return issueComments_.get(index);
      } else {
        return issueCommentsBuilder_.getMessageOrBuilder(index);
      }
    }
    /**
     *
     *
     * <pre>
     * The list of issue comments.
     * </pre>
     *
     * <code>repeated .google.cloud.securesourcemanager.v1.IssueComment issue_comments = 1;</code>
     */
    public java.util.List<? extends com.google.cloud.securesourcemanager.v1.IssueCommentOrBuilder>
        getIssueCommentsOrBuilderList() {
      if (issueCommentsBuilder_ != null) {
        return issueCommentsBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(issueComments_);
      }
    }
    /**
     *
     *
     * <pre>
     * The list of issue comments.
     * </pre>
     *
     * <code>repeated .google.cloud.securesourcemanager.v1.IssueComment issue_comments = 1;</code>
     */
    public com.google.cloud.securesourcemanager.v1.IssueComment.Builder addIssueCommentsBuilder() {
      return getIssueCommentsFieldBuilder()
          .addBuilder(com.google.cloud.securesourcemanager.v1.IssueComment.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * The list of issue comments.
     * </pre>
     *
     * <code>repeated .google.cloud.securesourcemanager.v1.IssueComment issue_comments = 1;</code>
     */
    public com.google.cloud.securesourcemanager.v1.IssueComment.Builder addIssueCommentsBuilder(
        int index) {
      return getIssueCommentsFieldBuilder()
          .addBuilder(
              index, com.google.cloud.securesourcemanager.v1.IssueComment.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * The list of issue comments.
     * </pre>
     *
     * <code>repeated .google.cloud.securesourcemanager.v1.IssueComment issue_comments = 1;</code>
     */
    public java.util.List<com.google.cloud.securesourcemanager.v1.IssueComment.Builder>
        getIssueCommentsBuilderList() {
      return getIssueCommentsFieldBuilder().getBuilderList();
    }

    // Lazily switches the repeated field from plain-list mode to
    // RepeatedFieldBuilderV3 mode (needed for nested builders); after the switch
    // issueComments_ is nulled and the field builder owns the data.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.securesourcemanager.v1.IssueComment,
            com.google.cloud.securesourcemanager.v1.IssueComment.Builder,
            com.google.cloud.securesourcemanager.v1.IssueCommentOrBuilder>
        getIssueCommentsFieldBuilder() {
      if (issueCommentsBuilder_ == null) {
        issueCommentsBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloud.securesourcemanager.v1.IssueComment,
                com.google.cloud.securesourcemanager.v1.IssueComment.Builder,
                com.google.cloud.securesourcemanager.v1.IssueCommentOrBuilder>(
                issueComments_,
                ((bitField0_ & 0x00000001) != 0),
                getParentForChildren(),
                isClean());
        issueComments_ = null;
      }
      return issueCommentsBuilder_;
    }

    private java.lang.Object nextPageToken_ = "";
    /**
     *
     *
     * <pre>
     * A token identifying a page of results the server should return.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The nextPageToken.
     */
    public java.lang.String getNextPageToken() {
      java.lang.Object ref = nextPageToken_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        nextPageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * A token identifying a page of results the server should return.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The bytes for nextPageToken.
     */
    public com.google.protobuf.ByteString getNextPageTokenBytes() {
      java.lang.Object ref = nextPageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        nextPageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * A token identifying a page of results the server should return.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * A token identifying a page of results the server should return.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearNextPageToken() {
      nextPageToken_ = getDefaultInstance().getNextPageToken();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * A token identifying a page of results the server should return.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The bytes for nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.securesourcemanager.v1.ListIssueCommentsResponse)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.securesourcemanager.v1.ListIssueCommentsResponse)
  // Shared immutable singleton representing the empty message; also the
  // starting point for all builders (see newBuilder()).
  private static final com.google.cloud.securesourcemanager.v1.ListIssueCommentsResponse
      DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.securesourcemanager.v1.ListIssueCommentsResponse();
  }

  public static com.google.cloud.securesourcemanager.v1.ListIssueCommentsResponse
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Shared parser instance; parses via a fresh Builder and attaches the partial
  // message to any parse failure so callers can inspect what was decoded.
  private static final com.google.protobuf.Parser<ListIssueCommentsResponse> PARSER =
      new com.google.protobuf.AbstractParser<ListIssueCommentsResponse>() {
        @java.lang.Override
        public ListIssueCommentsResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<ListIssueCommentsResponse> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<ListIssueCommentsResponse> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.securesourcemanager.v1.ListIssueCommentsResponse
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/*
* SPDX-License-Identifier: Apache-2.0
* Copyright Red Hat Inc. and Hibernate Authors
*/
package org.hibernate.search.integrationtest.mapper.pojo.mapping.definition;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import java.lang.invoke.MethodHandles;
import org.hibernate.search.engine.backend.document.DocumentElement;
import org.hibernate.search.engine.backend.document.IndexFieldReference;
import org.hibernate.search.engine.backend.document.model.dsl.IndexSchemaObjectField;
import org.hibernate.search.engine.backend.types.ObjectStructure;
import org.hibernate.search.engine.search.predicate.definition.PredicateDefinition;
import org.hibernate.search.mapper.pojo.bridge.TypeBridge;
import org.hibernate.search.mapper.pojo.bridge.binding.TypeBindingContext;
import org.hibernate.search.mapper.pojo.bridge.mapping.annotation.TypeBinderRef;
import org.hibernate.search.mapper.pojo.bridge.mapping.programmatic.TypeBinder;
import org.hibernate.search.mapper.pojo.bridge.runtime.TypeBridgeWriteContext;
import org.hibernate.search.mapper.pojo.mapping.definition.annotation.DocumentId;
import org.hibernate.search.mapper.pojo.mapping.definition.annotation.GenericField;
import org.hibernate.search.mapper.pojo.mapping.definition.annotation.Indexed;
import org.hibernate.search.mapper.pojo.mapping.definition.annotation.IndexedEmbedded;
import org.hibernate.search.mapper.pojo.mapping.definition.annotation.TypeBinding;
import org.hibernate.search.mapper.pojo.model.PojoElementAccessor;
import org.hibernate.search.mapper.pojo.standalone.mapping.SearchMapping;
import org.hibernate.search.mapper.pojo.standalone.session.SearchSession;
import org.hibernate.search.util.common.AssertionFailure;
import org.hibernate.search.util.common.SearchException;
import org.hibernate.search.util.impl.integrationtest.common.extension.BackendMock;
import org.hibernate.search.util.impl.integrationtest.common.reporting.FailureReportUtils;
import org.hibernate.search.util.impl.integrationtest.mapper.pojo.standalone.StandalonePojoMappingSetupHelper;
import org.hibernate.search.util.impl.test.annotation.TestForIssue;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.RegisterExtension;
/**
* Test common use cases of (custom) type bridges.
* <p>
* Does not test reindexing in depth; this is tested in {@code AutomaticIndexing*} tests in the ORM mapper.
* <p>
* Does not test custom annotations; this is tested in {@code CustomTypeMappingAnnotationBaseIT}.
*/
@SuppressWarnings("unused")
class TypeBridgeBaseIT {
	// Name of the mock backend index used by every test in this class.
	private static final String INDEX_NAME = "IndexName";

	// Simulated backend: tests register expected schema/index work on it and
	// verify the mapper issued exactly those calls.
	@RegisterExtension
	public BackendMock backendMock = BackendMock.create();

	// Boots a Standalone POJO mapper wired to the backend mock above.
	@RegisterExtension
	public StandalonePojoMappingSetupHelper setupHelper =
			StandalonePojoMappingSetupHelper.withBackendMock( MethodHandles.lookup(), backendMock );
	/**
	 * Basic test checking that a "normal" custom type bridge will work as expected
	 * when relying on accessors.
	 * <p>
	 * Note that reindexing is tested in depth in the ORM mapper integration tests.
	 */
	@Test
	@TestForIssue(jiraKey = { "HSEARCH-2055", "HSEARCH-2641" })
	void accessors() {
		@Indexed(index = INDEX_NAME)
		class IndexedEntity {
			@DocumentId
			Integer id;
			String stringProperty;
		}

		// Expect the binder below to contribute a single analyzed string field.
		backendMock.expectSchema( INDEX_NAME, b -> b.field( "someField", String.class, b2 -> {
			b2.analyzerName( "myAnalyzer" ); // For HSEARCH-2641
		} )
		);

		// Bind a type bridge programmatically: read "stringProperty" through a
		// PojoElementAccessor (which also registers it as a reindexing dependency)
		// and write it to "someField".
		SearchMapping mapping = setupHelper.start().withConfiguration(
				b -> b.programmaticMapping().type( IndexedEntity.class )
						.binder( (TypeBinder) context -> {
							PojoElementAccessor<String> pojoPropertyAccessor =
									context.bridgedElement().property( "stringProperty" )
											.createAccessor( String.class );
							IndexFieldReference<String> indexFieldReference =
									context.indexSchemaElement().field(
											"someField",
											f -> f.asString().analyzer( "myAnalyzer" )
									)
											.toReference();
							context.bridge(
									(DocumentElement target, Object bridgedElement, TypeBridgeWriteContext context1) -> {
										target.addValue(
												indexFieldReference, pojoPropertyAccessor.read( bridgedElement )
										);
									} );
						} )
		)
				.setup( IndexedEntity.class );
		backendMock.verifyExpectationsMet();

		IndexedEntity entity = new IndexedEntity();
		entity.id = 1;
		entity.stringProperty = "some string";

		// Initial indexing: the bridge must copy stringProperty into someField.
		try ( SearchSession session = mapping.createSession() ) {
			session.indexingPlan().add( entity );

			backendMock.expectWorks( INDEX_NAME )
					.add( "1", b -> b.field( "someField", entity.stringProperty ) );
		}
		backendMock.verifyExpectationsMet();

		// Dirty-checking: marking "stringProperty" dirty must trigger an update
		// of this entity, since the accessor declared a dependency on it.
		try ( SearchSession session = mapping.createSession() ) {
			entity.stringProperty = "some string 2";
			session.indexingPlan().addOrUpdate( entity, new String[] { "stringProperty" } );

			backendMock.expectWorks( INDEX_NAME )
					.addOrUpdate( "1", b -> b.field( "someField", entity.stringProperty ) );
		}
		backendMock.verifyExpectationsMet();
	}
/**
* Basic test checking that a "normal" custom type bridge will work as expected
* when relying on explicit dependency declaration.
* <p>
* Note that reindexing is tested in depth in the ORM mapper integration tests.
*/
	@Test
	@TestForIssue(jiraKey = "HSEARCH-3297")
	void explicitDependencies() {
		@Indexed(index = INDEX_NAME)
		class IndexedEntity {
			@DocumentId
			Integer id;
			String stringProperty;
		}
		backendMock.expectSchema( INDEX_NAME, b -> b.field( "someField", String.class, b2 -> {
			b2.analyzerName( "myAnalyzer" ); // For HSEARCH-2641
		} )
		);
		SearchMapping mapping = setupHelper.start().withConfiguration(
				b -> b.programmaticMapping().type( IndexedEntity.class )
						.binder( (TypeBinder) context -> {
							// Explicit dependency declaration, as opposed to the
							// accessor-based declaration tested in accessors().
							context.dependencies().use( "stringProperty" );
							IndexFieldReference<String> indexFieldReference =
									context.indexSchemaElement().field(
											"someField",
											f -> f.asString().analyzer( "myAnalyzer" )
									)
											.toReference();
							// Typed bridge: the bridged element is cast to IndexedEntity
							// and the property is read directly, without an accessor.
							context.bridge( IndexedEntity.class,
									(DocumentElement target, IndexedEntity bridgedElement,
											TypeBridgeWriteContext context1) -> {
										target.addValue( indexFieldReference, bridgedElement.stringProperty );
									} );
						} )
		)
				.setup( IndexedEntity.class );
		backendMock.verifyExpectationsMet();
		IndexedEntity entity = new IndexedEntity();
		entity.id = 1;
		entity.stringProperty = "some string";
		try ( SearchSession session = mapping.createSession() ) {
			session.indexingPlan().add( entity );
			backendMock.expectWorks( INDEX_NAME )
					.add( "1", b -> b.field( "someField", entity.stringProperty ) );
		}
		backendMock.verifyExpectationsMet();
		// Changing the declared dependency must trigger reindexing.
		try ( SearchSession session = mapping.createSession() ) {
			entity.stringProperty = "some string 2";
			session.indexingPlan().addOrUpdate( entity, new String[] { "stringProperty" } );
			backendMock.expectWorks( INDEX_NAME )
					.addOrUpdate( "1", b -> b.field( "someField", entity.stringProperty ) );
		}
		backendMock.verifyExpectationsMet();
	}
	@Test
	@TestForIssue(jiraKey = "HSEARCH-3297")
	void explicitDependencies_error_invalidProperty() {
		@Indexed(index = INDEX_NAME)
		class IndexedEntity {
			@DocumentId
			Integer id;
			String stringProperty;
		}
		// Declaring a dependency on a nonexistent property must fail bootstrap
		// with a failure report pointing to the indexed type.
		assertThatThrownBy(
				() -> setupHelper.start().withConfiguration(
						b -> b.programmaticMapping().type( IndexedEntity.class )
								.binder( (TypeBinder) context -> {
									context.dependencies().use( "doesNotExist" );
									context.bridge( new UnusedTypeBridge() );
								} )
				)
						.setup( IndexedEntity.class )
		)
				.isInstanceOf( SearchException.class )
				.satisfies( FailureReportUtils.hasFailureReport()
						.typeContext( IndexedEntity.class.getName() )
						.failure( "No readable property named 'doesNotExist' on type '"
								+ IndexedEntity.class.getName() + "'" ) );
	}
/**
* Basic test checking that a "normal" custom type bridge will work as expected
* when relying on explicit reindexing declaration.
* <p>
* Note that reindexing is tested in depth in the ORM mapper integration tests.
*/
@Test
@TestForIssue(jiraKey = "HSEARCH-3297")
void explicitReindexing() {
@Indexed(index = INDEX_NAME)
class IndexedEntity {
@DocumentId
Integer id;
}
class ContainedEntity {
IndexedEntity parent;
String stringProperty;
}
backendMock.expectSchema( INDEX_NAME, b -> b.field( "someField", String.class, b2 -> {
b2.analyzerName( "myAnalyzer" ); // For HSEARCH-2641
} )
);
SearchMapping mapping = setupHelper.start().withConfiguration(
b -> b.programmaticMapping().type( IndexedEntity.class )
.binder( (TypeBinder) context -> {
context.dependencies()
.fromOtherEntity( ContainedEntity.class, "parent" )
.use( "stringProperty" );
IndexFieldReference<String> indexFieldReference =
context.indexSchemaElement().field(
"someField",
f -> f.asString().analyzer( "myAnalyzer" )
)
.toReference();
context.bridge( IndexedEntity.class,
(DocumentElement target, IndexedEntity bridgedElement,
TypeBridgeWriteContext context1) -> {
/*
* In a real application this would run a query,
* but we don't have the necessary infrastructure here
* so we'll cut short and just index a constant.
* We just need to know the bridge is executed anyway.
*/
target.addValue( indexFieldReference, "constant" );
} );
} )
)
.setup( IndexedEntity.class, ContainedEntity.class );
backendMock.verifyExpectationsMet();
IndexedEntity entity = new IndexedEntity();
entity.id = 1;
ContainedEntity containedEntity = new ContainedEntity();
containedEntity.parent = entity;
containedEntity.stringProperty = "some string";
try ( SearchSession session = mapping.createSession() ) {
session.indexingPlan().add( entity );
session.indexingPlan().add( 1, null, containedEntity );
backendMock.expectWorks( INDEX_NAME )
.add( "1", b -> b.field( "someField", "constant" ) );
}
backendMock.verifyExpectationsMet();
try ( SearchSession session = mapping.createSession() ) {
containedEntity.stringProperty = "some string";
session.indexingPlan().addOrUpdate( 1, null, containedEntity, new String[] { "stringProperty" } );
backendMock.expectWorks( INDEX_NAME )
.addOrUpdate( "1", b -> b.field( "someField", "constant" ) );
}
backendMock.verifyExpectationsMet();
}
	@Test
	@TestForIssue(jiraKey = "HSEARCH-3297")
	void explicitReindexing_error_use_invalidProperty() {
		@Indexed(index = INDEX_NAME)
		class IndexedEntity {
			@DocumentId
			Integer id;
			public Integer getId() {
				return id;
			}
		}
		class ContainedEntity {
			IndexedEntity parent;
			String stringProperty;
		}
		// use(...) after fromOtherEntity(...) resolves against the OTHER entity:
		// a nonexistent property on ContainedEntity must fail bootstrap.
		assertThatThrownBy(
				() -> setupHelper.start().withConfiguration(
						b -> b.programmaticMapping().type( IndexedEntity.class )
								.binder( (TypeBinder) context -> {
									context.dependencies()
											.fromOtherEntity(
													ContainedEntity.class,
													"parent"
											)
											.use( "doesNotExist" );
									context.bridge( new UnusedTypeBridge() );
								} )
				)
						.setup( IndexedEntity.class, ContainedEntity.class )
		)
				.isInstanceOf( SearchException.class )
				.satisfies( FailureReportUtils.hasFailureReport()
						.typeContext( IndexedEntity.class.getName() )
						.failure( "No readable property named 'doesNotExist' on type '"
								+ ContainedEntity.class.getName() + "'" ) );
	}
	@Test
	@TestForIssue(jiraKey = "HSEARCH-3297")
	void explicitReindexing_error_fromOtherEntity_invalidProperty() {
		@Indexed(index = INDEX_NAME)
		class IndexedEntity {
			@DocumentId
			Integer id;
		}
		class ContainedEntity {
			IndexedEntity parent;
			String stringProperty;
		}
		// The inverse-association path passed to fromOtherEntity(...) must exist
		// on the other entity; otherwise bootstrap fails.
		assertThatThrownBy(
				() -> setupHelper.start().withConfiguration(
						b -> b.programmaticMapping().type( IndexedEntity.class )
								.binder( (TypeBinder) context -> {
									context.dependencies()
											.fromOtherEntity(
													ContainedEntity.class,
													"doesNotExist"
											);
									context.bridge( new UnusedTypeBridge() );
								} )
				)
						.setup( IndexedEntity.class, ContainedEntity.class )
		)
				.isInstanceOf( SearchException.class )
				.satisfies( FailureReportUtils.hasFailureReport()
						.typeContext( IndexedEntity.class.getName() )
						.failure( "No readable property named 'doesNotExist' on type '"
								+ ContainedEntity.class.getName() + "'" ) );
	}
	@Test
	@TestForIssue(jiraKey = "HSEARCH-3297")
	void explicitReindexing_error_fromOtherEntity_bridgedElementNotEntityType() {
		class NotEntity {
			String stringProperty;
			public String getStringProperty() {
				return stringProperty;
			}
		}
		@Indexed(index = INDEX_NAME)
		class IndexedEntity {
			@DocumentId
			Integer id;
			@IndexedEmbedded
			NotEntity notEntity;
		}
		// fromOtherEntity(...) is only legal when the BRIDGED type itself is an
		// entity type; here the binder is applied to NotEntity (a mere embeddable),
		// so bootstrap must fail.
		assertThatThrownBy(
				() -> setupHelper.start().withConfiguration(
						b -> b.programmaticMapping().type( NotEntity.class )
								.binder( (TypeBinder) context -> {
									context.dependencies()
											.fromOtherEntity(
													IndexedEntity.class,
													"doesNotMatter"
											);
									context.bridge( new UnusedTypeBridge() );
								} )
				)
						.withAnnotatedTypes( NotEntity.class )
						.setup( IndexedEntity.class )
		)
				.isInstanceOf( SearchException.class )
				.satisfies( FailureReportUtils.hasFailureReport()
						.typeContext( IndexedEntity.class.getName() )
						.pathContext( ".notEntity<no value extractors>" )
						.failure(
								"Invalid use of 'fromOtherEntity': this method can only be used when the bridged element has an entity type,"
										+ " but the bridged element has type '" + NotEntity.class.getName() + "',"
										+ " which is not an entity type."
						) );
	}
	@Test
	@TestForIssue(jiraKey = "HSEARCH-3297")
	void explicitReindexing_error_fromOtherEntity_otherEntityTypeNotEntityType() {
		@Indexed(index = INDEX_NAME)
		class IndexedEntity {
			@DocumentId
			Integer id;
		}
		class NotEntity {
		}
		// The type passed to fromOtherEntity(...) must itself be an entity type;
		// passing a non-entity must fail bootstrap.
		assertThatThrownBy(
				() -> setupHelper.start().withConfiguration(
						b -> b.programmaticMapping().type( IndexedEntity.class )
								.binder( (TypeBinder) context -> {
									context.dependencies()
											.fromOtherEntity(
													NotEntity.class,
													"doesNotMatter"
											);
									context.bridge( new UnusedTypeBridge() );
								} )
				)
						.withAnnotatedTypes( NotEntity.class )
						.setup( IndexedEntity.class )
		)
				.isInstanceOf( SearchException.class )
				.satisfies( FailureReportUtils.hasFailureReport()
						.typeContext( IndexedEntity.class.getName() )
						.failure(
								"Invalid type passed to 'fromOtherEntity': the type must be an entity type",
								"Type '" + NotEntity.class.getName() + "' is not an entity type."
						) );
	}
	@Test
	@TestForIssue(jiraKey = "HSEARCH-3297")
	void explicitReindexing_error_fromOtherEntity_inverseAssociationPathTargetsWrongType() {
		@Indexed(index = INDEX_NAME)
		class IndexedEntity {
			@DocumentId
			Integer id;
		}
		class DifferentEntity {
		}
		class ContainedEntity {
			IndexedEntity parent;
			String stringProperty;
			DifferentEntity associationToDifferentEntity;
		}
		// The inverse association must point back to the bridged entity type;
		// here it targets DifferentEntity instead of IndexedEntity, so bootstrap must fail.
		assertThatThrownBy(
				() -> setupHelper.start().withConfiguration(
						b -> b.programmaticMapping().type( IndexedEntity.class )
								.binder( (TypeBinder) context -> {
									context.dependencies()
											.fromOtherEntity(
													ContainedEntity.class,
													"associationToDifferentEntity"
											);
									context.bridge( new UnusedTypeBridge() );
								} )
				)
						.setup( IndexedEntity.class, ContainedEntity.class )
		)
				.isInstanceOf( SearchException.class )
				.satisfies( FailureReportUtils.hasFailureReport()
						.typeContext( IndexedEntity.class.getName() )
						.failure(
								"The inverse association targets type '" + DifferentEntity.class.getName() + "',"
										+ " but a supertype or subtype of '" + IndexedEntity.class.getName() + "' was expected."
						) );
	}
	@Test
	@TestForIssue(jiraKey = "HSEARCH-3297")
	void missingDependencyDeclaration() {
		@Indexed(index = INDEX_NAME)
		class IndexedEntity {
			@DocumentId
			Integer id;
			String stringProperty;
		}
		// A binder that never declares any dependency (neither use(...) nor
		// useRootOnly()) is considered buggy; bootstrap must fail.
		assertThatThrownBy(
				() -> setupHelper.start().withConfiguration(
						b -> b.programmaticMapping().type( IndexedEntity.class )
								.binder( (TypeBinder) context -> {
									// Do not declare any dependency
									context.bridge( new UnusedTypeBridge() );
								} )
				)
						.setup( IndexedEntity.class )
		)
				.isInstanceOf( SearchException.class )
				.satisfies( FailureReportUtils.hasFailureReport()
						.typeContext( IndexedEntity.class.getName() )
						.failure(
								"Incorrect binder implementation",
								"the binder did not declare any dependency to the entity model during binding."
										+ " Declare dependencies using context.dependencies().use(...) or,"
										+ " if the bridge really does not depend on the entity model, context.dependencies().useRootOnly()"
						) );
	}
	@Test
	@TestForIssue(jiraKey = "HSEARCH-3297")
	void inconsistentDependencyDeclaration() {
		@Indexed(index = INDEX_NAME)
		class IndexedEntity {
			@DocumentId
			Integer id;
			String stringProperty;
		}
		// Declaring both an explicit dependency and useRootOnly() is contradictory;
		// bootstrap must fail with a binder-implementation error.
		assertThatThrownBy(
				() -> setupHelper.start().withConfiguration(
						b -> b.programmaticMapping().type( IndexedEntity.class )
								.binder( (TypeBinder) context -> {
									// Declare no dependency, but also a dependency: this is inconsistent.
									context.dependencies()
											.use( "stringProperty" )
											.useRootOnly();
									context.bridge( new UnusedTypeBridge() );
								} )
				)
						.setup( IndexedEntity.class )
		)
				.isInstanceOf( SearchException.class )
				.satisfies( FailureReportUtils.hasFailureReport()
						.typeContext( IndexedEntity.class.getName() )
						.failure(
								"Incorrect binder implementation",
								"the binder called context.dependencies().useRootOnly() during binding,"
										+ " but also declared extra dependencies to the entity model."
						) );
	}
	@Test
	@TestForIssue(jiraKey = "HSEARCH-3297")
	void useRootOnly() {
		@Indexed(index = INDEX_NAME)
		class IndexedEntity {
			@DocumentId
			Integer id;
			@IndexedEmbedded
			CustomEnum enumProperty;
		}
		// The bridge is bound to the embedded enum type, so its field appears
		// inside the 'enumProperty' object field.
		backendMock.expectSchema( INDEX_NAME, b -> b
				.objectField( "enumProperty", b2 -> b2
						.field( "someField", String.class )
				)
		);
		SearchMapping mapping = setupHelper.start().withConfiguration(
				b -> b.programmaticMapping().type( CustomEnum.class )
						.binder( (TypeBinder) context -> {
							// Only the root (the enum value itself) is depended on.
							context.dependencies().useRootOnly();
							IndexFieldReference<String> indexFieldReference = context.indexSchemaElement().field(
									"someField",
									f -> f.asString()
							)
									.toReference();
							context.bridge( CustomEnum.class,
									(DocumentElement target, CustomEnum bridgedElement,
											TypeBridgeWriteContext context1) -> {
										// This is a strange way to use bridges,
										// but then again a type bridges that only uses the root *is* strange
										target.addValue( indexFieldReference, bridgedElement.stringProperty );
									} );
						} )
		)
				.setup( IndexedEntity.class );
		backendMock.verifyExpectationsMet();
		IndexedEntity entity = new IndexedEntity();
		entity.id = 1;
		entity.enumProperty = CustomEnum.VALUE1;
		try ( SearchSession session = mapping.createSession() ) {
			session.indexingPlan().add( entity );
			backendMock.expectWorks( INDEX_NAME )
					.add( "1", b -> b
							.objectField( "enumProperty", b2 -> b2
									.field( "someField", entity.enumProperty.stringProperty )
							)
					);
		}
		backendMock.verifyExpectationsMet();
		// Replacing the root (the enum value) must trigger reindexing.
		try ( SearchSession session = mapping.createSession() ) {
			entity.enumProperty = CustomEnum.VALUE2;
			session.indexingPlan().addOrUpdate( entity, new String[] { "enumProperty" } );
			backendMock.expectWorks( INDEX_NAME )
					.addOrUpdate( "1", b -> b
							.objectField( "enumProperty", b2 -> b2
									.field( "someField", entity.enumProperty.stringProperty )
							)
					);
		}
		backendMock.verifyExpectationsMet();
	}
	// Embedded enum type used by useRootOnly(): each constant carries a string
	// that the bridge under test reads directly from the root.
	private enum CustomEnum {
		VALUE1( "value1String" ),
		VALUE2( "value2String" );
		final String stringProperty;
		CustomEnum(String stringProperty) {
			this.stringProperty = stringProperty;
		}
	}
/**
* Test that field definitions are forwarded to the backend.
*/
	@Test
	@TestForIssue(jiraKey = "HSEARCH-3324")
	void field() {
		class Contained {
		}
		@Indexed(index = INDEX_NAME)
		class IndexedEntity {
			@DocumentId
			Integer id;
			@IndexedEmbedded
			Contained contained;
		}
		// Both the single-valued and the multi-valued field declared by the binder
		// must be forwarded to the backend schema, under the embedded object field.
		backendMock.expectSchema( INDEX_NAME, b -> b
				.objectField( "contained", b2 -> b2
						.field( "stringFromBridge", String.class )
						.field( "listFromBridge", Integer.class, b3 -> b3.multiValued( true ) )
				)
		);
		SearchMapping mapping = setupHelper.start().withConfiguration(
				b -> b.programmaticMapping().type( Contained.class )
						.binder( (TypeBinder) context -> {
							context.dependencies().useRootOnly();
							// Single-valued field
							context.indexSchemaElement()
									.field( "stringFromBridge", f -> f.asString() )
									.toReference();
							// Multi-valued field
							context.indexSchemaElement()
									.field( "listFromBridge", f -> f.asInteger() )
									.multiValued()
									.toReference();
							context.bridge( new UnusedTypeBridge() );
						} )
		)
				.setup( IndexedEntity.class );
		backendMock.verifyExpectationsMet();
	}
/**
* Test that object field definitions are forwarded to the backend.
*/
	@Test
	@TestForIssue(jiraKey = "HSEARCH-3324")
	void objectField() {
		class Contained {
		}
		@Indexed(index = INDEX_NAME)
		class IndexedEntity {
			@DocumentId
			Integer id;
			@IndexedEmbedded
			Contained contained;
		}
		// Object fields (including a NESTED, multi-valued one) declared by the binder
		// must be forwarded to the backend schema.
		backendMock.expectSchema( INDEX_NAME, b -> b
				.objectField( "contained", b2 -> b2
						.objectField( "stringFromBridge", b3 -> b3
								.field( "value", String.class )
						)
						.objectField( "listFromBridge", b3 -> b3
								.objectStructure( ObjectStructure.NESTED )
								.multiValued( true )
								.field( "value", Integer.class )
						)
				)
		);
		SearchMapping mapping = setupHelper.start().withConfiguration(
				b -> b.programmaticMapping().type( Contained.class )
						.binder( (TypeBinder) context -> {
							context.dependencies().useRootOnly();
							// Single-valued field
							IndexSchemaObjectField stringObjectField = context.indexSchemaElement()
									.objectField( "stringFromBridge" );
							stringObjectField.toReference();
							stringObjectField.field( "value", f -> f.asString() )
									.toReference();
							// Multi-valued field
							IndexSchemaObjectField listObjectField = context.indexSchemaElement()
									.objectField( "listFromBridge", ObjectStructure.NESTED )
									.multiValued();
							listObjectField.toReference();
							listObjectField.field( "value", f -> f.asInteger() )
									.toReference();
							context.bridge( new UnusedTypeBridge() );
						} )
		)
				.setup( IndexedEntity.class );
		backendMock.verifyExpectationsMet();
	}
/**
* Test that field template definitions are forwarded to the backend.
*/
	@Test
	@TestForIssue(jiraKey = "HSEARCH-3273")
	void fieldTemplate() {
		class Contained {
		}
		@Indexed(index = INDEX_NAME)
		class IndexedEntity {
			@DocumentId
			Integer id;
			@IndexedEmbedded
			Contained contained;
		}
		// Field templates (dynamic fields matched by path glob) declared by the
		// binder must be forwarded to the backend schema.
		backendMock.expectSchema( INDEX_NAME, b -> b
				.objectField( "contained", b2 -> b2
						.fieldTemplate( "stringFromBridge", String.class, b3 -> b3
								.matchingPathGlob( "*_string" )
						)
						.fieldTemplate( "listFromBridge", Integer.class, b3 -> b3
								.multiValued( true )
								.matchingPathGlob( "*_list" )
						)
				)
		);
		SearchMapping mapping = setupHelper.start().withConfiguration(
				b -> b.programmaticMapping().type( Contained.class )
						.binder( (TypeBinder) context -> {
							context.dependencies().useRootOnly();
							// Single-valued field
							context.indexSchemaElement()
									.fieldTemplate( "stringFromBridge", f -> f.asString() )
									.matchingPathGlob( "*_string" );
							// Multi-valued field
							context.indexSchemaElement()
									.fieldTemplate( "listFromBridge", f -> f.asInteger() )
									.matchingPathGlob( "*_list" )
									.multiValued();
							context.bridge( new UnusedTypeBridge() );
						} )
		)
				.setup( IndexedEntity.class );
		backendMock.verifyExpectationsMet();
	}
/**
* Test that object field template definitions are forwarded to the backend.
*/
	@Test
	@TestForIssue(jiraKey = "HSEARCH-3273")
	void objectFieldTemplate() {
		class Contained {
		}
		@Indexed(index = INDEX_NAME)
		class IndexedEntity {
			@DocumentId
			Integer id;
			@IndexedEmbedded
			Contained contained;
		}
		// Object-field templates plus leaf-value templates ("<name>_value") declared
		// by the binder must be forwarded to the backend schema.
		backendMock.expectSchema( INDEX_NAME, b -> b
				.objectField( "contained", b2 -> b2
						.objectFieldTemplate( "stringFromBridge", b3 -> b3
								.matchingPathGlob( "*_string" )
						)
						.fieldTemplate( "stringFromBridge_value", String.class, b3 -> b3
								.matchingPathGlob( "*_string.value" )
						)
						.objectFieldTemplate( "listFromBridge", b3 -> b3
								.objectStructure( ObjectStructure.NESTED )
								.multiValued( true )
								.matchingPathGlob( "*_list" )
						)
						.fieldTemplate( "listFromBridge_value", Integer.class, b3 -> b3
								.matchingPathGlob( "*_list.value" )
						)
				)
		);
		SearchMapping mapping = setupHelper.start().withConfiguration(
				b -> b.programmaticMapping().type( Contained.class )
						.binder( (TypeBinder) context -> {
							context.dependencies().useRootOnly();
							// Single-valued field
							context.indexSchemaElement()
									.objectFieldTemplate( "stringFromBridge" )
									.matchingPathGlob( "*_string" );
							context.indexSchemaElement()
									.fieldTemplate( "stringFromBridge_value", f -> f.asString() )
									.matchingPathGlob( "*_string.value" );
							// Multi-valued field
							context.indexSchemaElement()
									.objectFieldTemplate( "listFromBridge", ObjectStructure.NESTED )
									.matchingPathGlob( "*_list" )
									.multiValued();
							context.indexSchemaElement()
									.fieldTemplate( "listFromBridge_value", f -> f.asInteger() )
									.matchingPathGlob( "*_list.value" );
							context.bridge( new UnusedTypeBridge() );
						} )
		)
				.setup( IndexedEntity.class );
		backendMock.verifyExpectationsMet();
	}
	@Test
	void accessors_incompatibleRequestedType() {
		@Indexed
		class IndexedEntity {
			@DocumentId
			Integer id;
			String stringProperty;
		}
		// Requesting an accessor with a type the property cannot be assigned to
		// (String property read as Integer) must fail bootstrap.
		assertThatThrownBy(
				() -> setupHelper.start()
						.withConfiguration( b -> b.programmaticMapping().type( IndexedEntity.class )
								.binder( (TypeBinder) context -> {
									context.bridgedElement().property( "stringProperty" )
											.createAccessor( Integer.class );
									context.bridge( new UnusedTypeBridge() );
								} )
						)
						.setup( IndexedEntity.class )
		)
				.isInstanceOf( SearchException.class )
				.satisfies( FailureReportUtils.hasFailureReport()
						.typeContext( IndexedEntity.class.getName() )
						.failure( "'.stringProperty<no value extractors>' cannot be assigned to '"
								+ Integer.class.getName() + "'" ) );
	}
	// Placeholder bridge for tests that only exercise binding/bootstrap:
	// it must never actually be asked to write a document.
	private static class UnusedTypeBridge implements TypeBridge<Object> {
		@Override
		public void write(DocumentElement target, Object bridgedElement, TypeBridgeWriteContext context) {
			throw new AssertionFailure( "Should not be called" );
		}
	}
	@Test
	void typeBridge_invalidInputType() {
		@Indexed
		@TypeBinding(binder = @TypeBinderRef(type = MyTargetTypeBridge.Binder.class))
		class IndexedEntity {
			@DocumentId
			@GenericField
			Integer id;
		}
		// The bridge expects a TargetType input, but is applied to IndexedEntity,
		// which does not implement TargetType: bootstrap must fail.
		assertThatThrownBy( () -> setupHelper.start().expectCustomBeans().setup( IndexedEntity.class ) )
				.isInstanceOf( SearchException.class )
				.satisfies( FailureReportUtils.hasFailureReport()
						.typeContext( IndexedEntity.class.getName() )
						.failure( "Invalid bridge for input type '" + IndexedEntity.class.getName()
								+ "': '" + MyTargetTypeBridge.TOSTRING + "'",
								"This bridge expects an input of type '" + TargetType.class.getName() + "'" ) );
	}
	// Bridge deliberately typed to TargetType, so that applying it to a type
	// that is not a TargetType triggers the input-type validation error
	// asserted in typeBridge_invalidInputType().
	public static class MyTargetTypeBridge implements TypeBridge<TargetType> {
		// Deterministic toString(), referenced verbatim in the expected failure message.
		private static final String TOSTRING = "<MyTargetTypeBridge toString() result>";
		@Override
		public void write(DocumentElement target, TargetType bridgedElement, TypeBridgeWriteContext context) {
			throw new UnsupportedOperationException( "Should not be called" );
		}
		@Override
		public String toString() {
			return TOSTRING;
		}
		public static class Binder implements TypeBinder {
			@Override
			public void bind(TypeBindingContext context) {
				context.dependencies().useRootOnly();
				context.bridge( TargetType.class, new MyTargetTypeBridge() );
			}
		}
	}
public interface TargetType {
}
/**
* Test for backward compatibility with 6.0.0.CR1 APIs
*/
	@Test
	void typeBridge_noGenericType() {
		backendMock.expectSchema( INDEX_NAME, b -> b.field( "someField", String.class ) );
		SearchMapping mapping = setupHelper.start().expectCustomBeans().setup( IndexedEntityWithRawTypeBridge.class );
		backendMock.verifyExpectationsMet();
		// A raw (non-generic) TypeBridge must still receive the bridged element
		// and index it correctly.
		try ( SearchSession session = mapping.createSession() ) {
			IndexedEntityWithRawTypeBridge entity = new IndexedEntityWithRawTypeBridge();
			entity.id = 739;
			session.indexingPlan().add( entity );
			backendMock.expectWorks( INDEX_NAME )
					.add( "739", b -> b.field( "someField", "739" ) );
		}
		backendMock.verifyExpectationsMet();
	}
	// Raw (non-generic) bridge, kept for backward compatibility with the
	// 6.0.0.CR1-era API: the bridged element arrives as Object and must be cast.
	@SuppressWarnings("rawtypes")
	public static class RawTypeBridge implements TypeBridge {
		private final IndexFieldReference<String> fieldReference;
		public RawTypeBridge(IndexFieldReference<String> fieldReference) {
			this.fieldReference = fieldReference;
		}
		@Override
		public void write(DocumentElement target, Object bridgedElement, TypeBridgeWriteContext context) {
			IndexedEntityWithRawTypeBridge castedBridgedElement = (IndexedEntityWithRawTypeBridge) bridgedElement;
			target.addValue( fieldReference, castedBridgedElement.id.toString() );
		}
		public static class Binder implements TypeBinder {
			@Override
			@SuppressWarnings("unchecked")
			public void bind(TypeBindingContext context) {
				context.dependencies().useRootOnly();
				IndexFieldReference<String> fieldReference = context.indexSchemaElement().field(
						"someField", f -> f.asString() ).toReference();
				// Raw bridge() call: no expected input type is declared.
				context.bridge( new RawTypeBridge( fieldReference ) );
			}
		}
	}
	// Entity bound to the raw-typed bridge above; exercised by typeBridge_noGenericType().
	@Indexed(index = INDEX_NAME)
	@TypeBinding(binder = @TypeBinderRef(type = RawTypeBridge.Binder.class))
	private static class IndexedEntityWithRawTypeBridge {
		@DocumentId
		Integer id;
	}
/**
* Test that named predicate definitions are forwarded to the backend.
*/
	@Test
	@TestForIssue(jiraKey = "HSEARCH-4166")
	void namedPredicate() {
		class Contained {
		}
		@Indexed(index = INDEX_NAME)
		class IndexedEntity {
			@DocumentId
			Integer id;
			@IndexedEmbedded
			Contained contained;
		}
		// The definition itself must never run here; only its registration in the
		// schema is under test.
		PredicateDefinition predicateDefinition = context -> {
			throw new IllegalStateException( "should not be used" );
		};
		backendMock.expectSchema( INDEX_NAME, b -> b
				.objectField( "contained", b2 -> b2
						.field( "string", String.class, b3 -> {} )
						.namedPredicate( "named", b3 -> b3
								.predicateDefinition( predicateDefinition )
						)
				)
		);
		SearchMapping mapping = setupHelper.start().withConfiguration(
				b -> b.programmaticMapping().type( Contained.class )
						.binder( context -> {
							context.dependencies().useRootOnly();
							context.indexSchemaElement()
									.field( "string", f -> f.asString() )
									.toReference();
							context.indexSchemaElement()
									.namedPredicate( "named", predicateDefinition );
							context.bridge( new UnusedTypeBridge() );
						} )
		)
				.setup( IndexedEntity.class );
		backendMock.verifyExpectationsMet();
	}
/**
* Test that indexed-embedded includePaths filters do not affect named predicates.
*/
	@Test
	@TestForIssue(jiraKey = "HSEARCH-4166")
	void namedPredicate_indexedEmbeddedIncludePaths() {
		class Contained {
		}
		@Indexed(index = INDEX_NAME)
		class IndexedEntity {
			@DocumentId
			Integer id;
			@IndexedEmbedded(includePaths = "included")
			Contained contained;
		}
		PredicateDefinition predicateDefinition = context -> {
			throw new IllegalStateException( "should not be used" );
		};
		// 'excluded' is filtered out by includePaths, but the named predicate must
		// survive the filter: only fields are subject to includePaths.
		backendMock.expectSchema( INDEX_NAME, b -> b
				.objectField( "contained", b2 -> b2
						.field( "included", String.class, b3 -> {} )
						.namedPredicate( "named", b3 -> b3
								.predicateDefinition( predicateDefinition )
						)
				)
		);
		SearchMapping mapping = setupHelper.start().withConfiguration(
				b -> b.programmaticMapping().type( Contained.class )
						.binder( context -> {
							context.dependencies().useRootOnly();
							context.indexSchemaElement()
									.field( "included", f -> f.asString() )
									.toReference();
							context.indexSchemaElement()
									.field( "excluded", f -> f.asString() )
									.toReference();
							context.indexSchemaElement()
									.namedPredicate( "named", predicateDefinition );
							context.bridge( new UnusedTypeBridge() );
						} )
		)
				.setup( IndexedEntity.class );
		backendMock.verifyExpectationsMet();
	}
}
/*
* Copyright (c) 2011, 2023, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package jdk.graal.compiler.phases.common.inlining.walker;
import static jdk.graal.compiler.core.common.GraalOptions.Intrinsify;
import static jdk.graal.compiler.core.common.GraalOptions.MaximumRecursiveInlining;
import static jdk.graal.compiler.core.common.GraalOptions.MegamorphicInliningMinMethodProbability;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.BitSet;
import java.util.Iterator;
import java.util.LinkedList;
import org.graalvm.collections.EconomicSet;
import org.graalvm.collections.Equivalence;
import jdk.graal.compiler.core.common.NumUtil;
import jdk.graal.compiler.core.common.type.ObjectStamp;
import jdk.graal.compiler.debug.Assertions;
import jdk.graal.compiler.debug.CounterKey;
import jdk.graal.compiler.debug.DebugContext;
import jdk.graal.compiler.debug.GraalError;
import jdk.graal.compiler.graph.Graph;
import jdk.graal.compiler.graph.Node;
import jdk.graal.compiler.nodes.CallTargetNode;
import jdk.graal.compiler.nodes.CallTargetNode.InvokeKind;
import jdk.graal.compiler.nodes.Invoke;
import jdk.graal.compiler.nodes.NodeView;
import jdk.graal.compiler.nodes.ParameterNode;
import jdk.graal.compiler.nodes.StructuredGraph;
import jdk.graal.compiler.nodes.ValueNode;
import jdk.graal.compiler.nodes.java.AbstractNewObjectNode;
import jdk.graal.compiler.nodes.java.MethodCallTargetNode;
import jdk.graal.compiler.nodes.spi.CoreProviders;
import jdk.graal.compiler.nodes.virtual.AllocatedObjectNode;
import jdk.graal.compiler.nodes.virtual.VirtualObjectNode;
import jdk.graal.compiler.options.OptionValues;
import jdk.graal.compiler.phases.OptimisticOptimizations;
import jdk.graal.compiler.phases.common.CanonicalizerPhase;
import jdk.graal.compiler.phases.common.inlining.InliningUtil;
import jdk.graal.compiler.phases.common.inlining.info.AssumptionInlineInfo;
import jdk.graal.compiler.phases.common.inlining.info.ExactInlineInfo;
import jdk.graal.compiler.phases.common.inlining.info.InlineInfo;
import jdk.graal.compiler.phases.common.inlining.info.MultiTypeGuardInlineInfo;
import jdk.graal.compiler.phases.common.inlining.info.TypeGuardInlineInfo;
import jdk.graal.compiler.phases.common.inlining.info.elem.Inlineable;
import jdk.graal.compiler.phases.common.inlining.info.elem.InlineableGraph;
import jdk.graal.compiler.phases.common.inlining.policy.InliningPolicy;
import jdk.graal.compiler.phases.tiers.HighTierContext;
import jdk.vm.ci.code.BailoutException;
import jdk.vm.ci.meta.Assumptions.AssumptionResult;
import jdk.vm.ci.meta.JavaTypeProfile;
import jdk.vm.ci.meta.ResolvedJavaMethod;
import jdk.vm.ci.meta.ResolvedJavaType;
import jdk.vm.ci.meta.SpeculationLog;
/**
* <p>
* The space of inlining decisions is explored depth-first with the help of a stack realized by
* {@link InliningData}. At any point in time, the topmost element of that stack consists of:
* <ul>
* <li>the callsite under consideration is tracked as a {@link MethodInvocation}.</li>
* <li>one or more {@link CallsiteHolder}s, all of them associated to the callsite above. Why more
* than one? Depending on the type-profile for the receiver more than one concrete method may be
* feasible target.</li>
* </ul>
* </p>
*
* <p>
* The bottom element in the stack consists of:
* <ul>
* <li>a single {@link MethodInvocation} (the {@link MethodInvocation#isRoot root} one, ie the
* unknown caller of the root graph)</li>
* <li>a single {@link CallsiteHolder} (the root one, for the method on which inlining was called)
* </li>
* </ul>
* </p>
*
* @see #moveForward()
*/
public class InliningData {
    // Counters
    private static final CounterKey counterInliningPerformed = DebugContext.counter("InliningPerformed");
    private static final CounterKey counterInliningRuns = DebugContext.counter("InliningRuns");
    private static final CounterKey counterInliningConsidered = DebugContext.counter("InliningConsidered");
    /**
     * Call hierarchy from outer most call (i.e., compilation unit) to inner most callee.
     */
    private final ArrayDeque<CallsiteHolder> graphQueue = new ArrayDeque<>();
    // Stack of callsites under consideration; the bottom element is the root invocation.
    private final ArrayDeque<MethodInvocation> invocationQueue = new ArrayDeque<>();
    private final HighTierContext context;
    // Upper bound on target methods considered for a single polymorphic callsite.
    private final int maxMethodPerInlining;
    private final CanonicalizerPhase canonicalizer;
    private final InliningPolicy inliningPolicy;
    private final StructuredGraph rootGraph;
    private final DebugContext debug;
    // Budget for graphQueue.size(); raised/lowered in pushInvocationAndGraphs/popInvocation.
    private int maxGraphs;
public InliningData(StructuredGraph rootGraph, HighTierContext context, int maxMethodPerInlining, CanonicalizerPhase canonicalizer, InliningPolicy inliningPolicy, LinkedList<Invoke> rootInvokes) {
assert rootGraph != null;
this.context = context;
this.maxMethodPerInlining = maxMethodPerInlining;
this.canonicalizer = canonicalizer;
this.inliningPolicy = inliningPolicy;
this.maxGraphs = 1;
this.rootGraph = rootGraph;
this.debug = rootGraph.getDebug();
invocationQueue.push(new MethodInvocation(null, 1.0, 1.0, null));
graphQueue.push(new CallsiteHolderExplorable(rootGraph, 1.0, 1.0, null, rootInvokes));
}
public static boolean isFreshInstantiation(ValueNode arg) {
return (arg instanceof AbstractNewObjectNode) || (arg instanceof AllocatedObjectNode) || (arg instanceof VirtualObjectNode);
}
    /**
     * Checks conditions on the inlining target itself, as opposed to the callsite.
     *
     * @param method the candidate target method (may be {@code null} if unresolved)
     * @param invoke the callsite at which {@code method} would be inlined
     * @return a human-readable reason why inlining is not possible, or {@code null} if all
     *         target-side conditions hold
     */
    private String checkTargetConditionsHelper(ResolvedJavaMethod method, Invoke invoke) {
        OptionValues options = rootGraph.getOptions();
        if (method == null) {
            return "the method is not resolved";
        } else if (method.isNative() && !(Intrinsify.getValue(options) &&
                        context.getReplacements().getInlineSubstitution(method, invoke.bci(), invoke.isInOOMETry(), invoke.getInlineControl(), rootGraph.trackNodeSourcePosition(), null,
                                        rootGraph.allowAssumptions(), options) != null)) {
            // We have conditional intrinsic, e.g., String.intern, which may not have inlineable
            // graph depending on the context. The getInlineSubstitution test ensures the inlineable
            // graph is present.
            return "it is a non-intrinsic native method";
        } else if (method.isAbstract()) {
            return "it is an abstract method";
        } else if (!method.getDeclaringClass().isInitialized()) {
            return "the method's class is not initialized";
        } else if (!method.canBeInlined()) {
            return "it is marked non-inlinable";
        } else if (countRecursiveInlining(method) > MaximumRecursiveInlining.getValue(options)) {
            return "it exceeds the maximum recursive inlining depth";
        } else {
            // Inlining a callee compiled with more optimistic assumptions than the caller could
            // introduce deoptimizations the caller is not prepared for.
            if (new OptimisticOptimizations(rootGraph.getProfilingInfo(method), options).lessOptimisticThan(context.getOptimisticOptimizations())) {
                return "the callee uses less optimistic optimizations than caller";
            } else {
                return null;
            }
        }
    }
private boolean checkTargetConditions(Invoke invoke, ResolvedJavaMethod method) {
final String failureMessage = checkTargetConditionsHelper(method, invoke);
if (failureMessage == null) {
return true;
} else {
InliningUtil.traceNotInlinedMethod(invoke, inliningDepth(), method, failureMessage);
invoke.asNode().graph().notifyInliningDecision(invoke, false, "InliningPhase", null, null, null, invoke.getTargetMethod(), failureMessage);
return false;
}
}
    /**
     * Determines if inlining is possible at the given invoke node, trying increasingly
     * speculative strategies: exact binding, exact receiver type, array receiver, class
     * hierarchy assumptions, and finally profile-guided type checks.
     *
     * @param invoke the invoke that should be inlined
     * @return an instance of InlineInfo, or null if no inlining is possible at the given invoke
     */
    private InlineInfo getInlineInfo(Invoke invoke) {
        final String failureMessage = InliningUtil.checkInvokeConditions(invoke);
        if (failureMessage != null) {
            InliningUtil.logNotInlinedMethod(invoke, failureMessage);
            return null;
        }
        MethodCallTargetNode callTarget = (MethodCallTargetNode) invoke.callTarget();
        ResolvedJavaMethod targetMethod = callTarget.targetMethod();
        InvokeKind invokeKind = callTarget.invokeKind();
        // Direct calls (and statically bindable targets) need no receiver-type analysis.
        if (invokeKind == CallTargetNode.InvokeKind.Special || invokeKind == CallTargetNode.InvokeKind.Static || targetMethod.canBeStaticallyBound()) {
            return getExactInlineInfo(invoke, targetMethod);
        }
        assert invokeKind.isIndirect();
        ResolvedJavaType holder = targetMethod.getDeclaringClass();
        if (!(callTarget.receiver().stamp(NodeView.DEFAULT) instanceof ObjectStamp)) {
            return null;
        }
        ObjectStamp receiverStamp = (ObjectStamp) callTarget.receiver().stamp(NodeView.DEFAULT);
        if (receiverStamp.alwaysNull()) {
            // Don't inline if receiver is known to be null
            return null;
        }
        ResolvedJavaType contextType = invoke.getContextType();
        if (receiverStamp.type() != null) {
            // the invoke target might be more specific than the holder (happens after inlining:
            // parameters lose their declared type...)
            ResolvedJavaType receiverType = receiverStamp.type();
            if (receiverType != null && holder.isAssignableFrom(receiverType)) {
                holder = receiverType;
                if (receiverStamp.isExactType()) {
                    // An exact receiver type makes the indirect call effectively direct.
                    assert targetMethod.getDeclaringClass().isAssignableFrom(holder) : holder + " subtype of " + targetMethod.getDeclaringClass() + " for " + targetMethod;
                    ResolvedJavaMethod resolvedMethod = holder.resolveConcreteMethod(targetMethod, contextType);
                    if (resolvedMethod != null) {
                        return getExactInlineInfo(invoke, resolvedMethod);
                    }
                }
            }
        }
        if (holder.isArray()) {
            // arrays can be treated as Objects
            ResolvedJavaMethod resolvedMethod = holder.resolveConcreteMethod(targetMethod, contextType);
            if (resolvedMethod != null) {
                return getExactInlineInfo(invoke, resolvedMethod);
            }
        }
        if (invokeKind != InvokeKind.Interface) {
            // Class-hierarchy-based speculation: a single concrete subtype ...
            AssumptionResult<ResolvedJavaType> leafConcreteSubtype = holder.findLeafConcreteSubtype();
            if (leafConcreteSubtype != null) {
                ResolvedJavaMethod resolvedMethod = leafConcreteSubtype.getResult().resolveConcreteMethod(targetMethod, contextType);
                if (resolvedMethod != null && leafConcreteSubtype.canRecordTo(callTarget.graph().getAssumptions())) {
                    return getAssumptionInlineInfo(invoke, resolvedMethod, leafConcreteSubtype);
                }
            }
            // ... or a single concrete implementation of the target method.
            AssumptionResult<ResolvedJavaMethod> concrete = holder.findUniqueConcreteMethod(targetMethod);
            if (concrete != null && concrete.canRecordTo(callTarget.graph().getAssumptions())) {
                return getAssumptionInlineInfo(invoke, concrete.getResult(), concrete);
            }
        }
        // type check based inlining
        return getTypeCheckedInlineInfo(invoke, targetMethod);
    }
    /**
     * Builds inline info for an indirect call using the receiver type profile: a
     * {@link TypeGuardInlineInfo} for the monomorphic case, a {@link MultiTypeGuardInlineInfo}
     * for the poly-/megamorphic case, or {@code null} when the profile or the optimistic
     * optimization settings rule inlining out.
     */
    private InlineInfo getTypeCheckedInlineInfo(Invoke invoke, ResolvedJavaMethod targetMethod) {
        StructuredGraph graph = invoke.asNode().graph();
        JavaTypeProfile typeProfile = ((MethodCallTargetNode) invoke.callTarget()).getTypeProfile();
        if (typeProfile == null) {
            InliningUtil.traceNotInlinedMethod(invoke, inliningDepth(), targetMethod, "no type profile exists");
            graph.notifyInliningDecision(invoke, false, "InliningPhase", null, null, null, invoke.getTargetMethod(), "no type profile exists");
            return null;
        }
        JavaTypeProfile.ProfiledType[] ptypes = typeProfile.getTypes();
        if (ptypes == null || ptypes.length <= 0) {
            InliningUtil.traceNotInlinedMethod(invoke, inliningDepth(), targetMethod, "no types in profile");
            graph.notifyInliningDecision(invoke, false, "InliningPhase", null, null, null, invoke.getTargetMethod(), "no types in profile");
            return null;
        }
        ResolvedJavaType contextType = invoke.getContextType();
        double notRecordedTypeProbability = typeProfile.getNotRecordedProbability();
        final OptimisticOptimizations optimisticOpts = context.getOptimisticOptimizations();
        OptionValues options = invoke.asNode().getOptions();
        boolean speculationFailed = false;
        SpeculationLog speculationLog = graph.getSpeculationLog();
        SpeculationLog.Speculation speculation = SpeculationLog.NO_SPECULATION;
        // Only speculate on the profile when it covers all observed receiver types.
        if (speculationLog != null && notRecordedTypeProbability == 0) {
            SpeculationLog.SpeculationReason speculationReason = InliningUtil.createSpeculation(invoke, typeProfile);
            if (speculationLog.maySpeculate(speculationReason)) {
                speculation = speculationLog.speculate(speculationReason);
            } else {
                speculationFailed = true;
            }
        }
        if (ptypes.length == 1 && notRecordedTypeProbability == 0 && !speculationFailed) {
            // Monomorphic case: a single profiled type with full coverage.
            if (!optimisticOpts.inlineMonomorphicCalls(options)) {
                InliningUtil.traceNotInlinedMethod(invoke, inliningDepth(), targetMethod, "inlining monomorphic calls is disabled");
                graph.notifyInliningDecision(invoke, false, "InliningPhase", null, null, null, invoke.getTargetMethod(), "inlining monomorphic calls is disabled");
                return null;
            }
            ResolvedJavaType type = ptypes[0].getType();
            assert type.isArray() || type.isConcrete();
            ResolvedJavaMethod concrete = type.resolveConcreteMethod(targetMethod, contextType);
            if (!checkTargetConditions(invoke, concrete)) {
                return null;
            }
            return new TypeGuardInlineInfo(invoke, concrete, type, speculation);
        } else {
            invoke.setPolymorphic(true);
            if (!optimisticOpts.inlinePolymorphicCalls(options) && notRecordedTypeProbability == 0) {
                InliningUtil.traceNotInlinedMethod(invoke, inliningDepth(), targetMethod, "inlining polymorphic calls is disabled (%d types)", ptypes.length);
                graph.notifyInliningDecision(invoke, false, "InliningPhase", null, null, null, invoke.getTargetMethod(), "inlining polymorphic calls is disabled (%d types)", ptypes.length);
                return null;
            }
            if (!optimisticOpts.inlineMegamorphicCalls(options) && notRecordedTypeProbability > 0) {
                // due to filtering impossible types, notRecordedTypeProbability can be > 0 although
                // the number of types is lower than what can be recorded in a type profile
                InliningUtil.traceNotInlinedMethod(invoke, inliningDepth(), targetMethod, "inlining megamorphic calls is disabled (%d types, %f %% not recorded types)", ptypes.length,
                                notRecordedTypeProbability * 100);
                graph.notifyInliningDecision(invoke, false, "InliningPhase", null, null, null,
                                invoke.getTargetMethod(), "inlining megamorphic calls is disabled (%d types, %f %% not recorded types)", ptypes.length, notRecordedTypeProbability);
                return null;
            }
            // Find unique methods and their probabilities.
            ArrayList<ResolvedJavaMethod> concreteMethods = new ArrayList<>();
            ArrayList<Double> concreteMethodsProbabilities = new ArrayList<>();
            for (int i = 0; i < ptypes.length; i++) {
                ResolvedJavaMethod concrete = ptypes[i].getType().resolveConcreteMethod(targetMethod, contextType);
                if (concrete == null) {
                    InliningUtil.traceNotInlinedMethod(invoke, inliningDepth(), targetMethod, "could not resolve method");
                    graph.notifyInliningDecision(invoke, false, "InliningPhase", null, null, null, invoke.getTargetMethod(), "could not resolve method");
                    return null;
                }
                int index = concreteMethods.indexOf(concrete);
                double curProbability = ptypes[i].getProbability();
                if (index < 0) {
                    index = concreteMethods.size();
                    concreteMethods.add(concrete);
                    concreteMethodsProbabilities.add(curProbability);
                } else {
                    // Several profiled types may resolve to the same concrete method.
                    concreteMethodsProbabilities.set(index, concreteMethodsProbabilities.get(index) + curProbability);
                }
            }
            // Clear methods that fall below the threshold.
            if (notRecordedTypeProbability > 0) {
                ArrayList<ResolvedJavaMethod> newConcreteMethods = new ArrayList<>();
                ArrayList<Double> newConcreteMethodsProbabilities = new ArrayList<>();
                for (int i = 0; i < concreteMethods.size(); ++i) {
                    if (concreteMethodsProbabilities.get(i) >= MegamorphicInliningMinMethodProbability.getValue(options)) {
                        newConcreteMethods.add(concreteMethods.get(i));
                        newConcreteMethodsProbabilities.add(concreteMethodsProbabilities.get(i));
                    }
                }
                if (newConcreteMethods.isEmpty()) {
                    // No method left that is worth inlining.
                    InliningUtil.traceNotInlinedMethod(invoke, inliningDepth(), targetMethod, "no methods remaining after filtering less frequent methods (%d methods previously)",
                                    concreteMethods.size());
                    graph.notifyInliningDecision(invoke, false, "InliningPhase", null, null, null,
                                    invoke.getTargetMethod(), "no methods remaining after filtering less frequent methods (%d methods previously)", concreteMethods.size());
                    return null;
                }
                concreteMethods = newConcreteMethods;
                concreteMethodsProbabilities = newConcreteMethodsProbabilities;
            }
            if (concreteMethods.size() > maxMethodPerInlining) {
                InliningUtil.traceNotInlinedMethod(invoke, inliningDepth(), targetMethod, "polymorphic call with more than %d target methods", maxMethodPerInlining);
                graph.notifyInliningDecision(invoke, false, "InliningPhase", null, null, null, invoke.getTargetMethod(), "polymorphic call with more than %d target methods", maxMethodPerInlining);
                return null;
            }
            // Clean out types whose methods are no longer available.
            ArrayList<JavaTypeProfile.ProfiledType> usedTypes = new ArrayList<>();
            ArrayList<Integer> typesToConcretes = new ArrayList<>();
            for (JavaTypeProfile.ProfiledType type : ptypes) {
                ResolvedJavaMethod concrete = type.getType().resolveConcreteMethod(targetMethod, contextType);
                int index = concreteMethods.indexOf(concrete);
                if (index == -1) {
                    // Probability of filtered-out types is folded into the not-recorded share.
                    notRecordedTypeProbability += type.getProbability();
                } else {
                    assert type.getType().isArray() || !type.getType().isAbstract() : type + " " + concrete;
                    usedTypes.add(type);
                    typesToConcretes.add(index);
                }
            }
            if (usedTypes.isEmpty()) {
                // No type left that is worth checking for.
                InliningUtil.traceNotInlinedMethod(invoke, inliningDepth(), targetMethod, "no types remaining after filtering less frequent types (%d types previously)", ptypes.length);
                graph.notifyInliningDecision(invoke, false, "InliningPhase", null, null, null, invoke.getTargetMethod(), "no types remaining after filtering less frequent types (%d types previously)",
                                ptypes.length);
                return null;
            }
            for (ResolvedJavaMethod concrete : concreteMethods) {
                if (!checkTargetConditions(invoke, concrete)) {
                    InliningUtil.traceNotInlinedMethod(invoke, inliningDepth(), targetMethod, "it is a polymorphic method call and at least one invoked method cannot be inlined");
                    graph.notifyInliningDecision(invoke, false, "InliningPhase", null, null, null,
                                    invoke.getTargetMethod(), "it is a polymorphic method call and at least one invoked method cannot be inlined");
                    return null;
                }
            }
            return new MultiTypeGuardInlineInfo(invoke, concreteMethods, usedTypes, typesToConcretes, notRecordedTypeProbability, speculationFailed, speculation);
        }
    }
private InlineInfo getAssumptionInlineInfo(Invoke invoke, ResolvedJavaMethod concrete, AssumptionResult<?> takenAssumption) {
assert concrete.isConcrete();
if (checkTargetConditions(invoke, concrete)) {
return new AssumptionInlineInfo(invoke, concrete, takenAssumption);
}
return null;
}
private InlineInfo getExactInlineInfo(Invoke invoke, ResolvedJavaMethod targetMethod) {
assert targetMethod.isConcrete();
if (checkTargetConditions(invoke, targetMethod)) {
return new ExactInlineInfo(invoke, targetMethod);
}
return null;
}
    /**
     * Performs the actual inlining of {@code calleeInvocation} into the caller's graph, then
     * incrementally canonicalizes the affected nodes and re-queues any invokes that
     * canonicalization created.
     */
    @SuppressWarnings("try")
    private void doInline(CallsiteHolderExplorable callerCallsiteHolder, MethodInvocation calleeInvocation, String reason) {
        StructuredGraph callerGraph = callerCallsiteHolder.graph();
        InlineInfo calleeInfo = calleeInvocation.callee();
        try {
            try (DebugContext.Scope scope = debug.scope("doInline", callerGraph)) {
                // Seed incremental canonicalization with the invoke's usages plus the nodes
                // that end up using the callee's parameters.
                EconomicSet<Node> canonicalizedNodes = EconomicSet.create(Equivalence.IDENTITY);
                canonicalizedNodes.addAll(calleeInfo.invoke().asNode().usages());
                EconomicSet<Node> parameterUsages = calleeInfo.inline(context.getProviders(), reason);
                canonicalizedNodes.addAll(parameterUsages);
                counterInliningRuns.increment(debug);
                debug.dump(DebugContext.DETAILED_LEVEL, callerGraph, "after %s", calleeInfo);
                Graph.Mark markBeforeCanonicalization = callerGraph.getMark();
                canonicalizer.applyIncremental(callerGraph, context, canonicalizedNodes);
                // process invokes that are possibly created during canonicalization
                for (Node newNode : callerGraph.getNewNodes(markBeforeCanonicalization)) {
                    if (newNode instanceof Invoke) {
                        callerCallsiteHolder.pushInvoke((Invoke) newNode);
                    }
                }
                callerCallsiteHolder.computeProbabilities();
                counterInliningPerformed.increment(debug);
            }
        } catch (BailoutException bailout) {
            // Catch-clause order is deliberate: bailouts pass through unchanged, ...
            throw bailout;
        } catch (AssertionError | RuntimeException e) {
            // ... unexpected failures are wrapped with the callee as context, ...
            throw new GraalError(e).addContext(calleeInfo.toString());
        } catch (GraalError e) {
            // ... already-typed Graal errors just gain context, ...
            throw e.addContext(calleeInfo.toString());
        } catch (Throwable e) {
            // ... and anything else is routed through the debug handler.
            throw debug.handle(e);
        }
    }
/**
*
* This method attempts:
* <ol>
* <li>to inline at the callsite given by <code>calleeInvocation</code>, where that callsite
* belongs to the {@link CallsiteHolderExplorable} at the top of the {@link #graphQueue}
* maintained in this class.</li>
* <li>otherwise, to devirtualize the callsite in question.</li>
* </ol>
*
* @return true iff inlining was actually performed
*/
private boolean tryToInline(MethodInvocation calleeInvocation, int inliningDepth) {
CallsiteHolderExplorable callerCallsiteHolder = (CallsiteHolderExplorable) currentGraph();
InlineInfo calleeInfo = calleeInvocation.callee();
assert callerCallsiteHolder.containsInvoke(calleeInfo.invoke());
counterInliningConsidered.increment(debug);
InliningPolicy.Decision decision = inliningPolicy.isWorthInlining(context.getReplacements(), calleeInvocation, calleeInfo, inliningDepth, true);
if (decision.shouldInline()) {
doInline(callerCallsiteHolder, calleeInvocation, decision.getReason());
return true;
}
if (context.getOptimisticOptimizations().devirtualizeInvokes(calleeInfo.graph().getOptions())) {
calleeInfo.tryToDevirtualizeInvoke(context.getProviders());
}
return false;
}
/**
* This method picks one of the callsites belonging to the current
* {@link CallsiteHolderExplorable}. Provided the callsite qualifies to be analyzed for
* inlining, this method prepares a new stack top in {@link InliningData} for such callsite,
* which comprises:
* <ul>
* <li>preparing a summary of feasible targets, ie preparing an {@link InlineInfo}</li>
* <li>based on it, preparing the stack top proper which consists of:</li>
* <ul>
* <li>one {@link MethodInvocation}</li>
* <li>a {@link CallsiteHolder} for each feasible target</li>
* </ul>
* </ul>
*
* <p>
* The thus prepared "stack top" is needed by {@link #moveForward()} to explore the space of
* inlining decisions (each decision one of: backtracking, delving, inlining).
* </p>
*
* <p>
* The {@link InlineInfo} used to get things rolling is kept around in the
* {@link MethodInvocation}, it will be needed in case of inlining, see
* {@link InlineInfo#inline(CoreProviders, String)}
* </p>
*/
private void processNextInvoke() {
CallsiteHolderExplorable callsiteHolder = (CallsiteHolderExplorable) currentGraph();
Invoke invoke = callsiteHolder.popInvoke();
InlineInfo info = getInlineInfo(invoke);
if (info != null) {
info.populateInlinableElements(context, currentGraph().graph(), canonicalizer, rootGraph.getOptions());
double invokeProbability = callsiteHolder.invokeProbability(invoke);
double invokeRelevance = callsiteHolder.invokeRelevance(invoke);
MethodInvocation methodInvocation = new MethodInvocation(info, invokeProbability, invokeRelevance, freshlyInstantiatedArguments(invoke, callsiteHolder.getFixedParams()));
pushInvocationAndGraphs(methodInvocation);
}
}
/**
* Gets the freshly instantiated arguments.
* <p>
* A freshly instantiated argument is either:
* <uL>
* <li>an {@link InliningData#isFreshInstantiation(ValueNode)}</li>
* <li>a fixed-param, ie a {@link ParameterNode} receiving a freshly instantiated argument</li>
* </uL>
* </p>
*
* @return the positions of freshly instantiated arguments in the argument list of the
* <code>invoke</code>, or null if no such positions exist.
*/
public static BitSet freshlyInstantiatedArguments(Invoke invoke, EconomicSet<ParameterNode> fixedParams) {
assert fixedParams != null;
assert paramsAndInvokeAreInSameGraph(invoke, fixedParams);
BitSet result = null;
int argIdx = 0;
for (ValueNode arg : invoke.callTarget().arguments()) {
assert arg != null;
if (isFreshInstantiation(arg) || (arg instanceof ParameterNode && fixedParams.contains((ParameterNode) arg))) {
if (result == null) {
result = new BitSet();
}
result.set(argIdx);
}
argIdx++;
}
return result;
}
private static boolean paramsAndInvokeAreInSameGraph(Invoke invoke, EconomicSet<ParameterNode> fixedParams) {
if (fixedParams.isEmpty()) {
return true;
}
for (ParameterNode p : fixedParams) {
if (p.graph() != invoke.asNode().graph()) {
return false;
}
}
return true;
}
    /** Number of graphs currently on the exploration stack. */
    public int graphCount() {
        return graphQueue.size();
    }
    /** True iff at least one graph remains to be explored. */
    public boolean hasUnprocessedGraphs() {
        return !graphQueue.isEmpty();
    }
    /** Returns the graph on top of the stack, i.e., the one currently being explored. */
    private CallsiteHolder currentGraph() {
        return graphQueue.peek();
    }
    /** Pops the top graph, checking the budget maintained via {@link #maxGraphs}. */
    private void popGraph() {
        graphQueue.pop();
        assert graphQueue.size() <= maxGraphs : Assertions.errorMessageContext("graphQueue", graphQueue, "maxGraphs", maxGraphs);
    }
    /** Pops {@code count} graphs at once, e.g., when abandoning the current invocation. */
    private void popGraphs(int count) {
        assert NumUtil.assertNonNegativeInt(count);
        for (int i = 0; i < count; i++) {
            graphQueue.pop();
        }
    }
private static final Object[] NO_CONTEXT = {};
/**
* Gets the call hierarchy of this inlining from outer most call to inner most callee.
*/
private Object[] inliningContext() {
if (!debug.isDumpEnabled(DebugContext.INFO_LEVEL)) {
return NO_CONTEXT;
}
Object[] result = new Object[graphQueue.size()];
int i = 0;
for (CallsiteHolder g : graphQueue) {
result[i++] = g.method();
}
return result;
}
    /** Returns the invocation on top of the invocation stack. */
    private MethodInvocation currentInvocation() {
        return invocationQueue.peekFirst();
    }
    /**
     * Pushes {@code methodInvocation} along with one {@link CallsiteHolder} per feasible target
     * method onto the exploration stacks, raising the {@link #maxGraphs} budget accordingly.
     */
    private void pushInvocationAndGraphs(MethodInvocation methodInvocation) {
        invocationQueue.addFirst(methodInvocation);
        InlineInfo info = methodInvocation.callee();
        maxGraphs += info.numberOfMethods();
        assert graphQueue.size() <= maxGraphs : Assertions.errorMessageContext("graphQueue", graphQueue, "maxGraphs", maxGraphs);
        for (int i = 0; i < info.numberOfMethods(); i++) {
            CallsiteHolder ch = methodInvocation.buildCallsiteHolderForElement(i);
            assert !contains(ch.graph());
            graphQueue.push(ch);
            assert graphQueue.size() <= maxGraphs : Assertions.errorMessageContext("graphQueue", graphQueue, "maxGraphs", maxGraphs);
        }
    }
    /**
     * Pops the top invocation, lowering {@link #maxGraphs} by the number of target methods it
     * contributed in {@link #pushInvocationAndGraphs}.
     */
    private void popInvocation() {
        maxGraphs -= invocationQueue.peekFirst().callee().numberOfMethods();
        assert graphQueue.size() <= maxGraphs : Assertions.errorMessageContext("graphQueue", graphQueue, "maxGraphs", maxGraphs);
        invocationQueue.removeFirst();
    }
public int countRecursiveInlining(ResolvedJavaMethod method) {
int count = 0;
for (CallsiteHolder callsiteHolder : graphQueue) {
if (method.equals(callsiteHolder.method())) {
count++;
}
}
return count;
}
public int inliningDepth() {
assert invocationQueue.size() > 0 : this;
return invocationQueue.size() - 1;
}
@Override
public String toString() {
StringBuilder result = new StringBuilder("Invocations: ");
for (MethodInvocation invocation : invocationQueue) {
if (invocation.callee() != null) {
result.append(invocation.callee().numberOfMethods());
result.append("x ");
result.append(invocation.callee().invoke());
result.append("; ");
}
}
result.append("\nGraphs: ");
for (CallsiteHolder graph : graphQueue) {
result.append(graph.graph());
result.append("; ");
}
return result.toString();
}
private boolean contains(StructuredGraph graph) {
assert graph != null;
for (CallsiteHolder info : graphQueue) {
if (info.graph() == graph) {
return true;
}
}
return false;
}
    /**
     * <p>
     * The stack realized by {@link InliningData} grows and shrinks as choices are made among the
     * alternatives below:
     * <ol>
     * <li>not worth inlining: pop stack top, which comprises:
     * <ul>
     * <li>pop any remaining graphs not yet delved into</li>
     * <li>pop the current invocation</li>
     * </ul>
     * </li>
     * <li>{@link #processNextInvoke() delve} into one of the callsites hosted in the current graph,
     * such callsite is explored next by {@link #moveForward()}</li>
     * <li>{@link #tryToInline(MethodInvocation, int) try to inline}: move past the current graph
     * (remove it from the topmost element).
     * <ul>
     * <li>If that was the last one then {@link #tryToInline(MethodInvocation, int) try to inline}
     * the callsite under consideration (ie, the "current invocation").</li>
     * <li>Whether inlining occurs or not, that callsite is removed from the top of
     * {@link InliningData} .</li>
     * </ul>
     * </li>
     * </ol>
     * </p>
     *
     * <p>
     * Some facts about the alternatives above:
     * <ul>
     * <li>the first step amounts to backtracking, the 2nd one to depth-search, and the 3rd one also
     * involves backtracking (however possibly after inlining).</li>
     * <li>the choice of abandon-and-backtrack or delve-into depends on
     * {@link InliningPolicy#isWorthInlining} and {@link InliningPolicy#continueInlining}.</li>
     * <li>the 3rd choice is picked whenever none of the previous choices are made</li>
     * </ul>
     * </p>
     *
     * @return true iff inlining was actually performed
     */
    @SuppressWarnings("try")
    public boolean moveForward() {
        final MethodInvocation currentInvocation = currentInvocation();
        // Alternative 1: the policy says the current callsite is not worth pursuing.
        final boolean backtrack = (!currentInvocation.isRoot() &&
                        !inliningPolicy.isWorthInlining(context.getReplacements(), currentInvocation, currentInvocation.callee(), inliningDepth(), false).shouldInline());
        if (backtrack) {
            int remainingGraphs = currentInvocation.totalGraphs() - currentInvocation.processedGraphs();
            assert NumUtil.assertPositiveInt(remainingGraphs);
            popGraphs(remainingGraphs);
            popInvocation();
            return false;
        }
        // Alternative 2: depth-first descent into a callsite of the current graph.
        final boolean delve = currentGraph().hasRemainingInvokes() && inliningPolicy.continueInlining(currentGraph().graph());
        if (delve) {
            processNextInvoke();
            return false;
        }
        // Alternative 3: done with the current graph; possibly inline the current invocation.
        popGraph();
        if (currentInvocation.isRoot()) {
            return false;
        }
        // try to inline
        assert currentInvocation.callee().invoke().asNode().isAlive();
        currentInvocation.incrementProcessedGraphs();
        if (currentInvocation.processedGraphs() == currentInvocation.totalGraphs()) {
            /*
             * "all of currentInvocation's graphs processed" amounts to
             * "all concrete methods that come into question already had the callees they contain analyzed for inlining"
             */
            popInvocation();
            try (DebugContext.Scope s = debug.scope("Inlining", inliningContext())) {
                if (tryToInline(currentInvocation, inliningDepth() + 1)) {
                    // Report real progress only if we inline into the root graph
                    return currentGraph().graph() == rootGraph;
                }
                return false;
            } catch (Throwable e) {
                throw debug.handle(e);
            }
        }
        return false;
    }
    /**
     * Checks an invariant that {@link #moveForward()} must maintain: "the top invocation records
     * how many concrete target methods (for it) remain on the {@link #graphQueue}; those targets
     * 'belong' to the current invocation in question.
     */
    private boolean topGraphsForTopInvocation() {
        if (invocationQueue.isEmpty()) {
            assert graphQueue.isEmpty();
            return true;
        }
        if (currentInvocation().isRoot()) {
            // Only the root graph may remain below the root invocation.
            if (!graphQueue.isEmpty()) {
                assert graphQueue.size() == 1 : graphQueue;
            }
            return true;
        }
        final int remainingGraphs = currentInvocation().totalGraphs() - currentInvocation().processedGraphs();
        final Iterator<CallsiteHolder> iter = graphQueue.iterator();
        // Walk the unprocessed targets from the top of the stack; each queued holder must match
        // the corresponding inlineable graph recorded in the top invocation.
        for (int i = (remainingGraphs - 1); i >= 0; i--) {
            if (!iter.hasNext()) {
                assert false;
                return false;
            }
            CallsiteHolder queuedTargetCH = iter.next();
            Inlineable targetIE = currentInvocation().callee().inlineableElementAt(i);
            InlineableGraph targetIG = (InlineableGraph) targetIE;
            assert queuedTargetCH.method().equals(targetIG.getGraph().method());
        }
        return true;
    }
    /**
     * This method checks invariants for this class. Named after shorthand for "internal
     * representation is ok".
     */
    public boolean repOK() {
        assert topGraphsForTopInvocation();
        return true;
    }
}
|
apache/harmony | 35,774 | classlib/modules/math/src/test/java/tests/api/java/math/BigDecimalTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package tests.api.java.math;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.math.MathContext;
import java.math.RoundingMode;
public class BigDecimalTest extends junit.framework.TestCase {
    // Shared fixture values: unscaled integer parts reused by several constructor
    // and arithmetic tests below.
    BigInteger value = new BigInteger("12345908");
    BigInteger value2 = new BigInteger("12334560000");
/**
* @tests java.math.BigDecimal#BigDecimal(java.math.BigInteger)
*/
public void test_ConstructorLjava_math_BigInteger() {
BigDecimal big = new BigDecimal(value);
assertTrue("the BigDecimal value is not initialized properly", big
.unscaledValue().equals(value)
&& big.scale() == 0);
}
/**
* @tests java.math.BigDecimal#BigDecimal(java.math.BigInteger, int)
*/
public void test_ConstructorLjava_math_BigIntegerI() {
BigDecimal big = new BigDecimal(value2, 5);
assertTrue("the BigDecimal value is not initialized properly", big
.unscaledValue().equals(value2)
&& big.scale() == 5);
assertTrue("the BigDecimal value is not represented properly", big
.toString().equals("123345.60000"));
}
/**
* @tests java.math.BigDecimal#BigDecimal(double)
*/
public void test_ConstructorD() {
BigDecimal big = new BigDecimal(123E04);
assertTrue(
"the BigDecimal value taking a double argument is not initialized properly",
big.toString().equals("1230000"));
big = new BigDecimal(1.2345E-12);
assertTrue("the double representation is not correct", big
.doubleValue() == 1.2345E-12);
big = new BigDecimal(-12345E-3);
assertTrue("the double representation is not correct", big
.doubleValue() == -12.345);
big = new BigDecimal(5.1234567897654321e138);
assertTrue("the double representation is not correct", big
.doubleValue() == 5.1234567897654321E138
&& big.scale() == 0);
big = new BigDecimal(0.1);
assertTrue(
"the double representation of 0.1 bigDecimal is not correct",
big.doubleValue() == 0.1);
big = new BigDecimal(0.00345);
assertTrue(
"the double representation of 0.00345 bigDecimal is not correct",
big.doubleValue() == 0.00345);
// regression test for HARMONY-2429
big = new BigDecimal(-0.0);
assertTrue(
"the double representation of -0.0 bigDecimal is not correct",
big.scale() == 0);
}
/**
* @tests java.math.BigDecimal#BigDecimal(java.lang.String)
*/
public void test_ConstructorLjava_lang_String() throws NumberFormatException {
BigDecimal big = new BigDecimal("345.23499600293850");
assertTrue("the BigDecimal value is not initialized properly", big
.toString().equals("345.23499600293850")
&& big.scale() == 14);
big = new BigDecimal("-12345");
assertTrue("the BigDecimal value is not initialized properly", big
.toString().equals("-12345")
&& big.scale() == 0);
big = new BigDecimal("123.");
assertTrue("the BigDecimal value is not initialized properly", big
.toString().equals("123")
&& big.scale() == 0);
new BigDecimal("1.234E02");
}
    /**
     * @tests java.math.BigDecimal#BigDecimal(java.lang.String)
     */
    public void test_constructor_String_plus_exp() {
        /*
         * BigDecimal does not support a + sign in the exponent when converting
         * from a String
         */
        // NOTE(review): despite the method name and the comment above, the literals below are
        // doubles, so these calls exercise BigDecimal(double), not BigDecimal(String) —
        // confirm the original intent of this test.
        new BigDecimal(+23e-0);
        new BigDecimal(-23e+0);
    }
/**
* @tests java.math.BigDecimal#BigDecimal(java.lang.String)
*/
public void test_constructor_String_empty() {
try {
new BigDecimal("");
fail("NumberFormatException expected");
} catch (NumberFormatException e) {
}
}
/**
* @tests java.math.BigDecimal#BigDecimal(java.lang.String)
*/
public void test_constructor_String_plus_minus_exp() {
try {
new BigDecimal("+35e+-2");
fail("NumberFormatException expected");
} catch (NumberFormatException e) {
}
try {
new BigDecimal("-35e-+2");
fail("NumberFormatException expected");
} catch (NumberFormatException e) {
}
}
/**
* @tests java.math.BigDecimal#BigDecimal(char[])
*/
public void test_constructor_CC_plus_minus_exp() {
try {
new BigDecimal("+35e+-2".toCharArray());
fail("NumberFormatException expected");
} catch (NumberFormatException e) {
}
try {
new BigDecimal("-35e-+2".toCharArray());
fail("NumberFormatException expected");
} catch (NumberFormatException e) {
}
}
/**
* @tests java.math.BigDecimal#abs()
*/
public void test_abs() {
BigDecimal big = new BigDecimal("-1234");
BigDecimal bigabs = big.abs();
assertTrue("the absolute value of -1234 is not 1234", bigabs.toString()
.equals("1234"));
big = new BigDecimal(new BigInteger("2345"), 2);
bigabs = big.abs();
assertTrue("the absolute value of 23.45 is not 23.45", bigabs
.toString().equals("23.45"));
}
/**
 * @tests java.math.BigDecimal#add(java.math.BigDecimal)
 */
public void test_addLjava_math_BigDecimal() {
    BigDecimal add1 = new BigDecimal("23.456");
    BigDecimal add2 = new BigDecimal("3849.235");
    BigDecimal sum = add1.add(add2);
    // add() keeps the larger of the two scales (3 here).
    assertTrue("the sum of 23.456 + 3849.235 is wrong", sum.unscaledValue()
            .toString().equals("3872691")
            && sum.scale() == 3);
    assertTrue("the sum of 23.456 + 3849.235 is not printed correctly", sum
            .toString().equals("3872.691"));
    // 12.34E02 is exactly 1234.0, so the double constructor is exact here.
    BigDecimal add3 = new BigDecimal(12.34E02D);
    assertTrue("the sum of 23.456 + 12.34E02 is not printed correctly",
            (add1.add(add3)).toString().equals("1257.456"));
}
/**
 * @tests java.math.BigDecimal#compareTo(java.math.BigDecimal)
 */
public void test_compareToLjava_math_BigDecimal() {
    // compareTo() ignores scale: 1.00 and 1.000000 are numerically equal.
    BigDecimal one = new BigDecimal("1.00");
    assertTrue("1.00 and 1.000000 should be equal",
            one.compareTo(new BigDecimal(1.000000D)) == 0);
    assertTrue("1.02 should be bigger than 1.00",
            new BigDecimal("1.02").compareTo(one) == 1);
    assertTrue("0.98 should be less than 1.00",
            new BigDecimal(0.98D).compareTo(one) == -1);
}
/**
 * @tests java.math.BigDecimal#divide(java.math.BigDecimal, int)
 */
public void test_divideLjava_math_BigDecimalI() {
    // NOTE(review): `value` is a shared BigInteger field declared elsewhere
    // in this class; the expectations below imply it holds 12345908.
    BigDecimal divd1 = new BigDecimal(value, 2);
    BigDecimal divd2 = new BigDecimal("2.335");
    // divide(divisor, roundingMode) keeps the dividend's scale.
    BigDecimal divd3 = divd1.divide(divd2, BigDecimal.ROUND_UP);
    assertTrue("123459.08/2.335 is not correct", divd3.toString().equals(
            "52873.27")
            && divd3.scale() == divd1.scale());
    assertTrue(
            "the unscaledValue representation of 123459.08/2.335 is not correct",
            divd3.unscaledValue().toString().equals("5287327"));
    divd2 = new BigDecimal(123.4D);
    divd3 = divd1.divide(divd2, BigDecimal.ROUND_DOWN);
    assertTrue("123459.08/123.4 is not correct", divd3.toString().equals(
            "1000.47")
            && divd3.scale() == 2);
    // Division by zero must raise ArithmeticException.
    divd2 = new BigDecimal(000D);
    try {
        divd1.divide(divd2, BigDecimal.ROUND_DOWN);
        fail("divide by zero is not caught");
    } catch (ArithmeticException e) {
        // expected
    }
}
/**
 * @tests java.math.BigDecimal#divide(java.math.BigDecimal, int, int)
 */
public void test_divideLjava_math_BigDecimalII() {
    // NOTE(review): `value2` is a shared BigInteger field declared elsewhere
    // in this class; the expectations below imply it holds 12334560000.
    BigDecimal divd1 = new BigDecimal(value2, 4);
    BigDecimal divd2 = new BigDecimal("0.0023");
    // divide(divisor, scale, roundingMode) uses the explicitly requested scale.
    BigDecimal divd3 = divd1.divide(divd2, 3, BigDecimal.ROUND_HALF_UP);
    assertTrue("1233456/0.0023 is not correct", divd3.toString().equals(
            "536285217.391")
            && divd3.scale() == 3);
    divd2 = new BigDecimal(1345.5E-02D);
    divd3 = divd1.divide(divd2, 0, BigDecimal.ROUND_DOWN);
    assertTrue(
            "1233456/13.455 is not correct or does not have the correct scale",
            divd3.toString().equals("91672") && divd3.scale() == 0);
    // Division by zero must raise ArithmeticException regardless of scale.
    divd2 = new BigDecimal(0000D);
    try {
        divd1.divide(divd2, 4, BigDecimal.ROUND_DOWN);
        fail("divide by zero is not caught");
    } catch (ArithmeticException e) {
        // expected
    }
}
/**
 * @tests java.math.BigDecimal#doubleValue()
 */
public void test_doubleValue() {
    BigDecimal bigDB = new BigDecimal(-1.234E-112);
    // Commenting out this part because it causes an endless loop (see HARMONY-319 and HARMONY-329)
    // assertTrue(
    // "the double representation of this BigDecimal is not correct",
    // bigDB.doubleValue() == -1.234E-112);
    // 5.00E-324 is in the subnormal range near Double.MIN_VALUE.
    bigDB = new BigDecimal(5.00E-324);
    assertTrue("the double representation of bigDecimal is not correct",
            bigDB.doubleValue() == 5.00E-324);
    bigDB = new BigDecimal(1.79E308);
    assertTrue("the double representation of bigDecimal is not correct",
            bigDB.doubleValue() == 1.79E308 && bigDB.scale() == 0);
    bigDB = new BigDecimal(-2.33E102);
    assertTrue(
            "the double representation of bigDecimal -2.33E102 is not correct",
            bigDB.doubleValue() == -2.33E102 && bigDB.scale() == 0);
    // Values outside the double range must convert to +/- infinity.
    bigDB = new BigDecimal(Double.MAX_VALUE);
    bigDB = bigDB.add(bigDB);
    assertTrue(
            "a + number out of the double range should return infinity",
            bigDB.doubleValue() == Double.POSITIVE_INFINITY);
    bigDB = new BigDecimal(-Double.MAX_VALUE);
    bigDB = bigDB.add(bigDB);
    assertTrue(
            "a - number out of the double range should return neg infinity",
            bigDB.doubleValue() == Double.NEGATIVE_INFINITY);
}
/**
 * @tests java.math.BigDecimal#equals(java.lang.Object)
 *
 * <p>Unlike compareTo(), equals() also compares scales, so numerically
 * equal values with different scales are NOT equal.
 */
public void test_equalsLjava_lang_Object() {
    // new BigDecimal(1.00D) has scale 0; "1.0" has scale 1.
    BigDecimal equal1 = new BigDecimal(1.00D);
    BigDecimal equal2 = new BigDecimal("1.0");
    assertFalse("1.00 and 1.0 should not be equal",
            equal1.equals(equal2));
    equal2 = new BigDecimal(1.01D);
    assertFalse("1.00 and 1.01 should not be equal",
            equal1.equals(equal2));
    equal2 = new BigDecimal("1.00");
    assertFalse("1.00D and 1.00 should not be equal",
            equal1.equals(equal2));
    // Same unscaled value and same scale, built two different ways.
    BigInteger val = new BigInteger("100");
    equal1 = new BigDecimal("1.00");
    equal2 = new BigDecimal(val, 2);
    assertTrue("1.00(string) and 1.00(bigInteger) should be equal", equal1
            .equals(equal2));
    equal1 = new BigDecimal(100D);
    equal2 = new BigDecimal("2.34576");
    assertFalse("100D and 2.34576 should not be equal", equal1
            .equals(equal2));
    // equals() against a non-BigDecimal object must be false.
    assertFalse("bigDecimal 100D does not equal string 23415", equal1
            .equals("23415"));
}
/**
 * @tests java.math.BigDecimal#floatValue()
 */
public void test_floatValue() {
    BigDecimal fl1 = new BigDecimal("234563782344567");
    assertTrue("the float representation of bigDecimal 234563782344567",
            fl1.floatValue() == 234563782344567f);
    BigDecimal fl2 = new BigDecimal(2.345E37);
    assertTrue("the float representation of bigDecimal 2.345E37", fl2
            .floatValue() == 2.345E37F);
    // -1.00E-44 is within the subnormal float range.
    fl2 = new BigDecimal(-1.00E-44);
    assertTrue("the float representation of bigDecimal -1.00E-44", fl2
            .floatValue() == -1.00E-44F);
    fl2 = new BigDecimal(-3E12);
    assertTrue("the float representation of bigDecimal -3E12", fl2
            .floatValue() == -3E12F);
    // Magnitudes beyond the float range must convert to +/- infinity.
    fl2 = new BigDecimal(Double.MAX_VALUE);
    assertTrue(
            "A number can't be represented by float should return infinity",
            fl2.floatValue() == Float.POSITIVE_INFINITY);
    fl2 = new BigDecimal(-Double.MAX_VALUE);
    assertTrue(
            "A number can't be represented by float should return infinity",
            fl2.floatValue() == Float.NEGATIVE_INFINITY);
}
/**
 * @tests java.math.BigDecimal#hashCode()
 */
public void test_hashCode() {
    // anything that is equal must have the same hashCode
    // (the converse is not required by the contract, but these unequal
    // pairs happen to have distinct hashes and that is asserted here).
    BigDecimal hash = new BigDecimal("1.00");
    BigDecimal hash2 = new BigDecimal(1.00D);
    assertTrue("the hashCode of 1.00 and 1.00D is equal",
            hash.hashCode() != hash2.hashCode() && !hash.equals(hash2));
    hash2 = new BigDecimal("1.0");
    assertTrue("the hashCode of 1.0 and 1.00 is equal",
            hash.hashCode() != hash2.hashCode() && !hash.equals(hash2));
    BigInteger val = new BigInteger("100");
    hash2 = new BigDecimal(val, 2);
    // Equal values (same unscaled value and scale) must share a hashCode.
    assertTrue("hashCode of 1.00 and 1.00(bigInteger) is not equal", hash
            .hashCode() == hash2.hashCode()
            && hash.equals(hash2));
    // NOTE(review): `value` is a shared field declared elsewhere in this
    // class; the messages imply it holds 12345908.
    hash = new BigDecimal(value, 2);
    hash2 = new BigDecimal("-1233456.0000");
    assertTrue("hashCode of 123459.08 and -1233456.0000 is not equal", hash
            .hashCode() != hash2.hashCode()
            && !hash.equals(hash2));
    hash2 = new BigDecimal(value.negate(), 2);
    assertTrue("hashCode of 123459.08 and -123459.08 is not equal", hash
            .hashCode() != hash2.hashCode()
            && !hash.equals(hash2));
}
/**
 * @tests java.math.BigDecimal#intValue()
 */
public void test_intValue() {
    // intValue() truncates the fraction toward zero.
    BigDecimal int1 = new BigDecimal(value, 3);
    assertTrue("the int value of 12345.908 is not 12345",
            int1.intValue() == 12345);
    int1 = new BigDecimal("1.99");
    assertTrue("the int value of 1.99 is not 1", int1.intValue() == 1);
    // Values that do not fit in an int are narrowed (low 32 bits kept).
    int1 = new BigDecimal("23423419083091823091283933");
    // ran JDK and found representation for the above was -249268259
    assertTrue("the int value of 23423419083091823091283933 is wrong", int1
            .intValue() == -249268259);
    int1 = new BigDecimal(-1235D);
    assertTrue("the int value of -1235 is not -1235",
            int1.intValue() == -1235);
}
/**
 * @tests java.math.BigDecimal#longValue()
 */
public void test_longValue() {
    // NOTE(review): `value2` is a shared BigInteger field declared elsewhere
    // in this class; the expectations imply it holds 12334560000.
    BigDecimal long1 = new BigDecimal(value2.negate(), 0);
    // Fixed failure message: the tested value is the NEGATED 12334560000.
    assertTrue("the long value of -12334560000 is not -12334560000", long1
            .longValue() == -12334560000L);
    // A magnitude far below one truncates to zero.
    long1 = new BigDecimal(-1345.348E-123D);
    assertTrue("the long value of -1345.348E-123D is not zero", long1
            .longValue() == 0);
    long1 = new BigDecimal("31323423423419083091823091283933");
    // ran JDK and found representation for the above was
    // -5251313250005125155
    // (longValue() is a narrowing conversion: only the low 64 bits survive).
    assertTrue(
            "the long value of 31323423423419083091823091283933 is wrong",
            long1.longValue() == -5251313250005125155L);
}
/**
 * @tests java.math.BigDecimal#max(java.math.BigDecimal)
 */
public void test_maxLjava_math_BigDecimal() {
    // Same unscaled value with different scales: the smaller scale is the
    // numerically larger value (1233456000.0 vs 1233456.0000).
    BigDecimal max1 = new BigDecimal(value2, 1);
    BigDecimal max2 = new BigDecimal(value2, 4);
    assertTrue("1233456000.0 is not greater than 1233456", max1.max(max2)
            .equals(max1));
    max1 = new BigDecimal(-1.224D);
    max2 = new BigDecimal(-1.2245D);
    assertTrue("-1.224 is not greater than -1.2245", max1.max(max2).equals(
            max1));
    max1 = new BigDecimal(123E18);
    max2 = new BigDecimal(123E19);
    assertTrue("123E19 is the not the max", max1.max(max2).equals(max2));
}
/**
 * @tests java.math.BigDecimal#min(java.math.BigDecimal)
 */
public void test_minLjava_math_BigDecimal() {
    BigDecimal min1 = new BigDecimal(-12345.4D);
    BigDecimal min2 = new BigDecimal(-12345.39D);
    // -12345.4 < -12345.39, so min() must return min1.
    // Fixed failure message: it previously named the wrong operand
    // ("-12345.39 should have been returned").
    assertTrue("-12345.4 should have been returned", min1.min(min2)
            .equals(min1));
    // Same unscaled value, different scales: the scaled-down 123345.6
    // is the smaller number.
    min1 = new BigDecimal(value2, 5);
    min2 = new BigDecimal(value2, 0);
    assertTrue("123345.6 should have been returned", min1.min(min2).equals(
            min1));
}
/**
 * @tests java.math.BigDecimal#movePointLeft(int)
 */
public void test_movePointLeftI() {
    // Moving the point left by n increases the scale by n.
    BigDecimal movePtLeft = new BigDecimal("123456265.34");
    BigDecimal alreadyMoved = movePtLeft.movePointLeft(5);
    assertTrue("move point left 5 failed", alreadyMoved.scale() == 7
            && alreadyMoved.toString().equals("1234.5626534"));
    movePtLeft = new BigDecimal(value2.negate(), 0);
    alreadyMoved = movePtLeft.movePointLeft(12);
    assertTrue("move point left 12 failed", alreadyMoved.scale() == 12
            && alreadyMoved.toString().equals("-0.012334560000"));
    movePtLeft = new BigDecimal(123E18);
    alreadyMoved = movePtLeft.movePointLeft(2);
    assertTrue("move point left 2 failed",
            alreadyMoved.scale() == movePtLeft.scale() + 2
                    && alreadyMoved.doubleValue() == 1.23E18);
    movePtLeft = new BigDecimal(1.123E-12);
    alreadyMoved = movePtLeft.movePointLeft(3);
    assertTrue("move point left 3 failed",
            alreadyMoved.scale() == movePtLeft.scale() + 3
                    && alreadyMoved.doubleValue() == 1.123E-15);
    // A negative argument moves the point to the RIGHT instead.
    movePtLeft = new BigDecimal(value, 2);
    alreadyMoved = movePtLeft.movePointLeft(-2);
    assertTrue("move point left -2 failed",
            alreadyMoved.scale() == movePtLeft.scale() - 2
                    && alreadyMoved.toString().equals("12345908"));
}
/**
 * @tests java.math.BigDecimal#movePointRight(int)
 */
public void test_movePointRightI() {
    // Moving the point right by n decreases the scale by n, but the
    // resulting scale never goes below zero (per the movePointRight API docs).
    BigDecimal movePtRight = new BigDecimal("-1.58796521458");
    BigDecimal alreadyMoved = movePtRight.movePointRight(8);
    assertTrue("move point right 8 failed", alreadyMoved.scale() == 3
            && alreadyMoved.toString().equals("-158796521.458"));
    movePtRight = new BigDecimal(value, 2);
    alreadyMoved = movePtRight.movePointRight(4);
    assertTrue("move point right 4 failed", alreadyMoved.scale() == 0
            && alreadyMoved.toString().equals("1234590800"));
    movePtRight = new BigDecimal(134E12);
    alreadyMoved = movePtRight.movePointRight(2);
    assertTrue("move point right 2 failed", alreadyMoved.scale() == 0
            && alreadyMoved.toString().equals("13400000000000000"));
    movePtRight = new BigDecimal(-3.4E-10);
    alreadyMoved = movePtRight.movePointRight(5);
    assertTrue("move point right 5 failed",
            alreadyMoved.scale() == movePtRight.scale() - 5
                    && alreadyMoved.doubleValue() == -0.000034);
    // A negative argument moves the point to the LEFT, undoing the move above.
    alreadyMoved = alreadyMoved.movePointRight(-5);
    assertTrue("move point right -5 failed", alreadyMoved
            .equals(movePtRight));
}
/**
 * @tests java.math.BigDecimal#multiply(java.math.BigDecimal)
 *
 * <p>multiply() produces a result whose scale is the sum of the operands'
 * scales; every case below asserts that invariant alongside the product.
 */
public void test_multiplyLjava_math_BigDecimal() {
    // NOTE(review): `value` is a shared BigInteger field declared elsewhere
    // in this class; the expectations imply it holds 12345908.
    BigDecimal multi1 = new BigDecimal(value, 5);
    BigDecimal multi2 = new BigDecimal(2.345D);
    BigDecimal result = multi1.multiply(multi2);
    assertTrue("123.45908 * 2.345 is not correct: " + result, result
            .toString().startsWith("289.51154260")
            && result.scale() == multi1.scale() + multi2.scale());
    multi1 = new BigDecimal("34656");
    multi2 = new BigDecimal("-2");
    result = multi1.multiply(multi2);
    // Fixed misleading failure message: the multiplier is -2, not 2.
    assertTrue("34656 * -2 is not correct", result.toString().equals(
            "-69312")
            && result.scale() == 0);
    multi1 = new BigDecimal(-2.345E-02);
    multi2 = new BigDecimal(-134E130);
    result = multi1.multiply(multi2);
    assertTrue("-2.345E-02 * -134E130 is not correct " + result.doubleValue(),
            result.doubleValue() == 3.1422999999999997E130
                    && result.scale() == multi1.scale() + multi2.scale());
    // Multiplying by zero yields zero with scale 0 (both operands scale 0).
    multi1 = new BigDecimal("11235");
    multi2 = new BigDecimal("0");
    result = multi1.multiply(multi2);
    assertTrue("11235 * 0 is not correct", result.doubleValue() == 0
            && result.scale() == 0);
    multi1 = new BigDecimal("-0.00234");
    multi2 = new BigDecimal(13.4E10);
    result = multi1.multiply(multi2);
    assertTrue("-0.00234 * 13.4E10 is not correct",
            result.doubleValue() == -313560000
                    && result.scale() == multi1.scale() + multi2.scale());
}
/**
 * @tests java.math.BigDecimal#negate()
 */
public void test_negate() {
    // negate() flips the sign and preserves the scale.
    BigDecimal scaled = new BigDecimal(value2, 7);
    assertTrue("the negate of 1233.4560000 is not -1233.4560000",
            scaled.negate().toString().equals("-1233.4560000"));
    BigDecimal plain = new BigDecimal("-23465839");
    assertTrue("the negate of -23465839 is not 23465839",
            plain.negate().toString().equals("23465839"));
    // Double negation is the identity.
    BigDecimal fromDouble = new BigDecimal(-3.456E6);
    assertTrue("the negate of -3.456E6 is not 3.456E6",
            fromDouble.negate().negate().equals(fromDouble));
}
/**
 * @tests java.math.BigDecimal#scale()
 */
public void test_scale() {
    BigDecimal scale1 = new BigDecimal(value2, 8);
    assertTrue("the scale of the number 123.34560000 is wrong", scale1
            .scale() == 8);
    // A trailing decimal point with no fraction digits gives scale 0.
    BigDecimal scale2 = new BigDecimal("29389.");
    assertTrue("the scale of the number 29389. is wrong",
            scale2.scale() == 0);
    BigDecimal scale3 = new BigDecimal(3.374E13);
    assertTrue("the scale of the number 3.374E13 is wrong",
            scale3.scale() == 0);
    BigDecimal scale4 = new BigDecimal("-3.45E-203");
    // note the scale is calculated as 15 digits of 345000.... + exponent -
    // 1. -1 for the 3
    assertTrue("the scale of the number -3.45E-203 is wrong: "
            + scale4.scale(), scale4.scale() == 205);
    scale4 = new BigDecimal("-345.4E-200");
    assertTrue("the scale of the number -345.4E-200 is wrong", scale4
            .scale() == 201);
}
/**
 * @tests java.math.BigDecimal#setScale(int)
 */
public void test_setScaleI() {
    // rounding mode defaults to zero
    // (setScale(int) must be exact; increasing the scale only pads zeros).
    BigDecimal setScale1 = new BigDecimal(value, 3);
    BigDecimal setScale2 = setScale1.setScale(5);
    BigInteger setresult = new BigInteger("1234590800");
    assertTrue("the number 12345.908 after setting scale is wrong",
            setScale2.unscaledValue().equals(setresult)
                    && setScale2.scale() == 5);
    // Reducing the scale would drop digits, so ROUND_UNNECESSARY must throw.
    try {
        setScale2 = setScale1.setScale(2, BigDecimal.ROUND_UNNECESSARY);
        fail("arithmetic Exception not caught as a result of loosing precision");
    } catch (ArithmeticException e) {
        // expected
    }
}
/**
 * @tests java.math.BigDecimal#setScale(int, int)
 *
 * <p>Exercises setScale with every legacy rounding-mode constant
 * (CEILING, DOWN, FLOOR, HALF_DOWN, HALF_EVEN, HALF_UP, UNNECESSARY, UP)
 * plus an invalid mode value.
 */
public void test_setScaleII() {
    BigDecimal setScale1 = new BigDecimal(2.323E102);
    BigDecimal setScale2 = setScale1.setScale(4);
    assertTrue("the number 2.323E102 after setting scale is wrong",
            setScale2.scale() == 4);
    assertTrue("the representation of the number 2.323E102 is wrong",
            setScale2.doubleValue() == 2.323E102);
    setScale1 = new BigDecimal("-1.253E-12");
    setScale2 = setScale1.setScale(17, BigDecimal.ROUND_CEILING);
    assertTrue("the number -1.253E-12 after setting scale is wrong",
            setScale2.scale() == 17);
    assertTrue(
            "the representation of the number -1.253E-12 after setting scale is wrong, " + setScale2.toString(),
            setScale2.toString().equals("-1.25300E-12"));
    // testing rounding Mode ROUND_CEILING
    // NOTE(review): `value` is a shared field declared elsewhere in this
    // class; the expectations imply it holds 12345908.
    setScale1 = new BigDecimal(value, 4);
    setScale2 = setScale1.setScale(1, BigDecimal.ROUND_CEILING);
    assertTrue(
            "the number 1234.5908 after setting scale to 1/ROUND_CEILING is wrong",
            setScale2.toString().equals("1234.6") && setScale2.scale() == 1);
    BigDecimal setNeg = new BigDecimal(value.negate(), 4);
    setScale2 = setNeg.setScale(1, BigDecimal.ROUND_CEILING);
    // CEILING rounds toward positive infinity, so negatives truncate.
    assertTrue(
            "the number -1234.5908 after setting scale to 1/ROUND_CEILING is wrong",
            setScale2.toString().equals("-1234.5")
                    && setScale2.scale() == 1);
    // testing rounding Mode ROUND_DOWN
    setScale2 = setNeg.setScale(1, BigDecimal.ROUND_DOWN);
    assertTrue(
            "the number -1234.5908 after setting scale to 1/ROUND_DOWN is wrong",
            setScale2.toString().equals("-1234.5")
                    && setScale2.scale() == 1);
    setScale1 = new BigDecimal(value, 4);
    setScale2 = setScale1.setScale(1, BigDecimal.ROUND_DOWN);
    assertTrue(
            "the number 1234.5908 after setting scale to 1/ROUND_DOWN is wrong",
            setScale2.toString().equals("1234.5") && setScale2.scale() == 1);
    // testing rounding Mode ROUND_FLOOR
    setScale2 = setScale1.setScale(1, BigDecimal.ROUND_FLOOR);
    assertTrue(
            "the number 1234.5908 after setting scale to 1/ROUND_FLOOR is wrong",
            setScale2.toString().equals("1234.5") && setScale2.scale() == 1);
    // FLOOR rounds toward negative infinity, so negatives round away from zero.
    setScale2 = setNeg.setScale(1, BigDecimal.ROUND_FLOOR);
    assertTrue(
            "the number -1234.5908 after setting scale to 1/ROUND_FLOOR is wrong",
            setScale2.toString().equals("-1234.6")
                    && setScale2.scale() == 1);
    // testing rounding Mode ROUND_HALF_DOWN
    setScale2 = setScale1.setScale(3, BigDecimal.ROUND_HALF_DOWN);
    assertTrue(
            "the number 1234.5908 after setting scale to 3/ROUND_HALF_DOWN is wrong",
            setScale2.toString().equals("1234.591")
                    && setScale2.scale() == 3);
    setScale1 = new BigDecimal(new BigInteger("12345000"), 5);
    setScale2 = setScale1.setScale(1, BigDecimal.ROUND_HALF_DOWN);
    assertTrue(
            "the number 123.45908 after setting scale to 1/ROUND_HALF_DOWN is wrong",
            setScale2.toString().equals("123.4") && setScale2.scale() == 1);
    // Exactly .5 rounds toward zero under HALF_DOWN.
    setScale2 = new BigDecimal("-1234.5000").setScale(0,
            BigDecimal.ROUND_HALF_DOWN);
    assertTrue(
            "the number -1234.5908 after setting scale to 0/ROUND_HALF_DOWN is wrong",
            setScale2.toString().equals("-1234") && setScale2.scale() == 0);
    // testing rounding Mode ROUND_HALF_EVEN
    setScale1 = new BigDecimal(1.2345789D);
    setScale2 = setScale1.setScale(4, BigDecimal.ROUND_HALF_EVEN);
    assertTrue(
            "the number 1.2345789 after setting scale to 4/ROUND_HALF_EVEN is wrong",
            setScale2.doubleValue() == 1.2346D && setScale2.scale() == 4);
    setNeg = new BigDecimal(-1.2335789D);
    setScale2 = setNeg.setScale(2, BigDecimal.ROUND_HALF_EVEN);
    assertTrue(
            "the number -1.2335789 after setting scale to 2/ROUND_HALF_EVEN is wrong",
            setScale2.doubleValue() == -1.23D && setScale2.scale() == 2);
    // Exactly .5 rounds to the even neighbour under HALF_EVEN.
    setScale2 = new BigDecimal("1.2345000").setScale(3,
            BigDecimal.ROUND_HALF_EVEN);
    assertTrue(
            "the number 1.2345789 after setting scale to 3/ROUND_HALF_EVEN is wrong",
            setScale2.doubleValue() == 1.234D && setScale2.scale() == 3);
    setScale2 = new BigDecimal("-1.2345000").setScale(3,
            BigDecimal.ROUND_HALF_EVEN);
    assertTrue(
            "the number -1.2335789 after setting scale to 3/ROUND_HALF_EVEN is wrong",
            setScale2.doubleValue() == -1.234D && setScale2.scale() == 3);
    // testing rounding Mode ROUND_HALF_UP
    setScale1 = new BigDecimal("134567.34650");
    setScale2 = setScale1.setScale(3, BigDecimal.ROUND_HALF_UP);
    assertTrue(
            "the number 134567.34658 after setting scale to 3/ROUND_HALF_UP is wrong",
            setScale2.toString().equals("134567.347")
                    && setScale2.scale() == 3);
    setNeg = new BigDecimal("-1234.4567");
    setScale2 = setNeg.setScale(0, BigDecimal.ROUND_HALF_UP);
    assertTrue(
            "the number -1234.4567 after setting scale to 0/ROUND_HALF_UP is wrong",
            setScale2.toString().equals("-1234") && setScale2.scale() == 0);
    // testing rounding Mode ROUND_UNNECESSARY
    try {
        setScale1.setScale(3, BigDecimal.ROUND_UNNECESSARY);
        fail("arithmetic Exception not caught for round unnecessary");
    } catch (ArithmeticException e) {
        // expected: dropping digits under UNNECESSARY is an error
    }
    // testing rounding Mode ROUND_UP
    setScale1 = new BigDecimal("100000.374");
    setScale2 = setScale1.setScale(2, BigDecimal.ROUND_UP);
    assertTrue(
            "the number 100000.374 after setting scale to 2/ROUND_UP is wrong",
            setScale2.toString().equals("100000.38")
                    && setScale2.scale() == 2);
    setNeg = new BigDecimal(-134.34589D);
    setScale2 = setNeg.setScale(2, BigDecimal.ROUND_UP);
    assertTrue(
            "the number -134.34589 after setting scale to 2/ROUND_UP is wrong",
            setScale2.doubleValue() == -134.35D && setScale2.scale() == 2);
    // testing invalid rounding modes
    try {
        setScale2 = setScale1.setScale(0, -123);
        fail("IllegalArgumentException is not caught for wrong rounding mode");
    } catch (IllegalArgumentException e) {
        // expected
    }
}
/**
 * @tests java.math.BigDecimal#signum()
 */
public void test_signum() {
    // Positive, negative, and zero inputs map to 1, -1, and 0 respectively.
    assertTrue("123E-104 is not positive in signum()",
            new BigDecimal(123E-104).signum() == 1);
    assertTrue("-1234.3959 is not negative in signum()",
            new BigDecimal("-1234.3959").signum() == -1);
    assertTrue("000D is not zero in signum()",
            new BigDecimal(000D).signum() == 0);
}
/**
 * @tests java.math.BigDecimal#subtract(java.math.BigDecimal)
 */
public void test_subtractLjava_math_BigDecimal() {
    BigDecimal sub1 = new BigDecimal("13948");
    BigDecimal sub2 = new BigDecimal("2839.489");
    BigDecimal result = sub1.subtract(sub2);
    // subtract() keeps the larger of the two scales (3 here).
    assertTrue("13948 - 2839.489 is wrong: " + result, result.toString()
            .equals("11108.511")
            && result.scale() == 3);
    BigDecimal result2 = sub2.subtract(sub1);
    assertTrue("2839.489 - 13948 is wrong", result2.toString().equals(
            "-11108.511")
            && result2.scale() == 3);
    // a - b must equal -(b - a).
    assertTrue("13948 - 2839.489 is not the negative of 2839.489 - 13948",
            result.equals(result2.negate()));
    // Subtracting zero is the identity.
    sub1 = new BigDecimal(value, 1);
    sub2 = new BigDecimal("0");
    result = sub1.subtract(sub2);
    assertTrue("1234590.8 - 0 is wrong", result.equals(sub1));
    sub1 = new BigDecimal(1.234E-03);
    sub2 = new BigDecimal(3.423E-10);
    result = sub1.subtract(sub2);
    assertTrue("1.234E-03 - 3.423E-10 is wrong, " + result.doubleValue(),
            result.doubleValue() == 0.0012339996577);
    // Equal values produced by the double constructor cancel exactly.
    sub1 = new BigDecimal(1234.0123);
    sub2 = new BigDecimal(1234.0123000);
    result = sub1.subtract(sub2);
    assertTrue("1234.0123 - 1234.0123000 is wrong, " + result.doubleValue(),
            result.doubleValue() == 0.0);
}
/**
 * @tests java.math.BigDecimal#toBigInteger()
 */
public void test_toBigInteger() {
    // toBigInteger() truncates the fraction toward zero.
    BigDecimal sub1 = new BigDecimal("-29830.989");
    BigInteger result = sub1.toBigInteger();
    assertTrue("the bigInteger equivalent of -29830.989 is wrong", result
            .toString().equals("-29830"));
    sub1 = new BigDecimal(-2837E10);
    result = sub1.toBigInteger();
    assertTrue("the bigInteger equivalent of -2837E10 is wrong", result
            .doubleValue() == -2837E10);
    // A magnitude below one truncates to zero.
    sub1 = new BigDecimal(2.349E-10);
    result = sub1.toBigInteger();
    assertTrue("the bigInteger equivalent of 2.349E-10 is wrong", result
            .equals(BigInteger.ZERO));
    sub1 = new BigDecimal(value2, 6);
    result = sub1.toBigInteger();
    assertTrue("the bigInteger equivalent of 12334.560000 is wrong", result
            .toString().equals("12334"));
}
/**
 * @tests java.math.BigDecimal#toString()
 */
public void test_toString() {
    // A plain value keeps its trailing zeros.
    assertTrue("the toString representation of 1234.000 is wrong",
            new BigDecimal("1234.000").toString().equals("1234.000"));
    // A small negative exponent folds into plain decimal notation.
    BigDecimal smallExp = new BigDecimal("-123.4E-5");
    assertTrue("the toString representation of -123.4E-5 is wrong: "
            + smallExp, smallExp.toString().equals("-0.001234"));
    // A very small magnitude stays in scientific notation.
    assertTrue("the toString representation of -1.455E-20 is wrong",
            new BigDecimal("-1.455E-20").toString().equals("-1.455E-20"));
    assertTrue("the toString representation of 1233456.0000 is wrong",
            new BigDecimal(value2, 4).toString().equals("1233456.0000"));
}
/**
 * @tests java.math.BigDecimal#unscaledValue()
 */
public void test_unscaledValue() {
    // unscaledValue() is the full digit string with the point removed.
    BigDecimal unsVal = new BigDecimal("-2839485.000");
    assertTrue("the unscaledValue of -2839485.000 is wrong", unsVal
            .unscaledValue().toString().equals("-2839485000"));
    unsVal = new BigDecimal(123E10);
    assertTrue("the unscaledValue of 123E10 is wrong", unsVal
            .unscaledValue().toString().equals("1230000000000"));
    unsVal = new BigDecimal("-4.56E-13");
    assertTrue("the unscaledValue of -4.56E-13 is wrong: "
            + unsVal.unscaledValue(), unsVal.unscaledValue().toString()
            .equals("-456"));
    unsVal = new BigDecimal(value, 3);
    assertTrue("the unscaledValue of 12345.908 is wrong", unsVal
            .unscaledValue().toString().equals("12345908"));
}
/**
 * @tests java.math.BigDecimal#valueOf(long)
 */
public void test_valueOfJ() {
    // valueOf(long) yields scale 0 and the exact unscaled long value.
    BigDecimal nearMax = BigDecimal.valueOf(9223372036854775806L);
    boolean unscaledOk =
            nearMax.unscaledValue().toString().equals("9223372036854775806");
    assertTrue("the bigDecimal equivalent of 9223372036854775806 is wrong",
            unscaledOk && nearMax.scale() == 0);
    assertTrue(
            "the toString representation of 9223372036854775806 is wrong",
            nearMax.toString().equals("9223372036854775806"));
    BigDecimal zero = BigDecimal.valueOf(0L);
    assertTrue("the bigDecimal equivalent of 0 is wrong",
            zero.unscaledValue().toString().equals("0")
                    && zero.scale() == 0);
}
/**
 * @tests java.math.BigDecimal#valueOf(long, int)
 */
public void test_valueOfJI() {
    BigDecimal valueOfJI = BigDecimal.valueOf(9223372036854775806L, 5);
    assertTrue(
            "the bigDecimal equivalent of 92233720368547.75806 is wrong",
            valueOfJI.unscaledValue().toString().equals(
                    "9223372036854775806")
                    && valueOfJI.scale() == 5);
    assertTrue(
            "the toString representation of 92233720368547.75806 is wrong",
            valueOfJI.toString().equals("92233720368547.75806"));
    // A scale larger than the digit count forces leading zeros.
    // Fixed copy-pasted failure messages below: they previously referred
    // to 92233720368547.75806 / 9223372036854775806 for every case.
    valueOfJI = BigDecimal.valueOf(1234L, 8);
    assertTrue(
            "the bigDecimal equivalent of 0.00001234 is wrong",
            valueOfJI.unscaledValue().toString().equals("1234")
                    && valueOfJI.scale() == 8);
    assertTrue(
            "the toString representation of 0.00001234 is wrong",
            valueOfJI.toString().equals("0.00001234"));
    // Zero keeps the requested scale.
    valueOfJI = BigDecimal.valueOf(0, 3);
    assertTrue(
            "the bigDecimal equivalent of 0.000 is wrong",
            valueOfJI.unscaledValue().toString().equals("0")
                    && valueOfJI.scale() == 3);
    assertTrue(
            "the toString representation of 0.000 is wrong",
            valueOfJI.toString().equals("0.000"));
}
public void test_BigDecimal_serialization() throws Exception {
    // Regression for HARMONY-1896: char[] constructor followed by a Java
    // serialization round trip must preserve the value.
    char[] digits = { '1', '5', '6', '7', '8', '7', '.', '0', '0' };
    BigDecimal original = new BigDecimal(digits, 0, 9);
    ByteArrayOutputStream sink = new ByteArrayOutputStream();
    ObjectOutputStream out = new ObjectOutputStream(sink);
    out.writeObject(original);
    ObjectInputStream in =
            new ObjectInputStream(new ByteArrayInputStream(sink.toByteArray()));
    BigDecimal restored = (BigDecimal) in.readObject();
    assertEquals(original.intValue(), restored.intValue());
    assertEquals(original.doubleValue(), restored.doubleValue(), 0.0);
    assertEquals(original.toString(), restored.toString());
}
/**
 * @tests java.math.BigDecimal#stripTrailingZero(long)
 */
public void test_stripTrailingZero() {
    // Stripping can drive the scale negative: 600.0 -> 6E+2 (scale -2).
    BigDecimal sixhundredtest = new BigDecimal("600.0");
    assertTrue("stripTrailingZero failed for 600.0",
            ((sixhundredtest.stripTrailingZeros()).scale() == -2)
            );
    /* Single digit, no trailing zero, odd number */
    BigDecimal notrailingzerotest = new BigDecimal("1");
    assertTrue("stripTrailingZero failed for 1",
            ((notrailingzerotest.stripTrailingZeros()).scale() == 0)
            );
    /* Zero */
    //regression for HARMONY-4623, NON-BUG DIFF with RI
    // NOTE(review): the RI's handling of stripTrailingZeros() on zero
    // changed across JDK releases; this expects scale 0 — confirm against
    // the targeted JDK.
    BigDecimal zerotest = new BigDecimal("0.0000");
    assertTrue("stripTrailingZero failed for 0.0000",
            ((zerotest.stripTrailingZeros()).scale() == 0)
            );
}
/**
 * Tests the {@link MathContext} constructors (int, int + RoundingMode,
 * String) and their equals/hashCode/toString contracts.
 */
public void testMathContextConstruction() {
    String a = "-12380945E+61";
    BigDecimal aNumber = new BigDecimal(a);
    int precision = 6;
    RoundingMode rm = RoundingMode.HALF_DOWN;
    MathContext mcIntRm = new MathContext(precision, rm);
    MathContext mcStr = new MathContext("precision=6 roundingMode=HALF_DOWN");
    MathContext mcInt = new MathContext(precision);
    BigDecimal res = aNumber.abs(mcInt);
    // Fixed argument order throughout: JUnit's assertEquals takes the
    // EXPECTED value before the ACTUAL one.
    assertEquals("MathContext Constructer with int precision failed",
            new BigDecimal("1.23809E+68"),
            res);
    assertEquals("Equal MathContexts are not Equal ",
            mcIntRm,
            mcStr);
    // MathContext(int) defaults to HALF_UP, so it must differ from the
    // HALF_DOWN context. Replaced assertEquals(..., boolean, false) with
    // the idiomatic assertFalse.
    assertFalse("Different MathContext are reported as Equal ",
            mcInt.equals(mcStr));
    assertEquals("Equal MathContexts have different hashcodes ",
            mcIntRm.hashCode(),
            mcStr.hashCode());
    assertEquals("MathContext.toString() returning incorrect value",
            "precision=6 roundingMode=HALF_DOWN",
            mcIntRm.toString());
}
}
|
googleapis/google-cloud-java | 37,781 | java-tpu/proto-google-cloud-tpu-v2alpha1/src/main/java/com/google/cloud/tpu/v2alpha1/ListRuntimeVersionsRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/tpu/v2alpha1/cloud_tpu.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.tpu.v2alpha1;
/**
*
*
* <pre>
* Request for
* [ListRuntimeVersions][google.cloud.tpu.v2alpha1.Tpu.ListRuntimeVersions].
* </pre>
*
* Protobuf type {@code google.cloud.tpu.v2alpha1.ListRuntimeVersionsRequest}
*/
public final class ListRuntimeVersionsRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.tpu.v2alpha1.ListRuntimeVersionsRequest)
ListRuntimeVersionsRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListRuntimeVersionsRequest.newBuilder() to construct.
private ListRuntimeVersionsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}
private ListRuntimeVersionsRequest() {
  // String fields default to "" so the getters never return null.
  parent_ = "";
  pageToken_ = "";
  filter_ = "";
  orderBy_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  // Invoked reflectively by the protobuf runtime to create fresh instances.
  return new ListRuntimeVersionsRequest();
}
// Returns the protobuf descriptor for this message type.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.tpu.v2alpha1.CloudTpuProto
      .internal_static_google_cloud_tpu_v2alpha1_ListRuntimeVersionsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  // Maps descriptor fields to the generated message/builder accessors.
  return com.google.cloud.tpu.v2alpha1.CloudTpuProto
      .internal_static_google_cloud_tpu_v2alpha1_ListRuntimeVersionsRequest_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.tpu.v2alpha1.ListRuntimeVersionsRequest.class,
          com.google.cloud.tpu.v2alpha1.ListRuntimeVersionsRequest.Builder.class);
}
public static final int PARENT_FIELD_NUMBER = 1;
// Holds either a String or a ByteString; decoded lazily in getParent().
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
 *
 *
 * <pre>
 * Required. The parent resource name.
 * </pre>
 *
 * <code>
 * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return The parent.
 */
@java.lang.Override
public java.lang.String getParent() {
  java.lang.Object ref = parent_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    // Cache the decoded String so later calls skip UTF-8 decoding.
    parent_ = s;
    return s;
  }
}
/**
 *
 *
 * <pre>
 * Required. The parent resource name.
 * </pre>
 *
 * <code>
 * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return The bytes for parent.
 */
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
  java.lang.Object ref = parent_;
  if (ref instanceof java.lang.String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    // Cache the encoded ByteString for subsequent calls.
    parent_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
public static final int PAGE_SIZE_FIELD_NUMBER = 2;
// Proto3 int32 default: 0 means "server decides the page size".
private int pageSize_ = 0;
/**
 *
 *
 * <pre>
 * The maximum number of items to return.
 * </pre>
 *
 * <code>int32 page_size = 2;</code>
 *
 * @return The pageSize.
 */
@java.lang.Override
public int getPageSize() {
  return pageSize_;
}
public static final int PAGE_TOKEN_FIELD_NUMBER = 3;
// Holds either a String or a ByteString; decoded lazily in getPageToken().
@SuppressWarnings("serial")
private volatile java.lang.Object pageToken_ = "";
/**
 *
 *
 * <pre>
 * The next_page_token value returned from a previous List request, if any.
 * </pre>
 *
 * <code>string page_token = 3;</code>
 *
 * @return The pageToken.
 */
@java.lang.Override
public java.lang.String getPageToken() {
  java.lang.Object ref = pageToken_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    // Cache the decoded String so later calls skip UTF-8 decoding.
    pageToken_ = s;
    return s;
  }
}
/**
 *
 *
 * <pre>
 * The next_page_token value returned from a previous List request, if any.
 * </pre>
 *
 * <code>string page_token = 3;</code>
 *
 * @return The bytes for pageToken.
 */
@java.lang.Override
public com.google.protobuf.ByteString getPageTokenBytes() {
  java.lang.Object ref = pageToken_;
  if (ref instanceof java.lang.String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    // Cache the encoded ByteString for subsequent calls.
    pageToken_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
public static final int FILTER_FIELD_NUMBER = 5;
@SuppressWarnings("serial")
private volatile java.lang.Object filter_ = "";
/**
*
*
* <pre>
* List filter.
* </pre>
*
* <code>string filter = 5;</code>
*
* @return The filter.
*/
@java.lang.Override
public java.lang.String getFilter() {
java.lang.Object ref = filter_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
filter_ = s;
return s;
}
}
/**
*
*
* <pre>
* List filter.
* </pre>
*
* <code>string filter = 5;</code>
*
* @return The bytes for filter.
*/
@java.lang.Override
public com.google.protobuf.ByteString getFilterBytes() {
java.lang.Object ref = filter_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
filter_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int ORDER_BY_FIELD_NUMBER = 6;
@SuppressWarnings("serial")
private volatile java.lang.Object orderBy_ = "";
/**
*
*
* <pre>
* Sort results.
* </pre>
*
* <code>string order_by = 6;</code>
*
* @return The orderBy.
*/
@java.lang.Override
public java.lang.String getOrderBy() {
java.lang.Object ref = orderBy_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
orderBy_ = s;
return s;
}
}
/**
*
*
* <pre>
* Sort results.
* </pre>
*
* <code>string order_by = 6;</code>
*
* @return The bytes for orderBy.
*/
@java.lang.Override
public com.google.protobuf.ByteString getOrderByBytes() {
java.lang.Object ref = orderBy_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
orderBy_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
  // Memoized result of isInitialized(): -1 = not computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  /**
   * Always initialized: this message declares no proto2-style required fields,
   * so the memoized flag is simply set to 1 on first call.
   */
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  /**
   * Serializes this message to the wire. Fields equal to their proto3 default
   * (empty string / 0) are skipped, matching proto3 semantics; unknown fields
   * preserved from parsing are written last.
   */
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    if (pageSize_ != 0) {
      output.writeInt32(2, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 5, filter_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(orderBy_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 6, orderBy_);
    }
    getUnknownFields().writeTo(output);
  }
  /**
   * Computes (and memoizes in {@code memoizedSize}) the serialized byte size,
   * mirroring exactly the fields written by {@link #writeTo}.
   */
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (pageSize_ != 0) {
      size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, filter_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(orderBy_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(6, orderBy_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  /**
   * Field-by-field structural equality over all five declared fields plus the
   * unknown-field set. Non-ListRuntimeVersionsRequest objects are delegated to
   * the superclass implementation.
   */
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.tpu.v2alpha1.ListRuntimeVersionsRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.tpu.v2alpha1.ListRuntimeVersionsRequest other =
        (com.google.cloud.tpu.v2alpha1.ListRuntimeVersionsRequest) obj;
    if (!getParent().equals(other.getParent())) return false;
    if (getPageSize() != other.getPageSize()) return false;
    if (!getPageToken().equals(other.getPageToken())) return false;
    if (!getFilter().equals(other.getFilter())) return false;
    if (!getOrderBy().equals(other.getOrderBy())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  /**
   * Hash code consistent with {@link #equals}: mixes the descriptor hash, each
   * field number with its value's hash, and the unknown fields. The result is
   * memoized in {@code memoizedHashCode} (0 means "not yet computed").
   */
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
    hash = (53 * hash) + getPageSize();
    hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getPageToken().hashCode();
    hash = (37 * hash) + FILTER_FIELD_NUMBER;
    hash = (53 * hash) + getFilter().hashCode();
    hash = (37 * hash) + ORDER_BY_FIELD_NUMBER;
    hash = (53 * hash) + getOrderBy().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // --- Static parse entry points ---
  // All overloads delegate to PARSER (defined at the bottom of the class);
  // the stream variants route through GeneratedMessageV3 IO helpers.
  public static com.google.cloud.tpu.v2alpha1.ListRuntimeVersionsRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.tpu.v2alpha1.ListRuntimeVersionsRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.tpu.v2alpha1.ListRuntimeVersionsRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.tpu.v2alpha1.ListRuntimeVersionsRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.tpu.v2alpha1.ListRuntimeVersionsRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.tpu.v2alpha1.ListRuntimeVersionsRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.tpu.v2alpha1.ListRuntimeVersionsRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.tpu.v2alpha1.ListRuntimeVersionsRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Delimited variants read a length-prefixed message from the stream.
  public static com.google.cloud.tpu.v2alpha1.ListRuntimeVersionsRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.tpu.v2alpha1.ListRuntimeVersionsRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.tpu.v2alpha1.ListRuntimeVersionsRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.tpu.v2alpha1.ListRuntimeVersionsRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // --- Builder factories ---
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  // Returns a builder pre-populated with the given prototype's field values.
  public static Builder newBuilder(
      com.google.cloud.tpu.v2alpha1.ListRuntimeVersionsRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // Avoid an unnecessary mergeFrom when called on the default instance.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Request for
   * [ListRuntimeVersions][google.cloud.tpu.v2alpha1.Tpu.ListRuntimeVersions].
   * </pre>
   *
   * Protobuf type {@code google.cloud.tpu.v2alpha1.ListRuntimeVersionsRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.tpu.v2alpha1.ListRuntimeVersionsRequest)
      com.google.cloud.tpu.v2alpha1.ListRuntimeVersionsRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.tpu.v2alpha1.CloudTpuProto
          .internal_static_google_cloud_tpu_v2alpha1_ListRuntimeVersionsRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.tpu.v2alpha1.CloudTpuProto
          .internal_static_google_cloud_tpu_v2alpha1_ListRuntimeVersionsRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.tpu.v2alpha1.ListRuntimeVersionsRequest.class,
              com.google.cloud.tpu.v2alpha1.ListRuntimeVersionsRequest.Builder.class);
    }

    // Construct using com.google.cloud.tpu.v2alpha1.ListRuntimeVersionsRequest.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    // Resets all fields to their proto3 defaults and clears the has-bits.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      parent_ = "";
      pageSize_ = 0;
      pageToken_ = "";
      filter_ = "";
      orderBy_ = "";
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.tpu.v2alpha1.CloudTpuProto
          .internal_static_google_cloud_tpu_v2alpha1_ListRuntimeVersionsRequest_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.tpu.v2alpha1.ListRuntimeVersionsRequest getDefaultInstanceForType() {
      return com.google.cloud.tpu.v2alpha1.ListRuntimeVersionsRequest.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.tpu.v2alpha1.ListRuntimeVersionsRequest build() {
      com.google.cloud.tpu.v2alpha1.ListRuntimeVersionsRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.tpu.v2alpha1.ListRuntimeVersionsRequest buildPartial() {
      com.google.cloud.tpu.v2alpha1.ListRuntimeVersionsRequest result =
          new com.google.cloud.tpu.v2alpha1.ListRuntimeVersionsRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies only the fields whose has-bits are set into the result message.
    private void buildPartial0(com.google.cloud.tpu.v2alpha1.ListRuntimeVersionsRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.parent_ = parent_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.pageSize_ = pageSize_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.pageToken_ = pageToken_;
      }
      if (((from_bitField0_ & 0x00000008) != 0)) {
        result.filter_ = filter_;
      }
      if (((from_bitField0_ & 0x00000010) != 0)) {
        result.orderBy_ = orderBy_;
      }
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    // Dispatches to the typed overload when possible; otherwise falls back to
    // reflective merging via the superclass.
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.tpu.v2alpha1.ListRuntimeVersionsRequest) {
        return mergeFrom((com.google.cloud.tpu.v2alpha1.ListRuntimeVersionsRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Proto3 merge semantics: only non-default fields from `other` overwrite
    // this builder's values; unknown fields are merged as well.
    public Builder mergeFrom(com.google.cloud.tpu.v2alpha1.ListRuntimeVersionsRequest other) {
      if (other == com.google.cloud.tpu.v2alpha1.ListRuntimeVersionsRequest.getDefaultInstance())
        return this;
      if (!other.getParent().isEmpty()) {
        parent_ = other.parent_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (other.getPageSize() != 0) {
        setPageSize(other.getPageSize());
      }
      if (!other.getPageToken().isEmpty()) {
        pageToken_ = other.pageToken_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      if (!other.getFilter().isEmpty()) {
        filter_ = other.filter_;
        bitField0_ |= 0x00000008;
        onChanged();
      }
      if (!other.getOrderBy().isEmpty()) {
        orderBy_ = other.orderBy_;
        bitField0_ |= 0x00000010;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    // Wire-format parse loop: each case label is the field's encoded tag
    // (field number << 3 | wire type); unmatched tags go to the
    // unknown-field set.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                parent_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 16:
              {
                pageSize_ = input.readInt32();
                bitField0_ |= 0x00000002;
                break;
              } // case 16
            case 26:
              {
                pageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            case 42:
              {
                filter_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000008;
                break;
              } // case 42
            case 50:
              {
                orderBy_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000010;
                break;
              } // case 50
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    // Has-bits for the five fields; bit N set means field N+1's slot was
    // explicitly assigned in this builder.
    private int bitField0_;

    private java.lang.Object parent_ = "";
    /**
     *
     *
     * <pre>
     * Required. The parent resource name.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The parent.
     */
    public java.lang.String getParent() {
      java.lang.Object ref = parent_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        parent_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The parent resource name.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The bytes for parent.
     */
    public com.google.protobuf.ByteString getParentBytes() {
      java.lang.Object ref = parent_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        parent_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The parent resource name.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The parent to set.
     * @return This builder for chaining.
     */
    public Builder setParent(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The parent resource name.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearParent() {
      parent_ = getDefaultInstance().getParent();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The parent resource name.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The bytes for parent to set.
     * @return This builder for chaining.
     */
    public Builder setParentBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    private int pageSize_;
    /**
     *
     *
     * <pre>
     * The maximum number of items to return.
     * </pre>
     *
     * <code>int32 page_size = 2;</code>
     *
     * @return The pageSize.
     */
    @java.lang.Override
    public int getPageSize() {
      return pageSize_;
    }
    /**
     *
     *
     * <pre>
     * The maximum number of items to return.
     * </pre>
     *
     * <code>int32 page_size = 2;</code>
     *
     * @param value The pageSize to set.
     * @return This builder for chaining.
     */
    public Builder setPageSize(int value) {
      pageSize_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The maximum number of items to return.
     * </pre>
     *
     * <code>int32 page_size = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearPageSize() {
      bitField0_ = (bitField0_ & ~0x00000002);
      pageSize_ = 0;
      onChanged();
      return this;
    }

    private java.lang.Object pageToken_ = "";
    /**
     *
     *
     * <pre>
     * The next_page_token value returned from a previous List request, if any.
     * </pre>
     *
     * <code>string page_token = 3;</code>
     *
     * @return The pageToken.
     */
    public java.lang.String getPageToken() {
      java.lang.Object ref = pageToken_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        pageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * The next_page_token value returned from a previous List request, if any.
     * </pre>
     *
     * <code>string page_token = 3;</code>
     *
     * @return The bytes for pageToken.
     */
    public com.google.protobuf.ByteString getPageTokenBytes() {
      java.lang.Object ref = pageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        pageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * The next_page_token value returned from a previous List request, if any.
     * </pre>
     *
     * <code>string page_token = 3;</code>
     *
     * @param value The pageToken to set.
     * @return This builder for chaining.
     */
    public Builder setPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      pageToken_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The next_page_token value returned from a previous List request, if any.
     * </pre>
     *
     * <code>string page_token = 3;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearPageToken() {
      pageToken_ = getDefaultInstance().getPageToken();
      bitField0_ = (bitField0_ & ~0x00000004);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The next_page_token value returned from a previous List request, if any.
     * </pre>
     *
     * <code>string page_token = 3;</code>
     *
     * @param value The bytes for pageToken to set.
     * @return This builder for chaining.
     */
    public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      pageToken_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    private java.lang.Object filter_ = "";
    /**
     *
     *
     * <pre>
     * List filter.
     * </pre>
     *
     * <code>string filter = 5;</code>
     *
     * @return The filter.
     */
    public java.lang.String getFilter() {
      java.lang.Object ref = filter_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        filter_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * List filter.
     * </pre>
     *
     * <code>string filter = 5;</code>
     *
     * @return The bytes for filter.
     */
    public com.google.protobuf.ByteString getFilterBytes() {
      java.lang.Object ref = filter_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        filter_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * List filter.
     * </pre>
     *
     * <code>string filter = 5;</code>
     *
     * @param value The filter to set.
     * @return This builder for chaining.
     */
    public Builder setFilter(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      filter_ = value;
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * List filter.
     * </pre>
     *
     * <code>string filter = 5;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearFilter() {
      filter_ = getDefaultInstance().getFilter();
      bitField0_ = (bitField0_ & ~0x00000008);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * List filter.
     * </pre>
     *
     * <code>string filter = 5;</code>
     *
     * @param value The bytes for filter to set.
     * @return This builder for chaining.
     */
    public Builder setFilterBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      filter_ = value;
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }

    private java.lang.Object orderBy_ = "";
    /**
     *
     *
     * <pre>
     * Sort results.
     * </pre>
     *
     * <code>string order_by = 6;</code>
     *
     * @return The orderBy.
     */
    public java.lang.String getOrderBy() {
      java.lang.Object ref = orderBy_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        orderBy_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Sort results.
     * </pre>
     *
     * <code>string order_by = 6;</code>
     *
     * @return The bytes for orderBy.
     */
    public com.google.protobuf.ByteString getOrderByBytes() {
      java.lang.Object ref = orderBy_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        orderBy_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Sort results.
     * </pre>
     *
     * <code>string order_by = 6;</code>
     *
     * @param value The orderBy to set.
     * @return This builder for chaining.
     */
    public Builder setOrderBy(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      orderBy_ = value;
      bitField0_ |= 0x00000010;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Sort results.
     * </pre>
     *
     * <code>string order_by = 6;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearOrderBy() {
      orderBy_ = getDefaultInstance().getOrderBy();
      bitField0_ = (bitField0_ & ~0x00000010);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Sort results.
     * </pre>
     *
     * <code>string order_by = 6;</code>
     *
     * @param value The bytes for orderBy to set.
     * @return This builder for chaining.
     */
    public Builder setOrderByBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      orderBy_ = value;
      bitField0_ |= 0x00000010;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.tpu.v2alpha1.ListRuntimeVersionsRequest)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.tpu.v2alpha1.ListRuntimeVersionsRequest)
  // Singleton default instance: all fields at their proto3 defaults.
  private static final com.google.cloud.tpu.v2alpha1.ListRuntimeVersionsRequest DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.tpu.v2alpha1.ListRuntimeVersionsRequest();
  }

  public static com.google.cloud.tpu.v2alpha1.ListRuntimeVersionsRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Parser implementation: delegates to the Builder's mergeFrom parse loop and
  // attaches the partially-built message to any parse exception.
  private static final com.google.protobuf.Parser<ListRuntimeVersionsRequest> PARSER =
      new com.google.protobuf.AbstractParser<ListRuntimeVersionsRequest>() {
        @java.lang.Override
        public ListRuntimeVersionsRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<ListRuntimeVersionsRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<ListRuntimeVersionsRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.tpu.v2alpha1.ListRuntimeVersionsRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 38,045 | java-texttospeech/proto-google-cloud-texttospeech-v1beta1/src/main/java/com/google/cloud/texttospeech/v1beta1/MultiSpeakerVoiceConfig.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/texttospeech/v1beta1/cloud_tts.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.texttospeech.v1beta1;
/**
*
*
* <pre>
* Configuration for a multi-speaker text-to-speech setup. Enables the use of up
* to two distinct voices in a single synthesis request.
* </pre>
*
* Protobuf type {@code google.cloud.texttospeech.v1beta1.MultiSpeakerVoiceConfig}
*/
public final class MultiSpeakerVoiceConfig extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.texttospeech.v1beta1.MultiSpeakerVoiceConfig)
MultiSpeakerVoiceConfigOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use MultiSpeakerVoiceConfig.newBuilder() to construct.
  private MultiSpeakerVoiceConfig(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // No-arg constructor used only for the default instance; the repeated field
  // defaults to an empty immutable list.
  private MultiSpeakerVoiceConfig() {
    speakerVoiceConfigs_ = java.util.Collections.emptyList();
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new MultiSpeakerVoiceConfig();
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.texttospeech.v1beta1.TextToSpeechProto
        .internal_static_google_cloud_texttospeech_v1beta1_MultiSpeakerVoiceConfig_descriptor;
  }

  // Binds the generated reflection accessor table to this message and its Builder.
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.texttospeech.v1beta1.TextToSpeechProto
        .internal_static_google_cloud_texttospeech_v1beta1_MultiSpeakerVoiceConfig_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.texttospeech.v1beta1.MultiSpeakerVoiceConfig.class,
            com.google.cloud.texttospeech.v1beta1.MultiSpeakerVoiceConfig.Builder.class);
  }
  // --- Field 2: speaker_voice_configs (repeated message) ---
  public static final int SPEAKER_VOICE_CONFIGS_FIELD_NUMBER = 2;
  @SuppressWarnings("serial")
  private java.util.List<com.google.cloud.texttospeech.v1beta1.MultispeakerPrebuiltVoice>
      speakerVoiceConfigs_;
  /**
   *
   *
   * <pre>
   * Required. A list of configurations for the voices of the speakers. Exactly
   * two speaker voice configurations must be provided.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.texttospeech.v1beta1.MultispeakerPrebuiltVoice speaker_voice_configs = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public java.util.List<com.google.cloud.texttospeech.v1beta1.MultispeakerPrebuiltVoice>
      getSpeakerVoiceConfigsList() {
    return speakerVoiceConfigs_;
  }
  /**
   *
   *
   * <pre>
   * Required. A list of configurations for the voices of the speakers. Exactly
   * two speaker voice configurations must be provided.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.texttospeech.v1beta1.MultispeakerPrebuiltVoice speaker_voice_configs = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public java.util.List<
          ? extends com.google.cloud.texttospeech.v1beta1.MultispeakerPrebuiltVoiceOrBuilder>
      getSpeakerVoiceConfigsOrBuilderList() {
    return speakerVoiceConfigs_;
  }
  /**
   *
   *
   * <pre>
   * Required. A list of configurations for the voices of the speakers. Exactly
   * two speaker voice configurations must be provided.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.texttospeech.v1beta1.MultispeakerPrebuiltVoice speaker_voice_configs = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public int getSpeakerVoiceConfigsCount() {
    return speakerVoiceConfigs_.size();
  }
  /**
   *
   *
   * <pre>
   * Required. A list of configurations for the voices of the speakers. Exactly
   * two speaker voice configurations must be provided.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.texttospeech.v1beta1.MultispeakerPrebuiltVoice speaker_voice_configs = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.texttospeech.v1beta1.MultispeakerPrebuiltVoice getSpeakerVoiceConfigs(
      int index) {
    return speakerVoiceConfigs_.get(index);
  }
  /**
   *
   *
   * <pre>
   * Required. A list of configurations for the voices of the speakers. Exactly
   * two speaker voice configurations must be provided.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.texttospeech.v1beta1.MultispeakerPrebuiltVoice speaker_voice_configs = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.texttospeech.v1beta1.MultispeakerPrebuiltVoiceOrBuilder
      getSpeakerVoiceConfigsOrBuilder(int index) {
    return speakerVoiceConfigs_.get(index);
  }
  // Memoized initialization state: -1 = not yet computed, 0 = not initialized, 1 = initialized.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // This message has no proto2-style required fields, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < speakerVoiceConfigs_.size(); i++) {
output.writeMessage(2, speakerVoiceConfigs_.get(i));
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < speakerVoiceConfigs_.size(); i++) {
size +=
com.google.protobuf.CodedOutputStream.computeMessageSize(2, speakerVoiceConfigs_.get(i));
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.texttospeech.v1beta1.MultiSpeakerVoiceConfig)) {
return super.equals(obj);
}
com.google.cloud.texttospeech.v1beta1.MultiSpeakerVoiceConfig other =
(com.google.cloud.texttospeech.v1beta1.MultiSpeakerVoiceConfig) obj;
if (!getSpeakerVoiceConfigsList().equals(other.getSpeakerVoiceConfigsList())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
  @java.lang.Override
  public int hashCode() {
    // 0 doubles as the "not cached" sentinel; any other value is the memoized hash.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    // Only fold in the repeated field when non-empty, mirroring equals() semantics.
    if (getSpeakerVoiceConfigsCount() > 0) {
      hash = (37 * hash) + SPEAKER_VOICE_CONFIGS_FIELD_NUMBER;
      hash = (53 * hash) + getSpeakerVoiceConfigsList().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Static parse entry points. All overloads delegate to PARSER (or to the
  // GeneratedMessageV3 IO helpers for stream-based inputs).
  public static com.google.cloud.texttospeech.v1beta1.MultiSpeakerVoiceConfig parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.texttospeech.v1beta1.MultiSpeakerVoiceConfig parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.texttospeech.v1beta1.MultiSpeakerVoiceConfig parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.texttospeech.v1beta1.MultiSpeakerVoiceConfig parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.texttospeech.v1beta1.MultiSpeakerVoiceConfig parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.texttospeech.v1beta1.MultiSpeakerVoiceConfig parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.texttospeech.v1beta1.MultiSpeakerVoiceConfig parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.texttospeech.v1beta1.MultiSpeakerVoiceConfig parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Delimited variants read a leading varint length before the message bytes.
  public static com.google.cloud.texttospeech.v1beta1.MultiSpeakerVoiceConfig parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.texttospeech.v1beta1.MultiSpeakerVoiceConfig parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.texttospeech.v1beta1.MultiSpeakerVoiceConfig parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.texttospeech.v1beta1.MultiSpeakerVoiceConfig parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  // Creates a fresh, empty builder.
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  // Creates a builder pre-populated with the fields of the given prototype.
  public static Builder newBuilder(
      com.google.cloud.texttospeech.v1beta1.MultiSpeakerVoiceConfig prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // The default instance yields an empty builder; otherwise seed it with this message.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Configuration for a multi-speaker text-to-speech setup. Enables the use of up
   * to two distinct voices in a single synthesis request.
   * </pre>
   *
   * Protobuf type {@code google.cloud.texttospeech.v1beta1.MultiSpeakerVoiceConfig}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.texttospeech.v1beta1.MultiSpeakerVoiceConfig)
      com.google.cloud.texttospeech.v1beta1.MultiSpeakerVoiceConfigOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.texttospeech.v1beta1.TextToSpeechProto
          .internal_static_google_cloud_texttospeech_v1beta1_MultiSpeakerVoiceConfig_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.texttospeech.v1beta1.TextToSpeechProto
          .internal_static_google_cloud_texttospeech_v1beta1_MultiSpeakerVoiceConfig_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.texttospeech.v1beta1.MultiSpeakerVoiceConfig.class,
              com.google.cloud.texttospeech.v1beta1.MultiSpeakerVoiceConfig.Builder.class);
    }
    // Construct using com.google.cloud.texttospeech.v1beta1.MultiSpeakerVoiceConfig.newBuilder()
    private Builder() {}
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      // Reset the repeated field through whichever representation is active:
      // the plain list, or the nested-builder view if one was created.
      if (speakerVoiceConfigsBuilder_ == null) {
        speakerVoiceConfigs_ = java.util.Collections.emptyList();
      } else {
        speakerVoiceConfigs_ = null;
        speakerVoiceConfigsBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.texttospeech.v1beta1.TextToSpeechProto
          .internal_static_google_cloud_texttospeech_v1beta1_MultiSpeakerVoiceConfig_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.texttospeech.v1beta1.MultiSpeakerVoiceConfig
        getDefaultInstanceForType() {
      return com.google.cloud.texttospeech.v1beta1.MultiSpeakerVoiceConfig.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.texttospeech.v1beta1.MultiSpeakerVoiceConfig build() {
      com.google.cloud.texttospeech.v1beta1.MultiSpeakerVoiceConfig result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.texttospeech.v1beta1.MultiSpeakerVoiceConfig buildPartial() {
      com.google.cloud.texttospeech.v1beta1.MultiSpeakerVoiceConfig result =
          new com.google.cloud.texttospeech.v1beta1.MultiSpeakerVoiceConfig(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Transfers the repeated field into the result. If the builder still owns a
    // mutable list, it is frozen here (and the ownership bit cleared) so later
    // builder mutations copy-on-write instead of aliasing the built message.
    private void buildPartialRepeatedFields(
        com.google.cloud.texttospeech.v1beta1.MultiSpeakerVoiceConfig result) {
      if (speakerVoiceConfigsBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)) {
          speakerVoiceConfigs_ = java.util.Collections.unmodifiableList(speakerVoiceConfigs_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.speakerVoiceConfigs_ = speakerVoiceConfigs_;
      } else {
        result.speakerVoiceConfigs_ = speakerVoiceConfigsBuilder_.build();
      }
    }
    // No singular fields exist on this message; from_bitField0_ is unused but
    // kept for generated-code structure.
    private void buildPartial0(
        com.google.cloud.texttospeech.v1beta1.MultiSpeakerVoiceConfig result) {
      int from_bitField0_ = bitField0_;
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.texttospeech.v1beta1.MultiSpeakerVoiceConfig) {
        return mergeFrom((com.google.cloud.texttospeech.v1beta1.MultiSpeakerVoiceConfig) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    public Builder mergeFrom(com.google.cloud.texttospeech.v1beta1.MultiSpeakerVoiceConfig other) {
      if (other
          == com.google.cloud.texttospeech.v1beta1.MultiSpeakerVoiceConfig.getDefaultInstance())
        return this;
      if (speakerVoiceConfigsBuilder_ == null) {
        if (!other.speakerVoiceConfigs_.isEmpty()) {
          if (speakerVoiceConfigs_.isEmpty()) {
            // Our list is empty: alias the other message's immutable list directly.
            speakerVoiceConfigs_ = other.speakerVoiceConfigs_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureSpeakerVoiceConfigsIsMutable();
            speakerVoiceConfigs_.addAll(other.speakerVoiceConfigs_);
          }
          onChanged();
        }
      } else {
        if (!other.speakerVoiceConfigs_.isEmpty()) {
          if (speakerVoiceConfigsBuilder_.isEmpty()) {
            // Empty nested builder: drop it and alias the other message's list,
            // recreating the builder only when the runtime always uses field builders.
            speakerVoiceConfigsBuilder_.dispose();
            speakerVoiceConfigsBuilder_ = null;
            speakerVoiceConfigs_ = other.speakerVoiceConfigs_;
            bitField0_ = (bitField0_ & ~0x00000001);
            speakerVoiceConfigsBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getSpeakerVoiceConfigsFieldBuilder()
                    : null;
          } else {
            speakerVoiceConfigsBuilder_.addAllMessages(other.speakerVoiceConfigs_);
          }
        }
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            // Tag 18 = field number 2, wire type 2 (length-delimited message).
            case 18:
              {
                com.google.cloud.texttospeech.v1beta1.MultispeakerPrebuiltVoice m =
                    input.readMessage(
                        com.google.cloud.texttospeech.v1beta1.MultispeakerPrebuiltVoice.parser(),
                        extensionRegistry);
                if (speakerVoiceConfigsBuilder_ == null) {
                  ensureSpeakerVoiceConfigsIsMutable();
                  speakerVoiceConfigs_.add(m);
                } else {
                  speakerVoiceConfigsBuilder_.addMessage(m);
                }
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Bit 0x00000001 tracks whether speakerVoiceConfigs_ is a private mutable copy
    // owned by this builder (vs. an aliased/immutable list).
    private int bitField0_;
    private java.util.List<com.google.cloud.texttospeech.v1beta1.MultispeakerPrebuiltVoice>
        speakerVoiceConfigs_ = java.util.Collections.emptyList();
    // Copy-on-write: replaces the list with a private ArrayList copy the first
    // time the builder mutates it.
    private void ensureSpeakerVoiceConfigsIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        speakerVoiceConfigs_ =
            new java.util.ArrayList<
                com.google.cloud.texttospeech.v1beta1.MultispeakerPrebuiltVoice>(
                speakerVoiceConfigs_);
        bitField0_ |= 0x00000001;
      }
    }
    // Lazily-created nested-builder view of the repeated field. Once non-null, it
    // owns the data and speakerVoiceConfigs_ is no longer used.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.texttospeech.v1beta1.MultispeakerPrebuiltVoice,
            com.google.cloud.texttospeech.v1beta1.MultispeakerPrebuiltVoice.Builder,
            com.google.cloud.texttospeech.v1beta1.MultispeakerPrebuiltVoiceOrBuilder>
        speakerVoiceConfigsBuilder_;
    /**
     *
     *
     * <pre>
     * Required. A list of configurations for the voices of the speakers. Exactly
     * two speaker voice configurations must be provided.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.texttospeech.v1beta1.MultispeakerPrebuiltVoice speaker_voice_configs = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public java.util.List<com.google.cloud.texttospeech.v1beta1.MultispeakerPrebuiltVoice>
        getSpeakerVoiceConfigsList() {
      if (speakerVoiceConfigsBuilder_ == null) {
        return java.util.Collections.unmodifiableList(speakerVoiceConfigs_);
      } else {
        return speakerVoiceConfigsBuilder_.getMessageList();
      }
    }
    /**
     *
     *
     * <pre>
     * Required. A list of configurations for the voices of the speakers. Exactly
     * two speaker voice configurations must be provided.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.texttospeech.v1beta1.MultispeakerPrebuiltVoice speaker_voice_configs = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public int getSpeakerVoiceConfigsCount() {
      if (speakerVoiceConfigsBuilder_ == null) {
        return speakerVoiceConfigs_.size();
      } else {
        return speakerVoiceConfigsBuilder_.getCount();
      }
    }
    /**
     *
     *
     * <pre>
     * Required. A list of configurations for the voices of the speakers. Exactly
     * two speaker voice configurations must be provided.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.texttospeech.v1beta1.MultispeakerPrebuiltVoice speaker_voice_configs = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.texttospeech.v1beta1.MultispeakerPrebuiltVoice getSpeakerVoiceConfigs(
        int index) {
      if (speakerVoiceConfigsBuilder_ == null) {
        return speakerVoiceConfigs_.get(index);
      } else {
        return speakerVoiceConfigsBuilder_.getMessage(index);
      }
    }
    /**
     *
     *
     * <pre>
     * Required. A list of configurations for the voices of the speakers. Exactly
     * two speaker voice configurations must be provided.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.texttospeech.v1beta1.MultispeakerPrebuiltVoice speaker_voice_configs = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setSpeakerVoiceConfigs(
        int index, com.google.cloud.texttospeech.v1beta1.MultispeakerPrebuiltVoice value) {
      if (speakerVoiceConfigsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureSpeakerVoiceConfigsIsMutable();
        speakerVoiceConfigs_.set(index, value);
        onChanged();
      } else {
        speakerVoiceConfigsBuilder_.setMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. A list of configurations for the voices of the speakers. Exactly
     * two speaker voice configurations must be provided.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.texttospeech.v1beta1.MultispeakerPrebuiltVoice speaker_voice_configs = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setSpeakerVoiceConfigs(
        int index,
        com.google.cloud.texttospeech.v1beta1.MultispeakerPrebuiltVoice.Builder builderForValue) {
      if (speakerVoiceConfigsBuilder_ == null) {
        ensureSpeakerVoiceConfigsIsMutable();
        speakerVoiceConfigs_.set(index, builderForValue.build());
        onChanged();
      } else {
        speakerVoiceConfigsBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. A list of configurations for the voices of the speakers. Exactly
     * two speaker voice configurations must be provided.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.texttospeech.v1beta1.MultispeakerPrebuiltVoice speaker_voice_configs = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder addSpeakerVoiceConfigs(
        com.google.cloud.texttospeech.v1beta1.MultispeakerPrebuiltVoice value) {
      if (speakerVoiceConfigsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureSpeakerVoiceConfigsIsMutable();
        speakerVoiceConfigs_.add(value);
        onChanged();
      } else {
        speakerVoiceConfigsBuilder_.addMessage(value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. A list of configurations for the voices of the speakers. Exactly
     * two speaker voice configurations must be provided.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.texttospeech.v1beta1.MultispeakerPrebuiltVoice speaker_voice_configs = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder addSpeakerVoiceConfigs(
        int index, com.google.cloud.texttospeech.v1beta1.MultispeakerPrebuiltVoice value) {
      if (speakerVoiceConfigsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureSpeakerVoiceConfigsIsMutable();
        speakerVoiceConfigs_.add(index, value);
        onChanged();
      } else {
        speakerVoiceConfigsBuilder_.addMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. A list of configurations for the voices of the speakers. Exactly
     * two speaker voice configurations must be provided.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.texttospeech.v1beta1.MultispeakerPrebuiltVoice speaker_voice_configs = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder addSpeakerVoiceConfigs(
        com.google.cloud.texttospeech.v1beta1.MultispeakerPrebuiltVoice.Builder builderForValue) {
      if (speakerVoiceConfigsBuilder_ == null) {
        ensureSpeakerVoiceConfigsIsMutable();
        speakerVoiceConfigs_.add(builderForValue.build());
        onChanged();
      } else {
        speakerVoiceConfigsBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. A list of configurations for the voices of the speakers. Exactly
     * two speaker voice configurations must be provided.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.texttospeech.v1beta1.MultispeakerPrebuiltVoice speaker_voice_configs = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder addSpeakerVoiceConfigs(
        int index,
        com.google.cloud.texttospeech.v1beta1.MultispeakerPrebuiltVoice.Builder builderForValue) {
      if (speakerVoiceConfigsBuilder_ == null) {
        ensureSpeakerVoiceConfigsIsMutable();
        speakerVoiceConfigs_.add(index, builderForValue.build());
        onChanged();
      } else {
        speakerVoiceConfigsBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. A list of configurations for the voices of the speakers. Exactly
     * two speaker voice configurations must be provided.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.texttospeech.v1beta1.MultispeakerPrebuiltVoice speaker_voice_configs = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder addAllSpeakerVoiceConfigs(
        java.lang.Iterable<
                ? extends com.google.cloud.texttospeech.v1beta1.MultispeakerPrebuiltVoice>
            values) {
      if (speakerVoiceConfigsBuilder_ == null) {
        ensureSpeakerVoiceConfigsIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, speakerVoiceConfigs_);
        onChanged();
      } else {
        speakerVoiceConfigsBuilder_.addAllMessages(values);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. A list of configurations for the voices of the speakers. Exactly
     * two speaker voice configurations must be provided.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.texttospeech.v1beta1.MultispeakerPrebuiltVoice speaker_voice_configs = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder clearSpeakerVoiceConfigs() {
      if (speakerVoiceConfigsBuilder_ == null) {
        speakerVoiceConfigs_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        speakerVoiceConfigsBuilder_.clear();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. A list of configurations for the voices of the speakers. Exactly
     * two speaker voice configurations must be provided.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.texttospeech.v1beta1.MultispeakerPrebuiltVoice speaker_voice_configs = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder removeSpeakerVoiceConfigs(int index) {
      if (speakerVoiceConfigsBuilder_ == null) {
        ensureSpeakerVoiceConfigsIsMutable();
        speakerVoiceConfigs_.remove(index);
        onChanged();
      } else {
        speakerVoiceConfigsBuilder_.remove(index);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. A list of configurations for the voices of the speakers. Exactly
     * two speaker voice configurations must be provided.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.texttospeech.v1beta1.MultispeakerPrebuiltVoice speaker_voice_configs = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.texttospeech.v1beta1.MultispeakerPrebuiltVoice.Builder
        getSpeakerVoiceConfigsBuilder(int index) {
      return getSpeakerVoiceConfigsFieldBuilder().getBuilder(index);
    }
    /**
     *
     *
     * <pre>
     * Required. A list of configurations for the voices of the speakers. Exactly
     * two speaker voice configurations must be provided.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.texttospeech.v1beta1.MultispeakerPrebuiltVoice speaker_voice_configs = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.texttospeech.v1beta1.MultispeakerPrebuiltVoiceOrBuilder
        getSpeakerVoiceConfigsOrBuilder(int index) {
      if (speakerVoiceConfigsBuilder_ == null) {
        return speakerVoiceConfigs_.get(index);
      } else {
        return speakerVoiceConfigsBuilder_.getMessageOrBuilder(index);
      }
    }
    /**
     *
     *
     * <pre>
     * Required. A list of configurations for the voices of the speakers. Exactly
     * two speaker voice configurations must be provided.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.texttospeech.v1beta1.MultispeakerPrebuiltVoice speaker_voice_configs = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public java.util.List<
            ? extends com.google.cloud.texttospeech.v1beta1.MultispeakerPrebuiltVoiceOrBuilder>
        getSpeakerVoiceConfigsOrBuilderList() {
      if (speakerVoiceConfigsBuilder_ != null) {
        return speakerVoiceConfigsBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(speakerVoiceConfigs_);
      }
    }
    /**
     *
     *
     * <pre>
     * Required. A list of configurations for the voices of the speakers. Exactly
     * two speaker voice configurations must be provided.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.texttospeech.v1beta1.MultispeakerPrebuiltVoice speaker_voice_configs = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.texttospeech.v1beta1.MultispeakerPrebuiltVoice.Builder
        addSpeakerVoiceConfigsBuilder() {
      return getSpeakerVoiceConfigsFieldBuilder()
          .addBuilder(
              com.google.cloud.texttospeech.v1beta1.MultispeakerPrebuiltVoice.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * Required. A list of configurations for the voices of the speakers. Exactly
     * two speaker voice configurations must be provided.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.texttospeech.v1beta1.MultispeakerPrebuiltVoice speaker_voice_configs = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.texttospeech.v1beta1.MultispeakerPrebuiltVoice.Builder
        addSpeakerVoiceConfigsBuilder(int index) {
      return getSpeakerVoiceConfigsFieldBuilder()
          .addBuilder(
              index,
              com.google.cloud.texttospeech.v1beta1.MultispeakerPrebuiltVoice.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * Required. A list of configurations for the voices of the speakers. Exactly
     * two speaker voice configurations must be provided.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.texttospeech.v1beta1.MultispeakerPrebuiltVoice speaker_voice_configs = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public java.util.List<com.google.cloud.texttospeech.v1beta1.MultispeakerPrebuiltVoice.Builder>
        getSpeakerVoiceConfigsBuilderList() {
      return getSpeakerVoiceConfigsFieldBuilder().getBuilderList();
    }
    // Lazily switches the repeated field to the nested-builder representation;
    // after this, speakerVoiceConfigs_ is nulled and the builder owns the data.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.texttospeech.v1beta1.MultispeakerPrebuiltVoice,
            com.google.cloud.texttospeech.v1beta1.MultispeakerPrebuiltVoice.Builder,
            com.google.cloud.texttospeech.v1beta1.MultispeakerPrebuiltVoiceOrBuilder>
        getSpeakerVoiceConfigsFieldBuilder() {
      if (speakerVoiceConfigsBuilder_ == null) {
        speakerVoiceConfigsBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloud.texttospeech.v1beta1.MultispeakerPrebuiltVoice,
                com.google.cloud.texttospeech.v1beta1.MultispeakerPrebuiltVoice.Builder,
                com.google.cloud.texttospeech.v1beta1.MultispeakerPrebuiltVoiceOrBuilder>(
                speakerVoiceConfigs_,
                ((bitField0_ & 0x00000001) != 0),
                getParentForChildren(),
                isClean());
        speakerVoiceConfigs_ = null;
      }
      return speakerVoiceConfigsBuilder_;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.cloud.texttospeech.v1beta1.MultiSpeakerVoiceConfig)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.texttospeech.v1beta1.MultiSpeakerVoiceConfig)
  // Shared immutable default instance; also serves as the prototype for builders.
  private static final com.google.cloud.texttospeech.v1beta1.MultiSpeakerVoiceConfig
      DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.texttospeech.v1beta1.MultiSpeakerVoiceConfig();
  }
  public static com.google.cloud.texttospeech.v1beta1.MultiSpeakerVoiceConfig getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Singleton parser backing every static parseFrom overload.
  private static final com.google.protobuf.Parser<MultiSpeakerVoiceConfig> PARSER =
      new com.google.protobuf.AbstractParser<MultiSpeakerVoiceConfig>() {
        @java.lang.Override
        public MultiSpeakerVoiceConfig parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            // Attach the partially parsed message so callers can still inspect it.
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<MultiSpeakerVoiceConfig> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<MultiSpeakerVoiceConfig> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.texttospeech.v1beta1.MultiSpeakerVoiceConfig getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
oracle/graal | 38,272 | substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/jdk/Resources.java | /*
* Copyright (c) 2015, 2024, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package com.oracle.svm.core.jdk;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.function.BiConsumer;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;
import org.graalvm.collections.EconomicMap;
import org.graalvm.collections.MapCursor;
import org.graalvm.collections.UnmodifiableEconomicMap;
import org.graalvm.nativeimage.ImageInfo;
import org.graalvm.nativeimage.ImageSingletons;
import org.graalvm.nativeimage.Platform;
import org.graalvm.nativeimage.Platforms;
import org.graalvm.nativeimage.dynamicaccess.AccessCondition;
import com.oracle.svm.core.AlwaysInline;
import com.oracle.svm.core.BuildPhaseProvider;
import com.oracle.svm.core.ClassLoaderSupport.ConditionWithOrigin;
import com.oracle.svm.core.MissingRegistrationUtils;
import com.oracle.svm.core.SubstrateOptions;
import com.oracle.svm.core.SubstrateUtil;
import com.oracle.svm.core.configure.ConditionalRuntimeValue;
import com.oracle.svm.core.configure.RuntimeConditionSet;
import com.oracle.svm.core.encoder.SymbolEncoder;
import com.oracle.svm.core.feature.AutomaticallyRegisteredFeature;
import com.oracle.svm.core.feature.InternalFeature;
import com.oracle.svm.core.imagelayer.ImageLayerBuildingSupport;
import com.oracle.svm.core.jdk.resources.MissingResourceRegistrationUtils;
import com.oracle.svm.core.jdk.resources.ResourceExceptionEntry;
import com.oracle.svm.core.jdk.resources.ResourceStorageEntry;
import com.oracle.svm.core.jdk.resources.ResourceStorageEntryBase;
import com.oracle.svm.core.jdk.resources.ResourceURLConnection;
import com.oracle.svm.core.jdk.resources.CompressedGlobTrie.CompressedGlobTrie;
import com.oracle.svm.core.jdk.resources.CompressedGlobTrie.GlobTrieNode;
import com.oracle.svm.core.layeredimagesingleton.ImageSingletonLoader;
import com.oracle.svm.core.layeredimagesingleton.ImageSingletonWriter;
import com.oracle.svm.core.layeredimagesingleton.LayeredImageSingleton;
import com.oracle.svm.core.layeredimagesingleton.LayeredImageSingletonSupport;
import com.oracle.svm.core.layeredimagesingleton.MultiLayeredImageSingleton;
import com.oracle.svm.core.metadata.MetadataTracer;
import com.oracle.svm.core.traits.BuiltinTraits.AllAccess;
import com.oracle.svm.core.traits.SingletonLayeredCallbacks;
import com.oracle.svm.core.traits.SingletonLayeredCallbacksSupplier;
import com.oracle.svm.core.traits.SingletonLayeredInstallationKind.MultiLayer;
import com.oracle.svm.core.traits.SingletonTrait;
import com.oracle.svm.core.traits.SingletonTraitKind;
import com.oracle.svm.core.traits.SingletonTraits;
import com.oracle.svm.core.util.ImageHeapMap;
import com.oracle.svm.core.util.VMError;
import com.oracle.svm.util.GlobUtils;
import com.oracle.svm.util.LogUtils;
import com.oracle.svm.util.NativeImageResourcePathRepresentation;
/**
* Support for resources on Substrate VM. All resources that need to be available at run time need
* to be added explicitly during native image generation using {@link #registerResource}.
*
* Registered resources are then available from DynamicHub#getResource classes and
* {@link Target_java_lang_ClassLoader class loaders}.
*/
@SingletonTraits(access = AllAccess.class, layeredCallbacks = Resources.LayeredCallbacks.class, layeredInstallationKind = MultiLayer.class)
public final class Resources {
    /** Sentinel meaning {@link #lastModifiedTime} has not been initialized yet. */
    private static final int INVALID_TIMESTAMP = -1;
    public static final char RESOURCES_INTERNAL_PATH_SEPARATOR = '/';
    /* Keys used by LayeredCallbacks/SingletonInstantiator to persist and restore layered-image state. */
    private static final String RESOURCE_KEYS = "resourceKeys";
    private static final String RESOURCE_REGISTRATION_STATES = "resourceRegistrationStates";
    private static final String PATTERNS = "patterns";
    /** Encodes module names in include patterns; injected by {@code ResourcesFeature.afterRegistration}. */
    @Platforms(Platform.HOSTED_ONLY.class) //
    private SymbolEncoder encoder;
    /**
     * @return the singleton corresponding to this layer's resources in a layered build, the unique
     *         singleton otherwise
     */
    @Platforms(Platform.HOSTED_ONLY.class)
    public static Resources currentLayer() {
        return LayeredImageSingletonSupport.singleton().lookup(Resources.class, false, true);
    }
    /**
     * @return an array of singletons corresponding to all layers in a layered build, or an array
     *         with a single element otherwise
     */
    public static Resources[] layeredSingletons() {
        assert !SubstrateUtil.HOSTED : "Accessing all layers resources at build time";
        return MultiLayeredImageSingleton.getAllLayers(Resources.class);
    }
    /**
     * The hosted map used to collect registered resources. Using a {@link ModuleResourceKey} of
     * (module, resourceName) provides implementations for {@code hashCode()} and {@code equals()}
     * needed for the map keys. Hosted module instances differ to runtime instances, so the map that
     * ends up in the image heap is computed after the runtime module instances have been computed
     * {see com.oracle.svm.hosted.ModuleLayerFeature}.
     */
    private final EconomicMap<ModuleResourceKey, ConditionalRuntimeValue<ResourceStorageEntryBase>> resources = ImageHeapMap.createNonLayeredMap();
    /** Regexp patterns used to match names of resources to be included in the image. */
    private final EconomicMap<RequestedPattern, RuntimeConditionSet> requestedPatterns = ImageHeapMap.createNonLayeredMap();
    /**
     * The string representation of {@link ModuleNameResourceKey} that are already registered in
     * previous layers. Since the {@link ModuleInstanceResourceKey} contains a reference to a
     * {@link Module}, the {@link Module} name is used instead of the object itself in the string
     * representation. This works under the assumption (enforced by
     * LayeredModuleSingleton.setPackages) that all modules have a different unique name in Layered
     * Images.
     *
     * The boolean associated to each {@link ModuleNameResourceKey} is true if the registered value
     * is complete and false in the case of a negative query.
     */
    @Platforms(Platform.HOSTED_ONLY.class) //
    private final Map<String, Boolean> previousLayerResources;
    /**
     * The string representation of {@link RequestedPattern} that are already registered in previous
     * layers.
     */
    @Platforms(Platform.HOSTED_ONLY.class) //
    private final Set<String> previousLayerPatterns;
    /** A (module name, pattern) pair describing resources requested for inclusion. */
    public record RequestedPattern(String module, String pattern) {
    }
    /** Common key abstraction over the standalone and layered key representations below. */
    public interface ModuleResourceKey {
        Module getModule();
        String getModuleName();
        Object module();
        String resource();
    }
    /**
     * In standalone images, the module object is the {@link Module} reference itself.
     */
    public record ModuleInstanceResourceKey(Module module, String resource) implements ModuleResourceKey {
        public ModuleInstanceResourceKey {
            assert !ImageLayerBuildingSupport.buildingImageLayer() : "The ModuleInstanceResourceKey should only be used in standalone images.";
        }
        @Override
        public Module getModule() {
            return module;
        }
        @Override
        public String getModuleName() {
            if (module == null) {
                return null;
            }
            return module.getName();
        }
    }
    /**
     * In Layered Image, only the module name is stored in the record.
     */
    public record ModuleNameResourceKey(Object module, String resource) implements ModuleResourceKey {
        public ModuleNameResourceKey {
            /*
             * A null module in the ModuleResourceKey represents any unnamed module, meaning that
             * only one marker (null) is needed for all of them and that if the module is not null,
             * it is named (see Resources.createStorageKey). This string representation relies on
             * the assumption (enforced by LayeredModuleSingleton.setPackages) that a layered image
             * build cannot contain two modules with the same name, so Module#getName() is
             * guaranteed to be unique for layered images.
             */
            assert module == null || module instanceof Module : "The ModuleNameResourceKey constructor should only be called with a Module as first argument";
            assert ImageLayerBuildingSupport.buildingImageLayer() : "The ModuleNameResourceKey should only be used in layered images.";
            // Replace the Module reference by its (unique) name before storing it in the record.
            module = (module != null) ? ((Module) module).getName() : module;
        }
        @Override
        public Module getModule() {
            throw VMError.shouldNotReachHere("Accessing the module instance of the ModuleResourceKey is not supported in layered images.");
        }
        @Override
        public String getModuleName() {
            return (String) module;
        }
    }
    /**
     * A resource marked with the NEGATIVE_QUERY_MARKER is a resource included in the image
     * according to the resource configuration, but it does not actually exist. Trying to access it
     * at runtime will return {@code null} and not throw a
     * {@link com.oracle.svm.core.jdk.resources.MissingResourceRegistrationError}.
     */
    public static final ResourceStorageEntryBase NEGATIVE_QUERY_MARKER = new ResourceStorageEntryBase();
    /**
     * The object used to detect that the resource is not reachable according to the metadata. It
     * can be returned by the {@link Resources#getAtRuntime} method if the resource was not
     * correctly specified in the configuration, but we do not want to throw directly (for example
     * when we try to check all the modules for a resource).
     */
    public static final ResourceStorageEntryBase MISSING_METADATA_MARKER = new ResourceStorageEntryBase();
    /**
     * Embedding a resource into an image is counted as a modification. Since all resources are
     * baked into the image during image generation, we save this value so that it can be fetched
     * later by calling {@link ResourceURLConnection#getLastModified()}.
     */
    private long lastModifiedTime = INVALID_TIMESTAMP;
    /** Root of the compressed glob trie used to match resource include globs at run time. */
    private GlobTrieNode<ConditionWithOrigin> resourcesTrieRoot;
    /** Maps hosted Module instances to their runtime counterparts; see createStorageKey. */
    @Platforms(Platform.HOSTED_ONLY.class) //
    private Function<Module, Module> hostedToRuntimeModuleMapper;
    /** Constructor for the first/only layer: no previous-layer state to take into account. */
    Resources() {
        this(Map.of(), Set.of());
    }
    /** Constructor for extension layers, restoring state persisted by previous layers. */
    Resources(Map<String, Boolean> previousLayerResources, Set<String> previousLayerPatterns) {
        this.previousLayerResources = previousLayerResources;
        this.previousLayerPatterns = previousLayerPatterns;
    }
    @Platforms(Platform.HOSTED_ONLY.class)
    public void setEncoder(SymbolEncoder encoder) {
        this.encoder = encoder;
    }
    public GlobTrieNode<ConditionWithOrigin> getResourcesTrieRoot() {
        return resourcesTrieRoot;
    }
    @Platforms(Platform.HOSTED_ONLY.class)
    public void setResourcesTrieRoot(GlobTrieNode<ConditionWithOrigin> resourcesTrieRoot) {
        this.resourcesTrieRoot = resourcesTrieRoot;
    }
    /** Applies {@code action} to every (key, value) pair currently registered in this layer. */
    public void forEachResource(BiConsumer<ModuleResourceKey, ConditionalRuntimeValue<ResourceStorageEntryBase>> action) {
        MapCursor<ModuleResourceKey, ConditionalRuntimeValue<ResourceStorageEntryBase>> entries = resources.getEntries();
        while (entries.advance()) {
            action.accept(entries.getKey(), entries.getValue());
        }
    }
    @Platforms(Platform.HOSTED_ONLY.class)
    public UnmodifiableEconomicMap<ModuleResourceKey, ConditionalRuntimeValue<ResourceStorageEntryBase>> resources() {
        return resources;
    }
    /** Returns the last-modified timestamp recorded by the most recent layer. */
    public static long getLastModifiedTime() {
        var singletons = layeredSingletons();
        return singletons[singletons.length - 1].lastModifiedTime;
    }
    /** Null-safe accessor for a module's name (null for a null module). */
    public static String moduleName(Module module) {
        return module == null ? null : module.getName();
    }
    /**
     * Builds the map key for (module, resourceName). Unnamed modules are collapsed to the single
     * {@code null} marker; at build time named modules are first mapped to their runtime instance.
     */
    public static ModuleResourceKey createStorageKey(Module module, String resourceName) {
        Module m = module != null && module.isNamed() ? module : null;
        if (ImageInfo.inImageBuildtimeCode()) {
            if (m != null) {
                m = currentLayer().hostedToRuntimeModuleMapper.apply(m);
            }
        }
        return ImageLayerBuildingSupport.buildingImageLayer() ? new ModuleNameResourceKey(m, resourceName) : new ModuleInstanceResourceKey(m, resourceName);
    }
    @Platforms(Platform.HOSTED_ONLY.class) //
    public void setHostedToRuntimeModuleMapper(Function<Module, Module> hostedToRuntimeModuleMapper) {
        this.hostedToRuntimeModuleMapper = hostedToRuntimeModuleMapper;
    }
    /** @return the names of all named modules that contributed at least one registered resource. */
    @Platforms(Platform.HOSTED_ONLY.class)
    public static Set<String> getIncludedResourcesModules() {
        return StreamSupport.stream(currentLayer().resources.getKeys().spliterator(), false)
                        .map(ModuleResourceKey::getModuleName)
                        .filter(Objects::nonNull)
                        .collect(Collectors.toSet());
    }
    /** Drains {@code is} fully; an IOException is treated as a fatal build-time error. */
    public static byte[] inputStreamToByteArray(InputStream is) {
        try {
            return is.readAllBytes();
        } catch (IOException ex) {
            throw VMError.shouldNotReachHere(ex);
        }
    }
    /** Records the time of the first resource registration; later calls are no-ops. */
    private void updateTimeStamp() {
        if (lastModifiedTime == INVALID_TIMESTAMP) {
            lastModifiedTime = new Date().getTime();
        }
    }
    /**
     * Stores {@code entry} unless an equivalent entry was already registered in a previous layer.
     * A complete entry is still stored when the previous layer only had a negative query for it.
     */
    private void addResource(ModuleResourceKey key, ConditionalRuntimeValue<ResourceStorageEntryBase> entry) {
        Boolean previousLayerData = ImageLayerBuildingSupport.buildingImageLayer() ? previousLayerResources.get(key.toString()) : null;
        /* GR-66387: The runtime condition should be combined across layers. */
        if (previousLayerData == null || (!previousLayerData && entry.getValueUnconditionally() != NEGATIVE_QUERY_MARKER)) {
            resources.put(key, entry);
        }
    }
    /**
     * Central registration path: creates or updates the storage entry for (module, resourceName)
     * and appends {@code data} to it. Negative queries never overwrite an existing entry.
     */
    @Platforms(Platform.HOSTED_ONLY.class)
    private void addEntry(Module module, String resourceName, boolean isDirectory, byte[] data, boolean fromJar, boolean isNegativeQuery) {
        VMError.guarantee(!BuildPhaseProvider.isAnalysisFinished(), "Trying to add a resource entry after analysis.");
        Module m = module != null && module.isNamed() ? module : null;
        synchronized (resources) {
            ModuleResourceKey key = createStorageKey(m, resourceName);
            RuntimeConditionSet conditionSet = RuntimeConditionSet.emptySet();
            ConditionalRuntimeValue<ResourceStorageEntryBase> entry = resources.get(key);
            if (isNegativeQuery) {
                if (entry == null) {
                    addResource(key, new ConditionalRuntimeValue<>(conditionSet, NEGATIVE_QUERY_MARKER));
                }
                return;
            }
            if (entry == null || entry.getValueUnconditionally() == NEGATIVE_QUERY_MARKER) {
                updateTimeStamp();
                entry = new ConditionalRuntimeValue<>(conditionSet, new ResourceStorageEntry(isDirectory, fromJar));
                addResource(key, entry);
            } else {
                if (key.module() != null) {
                    // if the entry already exists, and it comes from a module, it is the same entry
                    // that we registered at some point before
                    return;
                }
            }
            entry.getValueUnconditionally().addData(data);
        }
    }
    /** Convenience overload: registers a classpath (module-less) resource from a jar. */
    @Platforms(Platform.HOSTED_ONLY.class)
    public static void registerResource(String resourceName, InputStream is) {
        currentLayer().registerResource(null, resourceName, is, true);
    }
    @Platforms(Platform.HOSTED_ONLY.class)
    public void registerResource(Module module, String resourceName, byte[] resourceContent) {
        addEntry(module, resourceName, false, resourceContent, true, false);
    }
    @Platforms(Platform.HOSTED_ONLY.class)
    public void registerResource(Module module, String resourceName, InputStream is, boolean fromJar) {
        addEntry(module, resourceName, false, inputStreamToByteArray(is), fromJar, false);
    }
    @Platforms(Platform.HOSTED_ONLY.class)
    public void registerDirectoryResource(Module module, String resourceDirName, String content, boolean fromJar) {
        /*
         * A directory content represents the names of all files and subdirectories located in the
         * specified directory, separated with new line delimiter and joined into one string which
         * is later converted into a byte array and placed into the resources map.
         */
        addEntry(module, resourceDirName, true, content.getBytes(), fromJar, false);
    }
    /**
     * Records that reading the resource failed at build time. With link-at-build-time the failure
     * is reported immediately (throw or warn, depending on the option); otherwise the exception is
     * stored and rethrown when the resource is accessed at run time.
     */
    @Platforms(Platform.HOSTED_ONLY.class)
    public void registerIOException(Module module, String resourceName, IOException e, boolean linkAtBuildTime) {
        if (linkAtBuildTime) {
            if (SubstrateOptions.ThrowLinkAtBuildTimeIOExceptions.getValue()) {
                throw new RuntimeException("Resource " + resourceName + " from module " + moduleName(module) + " produced an IOException.", e);
            } else {
                LogUtils.warning("Resource " + resourceName + " from module " + moduleName(module) + " produced the following IOException: " + e.getClass().getTypeName() + ": " + e.getMessage());
            }
        }
        ModuleResourceKey key = createStorageKey(module, resourceName);
        synchronized (resources) {
            updateTimeStamp();
            addResource(key, new ConditionalRuntimeValue<>(RuntimeConditionSet.emptySet(), new ResourceExceptionEntry(e)));
        }
    }
    @Platforms(Platform.HOSTED_ONLY.class)
    public void registerNegativeQuery(String resourceName) {
        registerNegativeQuery(null, resourceName);
    }
    /** Records that the resource was requested but does not exist; see NEGATIVE_QUERY_MARKER. */
    @Platforms(Platform.HOSTED_ONLY.class)
    public void registerNegativeQuery(Module module, String resourceName) {
        addEntry(module, resourceName, false, null, false, true);
    }
    /** Registers an include pattern used at run time to suppress missing-metadata errors. */
    @Platforms(Platform.HOSTED_ONLY.class)
    public void registerIncludePattern(AccessCondition condition, String module, String pattern) {
        assert MissingRegistrationUtils.throwMissingRegistrationErrors();
        synchronized (requestedPatterns) {
            updateTimeStamp();
            addPattern(new RequestedPattern(encoder.encodeModule(module), handleEscapedCharacters(pattern)), RuntimeConditionSet.createHosted(condition));
        }
    }
    /** Adds the pattern unless an identical one was already registered in a previous layer. */
    @Platforms(Platform.HOSTED_ONLY.class)
    private void addPattern(RequestedPattern pattern, RuntimeConditionSet condition) {
        if (!previousLayerPatterns.contains(pattern.toString())) {
            requestedPatterns.put(pattern, condition);
        }
    }
    @Platforms(Platform.HOSTED_ONLY.class)//
    private static final String BEGIN_ESCAPED_SEQUENCE = "\\Q";
    @Platforms(Platform.HOSTED_ONLY.class)//
    private static final String END_ESCAPED_SEQUENCE = "\\E";
    /*
     * This handles generated include patterns which start and end with \Q and \E. The actual
     * resource name is located in between those tags.
     */
    @Platforms(Platform.HOSTED_ONLY.class)
    private static String handleEscapedCharacters(String pattern) {
        if (pattern.startsWith(BEGIN_ESCAPED_SEQUENCE) && pattern.endsWith(END_ESCAPED_SEQUENCE)) {
            return pattern.substring(BEGIN_ESCAPED_SEQUENCE.length(), pattern.length() - END_ESCAPED_SEQUENCE.length());
        }
        return pattern;
    }
    private static boolean hasTrailingSlash(String resourceName) {
        return resourceName.endsWith("/");
    }
    private static String removeTrailingSlash(String resourceName) {
        return hasTrailingSlash(resourceName) ? resourceName.substring(0, resourceName.length() - 1) : resourceName;
    }
    /** True if the requested name equals its canonical form (ignoring a trailing slash). */
    private static boolean wasAlreadyInCanonicalForm(String resourceName, String canonicalResourceName) {
        return resourceName.equals(canonicalResourceName) || removeTrailingSlash(resourceName).equals(canonicalResourceName);
    }
    /** Non-probing lookup of a classpath (module-less) resource. */
    public static ResourceStorageEntryBase getAtRuntime(String name) {
        return getAtRuntime(null, name, false);
    }
    /**
     * Looks up a resource from {@code module} with name {@code resourceName}.
     * <p>
     * The {@code probe} parameter indicates whether the caller is probing for the existence of a
     * resource. If {@code probe} is true, failed resource lookups return will not throw missing
     * registration errors and may instead return {@link #MISSING_METADATA_MARKER}.
     * <p>
     * Tracing note: When this method is used for probing, only successful metadata matches will be
     * traced. If a probing result is {@link #MISSING_METADATA_MARKER}, the caller must explicitly
     * trace the missing metadata.
     */
    public static ResourceStorageEntryBase getAtRuntime(Module module, String resourceName, boolean probe) {
        VMError.guarantee(ImageInfo.inImageRuntimeCode(), "This function should be used only at runtime.");
        String canonicalResourceName = NativeImageResourcePathRepresentation.toCanonicalForm(resourceName);
        String moduleName = moduleName(module);
        ConditionalRuntimeValue<ResourceStorageEntryBase> entry = getEntry(module, canonicalResourceName);
        if (entry == null) {
            if (MissingRegistrationUtils.throwMissingRegistrationErrors()) {
                if (missingResourceMatchesIncludePattern(resourceName, moduleName) || missingResourceMatchesIncludePattern(canonicalResourceName, moduleName)) {
                    // This resource name matches a pattern/glob from the provided metadata, but no
                    // resource with the name actually exists. Do not report missing metadata.
                    traceResource(resourceName, moduleName);
                    return null;
                }
                traceResourceMissingMetadata(resourceName, moduleName, probe);
                return missingMetadata(module, resourceName, probe);
            } else {
                // NB: Without exact reachability metadata, resource include patterns are not
                // stored in the image heap, so we cannot reliably identify if the resource was
                // included at build time. Assume it is missing.
                traceResourceMissingMetadata(resourceName, moduleName, probe);
                return null;
            }
        }
        traceResource(resourceName, moduleName);
        if (!entry.getConditions().satisfied()) {
            return missingMetadata(module, resourceName, probe);
        }
        ResourceStorageEntryBase unconditionalEntry = entry.getValue();
        assert unconditionalEntry != null : "Already checked above that the condition is satisfied";
        if (unconditionalEntry.isException()) {
            // Rethrow the IOException that was captured at build time (see registerIOException).
            throw new RuntimeException(unconditionalEntry.getException());
        }
        if (unconditionalEntry == NEGATIVE_QUERY_MARKER) {
            return null;
        }
        if (unconditionalEntry.isFromJar() && !wasAlreadyInCanonicalForm(resourceName, canonicalResourceName)) {
            /*
             * The resource originally came from a jar file, thus behave like ZipFileSystem behaves
             * for non-canonical paths.
             */
            return null;
        }
        if (!unconditionalEntry.isDirectory() && hasTrailingSlash(resourceName)) {
            /*
             * If this is an actual resource file (not a directory) we do not tolerate a trailing
             * slash.
             */
            return null;
        }
        return unconditionalEntry;
    }
    @AlwaysInline("tracing should fold away when disabled")
    private static void traceResource(String resourceName, String moduleName) {
        if (MetadataTracer.enabled()) {
            MetadataTracer.singleton().traceResource(resourceName, moduleName);
        }
    }
    @AlwaysInline("tracing should fold away when disabled")
    private static void traceResourceMissingMetadata(String resourceName, String moduleName) {
        traceResourceMissingMetadata(resourceName, moduleName, false);
    }
    @AlwaysInline("tracing should fold away when disabled")
    private static void traceResourceMissingMetadata(String resourceName, String moduleName, boolean probe) {
        if (MetadataTracer.enabled() && !probe) {
            // Do not trace missing metadata for probing queries, otherwise we'll trace an entry for
            // every module. The caller is responsible for tracing missing entries if it uses
            // probing.
            MetadataTracer.singleton().traceResource(resourceName, moduleName);
        }
    }
    /**
     * Checks whether the given missing resource is matched by a pattern/glob registered at build
     * time. In such a case, we should not report missing metadata.
     */
    private static boolean missingResourceMatchesIncludePattern(String resourceName, String moduleName) {
        VMError.guarantee(MissingRegistrationUtils.throwMissingRegistrationErrors(), "include patterns are only stored in the image with exact reachability metadata");
        String glob = GlobUtils.transformToTriePath(resourceName, moduleName);
        // Both the explicit patterns and the glob trie are consulted, across every layer.
        for (var r : layeredSingletons()) {
            MapCursor<RequestedPattern, RuntimeConditionSet> cursor = r.requestedPatterns.getEntries();
            while (cursor.advance()) {
                RequestedPattern moduleResourcePair = cursor.getKey();
                if (Objects.equals(moduleName, moduleResourcePair.module) && matchResource(moduleResourcePair.pattern, resourceName) && cursor.getValue().satisfied()) {
                    return true;
                }
            }
            if (CompressedGlobTrie.match(r.getResourcesTrieRoot(), glob)) {
                return true;
            }
        }
        return false;
    }
    /** Searches all layers (innermost first) for an entry under (module, canonicalResourceName). */
    private static ConditionalRuntimeValue<ResourceStorageEntryBase> getEntry(Module module, String canonicalResourceName) {
        for (var r : layeredSingletons()) {
            ConditionalRuntimeValue<ResourceStorageEntryBase> entry = r.resources.get(createStorageKey(module, canonicalResourceName));
            if (entry != null) {
                return entry;
            }
        }
        return null;
    }
    /** Reports the missing registration (unless probing) and returns the marker. */
    private static ResourceStorageEntryBase missingMetadata(Module module, String resourceName, boolean probe) {
        if (!probe) {
            MissingResourceRegistrationUtils.reportResourceAccess(module, resourceName);
        }
        return MISSING_METADATA_MARKER;
    }
    /**
     * Builds a {@code resource:} URL for the index-th data chunk of the entry; a non-zero index is
     * carried in the URL fragment.
     */
    @SuppressWarnings("deprecation")
    private static URL createURL(Module module, String resourceName, int index) {
        try {
            String refPart = index != 0 ? '#' + Integer.toString(index) : "";
            String moduleName = moduleName(module);
            return new URL(JavaNetSubstitutions.RESOURCE_PROTOCOL, moduleName, -1, '/' + resourceName + refPart);
        } catch (MalformedURLException ex) {
            throw new IllegalStateException(ex);
        }
    }
    public static URL createURL(String resourceName) {
        return createURL(null, resourceName);
    }
    /** Returns the first matching URL, or null if the name is null or nothing matches. */
    public static URL createURL(Module module, String resourceName) {
        if (resourceName == null) {
            return null;
        }
        Enumeration<URL> urls = createURLs(module, resourceName);
        return urls.hasMoreElements() ? urls.nextElement() : null;
    }
    /* Avoid pulling in the URL class when only an InputStream is needed. */
    public static InputStream createInputStream(Module module, String resourceName) {
        if (resourceName == null) {
            return null;
        }
        ResourceStorageEntryBase entry = findResourceForInputStream(module, resourceName);
        if (entry == MISSING_METADATA_MARKER) {
            traceResourceMissingMetadata(resourceName, moduleName(module));
            MissingResourceRegistrationUtils.reportResourceAccess(module, resourceName);
            return null;
        } else if (entry == null) {
            return null;
        }
        byte[][] data = entry.getData();
        // Only the first data chunk is exposed through the InputStream API.
        return data.length == 0 ? null : new ByteArrayInputStream(data[0]);
    }
    private static ResourceStorageEntryBase findResourceForInputStream(Module module, String resourceName) {
        ResourceStorageEntryBase result = getAtRuntime(module, resourceName, true);
        if (moduleName(module) == null && (result == MISSING_METADATA_MARKER || result == null)) {
            /*
             * If module is not specified or is an unnamed module and entry was not found as
             * classpath-resource we have to search for the resource in all modules in the image.
             */
            for (Module m : RuntimeModuleSupport.singleton().getBootLayer().modules()) {
                ResourceStorageEntryBase entry = getAtRuntime(m, resourceName, true);
                if (entry != MISSING_METADATA_MARKER) {
                    if (entry != null) {
                        // resource found
                        return entry;
                    } else {
                        // found a negative query. remember this result but keep trying in case some
                        // other module supplies an actual resource.
                        result = null;
                    }
                }
            }
        }
        return result;
    }
    public static Enumeration<URL> createURLs(String resourceName) {
        return createURLs(null, resourceName);
    }
    /**
     * Collects the URLs of all matching resources. A missing-registration error is reported only
     * when every probed lookup (all boot-layer modules plus the explicit module) reported missing
     * metadata.
     */
    public static Enumeration<URL> createURLs(Module module, String resourceName) {
        if (resourceName == null) {
            return null;
        }
        boolean missingMetadata = true;
        List<URL> resourcesURLs = new ArrayList<>();
        String canonicalResourceName = NativeImageResourcePathRepresentation.toCanonicalForm(resourceName);
        if (hasTrailingSlash(resourceName)) {
            // Canonicalization drops the trailing slash; keep it so directory URLs stay directories.
            canonicalResourceName += "/";
        }
        /* If moduleName was unspecified we have to consider all modules in the image */
        if (moduleName(module) == null) {
            for (Module m : RuntimeModuleSupport.singleton().getBootLayer().modules()) {
                ResourceStorageEntryBase entry = getAtRuntime(m, resourceName, true);
                if (entry != MISSING_METADATA_MARKER) {
                    missingMetadata = false;
                    addURLEntries(resourcesURLs, (ResourceStorageEntry) entry, m, canonicalResourceName);
                }
            }
        }
        ResourceStorageEntryBase explicitEntry = getAtRuntime(module, resourceName, true);
        if (explicitEntry != MISSING_METADATA_MARKER) {
            missingMetadata = false;
            addURLEntries(resourcesURLs, (ResourceStorageEntry) explicitEntry, module, canonicalResourceName);
        }
        if (missingMetadata) {
            MissingResourceRegistrationUtils.reportResourceAccess(module, resourceName);
        }
        if (resourcesURLs.isEmpty()) {
            return Collections.emptyEnumeration();
        }
        return Collections.enumeration(resourcesURLs);
    }
    /** Appends one URL per data chunk of {@code entry} (no-op for null entries). */
    private static void addURLEntries(List<URL> resourcesURLs, ResourceStorageEntry entry, Module module, String canonicalResourceName) {
        if (entry == null) {
            return;
        }
        for (int index = 0; index < entry.getData().length; index++) {
            resourcesURLs.add(createURL(module, canonicalResourceName, index));
        }
    }
    /**
     * Simplified wildcard matching: exact match, a single trailing '*', or a prefix/suffix split
     * at the last non-escaped '*' in the pattern. Escaped stars ("\*") are not treated as
     * wildcards.
     */
    private static boolean matchResource(String pattern, String resource) {
        if (pattern.equals(resource)) {
            return true;
        }
        if (!pattern.contains("*")) {
            return false;
        }
        if (pattern.endsWith("*")) {
            return resource.startsWith(pattern.substring(0, pattern.length() - 1));
        }
        String[] parts = pattern.split("\\*");
        int i = parts.length - 1;
        boolean found = false;
        // Walk backwards to find the last '*' that is not escaped by a preceding backslash.
        while (i > 0 && !found) {
            found = !parts[i - 1].endsWith("\\");
            i--;
        }
        if (!found) {
            return false;
        }
        String start = String.join("*", Arrays.copyOfRange(parts, 0, i + 1));
        String end = String.join("*", Arrays.copyOfRange(parts, i + 1, parts.length));
        return resource.startsWith(start) && resource.endsWith(end);
    }
    /**
     * Persists this layer's registered resource keys, their registration state (complete vs.
     * negative query) and include patterns, so that the next layer can avoid re-registering them.
     */
    static class LayeredCallbacks extends SingletonLayeredCallbacksSupplier {
        @Override
        public SingletonTrait getLayeredCallbacksTrait() {
            return new SingletonTrait(SingletonTraitKind.LAYERED_CALLBACKS, new SingletonLayeredCallbacks<Resources>() {
                @Override
                public LayeredImageSingleton.PersistFlags doPersist(ImageSingletonWriter writer, Resources singleton) {
                    List<String> resourceKeys = new ArrayList<>();
                    List<Boolean> resourceRegistrationStates = new ArrayList<>();
                    Set<String> patterns = new HashSet<>(singleton.previousLayerPatterns);
                    var cursor = singleton.resources.getEntries();
                    while (cursor.advance()) {
                        resourceKeys.add(cursor.getKey().toString());
                        boolean isNegativeQuery = cursor.getValue().getValueUnconditionally() == NEGATIVE_QUERY_MARKER;
                        resourceRegistrationStates.add(!isNegativeQuery);
                    }
                    for (var entry : singleton.previousLayerResources.entrySet()) {
                        /*
                         * If a complete entry overwrites a negative query from a previous layer,
                         * the previousLayerResources map entry needs to be skipped to register the
                         * new entry for extension layers.
                         */
                        if (!resourceKeys.contains(entry.getKey())) {
                            resourceKeys.add(entry.getKey());
                            resourceRegistrationStates.add(entry.getValue());
                        }
                    }
                    singleton.requestedPatterns.getKeys().forEach(p -> patterns.add(p.toString()));
                    writer.writeStringList(RESOURCE_KEYS, resourceKeys);
                    writer.writeBoolList(RESOURCE_REGISTRATION_STATES, resourceRegistrationStates);
                    writer.writeStringList(PATTERNS, patterns.stream().toList());
                    return LayeredImageSingleton.PersistFlags.CREATE;
                }
                @Override
                public Class<? extends LayeredSingletonInstantiator<?>> getSingletonInstantiator() {
                    return SingletonInstantiator.class;
                }
            });
        }
    }
    /** Rebuilds a {@link Resources} singleton in an extension layer from the persisted state. */
    static class SingletonInstantiator implements SingletonLayeredCallbacks.LayeredSingletonInstantiator<Resources> {
        @Override
        public Resources createFromLoader(ImageSingletonLoader loader) {
            List<String> previousLayerResourceKeys = loader.readStringList(RESOURCE_KEYS);
            List<Boolean> previousLayerRegistrationStates = loader.readBoolList(RESOURCE_REGISTRATION_STATES);
            Map<String, Boolean> previousLayerResources = new HashMap<>();
            // The two persisted lists are parallel: key i has registration state i.
            for (int i = 0; i < previousLayerResourceKeys.size(); ++i) {
                previousLayerResources.put(previousLayerResourceKeys.get(i), previousLayerRegistrationStates.get(i));
            }
            Set<String> previousLayerPatterns = Set.copyOf(loader.readStringList(PATTERNS));
            return new Resources(Collections.unmodifiableMap(previousLayerResources), previousLayerPatterns);
        }
    }
}
@AutomaticallyRegisteredFeature
final class ResourcesFeature implements InternalFeature {
    /**
     * Installs the {@link Resources} singleton (only in the first/initial image build of a
     * layered chain) and wires up the symbol encoder for the current layer.
     */
    @Override
    public void afterRegistration(AfterRegistrationAccess access) {
        if (ImageLayerBuildingSupport.firstImageBuild()) {
            ImageSingletons.add(Resources.class, new Resources());
        }
        Resources.currentLayer().setEncoder(SymbolEncoder.singleton());
    }
    /**
     * Marks the embedded resource byte arrays as immutable in the image heap.
     */
    @Override
    public void afterCompilation(AfterCompilationAccess access) {
        /*
         * The resources embedded in the image heap are read-only at run time. Note that we do not
         * mark the collection data structures as read-only because Java collections have all sorts
         * of lazily initialized fields. Only the byte[] arrays themselves can be safely made
         * read-only.
         */
        for (ConditionalRuntimeValue<ResourceStorageEntryBase> conditionalValue : Resources.currentLayer().resources().getValues()) {
            ResourceStorageEntryBase storageEntry = conditionalValue.getValueUnconditionally();
            if (!storageEntry.hasData()) {
                continue;
            }
            for (byte[] resourceBytes : storageEntry.getData()) {
                access.registerAsImmutable(resourceBytes);
            }
        }
    }
}
|
apache/lens | 37,903 | lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestHiveDriver.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.lens.driver.hive;
import static org.testng.Assert.*;
import java.io.*;
import java.text.ParseException;
import java.util.*;
import org.apache.lens.api.LensConf;
import org.apache.lens.api.Priority;
import org.apache.lens.api.query.QueryHandle;
import org.apache.lens.cube.metadata.FactPartition;
import org.apache.lens.cube.metadata.UpdatePeriod;
import org.apache.lens.server.api.LensConfConstants;
import org.apache.lens.server.api.driver.*;
import org.apache.lens.server.api.driver.DriverQueryStatus.DriverQueryState;
import org.apache.lens.server.api.driver.hooks.DriverQueryHook;
import org.apache.lens.server.api.error.LensException;
import org.apache.lens.server.api.query.ExplainQueryContext;
import org.apache.lens.server.api.query.PreparedQueryContext;
import org.apache.lens.server.api.query.QueryContext;
import org.apache.lens.server.api.query.cost.QueryCost;
import org.apache.lens.server.api.query.priority.CostRangePriorityDecider;
import org.apache.lens.server.api.query.priority.CostToPriorityRangeConf;
import org.apache.lens.server.api.user.MockDriverQueryHook;
import org.apache.lens.server.api.util.LensUtil;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.ql.HiveDriverRunHook;
import org.apache.hadoop.hive.ql.HiveDriverRunHookContext;
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hive.service.cli.ColumnDescriptor;
import org.testng.annotations.*;
import com.beust.jcommander.internal.Maps;
import com.google.common.collect.Lists;
/**
 * Unit tests for {@code HiveDriver}: synchronous/asynchronous execution,
 * in-memory and persistent result sets, explain plans, query priority
 * decisions and hive operation-handle bookkeeping.
 */
public class TestHiveDriver {
  /** Local data file loaded into every test table. */
  public static final String TEST_DATA_FILE = "testdata/testdata1.data";
  /** Directory under target/ where persistent result sets are written. */
  private final String testOutputDir = "target/" + this.getClass().getSimpleName() + "/test-output";
  /** Driver-level configuration used to configure the HiveDriver instance. */
  protected Configuration driverConf = new Configuration();
  protected HiveConf hiveConf = new HiveConf();
  /** Per-query configuration; individual tests mutate it freely. */
  protected Configuration queryConf = new Configuration();
  /** The driver under test. */
  protected HiveDriver driver;
  /** Driver list handed to every query context. */
  protected Collection<LensDriver> drivers;
  /** Test database, named after the test class. */
  String dataBase = this.getClass().getSimpleName().toLowerCase();
  protected String sessionid;
  protected SessionState ss;
  // Decider that maps every cost into the NORMAL priority range; used as a
  // contrast to the driver's own decider in the priority tests.
  private CostRangePriorityDecider alwaysNormalPriorityDecider
    = new CostRangePriorityDecider(new CostToPriorityRangeConf(""));

  /**
   * Before test: creates the driver, starts a Hive session, creates a
   * dedicated test database and switches the driver to it.
   *
   * @throws Exception the exception
   */
  @BeforeTest
  public void beforeTest() throws Exception {
    // Check if hadoop property set
    System.out.println("###HADOOP_PATH " + System.getProperty("hadoop.bin.path"));
    assertNotNull(System.getProperty("hadoop.bin.path"));
    createDriver();
    ss = new SessionState(hiveConf, "testuser");
    SessionState.start(ss);
    Hive client = Hive.get(hiveConf);
    Database database = new Database();
    database.setName(dataBase);
    client.createDatabase(database, true);
    SessionState.get().setCurrentDatabase(dataBase);
    sessionid = SessionState.get().getSessionId();
    // The USE statement must not be rewritten with INSERT OVERWRITE, so the
    // flag is disabled for this one statement and restored afterwards.
    driverConf.setBoolean(LensConfConstants.QUERY_ADD_INSERT_OVEWRITE, false);
    QueryContext context = createContext("USE " + dataBase, this.queryConf);
    driver.execute(context);
    driverConf.setBoolean(LensConfConstants.QUERY_ADD_INSERT_OVEWRITE, true);
    driverConf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, true);
  }
  /**
   * Configures and instantiates the HiveDriver with an embedded thrift
   * connection, an embedded lock manager and the mock driver query hook.
   *
   * @throws LensException if driver configuration fails
   */
  protected void createDriver() throws LensException {
    driverConf.addResource("drivers/hive/hive1/hivedriver-site.xml");
    driverConf.setClass(HiveDriver.HIVE_CONNECTION_CLASS, EmbeddedThriftConnection.class, ThriftConnection.class);
    driverConf.setClass(LensConfConstants.DRIVER_HOOK_CLASSES_SFX, MockDriverQueryHook.class, DriverQueryHook.class);
    driverConf.set("hive.lock.manager", "org.apache.hadoop.hive.ql.lockmgr.EmbeddedLockManager");
    driverConf.setBoolean(HiveDriver.HS2_CALCULATE_PRIORITY, true);
    driver = new HiveDriver();
    driver.configure(driverConf, "hive", "hive1");
    drivers = Lists.<LensDriver>newArrayList(driver);
    System.out.println("TestHiveDriver created");
  }

  /** Re-attaches the shared Hive session state to the current test thread. */
  @BeforeMethod
  public void setDB() {
    SessionState.setCurrentSessionState(ss);
  }
protected QueryContext createContext(final String query, Configuration conf) throws LensException {
QueryContext context = new QueryContext(query, "testuser", new LensConf(), conf, drivers);
// session id has to be set before calling setDriverQueriesAndPlans
context.setLensSessionIdentifier(sessionid);
return context;
}
protected QueryContext createContext(final String query, Configuration conf, LensDriver driver) throws LensException {
QueryContext context = new QueryContext(query, "testuser", new LensConf(), conf, Arrays.asList(driver));
// session id has to be set before calling setDriverQueriesAndPlans
context.setLensSessionIdentifier(sessionid);
return context;
}
protected QueryContext createContext(PreparedQueryContext query, Configuration conf) {
QueryContext context = new QueryContext(query, "testuser", new LensConf(), conf);
context.setLensSessionIdentifier(sessionid);
return context;
}
protected ExplainQueryContext createExplainContext(final String query, Configuration conf) {
ExplainQueryContext ectx = new ExplainQueryContext(UUID.randomUUID().toString(), query, "testuser", null, conf,
drivers);
ectx.setLensSessionIdentifier(sessionid);
return ectx;
}
  /**
   * After test: verifies hive pushed status updates into the logs, closes the
   * driver and drops the test database with cascade.
   *
   * @throws Exception the exception
   */
  @AfterTest
  public void afterTest() throws Exception {
    verifyThriftLogs();
    driver.close();
    Hive.get(hiveConf).dropDatabase(dataBase, true, true, true);
  }
private void verifyThriftLogs() throws IOException {
BufferedReader br = new BufferedReader(new FileReader(new File("target/test.log")));
for (String line = br.readLine(); line != null; line = br.readLine()) {
if (line.contains("Update from hive")) {
return;
}
}
fail("No updates from hive found in the logs");
}
  /**
   * Creates a single-column test table with a storage cost of 500 and loads
   * the test data file into it; asserts no result set is returned for either
   * statement and that no hive handles leak.
   *
   * @param tableName the table name
   * @throws Exception the exception
   */
  protected void createTestTable(String tableName) throws Exception {
    int handleSize = getHandleSize();
    System.out.println("Hadoop Location: " + System.getProperty("hadoop.bin.path"));
    String createTable = "CREATE TABLE IF NOT EXISTS " + tableName + "(ID STRING)" + " TBLPROPERTIES ('"
      + LensConfConstants.STORAGE_COST + "'='500')";
    String dataLoad = "LOAD DATA LOCAL INPATH '" + TEST_DATA_FILE + "' OVERWRITE INTO TABLE " + tableName;
    // Create test table
    QueryContext context = createContext(createTable, queryConf);
    LensResultSet resultSet = driver.execute(context);
    assertNull(resultSet);
    // Load some data into the table
    context = createContext(dataLoad, queryConf);
    resultSet = driver.execute(context);
    assertNull(resultSet);
    assertHandleSize(handleSize);
  }
  /**
   * Creates a test table partitioned by dt (storage cost 500) and loads the
   * test data file into the dt='today' partition.
   *
   * @param tableName the table name
   * @throws Exception the exception
   */
  protected void createPartitionedTable(String tableName) throws Exception {
    int handleSize = getHandleSize();
    System.out.println("Hadoop Location: " + System.getProperty("hadoop.bin.path"));
    String createTable = "CREATE TABLE IF NOT EXISTS " + tableName + "(ID STRING)"
      + " PARTITIONED BY (dt string) TBLPROPERTIES ('"
      + LensConfConstants.STORAGE_COST + "'='500')";
    queryConf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, false);
    // Create the table
    QueryContext context = createContext(createTable, queryConf);
    LensResultSet resultSet = driver.execute(context);
    assertNull(resultSet);
    // Load some data into the 'today' partition
    String dataLoad = "LOAD DATA LOCAL INPATH '" + TEST_DATA_FILE + "' OVERWRITE INTO TABLE " + tableName
      + " partition (dt='today')";
    context = createContext(dataLoad, queryConf);
    resultSet = driver.execute(context);
    assertNull(resultSet);
    assertHandleSize(handleSize);
  }
  // Tests

  /**
   * Verifies that with QUERY_ADD_INSERT_OVEWRITE disabled, addPersistentPath
   * leaves the driver query identical to the user query.
   *
   * @throws Exception the exception
   */
  @Test
  public void testInsertOverwriteConf() throws Exception {
    createTestTable("test_insert_overwrite");
    queryConf.setBoolean(LensConfConstants.QUERY_ADD_INSERT_OVEWRITE, false);
    String query = "SELECT ID FROM test_insert_overwrite";
    QueryContext context = createContext(query, queryConf);
    driver.addPersistentPath(context);
    assertEquals(context.getUserQuery(), query);
    assertNotNull(context.getDriverContext().getDriverQuery(driver));
    assertEquals(context.getDriverContext().getDriverQuery(driver), context.getUserQuery());
  }

  /**
   * Runs a CTAS into a temp table, then reads the rows back via an in-memory
   * result set, checking hive handles do not leak at any step.
   *
   * @throws Exception the exception
   */
  @Test
  public void testTemptable() throws Exception {
    int handleSize = getHandleSize();
    createTestTable("test_temp");
    queryConf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, false);
    Hive.get(hiveConf).dropTable("test_temp_output");
    String query = "CREATE TABLE test_temp_output AS SELECT ID FROM test_temp";
    QueryContext context = createContext(query, queryConf);
    LensResultSet resultSet = driver.execute(context);
    assertNull(resultSet);
    assertHandleSize(handleSize);
    // fetch results from temp table
    String select = "SELECT * FROM test_temp_output";
    context = createContext(select, queryConf);
    resultSet = driver.execute(context);
    assertHandleSize(handleSize);
    validateInMemoryResult(resultSet, "test_temp_output");
    assertHandleSize(handleSize);
  }

  /**
   * Executes a select synchronously in three result modes: in-memory,
   * persistent, and persistent with a custom output serde that formats nulls.
   *
   * @throws Exception the exception
   */
  @Test
  public void testExecuteQuery() throws Exception {
    int handleSize = getHandleSize();
    createTestTable("test_execute");
    LensResultSet resultSet = null;
    // Execute a select query
    queryConf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, false);
    String select = "SELECT ID FROM test_execute";
    QueryContext context = createContext(select, queryConf);
    resultSet = driver.execute(context);
    assertNotNull(context.getDriverConf(driver).get("mapred.job.name"));
    validateInMemoryResult(resultSet);
    queryConf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, true);
    context = createContext(select, queryConf);
    resultSet = driver.execute(context);
    validatePersistentResult(resultSet, TEST_DATA_FILE, context.getHDFSResultDir(), false);
    // Custom serde: nulls rendered as -NA-, fields comma separated.
    queryConf.set(LensConfConstants.QUERY_OUTPUT_DIRECTORY_FORMAT,
      "ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'"
        + " WITH SERDEPROPERTIES ('serialization.null.format'='-NA-',"
        + " 'field.delim'=',' ) STORED AS TEXTFILE ");
    select = "SELECT ID, null, ID FROM test_execute";
    context = createContext(select, queryConf);
    resultSet = driver.execute(context);
    validatePersistentResult(resultSet, TEST_DATA_FILE, context.getHDFSResultDir(), true);
    assertHandleSize(handleSize);
  }
  /**
   * Validates an in-memory result set against the test data file, without an
   * expected output-table prefix on the column name.
   *
   * @param resultSet the result set
   * @throws LensException the lens exception
   * @throws IOException Signals that an I/O exception has occurred.
   */
  private void validateInMemoryResult(LensResultSet resultSet) throws LensException, IOException {
    validateInMemoryResult(resultSet, null);
  }
/**
* Validate in memory result.
*
* @param resultSet the result set
* @param outputTable the output table
* @throws LensException the lens exception
* @throws IOException Signals that an I/O exception has occurred.
*/
private void validateInMemoryResult(LensResultSet resultSet, String outputTable) throws LensException, IOException {
assertNotNull(resultSet);
assertTrue(resultSet instanceof HiveInMemoryResultSet);
HiveInMemoryResultSet inmemrs = (HiveInMemoryResultSet) resultSet;
// check metadata
LensResultSetMetadata rsMeta = inmemrs.getMetadata();
List<ColumnDescriptor> columns = rsMeta.getColumns();
assertNotNull(columns);
assertEquals(columns.size(), 1);
String expectedCol = "";
if (outputTable != null) {
expectedCol += outputTable + ".";
}
expectedCol += "ID";
assertTrue(columns.get(0).getName().toLowerCase().equals(expectedCol.toLowerCase())
|| columns.get(0).getName().toLowerCase().equals("ID".toLowerCase()));
assertEquals(columns.get(0).getTypeName().toLowerCase(), "STRING".toLowerCase());
List<String> expectedRows = new ArrayList<String>();
// Read data from the test file into expectedRows
BufferedReader br = new BufferedReader(new InputStreamReader(new FileInputStream(TEST_DATA_FILE)));
String line = "";
while ((line = br.readLine()) != null) {
expectedRows.add(line.trim());
}
br.close();
List<String> actualRows = new ArrayList<String>();
while (inmemrs.hasNext()) {
List<Object> row = inmemrs.next().getValues();
actualRows.add((String) row.get(0));
}
assertEquals(actualRows, expectedRows);
}
  /**
   * Hive driver run hook that always fails in preDriverRun; installed via
   * hive.exec.driver.run.hooks to force a query failure in tests.
   */
  public static class FailHook implements HiveDriverRunHook {
    /** No-op. */
    @Override
    public void postDriverRun(HiveDriverRunHookContext arg0) throws Exception {
      // intentionally empty
    }

    /** Always throws, failing the driver run. */
    @Override
    public void preDriverRun(HiveDriverRunHookContext arg0) throws Exception {
      throw new LensException("Failing this run");
    }
  }
  // executeAsync

  /**
   * Exercises asynchronous execution: a forced failure via {@link FailHook},
   * then successful runs in in-memory, persistent and custom-serde modes,
   * asserting handle counts before and after each closeQuery.
   *
   * @throws Exception the exception
   */
  @Test
  public void testExecuteQueryAsync() throws Exception {
    int handleSize = getHandleSize();
    createTestTable("test_execute_sync");
    // Now run a command that would fail
    String expectFail = "SELECT ID FROM test_execute_sync";
    queryConf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, true);
    Configuration failConf = new Configuration(queryConf);
    failConf.set("hive.exec.driver.run.hooks", FailHook.class.getName());
    QueryContext context = createContext(expectFail, failConf);
    driver.executeAsync(context);
    assertHandleSize(handleSize + 1);
    validateExecuteAsync(context, DriverQueryState.FAILED, true, false);
    assertHandleSize(handleSize + 1);
    driver.closeQuery(context.getQueryHandle());
    assertHandleSize(handleSize);
    // Async select query
    String select = "SELECT ID FROM test_execute_sync";
    queryConf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, false);
    context = createContext(select, queryConf);
    driver.executeAsync(context);
    assertNotNull(context.getDriverConf(driver).get("mapred.job.name"));
    assertNotNull(context.getDriverConf(driver).get("mapred.job.priority"));
    assertHandleSize(handleSize + 1);
    validateExecuteAsync(context, DriverQueryState.SUCCESSFUL, false, false);
    driver.closeQuery(context.getQueryHandle());
    assertHandleSize(handleSize);
    queryConf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, true);
    context = createContext(select, queryConf);
    driver.executeAsync(context);
    assertHandleSize(handleSize + 1);
    validateExecuteAsync(context, DriverQueryState.SUCCESSFUL, true, false);
    driver.closeQuery(context.getQueryHandle());
    assertHandleSize(handleSize);
    // Custom serde: nulls rendered as -NA-, fields comma separated.
    queryConf.set(LensConfConstants.QUERY_OUTPUT_DIRECTORY_FORMAT,
      "ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'"
        + " WITH SERDEPROPERTIES ('serialization.null.format'='-NA-',"
        + " 'field.delim'=',' ) STORED AS TEXTFILE ");
    select = "SELECT ID, null, ID FROM test_execute_sync";
    context = createContext(select, queryConf);
    driver.executeAsync(context);
    assertHandleSize(handleSize + 1);
    validateExecuteAsync(context, DriverQueryState.SUCCESSFUL, true, true);
    driver.closeQuery(context.getQueryHandle());
    assertHandleSize(handleSize);
  }

  /**
   * Waits for an async query on the given driver, then validates its final
   * state and, on success, validates the fetched result set.
   *
   * @param ctx the ctx
   * @param finalState the expected final state
   * @param isPersistent whether a persistent result set is expected
   * @param formatNulls whether nulls are expected formatted as -NA-
   * @param driver the driver to poll
   * @throws Exception the exception
   */
  protected void validateExecuteAsync(QueryContext ctx, DriverQueryState finalState, boolean isPersistent,
    boolean formatNulls, HiveDriver driver) throws Exception {
    waitForAsyncQuery(ctx, driver);
    driver.updateStatus(ctx);
    assertEquals(ctx.getDriverStatus().getState(), finalState, "Expected query to finish with" + finalState);
    assertTrue(ctx.getDriverStatus().getDriverFinishTime() > 0);
    if (finalState.equals(DriverQueryState.SUCCESSFUL)) {
      System.out.println("Progress:" + ctx.getDriverStatus().getProgressMessage());
      assertNotNull(ctx.getDriverStatus().getProgressMessage());
      if (!isPersistent) {
        validateInMemoryResult(driver.fetchResultSet(ctx));
      } else {
        validatePersistentResult(driver.fetchResultSet(ctx), TEST_DATA_FILE, ctx.getHDFSResultDir(), formatNulls);
      }
    } else if (finalState.equals(DriverQueryState.FAILED)) {
      System.out.println("Error:" + ctx.getDriverStatus().getErrorMessage());
      System.out.println("Status:" + ctx.getDriverStatus().getStatusMessage());
      assertNotNull(ctx.getDriverStatus().getErrorMessage());
    }
  }

  /**
   * Convenience overload of
   * {@link #validateExecuteAsync(QueryContext, DriverQueryState, boolean, boolean, HiveDriver)}
   * using the test's default driver.
   *
   * @param ctx the ctx
   * @param finalState the expected final state
   * @param isPersistent whether a persistent result set is expected
   * @param formatNulls whether nulls are expected formatted as -NA-
   * @throws Exception the exception
   */
  protected void validateExecuteAsync(QueryContext ctx, DriverQueryState finalState, boolean isPersistent,
    boolean formatNulls) throws Exception {
    validateExecuteAsync(ctx, finalState, isPersistent, formatNulls, driver);
  }

  /**
   * Cancels an async query, verifies the CANCELED state, and asserts that a
   * second cancel after close fails with "Query not found".
   *
   * @throws Exception the exception
   */
  @Test
  public void testCancelAsyncQuery() throws Exception {
    int handleSize = getHandleSize();
    createTestTable("test_cancel_async");
    queryConf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, false);
    QueryContext context = createContext("select a.id aid, b.id bid from "
      + "((SELECT ID FROM test_cancel_async) a full outer join (select id from test_cancel_async) b)",
      queryConf);
    driver.executeAsync(context);
    driver.cancelQuery(context.getQueryHandle());
    driver.updateStatus(context);
    assertEquals(context.getDriverStatus().getState(), DriverQueryState.CANCELED, "Expecting query to be cancelled");
    driver.closeQuery(context.getQueryHandle());
    assertHandleSize(handleSize);
    try {
      driver.cancelQuery(context.getQueryHandle());
      fail("Cancel on closed query should throw error");
    } catch (LensException exc) {
      assertTrue(exc.getMessage().startsWith("Query not found"));
    }
  }
  /**
   * Validates a persistent result set: checks the output path matches the
   * expected HDFS result dir, then compares all non-directory output files'
   * lines against the data file (optionally reformatted for null handling).
   *
   * @param resultSet the result set
   * @param dataFile the expected data file
   * @param outptuDir the expected output dir
   * @param formatNulls whether each expected row becomes "row,-NA-,row"
   * @throws Exception the exception
   */
  private void validatePersistentResult(LensResultSet resultSet, String dataFile, Path outptuDir, boolean formatNulls)
    throws Exception {
    assertTrue(resultSet instanceof HivePersistentResultSet, "resultset class: " + resultSet.getClass().getName());
    HivePersistentResultSet persistentResultSet = (HivePersistentResultSet) resultSet;
    String path = persistentResultSet.getOutputPath();
    Path actualPath = new Path(path);
    FileSystem fs = actualPath.getFileSystem(driverConf);
    assertEquals(actualPath, fs.makeQualified(outptuDir));
    List<String> actualRows = new ArrayList<String>();
    // Collect every line from each data file in the output dir (skip subdirs).
    for (FileStatus stat : fs.listStatus(actualPath, new PathFilter() {
      @Override
      public boolean accept(Path path) {
        return !new File(path.toUri()).isDirectory();
      }
    })) {
      FSDataInputStream in = fs.open(stat.getPath());
      BufferedReader br = null;
      try {
        br = new BufferedReader(new InputStreamReader(in));
        String line = "";
        while ((line = br.readLine()) != null) {
          System.out.println("Actual:" + line);
          actualRows.add(line.trim());
        }
      } finally {
        if (br != null) {
          br.close();
        }
      }
    }
    BufferedReader br = null;
    List<String> expectedRows = new ArrayList<String>();
    try {
      br = new BufferedReader(new FileReader(new File(dataFile)));
      String line = "";
      while ((line = br.readLine()) != null) {
        String row = line.trim();
        // With the -NA- serde the output row is "<id>,-NA-,<id>".
        if (formatNulls) {
          row += ",-NA-,";
          row += line.trim();
        }
        expectedRows.add(row);
      }
    } finally {
      if (br != null) {
        br.close();
      }
    }
    assertEquals(actualRows, expectedRows);
  }
  /**
   * Tests persistent result sets in both sync and async execution, with and
   * without a custom output serde, verifying handle bookkeeping throughout.
   *
   * @throws Exception the exception
   */
  @Test
  public void testPersistentResultSet() throws Exception {
    int handleSize = getHandleSize();
    createTestTable("test_persistent_result_set");
    queryConf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, true);
    queryConf.setBoolean(LensConfConstants.QUERY_ADD_INSERT_OVEWRITE, true);
    queryConf.set(LensConfConstants.RESULT_SET_PARENT_DIR, testOutputDir);
    QueryContext ctx = createContext("SELECT ID FROM test_persistent_result_set", queryConf);
    LensResultSet resultSet = driver.execute(ctx);
    validatePersistentResult(resultSet, TEST_DATA_FILE, ctx.getHDFSResultDir(), false);
    assertHandleSize(handleSize);
    ctx = createContext("SELECT ID FROM test_persistent_result_set", queryConf);
    driver.executeAsync(ctx);
    assertHandleSize(handleSize + 1);
    validateExecuteAsync(ctx, DriverQueryState.SUCCESSFUL, true, false);
    driver.closeQuery(ctx.getQueryHandle());
    assertHandleSize(handleSize);
    // Custom serde: nulls rendered as -NA-, fields comma separated.
    queryConf.set(LensConfConstants.QUERY_OUTPUT_DIRECTORY_FORMAT,
      "ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'"
        + " WITH SERDEPROPERTIES ('serialization.null.format'='-NA-',"
        + " 'field.delim'=',' ) STORED AS TEXTFILE ");
    ctx = createContext("SELECT ID, null, ID FROM test_persistent_result_set", queryConf);
    resultSet = driver.execute(ctx);
    assertHandleSize(handleSize);
    validatePersistentResult(resultSet, TEST_DATA_FILE, ctx.getHDFSResultDir(), true);
    driver.closeQuery(ctx.getQueryHandle());
    assertHandleSize(handleSize);
    ctx = createContext("SELECT ID, null, ID FROM test_persistent_result_set", queryConf);
    driver.executeAsync(ctx);
    assertHandleSize(handleSize + 1);
    validateExecuteAsync(ctx, DriverQueryState.SUCCESSFUL, true, true);
    driver.closeQuery(ctx.getQueryHandle());
    assertHandleSize(handleSize);
  }
  /**
   * Polls the driver once per second until the query's driver status reports
   * finished, asserting start/finish timestamps along the way.
   *
   * NOTE(review): there is no upper bound on the wait — a hung query hangs
   * the test run until the suite-level timeout; consider adding a deadline.
   *
   * @param ctx the ctx
   * @param driver the driver to poll
   * @throws Exception the exception
   */
  private void waitForAsyncQuery(QueryContext ctx, HiveDriver driver) throws Exception {
    while (true) {
      driver.updateStatus(ctx);
      System.out.println("#W Waiting for query " + ctx.getQueryHandle() + " status: "
        + ctx.getDriverStatus().getState());
      assertNotNull(ctx.getDriverStatus());
      if (ctx.getDriverStatus().isFinished()) {
        assertTrue(ctx.getDriverStatus().getDriverFinishTime() > 0);
        break;
      }
      System.out.println("Progress:" + ctx.getDriverStatus().getProgressMessage());
      Thread.sleep(1000);
      assertTrue(ctx.getDriverStatus().getDriverStartTime() > 0);
    }
  }
  /**
   * A native (non-OLAP) query estimate yields max resource usage and no exec
   * time: getEstimatedExecTimeMillis is expected to throw.
   */
  @Test(expectedExceptions = {UnsupportedOperationException.class})
  public void testEstimateNativeQuery() throws Exception {
    createTestTable("test_estimate");
    SessionState.setCurrentSessionState(ss);
    QueryCost cost = driver.estimate(createExplainContext("SELECT ID FROM test_estimate", queryConf));
    assertEquals(cost.getEstimatedResourceUsage(), Double.MAX_VALUE);
    cost.getEstimatedExecTimeMillis();
  }

  /**
   * An OLAP query with an empty rewriter plan estimates to zero resource
   * usage; getEstimatedExecTimeMillis is still expected to throw.
   */
  @Test(expectedExceptions = {UnsupportedOperationException.class})
  public void testEstimateOlapQuery() throws Exception {
    SessionState.setCurrentSessionState(ss);
    ExplainQueryContext ctx = createExplainContext("cube SELECT ID FROM test_cube", queryConf);
    ctx.setOlapQuery(true);
    // Stub plan with no content; only getPartitions returns a (empty) map.
    ctx.getDriverContext().setDriverRewriterPlan(driver, new DriverQueryPlan() {
      @Override
      public String getPlan() {
        return null;
      }
      @Override
      public QueryCost getCost() {
        return null;
      }
      @Override
      public Map<String, Set<?>> getPartitions() {
        return Maps.newHashMap();
      }
    });
    QueryCost cost = driver.estimate(ctx);
    assertEquals(cost.getEstimatedResourceUsage(), 0.0);
    cost.getEstimatedExecTimeMillis();
  }

  /** Estimating a query over a missing table surfaces Hive's semantic error. */
  @Test
  public void testExplainNativeFailingQuery() throws Exception {
    SessionState.setCurrentSessionState(ss);
    try {
      driver.estimate(createExplainContext("SELECT ID FROM nonexist", queryConf));
      fail("Should not reach here");
    } catch (LensException e) {
      assertTrue(LensUtil.getCauseMessage(e).contains("Line 1:32 Table not found 'nonexist'"));
    }
  }
  // explain

  /**
   * Tests explain, explainAndPrepare and all execute-prepared variants (sync,
   * async, and the backward-compatible path where the query handle reuses the
   * prepare handle id).
   *
   * @throws Exception the exception
   */
  @Test
  public void testExplain() throws Exception {
    int handleSize = getHandleSize();
    SessionState.setCurrentSessionState(ss);
    SessionState.get().setCurrentDatabase(dataBase);
    createTestTable("test_explain");
    DriverQueryPlan plan = driver.explain(createExplainContext("SELECT ID FROM test_explain", queryConf));
    assertTrue(plan instanceof HiveQueryPlan);
    assertEquals(plan.getTableWeight(dataBase + ".test_explain"), 500.0);
    assertHandleSize(handleSize);
    // test execute prepare
    PreparedQueryContext pctx = new PreparedQueryContext("SELECT ID FROM test_explain", null, queryConf, drivers);
    pctx.setSelectedDriver(driver);
    pctx.setLensSessionIdentifier(sessionid);
    SessionState.setCurrentSessionState(ss);
    Configuration inConf = new Configuration(queryConf);
    inConf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, false);
    plan = driver.explainAndPrepare(pctx);
    QueryContext qctx = createContext(pctx, inConf);
    LensResultSet result = driver.execute(qctx);
    assertHandleSize(handleSize);
    validateInMemoryResult(result);
    // test execute prepare async
    queryConf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, true);
    qctx = createContext(pctx, queryConf);
    driver.executeAsync(qctx);
    assertNotNull(qctx.getDriverOpHandle());
    validateExecuteAsync(qctx, DriverQueryState.SUCCESSFUL, true, false);
    assertHandleSize(handleSize + 1);
    driver.closeQuery(qctx.getQueryHandle());
    assertHandleSize(handleSize);
    // for backward compatibility
    qctx = createContext(pctx, inConf);
    qctx.setQueryHandle(new QueryHandle(pctx.getPrepareHandle().getPrepareHandleId()));
    result = driver.execute(qctx);
    assertNotNull(qctx.getDriverOpHandle());
    assertHandleSize(handleSize);
    validateInMemoryResult(result);
    // test execute prepare async
    qctx = createContext(pctx, queryConf);
    qctx.setQueryHandle(new QueryHandle(pctx.getPrepareHandle().getPrepareHandleId()));
    driver.executeAsync(qctx);
    assertHandleSize(handleSize + 1);
    validateExecuteAsync(qctx, DriverQueryState.SUCCESSFUL, true, false);
    driver.closeQuery(qctx.getQueryHandle());
    driver.closePreparedQuery(pctx.getPrepareHandle());
    assertHandleSize(handleSize);
  }

  /**
   * Explain on a partitioned table exposes the table weight and the queried
   * partition (dt=today) in the plan.
   *
   * @throws Exception the exception
   */
  @Test
  public void testExplainPartitionedTable() throws Exception {
    int handleSize = getHandleSize();
    createPartitionedTable("test_part_table");
    // acquire
    SessionState.setCurrentSessionState(ss);
    DriverQueryPlan plan = driver.explain(createExplainContext("SELECT ID FROM test_part_table", queryConf));
    assertHandleSize(handleSize);
    assertTrue(plan instanceof HiveQueryPlan);
    assertNotNull(plan.getTablesQueried());
    assertEquals(plan.getTablesQueried().size(), 1);
    System.out.println("Tables:" + plan.getTablesQueried());
    assertEquals(plan.getTableWeight(dataBase + ".test_part_table"), 500.0);
    System.out.println("Parts:" + plan.getPartitions());
    assertFalse(plan.getPartitions().isEmpty());
    assertEquals(plan.getPartitions().size(), 1);
    assertTrue(((String) plan.getPartitions().get(dataBase + ".test_part_table").iterator().next()).contains("today"));
    assertTrue(((String) plan.getPartitions().get(dataBase + ".test_part_table").iterator().next()).contains("dt"));
  }
  /**
   * Explain on a two-table join reports both queried tables, their weights
   * and a non-empty textual plan.
   *
   * @throws Exception the exception
   */
  @Test
  public void testExplainOutput() throws Exception {
    int handleSize = getHandleSize();
    createTestTable("explain_test_1");
    createTestTable("explain_test_2");
    SessionState.setCurrentSessionState(ss);
    DriverQueryPlan plan = driver.explain(createExplainContext("SELECT explain_test_1.ID, count(1) FROM "
      + " explain_test_1 join explain_test_2 on explain_test_1.ID = explain_test_2.ID"
      + " WHERE explain_test_1.ID = 'foo' or explain_test_2.ID = 'bar'" + " GROUP BY explain_test_1.ID",
      queryConf));
    assertHandleSize(handleSize);
    assertTrue(plan instanceof HiveQueryPlan);
    assertNotNull(plan.getTablesQueried());
    assertEquals(plan.getTablesQueried().size(), 2);
    assertNotNull(plan.getTableWeights());
    assertTrue(plan.getTableWeights().containsKey(dataBase + ".explain_test_1"));
    assertTrue(plan.getTableWeights().containsKey(dataBase + ".explain_test_2"));
    assertTrue(plan.getPlan() != null && !plan.getPlan().isEmpty());
    driver.closeQuery(plan.getHandle());
  }
/**
* Test explain output persistent.
*
* @throws Exception the exception
*/
@Test
public void testExplainOutputPersistent() throws Exception {
int handleSize = getHandleSize();
createTestTable("explain_test_1");
queryConf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, true);
SessionState.setCurrentSessionState(ss);
String query2 = "SELECT DISTINCT ID FROM explain_test_1";
PreparedQueryContext pctx = createPreparedQueryContext(query2);
pctx.setSelectedDriver(driver);
pctx.setLensSessionIdentifier(sessionid);
DriverQueryPlan plan2 = driver.explainAndPrepare(pctx);
// assertNotNull(plan2.getResultDestination());
assertHandleSize(handleSize);
assertNotNull(plan2.getTablesQueried());
assertEquals(plan2.getTablesQueried().size(), 1);
assertTrue(plan2.getTableWeights().containsKey(dataBase + ".explain_test_1"));
QueryContext ctx = createContext(pctx, queryConf);
LensResultSet resultSet = driver.execute(ctx);
assertHandleSize(handleSize);
HivePersistentResultSet persistentResultSet = (HivePersistentResultSet) resultSet;
String path = persistentResultSet.getOutputPath();
assertEquals(ctx.getDriverResultPath(), path);
driver.closeQuery(plan2.getHandle());
}
private PreparedQueryContext createPreparedQueryContext(String query2) {
PreparedQueryContext pctx = new PreparedQueryContext(query2, null, queryConf, drivers);
pctx.setSelectedDriver(driver);
pctx.setLensSessionIdentifier(sessionid);
return pctx;
}
@DataProvider
public Object[][] priorityDataProvider() throws IOException, ParseException {
BufferedReader br = new BufferedReader(new InputStreamReader(
TestHiveDriver.class.getResourceAsStream("/priority_tests.data")));
String line;
int numTests = Integer.parseInt(br.readLine());
Object[][] data = new Object[numTests][2];
for (int i = 0; i < numTests; i++) {
String[] kv = br.readLine().split("\\s*:\\s*");
final Set<FactPartition> partitions = getFactParts(Arrays.asList(kv[0].trim().split("\\s*,\\s*")));
final Priority expected = Priority.valueOf(kv[1]);
data[i] = new Object[]{partitions, expected};
}
return data;
}
  /**
   * Testing Duration Based Priority Logic by mocking everything except partitions.
   *
   * @throws IOException
   * @throws LensException
   * @throws ParseException
   */
  @Test(dataProvider = "priorityDataProvider")
  public void testPriority(final Set<FactPartition> partitions, Priority expected) throws Exception {
    Configuration conf = new Configuration();
    QueryContext ctx = createContext("test priority query", conf);
    // Stub rewriter plan: only the partitions and table weights matter here.
    ctx.getDriverContext().setDriverRewriterPlan(driver, new DriverQueryPlan() {
      @Override
      public String getPlan() {
        return null;
      }
      @Override
      public QueryCost getCost() {
        return null;
      }
    });
    ctx.getDriverContext().getDriverRewriterPlan(driver).getPartitions().putAll(
      new HashMap<String, Set<FactPartition>>() {
        {
          put("table1", partitions);
        }
      });
    // table weights only for first calculation
    ctx.getDriverContext().getDriverRewriterPlan(driver).getTableWeights().putAll(
      new HashMap<String, Double>() {
        {
          put("table1", 1.0);
        }
      });
    ctx.setOlapQuery(true);
    Priority priority = driver.decidePriority(ctx);
    assertEquals(priority, expected, "cost: " + ctx.getDriverQueryCost(driver) + "priority: " + priority);
    assertEquals(ctx.getConf().get("mapred.job.priority"), priority.toString());
    // The always-normal decider must override the cost-based decision.
    assertEquals(driver.decidePriority(ctx, alwaysNormalPriorityDecider), Priority.NORMAL);
  }

  /**
   * Priority decisions with no fact partitions (and then with an empty
   * rewriter plan) default to VERY_HIGH from the driver's decider, while the
   * always-normal decider still yields NORMAL.
   */
  @Test
  public void testPriorityWithoutFactPartitions() throws LensException {
    // test priority without fact partitions
    QueryContext ctx = createContext("test priority query", queryConf);
    ctx.getDriverContext().setDriverRewriterPlan(driver, new DriverQueryPlan() {
      @Override
      public String getPlan() {
        return null;
      }
      @Override
      public QueryCost getCost() {
        return null;
      }
    });
    ctx.getDriverContext().getDriverRewriterPlan(driver).getPartitions().putAll(
      new HashMap<String, Set<String>>() {
        {
          put("table1", new HashSet<String>());
        }
      });
    ctx.getDriverContext().getDriverRewriterPlan(driver).getTableWeights().putAll(
      new HashMap<String, Double>() {
        {
          put("table1", 1.0);
        }
      });
    ctx.setDriverCost(driver, driver.queryCostCalculator.calculateCost(ctx, driver));
    assertEquals(driver.decidePriority(ctx, driver.queryPriorityDecider), Priority.VERY_HIGH);
    assertEquals(driver.decidePriority(ctx, alwaysNormalPriorityDecider), Priority.NORMAL);
    // test priority without rewriter plan
    ctx = createContext("test priority query", queryConf);
    ctx.getDriverContext().setDriverRewriterPlan(driver, new DriverQueryPlan() {
      @Override
      public String getPlan() {
        return null;
      }
      @Override
      public QueryCost getCost() {
        return null;
      }
    });
    ctx.setDriverCost(driver, driver.queryCostCalculator.calculateCost(ctx, driver));
    assertEquals(driver.decidePriority(ctx), Priority.VERY_HIGH);
    assertEquals(alwaysNormalPriorityDecider.decidePriority(ctx.getDriverQueryCost(driver)), Priority.NORMAL);
  }
private Set<FactPartition> getFactParts(List<String> partStrings) throws ParseException {
Set<FactPartition> factParts = new HashSet<FactPartition>();
for (String partStr : partStrings) {
String[] partEls = partStr.split(" ");
UpdatePeriod p = null;
String partSpec = partEls[1];
switch (partSpec.length()) {
case 7: //monthly
p = UpdatePeriod.MONTHLY;
break;
case 10: // daily
p = UpdatePeriod.DAILY;
break;
case 13: // hourly
p = UpdatePeriod.HOURLY;
break;
}
FactPartition part = new FactPartition(partEls[0], p.parse(partSpec), p, null, p.format(),
Collections.singleton("table1"));
factParts.add(part);
}
return factParts;
}
  /** Returns the number of hive operation handles the driver currently tracks. */
  private int getHandleSize() {
    return driver.getHiveHandleSize();
  }

  /** Asserts the driver tracks exactly {@code handleSize} hive handles. */
  private void assertHandleSize(int handleSize) {
    assertEquals(getHandleSize(), handleSize, "Unexpected handle size, all handles: "
      + driver.getHiveHandles());
  }
}
|
apache/ofbiz-framework | 37,709 | applications/accounting/src/main/java/org/apache/ofbiz/accounting/thirdparty/cybersource/IcsPaymentServices.java | /*******************************************************************************
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*******************************************************************************/
package org.apache.ofbiz.accounting.thirdparty.cybersource;
import java.math.BigDecimal;
import java.math.RoundingMode;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Properties;
import org.apache.ofbiz.accounting.payment.PaymentGatewayServices;
import org.apache.ofbiz.base.util.Debug;
import org.apache.ofbiz.base.util.SSLUtil;
import org.apache.ofbiz.base.util.StringUtil;
import org.apache.ofbiz.base.util.UtilGenerics;
import org.apache.ofbiz.base.util.UtilMisc;
import org.apache.ofbiz.base.util.UtilNumber;
import org.apache.ofbiz.base.util.UtilProperties;
import org.apache.ofbiz.base.util.UtilValidate;
import org.apache.ofbiz.base.util.string.FlexibleStringExpander;
import org.apache.ofbiz.entity.Delegator;
import org.apache.ofbiz.entity.GenericEntityException;
import org.apache.ofbiz.entity.GenericValue;
import org.apache.ofbiz.entity.util.EntityUtilProperties;
import org.apache.ofbiz.service.DispatchContext;
import org.apache.ofbiz.service.ServiceUtil;
import com.cybersource.ws.client.Client;
import com.cybersource.ws.client.ClientException;
import com.cybersource.ws.client.FaultException;
/**
* CyberSource WS Integration Services
*/
public class IcsPaymentServices {
private static final String MODULE = IcsPaymentServices.class.getName();
// Resource bundle used for localized error messages returned to service callers.
private static final String RESOURCE = "AccountingUiLabels";
// Invoice decimal scale and rounding mode used when formatting monetary amounts
// sent to CyberSource (see getAmountString).
private static final int DECIMALS = UtilNumber.getBigDecimalScale("invoice.decimals");
private static final RoundingMode ROUNDING = UtilNumber.getRoundingMode("invoice.rounding");
// load the JSSE properties
static {
    SSLUtil.loadJsseProperties();
}
/**
 * Authorizes a credit card payment through the CyberSource ccAuthService
 * (and, when autoBill is enabled, captures it in the same call).
 *
 * @param dctx    service dispatch context
 * @param context service parameters (locale, payment config, billing info, ...)
 * @return service result map populated by {@link #processAuthResult}
 */
public static Map<String, Object> ccAuth(DispatchContext dctx, Map<String, ? extends Object> context) {
    Delegator delegator = dctx.getDelegator();
    Locale locale = (Locale) context.get("locale");
    // build the CyberSource client properties from the gateway configuration
    Properties props = buildCsProperties(context, delegator);
    if (props == null) {
        return ServiceUtil.returnError(UtilProperties.getMessage(RESOURCE,
                "AccountingCyberSourceErrorGettingPaymentGatewayConfig", locale));
    }
    // assemble the authorization request and stamp it with the merchant id
    Map<String, Object> authRequest = buildAuthRequest(context, delegator);
    authRequest.put("merchantID", props.get("merchantID"));
    // send the request to CyberSource
    Map<String, Object> response;
    try {
        response = UtilGenerics.cast(Client.runTransaction(authRequest, props));
    } catch (FaultException e) {
        Debug.logError(e, "ERROR: Fault from CyberSource", MODULE);
        Debug.logError(e, "Fault : " + e.getFaultString(), MODULE);
        return ServiceUtil.returnError(UtilProperties.getMessage(RESOURCE,
                "AccountingCyberSourceErrorCommunicateWithCyberSource", locale));
    } catch (ClientException e) {
        Debug.logError(e, "ERROR: CyberSource Client exception : " + e.getMessage(), MODULE);
        return ServiceUtil.returnError(UtilProperties.getMessage(RESOURCE,
                "AccountingCyberSourceErrorCommunicateWithCyberSource", locale));
    }
    // translate the gateway reply into the OFBiz service result
    Map<String, Object> result = ServiceUtil.returnSuccess();
    processAuthResult(response, result, delegator);
    return result;
}
/**
 * Re-authorization service; currently a no-op that reports success so the
 * payment framework can proceed. CyberSource re-auth is not implemented here.
 */
public static Map<String, Object> ccReAuth(DispatchContext dctx, Map<String, ? extends Object> context) {
    return ServiceUtil.returnSuccess();
}
/**
 * Captures a previously authorized payment through the CyberSource
 * ccCaptureService. The authorization transaction is taken from the context
 * ("authTrans") or looked up from the order payment preference.
 *
 * @param dctx    service dispatch context
 * @param context service parameters (orderPaymentPreference, captureAmount, ...)
 * @return service result map populated by {@link #processCaptureResult}
 */
public static Map<String, Object> ccCapture(DispatchContext dctx, Map<String, ? extends Object> context) {
    Delegator delegator = dctx.getDelegator();
    GenericValue orderPaymentPreference = (GenericValue) context.get("orderPaymentPreference");
    //lets see if there is a auth transaction already in context
    GenericValue authTransaction = (GenericValue) context.get("authTrans");
    Locale locale = (Locale) context.get("locale");
    if (authTransaction == null) {
        authTransaction = PaymentGatewayServices.getAuthTransaction(orderPaymentPreference);
    }
    if (authTransaction == null) {
        // nothing to capture without a prior authorization
        return ServiceUtil.returnError(UtilProperties.getMessage(RESOURCE,
                "AccountingPaymentTransactionAuthorizationNotFoundCannotCapture", locale));
    }
    // generate the request/properties
    Properties props = buildCsProperties(context, delegator);
    if (props == null) {
        return ServiceUtil.returnError(UtilProperties.getMessage(RESOURCE,
                "AccountingCyberSourceErrorGettingPaymentGatewayConfig", locale));
    }
    Map<String, Object> request = buildCaptureRequest(context, authTransaction, delegator);
    request.put("merchantID", props.get("merchantID"));
    // transmit the request
    Map<String, Object> reply;
    try {
        reply = UtilGenerics.cast(Client.runTransaction(request, props));
    } catch (FaultException e) {
        Debug.logError(e, "ERROR: Fault from CyberSource", MODULE);
        return ServiceUtil.returnError(UtilProperties.getMessage(RESOURCE,
                "AccountingCyberSourceErrorCommunicateWithCyberSource", locale));
    } catch (ClientException e) {
        Debug.logError(e, "ERROR: CyberSource Client exception : " + e.getMessage(), MODULE);
        return ServiceUtil.returnError(UtilProperties.getMessage(RESOURCE,
                "AccountingCyberSourceErrorCommunicateWithCyberSource", locale));
    }
    // process the reply
    Map<String, Object> result = ServiceUtil.returnSuccess();
    processCaptureResult(reply, result);
    return result;
}
/**
 * Releases (reverses) a previous authorization through the CyberSource
 * ccAuthReversalService.
 *
 * @param dctx    service dispatch context
 * @param context service parameters (orderPaymentPreference, releaseAmount, ...)
 * @return service result map populated by {@link #processReleaseResult}
 */
public static Map<String, Object> ccRelease(DispatchContext dctx, Map<String, ? extends Object> context) {
    Delegator delegator = dctx.getDelegator();
    GenericValue orderPaymentPreference = (GenericValue) context.get("orderPaymentPreference");
    Locale locale = (Locale) context.get("locale");
    GenericValue authTransaction = PaymentGatewayServices.getAuthTransaction(orderPaymentPreference);
    if (authTransaction == null) {
        // cannot reverse an authorization that was never recorded
        return ServiceUtil.returnError(UtilProperties.getMessage(RESOURCE,
                "AccountingPaymentTransactionAuthorizationNotFoundCannotRelease", locale));
    }
    // generate the request/properties
    Properties props = buildCsProperties(context, delegator);
    if (props == null) {
        return ServiceUtil.returnError(UtilProperties.getMessage(RESOURCE,
                "AccountingCyberSourceErrorGettingPaymentGatewayConfig", locale));
    }
    Map<String, Object> request = buildReleaseRequest(context, authTransaction);
    request.put("merchantID", props.get("merchantID"));
    // transmit the request
    Map<String, Object> reply;
    try {
        reply = UtilGenerics.cast(Client.runTransaction(request, props));
    } catch (FaultException e) {
        Debug.logError(e, "ERROR: Fault from CyberSource", MODULE);
        return ServiceUtil.returnError(UtilProperties.getMessage(RESOURCE,
                "AccountingCyberSourceErrorCommunicateWithCyberSource", locale));
    } catch (ClientException e) {
        Debug.logError(e, "ERROR: CyberSource Client exception : " + e.getMessage(), MODULE);
        return ServiceUtil.returnError(UtilProperties.getMessage(RESOURCE,
                "AccountingCyberSourceErrorCommunicateWithCyberSource", locale));
    }
    // process the reply
    Map<String, Object> result = ServiceUtil.returnSuccess();
    processReleaseResult(reply, result);
    return result;
}
/**
 * Refunds a captured payment through the CyberSource ccCreditService,
 * referencing the capture request id of the original authorization transaction.
 *
 * @param dctx    service dispatch context
 * @param context service parameters (orderPaymentPreference, refundAmount, ...)
 * @return service result map populated by {@link #processRefundResult}
 */
public static Map<String, Object> ccRefund(DispatchContext dctx, Map<String, ? extends Object> context) {
    Delegator delegator = dctx.getDelegator();
    GenericValue orderPaymentPreference = (GenericValue) context.get("orderPaymentPreference");
    Locale locale = (Locale) context.get("locale");
    GenericValue authTransaction = PaymentGatewayServices.getAuthTransaction(orderPaymentPreference);
    if (authTransaction == null) {
        // a refund must reference a prior (captured) authorization
        return ServiceUtil.returnError(UtilProperties.getMessage(RESOURCE,
                "AccountingPaymentTransactionAuthorizationNotFoundCannotRefund", locale));
    }
    // generate the request/properties
    Properties props = buildCsProperties(context, delegator);
    if (props == null) {
        return ServiceUtil.returnError(UtilProperties.getMessage(RESOURCE,
                "AccountingCyberSourceErrorGettingPaymentGatewayConfig", locale));
    }
    Map<String, Object> request = buildRefundRequest(context, authTransaction, delegator);
    request.put("merchantID", props.get("merchantID"));
    // transmit the request
    Map<String, Object> reply;
    try {
        reply = UtilGenerics.cast(Client.runTransaction(request, props));
    } catch (FaultException e) {
        Debug.logError(e, "ERROR: Fault from CyberSource", MODULE);
        return ServiceUtil.returnError(UtilProperties.getMessage(RESOURCE,
                "AccountingCyberSourceErrorCommunicateWithCyberSource", locale));
    } catch (ClientException e) {
        Debug.logError(e, "ERROR: CyberSource Client exception : " + e.getMessage(), MODULE);
        return ServiceUtil.returnError(UtilProperties.getMessage(RESOURCE,
                "AccountingCyberSourceErrorCommunicateWithCyberSource", locale));
    }
    // process the reply
    Map<String, Object> result = ServiceUtil.returnSuccess();
    processRefundResult(reply, result);
    return result;
}
/**
 * Issues a stand-alone credit (not tied to a prior capture) through the
 * CyberSource ccCreditService, using full billing/item info from the context.
 *
 * @param dctx    service dispatch context
 * @param context service parameters (referenceCode, creditAmount, billing info, ...)
 * @return service result map populated by {@link #processCreditResult}
 */
public static Map<String, Object> ccCredit(DispatchContext dctx, Map<String, ? extends Object> context) {
    Delegator delegator = dctx.getDelegator();
    Locale locale = (Locale) context.get("locale");
    // generate the request/properties
    Properties props = buildCsProperties(context, delegator);
    if (props == null) {
        return ServiceUtil.returnError(UtilProperties.getMessage(RESOURCE,
                "AccountingCyberSourceErrorGettingPaymentGatewayConfig", locale));
    }
    Map<String, Object> request = buildCreditRequest(context);
    request.put("merchantID", props.get("merchantID"));
    // transmit the request
    Map<String, Object> reply;
    try {
        reply = UtilGenerics.cast(Client.runTransaction(request, props));
    } catch (FaultException e) {
        Debug.logError(e, "ERROR: Fault from CyberSource", MODULE);
        return ServiceUtil.returnError(UtilProperties.getMessage(RESOURCE,
                "AccountingCyberSourceErrorCommunicateWithCyberSource", locale));
    } catch (ClientException e) {
        Debug.logError(e, "ERROR: CyberSource Client exception : " + e.getMessage(), MODULE);
        return ServiceUtil.returnError(UtilProperties.getMessage(RESOURCE,
                "AccountingCyberSourceErrorCommunicateWithCyberSource", locale));
    }
    // process the reply
    Map<String, Object> result = ServiceUtil.returnSuccess();
    processCreditResult(reply, result);
    return result;
}
/**
 * Assembles the CyberSource client {@link Properties} from either the
 * PaymentGatewayCyberSource entity (when a paymentGatewayConfigId is present)
 * or the payment properties file.
 *
 * @return client properties, or {@code null} when a required setting
 *         (merchantId or keysDir) is missing
 */
private static Properties buildCsProperties(Map<String, ? extends Object> context, Delegator delegator) {
    String paymentGatewayConfigId = (String) context.get("paymentGatewayConfigId");
    String configString = (String) context.get("paymentConfig");
    if (configString == null) {
        configString = "payment.properties";
    }
    // resolve every client setting from the gateway config or the properties file
    String merchantId = getPaymentGatewayConfigValue(delegator, paymentGatewayConfigId, "merchantId", configString, "payment.cybersource.merchantID");
    String targetApi = getPaymentGatewayConfigValue(delegator, paymentGatewayConfigId, "apiVersion", configString, "payment.cybersource.api.version");
    String production = getPaymentGatewayConfigValue(delegator, paymentGatewayConfigId, "production", configString, "payment.cybersource.production");
    String enableLog = getPaymentGatewayConfigValue(delegator, paymentGatewayConfigId, "logEnabled", configString, "payment.cybersource.log");
    String logSize = getPaymentGatewayConfigValue(delegator, paymentGatewayConfigId, "logSize", configString, "payment.cybersource.log.size");
    String logFile = getPaymentGatewayConfigValue(delegator, paymentGatewayConfigId, "logFile", configString, "payment.cybersource.log.file");
    String logDir = FlexibleStringExpander.expandString(getPaymentGatewayConfigValue(delegator, paymentGatewayConfigId, "logDir", configString, "payment.cybersource.log.dir"), context);
    String keysDir = FlexibleStringExpander.expandString(getPaymentGatewayConfigValue(delegator, paymentGatewayConfigId, "keysDir", configString, "payment.cybersource.keysDir"), context);
    String keysFile = getPaymentGatewayConfigValue(delegator, paymentGatewayConfigId, "keysFile", configString, "payment.cybersource.keysFile");
    // bail out early when the mandatory settings are absent
    if (UtilValidate.isEmpty(merchantId)) {
        Debug.logWarning("The merchantId property is not configured", MODULE);
        return null;
    }
    if (UtilValidate.isEmpty(keysDir)) {
        Debug.logWarning("The keysDir property is not configured", MODULE);
        return null;
    }
    // build the property set expected by the CyberSource Client
    Properties csProps = new Properties();
    csProps.put("merchantID", merchantId);
    csProps.put("keysDirectory", keysDir);
    csProps.put("targetAPIVersion", targetApi);
    csProps.put("sendToProduction", production);
    csProps.put("enableLog", enableLog);
    csProps.put("logDirectory", logDir);
    csProps.put("logFilename", logFile);
    csProps.put("logMaximumSize", logSize);
    if (UtilValidate.isNotEmpty(keysFile)) {
        csProps.put("alternateKeyFilename", keysFile);
    }
    Debug.logInfo("Created CyberSource Properties : " + csProps, MODULE);
    return csProps;
}
/**
 * Builds the request map for a CyberSource authorization. When the autoBill
 * setting is "true" the capture service runs in the same call (a "sale").
 */
private static Map<String, Object> buildAuthRequest(Map<String, ? extends Object> context, Delegator delegator) {
    String paymentGatewayConfigId = (String) context.get("paymentGatewayConfigId");
    String configString = (String) context.get("paymentConfig");
    String currency = (String) context.get("currency");
    if (configString == null) {
        configString = "payment.properties";
    }
    // should the capture run together with the auth?
    String capture = getPaymentGatewayConfigValue(delegator, paymentGatewayConfigId, "autoBill", configString, "payment.cybersource.autoBill", "false");
    Map<String, Object> authRequest = new HashMap<>();
    authRequest.put("ccAuthService_run", "true"); // run auth service
    authRequest.put("ccCaptureService_run", capture); // run capture service (i.e. sale)
    authRequest.put("merchantReferenceCode", (String) context.get("orderId")); // set the order ref number
    authRequest.put("purchaseTotals_currency", currency); // set the order currency
    appendFullBillingInfo(authRequest, context); // add in all address info
    appendItemLineInfo(authRequest, context, "processAmount"); // add in the item info
    appendAvsRules(authRequest, context, delegator); // add in the AVS flags and decline codes
    return authRequest;
}
/**
 * Builds the request map for a CyberSource capture against a prior
 * authorization, optionally attaching merchant descriptor/contact info.
 */
private static Map<String, Object> buildCaptureRequest(Map<String, ? extends Object> context, GenericValue authTransaction, Delegator delegator) {
    GenericValue orderPaymentPreference = (GenericValue) context.get("orderPaymentPreference");
    String paymentGatewayConfigId = (String) context.get("paymentGatewayConfigId");
    String configString = (String) context.get("paymentConfig");
    String currency = (String) context.get("currency");
    if (configString == null) {
        configString = "payment.properties";
    }
    Map<String, Object> captureRequest = new HashMap<>();
    captureRequest.put("ccCaptureService_run", "true");
    // reference the original authorization by its request id
    captureRequest.put("ccCaptureService_authRequestID", authTransaction.getString("referenceNum"));
    captureRequest.put("item_0_unitPrice", getAmountString(context, "captureAmount"));
    captureRequest.put("merchantReferenceCode", orderPaymentPreference.getString("orderId"));
    captureRequest.put("purchaseTotals_currency", currency);
    // TODO: add support for verbal authorizations
    String merchantDesc = getPaymentGatewayConfigValue(delegator, paymentGatewayConfigId, "merchantDescr", configString, "payment.cybersource.merchantDescr", null);
    if (merchantDesc != null) {
        captureRequest.put("invoiceHeader_merchantDescriptor", merchantDesc); // merchant description
    }
    String merchantCont = getPaymentGatewayConfigValue(delegator, paymentGatewayConfigId, "merchantContact", configString, "payment.cybersource.merchantContact", null);
    if (merchantCont != null) {
        captureRequest.put("invoiceHeader_merchantDescriptorContact", merchantCont); // merchant contact info
    }
    return captureRequest;
}
/**
 * Builds the request map for a CyberSource authorization reversal (release),
 * referencing the original auth transaction's request id.
 */
private static Map<String, Object> buildReleaseRequest(Map<String, ? extends Object> context, GenericValue authTransaction) {
    GenericValue orderPaymentPreference = (GenericValue) context.get("orderPaymentPreference");
    Map<String, Object> releaseRequest = new HashMap<>();
    releaseRequest.put("ccAuthReversalService_run", "true");
    releaseRequest.put("ccAuthReversalService_authRequestID", authTransaction.getString("referenceNum"));
    releaseRequest.put("item_0_unitPrice", getAmountString(context, "releaseAmount"));
    releaseRequest.put("merchantReferenceCode", orderPaymentPreference.getString("orderId"));
    releaseRequest.put("purchaseTotals_currency", (String) context.get("currency"));
    return releaseRequest;
}
/**
 * Builds the request map for a CyberSource follow-on credit (refund),
 * referencing the capture request id of the original transaction and
 * optionally attaching merchant descriptor/contact info.
 */
private static Map<String, Object> buildRefundRequest(Map<String, ? extends Object> context, GenericValue authTransaction, Delegator delegator) {
    GenericValue orderPaymentPreference = (GenericValue) context.get("orderPaymentPreference");
    String paymentGatewayConfigId = (String) context.get("paymentGatewayConfigId");
    String configString = (String) context.get("paymentConfig");
    if (configString == null) {
        configString = "payment.properties";
    }
    String currency = (String) context.get("currency");
    String merchantDesc = getPaymentGatewayConfigValue(delegator, paymentGatewayConfigId, "merchantDescr", configString, "payment.cybersource.merchantDescr", null);
    String merchantCont = getPaymentGatewayConfigValue(delegator, paymentGatewayConfigId, "merchantContact", configString, "payment.cybersource.merchantContact", null);
    Map<String, Object> request = new HashMap<>();
    request.put("ccCreditService_run", "true");
    // link this credit to the original capture so it is a follow-on refund
    request.put("ccCreditService_captureRequestID", authTransaction.getString("referenceNum"));
    request.put("item_0_unitPrice", getAmountString(context, "refundAmount"));
    request.put("merchantReferenceCode", orderPaymentPreference.getString("orderId"));
    request.put("purchaseTotals_currency", currency);
    if (merchantDesc != null) {
        request.put("invoiceHeader_merchantDescriptor", merchantDesc); // merchant description
    }
    if (merchantCont != null) {
        request.put("invoiceHeader_merchantDescriptorContact", merchantCont); // merchant contact info
    }
    return request;
}
/**
 * Builds the request map for a stand-alone CyberSource credit, carrying the
 * full billing and item-line information from the context.
 */
private static Map<String, Object> buildCreditRequest(Map<String, ? extends Object> context) {
    Map<String, Object> creditRequest = new HashMap<>();
    creditRequest.put("ccCreditService_run", "true"); // run credit service
    creditRequest.put("merchantReferenceCode", (String) context.get("referenceCode")); // set the ref number could be order id
    appendFullBillingInfo(creditRequest, context); // add in all address info
    appendItemLineInfo(creditRequest, context, "creditAmount"); // add in the item info
    return creditRequest;
}
/**
 * Adds AVS (address verification) business rules to the request: the set of
 * AVS codes that should decline the transaction (possibly overridden per party
 * via PartyIcsAvsOverride) and the ignoreAvs flag.
 */
private static void appendAvsRules(Map<String, Object> request, Map<String, ? extends Object> context, Delegator delegator) {
    String paymentGatewayConfigId = (String) context.get("paymentGatewayConfigId");
    String configString = (String) context.get("paymentConfig");
    if (configString == null) {
        configString = "payment.properties";
    }
    String avsCodes = getPaymentGatewayConfigValue(delegator, paymentGatewayConfigId, "avsDeclineCodes", configString, "payment.cybersource.avsDeclineCodes", null);
    GenericValue party = (GenericValue) context.get("billToParty");
    if (party != null) {
        // a party-specific override, when present and non-empty, wins over the configured codes
        GenericValue avsOverride = null;
        try {
            avsOverride = party.getDelegator().findOne("PartyIcsAvsOverride",
                    UtilMisc.toMap("partyId", party.getString("partyId")), false);
        } catch (GenericEntityException e) {
            Debug.logError(e, MODULE);
        }
        if (avsOverride != null && avsOverride.get("avsDeclineString") != null) {
            String overrideString = avsOverride.getString("avsDeclineString");
            if (UtilValidate.isNotEmpty(overrideString)) {
                avsCodes = overrideString;
            }
        }
    }
    if (UtilValidate.isNotEmpty(avsCodes)) {
        request.put("businessRules_declineAVSFlags", avsCodes);
    }
    String avsIgnore = getPaymentGatewayConfigValue(delegator, paymentGatewayConfigId, "ignoreAvs", configString, "payment.cybersource.ignoreAvs", "false");
    request.put("businessRules_ignoreAVS", avsIgnore);
}
/**
 * Adds the full billing information to the request: contact email, credit
 * card data, CV number, billing address and (when available) shipping address.
 * Missing pieces are logged as warnings since CyberSource will reject the
 * transaction without them.
 */
private static void appendFullBillingInfo(Map<String, Object> request, Map<String, ? extends Object> context) {
    // contact info
    GenericValue email = (GenericValue) context.get("billToEmail");
    if (email != null) {
        request.put("billTo_email", email.getString("infoString"));
    } else {
        Debug.logWarning("Email not defined; Cybersource will fail.", MODULE);
    }
    // phone number seems to not be used; possibly only for reporting.
    // CC payment info
    GenericValue creditCard = (GenericValue) context.get("creditCard");
    if (creditCard != null) {
        List<String> expDateList = StringUtil.split(creditCard.getString("expireDate"), "/");
        request.put("billTo_firstName", creditCard.getString("firstNameOnCard"));
        request.put("billTo_lastName", creditCard.getString("lastNameOnCard"));
        request.put("card_accountNumber", creditCard.getString("cardNumber"));
        request.put("card_expirationMonth", expDateList.get(0));
        request.put("card_expirationYear", expDateList.get(1));
    } else {
        Debug.logWarning("CreditCard not defined; Cybersource will fail.", MODULE);
    }
    // CCV info
    String cvNum = (String) context.get("cardSecurityCode");
    // card_cvIndicator: "1" means the CV number is provided, "0" means it is not.
    // FIX: the indicator was previously inverted ("1" when cvNum was EMPTY), and the
    // guard compared the CV number itself against "1", so card_cvNumber was
    // effectively never transmitted to CyberSource.
    String cvSet = UtilValidate.isEmpty(cvNum) ? "0" : "1";
    request.put("card_cvIndicator", cvSet);
    if ("1".equals(cvSet)) {
        request.put("card_cvNumber", cvNum);
    }
    // payment contact info
    GenericValue billingAddress = (GenericValue) context.get("billingAddress");
    if (billingAddress != null) {
        request.put("billTo_street1", billingAddress.getString("address1"));
        if (billingAddress.get("address2") != null) {
            request.put("billTo_street2", billingAddress.getString("address2"));
        }
        request.put("billTo_city", billingAddress.getString("city"));
        // default to USA when no country is set -- TODO confirm this assumption still holds
        String bCountry = billingAddress.get("countryGeoId") != null ? billingAddress.getString("countryGeoId") : "USA";
        request.put("billTo_country", bCountry);
        request.put("billTo_postalCode", billingAddress.getString("postalCode"));
        if (billingAddress.get("stateProvinceGeoId") != null) {
            request.put("billTo_state", billingAddress.getString("stateProvinceGeoId"));
        }
    } else {
        Debug.logWarning("BillingAddress not defined; Cybersource will fail.", MODULE);
    }
    // order shipping information
    GenericValue shippingAddress = (GenericValue) context.get("shippingAddress");
    if (shippingAddress != null) {
        if (creditCard != null) {
            // TODO: this is just a kludge since we don't have a firstName and lastName on the PostalAddress entity, that needs to be done
            request.put("shipTo_firstName", creditCard.getString("firstNameOnCard"));
            request.put("shipTo_lastName", creditCard.getString("lastNameOnCard"));
        }
        request.put("shipTo_street1", shippingAddress.getString("address1"));
        if (shippingAddress.get("address2") != null) {
            request.put("shipTo_street2", shippingAddress.getString("address2"));
        }
        request.put("shipTo_city", shippingAddress.getString("city"));
        String sCountry = shippingAddress.get("countryGeoId") != null ? shippingAddress.getString("countryGeoId") : "USA";
        request.put("shipTo_country", sCountry);
        request.put("shipTo_postalCode", shippingAddress.getString("postalCode"));
        if (shippingAddress.get("stateProvinceGeoId") != null) {
            request.put("shipTo_state", shippingAddress.getString("stateProvinceGeoId"));
        }
    }
}
/**
 * Adds item line info to the request. Line 0 carries the full billing amount
 * (we do not trust CyberSource to total the lines); each order item is then
 * appended with a zero unit price and its product/quantity details.
 *
 * @param amountField context key holding the BigDecimal total to bill
 */
private static void appendItemLineInfo(Map<String, Object> request, Map<String, ? extends Object> context, String amountField) {
    // send over a line item total offer w/ the total for billing; don't trust CyberSource for calc
    String currency = (String) context.get("currency");
    int lineNumber = 0;
    request.put("item_" + lineNumber + "_unitPrice", getAmountString(context, amountField));
    // the currency
    request.put("purchaseTotals_currency", currency);
    // create the offers (one for each line item)
    List<GenericValue> orderItems = UtilGenerics.cast(context.get("orderItems"));
    if (orderItems != null) {
        for (Object orderItem : orderItems) {
            lineNumber++;
            GenericValue item = (GenericValue) orderItem;
            GenericValue product = null;
            try {
                product = item.getRelatedOne("Product", false);
            } catch (GenericEntityException e) {
                // FIX: MODULE argument was missing, unlike every other log call in this class
                Debug.logError(e, "ERROR: Unable to get Product from OrderItem, not passing info to CyberSource", MODULE);
            }
            if (product != null) {
                request.put("item_" + lineNumber + "_productName", product.getString("productName"));
                request.put("item_" + lineNumber + "_productSKU", product.getString("productId"));
            } else {
                // no product; just send the item description -- non product items
                request.put("item_" + lineNumber + "_productName", item.getString("description"));
            }
            // get the quantity..
            BigDecimal quantity = item.getBigDecimal("quantity");
            // test quantity if INT pass as is; if not (fractional or missing) pass as 1
            if (quantity == null || quantity.scale() > 0) {
                request.put("item_" + lineNumber + "_quantity", "1");
            } else {
                // FIX: the quantity was previously stored under an empty key (""),
                // so integer quantities were never sent to CyberSource
                request.put("item_" + lineNumber + "_quantity", Integer.toString(quantity.intValue()));
            }
            // set the amount to 0.0000 -- we will send a total too.
            request.put("item_" + lineNumber + "_unitPrice", "0.0000");
        }
    }
}
/**
 * Formats the BigDecimal stored under {@code amountField} in the context using
 * the invoice decimal scale and rounding mode, as a plain (non-scientific) string.
 */
private static String getAmountString(Map<String, ? extends Object> context, String amountField) {
    BigDecimal amount = (BigDecimal) context.get(amountField);
    return amount.setScale(DECIMALS, ROUNDING).toPlainString();
}
/**
 * Maps the CyberSource authorization reply into the OFBiz service result:
 * auth flags/codes, processed amount, AVS/CV codes and, when a capture ran in
 * the same call (autoBill), the capture outcome as well.
 */
private static void processAuthResult(Map<String, Object> reply, Map<String, Object> result, Delegator delegator) {
    String decision = getDecision(reply);
    // when ignoreStatus is not "N", declined replies are still reported as authorized
    // -- presumably a test-mode switch; TODO confirm intended semantics
    String checkModeStatus = EntityUtilProperties.getPropertyValue("payment", "payment.cybersource.ignoreStatus", delegator);
    if ("ACCEPT".equalsIgnoreCase(decision)) {
        result.put("authCode", reply.get("ccAuthReply_authorizationCode"));
        result.put("authResult", Boolean.TRUE);
    } else {
        result.put("authCode", decision);
        if ("N".equals(checkModeStatus)) {
            result.put("authResult", Boolean.FALSE);
        } else {
            result.put("authResult", Boolean.TRUE);
        }
        // TODO: based on reasonCode populate the following flags as applicable: resultDeclined, resultNsf, resultBadExpire, resultBadCardNumber
    }
    if (reply.get("ccAuthReply_amount") != null) {
        result.put("processAmount", new BigDecimal((String) reply.get("ccAuthReply_amount")));
    } else {
        result.put("processAmount", BigDecimal.ZERO);
    }
    result.put("authRefNum", reply.get("requestID"));
    result.put("authFlag", reply.get("ccAuthReply_reasonCode"));
    result.put("authMessage", reply.get("ccAuthReply_processorResponse"));
    result.put("cvCode", reply.get("ccAuthReply_cvCode"));
    result.put("avsCode", reply.get("ccAuthReply_avsCode"));
    result.put("scoreCode", reply.get("ccAuthReply_authFactorCode"));
    result.put("captureRefNum", reply.get("requestID"));
    // a capture reconciliation id means the capture service also ran (autoBill/sale)
    if (UtilValidate.isNotEmpty(reply.get("ccCaptureReply_reconciliationID"))) {
        if ("ACCEPT".equalsIgnoreCase(decision)) {
            result.put("captureResult", Boolean.TRUE);
        } else {
            result.put("captureResult", Boolean.FALSE);
        }
        result.put("captureCode", reply.get("ccCaptureReply_reconciliationID"));
        result.put("captureFlag", reply.get("ccCaptureReply_reasonCode"));
        result.put("captureMessage", reply.get("decision"));
    }
    if (Debug.infoOn())
        Debug.logInfo("CC [Cybersource] authorization result : " + result, MODULE);
}
/**
 * Maps the CyberSource capture reply into the OFBiz service result
 * (success flag, captured amount, reference and reason codes).
 */
private static void processCaptureResult(Map<String, Object> reply, Map<String, Object> result) {
    String decision = getDecision(reply);
    result.put("captureResult", "ACCEPT".equalsIgnoreCase(decision) ? Boolean.TRUE : Boolean.FALSE);
    String capturedAmount = (String) reply.get("ccCaptureReply_amount");
    result.put("captureAmount", capturedAmount != null ? new BigDecimal(capturedAmount) : BigDecimal.ZERO);
    result.put("captureRefNum", reply.get("requestID"));
    result.put("captureCode", reply.get("ccCaptureReply_reconciliationID"));
    result.put("captureFlag", reply.get("ccCaptureReply_reasonCode"));
    result.put("captureMessage", reply.get("decision"));
    if (Debug.infoOn()) {
        Debug.logInfo("CC [Cybersource] capture result : " + result, MODULE);
    }
}
/**
 * Maps the CyberSource auth-reversal (release) reply into the OFBiz service
 * result (success flag, released amount, reference and reason codes).
 */
private static void processReleaseResult(Map<String, Object> reply, Map<String, Object> result) {
    String decision = getDecision(reply);
    if ("ACCEPT".equalsIgnoreCase(decision)) {
        result.put("releaseResult", Boolean.TRUE);
    } else {
        result.put("releaseResult", Boolean.FALSE);
    }
    if (reply.get("ccAuthReversalReply_amount") != null) {
        result.put("releaseAmount", new BigDecimal((String) reply.get("ccAuthReversalReply_amount")));
    } else {
        result.put("releaseAmount", BigDecimal.ZERO);
    }
    result.put("releaseRefNum", reply.get("requestID"));
    // NOTE(review): releaseCode uses the reversal reason code and releaseFlag the
    // top-level reasonCode -- a different pattern than the other processors; confirm intended
    result.put("releaseCode", reply.get("ccAuthReversalReply_reasonCode"));
    result.put("releaseFlag", reply.get("reasonCode"));
    result.put("releaseMessage", reply.get("decision"));
    if (Debug.infoOn())
        Debug.logInfo("CC [Cybersource] release result : " + result, MODULE);
}
/**
 * Maps the CyberSource credit (refund) reply into the OFBiz service result
 * (success flag, refunded amount, reference and reason codes).
 */
private static void processRefundResult(Map<String, Object> reply, Map<String, Object> result) {
    String decision = getDecision(reply);
    if ("ACCEPT".equalsIgnoreCase(decision)) {
        result.put("refundResult", Boolean.TRUE);
    } else {
        result.put("refundResult", Boolean.FALSE);
    }
    if (reply.get("ccCreditReply_amount") != null) {
        result.put("refundAmount", new BigDecimal((String) reply.get("ccCreditReply_amount")));
    } else {
        result.put("refundAmount", BigDecimal.ZERO);
    }
    result.put("refundRefNum", reply.get("requestID"));
    result.put("refundCode", reply.get("ccCreditReply_reconciliationID"));
    result.put("refundFlag", reply.get("ccCreditReply_reasonCode"));
    result.put("refundMessage", reply.get("decision"));
    if (Debug.infoOn())
        Debug.logInfo("CC [Cybersource] refund result : " + result, MODULE);
}
/**
 * Maps the CyberSource stand-alone credit reply into the OFBiz service result
 * (success flag, credited amount, reference and reason codes).
 */
private static void processCreditResult(Map<String, Object> reply, Map<String, Object> result) {
    // FIX: use getDecision() like every other process*Result method so that
    // non-ACCEPT replies are logged with their reason code and a reply dump.
    String decision = getDecision(reply);
    if ("ACCEPT".equalsIgnoreCase(decision)) {
        result.put("creditResult", Boolean.TRUE);
    } else {
        result.put("creditResult", Boolean.FALSE);
    }
    if (reply.get("ccCreditReply_amount") != null) {
        result.put("creditAmount", new BigDecimal((String) reply.get("ccCreditReply_amount")));
    } else {
        result.put("creditAmount", BigDecimal.ZERO);
    }
    result.put("creditRefNum", reply.get("requestID"));
    result.put("creditCode", reply.get("ccCreditReply_reconciliationID"));
    result.put("creditFlag", reply.get("ccCreditReply_reasonCode"));
    result.put("creditMessage", reply.get("decision"));
    if (Debug.infoOn()) {
        Debug.logInfo("CC [Cybersource] credit result : " + result, MODULE);
    }
}
/**
 * Extracts the gateway "decision" from the reply; any non-ACCEPT decision is
 * logged together with its reason code and a dump of the full reply.
 */
private static String getDecision(Map<String, Object> reply) {
    String decision = (String) reply.get("decision");
    if (!"ACCEPT".equalsIgnoreCase(decision)) {
        String reasonCode = (String) reply.get("reasonCode");
        Debug.logInfo("CyberSource : " + decision + " (" + reasonCode + ")", MODULE);
        Debug.logInfo("Reply Dump : " + reply, MODULE);
    }
    return decision;
}
/**
 * Resolves a gateway configuration value: from the PaymentGatewayCyberSource
 * entity when a paymentGatewayConfigId is given, otherwise from the named
 * properties resource. Returns an empty string when not found.
 */
private static String getPaymentGatewayConfigValue(Delegator delegator, String paymentGatewayConfigId, String paymentGatewayConfigParameterName,
        String resource, String parameterName) {
    String returnValue = "";
    if (UtilValidate.isNotEmpty(paymentGatewayConfigId)) {
        try {
            // FIX: the previous code called EntityQuery, which is not imported in this
            // file and would not compile; use Delegator.findOne directly (the same
            // lookup pattern used for PartyIcsAvsOverride in appendAvsRules).
            GenericValue cyberSource = delegator.findOne("PaymentGatewayCyberSource",
                    UtilMisc.toMap("paymentGatewayConfigId", paymentGatewayConfigId), false);
            if (cyberSource != null) {
                Object cyberSourceField = cyberSource.get(paymentGatewayConfigParameterName);
                if (cyberSourceField != null) {
                    returnValue = cyberSourceField.toString().trim();
                }
            }
        } catch (GenericEntityException e) {
            Debug.logError(e, MODULE);
        }
    } else {
        String value = EntityUtilProperties.getPropertyValue(resource, parameterName, delegator);
        if (value != null) {
            returnValue = value.trim();
        }
    }
    return returnValue;
}
/**
 * Same as {@link #getPaymentGatewayConfigValue(Delegator, String, String, String, String)}
 * but falls back to {@code defaultValue} when the resolved value is empty.
 */
private static String getPaymentGatewayConfigValue(Delegator delegator, String paymentGatewayConfigId, String paymentGatewayConfigParameterName,
        String resource, String parameterName, String defaultValue) {
    String configuredValue = getPaymentGatewayConfigValue(delegator, paymentGatewayConfigId, paymentGatewayConfigParameterName, resource, parameterName);
    return UtilValidate.isEmpty(configuredValue) ? defaultValue : configuredValue;
}
}
|
googleapis/google-cloud-java | 37,948 | java-chronicle/proto-google-cloud-chronicle-v1/src/main/java/com/google/cloud/chronicle/v1/ListDataAccessLabelsResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/chronicle/v1/data_access_control.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.chronicle.v1;
/**
*
*
* <pre>
* Response message for ListDataAccessLabels.
* </pre>
*
* Protobuf type {@code google.cloud.chronicle.v1.ListDataAccessLabelsResponse}
*/
public final class ListDataAccessLabelsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.chronicle.v1.ListDataAccessLabelsResponse)
ListDataAccessLabelsResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListDataAccessLabelsResponse.newBuilder() to construct.
private ListDataAccessLabelsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListDataAccessLabelsResponse() {
dataAccessLabels_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListDataAccessLabelsResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.chronicle.v1.DataAccessProto
.internal_static_google_cloud_chronicle_v1_ListDataAccessLabelsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.chronicle.v1.DataAccessProto
.internal_static_google_cloud_chronicle_v1_ListDataAccessLabelsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.chronicle.v1.ListDataAccessLabelsResponse.class,
com.google.cloud.chronicle.v1.ListDataAccessLabelsResponse.Builder.class);
}
public static final int DATA_ACCESS_LABELS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.chronicle.v1.DataAccessLabel> dataAccessLabels_;
/**
*
*
* <pre>
* List of data access labels.
* </pre>
*
* <code>repeated .google.cloud.chronicle.v1.DataAccessLabel data_access_labels = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.chronicle.v1.DataAccessLabel> getDataAccessLabelsList() {
return dataAccessLabels_;
}
/**
*
*
* <pre>
* List of data access labels.
* </pre>
*
* <code>repeated .google.cloud.chronicle.v1.DataAccessLabel data_access_labels = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.chronicle.v1.DataAccessLabelOrBuilder>
getDataAccessLabelsOrBuilderList() {
return dataAccessLabels_;
}
/**
*
*
* <pre>
* List of data access labels.
* </pre>
*
* <code>repeated .google.cloud.chronicle.v1.DataAccessLabel data_access_labels = 1;</code>
*/
@java.lang.Override
public int getDataAccessLabelsCount() {
return dataAccessLabels_.size();
}
/**
*
*
* <pre>
* List of data access labels.
* </pre>
*
* <code>repeated .google.cloud.chronicle.v1.DataAccessLabel data_access_labels = 1;</code>
*/
@java.lang.Override
public com.google.cloud.chronicle.v1.DataAccessLabel getDataAccessLabels(int index) {
return dataAccessLabels_.get(index);
}
/**
*
*
* <pre>
* List of data access labels.
* </pre>
*
* <code>repeated .google.cloud.chronicle.v1.DataAccessLabel data_access_labels = 1;</code>
*/
@java.lang.Override
public com.google.cloud.chronicle.v1.DataAccessLabelOrBuilder getDataAccessLabelsOrBuilder(
int index) {
return dataAccessLabels_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < dataAccessLabels_.size(); i++) {
output.writeMessage(1, dataAccessLabels_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < dataAccessLabels_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, dataAccessLabels_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.chronicle.v1.ListDataAccessLabelsResponse)) {
return super.equals(obj);
}
com.google.cloud.chronicle.v1.ListDataAccessLabelsResponse other =
(com.google.cloud.chronicle.v1.ListDataAccessLabelsResponse) obj;
if (!getDataAccessLabelsList().equals(other.getDataAccessLabelsList())) return false;
if (!getNextPageToken().equals(other.getNextPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getDataAccessLabelsCount() > 0) {
hash = (37 * hash) + DATA_ACCESS_LABELS_FIELD_NUMBER;
hash = (53 * hash) + getDataAccessLabelsList().hashCode();
}
hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getNextPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.chronicle.v1.ListDataAccessLabelsResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.chronicle.v1.ListDataAccessLabelsResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.chronicle.v1.ListDataAccessLabelsResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.chronicle.v1.ListDataAccessLabelsResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.chronicle.v1.ListDataAccessLabelsResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.chronicle.v1.ListDataAccessLabelsResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.chronicle.v1.ListDataAccessLabelsResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.chronicle.v1.ListDataAccessLabelsResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.chronicle.v1.ListDataAccessLabelsResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.chronicle.v1.ListDataAccessLabelsResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.chronicle.v1.ListDataAccessLabelsResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.chronicle.v1.ListDataAccessLabelsResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.chronicle.v1.ListDataAccessLabelsResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Response message for ListDataAccessLabels.
* </pre>
*
* Protobuf type {@code google.cloud.chronicle.v1.ListDataAccessLabelsResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.chronicle.v1.ListDataAccessLabelsResponse)
com.google.cloud.chronicle.v1.ListDataAccessLabelsResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.chronicle.v1.DataAccessProto
.internal_static_google_cloud_chronicle_v1_ListDataAccessLabelsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.chronicle.v1.DataAccessProto
.internal_static_google_cloud_chronicle_v1_ListDataAccessLabelsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.chronicle.v1.ListDataAccessLabelsResponse.class,
com.google.cloud.chronicle.v1.ListDataAccessLabelsResponse.Builder.class);
}
// Construct using com.google.cloud.chronicle.v1.ListDataAccessLabelsResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (dataAccessLabelsBuilder_ == null) {
dataAccessLabels_ = java.util.Collections.emptyList();
} else {
dataAccessLabels_ = null;
dataAccessLabelsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.chronicle.v1.DataAccessProto
.internal_static_google_cloud_chronicle_v1_ListDataAccessLabelsResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.chronicle.v1.ListDataAccessLabelsResponse getDefaultInstanceForType() {
return com.google.cloud.chronicle.v1.ListDataAccessLabelsResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.chronicle.v1.ListDataAccessLabelsResponse build() {
com.google.cloud.chronicle.v1.ListDataAccessLabelsResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.chronicle.v1.ListDataAccessLabelsResponse buildPartial() {
com.google.cloud.chronicle.v1.ListDataAccessLabelsResponse result =
new com.google.cloud.chronicle.v1.ListDataAccessLabelsResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.cloud.chronicle.v1.ListDataAccessLabelsResponse result) {
if (dataAccessLabelsBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
dataAccessLabels_ = java.util.Collections.unmodifiableList(dataAccessLabels_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.dataAccessLabels_ = dataAccessLabels_;
} else {
result.dataAccessLabels_ = dataAccessLabelsBuilder_.build();
}
}
private void buildPartial0(com.google.cloud.chronicle.v1.ListDataAccessLabelsResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.chronicle.v1.ListDataAccessLabelsResponse) {
return mergeFrom((com.google.cloud.chronicle.v1.ListDataAccessLabelsResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.chronicle.v1.ListDataAccessLabelsResponse other) {
if (other == com.google.cloud.chronicle.v1.ListDataAccessLabelsResponse.getDefaultInstance())
return this;
if (dataAccessLabelsBuilder_ == null) {
if (!other.dataAccessLabels_.isEmpty()) {
if (dataAccessLabels_.isEmpty()) {
dataAccessLabels_ = other.dataAccessLabels_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureDataAccessLabelsIsMutable();
dataAccessLabels_.addAll(other.dataAccessLabels_);
}
onChanged();
}
} else {
if (!other.dataAccessLabels_.isEmpty()) {
if (dataAccessLabelsBuilder_.isEmpty()) {
dataAccessLabelsBuilder_.dispose();
dataAccessLabelsBuilder_ = null;
dataAccessLabels_ = other.dataAccessLabels_;
bitField0_ = (bitField0_ & ~0x00000001);
dataAccessLabelsBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getDataAccessLabelsFieldBuilder()
: null;
} else {
dataAccessLabelsBuilder_.addAllMessages(other.dataAccessLabels_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.cloud.chronicle.v1.DataAccessLabel m =
input.readMessage(
com.google.cloud.chronicle.v1.DataAccessLabel.parser(), extensionRegistry);
if (dataAccessLabelsBuilder_ == null) {
ensureDataAccessLabelsIsMutable();
dataAccessLabels_.add(m);
} else {
dataAccessLabelsBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.cloud.chronicle.v1.DataAccessLabel> dataAccessLabels_ =
java.util.Collections.emptyList();
private void ensureDataAccessLabelsIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
dataAccessLabels_ =
new java.util.ArrayList<com.google.cloud.chronicle.v1.DataAccessLabel>(
dataAccessLabels_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.chronicle.v1.DataAccessLabel,
com.google.cloud.chronicle.v1.DataAccessLabel.Builder,
com.google.cloud.chronicle.v1.DataAccessLabelOrBuilder>
dataAccessLabelsBuilder_;
/**
*
*
* <pre>
* List of data access labels.
* </pre>
*
* <code>repeated .google.cloud.chronicle.v1.DataAccessLabel data_access_labels = 1;</code>
*/
public java.util.List<com.google.cloud.chronicle.v1.DataAccessLabel> getDataAccessLabelsList() {
if (dataAccessLabelsBuilder_ == null) {
return java.util.Collections.unmodifiableList(dataAccessLabels_);
} else {
return dataAccessLabelsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* List of data access labels.
* </pre>
*
* <code>repeated .google.cloud.chronicle.v1.DataAccessLabel data_access_labels = 1;</code>
*/
public int getDataAccessLabelsCount() {
if (dataAccessLabelsBuilder_ == null) {
return dataAccessLabels_.size();
} else {
return dataAccessLabelsBuilder_.getCount();
}
}
/**
*
*
* <pre>
* List of data access labels.
* </pre>
*
* <code>repeated .google.cloud.chronicle.v1.DataAccessLabel data_access_labels = 1;</code>
*/
public com.google.cloud.chronicle.v1.DataAccessLabel getDataAccessLabels(int index) {
if (dataAccessLabelsBuilder_ == null) {
return dataAccessLabels_.get(index);
} else {
return dataAccessLabelsBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* List of data access labels.
* </pre>
*
* <code>repeated .google.cloud.chronicle.v1.DataAccessLabel data_access_labels = 1;</code>
*/
public Builder setDataAccessLabels(
int index, com.google.cloud.chronicle.v1.DataAccessLabel value) {
if (dataAccessLabelsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureDataAccessLabelsIsMutable();
dataAccessLabels_.set(index, value);
onChanged();
} else {
dataAccessLabelsBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* List of data access labels.
* </pre>
*
* <code>repeated .google.cloud.chronicle.v1.DataAccessLabel data_access_labels = 1;</code>
*/
public Builder setDataAccessLabels(
int index, com.google.cloud.chronicle.v1.DataAccessLabel.Builder builderForValue) {
if (dataAccessLabelsBuilder_ == null) {
ensureDataAccessLabelsIsMutable();
dataAccessLabels_.set(index, builderForValue.build());
onChanged();
} else {
dataAccessLabelsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* List of data access labels.
* </pre>
*
* <code>repeated .google.cloud.chronicle.v1.DataAccessLabel data_access_labels = 1;</code>
*/
public Builder addDataAccessLabels(com.google.cloud.chronicle.v1.DataAccessLabel value) {
if (dataAccessLabelsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureDataAccessLabelsIsMutable();
dataAccessLabels_.add(value);
onChanged();
} else {
dataAccessLabelsBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* List of data access labels.
* </pre>
*
* <code>repeated .google.cloud.chronicle.v1.DataAccessLabel data_access_labels = 1;</code>
*/
public Builder addDataAccessLabels(
int index, com.google.cloud.chronicle.v1.DataAccessLabel value) {
if (dataAccessLabelsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureDataAccessLabelsIsMutable();
dataAccessLabels_.add(index, value);
onChanged();
} else {
dataAccessLabelsBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* List of data access labels.
* </pre>
*
* <code>repeated .google.cloud.chronicle.v1.DataAccessLabel data_access_labels = 1;</code>
*/
public Builder addDataAccessLabels(
com.google.cloud.chronicle.v1.DataAccessLabel.Builder builderForValue) {
if (dataAccessLabelsBuilder_ == null) {
ensureDataAccessLabelsIsMutable();
dataAccessLabels_.add(builderForValue.build());
onChanged();
} else {
dataAccessLabelsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* List of data access labels.
* </pre>
*
* <code>repeated .google.cloud.chronicle.v1.DataAccessLabel data_access_labels = 1;</code>
*/
public Builder addDataAccessLabels(
int index, com.google.cloud.chronicle.v1.DataAccessLabel.Builder builderForValue) {
if (dataAccessLabelsBuilder_ == null) {
ensureDataAccessLabelsIsMutable();
dataAccessLabels_.add(index, builderForValue.build());
onChanged();
} else {
dataAccessLabelsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* List of data access labels.
* </pre>
*
* <code>repeated .google.cloud.chronicle.v1.DataAccessLabel data_access_labels = 1;</code>
*/
public Builder addAllDataAccessLabels(
java.lang.Iterable<? extends com.google.cloud.chronicle.v1.DataAccessLabel> values) {
if (dataAccessLabelsBuilder_ == null) {
ensureDataAccessLabelsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, dataAccessLabels_);
onChanged();
} else {
dataAccessLabelsBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* List of data access labels.
* </pre>
*
* <code>repeated .google.cloud.chronicle.v1.DataAccessLabel data_access_labels = 1;</code>
*/
public Builder clearDataAccessLabels() {
if (dataAccessLabelsBuilder_ == null) {
dataAccessLabels_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
dataAccessLabelsBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* List of data access labels.
* </pre>
*
* <code>repeated .google.cloud.chronicle.v1.DataAccessLabel data_access_labels = 1;</code>
*/
public Builder removeDataAccessLabels(int index) {
if (dataAccessLabelsBuilder_ == null) {
ensureDataAccessLabelsIsMutable();
dataAccessLabels_.remove(index);
onChanged();
} else {
dataAccessLabelsBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* List of data access labels.
* </pre>
*
* <code>repeated .google.cloud.chronicle.v1.DataAccessLabel data_access_labels = 1;</code>
*/
public com.google.cloud.chronicle.v1.DataAccessLabel.Builder getDataAccessLabelsBuilder(
int index) {
return getDataAccessLabelsFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* List of data access labels.
* </pre>
*
* <code>repeated .google.cloud.chronicle.v1.DataAccessLabel data_access_labels = 1;</code>
*/
public com.google.cloud.chronicle.v1.DataAccessLabelOrBuilder getDataAccessLabelsOrBuilder(
int index) {
if (dataAccessLabelsBuilder_ == null) {
return dataAccessLabels_.get(index);
} else {
return dataAccessLabelsBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* List of data access labels.
* </pre>
*
* <code>repeated .google.cloud.chronicle.v1.DataAccessLabel data_access_labels = 1;</code>
*/
public java.util.List<? extends com.google.cloud.chronicle.v1.DataAccessLabelOrBuilder>
getDataAccessLabelsOrBuilderList() {
if (dataAccessLabelsBuilder_ != null) {
return dataAccessLabelsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(dataAccessLabels_);
}
}
/**
*
*
* <pre>
* List of data access labels.
* </pre>
*
* <code>repeated .google.cloud.chronicle.v1.DataAccessLabel data_access_labels = 1;</code>
*/
public com.google.cloud.chronicle.v1.DataAccessLabel.Builder addDataAccessLabelsBuilder() {
return getDataAccessLabelsFieldBuilder()
.addBuilder(com.google.cloud.chronicle.v1.DataAccessLabel.getDefaultInstance());
}
/**
*
*
* <pre>
* List of data access labels.
* </pre>
*
* <code>repeated .google.cloud.chronicle.v1.DataAccessLabel data_access_labels = 1;</code>
*/
public com.google.cloud.chronicle.v1.DataAccessLabel.Builder addDataAccessLabelsBuilder(
int index) {
return getDataAccessLabelsFieldBuilder()
.addBuilder(index, com.google.cloud.chronicle.v1.DataAccessLabel.getDefaultInstance());
}
/**
*
*
* <pre>
* List of data access labels.
* </pre>
*
* <code>repeated .google.cloud.chronicle.v1.DataAccessLabel data_access_labels = 1;</code>
*/
public java.util.List<com.google.cloud.chronicle.v1.DataAccessLabel.Builder>
getDataAccessLabelsBuilderList() {
return getDataAccessLabelsFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.chronicle.v1.DataAccessLabel,
com.google.cloud.chronicle.v1.DataAccessLabel.Builder,
com.google.cloud.chronicle.v1.DataAccessLabelOrBuilder>
getDataAccessLabelsFieldBuilder() {
if (dataAccessLabelsBuilder_ == null) {
dataAccessLabelsBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.chronicle.v1.DataAccessLabel,
com.google.cloud.chronicle.v1.DataAccessLabel.Builder,
com.google.cloud.chronicle.v1.DataAccessLabelOrBuilder>(
dataAccessLabels_,
((bitField0_ & 0x00000001) != 0),
getParentForChildren(),
isClean());
dataAccessLabels_ = null;
}
return dataAccessLabelsBuilder_;
}
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.chronicle.v1.ListDataAccessLabelsResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.chronicle.v1.ListDataAccessLabelsResponse)
private static final com.google.cloud.chronicle.v1.ListDataAccessLabelsResponse DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.chronicle.v1.ListDataAccessLabelsResponse();
}
public static com.google.cloud.chronicle.v1.ListDataAccessLabelsResponse getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ListDataAccessLabelsResponse> PARSER =
new com.google.protobuf.AbstractParser<ListDataAccessLabelsResponse>() {
@java.lang.Override
public ListDataAccessLabelsResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ListDataAccessLabelsResponse> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListDataAccessLabelsResponse> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.chronicle.v1.ListDataAccessLabelsResponse getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
apache/iotdb | 38,058 | iotdb-core/node-commons/src/main/java/org/apache/iotdb/commons/pipe/receiver/IoTDBFileReceiver.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.iotdb.commons.pipe.receiver;
import org.apache.iotdb.common.rpc.thrift.TSStatus;
import org.apache.iotdb.commons.conf.CommonDescriptor;
import org.apache.iotdb.commons.exception.IllegalPathException;
import org.apache.iotdb.commons.pipe.config.PipeConfig;
import org.apache.iotdb.commons.pipe.config.constant.PipeSinkConstant;
import org.apache.iotdb.commons.pipe.resource.log.PipeLogger;
import org.apache.iotdb.commons.pipe.sink.payload.thrift.common.PipeTransferHandshakeConstant;
import org.apache.iotdb.commons.pipe.sink.payload.thrift.request.IoTDBSinkRequestVersion;
import org.apache.iotdb.commons.pipe.sink.payload.thrift.request.PipeRequestType;
import org.apache.iotdb.commons.pipe.sink.payload.thrift.request.PipeTransferFilePieceReq;
import org.apache.iotdb.commons.pipe.sink.payload.thrift.request.PipeTransferFileSealReqV1;
import org.apache.iotdb.commons.pipe.sink.payload.thrift.request.PipeTransferFileSealReqV2;
import org.apache.iotdb.commons.pipe.sink.payload.thrift.request.PipeTransferHandshakeV1Req;
import org.apache.iotdb.commons.pipe.sink.payload.thrift.request.PipeTransferHandshakeV2Req;
import org.apache.iotdb.commons.pipe.sink.payload.thrift.response.PipeTransferFilePieceResp;
import org.apache.iotdb.commons.utils.RetryUtils;
import org.apache.iotdb.commons.utils.StatusUtils;
import org.apache.iotdb.rpc.RpcUtils;
import org.apache.iotdb.rpc.TSStatusCode;
import org.apache.iotdb.service.rpc.thrift.TPipeTransferResp;
import org.apache.commons.io.FileUtils;
import org.apache.tsfile.common.constant.TsFileConstant;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.util.List;
import java.util.Objects;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.Collectors;
import static org.apache.iotdb.commons.pipe.config.constant.PipeSinkConstant.CONNECTOR_EXCEPTION_DATA_CONVERT_ON_TYPE_MISMATCH_DEFAULT_VALUE;
import static org.apache.iotdb.commons.pipe.config.constant.PipeSinkConstant.CONNECTOR_IOTDB_PASSWORD_DEFAULT_VALUE;
import static org.apache.iotdb.commons.pipe.config.constant.PipeSinkConstant.CONNECTOR_IOTDB_USER_DEFAULT_VALUE;
/**
* {@link IoTDBFileReceiver} is the parent class of receiver on both configNode and DataNode,
* handling all the logic of parallel file receiving.
*/
public abstract class IoTDBFileReceiver implements IoTDBReceiver {
  private static final Logger LOGGER = LoggerFactory.getLogger(IoTDBFileReceiver.class);
  // Receiver-local directory (named after the receiver id) that buffers incoming file pieces.
  protected final AtomicReference<File> receiverFileDirWithIdSuffix = new AtomicReference<>();
  // Used to generate transfer id, which is used to identify a receiver thread.
  private static final AtomicLong RECEIVER_ID_GENERATOR = new AtomicLong(0);
  protected final AtomicLong receiverId = new AtomicLong(0);
  // Used to restore the original thread name when the receiver is closed.
  private String originalThreadName;
  // Sender credentials; overwritten with the values carried by the v2 handshake request.
  protected long userId = -1;
  protected String username = CONNECTOR_IOTDB_USER_DEFAULT_VALUE;
  protected String cliHostname = "";
  protected String password = CONNECTOR_IOTDB_PASSWORD_DEFAULT_VALUE;
  // MIN_VALUE forces a login on the first handshake (see shouldLogin / loginIfNecessary).
  protected long lastSuccessfulLoginTime = Long.MIN_VALUE;
  private static final PipeConfig PIPE_CONFIG = PipeConfig.getInstance();
  // The file currently being assembled from transferred pieces and its writer;
  // both are null when no transfer is in progress.
  private File writingFile;
  private RandomAccessFile writingFileWriter;
  protected boolean shouldConvertDataTypeOnTypeMismatch =
      CONNECTOR_EXCEPTION_DATA_CONVERT_ON_TYPE_MISMATCH_DEFAULT_VALUE;
  // Used to determine current strategy is sync or async
  protected final AtomicBoolean isUsingAsyncLoadTsFileStrategy = new AtomicBoolean(false);
  // Whether received tsfiles are validated before load (negotiated during handshake).
  protected final AtomicBoolean validateTsFile = new AtomicBoolean(true);
  // Whether loads triggered by this receiver are marked as pipe-originated requests.
  protected final AtomicBoolean shouldMarkAsPipeRequest = new AtomicBoolean(true);
  /** All subclasses of this receiver speak protocol version 1. */
  @Override
  public IoTDBSinkRequestVersion getVersion() {
    return IoTDBSinkRequestVersion.VERSION_1;
  }
  /**
   * Handles a v1 handshake: validates timestamp-precision compatibility with the sender, assigns a
   * fresh receiver id (renaming the current thread accordingly), deletes any leftover buffer
   * directory from a previous handshake, and creates a new receiver file directory.
   *
   * @param req handshake request carrying the sender's timestamp precision
   * @return SUCCESS_STATUS on success; PIPE_HANDSHAKE_ERROR on precision mismatch or directory
   *     creation failure; DISK_SPACE_INSUFFICIENT when no base directory is available
   */
  protected TPipeTransferResp handleTransferHandshakeV1(final PipeTransferHandshakeV1Req req) {
    if (!CommonDescriptor.getInstance()
        .getConfig()
        .getTimestampPrecision()
        .equals(req.getTimestampPrecision())) {
      final TSStatus status =
          RpcUtils.getStatus(
              TSStatusCode.PIPE_HANDSHAKE_ERROR,
              String.format(
                  "IoTDB receiver's timestamp precision %s, "
                      + "connector's timestamp precision %s. Validation fails.",
                  CommonDescriptor.getInstance().getConfig().getTimestampPrecision(),
                  req.getTimestampPrecision()));
      PipeLogger.log(LOGGER::warn, "Handshake failed, response status = %s.", status);
      return new TPipeTransferResp(status);
    }
    // Remember the thread's original name once so handleExit can restore it.
    if (originalThreadName == null) {
      originalThreadName = Thread.currentThread().getName();
    }
    receiverId.set(RECEIVER_ID_GENERATOR.incrementAndGet());
    Thread.currentThread()
        .setName(
            String.format(
                "Pipe-Receiver-%s-%s:%s", receiverId.get(), getSenderHost(), getSenderPort()));
    // Clear the original receiver file dir if exists
    if (receiverFileDirWithIdSuffix.get() != null) {
      if (receiverFileDirWithIdSuffix.get().exists()) {
        try {
          RetryUtils.retryOnException(
              () -> {
                FileUtils.deleteDirectory(receiverFileDirWithIdSuffix.get());
                return null;
              });
          LOGGER.info(
              "Receiver id = {}: Original receiver file dir {} was deleted.",
              receiverId.get(),
              receiverFileDirWithIdSuffix.get().getPath());
        } catch (Exception e) {
          PipeLogger.log(
              LOGGER::warn,
              "Receiver id = %s: Failed to delete original receiver file dir %s, because %s.",
              receiverId.get(),
              receiverFileDirWithIdSuffix.get().getPath(),
              e.getMessage(),
              e);
        }
      } else {
        if (LOGGER.isDebugEnabled()) {
          LOGGER.debug(
              "Receiver id = {}: Original receiver file dir {} is not existed. No need to delete.",
              receiverId.get(),
              receiverFileDirWithIdSuffix.get().getPath());
        }
      }
      receiverFileDirWithIdSuffix.set(null);
    } else {
      if (LOGGER.isDebugEnabled()) {
        LOGGER.debug(
            "Receiver id = {}: Current receiver file dir is null. No need to delete.",
            receiverId.get());
      }
    }
    String receiverFileBaseDir;
    File newReceiverDir = null;
    // Try at most twice: a failed attempt marks the base dir abnormal so the second attempt
    // may be routed to a different folder.
    for (int retryTimes = 0; retryTimes <= 1; retryTimes++) {
      try {
        receiverFileBaseDir = getReceiverFileBaseDir();
        if (Objects.isNull(receiverFileBaseDir)) {
          PipeLogger.log(
              LOGGER::warn,
              "Receiver id = %s: Failed to init pipe receiver file folder manager because all disks of folders are full.",
              receiverId.get());
          return new TPipeTransferResp(StatusUtils.getStatus(TSStatusCode.DISK_SPACE_INSUFFICIENT));
        }
      } catch (Exception e) {
        PipeLogger.log(
            LOGGER::warn,
            "Receiver id = %s: Failed to create pipe receiver file folder because all disks of folders are full.",
            receiverId.get(),
            e);
        return new TPipeTransferResp(StatusUtils.getStatus(TSStatusCode.DISK_SPACE_INSUFFICIENT));
      }
      try {
        // Create a new receiver file dir
        newReceiverDir = new File(receiverFileBaseDir, Long.toString(receiverId.get()));
        if (newReceiverDir.exists() || newReceiverDir.mkdirs()) {
          receiverFileDirWithIdSuffix.set(newReceiverDir);
          LOGGER.info(
              "Receiver id = {}: Handshake successfully! Sender's host = {}, port = {}. Receiver's file dir = {}.",
              receiverId.get(),
              getSenderHost(),
              getSenderPort(),
              newReceiverDir.getPath());
          return new TPipeTransferResp(RpcUtils.SUCCESS_STATUS);
        }
      } catch (Exception ignored) {
      }
      PipeLogger.log(
          LOGGER::warn,
          "Receiver id = %s: Failed to create receiver file dir %s.",
          receiverId.get(),
          Objects.nonNull(newReceiverDir) ? newReceiverDir.getPath() : null);
      markFileBaseDirStateAbnormal(receiverFileBaseDir);
    }
    return new TPipeTransferResp(
        RpcUtils.getStatus(
            TSStatusCode.PIPE_HANDSHAKE_ERROR,
            String.format("Failed to create receiver file dir %s.", newReceiverDir.getPath())));
  }
  /** Returns a writable base directory for buffering received files, or null if all disks are full. */
  protected abstract String getReceiverFileBaseDir() throws Exception;

  /** Marks the given base directory as abnormal so subsequent attempts may pick another one. */
  protected abstract void markFileBaseDirStateAbnormal(String dir);

  /** Returns the sender's host, used for thread naming and logging. */
  protected abstract String getSenderHost();

  /** Returns the sender's port, used for thread naming and logging. */
  protected abstract String getSenderPort();
  /**
   * Handles a v2 handshake: rejects same-cluster transfers and requests missing mandatory params,
   * captures the sender's credentials and transfer options (type conversion, load strategy, tsfile
   * validation, pipe-request marking), performs a login check, and finally delegates the common
   * directory/precision setup to {@link #handleTransferHandshakeV1}.
   *
   * @param req handshake request whose params map carries clusterId, timestampPrecision,
   *     credentials and optional transfer flags
   * @return the v1 handshake response on success, or a PIPE_HANDSHAKE_ERROR / login-failure status
   * @throws IOException if constructing the fake v1 request fails
   */
  protected TPipeTransferResp handleTransferHandshakeV2(final PipeTransferHandshakeV2Req req)
      throws IOException {
    // Reject to handshake if the receiver can not take clusterId from config node.
    final String clusterIdFromConfigNode = getClusterId();
    if (clusterIdFromConfigNode == null) {
      final TSStatus status =
          RpcUtils.getStatus(
              TSStatusCode.PIPE_HANDSHAKE_ERROR,
              "Receiver can not get clusterId from config node.");
      PipeLogger.log(
          LOGGER::warn,
          "Receiver id = %s: Handshake failed, response status = %s.",
          receiverId.get(),
          status);
      return new TPipeTransferResp(status);
    }
    // Reject to handshake if the request does not contain sender's clusterId.
    final String clusterIdFromHandshakeRequest =
        req.getParams().get(PipeTransferHandshakeConstant.HANDSHAKE_KEY_CLUSTER_ID);
    if (clusterIdFromHandshakeRequest == null) {
      final TSStatus status =
          RpcUtils.getStatus(
              TSStatusCode.PIPE_HANDSHAKE_ERROR, "Handshake request does not contain clusterId.");
      PipeLogger.log(
          LOGGER::warn,
          "Receiver id = %s: Handshake failed, response status = %s.",
          receiverId.get(),
          status);
      return new TPipeTransferResp(status);
    }
    // Reject to handshake if the receiver and sender are from the same cluster.
    if (Objects.equals(clusterIdFromConfigNode, clusterIdFromHandshakeRequest)) {
      final TSStatus status =
          RpcUtils.getStatus(
              TSStatusCode.PIPE_HANDSHAKE_ERROR,
              String.format(
                  "Receiver and sender are from the same cluster %s.",
                  clusterIdFromHandshakeRequest));
      PipeLogger.log(
          LOGGER::warn,
          "Receiver id = %s: Handshake failed, response status = %s.",
          receiverId.get(),
          status);
      return new TPipeTransferResp(status);
    }
    // Reject to handshake if the request does not contain timestampPrecision.
    final String timestampPrecision =
        req.getParams().get(PipeTransferHandshakeConstant.HANDSHAKE_KEY_TIME_PRECISION);
    if (timestampPrecision == null) {
      final TSStatus status =
          RpcUtils.getStatus(
              TSStatusCode.PIPE_HANDSHAKE_ERROR,
              "Handshake request does not contain timestampPrecision.");
      PipeLogger.log(
          LOGGER::warn,
          "Receiver id = %s: Handshake failed, response status = %s.",
          receiverId.get(),
          status);
      return new TPipeTransferResp(status);
    }
    // Credentials are optional in the request; absent entries keep the configured defaults.
    final String userIdString =
        req.getParams().get(PipeTransferHandshakeConstant.HANDSHAKE_KEY_USER_ID);
    if (userIdString != null) {
      userId = Long.parseLong(userIdString);
    }
    final String usernameString =
        req.getParams().get(PipeTransferHandshakeConstant.HANDSHAKE_KEY_USERNAME);
    if (usernameString != null) {
      username = usernameString;
    }
    final String cliHostnameString =
        req.getParams().get(PipeTransferHandshakeConstant.HANDSHAKE_KEY_CLI_HOSTNAME);
    if (cliHostnameString != null) {
      cliHostname = cliHostnameString;
    }
    final String passwordString =
        req.getParams().get(PipeTransferHandshakeConstant.HANDSHAKE_KEY_PASSWORD);
    if (passwordString != null) {
      password = passwordString;
    }
    final TSStatus status = loginIfNecessary();
    if (status.code != TSStatusCode.SUCCESS_STATUS.getStatusCode()) {
      PipeLogger.log(
          LOGGER::warn,
          "Receiver id = %s: Handshake failed because login failed, response status = %s.",
          receiverId.get(),
          status);
      return new TPipeTransferResp(status);
    } else {
      LOGGER.info("Receiver id = {}: User {} login successfully.", receiverId.get(), username);
    }
    final String shouldConvertDataTypeOnTypeMismatchString =
        req.getParams().get(PipeTransferHandshakeConstant.HANDSHAKE_KEY_CONVERT_ON_TYPE_MISMATCH);
    if (shouldConvertDataTypeOnTypeMismatchString != null) {
      shouldConvertDataTypeOnTypeMismatch =
          Boolean.parseBoolean(shouldConvertDataTypeOnTypeMismatchString);
    }
    final String loadTsFileStrategyString =
        req.getParams().get(PipeTransferHandshakeConstant.HANDSHAKE_KEY_LOAD_TSFILE_STRATEGY);
    if (loadTsFileStrategyString != null) {
      isUsingAsyncLoadTsFileStrategy.set(
          Objects.equals(
              PipeSinkConstant.CONNECTOR_LOAD_TSFILE_STRATEGY_ASYNC_VALUE,
              loadTsFileStrategyString));
    }
    validateTsFile.set(
        Boolean.parseBoolean(
            req.getParams()
                .getOrDefault(
                    PipeTransferHandshakeConstant.HANDSHAKE_KEY_VALIDATE_TSFILE, "true")));
    shouldMarkAsPipeRequest.set(
        Boolean.parseBoolean(
            req.getParams()
                .getOrDefault(
                    PipeTransferHandshakeConstant.HANDSHAKE_KEY_MARK_AS_PIPE_REQUEST, "true")));
    // Handle the handshake request as a v1 request.
    // Here we construct a fake "dataNode" request to valid from v1 validation logic, though
    // it may not require the actual type of the v1 request.
    return handleTransferHandshakeV1(
        new PipeTransferHandshakeV1Req() {
          @Override
          protected PipeRequestType getPlanType() {
            return PipeRequestType.HANDSHAKE_DATANODE_V1;
          }
        }.convertToTPipeTransferReq(timestampPrecision));
  }
protected abstract String getClusterId();
protected boolean shouldLogin() {
final long pipeReceiverLoginPeriodicVerificationIntervalMs =
PIPE_CONFIG.getPipeReceiverLoginPeriodicVerificationIntervalMs();
return pipeReceiverLoginPeriodicVerificationIntervalMs >= 0
&& lastSuccessfulLoginTime
< System.currentTimeMillis() - pipeReceiverLoginPeriodicVerificationIntervalMs;
}
protected TSStatus loginIfNecessary() {
if (shouldLogin()) {
final TSStatus permissionCheckStatus = login();
if (permissionCheckStatus.getCode() != TSStatusCode.SUCCESS_STATUS.getStatusCode()) {
PipeLogger.log(
LOGGER::warn,
"Receiver id = %s: Failed to login, username = %s, response = %s.",
receiverId.get(),
username,
permissionCheckStatus);
return permissionCheckStatus;
} else {
lastSuccessfulLoginTime = System.currentTimeMillis();
}
}
return StatusUtils.OK;
}
protected abstract TSStatus login();
  /**
   * Appends one piece of a transferred file to the current writing file, creating/switching the
   * writing file when the piece belongs to a new file name. When the sender's offset does not
   * match the local file length, responds with PIPE_TRANSFER_FILE_OFFSET_RESET so the sender can
   * rewind its reader.
   *
   * @param req piece request carrying file name, start offset and payload bytes
   * @param isRequestThroughAirGap air-gap senders always resend from the file start, so the local
   *     file is truncated to the requested offset first
   * @param isSingleFile whether this transfer consists of a single file (affects cleanup on switch)
   * @return SUCCESS with the new local length, an offset-reset request, or PIPE_TRANSFER_FILE_ERROR
   */
  protected final TPipeTransferResp handleTransferFilePiece(
      final PipeTransferFilePieceReq req,
      final boolean isRequestThroughAirGap,
      final boolean isSingleFile) {
    try {
      updateWritingFileIfNeeded(req.getFileName(), isSingleFile);
      // If the request is through air gap, the sender will resend the file piece from the beginning
      // of the file. So the receiver should reset the offset of the writing file to the beginning
      // of the file.
      if (isRequestThroughAirGap && req.getStartWritingOffset() < writingFileWriter.length()) {
        writingFileWriter.setLength(req.getStartWritingOffset());
      }
      if (!isWritingFileOffsetCorrect(req.getStartWritingOffset())) {
        if (!writingFile.getName().endsWith(TsFileConstant.TSFILE_SUFFIX)) {
          // If the file is a tsFile, then the content will not be changed for a specific filename.
          // However, for other files (mod, snapshot, etc.) the content varies for the same name in
          // different times, then we must rewrite the file to apply the newest version.
          writingFileWriter.setLength(0);
        }
        final TSStatus status =
            RpcUtils.getStatus(
                TSStatusCode.PIPE_TRANSFER_FILE_OFFSET_RESET,
                String.format(
                    "Request sender to reset file reader's offset from %s to %s.",
                    req.getStartWritingOffset(), writingFileWriter.length()));
        PipeLogger.log(
            LOGGER::warn,
            "Receiver id = %s: File offset reset requested by receiver, response status = %s.",
            receiverId.get(),
            status);
        return PipeTransferFilePieceResp.toTPipeTransferResp(status, writingFileWriter.length());
      }
      writingFileWriter.write(req.getFilePiece());
      return PipeTransferFilePieceResp.toTPipeTransferResp(
          RpcUtils.SUCCESS_STATUS, writingFileWriter.length());
    } catch (final Exception e) {
      PipeLogger.log(
          LOGGER::warn,
          e,
          "Receiver id = %s: Failed to write file piece from req %s.",
          receiverId.get(),
          req);
      final TSStatus status =
          RpcUtils.getStatus(
              TSStatusCode.PIPE_TRANSFER_FILE_ERROR,
              String.format("Failed to write file piece, because %s", e.getMessage()));
      try {
        return PipeTransferFilePieceResp.toTPipeTransferResp(
            status, PipeTransferFilePieceResp.ERROR_END_OFFSET);
      } catch (Exception ex) {
        // Even building the error response failed; fall back to a response without an offset.
        return PipeTransferFilePieceResp.toTPipeTransferResp(status);
      }
    }
  }
  /**
   * Ensures {@link #writingFile}/{@link #writingFileWriter} point at the file named
   * {@code fileName}, closing (and for single-file transfers, deleting) the previous writing file
   * and opening a fresh {@link RandomAccessFile} when the name changed or the file vanished.
   *
   * @param fileName name of the file the next pieces belong to
   * @param isSingleFile single-file transfers delete the superseded file immediately; multi-file
   *     transfers keep it until the seal request cleans the directory
   * @throws IOException if the new writing file cannot be opened
   */
  protected final void updateWritingFileIfNeeded(final String fileName, final boolean isSingleFile)
      throws IOException {
    if (isFileExistedAndNameCorrect(fileName)) {
      return;
    }
    LOGGER.info(
        "Receiver id = {}: Writing file {} is not existed or name is not correct, try to create it. "
            + "Current writing file is {}.",
        receiverId.get(),
        fileName,
        writingFile == null ? "null" : writingFile.getPath());
    closeCurrentWritingFileWriter(!isSingleFile);
    // If there are multiple files we can not delete the current file
    // instead they will be deleted after seal request
    if (writingFile != null && isSingleFile) {
      deleteCurrentWritingFile();
    }
    // Make sure receiver file dir exists
    // This may be useless, because receiver file dir is created when handshake. just in case.
    if (!receiverFileDirWithIdSuffix.get().exists()) {
      if (receiverFileDirWithIdSuffix.get().mkdirs()) {
        LOGGER.info(
            "Receiver id = {}: Receiver file dir {} was created.",
            receiverId.get(),
            receiverFileDirWithIdSuffix.get().getPath());
      } else {
        LOGGER.error(
            "Receiver id = {}: Failed to create receiver file dir {}.",
            receiverId.get(),
            receiverFileDirWithIdSuffix.get().getPath());
      }
    }
    writingFile = new File(receiverFileDirWithIdSuffix.get(), fileName);
    writingFileWriter = new RandomAccessFile(writingFile, "rw");
    LOGGER.info(
        "Receiver id = {}: Writing file {} was created. Ready to write file pieces.",
        receiverId.get(),
        writingFile.getPath());
  }
private boolean isFileExistedAndNameCorrect(final String fileName) {
return writingFile != null && writingFile.exists() && writingFile.getName().equals(fileName);
}
  /**
   * Closes {@link #writingFileWriter} (optionally fsync-ing first so buffered pieces reach disk)
   * and nulls the reference. A null writer is a no-op.
   *
   * @param fsyncBeforeClose whether to force the OS to flush the file descriptor before closing;
   *     only honored when the receiver-level fsync config is enabled
   */
  private void closeCurrentWritingFileWriter(final boolean fsyncBeforeClose) {
    if (writingFileWriter != null) {
      try {
        // fsync must happen before close() — the descriptor is invalid afterwards.
        if (PIPE_CONFIG.getPipeFileReceiverFsyncEnabled() && fsyncBeforeClose) {
          writingFileWriter.getFD().sync();
        }
        writingFileWriter.close();
        LOGGER.info(
            "Receiver id = {}: Current writing file writer {} was closed, length {}.",
            receiverId.get(),
            writingFile == null ? "null" : writingFile.getPath(),
            writingFile == null ? 0 : writingFile.length());
      } catch (final Exception e) {
        PipeLogger.log(
            LOGGER::warn,
            "Receiver id = %s: Failed to close current writing file writer %s, because %s.",
            receiverId.get(),
            writingFile == null ? "null" : writingFile.getPath(),
            e.getMessage(),
            e);
      }
      writingFileWriter = null;
    } else {
      if (LOGGER.isDebugEnabled()) {
        LOGGER.debug(
            "Receiver id = {}: Current writing file writer is null. No need to close.",
            receiverId.get());
      }
    }
  }
private void deleteCurrentWritingFile() {
if (writingFile != null) {
deleteFile(writingFile);
writingFile = null;
} else {
if (LOGGER.isDebugEnabled()) {
LOGGER.debug(
"Receiver id = {}: Current writing file is null. No need to delete.", receiverId.get());
}
}
}
private void deleteFile(final File file) {
if (file.exists()) {
try {
RetryUtils.retryOnException(() -> FileUtils.delete(file));
LOGGER.info(
"Receiver id = {}: Original writing file {} was deleted.",
receiverId.get(),
file.getPath());
} catch (final Exception e) {
PipeLogger.log(
LOGGER::warn,
"Receiver id = %s: Failed to delete original writing file %s, because %s.",
receiverId.get(),
file.getPath(),
e.getMessage(),
e);
}
} else {
if (LOGGER.isDebugEnabled()) {
LOGGER.debug(
"Receiver id = {}: Original file {} is not existed. No need to delete.",
receiverId.get(),
file.getPath());
}
}
}
private boolean isWritingFileOffsetCorrect(final long offset) throws IOException {
final boolean offsetCorrect = writingFileWriter.length() == offset;
if (!offsetCorrect) {
PipeLogger.log(
LOGGER::warn,
"Receiver id = %s: Writing file %s's offset is %s, but request sender's offset is %s.",
receiverId.get(),
writingFile.getPath(),
writingFileWriter.length(),
offset);
}
return offsetCorrect;
}
  /**
   * Seals a single-file transfer: verifies the writing file's name and length, fsyncs and closes
   * the writer, then hands the sealed file to the subclass loader ({@link #loadFileV1}). On any
   * failure the writing file is deleted so the sender retransmits all pieces.
   *
   * @param req seal request carrying the expected file name and length
   * @return the loader's status on success, or PIPE_TRANSFER_FILE_ERROR
   */
  protected final TPipeTransferResp handleTransferFileSealV1(final PipeTransferFileSealReqV1 req) {
    try {
      if (!isWritingFileAvailable()) {
        final TSStatus status =
            RpcUtils.getStatus(
                TSStatusCode.PIPE_TRANSFER_FILE_ERROR,
                String.format(
                    "Failed to seal file, because writing file %s is not available.", writingFile));
        PipeLogger.log(LOGGER::warn, status.getMessage());
        return new TPipeTransferResp(status);
      }
      final TPipeTransferResp resp = checkFinalFileSeal(req.getFileName(), req.getFileLength());
      if (Objects.nonNull(resp)) {
        return resp;
      }
      final String fileAbsolutePath = writingFile.getAbsolutePath();
      // Sync here is necessary to ensure that the data is written to the disk. Or data region may
      // load the file before the data is written to the disk and cause unexpected behavior after
      // system restart. (e.g., empty file in data region's data directory)
      if (PIPE_CONFIG.getPipeFileReceiverFsyncEnabled()) {
        writingFileWriter.getFD().sync();
      }
      // 1. The writing file writer must be closed, otherwise it may cause concurrent errors during
      // the process of loading tsfile when parsing tsfile.
      //
      // 2. The writing file must be set to null, otherwise if the next passed tsfile has the same
      // name as the current tsfile, it will bypass the judgment logic of
      // updateWritingFileIfNeeded#isFileExistedAndNameCorrect, and continue to write to the already
      // loaded file. Since the writing file writer has already been closed, it will throw a Stream
      // Close exception.
      writingFileWriter.close();
      writingFileWriter = null;
      // writingFile will be deleted after load if no exception occurs
      writingFile = null;
      final TSStatus status = loadFileV1(req, fileAbsolutePath);
      if (status.getCode() == TSStatusCode.SUCCESS_STATUS.getStatusCode()) {
        LOGGER.info(
            "Receiver id = {}: Seal file {} successfully.", receiverId.get(), fileAbsolutePath);
      } else {
        PipeLogger.log(
            LOGGER::warn,
            "Receiver id = %s: Failed to seal file %s, because %s.",
            receiverId.get(),
            fileAbsolutePath,
            status.getMessage());
      }
      return new TPipeTransferResp(status);
    } catch (final Exception e) {
      PipeLogger.log(
          LOGGER::warn,
          "Receiver id = %s: Failed to seal file %s from req %s.",
          receiverId.get(),
          writingFile,
          req,
          e);
      return new TPipeTransferResp(
          RpcUtils.getStatus(
              TSStatusCode.PIPE_TRANSFER_FILE_ERROR,
              String.format("Failed to seal file %s because %s", writingFile, e.getMessage())));
    } finally {
      // If the writing file is not sealed successfully, the writing file will be deleted.
      // All pieces of the writing file and its mod (if exists) should be retransmitted by the
      // sender.
      closeCurrentWritingFileWriter(false);
      deleteCurrentWritingFile();
    }
  }
  // Support null in fileName list, which means that this file is optional and is currently absent
  /**
   * Seals a multi-file transfer: validates every named file (the last one must be the current
   * writing file, earlier ones must already exist on disk with the declared length), fsyncs and
   * closes the writer, then hands all sealed paths to the subclass loader ({@link #loadFileV2}).
   * On failure the whole receiver directory is cleaned so the sender retransmits everything.
   *
   * @param req seal request carrying parallel lists of file names (nullable entries) and lengths
   * @return the loader's status on success, or PIPE_TRANSFER_FILE_ERROR
   */
  protected final TPipeTransferResp handleTransferFileSealV2(final PipeTransferFileSealReqV2 req) {
    final List<String> fileNames = req.getFileNames();
    final List<File> files =
        fileNames.stream()
            .map(
                fileName ->
                    Objects.nonNull(fileName)
                        ? new File(receiverFileDirWithIdSuffix.get(), fileName)
                        : null)
            .collect(Collectors.toList());
    try {
      if (!isWritingFileAvailable()) {
        final TSStatus status =
            RpcUtils.getStatus(
                TSStatusCode.PIPE_TRANSFER_FILE_ERROR,
                String.format(
                    "Failed to seal file %s, because writing file %s is not available.",
                    req.getFileNames(), writingFile));
        PipeLogger.log(LOGGER::warn, status.getMessage());
        return new TPipeTransferResp(status);
      }
      // Any of the transferred files cannot be empty, or else the receiver
      // will not sense this file because no pieces are sent
      for (int i = 0; i < fileNames.size(); ++i) {
        final String fileName = fileNames.get(i);
        if (Objects.nonNull(fileName)) {
          final TPipeTransferResp resp =
              i == fileNames.size() - 1
                  ? checkFinalFileSeal(fileName, req.getFileLengths().get(i))
                  : checkNonFinalFileSeal(files.get(i), fileName, req.getFileLengths().get(i));
          if (Objects.nonNull(resp)) {
            return resp;
          }
        }
      }
      // Sync here is necessary to ensure that the data is written to the disk. Or data region may
      // load the file before the data is written to the disk and cause unexpected behavior after
      // system restart. (e.g., empty file in data region's data directory)
      if (PIPE_CONFIG.getPipeFileReceiverFsyncEnabled()) {
        writingFileWriter.getFD().sync();
      }
      // 1. The writing file writer must be closed, otherwise it may cause concurrent errors during
      // the process of loading tsfile when parsing tsfile.
      //
      // 2. The writing file must be set to null, otherwise if the next passed tsfile has the same
      // name as the current tsfile, it will bypass the judgment logic of
      // updateWritingFileIfNeeded#isFileExistedAndNameCorrect, and continue to write to the already
      // loaded file. Since the writing file writer has already been closed, it will throw a Stream
      // Close exception.
      writingFileWriter.close();
      writingFileWriter = null;
      // WritingFile will be deleted after load if no exception occurs
      writingFile = null;
      final List<String> fileAbsolutePaths =
          files.stream()
              .map(file -> Objects.nonNull(file) ? file.getAbsolutePath() : null)
              .collect(Collectors.toList());
      final TSStatus status = loadFileV2(req, fileAbsolutePaths);
      if (status.getCode() == TSStatusCode.SUCCESS_STATUS.getStatusCode()) {
        LOGGER.info(
            "Receiver id = {}: Seal file {} successfully.", receiverId.get(), fileAbsolutePaths);
      } else {
        PipeLogger.log(
            LOGGER::warn,
            "Receiver id = %s: Failed to seal file %s, status is %s.",
            receiverId.get(),
            fileAbsolutePaths,
            status);
      }
      return new TPipeTransferResp(status);
    } catch (final Exception e) {
      PipeLogger.log(
          LOGGER::warn,
          "Receiver id = %s: Failed to seal file %s from req %s.",
          receiverId.get(),
          files,
          req,
          e);
      return new TPipeTransferResp(
          RpcUtils.getStatus(
              TSStatusCode.PIPE_TRANSFER_FILE_ERROR,
              String.format("Failed to seal file %s because %s", files, e.getMessage())));
    } finally {
      // If the writing file is not sealed successfully, the writing file will be deleted.
      // All pieces of the writing file and its mod(if exists) should be retransmitted by the
      // sender.
      closeCurrentWritingFileWriter(false);
      // Clear the directory instead of only deleting the referenced files in seal request
      // to avoid previously undeleted file being redundant when transferring multi files
      IoTDBReceiverAgent.cleanPipeReceiverDir(receiverFileDirWithIdSuffix.get());
    }
  }
private TPipeTransferResp checkNonFinalFileSeal(
final File file, final String fileName, final long fileLength) throws IOException {
if (!file.exists()) {
final TSStatus status =
RpcUtils.getStatus(
TSStatusCode.PIPE_TRANSFER_FILE_ERROR,
String.format("Failed to seal file %s, the file does not exist.", fileName));
PipeLogger.log(
LOGGER::warn,
"Receiver id = %s: Failed to seal file %s, because the file does not exist.",
receiverId.get(),
fileName);
return new TPipeTransferResp(status);
}
if (fileLength != file.length()) {
final TSStatus status =
RpcUtils.getStatus(
TSStatusCode.PIPE_TRANSFER_FILE_ERROR,
String.format(
"Failed to seal file %s, because the length of file is not correct. "
+ "The original file has length %s, but receiver file has length %s.",
fileName, fileLength, writingFileWriter.length()));
PipeLogger.log(
LOGGER::warn,
"Receiver id = %s: Failed to seal file %s, because the length of file is not correct. "
+ "The original file has length %s, but receiver file has length %s.",
receiverId.get(),
fileName,
fileLength,
writingFileWriter.length());
return new TPipeTransferResp(status);
}
return null;
}
  /**
   * Validates the final (currently-writing) file of a seal request: its name must match the
   * current writing file and the writer's length must equal the declared length.
   *
   * @param fileName name the sender is sealing
   * @param fileLength length declared by the sender
   * @return null when the file passes validation; otherwise a PIPE_TRANSFER_FILE_ERROR response
   * @throws IOException if the writer's length cannot be read
   */
  private TPipeTransferResp checkFinalFileSeal(final String fileName, final long fileLength)
      throws IOException {
    if (!isFileExistedAndNameCorrect(fileName)) {
      final TSStatus status =
          RpcUtils.getStatus(
              TSStatusCode.PIPE_TRANSFER_FILE_ERROR,
              String.format(
                  "Failed to seal file %s, because writing file is %s.", fileName, writingFile));
      PipeLogger.log(
          LOGGER::warn,
          "Receiver id = %s: Failed to seal file %s, because writing file is %s.",
          receiverId.get(),
          fileName,
          writingFile);
      return new TPipeTransferResp(status);
    }
    if (!isWritingFileOffsetCorrect(fileLength)) {
      final TSStatus status =
          RpcUtils.getStatus(
              TSStatusCode.PIPE_TRANSFER_FILE_ERROR,
              String.format(
                  "Failed to seal file %s, because the length of file is not correct. "
                      + "The original file has length %s, but receiver file has length %s.",
                  fileName, fileLength, writingFileWriter.length()));
      PipeLogger.log(
          LOGGER::warn,
          "Receiver id = %s: Failed to seal file %s, because the length of file is not correct. "
              + "The original file has length %s, but receiver file has length %s.",
          receiverId.get(),
          fileName,
          fileLength,
          writingFileWriter.length());
      return new TPipeTransferResp(status);
    }
    return null;
  }
private boolean isWritingFileAvailable() {
final boolean isWritingFileAvailable =
writingFile != null && writingFile.exists() && writingFileWriter != null;
if (!isWritingFileAvailable) {
LOGGER.info(
"Receiver id = {}: Writing file {} is not available. "
+ "Writing file is null: {}, writing file exists: {}, writing file writer is null: {}.",
receiverId.get(),
writingFile,
writingFile == null,
writingFile != null && writingFile.exists(),
writingFileWriter == null);
}
return isWritingFileAvailable;
}
  /** Loads a single sealed file into the node (subclass-specific: configNode vs dataNode). */
  protected abstract TSStatus loadFileV1(
      final PipeTransferFileSealReqV1 req, final String fileAbsolutePath) throws IOException;

  /** Loads a group of sealed files (nullable entries mean absent optional files). */
  protected abstract TSStatus loadFileV2(
      final PipeTransferFileSealReqV2 req, final List<String> fileAbsolutePaths)
      throws IOException, IllegalPathException;
  /**
   * Tears the receiver down: closes and deletes any in-flight writing file, removes the receiver's
   * buffer directory, closes the session, and restores the thread's original name. Synchronized so
   * concurrent exit/transfer calls cannot race on the writer/file fields.
   */
  @Override
  public synchronized void handleExit() {
    if (writingFileWriter != null) {
      try {
        writingFileWriter.close();
        LOGGER.info(
            "Receiver id = {}: Handling exit: Writing file writer was closed.", receiverId.get());
      } catch (Exception e) {
        LOGGER.warn(
            "Receiver id = {}: Handling exit: Close writing file writer error.",
            receiverId.get(),
            e);
      }
      writingFileWriter = null;
    } else {
      if (LOGGER.isDebugEnabled()) {
        LOGGER.debug(
            "Receiver id = {}: Handling exit: Writing file writer is null. No need to close.",
            receiverId.get());
      }
    }
    if (writingFile != null) {
      try {
        RetryUtils.retryOnException(() -> FileUtils.delete(writingFile));
        LOGGER.info(
            "Receiver id = {}: Handling exit: Writing file {} was deleted.",
            receiverId.get(),
            writingFile.getPath());
      } catch (Exception e) {
        LOGGER.warn(
            "Receiver id = {}: Handling exit: Delete writing file {} error.",
            receiverId.get(),
            writingFile.getPath(),
            e);
      }
      writingFile = null;
    } else {
      if (LOGGER.isDebugEnabled()) {
        LOGGER.debug(
            "Receiver id = {}: Handling exit: Writing file is null. No need to delete.",
            receiverId.get());
      }
    }
    // Clear the original receiver file dir if exists
    if (receiverFileDirWithIdSuffix.get() != null) {
      if (receiverFileDirWithIdSuffix.get().exists()) {
        try {
          RetryUtils.retryOnException(
              () -> {
                FileUtils.deleteDirectory(receiverFileDirWithIdSuffix.get());
                return null;
              });
          LOGGER.info(
              "Receiver id = {}: Handling exit: Original receiver file dir {} was deleted.",
              receiverId.get(),
              receiverFileDirWithIdSuffix.get().getPath());
        } catch (Exception e) {
          LOGGER.warn(
              "Receiver id = {}: Handling exit: Delete original receiver file dir {} error.",
              receiverId.get(),
              receiverFileDirWithIdSuffix.get().getPath(),
              e);
        }
      } else {
        if (LOGGER.isDebugEnabled()) {
          LOGGER.debug(
              "Receiver id = {}: Handling exit: Original receiver file dir {} does not exist. No need to delete.",
              receiverId.get(),
              receiverFileDirWithIdSuffix.get().getPath());
        }
      }
      receiverFileDirWithIdSuffix.set(null);
    } else {
      if (LOGGER.isDebugEnabled()) {
        LOGGER.debug(
            "Receiver id = {}: Handling exit: Original receiver file dir is null. No need to delete.",
            receiverId.get());
      }
    }
    // Close the session
    closeSession();
    LOGGER.info("Receiver id = {}: Handling exit: Receiver exited.", receiverId.get());
    // Restore the thread name captured during the first handshake.
    if (originalThreadName != null) {
      Thread.currentThread().setName(originalThreadName);
    }
  }
protected abstract void closeSession();
}
|
apache/jackrabbit-oak | 38,171 | oak-store-document/src/test/java/org/apache/jackrabbit/oak/plugins/document/VersionGCTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.jackrabbit.oak.plugins.document;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.jackrabbit.oak.api.CommitFailedException;
import org.apache.jackrabbit.oak.commons.collections.IterableUtils;
import org.apache.jackrabbit.oak.plugins.document.VersionGarbageCollector.VersionGCStats;
import org.apache.jackrabbit.oak.plugins.document.memory.MemoryDocumentStore;
import org.apache.jackrabbit.oak.plugins.document.util.Utils;
import org.apache.jackrabbit.oak.spi.commit.CommitInfo;
import org.apache.jackrabbit.oak.spi.commit.EmptyHook;
import org.apache.jackrabbit.oak.spi.gc.GCMonitor;
import org.apache.jackrabbit.oak.spi.state.NodeBuilder;
import org.apache.jackrabbit.oak.stats.Clock;
import org.jetbrains.annotations.NotNull;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import static java.lang.System.currentTimeMillis;
import static java.util.concurrent.TimeUnit.HOURS;
import static java.util.concurrent.TimeUnit.MINUTES;
import static java.util.concurrent.TimeUnit.SECONDS;
import static org.apache.commons.lang3.reflect.FieldUtils.readDeclaredField;
import static org.apache.jackrabbit.oak.plugins.document.Collection.SETTINGS;
import static org.apache.jackrabbit.oak.plugins.document.DocumentNodeStoreService.DEFAULT_FGC_BATCH_SIZE;
import static org.apache.jackrabbit.oak.plugins.document.DocumentNodeStoreService.DEFAULT_FGC_PROGRESS_SIZE;
import static org.apache.jackrabbit.oak.plugins.document.DocumentNodeStoreService.DEFAULT_FULL_GC_MAX_AGE;
import static org.apache.jackrabbit.oak.plugins.document.FullGCHelper.disableFullGC;
import static org.apache.jackrabbit.oak.plugins.document.FullGCHelper.disableFullGCDryRun;
import static org.apache.jackrabbit.oak.plugins.document.FullGCHelper.enableFullGC;
import static org.apache.jackrabbit.oak.plugins.document.FullGCHelper.enableFullGCDryRun;
import static org.apache.jackrabbit.oak.plugins.document.VersionGarbageCollector.SETTINGS_COLLECTION_FULL_GC_DOCUMENT_ID_PROP;
import static org.apache.jackrabbit.oak.plugins.document.VersionGarbageCollector.SETTINGS_COLLECTION_FULL_GC_DRY_RUN_DOCUMENT_ID_PROP;
import static org.apache.jackrabbit.oak.plugins.document.VersionGarbageCollector.SETTINGS_COLLECTION_FULL_GC_DRY_RUN_TIMESTAMP_PROP;
import static org.apache.jackrabbit.oak.plugins.document.VersionGarbageCollector.SETTINGS_COLLECTION_FULL_GC_GENERATION_PROP;
import static org.apache.jackrabbit.oak.plugins.document.VersionGarbageCollector.SETTINGS_COLLECTION_FULL_GC_TIMESTAMP_PROP;
import static org.apache.jackrabbit.oak.plugins.document.VersionGarbageCollector.SETTINGS_COLLECTION_ID;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.slf4j.helpers.MessageFormatter.arrayFormat;
/**
 * Tests for {@link VersionGarbageCollector}: parallel-run rejection,
 * cancellation semantics, dry-run mode, fullGC settings bookkeeping
 * (OAK-10199, OAK-10370), GC scope recommendations (OAK-7378, OAK-8448)
 * and fullGC mode configuration loading (OAK-10896, OAK-11439).
 */
public class VersionGCTest {

    @Rule
    public final DocumentMKBuilderProvider builderProvider = new DocumentMKBuilderProvider();

    private ExecutorService execService;

    /** Store with a semaphore that lets tests block a running GC. */
    private final TestStore store = new TestStore();

    private DocumentNodeStore ns;

    private VersionGarbageCollector gc;

    @Before
    public void setUp() throws Exception {
        execService = Executors.newCachedThreadPool();
        Clock clock = new Clock.Virtual();
        clock.waitUntil(System.currentTimeMillis());
        Revision.setClock(clock);
        ns = builderProvider.newBuilder()
                .clock(clock)
                .setLeaseCheckMode(LeaseCheckMode.DISABLED)
                .setDocumentStore(store)
                .setAsyncDelay(0)
                .getNodeStore();
        // create test content: a node that is created and removed again,
        // so RGC has something to collect
        createNode("foo");
        removeNode("foo");
        // wait one hour so the garbage is old enough to be collected
        clock.waitUntil(clock.getTime() + HOURS.toMillis(1));
        gc = ns.getVersionGarbageCollector();
    }

    @After
    public void tearDown() throws Exception {
        disableFullGC(gc);
        disableFullGCDryRun(gc);
        execService.shutdown();
        execService.awaitTermination(1, MINUTES);
    }

    @AfterClass
    public static void resetClock() {
        Revision.resetClockToDefault();
    }

    @Test
    public void failParallelGC() throws Exception {
        // block the gc call on the store's semaphore
        store.semaphore.acquireUninterruptibly();
        Future<VersionGCStats> stats = gc();
        assertTrue(waitUntilGcBlocked());
        // now try to trigger another GC while the first one is still running
        try {
            FullGCHelper.gc(gc, 30, TimeUnit.MINUTES);
            fail("must throw an IOException");
        } catch (IOException e) {
            assertTrue(e.getMessage().contains("already running"));
        } finally {
            store.semaphore.release();
            stats.get();
        }
    }

    @Test
    public void cancel() throws Exception {
        // block the gc call on the store's semaphore
        store.semaphore.acquireUninterruptibly();
        Future<VersionGCStats> stats = gc();
        assertTrue(waitUntilGcBlocked());
        // now cancel the GC and let it resume; it must report cancellation
        gc.cancel();
        store.semaphore.release();
        assertTrue(stats.get().canceled);
    }

    @Test
    public void cancelMustNotUpdateLastOldestTimeStamp() throws Exception {
        // get previous entry from SETTINGS
        String versionGCId = "versionGC";
        String lastOldestTimeStampProp = "lastOldestTimeStamp";
        Document statusBefore = store.find(Collection.SETTINGS, versionGCId);
        // block gc call
        store.semaphore.acquireUninterruptibly();
        Future<VersionGCStats> stats = gc();
        assertTrue(waitUntilGcBlocked());
        // now cancel the GC
        gc.cancel();
        store.semaphore.release();
        assertTrue(stats.get().canceled);
        // ensure a canceled GC doesn't update that versionGC SETTINGS entry
        Document statusAfter = store.find(Collection.SETTINGS, versionGCId);
        assertPropertyUnchanged(statusBefore, statusAfter, lastOldestTimeStampProp, versionGCId);
    }

    // OAK-10199
    @Test
    public void cancelMustNotUpdateLastOldestModifiedTimeStamp() throws Exception {
        // get previous entry from SETTINGS
        String versionGCId = SETTINGS_COLLECTION_ID;
        String fullGCTimestamp = SETTINGS_COLLECTION_FULL_GC_TIMESTAMP_PROP;
        enableFullGC(gc);
        FullGCHelper.gc(gc, 30, SECONDS);
        Document statusBefore = store.find(SETTINGS, versionGCId);
        // block gc call
        store.semaphore.acquireUninterruptibly();
        Future<VersionGCStats> stats = gc();
        assertTrue(waitUntilGcBlocked());
        // now cancel the GC
        gc.cancel();
        store.semaphore.release();
        assertTrue(stats.get().canceled);
        // ensure a canceled GC doesn't update that versionGC SETTINGS entry
        Document statusAfter = store.find(SETTINGS, SETTINGS_COLLECTION_ID);
        assertPropertyUnchanged(statusBefore, statusAfter, fullGCTimestamp, versionGCId);
    }

    @Test
    public void cancelMustNotUpdateLastOldestModifiedDocId() throws Exception {
        // get previous entry from SETTINGS
        String versionGCId = SETTINGS_COLLECTION_ID;
        String oldestModifiedDocId = SETTINGS_COLLECTION_FULL_GC_DOCUMENT_ID_PROP;
        enableFullGC(gc);
        FullGCHelper.gc(gc, 30, SECONDS);
        Document statusBefore = store.find(SETTINGS, versionGCId);
        // block gc call
        store.semaphore.acquireUninterruptibly();
        Future<VersionGCStats> stats = gc();
        assertTrue(waitUntilGcBlocked());
        // now cancel the GC
        gc.cancel();
        store.semaphore.release();
        assertTrue(stats.get().canceled);
        // ensure a canceled GC doesn't update that versionGC SETTINGS entry
        Document statusAfter = store.find(SETTINGS, SETTINGS_COLLECTION_ID);
        assertPropertyUnchanged(statusBefore, statusAfter, oldestModifiedDocId, versionGCId);
    }
    // END - OAK-10199

    // OAK-10370
    @Test
    public void dryRunMustNotUpdateLastOldestModifiedTimeStamp() throws Exception {
        // get previous entry from SETTINGS
        String versionGCId = SETTINGS_COLLECTION_ID;
        String fullGCTimestamp = SETTINGS_COLLECTION_FULL_GC_TIMESTAMP_PROP;
        enableFullGC(gc);
        FullGCHelper.gc(gc, 30, SECONDS);
        Document statusBefore = store.find(SETTINGS, versionGCId);
        assertNotNull(statusBefore);
        // now run GC in dryRun mode
        enableFullGCDryRun(gc);
        FullGCHelper.gc(gc, 30, SECONDS);
        // ensure a dryRun GC doesn't update that versionGC SETTINGS entry
        Document statusAfter = store.find(SETTINGS, SETTINGS_COLLECTION_ID);
        assertPropertyUnchanged(statusBefore, statusAfter, fullGCTimestamp, versionGCId);
    }

    @Test
    public void dryRunMustNotUpdateLastOldestModifiedDocId() throws Exception {
        // get previous entry from SETTINGS
        String versionGCId = SETTINGS_COLLECTION_ID;
        String oldestModifiedDocId = SETTINGS_COLLECTION_FULL_GC_DOCUMENT_ID_PROP;
        enableFullGC(gc);
        FullGCHelper.gc(gc, 30, SECONDS);
        final Document statusBefore = store.find(SETTINGS, versionGCId);
        assertNotNull(statusBefore);
        // now run GC in dryRun mode
        enableFullGCDryRun(gc);
        FullGCHelper.gc(gc, 30, SECONDS);
        // ensure a dryRun GC doesn't update that versionGC SETTINGS entry
        final Document statusAfter = store.find(SETTINGS, SETTINGS_COLLECTION_ID);
        assertPropertyUnchanged(statusBefore, statusAfter, oldestModifiedDocId, versionGCId);
    }
    // END - OAK-10370

    @Test
    public void getInfo() throws Exception {
        FullGCHelper.gc(gc, 1, TimeUnit.HOURS);
        gc.getInfo(1, TimeUnit.HOURS);
    }

    @Test
    public void gcMonitorStatusUpdates() throws Exception {
        TestGCMonitor monitor = new TestGCMonitor();
        gc.setGCMonitor(monitor);
        FullGCHelper.gc(gc, 30, TimeUnit.MINUTES);
        // the GC must walk through its phases in this exact order
        List<String> expected = List.of("INITIALIZING",
                "COLLECTING", "CHECKING", "COLLECTING", "DELETING", "SORTING",
                "DELETING", "UPDATING", "SPLITS_CLEANUP", "IDLE");
        assertEquals(expected, monitor.getStatusMessages());
    }

    @Test
    public void gcMonitorInfoMessages() throws Exception {
        TestGCMonitor monitor = new TestGCMonitor();
        gc.setGCMonitor(monitor);
        FullGCHelper.gc(gc, 2, TimeUnit.HOURS);
        List<String> infoMessages = monitor.getInfoMessages();
        assertEquals(3, infoMessages.size());
        assertTrue(infoMessages.get(0).startsWith("Start "));
        assertTrue(infoMessages.get(1).startsWith("Looking at revisions"));
        assertTrue(infoMessages.get(2).startsWith("Revision garbage collection finished"));
    }

    @Test
    public void findVersionGC() throws Exception {
        store.findVersionGC.set(0);
        FullGCHelper.gc(gc, 1, TimeUnit.HOURS);
        // must only read the versionGC settings document once
        assertEquals(1, store.findVersionGC.get());
    }

    @Test
    public void recommendationsOnHugeBacklog() throws Exception {
        VersionGCOptions options = gc.getOptions();
        final long oneYearAgo = ns.getClock().getTime() - TimeUnit.DAYS.toMillis(365);
        final long twelveTimesTheLimit = options.collectLimit * 12;
        final long secondsPerDay = TimeUnit.DAYS.toMillis(1);

        VersionGCSupport localgcsupport = fakeVersionGCSupport(ns.getDocumentStore(), oneYearAgo, twelveTimesTheLimit);

        VersionGCRecommendations rec = new VersionGCRecommendations(secondsPerDay, ns.getCheckpoints(), true, ns.getClock(),
                localgcsupport, options, new TestGCMonitor(), false, false, SECONDS.toMillis(DEFAULT_FULL_GC_MAX_AGE));

        // with 12x the collect limit spread over a year, the recommendation
        // should select a duration of roughly one month
        long duration = rec.scope.getDurationMs();
        assertTrue(duration <= TimeUnit.DAYS.toMillis(33));
        assertTrue(duration >= TimeUnit.DAYS.toMillis(28));

        VersionGCStats stats = new VersionGCStats();
        stats.limitExceeded = true;
        rec.evaluate(stats);
        assertTrue(stats.needRepeat);

        rec = new VersionGCRecommendations(secondsPerDay, ns.getCheckpoints(), true, ns.getClock(), localgcsupport,
                options, new TestGCMonitor(), false, false, SECONDS.toMillis(DEFAULT_FULL_GC_MAX_AGE));

        // after exceeding the limit, the new duration should be half
        long nduration = rec.scope.getDurationMs();
        assertEquals("duration must be halved after limitExceeded", duration / 2, nduration);
    }

    // OAK-8448: test that after shrinking the scope to the minimum and after
    // successful runs, scope will be expanded again
    @Test
    public void expandIntervalAgain() throws Exception {
        VersionGCOptions options = gc.getOptions();
        VersionGCRecommendations rec;
        VersionGCStats stats;
        VersionGCSupport localgcsupport;
        GCMonitor testmonitor = new TestGCMonitor();

        int days = 365;
        long secondsPerDay = TimeUnit.DAYS.toMillis(1);
        long oldestDeleted = ns.getClock().getTime() - TimeUnit.DAYS.toMillis(days);
        // one deleted node per second
        long deletedCount = TimeUnit.DAYS.toSeconds(days);

        localgcsupport = fakeVersionGCSupport(ns.getDocumentStore(), oldestDeleted, deletedCount);

        // loop until the recommended interval is at 60s (precisionMS)
        do {
            rec = new VersionGCRecommendations(secondsPerDay, ns.getCheckpoints(), true, ns.getClock(), localgcsupport,
                    options, testmonitor, false, false, SECONDS.toMillis(DEFAULT_FULL_GC_MAX_AGE));
            stats = new VersionGCStats();
            stats.limitExceeded = true;
            rec.evaluate(stats);
            assertTrue(stats.needRepeat);
        } while (rec.suggestedIntervalMs > options.precisionMs);

        // loop with successful runs (1 node/sec interval deleted) and observe the interval
        int iterations = 0;
        int maxiterations = 1000;
        do {
            iterations += 1;
            oldestDeleted = rec.scope.fromMs + rec.scope.getDurationMs();
            int deleted = (int) (rec.scope.getDurationMs() / TimeUnit.SECONDS.toMillis(1));
            deletedCount -= deleted;
            localgcsupport = fakeVersionGCSupport(ns.getDocumentStore(), oldestDeleted, deletedCount);
            rec = new VersionGCRecommendations(secondsPerDay, ns.getCheckpoints(), true, ns.getClock(), localgcsupport,
                    options, testmonitor, false, false, SECONDS.toMillis(DEFAULT_FULL_GC_MAX_AGE));
            stats = new VersionGCStats();
            stats.limitExceeded = false;
            stats.deletedDocGCCount = deleted;
            stats.deletedLeafDocGCCount = 0;
            rec.evaluate(stats);
        } while (stats.needRepeat && iterations < maxiterations);

        assertTrue("VersionGC should have finished after " + maxiterations + " iterations, but did not. Last scope was: "
                + rec.scope + ".", !stats.needRepeat);
    }

    // OAK-7378
    @Test
    public void recommendedInterval() throws Exception {
        AtomicLong deletedOnceCountCalls = new AtomicLong();

        // override the gc with a custom VersionGCSupport that counts
        // calls to getDeletedOnceCount()
        gc = new VersionGarbageCollector(ns, new VersionGCSupport(store) {
            @Override
            public long getDeletedOnceCount() {
                deletedOnceCountCalls.incrementAndGet();
                return IterableUtils.size(Utils.getSelectedDocuments(store, NodeDocument.DELETED_ONCE, 1));
            }
        }, false, false, false);

        // run first RGC
        FullGCHelper.gc(gc, 1, TimeUnit.HOURS);

        // afterwards there should be no more calls to getDeletedOnceCount()
        deletedOnceCountCalls.set(0);
        // try a couple of runs every five seconds to simulate continuous RGC
        for (int i = 0; i < 10; i++) {
            advanceClock(5, SECONDS);
            FullGCHelper.gc(gc, 1, TimeUnit.HOURS);
            assertEquals(0, deletedOnceCountCalls.get());
        }
    }

    // OAK-10199
    @Test
    public void testFullGCDocumentRead_disabled() throws Exception {
        disableFullGC(gc);
        VersionGCStats stats = FullGCHelper.gc(gc, 30, TimeUnit.MINUTES);
        assertNotNull(stats);
        assertEquals(0, stats.fullGCDocsElapsed);
    }

    @Test
    public void testFullGCDocumentRead_enabled() throws Exception {
        enableFullGC(gc);
        gc.setFullGcMaxAge(30, MINUTES);
        VersionGCStats stats = FullGCHelper.gc(gc, 30, TimeUnit.MINUTES);
        assertNotNull(stats);
        assertNotEquals(0, stats.fullGCDocsElapsed);
    }
    // OAK-10199 END

    // OAK-10370
    @Test
    public void testFullGCDryRunModeEnabled() throws Exception {
        enableFullGC(gc);
        enableFullGCDryRun(gc);
        VersionGCStats stats = FullGCHelper.gc(gc, 30, TimeUnit.MINUTES);
        assertNotNull(stats);
        assertTrue(stats.fullGCDryRunMode);
    }

    @Test
    public void testResetFullGCDryRunMode() throws Exception {
        enableFullGC(gc);
        enableFullGCDryRun(gc);
        VersionGCStats stats = FullGCHelper.gc(gc, 30, TimeUnit.MINUTES);
        assertNotNull(stats);

        // add dryRun fields data
        final UpdateOp updateOp = new UpdateOp(SETTINGS_COLLECTION_ID, true);
        updateOp.set(SETTINGS_COLLECTION_FULL_GC_DRY_RUN_DOCUMENT_ID_PROP, "docId");
        updateOp.set(SETTINGS_COLLECTION_FULL_GC_DRY_RUN_TIMESTAMP_PROP, currentTimeMillis());
        store.createOrUpdate(Collection.SETTINGS, updateOp);

        final Document settingsBefore = store.find(SETTINGS, SETTINGS_COLLECTION_ID);
        assertNotNull(settingsBefore);
        assertNotNull(settingsBefore.get(SETTINGS_COLLECTION_FULL_GC_DRY_RUN_DOCUMENT_ID_PROP));
        assertNotNull(settingsBefore.get(SETTINGS_COLLECTION_FULL_GC_DRY_RUN_TIMESTAMP_PROP));

        // resetDryRun() must remove both dryRun fields
        gc.resetDryRun();

        final Document settingsAfter = store.find(SETTINGS, SETTINGS_COLLECTION_ID);
        assertNotNull(settingsAfter);
        assertNull(settingsAfter.get(SETTINGS_COLLECTION_FULL_GC_DRY_RUN_DOCUMENT_ID_PROP));
        assertNull(settingsAfter.get(SETTINGS_COLLECTION_FULL_GC_DRY_RUN_TIMESTAMP_PROP));
    }
    // OAK-10370 END

    @Test
    public void testResetWithFullGCGeneration() throws Exception {
        enableFullGC(gc);
        VersionGCStats stats = FullGCHelper.gc(gc, 30, TimeUnit.MINUTES);
        assertNotNull(stats);
        final Document settingsBefore = store.find(SETTINGS, SETTINGS_COLLECTION_ID);
        assertFullGCProgressPresent(settingsBefore);
        assertNull(settingsBefore.get(SETTINGS_COLLECTION_FULL_GC_GENERATION_PROP));
        // a new generation must reset the fullGC progress fields
        gc.resetFullGcIfGenChange(1);
        assertFullGCProgressReset(store.find(SETTINGS, SETTINGS_COLLECTION_ID), 1L);
    }

    @Test
    public void testResetWithFullGCGenerationIncrement() throws Exception {
        enableFullGC(gc);
        VersionGCStats stats = FullGCHelper.gc(gc, 30, TimeUnit.MINUTES);
        assertNotNull(stats);
        final Document settingsBefore = store.find(SETTINGS, SETTINGS_COLLECTION_ID);
        assertFullGCProgressPresent(settingsBefore);
        assertNull(settingsBefore.get(SETTINGS_COLLECTION_FULL_GC_GENERATION_PROP));
        gc.resetFullGcIfGenChange(1);
        assertFullGCProgressReset(store.find(SETTINGS, SETTINGS_COLLECTION_ID), 1L);

        // run full gc and set fullgc variables again in db
        stats = FullGCHelper.gc(gc, 30, TimeUnit.MINUTES);
        assertNotNull(stats);

        // change generation to a higher value -> progress must be reset again
        gc.resetFullGcIfGenChange(2);
        assertFullGCProgressReset(store.find(SETTINGS, SETTINGS_COLLECTION_ID), 2L);
    }

    @Test
    public void testResetWithFullGCGenerationDecrement() throws Exception {
        enableFullGC(gc);
        VersionGCStats stats = FullGCHelper.gc(gc, 30, TimeUnit.MINUTES);
        assertNotNull(stats);
        final Document settingsBefore = store.find(SETTINGS, SETTINGS_COLLECTION_ID);
        assertFullGCProgressPresent(settingsBefore);
        assertNull(settingsBefore.get(SETTINGS_COLLECTION_FULL_GC_GENERATION_PROP));
        gc.resetFullGcIfGenChange(2);
        assertFullGCProgressReset(store.find(SETTINGS, SETTINGS_COLLECTION_ID), 2L);

        // run full gc and set fullgc variables again in db
        stats = FullGCHelper.gc(gc, 30, TimeUnit.MINUTES);
        assertNotNull(stats);

        // change generation to a lower value -> must be ignored, no reset
        gc.resetFullGcIfGenChange(1);
        final Document settingsAfter2 = store.find(SETTINGS, SETTINGS_COLLECTION_ID);
        assertFullGCProgressPresent(settingsAfter2);
        assertEquals(2L, settingsAfter2.get(SETTINGS_COLLECTION_FULL_GC_GENERATION_PROP));
    }

    @Test
    public void testResetWithFullGCGenerationSameValue() throws Exception {
        enableFullGC(gc);
        VersionGCStats stats = FullGCHelper.gc(gc, 30, TimeUnit.MINUTES);
        assertNotNull(stats);
        final Document settingsBefore = store.find(SETTINGS, SETTINGS_COLLECTION_ID);
        assertFullGCProgressPresent(settingsBefore);
        assertNull(settingsBefore.get(SETTINGS_COLLECTION_FULL_GC_GENERATION_PROP));
        gc.resetFullGcIfGenChange(2);
        assertFullGCProgressReset(store.find(SETTINGS, SETTINGS_COLLECTION_ID), 2L);

        // run full gc and set fullgc variables again in db
        stats = FullGCHelper.gc(gc, 30, TimeUnit.MINUTES);
        assertNotNull(stats);

        // change generation to the same value -> must be ignored, no reset
        gc.resetFullGcIfGenChange(2);
        final Document settingsAfter2 = store.find(SETTINGS, SETTINGS_COLLECTION_ID);
        assertFullGCProgressPresent(settingsAfter2);
        assertEquals(2L, settingsAfter2.get(SETTINGS_COLLECTION_FULL_GC_GENERATION_PROP));
    }

    // OAK-10745
    @Test
    public void testVGCWithBatchSizeSmallerThanProgressSize() throws IllegalAccessException {
        VersionGarbageCollector vgc = new VersionGarbageCollector(
                ns, new VersionGCSupport(store), true, false, false,
                0, 0, 1000, 5000, TimeUnit.SECONDS.toMillis(DEFAULT_FULL_GC_MAX_AGE), 0);
        assertEquals(1000, readDeclaredField(vgc, "fullGCBatchSize", true));
        assertEquals(5000, readDeclaredField(vgc, "fullGCProgressSize", true));
    }

    @Test
    public void testVGCWithBatchSizeGreaterThanProgressSize() throws IllegalAccessException {
        VersionGarbageCollector vgc = new VersionGarbageCollector(
                ns, new VersionGCSupport(store), true, false, false,
                0, 0, 20000, 15000, TimeUnit.SECONDS.toMillis(DEFAULT_FULL_GC_MAX_AGE), 0);
        // batch size must be capped at the progress size
        assertEquals(15000, readDeclaredField(vgc, "fullGCBatchSize", true));
        assertEquals(15000, readDeclaredField(vgc, "fullGCProgressSize", true));
    }
    // OAK-10745 END

    // OAK-10896
    @Test
    public void testVersionGCLoadGCModeConfigurationNotApplicable() {
        int fullGcModeNotAllowedValue = 15;
        int fullGcModeGapOrphans = 2;
        // set fullGcMode to allowed value that is different than NONE
        VersionGarbageCollector.setFullGcMode(fullGcModeGapOrphans);
        // reinitializing VersionGarbageCollector with a not allowed value
        // must fall back to the default NONE
        assertFullGcModeConfig(fullGcModeNotAllowedValue, FullGCMode.NONE);
    }

    @Test
    public void testVersionGCLoadGCModeConfigurationNone() {
        assertFullGcModeConfig(0, FullGCMode.NONE);
    }

    @Test
    public void testVersionGCLoadGCModeConfigurationGapOrphans() {
        assertFullGcModeConfig(2, FullGCMode.GAP_ORPHANS);
    }

    @Test
    public void testVersionGCLoadGCModeConfigurationGapOrphansEmptyProperties() {
        assertFullGcModeConfig(3, FullGCMode.GAP_ORPHANS_EMPTYPROPS);
    }
    // OAK-10896 END

    @Test
    public void testVersionGCLoadGCModeConfigurationAllOrphans() {
        assertFullGcModeConfig(10, FullGCMode.ALL_ORPHANS);
    }

    // OAK-11439
    @Test
    public void testVersionGCLoadGCModeConfigurationAllOrphansEmptyProps() {
        assertFullGcModeConfig(4, FullGCMode.ALL_ORPHANS_EMPTYPROPS);
    }

    @Test
    public void testVersionGCLoadGCModeConfigurationAllOrphansEmptyPropsKeepOneUserProps() {
        assertFullGcModeConfig(5, FullGCMode.ORPHANS_EMPTYPROPS_KEEP_ONE_USER_PROPS);
    }

    @Test
    public void testVersionGCLoadGCModeConfigurationAllOrphansEmptyPropsKeepOneAllProps() {
        assertFullGcModeConfig(6, FullGCMode.ORPHANS_EMPTYPROPS_KEEP_ONE_ALL_PROPS);
    }

    @Test
    public void testVersionGCLoadGCModeConfigurationAllOrphansEmptyPropsUnmergedBC() {
        assertFullGcModeConfig(7, FullGCMode.ORPHANS_EMPTYPROPS_UNMERGED_BC);
    }
    // OAK-11439 END

    /**
     * Creates a VersionGarbageCollector with the given fullGcMode value and
     * asserts the static fullGcMode the constructor loaded from it.
     */
    private void assertFullGcModeConfig(int configuredFullGcMode, FullGCMode expectedMode) {
        VersionGarbageCollector vgc = new VersionGarbageCollector(
                ns, new VersionGCSupport(store), true, false, false,
                configuredFullGcMode, 0, DEFAULT_FGC_BATCH_SIZE, DEFAULT_FGC_PROGRESS_SIZE,
                TimeUnit.SECONDS.toMillis(DEFAULT_FULL_GC_MAX_AGE), 0);
        assertNotNull(vgc);
        assertEquals(expectedMode, VersionGarbageCollector.getFullGcMode());
    }

    /**
     * Waits up to ~1 second until the GC running in {@link #gc()} is queued on
     * the test store's semaphore.
     *
     * @return {@code true} if the GC thread got blocked within the timeout
     */
    private boolean waitUntilGcBlocked() throws InterruptedException {
        for (int i = 0; i < 10; i++) {
            if (store.semaphore.hasQueuedThreads()) {
                return true;
            }
            Thread.sleep(100);
        }
        return false;
    }

    /**
     * Asserts that the given settings property was not modified between the
     * two snapshots of the versionGC settings document.
     */
    private static void assertPropertyUnchanged(Document before, Document after,
                                                String propName, String settingsId) {
        if (before == null) {
            assertNull(after);
        } else {
            assertNotNull(after);
            assertEquals(
                    "canceled GC shouldn't change the " + propName + " property on " + settingsId
                            + " settings entry",
                    before.get(propName), after.get(propName));
        }
    }

    /**
     * Asserts that the fullGC progress fields (document id and timestamp)
     * are present on the versionGC settings document.
     */
    private static void assertFullGCProgressPresent(Document settings) {
        assertNotNull(settings);
        assertNotNull(settings.get(SETTINGS_COLLECTION_FULL_GC_DOCUMENT_ID_PROP));
        assertNotNull(settings.get(SETTINGS_COLLECTION_FULL_GC_TIMESTAMP_PROP));
    }

    /**
     * Asserts that the fullGC progress fields were cleared and the generation
     * property was set to the expected value.
     */
    private static void assertFullGCProgressReset(Document settings, long expectedGeneration) {
        assertNotNull(settings);
        assertNull(settings.get(SETTINGS_COLLECTION_FULL_GC_DOCUMENT_ID_PROP));
        assertNull(settings.get(SETTINGS_COLLECTION_FULL_GC_TIMESTAMP_PROP));
        assertEquals(expectedGeneration, settings.get(SETTINGS_COLLECTION_FULL_GC_GENERATION_PROP));
    }

    /** Runs a 30-minute-maxAge GC asynchronously on the executor. */
    private Future<VersionGCStats> gc() {
        return execService.submit(() -> FullGCHelper.gc(gc, 30, TimeUnit.MINUTES));
    }

    private void removeNode(String name) throws CommitFailedException {
        NodeBuilder builder = ns.getRoot().builder();
        builder.child(name).remove();
        merge(ns, builder);
    }

    private void createNode(String name) throws CommitFailedException {
        NodeBuilder builder = ns.getRoot().builder();
        builder.child(name);
        merge(ns, builder);
    }

    private void merge(DocumentNodeStore store, NodeBuilder builder)
            throws CommitFailedException {
        store.merge(builder, EmptyHook.INSTANCE, CommitInfo.EMPTY);
    }

    private void advanceClock(long time, TimeUnit unit)
            throws InterruptedException {
        Clock c = ns.getClock();
        c.waitUntil(c.getTime() + unit.toMillis(time));
    }

    /**
     * In-memory store that lets tests block query() via a semaphore and
     * counts reads of the "versionGC" settings document.
     */
    private static class TestStore extends MemoryDocumentStore {

        final Semaphore semaphore = new Semaphore(1);

        final AtomicLong findVersionGC = new AtomicLong();

        @NotNull
        @Override
        public <T extends Document> List<T> query(Collection<T> collection,
                                                  String fromKey,
                                                  String toKey,
                                                  String indexedProperty,
                                                  long startValue,
                                                  int limit) {
            // block here while a test holds the semaphore
            semaphore.acquireUninterruptibly();
            try {
                return super.query(collection, fromKey, toKey, indexedProperty, startValue, limit);
            } finally {
                semaphore.release();
            }
        }

        @Override
        public <T extends Document> T find(Collection<T> collection,
                                           String key) {
            if (collection == Collection.SETTINGS
                    && key.equals("versionGC")) {
                findVersionGC.incrementAndGet();
            }
            return super.find(collection, key);
        }
    }

    /**
     * GCMonitor that records info and status messages for assertions.
     */
    private static class TestGCMonitor implements GCMonitor {

        final List<String> infoMessages = new ArrayList<>();

        final List<String> statusMessages = new ArrayList<>();

        @Override
        public void info(String message, Object... arguments) {
            this.infoMessages.add(arrayFormat(message, arguments).getMessage());
        }

        @Override
        public void warn(String message, Object... arguments) {
        }

        @Override
        public void error(String message, Exception exception) {
        }

        @Override
        public void skipped(String reason, Object... arguments) {
        }

        @Override
        public void compacted() {
        }

        @Override
        public void cleaned(long reclaimedSize, long currentSize) {
        }

        @Override
        public void updateStatus(String status) {
            this.statusMessages.add(status);
        }

        public List<String> getInfoMessages() {
            return this.infoMessages;
        }

        public List<String> getStatusMessages() {
            return this.statusMessages;
        }
    }

    /**
     * Returns a VersionGCSupport stub reporting a fixed oldest-deleted
     * timestamp and deleted-once count, for recommendation tests.
     */
    private VersionGCSupport fakeVersionGCSupport(final DocumentStore ds, final long oldestDeleted, final long countDeleted) {
        return new VersionGCSupport(ds) {

            @Override
            public long getOldestDeletedOnceTimestamp(Clock clock, long precisionMs) {
                return oldestDeleted;
            }

            @Override
            public long getDeletedOnceCount() {
                return countDeleted;
            }
        };
    }
}
|
apache/harmony | 38,105 | classlib/modules/security/src/main/java/common/org/apache/harmony/security/provider/cert/X509CertFactoryImpl.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* @author Alexander Y. Kleymenov
*/
package org.apache.harmony.security.provider.cert;
import java.io.IOException;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import java.security.cert.CRL;
import java.security.cert.CRLException;
import java.security.cert.CertPath;
import java.security.cert.Certificate;
import java.security.cert.CertificateException;
import java.security.cert.CertificateFactorySpi;
import java.security.cert.X509CRL;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import org.apache.harmony.luni.util.Base64;
import org.apache.harmony.security.asn1.ASN1Constants;
import org.apache.harmony.security.asn1.BerInputStream;
import org.apache.harmony.security.internal.nls.Messages;
import org.apache.harmony.security.pkcs7.ContentInfo;
import org.apache.harmony.security.pkcs7.SignedData;
import org.apache.harmony.security.x509.CertificateList;
/**
* X509 Certificate Factory Service Provider Interface Implementation.
* It supports CRLs and Certificates in (PEM) ASN.1 DER encoded form,
* and Certification Paths in PkiPath and PKCS7 formats.
* For Certificates and CRLs factory maintains the caching
* mechanisms allowing to speed up repeated Certificate/CRL
* generation.
* @see Cache
*/
public class X509CertFactoryImpl extends CertificateFactorySpi {
// number of leading/trailing bytes used for cert hash computation
private static int CERT_CACHE_SEED_LENGTH = 28;
// certificate cache
private static Cache CERT_CACHE = new Cache(CERT_CACHE_SEED_LENGTH);
// number of leading/trailing bytes used for crl hash computation
private static int CRL_CACHE_SEED_LENGTH = 24;
// crl cache
private static Cache CRL_CACHE = new Cache(CRL_CACHE_SEED_LENGTH);
/**
* Default constructor.
* Creates the instance of Certificate Factory SPI ready for use.
*/
public X509CertFactoryImpl() { }
/**
 * Generates a single X.509 certificate from the data in the stream.
 * The data can be either an ASN.1 DER encoded X.509 certificate, or its PEM
 * representation (Base64 text bounded by
 * <code>"-----BEGIN CERTIFICATE-----"</code> /
 * <code>"-----END CERTIFICATE-----"</code>).
 *
 * Generated certificates are served from / stored into the factory cache
 * keyed on the encoded form.
 *
 * @see java.security.cert.CertificateFactorySpi#engineGenerateCertificate(InputStream)
 * method documentation for more info
 */
public Certificate engineGenerateCertificate(InputStream inStream)
        throws CertificateException {
    if (inStream == null) {
        throw new CertificateException(Messages.getString("security.153")); //$NON-NLS-1$
    }
    try {
        // a mark-capable stream is required so the one-byte format probe
        // below can be undone
        InputStream source = inStream.markSupported()
                ? inStream
                : new RestoringInputStream(inStream);
        source.mark(1);
        int first = source.read();
        if (first == '-') {
            // PEM form: strip the boundaries, Base64-decode, then parse DER
            return getCertificate(decodePEM(source, CERT_BOUND_SUFFIX));
        }
        // DER form (or anything else, which the parser will reject):
        // push the probed byte back and parse the stream directly
        source.reset();
        return getCertificate(source);
    } catch (IOException e) {
        throw new CertificateException(e);
    }
}
/**
 * Generates the collection of certificates from the encodings provided by
 * the input stream. The stream may contain a concatenation of PEM and/or
 * DER encoded X.509 certificates, or a single PKCS7 ContentInfo/SignedData
 * structure whose <code>certificates</code> field is returned.
 *
 * @param inStream stream containing one or more certificate encodings
 * @return the certificates read from the stream, in encounter order
 * @throws CertificateException if the stream is null or empty, or the data
 *         is in an unsupported format
 * @see java.security.cert.CertificateFactorySpi#engineGenerateCertificates(InputStream)
 * method documentation for more info
 */
public Collection<? extends Certificate>
        engineGenerateCertificates(InputStream inStream)
        throws CertificateException {
    if (inStream == null) {
        throw new CertificateException(Messages.getString("security.153")); //$NON-NLS-1$
    }
    // parameterized list (the original raw ArrayList produced an unchecked
    // conversion at the Collection<? extends Certificate> return)
    ArrayList<Certificate> result = new ArrayList<Certificate>();
    try {
        if (!inStream.markSupported()) {
            // create the mark supporting wrapper
            inStream = new RestoringInputStream(inStream);
        }
        // if it is PEM encoded form this array will contain the encoding
        // so ((it is PEM) <-> (encoding != null))
        byte[] encoding = null;
        // The second ASN.1 tag of the structure, used for recognizing the
        // data format (is it PKCS7 ContentInfo structure, X.509 Certificate,
        // or unsupported encoding)
        int second_asn1_tag = -1;
        inStream.mark(1);
        int ch;
        while ((ch = inStream.read()) != -1) {
            // check if it is PEM encoded form
            if (ch == '-') { // beginning of PEM encoding ('-' char)
                // decode PEM chunk and store its content (ASN.1 encoding)
                encoding = decodePEM(inStream, FREE_BOUND_SUFFIX);
            } else if (ch == 0x30) { // beginning of ASN.1 sequence (0x30)
                encoding = null;
                inStream.reset();
                // prepare for data format determination
                inStream.mark(CERT_CACHE_SEED_LENGTH);
            } else { // unsupported data
                if (result.size() == 0) {
                    throw new CertificateException(
                            Messages.getString("security.15F")); //$NON-NLS-1$
                } else {
                    // it can be trailing user data,
                    // so keep it in the stream
                    inStream.reset();
                    return result;
                }
            }
            // Check the data format
            BerInputStream in = (encoding == null)
                    ? new BerInputStream(inStream)
                    : new BerInputStream(encoding);
            // read the next ASN.1 tag
            second_asn1_tag = in.next(); // inStream position changed
            if (encoding == null) {
                // keep whole structure in the stream
                inStream.reset();
            }
            // check if it is a TBSCertificate structure
            if (second_asn1_tag != ASN1Constants.TAG_C_SEQUENCE) {
                if (result.size() == 0) {
                    // there were not read X.509 Certificates, so
                    // break the cycle and check
                    // whether it is PKCS7 structure
                    break;
                } else {
                    // it can be trailing user data,
                    // so return what we already read
                    return result;
                }
            } else {
                if (encoding == null) {
                    result.add(getCertificate(inStream));
                } else {
                    result.add(getCertificate(encoding));
                }
            }
            // mark for the next iteration
            inStream.mark(1);
        }
        if (result.size() != 0) {
            // some Certificates have been read
            return result;
        } else if (ch == -1) {
            throw new CertificateException(
                    Messages.getString("security.155")); //$NON-NLS-1$
        }
        // else: check if it is PKCS7
        if (second_asn1_tag == ASN1Constants.TAG_OID) {
            // it is PKCS7 ContentInfo structure, so decode it
            ContentInfo info = (ContentInfo)
                    ((encoding != null)
                            ? ContentInfo.ASN1.decode(encoding)
                            : ContentInfo.ASN1.decode(inStream));
            // retrieve SignedData
            SignedData data = info.getSignedData();
            if (data == null) {
                throw new CertificateException(
                        Messages.getString("security.154")); //$NON-NLS-1$
            }
            List certs = data.getCertificates();
            if (certs != null) {
                for (int i = 0; i < certs.size(); i++) {
                    result.add(new X509CertImpl(
                            (org.apache.harmony.security.x509.Certificate)
                                    certs.get(i)));
                }
            }
            return result;
        }
        // else: Unknown data format
        throw new CertificateException(
                Messages.getString("security.15F")); //$NON-NLS-1$
    } catch (IOException e) {
        throw new CertificateException(e);
    }
}
/**
 * Generates a single X.509 CRL from the data in the stream; both DER and
 * PEM encodings are accepted.
 *
 * @see java.security.cert.CertificateFactorySpi#engineGenerateCRL(InputStream)
 * method documentation for more info
 */
public CRL engineGenerateCRL(InputStream inStream)
        throws CRLException {
    if (inStream == null) {
        throw new CRLException(Messages.getString("security.153")); //$NON-NLS-1$
    }
    try {
        // mark/reset is needed to undo the one-byte format probe below
        InputStream source = inStream.markSupported()
                ? inStream
                : new RestoringInputStream(inStream);
        source.mark(1);
        int first = source.read();
        if (first == '-') {
            // PEM form: strip the boundaries and Base64-decode first
            return getCRL(decodePEM(source, FREE_BOUND_SUFFIX));
        }
        // DER form: undo the probe and parse the stream directly
        source.reset();
        return getCRL(source);
    } catch (IOException e) {
        throw new CRLException(e);
    }
}
/**
 * Generates the collection of CRLs from the encodings provided by the input
 * stream. The stream may contain a concatenation of PEM and/or DER encoded
 * X.509 CRLs, or a single PKCS7 ContentInfo/SignedData structure whose
 * <code>crls</code> field is returned.
 *
 * @param inStream stream containing one or more CRL encodings
 * @return the CRLs read from the stream, in encounter order
 * @throws CRLException if the stream is null or empty, or the data is in an
 *         unsupported format
 * @see java.security.cert.CertificateFactorySpi#engineGenerateCRLs(InputStream)
 * method documentation for more info
 */
public Collection<? extends CRL> engineGenerateCRLs(InputStream inStream)
        throws CRLException {
    if (inStream == null) {
        throw new CRLException(Messages.getString("security.153")); //$NON-NLS-1$
    }
    // parameterized list (the original raw ArrayList produced an unchecked
    // conversion at the Collection<? extends CRL> return)
    ArrayList<CRL> result = new ArrayList<CRL>();
    try {
        if (!inStream.markSupported()) {
            inStream = new RestoringInputStream(inStream);
        }
        // if it is PEM encoded form this array will contain the encoding
        // so ((it is PEM) <-> (encoding != null))
        byte[] encoding = null;
        // The second ASN.1 tag of the structure, used for recognizing the
        // data format (is it PKCS7 ContentInfo structure, X.509 CRL, or
        // unsupported encoding)
        int second_asn1_tag = -1;
        inStream.mark(1);
        int ch;
        while ((ch = inStream.read()) != -1) {
            // check if it is PEM encoded form
            if (ch == '-') { // beginning of PEM encoding ('-' char)
                // decode PEM chunk and store its content (ASN.1 encoding)
                encoding = decodePEM(inStream, FREE_BOUND_SUFFIX);
            } else if (ch == 0x30) { // beginning of ASN.1 sequence (0x30)
                encoding = null;
                inStream.reset();
                // prepare for data format determination
                inStream.mark(CRL_CACHE_SEED_LENGTH);
            } else { // unsupported data
                if (result.size() == 0) {
                    throw new CRLException(
                            Messages.getString("security.15F")); //$NON-NLS-1$
                } else {
                    // it can be trailing user data,
                    // so keep it in the stream
                    inStream.reset();
                    return result;
                }
            }
            // Check the data format
            BerInputStream in = (encoding == null)
                    ? new BerInputStream(inStream)
                    : new BerInputStream(encoding);
            // read the next ASN.1 tag
            second_asn1_tag = in.next();
            if (encoding == null) {
                // keep whole structure in the stream
                inStream.reset();
            }
            // check if it is a TBSCertList structure
            if (second_asn1_tag != ASN1Constants.TAG_C_SEQUENCE) {
                if (result.size() == 0) {
                    // there were not read X.509 CRLs, so
                    // break the cycle and check
                    // whether it is PKCS7 structure
                    break;
                } else {
                    // it can be trailing user data,
                    // so return what we already read
                    return result;
                }
            } else {
                if (encoding == null) {
                    result.add(getCRL(inStream));
                } else {
                    result.add(getCRL(encoding));
                }
            }
            // mark for the next iteration
            inStream.mark(1);
        }
        if (result.size() != 0) {
            // the stream was read out
            return result;
        } else if (ch == -1) {
            throw new CRLException(
                    Messages.getString("security.155")); //$NON-NLS-1$
        }
        // else: check if it is PKCS7
        if (second_asn1_tag == ASN1Constants.TAG_OID) {
            // it is PKCS7 ContentInfo structure, so decode it
            ContentInfo info = (ContentInfo)
                    ((encoding != null)
                            ? ContentInfo.ASN1.decode(encoding)
                            : ContentInfo.ASN1.decode(inStream));
            // retrieve SignedData
            SignedData data = info.getSignedData();
            if (data == null) {
                throw new CRLException(
                        Messages.getString("security.154")); //$NON-NLS-1$
            }
            List crls = data.getCRLs();
            if (crls != null) {
                for (int i = 0; i < crls.size(); i++) {
                    result.add(new X509CRLImpl(
                            (CertificateList) crls.get(i)));
                }
            }
            return result;
        }
        // else: Unknown data format
        throw new CRLException(
                Messages.getString("security.15F")); //$NON-NLS-1$
    } catch (IOException e) {
        throw new CRLException(e);
    }
}
/**
 * Generates a certification path from the stream using the default
 * "PkiPath" encoding.
 *
 * @see java.security.cert.CertificateFactorySpi#engineGenerateCertPath(InputStream)
 * method documentation for more info
 */
public CertPath engineGenerateCertPath(InputStream inStream)
        throws CertificateException {
    if (inStream != null) {
        // delegate to the encoding-aware overload
        return engineGenerateCertPath(inStream, "PkiPath"); //$NON-NLS-1$
    }
    throw new CertificateException(
            Messages.getString("security.153")); //$NON-NLS-1$
}
/**
 * Generates a certification path from the stream in the named encoding.
 * Accepts either a PEM chunk (leading '-') or a raw ASN.1 SEQUENCE.
 *
 * @see java.security.cert.CertificateFactorySpi#engineGenerateCertPath(InputStream,String)
 * method documentation for more info
 */
public CertPath engineGenerateCertPath(
        InputStream inStream, String encoding) throws CertificateException {
    if (inStream == null) {
        throw new CertificateException(
                Messages.getString("security.153")); //$NON-NLS-1$
    }
    InputStream source = inStream.markSupported()
            ? inStream
            : new RestoringInputStream(inStream);
    try {
        // probe one byte to decide between PEM and DER
        source.mark(1);
        int first = source.read();
        if (first == '-') {
            // PEM chunk: decode to ASN.1 form, then build the CertPath
            return X509CertPathImpl.getInstance(
                    decodePEM(source, FREE_BOUND_SUFFIX), encoding);
        }
        if (first == 0x30) { // ASN.1 SEQUENCE
            source.reset();
            // decode ASN.1 form directly from the stream
            return X509CertPathImpl.getInstance(source, encoding);
        }
        throw new CertificateException(
                Messages.getString("security.15F")); //$NON-NLS-1$
    } catch (IOException e) {
        throw new CertificateException(e);
    }
}
/**
 * Generates a certification path object from the provided list of
 * certificates; the list is passed straight to {@link X509CertPathImpl},
 * which performs validation of its contents.
 *
 * @see java.security.cert.CertificateFactorySpi#engineGenerateCertPath(List)
 * method documentation for more info
 */
public CertPath engineGenerateCertPath(List certificates)
        throws CertificateException {
    return new X509CertPathImpl(certificates);
}
/**
 * Returns an iterator over the certification path encodings supported by
 * {@link X509CertPathImpl}.
 *
 * @see java.security.cert.CertificateFactorySpi#engineGetCertPathEncodings()
 * method documentation for more info
 */
public Iterator<String> engineGetCertPathEncodings() {
    Iterator<String> supported = X509CertPathImpl.encodings.iterator();
    return supported;
}
// ---------------------------------------------------------------------
// ------------------------ Support methods ----------------------------
// ---------------------------------------------------------------------

// ASCII bytes of the PEM opening boundary prefix: "-----BEGIN"
private static byte[] pemBegin;
// ASCII bytes of the PEM closing boundary prefix: "-----END"
private static byte[] pemClose;

/**
 * Code describing free format for PEM boundary suffix:
 * "^-----BEGIN.*\n" at the beginning, and<br>
 * "\n-----END.*(EOF|\n)$" at the end.
 */
private static byte[] FREE_BOUND_SUFFIX = null;

/**
 * Code describing PEM boundary suffix for X.509 certificate:
 * "^-----BEGIN CERTIFICATE-----\n" at the beginning, and<br>
 * "\n-----END CERTIFICATE-----" at the end.
 */
private static byte[] CERT_BOUND_SUFFIX;

static {
    // Initialise statics
    try {
        pemBegin = "-----BEGIN".getBytes("UTF-8"); //$NON-NLS-1$ //$NON-NLS-2$
        pemClose = "-----END".getBytes("UTF-8"); //$NON-NLS-1$ //$NON-NLS-2$
        CERT_BOUND_SUFFIX = " CERTIFICATE-----".getBytes("UTF-8"); //$NON-NLS-1$ //$NON-NLS-2$
    } catch (UnsupportedEncodingException e) {
        // UTF-8 support is mandatory for every JVM, so this branch is
        // effectively unreachable; chain the cause (the original code
        // passed only e.getMessage(), losing the stack trace).
        throw new RuntimeException(e);
    }
}
/**
 * Method retrieves the PEM encoded data from the stream
 * and returns its decoded representation.
 * Method checks correctness of PEM boundaries. It supposes that
 * the first '-' of the opening boundary has already been read from
 * the stream. So first of all it checks that the leading bytes
 * are equal to "-----BEGIN" boundary prefix. Than if boundary_suffix
 * is not null, it checks that next bytes equal to boundary_suffix
 * + new line char[s] ([CR]LF).
 * If boundary_suffix parameter is null, method supposes free suffix
 * format and skips any bytes until the new line.<br>
 * After the opening boundary has been read and checked, the method
 * read Base64 encoded data until closing PEM boundary is not reached.<br>
 * Than it checks closing boundary - it should start with new line +
 * "-----END" + boundary_suffix. If boundary_suffix is null,
 * any characters are skipped until the new line.<br>
 * After this any trailing new line characters are skipped from the stream,
 * Base64 encoding is decoded and returned.
 * @param inStream the stream containing the PEM encoding.
 * @param boundary_suffix the suffix of expected PEM multipart
 * boundary delimiter.<br>
 * If it is null, that any character sequences are accepted.
 * @return the DER bytes obtained by Base64-decoding the PEM body
 * @throws IOException If PEM boundary delimiter does not comply
 * with expected or some I/O or decoding problems occur.
 */
private byte[] decodePEM(InputStream inStream, byte[] boundary_suffix)
        throws IOException {
    int ch; // the char to be read
    // check and skip opening boundary delimiter
    // (first '-' is supposed as already read)
    for (int i=1; i<pemBegin.length; i++) {
        if (pemBegin[i] != (ch = inStream.read())) {
            throw new IOException(
                "Incorrect PEM encoding: '-----BEGIN"
                + ((boundary_suffix == null)
                    ? "" : new String(boundary_suffix))
                + "' is expected as opening delimiter boundary.");
        }
    }
    if (boundary_suffix == null) {
        // free-suffix mode: read (skip) the trailing characters of
        // the beginning PEM boundary delimiter up to the newline
        while ((ch = inStream.read()) != '\n') {
            if (ch == -1) {
                throw new IOException(
                        Messages.getString("security.156")); //$NON-NLS-1$
            }
        }
    } else {
        // strict mode: the boundary suffix must match byte-for-byte
        for (int i=0; i<boundary_suffix.length; i++) {
            if (boundary_suffix[i] != inStream.read()) {
                throw new IOException(
                        Messages.getString("security.15B", //$NON-NLS-1$
                            new String(boundary_suffix))); //$NON-NLS-1$
            }
        }
        // read new line characters: accept either LF or CRLF
        if ((ch = inStream.read()) == '\r') {
            // CR has been read, now read LF character
            ch = inStream.read();
        }
        if (ch != '\n') {
            throw new IOException(
                    Messages.getString("security.15B2")); //$NON-NLS-1$
        }
    }
    int size = 1024; // the size of the buffer containing Base64 data
    byte[] buff = new byte[size];
    int index = 0;
    // read Base64 body bytes (including embedded newlines) until the
    // first '-' of the closing boundary delimiter is reached
    while ((ch = inStream.read()) != '-') {
        if (ch == -1) {
            throw new IOException(
                    Messages.getString("security.157")); //$NON-NLS-1$
        }
        buff[index++] = (byte) ch;
        if (index == size) {
            // enlarge the buffer by a fixed 1 KB increment
            byte[] newbuff = new byte[size+1024];
            System.arraycopy(buff, 0, newbuff, 0, size);
            buff = newbuff;
            size += 1024;
        }
    }
    // the byte just before the closing boundary must be a newline
    // NOTE(review): if the PEM body is empty, index is 0 here and
    // buff[index-1] throws ArrayIndexOutOfBoundsException rather than
    // IOException — TODO confirm whether empty bodies can occur
    if (buff[index-1] != '\n') {
        throw new IOException(
                Messages.getString("security.158")); //$NON-NLS-1$
    }
    // check and skip closing boundary delimiter prefix "-----END"
    // (first '-' was read)
    for (int i=1; i<pemClose.length; i++) {
        if (pemClose[i] != inStream.read()) {
            throw new IOException(
                    Messages.getString("security.15B1", //$NON-NLS-1$
                        ((boundary_suffix == null)
                         ? ""
                         : new String(boundary_suffix)))); //$NON-NLS-1$
        }
    }
    if (boundary_suffix == null) {
        // free-suffix mode: read (skip) the trailing characters of
        // the closing PEM boundary delimiter until newline or EOF
        while (((ch = inStream.read()) != -1)
                && (ch != '\n') && (ch != '\r')) {
        }
    } else {
        // strict mode: closing suffix must match byte-for-byte
        for (int i=0; i<boundary_suffix.length; i++) {
            if (boundary_suffix[i] != inStream.read()) {
                throw new IOException(
                        Messages.getString("security.15B1", //$NON-NLS-1$
                            new String(boundary_suffix))); //$NON-NLS-1$
            }
        }
    }
    // skip trailing line breaks, leaving the stream positioned at the
    // first non-newline byte (mark/reset keeps that byte unconsumed)
    inStream.mark(1);
    while (((ch = inStream.read()) != -1) && (ch == '\n' || ch == '\r')) {
        inStream.mark(1);
    }
    inStream.reset();
    buff = Base64.decode(buff, index);
    if (buff == null) {
        throw new IOException(Messages.getString("security.159")); //$NON-NLS-1$
    }
    return buff;
};
/**
 * Reads exactly {@code length} bytes from the source, one byte at a time.
 *
 * @return the byte array containing the read data, or
 * null if the stream ends before {@code length} bytes are available
 * @throws IOException if some I/O error has been occurred.
 */
private static byte[] readBytes(InputStream source, int length)
        throws IOException {
    byte[] result = new byte[length];
    int filled = 0;
    while (filled < length) {
        int next = source.read();
        if (next == -1) {
            // stream exhausted before the requested count was reached
            return null;
        }
        result[filled++] = (byte) next;
    }
    return result;
}
/**
 * Returns the Certificate object corresponding to the provided encoding.
 * The result is served from the cache when an entry with the same encoding
 * exists; otherwise it is decoded from ASN.1 and stored in the cache.
 *
 * @throws CertificateException if the encoding is shorter than the cache
 *         seed length
 * @throws IOException if some decoding errors occur
 * (in the case of cache miss).
 */
private static Certificate getCertificate(byte[] encoding)
        throws CertificateException, IOException {
    if (encoding.length < CERT_CACHE_SEED_LENGTH) {
        throw new CertificateException(
                Messages.getString("security.152")); //$NON-NLS-1$
    }
    synchronized (CERT_CACHE) {
        long hash = CERT_CACHE.getHash(encoding);
        // cache hit path: getHash only matches candidates, so the full
        // encoding is compared inside Cache.get
        Certificate cached = CERT_CACHE.contains(hash)
                ? (Certificate) CERT_CACHE.get(hash, encoding)
                : null;
        if (cached != null) {
            return cached;
        }
        // cache miss: decode and remember the result
        Certificate created = new X509CertImpl(encoding);
        CERT_CACHE.put(hash, encoding, created);
        return created;
    }
}
/**
 * Returns the Certificate object corresponding to the encoding provided
 * by the stream. The result is served from the cache when an entry with
 * the same encoding exists; otherwise it is decoded from the stream and
 * stored in the cache.
 *
 * @throws CertificateException if the stream does not contain enough data
 * @throws IOException if some decoding errors occur
 * (in the case of cache miss).
 */
private static Certificate getCertificate(InputStream inStream)
        throws CertificateException, IOException {
    synchronized (CERT_CACHE) {
        inStream.mark(CERT_CACHE_SEED_LENGTH);
        // read the prefix of the encoding
        byte[] buff = readBytes(inStream, CERT_CACHE_SEED_LENGTH);
        inStream.reset();
        if (buff == null) {
            throw new CertificateException(
                    Messages.getString("security.152")); //$NON-NLS-1$
        }
        long hash = CERT_CACHE.getHash(buff);
        if (CERT_CACHE.contains(hash)) {
            // possible cache hit: pull the whole structure into memory
            byte[] encoding = new byte[BerInputStream.getLength(buff)];
            if (encoding.length < CERT_CACHE_SEED_LENGTH) {
                throw new CertificateException(
                        Messages.getString("security.15B3")); //$NON-NLS-1$
            }
            // Read the encoding completely. BUG FIX: a single
            // InputStream.read(byte[]) call may return fewer bytes than
            // requested, which previously left the tail of 'encoding'
            // zero-filled and the stream mispositioned.
            int offset = 0;
            while (offset < encoding.length) {
                int count = inStream.read(encoding, offset, encoding.length - offset);
                if (count < 0) {
                    // stream ended before the full structure was read
                    throw new CertificateException(
                            Messages.getString("security.152")); //$NON-NLS-1$
                }
                offset += count;
            }
            Certificate res = (Certificate) CERT_CACHE.get(hash, encoding);
            if (res != null) {
                return res;
            }
            res = new X509CertImpl(encoding);
            CERT_CACHE.put(hash, encoding, res);
            return res;
        } else {
            // definite cache miss: decode straight from the stream
            inStream.reset();
            Certificate res = new X509CertImpl(inStream);
            CERT_CACHE.put(hash, res.getEncoded(), res);
            return res;
        }
    }
}
/**
 * Returns the CRL object corresponding to the provided encoding.
 * The result is served from the cache when an entry with the same encoding
 * exists; otherwise it is decoded from ASN.1 and stored in the cache.
 *
 * @throws CRLException if the encoding is shorter than the cache seed length
 * @throws IOException if some decoding errors occur
 * (in the case of cache miss).
 */
private static CRL getCRL(byte[] encoding)
        throws CRLException, IOException {
    if (encoding.length < CRL_CACHE_SEED_LENGTH) {
        throw new CRLException(
                Messages.getString("security.152")); //$NON-NLS-1$
    }
    synchronized (CRL_CACHE) {
        long hash = CRL_CACHE.getHash(encoding);
        // cache hit path: the full encoding is compared inside Cache.get
        X509CRL cached = CRL_CACHE.contains(hash)
                ? (X509CRL) CRL_CACHE.get(hash, encoding)
                : null;
        if (cached != null) {
            return cached;
        }
        // cache miss: decode and remember the result
        X509CRL created = new X509CRLImpl(encoding);
        CRL_CACHE.put(hash, encoding, created);
        return created;
    }
}
/**
 * Returns the CRL object corresponding to the encoding provided
 * by the stream. The result is served from the cache when an entry with
 * the same encoding exists; otherwise it is decoded from the stream and
 * stored in the cache.
 *
 * @throws CRLException if the stream does not contain enough data
 * @throws IOException if some decoding errors occur
 * (in the case of cache miss).
 */
private static CRL getCRL(InputStream inStream)
        throws CRLException, IOException {
    synchronized (CRL_CACHE) {
        inStream.mark(CRL_CACHE_SEED_LENGTH);
        // read the prefix of the encoding
        byte[] buff = readBytes(inStream, CRL_CACHE_SEED_LENGTH);
        inStream.reset();
        if (buff == null) {
            throw new CRLException(
                    Messages.getString("security.152")); //$NON-NLS-1$
        }
        long hash = CRL_CACHE.getHash(buff);
        if (CRL_CACHE.contains(hash)) {
            // possible cache hit: pull the whole structure into memory
            byte[] encoding = new byte[BerInputStream.getLength(buff)];
            if (encoding.length < CRL_CACHE_SEED_LENGTH) {
                throw new CRLException(
                        Messages.getString("security.15B4")); //$NON-NLS-1$
            }
            // Read the encoding completely. BUG FIX: a single
            // InputStream.read(byte[]) call may return fewer bytes than
            // requested, which previously left the tail of 'encoding'
            // zero-filled and the stream mispositioned.
            int offset = 0;
            while (offset < encoding.length) {
                int count = inStream.read(encoding, offset, encoding.length - offset);
                if (count < 0) {
                    // stream ended before the full structure was read
                    throw new CRLException(
                            Messages.getString("security.152")); //$NON-NLS-1$
                }
                offset += count;
            }
            CRL res = (CRL) CRL_CACHE.get(hash, encoding);
            if (res != null) {
                return res;
            }
            res = new X509CRLImpl(encoding);
            CRL_CACHE.put(hash, encoding, res);
            return res;
        } else {
            // definite cache miss: decode straight from the stream
            X509CRL res = new X509CRLImpl(inStream);
            CRL_CACHE.put(hash, res.getEncoded(), res);
            return res;
        }
    }
}
/*
 * This class extends any existing input stream with
 * mark functionality. It acts as a wrapper over the
 * stream and supports reset to the
 * marked state with readlimit no more than BUFF_SIZE.
 * Note: the readlimit argument passed to mark(int) is ignored; the
 * effective limit is always the fixed BUFF_SIZE.
 */
private static class RestoringInputStream extends InputStream {

    // wrapped input stream
    private final InputStream inStream;

    // specifies how much of the read data is buffered
    // after the mark has been set up
    private static final int BUFF_SIZE = 32;

    // buffer to keep the bytes read after the mark has been set up
    // NOTE(review): indices used below (pos % BUFF_SIZE, end, bar) all fall
    // in [0, BUFF_SIZE), so the *2 sizing looks redundant — TODO confirm
    private final int[] buff = new int[BUFF_SIZE*2];

    // position of the next byte to read,
    // the value of -1 indicates that the buffer is not used
    // (mark was not set up or was invalidated, or reset to the marked
    // position has been done and all the buffered data was read out)
    private int pos = -1;

    // position of the last buffered byte
    private int bar = 0;

    // position in the buffer where the mark becomes invalidated
    private int end = 0;

    /**
     * Creates the mark supporting wrapper over the stream.
     */
    public RestoringInputStream(InputStream inStream) {
        this.inStream = inStream;
    }

    /**
     * Returns the buffered byte count plus the underlying stream's estimate.
     * NOTE(review): when the buffer is active, (bar - pos) mixes a modular
     * index (bar) with a monotonically increasing one (pos), so the value
     * may be inaccurate — TODO confirm callers only use this as a hint.
     *
     * @see java.io.InputStream#available()
     * method documentation for more info
     */
    public int available() throws IOException {
        return (bar - pos) + inStream.available();
    }

    /**
     * Closes the underlying stream; buffered state is not cleared.
     *
     * @see java.io.InputStream#close()
     * method documentation for more info
     */
    public void close() throws IOException {
        inStream.close();
    }

    /**
     * Marks the current position. The readlimit parameter is ignored —
     * at most BUFF_SIZE - 1 bytes can be read before the mark is
     * invalidated.
     *
     * @see java.io.InputStream#mark(int readlimit)
     * method documentation for more info
     */
    public void mark(int readlimit) {
        if (pos < 0) {
            // buffer not in use yet: start buffering from scratch
            pos = 0;
            bar = 0;
            end = BUFF_SIZE - 1;
        } else {
            // buffer already active: slide the invalidation point so the
            // mark covers the next BUFF_SIZE - 1 bytes from 'pos'
            end = (pos + BUFF_SIZE - 1) % BUFF_SIZE;
        }
    }

    /**
     * Always true: mark/reset is the whole purpose of this wrapper.
     *
     * @see java.io.InputStream#markSupported()
     * method documentation for more info
     */
    public boolean markSupported() {
        return true;
    }

    /**
     * Reads the byte from the stream. If mark has been set up
     * and was not invalidated byte is read from the underlying
     * stream and saved into the buffer. If the current read position
     * has been reset to the marked position and there are remaining
     * bytes in the buffer, the byte is taken from it. In the other cases
     * (if mark has been invalidated, or there are no buffered bytes)
     * the byte is taken directly from the underlying stream and it is
     * returned without saving to the buffer.
     *
     * @see java.io.InputStream#read()
     * method documentation for more info
     */
    public int read() throws IOException {
        // if buffer is currently used
        if (pos >= 0) {
            // current position in the buffer
            int cur = pos % BUFF_SIZE;
            // check whether the buffer contains the data to be read
            if (cur < bar) {
                // return the data from the buffer (replay after reset)
                pos++;
                return buff[cur];
            }
            // check whether buffer has free space
            if (cur != end) {
                // it has, so read the data from the wrapped stream
                // and place it in the buffer
                buff[cur] = inStream.read();
                bar = cur+1;
                pos++;
                return buff[cur];
            } else {
                // buffer if full and can not operate
                // any more, so invalidate the mark position
                // and turn off the using of buffer
                pos = -1;
            }
        }
        // buffer is not used, so return the data from the wrapped stream
        return inStream.read();
    }

    /**
     * Bulk read implemented on top of the single-byte read() above, so
     * buffering semantics stay consistent.
     *
     * @see java.io.InputStream#read(byte[] b)
     * method documentation for more info
     */
    public int read(byte[] b) throws IOException {
        return read(b, 0, b.length);
    }

    /**
     * Reads up to len bytes via read(); returns -1 only when no byte at
     * all could be read.
     *
     * @see java.io.InputStream#read(byte[] b, int off, int len)
     * method documentation for more info
     */
    public int read(byte[] b, int off, int len) throws IOException {
        int read_b;
        int i;
        for (i=0; i<len; i++) {
            if ((read_b = read()) == -1) {
                return (i == 0) ? -1 : i;
            }
            b[off+i] = (byte) read_b;
        }
        return i;
    }

    /**
     * Rewinds to the marked position; fails if the mark was never set or
     * has been invalidated by reading past the buffer capacity.
     *
     * @see java.io.InputStream#reset()
     * method documentation for more info
     */
    public void reset() throws IOException {
        if (pos >= 0) {
            // the marked position is the slot right after 'end'
            pos = (end + 1) % BUFF_SIZE;
        } else {
            throw new IOException(
                    Messages.getString("security.15A")); //$NON-NLS-1$
        }
    }

    /**
     * Skips n bytes. While the buffer is active, bytes are skipped through
     * read() (capped by available()) so buffered state stays consistent;
     * otherwise delegates to the wrapped stream.
     *
     * @see java.io.InputStream#skip(long n)
     * method documentation for more info
     */
    public long skip(long n) throws IOException {
        if (pos >= 0) {
            long i = 0;
            int av = available();
            if (av < n) {
                n = av;
            }
            while ((i < n) && (read() != -1)) {
                i++;
            }
            return i;
        } else {
            return inStream.skip(n);
        }
    }
}
}
|
apache/incubator-brooklyn | 38,143 | brooklyn-server/core/src/main/java/org/apache/brooklyn/core/mgmt/internal/LocalEntityManager.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.brooklyn.core.mgmt.internal;
import static com.google.common.base.Preconditions.checkNotNull;
import groovy.util.ObservableList;
import java.lang.reflect.Proxy;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.WeakHashMap;
import java.util.concurrent.ConcurrentMap;
import javax.annotation.Nullable;
import org.apache.brooklyn.api.entity.Application;
import org.apache.brooklyn.api.entity.Entity;
import org.apache.brooklyn.api.entity.EntitySpec;
import org.apache.brooklyn.api.entity.EntityTypeRegistry;
import org.apache.brooklyn.api.entity.Group;
import org.apache.brooklyn.api.mgmt.AccessController;
import org.apache.brooklyn.api.mgmt.Task;
import org.apache.brooklyn.api.policy.Policy;
import org.apache.brooklyn.api.policy.PolicySpec;
import org.apache.brooklyn.api.sensor.Enricher;
import org.apache.brooklyn.api.sensor.EnricherSpec;
import org.apache.brooklyn.core.BrooklynLogging;
import org.apache.brooklyn.core.entity.AbstractEntity;
import org.apache.brooklyn.core.entity.Entities;
import org.apache.brooklyn.core.entity.EntityInternal;
import org.apache.brooklyn.core.entity.EntityPredicates;
import org.apache.brooklyn.core.entity.trait.Startable;
import org.apache.brooklyn.core.internal.storage.BrooklynStorage;
import org.apache.brooklyn.core.mgmt.BrooklynTaskTags;
import org.apache.brooklyn.core.objs.BasicEntityTypeRegistry;
import org.apache.brooklyn.core.objs.proxy.EntityProxy;
import org.apache.brooklyn.core.objs.proxy.EntityProxyImpl;
import org.apache.brooklyn.core.objs.proxy.InternalEntityFactory;
import org.apache.brooklyn.core.objs.proxy.InternalPolicyFactory;
import org.apache.brooklyn.util.collections.MutableSet;
import org.apache.brooklyn.util.collections.SetFromLiveMap;
import org.apache.brooklyn.util.core.task.Tasks;
import org.apache.brooklyn.util.exceptions.Exceptions;
import org.apache.brooklyn.util.time.CountdownTimer;
import org.apache.brooklyn.util.time.Duration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.annotations.Beta;
import com.google.common.base.Function;
import com.google.common.base.Preconditions;
import com.google.common.base.Predicate;
import com.google.common.base.Predicates;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
public class LocalEntityManager implements EntityManagerInternal {
private static final Logger log = LoggerFactory.getLogger(LocalEntityManager.class);
private final LocalManagementContext managementContext;
private final BasicEntityTypeRegistry entityTypeRegistry;
private final InternalEntityFactory entityFactory;
private final InternalPolicyFactory policyFactory;
/** Entities that have been created, but have not yet begun to be managed */
protected final Map<String,Entity> preRegisteredEntitiesById = Collections.synchronizedMap(new WeakHashMap<String, Entity>());
/** Entities that are in the process of being managed, but where management is not yet complete */
protected final Map<String,Entity> preManagedEntitiesById = Collections.synchronizedMap(new WeakHashMap<String, Entity>());
/** Proxies of the managed entities */
protected final ConcurrentMap<String,Entity> entityProxiesById = Maps.newConcurrentMap();
/** Real managed entities */
protected final Map<String,Entity> entitiesById = Maps.newLinkedHashMap();
/** Management mode for each entity */
protected final Map<String,ManagementTransitionMode> entityModesById = Collections.synchronizedMap(Maps.<String,ManagementTransitionMode>newLinkedHashMap());
/** Proxies of the managed entities */
protected final ObservableList entities = new ObservableList();
/** Proxies of the managed entities that are applications */
protected final Set<Application> applications = Sets.newConcurrentHashSet();
private final BrooklynStorage storage;
private final Map<String,String> entityTypes;
private final Set<String> applicationIds;
/**
 * Creates the entity manager for the given management context, wiring up
 * the type registry and the entity/policy factories, and binding the
 * entity-type and application-id views to the context's storage maps
 * (presumably so entries are visible wherever that storage is shared —
 * TODO confirm against BrooklynStorage semantics).
 */
public LocalEntityManager(LocalManagementContext managementContext) {
    this.managementContext = checkNotNull(managementContext, "managementContext");
    this.storage = managementContext.getStorage();
    this.entityTypeRegistry = new BasicEntityTypeRegistry();
    this.policyFactory = new InternalPolicyFactory(managementContext);
    this.entityFactory = new InternalEntityFactory(managementContext, entityTypeRegistry, policyFactory);

    // "entities" maps entity id -> type name; "applications" is a
    // Map<String,Boolean> used as a live set of application ids
    entityTypes = storage.getMap("entities");
    applicationIds = SetFromLiveMap.create(storage.<String,Boolean>getMap("applications"));
}
/**
 * Returns the factory used to instantiate entities.
 *
 * @throws IllegalStateException if the management context is not running
 */
public InternalEntityFactory getEntityFactory() {
    if (!isRunning()) {
        throw new IllegalStateException("Management context no longer running");
    }
    return entityFactory;
}
/**
 * Returns the factory used to instantiate policies and enrichers.
 *
 * @throws IllegalStateException if the management context is not running
 */
public InternalPolicyFactory getPolicyFactory() {
    if (!isRunning()) {
        throw new IllegalStateException("Management context no longer running");
    }
    return policyFactory;
}
/**
 * Returns the registry of entity type information.
 *
 * @throws IllegalStateException if the management context is not running
 */
@Override
public EntityTypeRegistry getEntityTypeRegistry() {
    if (!isRunning()) {
        throw new IllegalStateException("Management context no longer running");
    }
    return entityTypeRegistry;
}
/**
 * Creates an entity from the spec, begins managing it, and returns its
 * proxy (callers must never hold the concrete instance directly).
 */
@SuppressWarnings("unchecked")
@Override
public <T extends Entity> T createEntity(EntitySpec<T> spec) {
    try {
        T created = entityFactory.createEntity(spec);
        Entity proxy = ((AbstractEntity) created).getProxy();
        checkNotNull(proxy, "proxy for entity %s, spec %s", created, spec);
        manage(created);
        return (T) proxy;
    } catch (Throwable e) {
        log.warn("Failed to create entity using spec "+spec+" (rethrowing)", e);
        throw Exceptions.propagate(e);
    }
}
/** Convenience overload: builds a spec from the config map and type, then delegates. */
@Override
public <T extends Entity> T createEntity(Map<?,?> config, Class<T> type) {
    EntitySpec<T> spec = EntitySpec.create(config, type);
    return createEntity(spec);
}
/** Creates a policy from the given spec; failures are logged then rethrown unchecked. */
@Override
public <T extends Policy> T createPolicy(PolicySpec<T> spec) {
    try {
        return policyFactory.createPolicy(spec);
    } catch (Throwable t) {
        log.warn("Failed to create policy using spec "+spec+" (rethrowing)", t);
        throw Exceptions.propagate(t);
    }
}
/** Creates an enricher from the given spec; failures are logged then rethrown unchecked. */
@Override
public <T extends Enricher> T createEnricher(EnricherSpec<T> spec) {
    try {
        return policyFactory.createEnricher(spec);
    } catch (Throwable t) {
        log.warn("Failed to create enricher using spec "+spec+" (rethrowing)", t);
        throw Exceptions.propagate(t);
    }
}
/** Returns an immutable snapshot of all managed entity proxies. */
@Override
public Collection<Entity> getEntities() {
    return ImmutableList.<Entity>builder().addAll(entityProxiesById.values()).build();
}
/** Returns an immutable snapshot of the ids of all managed entities. */
@Override
public Collection<String> getEntityIds() {
    return ImmutableList.<String>builder().addAll(entityProxiesById.keySet()).build();
}
/** Returns an immutable snapshot of managed entity proxies belonging to the given application. */
@Override
public Collection<Entity> getEntitiesInApplication(Application application) {
    String appId = application.getId();
    return ImmutableList.copyOf(
            Iterables.filter(entityProxiesById.values(), EntityPredicates.applicationIdEqualTo(appId)));
}
/** Returns an immutable snapshot of managed entity proxies matching the given filter. */
@Override
public Collection<Entity> findEntities(Predicate<? super Entity> filter) {
    Iterable<Entity> matches = Iterables.filter(entityProxiesById.values(), filter);
    return ImmutableList.copyOf(matches);
}
/** Returns an immutable snapshot of proxies in the given application that also match the filter. */
@Override
public Collection<Entity> findEntitiesInApplication(Application application, Predicate<? super Entity> filter) {
    Predicate<Entity> inApp = EntityPredicates.applicationIdEqualTo(application.getId());
    Iterable<Entity> matches = Iterables.filter(entityProxiesById.values(), Predicates.and(inApp, filter));
    return ImmutableList.copyOf(matches);
}
/**
 * Returns proxies of all entities in the given application, including entities still being
 * pre-registered or pre-managed (not just fully managed ones), de-duplicated as a set.
 */
@Override
public Iterable<Entity> getAllEntitiesInApplication(Application application) {
    final Predicate<Entity> inApp = EntityPredicates.applicationIdEqualTo(application.getId());
    Iterable<Entity> candidates = Iterables.concat(
            preRegisteredEntitiesById.values(), preManagedEntitiesById.values(), entityProxiesById.values());
    Function<Entity, Entity> toProxy = new Function<Entity, Entity>() {
        @Override public Entity apply(Entity input) {
            return Entities.proxy(input);
        }
    };
    return ImmutableSet.copyOf(Iterables.transform(Iterables.filter(candidates, inApp), toProxy));
}
/** Returns the proxy of the managed entity with the given id, or null if not managed. */
@Override
public Entity getEntity(String id) {
    return entityProxiesById.get(id);
}
/** Returns an immutable snapshot of the managed application proxies. */
Collection<Application> getApplications() {
    return ImmutableList.<Application>builder().addAll(applications).build();
}
/** Whether the entity is fully managed here (requires the management context to be running). */
@Override
public boolean isManaged(Entity e) {
    if (!isRunning()) return false;
    return getEntity(e.getId()) != null;
}
/** Whether the entity has been pre-registered (see {@code prePreManage}). */
boolean isPreRegistered(Entity e) {
    String id = e.getId();
    return preRegisteredEntitiesById.containsKey(id);
}
/** Pre-registers the entity; a redundant call is logged (with its stack) and ignored. */
void prePreManage(Entity entity) {
    if (!isPreRegistered(entity)) {
        preRegisteredEntitiesById.put(entity.getId(), entity);
        return;
    }
    log.warn(""+this+" redundant call to pre-pre-manage entity "+entity+"; skipping",
        new Exception("source of duplicate pre-pre-manage of "+entity));
}
/** Returns the transition mode last recorded for the given item id, or null if none. */
@Override
public ManagementTransitionMode getLastManagementTransitionMode(String itemId) {
    ManagementTransitionMode mode = entityModesById.get(itemId);
    return mode;
}
/** Records the transition mode for the given item, keyed by its id. */
@Override
public void setManagementTransitionMode(Entity item, ManagementTransitionMode mode) {
    String id = item.getId();
    entityModesById.put(id, mode);
}
// TODO synchronization: we guard with isManaged(), but a concurrent caller could mark the
// entity managed between our check and onManagementStarting. We cannot simply synchronize
// because alien code is invoked (users may override entity.onManagementStarting etc).
//
// TODO we should also consider isPreManaged — another thread (or a re-entrant call) may be
// mid-way through managing the entity (isManaged==false but management in progress).
//
// TODO see also LocalLocationManager.manage(Entity) if fixing things here
@Override
public void manage(Entity e) {
    if (!isManaged(e)) {
        manageRecursive(e, ManagementTransitionMode.guessing(BrooklynObjectManagementMode.NONEXISTENT, BrooklynObjectManagementMode.MANAGED_PRIMARY));
        return;
    }
    log.warn(""+this+" redundant call to start management of entity (and descendants of) "+e+"; skipping",
        new Exception("source of duplicate management of "+e));
}
/** Re-manages a rebinding root, using the transition mode previously recorded for its id. */
@Override
public void manageRebindedRoot(Entity item) {
    final ManagementTransitionMode mode = getLastManagementTransitionMode(item.getId());
    Preconditions.checkNotNull(mode, "Mode not set for rebinding %s", item);
    manageRecursive(item, mode);
}
/** Consults the access controller; throws if management of the item is forbidden. */
protected void checkManagementAllowed(Entity item) {
    AccessController.Response access = managementContext.getAccessController().canManageEntity(item);
    if (access.isAllowed()) return;
    throw new IllegalStateException("Access controller forbids management of "+item+": "+access.getMsg());
}
/* TODO we sloppily use "recursive" to ensure ordering of parent-first in many places
 * (which may not be necessary but seems like a good idea),
 * and also to collect many entities when doing a big rebind,
 * ensuring all have #manageNonRecursive called before calling #onManagementStarted.
 *
 * it would be better to have a manageAll(Map<Entity,ManagementTransitionMode> items)
 * method which did that in two phases, allowing us to selectively rebind,
 * esp when we come to want supporting different modes and different brooklyn nodes.
 *
 * the impl of manageAll could sort them with parents before children,
 * (and manageRecursive could simply populate a map and delegate to manageAll).
 *
 * manageRebindRoot would then go, and the (few) callers would construct the map.
 *
 * similarly we might want an unmanageAll(),
 * although possibly all unmanagement should be recursive, if we assume an entity's ancestors are always at least proxied
 * (and the non-recursive RO path here could maybe be dropped)
 */
/** Applies management lifecycle callbacks in two phases:
 * onManagementStarting for all entities first (parent-first, usually recursively),
 * then onManagementStarted for all of them afterwards. */
protected void manageRecursive(Entity e, final ManagementTransitionMode initialMode) {
    checkManagementAllowed(e);
    // entities successfully pre-managed in phase 1; phase 2 (onManagementStarted) runs over this list
    final List<EntityInternal> allEntities = Lists.newArrayList();
    // phase-1 visitor: returns false to prune the subtree (children are then skipped)
    Predicate<EntityInternal> manageEntity = new Predicate<EntityInternal>() { public boolean apply(EntityInternal it) {
        ManagementTransitionMode mode = getLastManagementTransitionMode(it.getId());
        if (mode==null) {
            // no mode recorded for this entity; inherit the mode of the recursion root
            setManagementTransitionMode(it, mode = initialMode);
        }
        // reconcile the entity's own read-only flag with the transition mode
        Boolean isReadOnlyFromEntity = it.getManagementSupport().isReadOnlyRaw();
        if (isReadOnlyFromEntity==null) {
            if (mode.isReadOnly()) {
                // should have been marked by rebinder
                log.warn("Read-only entity "+it+" not marked as such on call to manage; marking and continuing");
            }
            it.getManagementSupport().setReadOnly(mode.isReadOnly());
        } else {
            if (!isReadOnlyFromEntity.equals(mode.isReadOnly())) {
                log.warn("Read-only status at entity "+it+" ("+isReadOnlyFromEntity+") not consistent with management mode "+mode);
            }
        }
        if (it.getManagementSupport().isDeployed()) {
            if (mode.wasNotLoaded()) {
                // silently bail out
                return false;
            } else {
                if (mode.wasPrimary() && mode.isPrimary()) {
                    // active partial rebind; continue
                } else if (mode.wasReadOnly() && mode.isReadOnly()) {
                    // reload in RO mode
                } else {
                    // on initial non-RO rebind, should not have any deployed instances
                    log.warn("Already deployed "+it+" when managing "+mode+"/"+initialMode+"; ignoring this and all descendants");
                    return false;
                }
            }
        }
        // check RO status is consistent; a mismatch here is a programming error, so fail hard
        boolean isNowReadOnly = Boolean.TRUE.equals( ((EntityInternal)it).getManagementSupport().isReadOnly() );
        if (mode.isReadOnly()!=isNowReadOnly) {
            throw new IllegalStateException("Read-only status mismatch for "+it+": "+mode+" / RO="+isNowReadOnly);
        }
        allEntities.add(it);
        preManageNonRecursive(it, mode);
        it.getManagementSupport().onManagementStarting( new ManagementTransitionInfo(managementContext, mode) );
        return manageNonRecursive(it, mode);
    } };
    boolean isRecursive = true;
    if (initialMode.wasPrimary() && initialMode.isPrimary()) {
        // already managed, so this shouldn't be recursive
        // (in ActivePartialRebind we cheat, calling in to this method then skipping recursion).
        // it also falls through to here when doing a redundant promotion,
        // in that case we *should* be recursive; determine by checking whether a child exists and is preregistered.
        // the TODO above removing manageRebindRoot in favour of explicit mgmt list would clean this up a lot!
        Entity aChild = Iterables.getFirst(e.getChildren(), null);
        if (aChild!=null && isPreRegistered(aChild)) {
            log.debug("Managing "+e+" in mode "+initialMode+", doing this recursively because a child is preregistered");
        } else {
            log.debug("Managing "+e+" but skipping recursion, as mode is "+initialMode);
            isRecursive = false;
        }
    }
    if (!isRecursive) {
        manageEntity.apply( (EntityInternal)e );
    } else {
        recursively(e, manageEntity);
    }
    // phase 2: only after every entity is pre-managed, fire onManagementStarted and
    // notify the rebind change listener, skipping any that are already fully managed
    for (EntityInternal it : allEntities) {
        if (!it.getManagementSupport().isFullyManaged()) {
            ManagementTransitionMode mode = getLastManagementTransitionMode(it.getId());
            ManagementTransitionInfo info = new ManagementTransitionInfo(managementContext, mode);
            it.getManagementSupport().onManagementStarted(info);
            managementContext.getRebindManager().getChangeListener().onManaged(it);
        }
    }
}
/** Unmanages the entity, guessing the transition as primary-managed to nonexistent. */
@Override
public void unmanage(final Entity e) {
    // TODO don't want to guess; should we inspect state of e ? or maybe it doesn't matter ?
    ManagementTransitionMode guessed = ManagementTransitionMode.guessing(
            BrooklynObjectManagementMode.MANAGED_PRIMARY, BrooklynObjectManagementMode.NONEXISTENT);
    unmanage(e, guessed);
}
/** Unmanages the entity with an explicit transition mode (not a replacement). */
public void unmanage(final Entity e, final ManagementTransitionMode mode) {
    final boolean hasBeenReplaced = false;
    unmanage(e, mode, hasBeenReplaced);
}
/**
 * Stops management of the given entity.
 * <p>
 * If {@code hasBeenReplaced}, only the stopping/stopped lifecycle callbacks are invoked and
 * the registration maps are left alone, because a newer instance already occupies the same id
 * (and references to the old instance may still be held elsewhere).
 * Otherwise, a read-only copy is unmanaged singly, whereas a primary is unmanaged recursively
 * with all descendants.
 *
 * @param e the entity to unmanage
 * @param mode describes the transition (e.g. primary to nonexistent, read-only to unloaded)
 * @param hasBeenReplaced whether a newer instance has replaced this one under the same id
 */
private void unmanage(final Entity e, ManagementTransitionMode mode, boolean hasBeenReplaced) {
    if (shouldSkipUnmanagement(e)) return;
    final ManagementTransitionInfo info = new ManagementTransitionInfo(managementContext, mode);
    if (hasBeenReplaced) {
        // we are unmanaging an old instance after having replaced it
        // don't unmanage or even clear its fields, because there might be references to it
        if (mode.wasReadOnly()) {
            // if coming *from* read only; nothing needed
        } else {
            if (!mode.wasPrimary()) {
                log.warn("Unexpected mode "+mode+" for unmanage-replace "+e+" (applying anyway)");
            }
            // migrating away or in-place active partial rebind:
            ((EntityInternal)e).getManagementSupport().onManagementStopping(info);
            stopTasks(e);
            ((EntityInternal)e).getManagementSupport().onManagementStopped(info);
        }
        // do not remove from maps below, bail out now
        return;
    } else if (mode.wasReadOnly() && mode.isNoLongerLoaded()) {
        // we are unmanaging an instance (secondary); either stopping here or primary destroyed elsewhere
        ((EntityInternal)e).getManagementSupport().onManagementStopping(info);
        unmanageNonRecursive(e);
        stopTasks(e);
        ((EntityInternal)e).getManagementSupport().onManagementStopped(info);
        managementContext.getRebindManager().getChangeListener().onUnmanaged(e);
        if (managementContext.getGarbageCollector() != null) managementContext.getGarbageCollector().onUnmanaged(e);
    } else if (mode.wasPrimary() && mode.isNoLongerLoaded()) {
        // unmanaging a primary; currently this is done recursively
        /* TODO tidy up when it is recursive and when it isn't; if something is being unloaded or destroyed,
         * that probably *is* recursive, but the old mode might be different if in some cases things are read-only.
         * or maybe nothing needs to be recursive, we just make sure the callers (e.g. HighAvailabilityModeImpl.clearManagedItems)
         * call in a good order
         *
         * see notes above about recursive/manage/All/unmanageAll
         */
        // Need to store all child entities as onManagementStopping removes a child from the parent entity
        final List<EntityInternal> allEntities = Lists.newArrayList();
        recursively(e, new Predicate<EntityInternal>() { public boolean apply(EntityInternal it) {
            if (shouldSkipUnmanagement(it)) return false;
            allEntities.add(it);
            it.getManagementSupport().onManagementStopping(info);
            return true;
        } });
        for (EntityInternal it : allEntities) {
            if (shouldSkipUnmanagement(it)) continue;
            unmanageNonRecursive(it);
            stopTasks(it);
        }
        for (EntityInternal it : allEntities) {
            it.getManagementSupport().onManagementStopped(info);
            managementContext.getRebindManager().getChangeListener().onUnmanaged(it);
            // BUG FIX: previously notified the garbage collector of the root "e" on every
            // iteration, so descendants were never reported; notify for each entity "it"
            // (mirroring the change-listener call on the line above).
            if (managementContext.getGarbageCollector() != null) managementContext.getGarbageCollector().onUnmanaged(it);
        }
    } else {
        log.warn("Invalid mode for unmanage: "+mode+" on "+e+" (ignoring)");
    }
    // finally drop every registration held under this entity's id
    preRegisteredEntitiesById.remove(e.getId());
    preManagedEntitiesById.remove(e.getId());
    entityProxiesById.remove(e.getId());
    entitiesById.remove(e.getId());
    entityModesById.remove(e.getId());
}
/** Stops the entity's tasks without waiting for completion (no timeout). */
private void stopTasks(Entity entity) {
    final Duration noTimeout = null;
    stopTasks(entity, noTimeout);
}
/** stops all tasks (apart from any current one or its descendants) on this entity,
 * optionally -- if a timeout is given -- waiting for completion and warning on incomplete tasks */
@Beta
public void stopTasks(Entity entity, @Nullable Duration timeout) {
    // null timeout means fire-and-forget cancellation with no waiting
    CountdownTimer timeleft = timeout==null ? null : timeout.countdownTimer();
    // try forcibly interrupting tasks on managed entities
    Collection<Exception> exceptions = MutableSet.of();
    try {
        Set<Task<?>> tasksCancelled = MutableSet.of();
        for (Task<?> t: managementContext.getExecutionContext(entity).getTasks()) {
            if (entity.equals(BrooklynTaskTags.getContextEntity(Tasks.current())) && hasTaskAsAncestor(t, Tasks.current())) {
                // don't cancel if we are running inside a task on the target entity and
                // the task being considered is one we have submitted -- e.g. on "stop" don't cancel ourselves!
                // but if our current task is from another entity we probably do want to cancel them (we are probably invoking unmanage)
                continue;
            }
            if (!t.isDone()) {
                try {
                    log.debug("Cancelling "+t+" on "+entity);
                    tasksCancelled.add(t);
                    // interrupt the running thread, not just prevent a pending start
                    t.cancel(true);
                } catch (Exception e) {
                    // keep going: cancel as many tasks as possible, report failures at the end
                    Exceptions.propagateIfFatal(e);
                    log.debug("Error cancelling "+t+" on "+entity+" (will warn when all tasks are cancelled): "+e, e);
                    exceptions.add(e);
                }
            }
        }
        if (timeleft!=null) {
            Set<Task<?>> tasksIncomplete = MutableSet.of();
            // go through all tasks, not just cancelled ones, in case there are previously cancelled ones which are not complete
            for (Task<?> t: managementContext.getExecutionContext(entity).getTasks()) {
                if (hasTaskAsAncestor(t, Tasks.current()))
                    continue;
                // wait (up to the remaining budget) for this task and its internal tasks to end
                if (!Tasks.blockUntilInternalTasksEnded(t, timeleft.getDurationRemaining())) {
                    tasksIncomplete.add(t);
                }
            }
            if (!tasksIncomplete.isEmpty()) {
                log.warn("Incomplete tasks when stopping "+entity+": "+tasksIncomplete);
            }
            if (log.isTraceEnabled())
                log.trace("Cancelled "+tasksCancelled+" tasks for "+entity+", with "+
                    timeleft.getDurationRemaining()+" remaining (of "+timeout+"): "+tasksCancelled);
        } else {
            if (log.isTraceEnabled())
                log.trace("Cancelled "+tasksCancelled+" tasks for "+entity+": "+tasksCancelled);
        }
    } catch (Exception e) {
        // inspection itself failed; this is best-effort cleanup so warn rather than propagate
        Exceptions.propagateIfFatal(e);
        log.warn("Error inspecting tasks to cancel on unmanagement: "+e, e);
    }
    if (!exceptions.isEmpty())
        log.warn("Error when cancelling tasks for "+entity+" on unmanagement: "+Exceptions.create(exceptions));
}
/** Whether {@code potentialAncestor} is {@code t} itself or appears in t's submitted-by chain. */
private boolean hasTaskAsAncestor(Task<?> t, Task<?> potentialAncestor) {
    if (potentialAncestor == null) return false;
    // walk up the submission chain iteratively instead of recursing
    for (Task<?> cursor = t; cursor != null; cursor = cursor.getSubmittedByTask()) {
        if (cursor.equals(potentialAncestor)) return true;
    }
    return false;
}
/**
 * Activates management when an effector is invoked on an unmanaged entity,
 * warning unless the context is acceptable (currently only acceptable context is "start").
 * Walks up to the topmost unmanaged ancestor and manages from there, so parents are
 * managed before children.
 *
 * @param entity the entity on which the effector is being invoked
 * @param context identifies the trigger, typically the effector name
 */
void manageIfNecessary(Entity entity, Object context) {
    if (!isRunning()) {
        return; // TODO Still a race for terminate being called, and then isManaged below returning false
    } else if (((EntityInternal)entity).getManagementSupport().wasDeployed()) {
        return;
    } else if (isManaged(entity)) {
        return;
    } else if (isPreManaged(entity)) {
        return;
    } else if (Boolean.TRUE.equals(((EntityInternal)entity).getManagementSupport().isReadOnly())) {
        return;
    } else {
        // find the topmost ancestor that is not (being) managed; manage from there down
        Entity rootUnmanaged = entity;
        while (true) {
            Entity candidateUnmanagedParent = rootUnmanaged.getParent();
            if (candidateUnmanagedParent == null || isManaged(candidateUnmanagedParent) || isPreManaged(candidateUnmanagedParent))
                break;
            rootUnmanaged = candidateUnmanagedParent;
        }
        // BUG FIX: previously compared with == (reference identity), which only worked for
        // interned strings; use equals() so any context equal to the "start" effector name matches.
        if (Startable.START.getName().equals(context))
            log.info("Activating local management for {} on start", rootUnmanaged);
        else
            log.warn("Activating local management for {} due to effector invocation on {}: {}", new Object[]{rootUnmanaged, entity, context});
        manage(rootUnmanaged);
    }
}
/**
 * Applies the action to the entity and then, if the action returned true, to its children
 * depth-first. A false return prunes the whole subtree.
 */
private void recursively(Entity e, Predicate<EntityInternal> action) {
    Entity otherPreregistered = preRegisteredEntitiesById.get(e.getId());
    if (otherPreregistered!=null) {
        // if something has been pre-registered, prefer it
        // (e.g. if we recursing through children, we might have a proxy from previous iteration;
        // the most recent will have been pre-registered)
        e = otherPreregistered;
    }
    boolean success = action.apply( (EntityInternal)e );
    if (!success) {
        return; // Don't manage children if action false/unnecessary for parent
    }
    // note: children are read from the (possibly substituted) pre-registered instance
    for (Entity child : e.getChildren()) {
        recursively(child, action);
    }
}
/**
 * Whether the entity is currently mid-way through being managed.
 */
private synchronized boolean isPreManaged(Entity e) {
    String id = e.getId();
    return preManagedEntitiesById.containsKey(id);
}
/**
 * Should ensure that the entity is now known about, but should not be accessible from other entities yet.
 *
 * Records that the given entity is about to be managed (used for answering {@link #isPreManaged(Entity)}).
 * Note that refs to the given entity are stored in a weak hashmap so if the subsequent management
 * attempt fails then this reference to the entity will eventually be discarded (if no-one else holds
 * a reference).
 *
 * @return true if newly pre-managed; false on a redundant call with the same entity
 *     (a different entity under the same id throws IllegalStateException)
 */
private synchronized boolean preManageNonRecursive(Entity e, ManagementTransitionMode mode) {
    // always store the concrete instance, even if we were handed a proxy
    Entity realE = toRealEntity(e);
    Object old = preManagedEntitiesById.put(e.getId(), realE);
    // entity graduates from pre-registered to pre-managed
    preRegisteredEntitiesById.remove(e.getId());
    if (old!=null && mode.wasNotLoaded()) {
        if (old.equals(e)) {
            log.warn("{} redundant call to pre-start management of entity {}, mode {}; ignoring", new Object[] { this, e, mode });
        } else {
            // id collision with a different instance is a programming error
            throw new IllegalStateException("call to pre-manage entity "+e+" ("+mode+") but different entity "+old+" already known under that id at "+this);
        }
        return false;
    } else {
        if (log.isTraceEnabled()) log.trace("{} pre-start management of entity {}, mode {}",
            new Object[] { this, e, mode });
        return true;
    }
}
/**
 * Should ensure that the entity is now managed somewhere, and known about in all the lists.
 * Returns true if the entity has now become managed; false if it was already managed (anything else throws exception)
 */
private synchronized boolean manageNonRecursive(Entity e, ManagementTransitionMode mode) {
    Entity old = entitiesById.get(e.getId());
    if (old!=null && mode.wasNotLoaded()) {
        if (old.equals(e)) {
            log.warn("{} redundant call to start management of entity {}; ignoring", this, e);
        } else {
            // a different instance already managed under this id is a programming error
            throw new IllegalStateException("call to manage entity "+e+" ("+mode+") but different entity "+old+" already known under that id at "+this);
        }
        return false;
    }
    BrooklynLogging.log(log, BrooklynLogging.levelDebugOrTraceIfReadOnly(e),
        "{} starting management of entity {}", this, e);
    Entity realE = toRealEntity(e);
    // if a proxy already exists for this id (e.g. rebind/replacement), keep that proxy
    // and re-point it at the new concrete instance, so existing references stay valid
    Entity oldProxy = entityProxiesById.get(e.getId());
    Entity proxyE;
    if (oldProxy!=null) {
        if (mode.wasNotLoaded()) {
            throw new IllegalStateException("call to manage entity "+e+" from unloaded state ("+mode+") but already had proxy "+oldProxy+" already known under that id at "+this);
        }
        // make the old proxy point at this new delegate
        // (some other tricks done in the call below)
        ((EntityProxyImpl)(Proxy.getInvocationHandler(oldProxy))).resetDelegate(oldProxy, oldProxy, realE);
        proxyE = oldProxy;
    } else {
        proxyE = toProxyEntityIfAvailable(e);
    }
    // register in all the maps: proxy, concrete class name, concrete instance
    entityProxiesById.put(e.getId(), proxyE);
    entityTypes.put(e.getId(), realE.getClass().getName());
    entitiesById.put(e.getId(), realE);
    // no longer merely pre-managed
    preManagedEntitiesById.remove(e.getId());
    // a parentless Application is a top-level app; track it in the application collections
    if ((e instanceof Application) && (e.getParent()==null)) {
        applications.add((Application)proxyE);
        applicationIds.add(e.getId());
    }
    if (!entities.contains(proxyE))
        entities.add(proxyE);
    if (old!=null && old!=e) {
        // passing the transition info will ensure the right shutdown steps invoked for old instance
        unmanage(old, mode, true);
    }
    return true;
}
/**
 * Should ensure that the entity is no longer managed anywhere, remove from all lists.
 * Returns true if the entity has been removed from management; if it was not previously managed (anything else throws exception)
 */
private boolean unmanageNonRecursive(Entity e) {
    /*
     * When method is synchronized, hit deadlock:
     * 1. thread called unmanage() on a member of a group, so we got the lock and called group.removeMember;
     *    this ties to synchronize on AbstractGroupImpl.members
     * 2. another thread was doing AbstractGroupImpl.addMember, which is synchronized on AbstractGroupImpl.members;
     *    it tries to call Entities.manage(child) which calls LocalEntityManager.getEntity(), which is
     *    synchronized on this.
     *
     * We MUST NOT call alien code from within the management framework while holding locks.
     * The AbstractGroup.removeMember is effectively alien because a user could override it, and because
     * it is entity specific.
     *
     * TODO Does getting then removing from groups risk this entity being added to other groups while
     * this is happening?  Should abstractEntity.onManagementStopped or some such remove the entity
     * from its groups?
     */
    // NOTE(review): getLastManagementTransitionMode can return null for an unknown id, which
    // would NPE here — confirm callers always record a mode before reaching this point.
    if (!getLastManagementTransitionMode(e.getId()).isReadOnly()) {
        // sever parent/group relations first, OUTSIDE any lock (see deadlock note above)
        e.clearParent();
        for (Group group : e.groups()) {
            if (!Entities.isNoLongerManaged(group)) group.removeMember(e);
        }
        if (e instanceof Group) {
            Collection<Entity> members = ((Group)e).getMembers();
            for (Entity member : members) {
                if (!Entities.isNoLongerManaged(member)) member.removeGroup((Group)e);
            }
        }
    } else {
        log.debug("No relations being updated on unmanage of read only {}", e);
    }
    // only the map bookkeeping is done under the lock
    synchronized (this) {
        Entity proxyE = toProxyEntityIfAvailable(e);
        if (e instanceof Application) {
            applications.remove(proxyE);
            applicationIds.remove(e.getId());
        }
        entities.remove(proxyE);
        entityProxiesById.remove(e.getId());
        entityModesById.remove(e.getId());
        Object old = entitiesById.remove(e.getId());
        entityTypes.remove(e.getId());
        if (old==null) {
            log.warn("{} call to stop management of unknown entity (already unmanaged?) {}; ignoring", this, e);
            return false;
        } else if (!old.equals(e)) {
            // shouldn't happen...
            log.error("{} call to stop management of entity {} removed different entity {}", new Object[] { this, e, old });
            return true;
        } else {
            if (log.isDebugEnabled()) log.debug("{} stopped management of entity {}", this, e);
            return true;
        }
    }
}
/** Registers a listener for entity-set membership changes. */
void addEntitySetListener(CollectionChangeListener<Entity> listener) {
    //must notify listener in a different thread to avoid deadlock (issue #378)
    AsyncCollectionChangeAdapter<Entity> asyncListener =
        new AsyncCollectionChangeAdapter<Entity>(managementContext.getExecutionManager(), listener);
    entities.addPropertyChangeListener(new GroovyObservablesPropertyChangeToCollectionChangeAdapter(asyncListener));
}
/** Deregisters a listener previously added via {@code addEntitySetListener}. */
void removeEntitySetListener(CollectionChangeListener<Entity> listener) {
    // NOTE(review): removal constructs fresh adapter wrappers, so it relies on the adapter
    // types implementing equals() over the wrapped listener — confirm against their impls.
    AsyncCollectionChangeAdapter<Entity> asyncListener =
        new AsyncCollectionChangeAdapter<Entity>(managementContext.getExecutionManager(), listener);
    entities.removePropertyChangeListener(new GroovyObservablesPropertyChangeToCollectionChangeAdapter(asyncListener));
}
/** True (with a warning) if the entity is null or not managed, in which case unmanagement is a no-op. */
private boolean shouldSkipUnmanagement(Entity e) {
    if (e==null) {
        log.warn(""+this+" call to unmanage null entity; skipping",
            new IllegalStateException("source of null unmanagement call to "+this));
        return true;
    }
    if (isManaged(e)) return false;
    log.warn("{} call to stop management of unknown entity (already unmanaged?) {}; skipping, and all descendants", this, e);
    return true;
}
/** Returns the proxy form of the entity if one is available, else the entity itself. */
private Entity toProxyEntityIfAvailable(Entity e) {
    checkNotNull(e, "entity");
    if (e instanceof EntityProxy) return e;
    if (e instanceof AbstractEntity) {
        Entity proxy = ((AbstractEntity)e).getProxy();
        if (proxy != null) return proxy;
    }
    // If we don't already know about the proxy, then use the real thing; presumably it's
    // the legacy way of creating the entity so didn't get a preManage() call
    return e;
}
/** Resolves the concrete (non-proxy) instance for the entity; throws if none is known. */
private Entity toRealEntity(Entity e) {
    checkNotNull(e, "entity");
    if (e instanceof AbstractEntity) return e;
    Entity real = toRealEntityOrNull(e.getId());
    if (real != null) return real;
    throw new IllegalStateException("No concrete entity known for entity "+e+" ("+e.getId()+", "+e.getEntityType().getName()+")");
}
/** Whether the id is known in any state: managed, pre-managed, or pre-registered. */
public boolean isKnownEntityId(String id) {
    if (entitiesById.containsKey(id)) return true;
    if (preManagedEntitiesById.containsKey(id)) return true;
    return preRegisteredEntitiesById.containsKey(id);
}
/**
 * Looks up the concrete (non-proxy) entity instance for the given id, or returns {@code null}
 * if none is known. Pre-registered and pre-managed entries are preferred because,
 * during hot proxying, they should be newer than the fully managed entry.
 *
 * @param id the entity id to resolve
 * @return the concrete entity, or null if the id is unknown
 */
private Entity toRealEntityOrNull(String id) {
    Entity result;
    // prefer the preRegistered and preManaged entities, during hot proxying, they should be newer
    result = preRegisteredEntitiesById.get(id);
    if (result==null)
        result = preManagedEntitiesById.get(id);
    // BUG FIX: the result of this final lookup was previously discarded (missing assignment),
    // so fully managed entities could never be resolved here and null was returned instead.
    if (result==null)
        result = entitiesById.get(id);
    return result;
}
// Convenience delegate: whether the owning management context is still running.
private boolean isRunning() {
    return managementContext.isRunning();
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.sis.feature.internal.shared;
import java.util.Map;
import java.util.HashMap;
import java.util.List;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.Locale;
import java.util.Objects;
import java.util.Optional;
import java.util.function.UnaryOperator;
import org.opengis.util.GenericName;
import org.opengis.referencing.crs.CoordinateReferenceSystem;
import org.apache.sis.feature.Features;
import org.apache.sis.feature.AbstractOperation;
import org.apache.sis.feature.FeatureOperations;
import org.apache.sis.feature.builder.AssociationRoleBuilder;
import org.apache.sis.feature.builder.AttributeTypeBuilder;
import org.apache.sis.feature.builder.FeatureTypeBuilder;
import org.apache.sis.feature.builder.PropertyTypeBuilder;
import org.apache.sis.feature.internal.Resources;
import org.apache.sis.util.ArgumentCheckByAssertion;
import org.apache.sis.util.UnconvertibleObjectException;
import org.apache.sis.util.internal.shared.Strings;
import org.apache.sis.util.resources.Errors;
import org.apache.sis.util.resources.Vocabulary;
// Specific to the main branch:
import org.apache.sis.feature.AbstractFeature;
import org.apache.sis.feature.DefaultFeatureType;
import org.apache.sis.feature.DefaultAttributeType;
import org.apache.sis.feature.AbstractIdentifiedType;
import org.apache.sis.filter.Expression;
import org.apache.sis.pending.geoapi.filter.ValueReference;
/**
 * A builder for deriving a feature type containing a subset of the properties of another type.
 * The other type is called the {@linkplain #source() source} feature type. The properties that
 * are retained may have different names than the property names of the source feature type.
 * If a property is a link such as {@code sis:identifier} or {@code sis:geometry},
 * this class keeps track of the dependencies required for recreating the link.
 *
 * <p>Properties that are copied from the source feature type are declared by calls to
 * {@link #addSourceProperty(AbstractIdentifiedType, boolean)} and related methods defined in this class.
 * The methods inherited from the parent class can also be invoked,
 * but they will receive no special treatment.</p>
 *
 * @author Martin Desruisseaux (Geomatys)
 */
public final class FeatureProjectionBuilder extends FeatureTypeBuilder {
    /**
     * The type of features that provide the values to store in the projected features.
     * The value of this field does not change, except when following an XPath such as {@code "a/b/c"}.
     *
     * @see #source()
     */
    private DefaultFeatureType source;
    /**
     * Whether the source is a dependency of the feature type given to the constructor.
     * This flag becomes {@code true} when following an XPath of the form {@code "a/b/c"}.
     * In such case, {@link #source} may be temporarily set to the tip {@code "c"} type.
     *
     * @see #using(DefaultFeatureType, FeatureExpression)
     */
    private boolean sourceIsDependency;
    /**
     * The properties to inherit from the {@linkplain #source} feature type by explicit user's request.
     * The property types and the property names are not necessarily the same as in the source feature.
     * For example, an operation may be replaced by an attribute which will store the operation result.
     *
     * <h4>Implementation note</h4>
     * This collection cannot be a {@link java.util.Map} with names of the source properties as keys,
     * because the list may contain more than one item with the same {@link Item#sourceName} value.
     * This collision happens if some items are the results of XPath evaluations such as {@code "a/b/c"}.
     * This is the reason why properties can be renamed before being stored in the projected features.
     */
    private final List<Item> requested;
    /**
     * Names that are actually used, or may be used, in the projected feature type.
     * For each name, the associated value is the item that is explicitly using that name.
     * A {@code null} value means that the name is not used, but is nevertheless reserved
     * because potentially ambiguous. This information is used for avoiding name collisions
     * in automatically generated names.
     *
     * <p>Note that the keys are not necessarily the values of {@link Item#sourceName}.
     * Keys are rather the values of {@code Item.builder.getName()}, except that the
     * latter may not be valid before {@link Item#validateName()} is invoked.</p>
     *
     * @see #reserve(GenericName, Item)
     */
    private final Map<GenericName, Item> reservedNames;
    /**
     * Whether at least one item is modified compared to the original property in the source feature type.
     * A modified item may be an item with a name different than the property in {@linkplain #source}.
     * If {@code true}, then the projection cannot be an {@linkplain #isIdentity() identity} operation.
     * Result of operations such as links may also need to be fetched in advance,
     * because operations cannot be executed anymore after the name of a dependency changed.
     *
     * @see Item#setName(GenericName)
     * @see #isIdentity()
     */
    private boolean hasModifiedProperties;
    /**
     * Sequential number for generating default names of unnamed properties. This is used when the
     * name inherited from the {@linkplain #source} feature is unknown or collides with another name.
     */
    private int unnamedNumber;
    /**
     * Names of {@linkplain #source} properties that are dependencies found in operations.
     * The most common cases are the targets of {@code "sis:identifier"} and {@code "sis:geometry"} links.
     * Values are the items having this dependency. Many items may share the same dependency.
     *
     * <p>At first, this map is populated without checking if the properties requested by the user contain
     * those dependencies. After all user's requested properties have been declared to this builder, values
     * are filtered for identifying which dependencies need to be added as implicit properties.</p>
     */
    private final Map<String, List<Item>> dependencies;
    /**
     * Whether to store operations as attributes. By default, when a {@linkplain #addSourceProperty source
     * property is added in the projection}, operations are forwarded as given (with their dependencies).
     * But if this flag is set to {@code true}, then operations are replaced by an attribute.
     * Then, it will be the caller's responsibility to store the value.
     */
    private boolean operationResultAsAttribute;
/**
 * Creates a new builder instance using the default factories.
 * The given {@code source} is the reference against which all requested properties are resolved.
 *
 * @todo Provide a way to specify the factories used by the data store.
 *
 * @param source the type from which to take the properties to keep in the projected feature.
 * @param locale the locale to use for formatting error messages, or {@code null} for the default locale.
 * @throws NullPointerException if {@code source} is null.
 */
public FeatureProjectionBuilder(final DefaultFeatureType source, final Locale locale) {
    super(null, null, locale);
    this.source = Objects.requireNonNull(source);
    requested = new ArrayList<>();
    dependencies = new HashMap<>();
    reservedNames = new HashMap<>();
}
/**
 * Returns the type of features that provide the values to store in the projected features.
 * This is the type given at construction time, except when following a XPath such as
 * {@code "a/b/c"} in which case it may temporarily be the leaf {@code "c"} type
 * (see {@link #using(DefaultFeatureType, FeatureExpression)}).
 *
 * @return the current source of properties (never {@code null}).
 */
public DefaultFeatureType source() {
    return source;
}
/**
 * Returns the expected type of the given expression, evaluated with the given feature type as the
 * temporary {@linkplain #source() source}. The previous source and the {@code sourceIsDependency}
 * flag are restored before this method returns, even if the expression evaluation fails.
 * This is used for the last expression in a XPath such as {@code "a/b/c"}.
 *
 * @param childType the feature type to use as the temporary source.
 * @param expression the expression from which to get the expected type.
 * @return handler for the property, or {@code null} if it cannot be resolved.
 * @throws NullPointerException if {@code childType} is null.
 *
 * @see FeatureExpression#expectedType(FeatureProjectionBuilder)
 */
public Item using(final DefaultFeatureType childType, final FeatureExpression<?,?> expression) {
    final DefaultFeatureType savedSource = source;
    final boolean savedFlag = sourceIsDependency;
    source = Objects.requireNonNull(childType);     // Fails before any flag is toggled.
    sourceIsDependency = true;
    try {
        return expression.expectedType(this);
    } finally {
        // Restore the builder state regardless of how the evaluation ended.
        sourceIsDependency = savedFlag;
        source = savedSource;
    }
}
/**
 * Adds a property from the source feature type, but replacing operation results by attributes.
 * This method is invoked when an operation uses a source property as a template, usually because
 * the result will be of the same type as one of the operation argument (usually the first argument).
 * In such case, the caller does not want the operation to be executed, since the property will rather
 * be used as a slot for receiving the result.
 *
 * <p>The {@link #operationResultAsAttribute} flag is restored to its previous value before this
 * method returns, even if the expression evaluation fails.</p>
 *
 * @param expression the expression from which to get the expected type.
 * @return handler for the property, or {@code null} if it cannot be resolved.
 */
public Item addTemplateProperty(final FeatureExpression<?,?> expression) {
    final boolean status = operationResultAsAttribute;
    try {
        operationResultAsAttribute = true;
        return expression.expectedType(this);
    } finally {
        operationResultAsAttribute = status;
    }
}
/**
 * Adds the given property, replacing operation by an attribute storing the operation result.
 * This method may return {@code null} if it cannot resolve the property type, in which case
 * the caller should throw an exception (throwing an exception is left to the caller because
 * it can produces a better error message). Operation's dependencies, if any, are added into
 * the given {@code deferred} set.
 *
 * @param property the {@linkplain #source} property to add.
 * @param deferred where to add operation's dependencies, or {@code null} for not collecting dependencies.
 * @return builder for the projected property, or {@code null} if it cannot be resolved.
 */
private PropertyTypeBuilder addPropertyResult(AbstractIdentifiedType property, final Collection<String> deferred) {
    if (property instanceof AbstractOperation) {
        // Keep the original operation name for the projected property, whatever the result type is.
        final GenericName name = property.getName();
        do {
            // Loop invariant: `property` is an `AbstractOperation` here (guarded by the
            // enclosing `if` on first iteration and by the `while` condition afterward).
            if (deferred != null) {
                deferred.addAll(((AbstractOperation) property).getDependencies());
            }
            final AbstractIdentifiedType result = ((AbstractOperation) property).getResult();
            /*
             * Test the feature-type case first: `DefaultFeatureType` is a subtype of
             * `AbstractIdentifiedType`, so testing the supertype first would make the
             * association branch unreachable and let a feature-typed result fall through
             * to `addProperty(…)`.
             */
            if (result instanceof DefaultFeatureType) {
                return addAssociation((DefaultFeatureType) result).setName(name);
            } else if (result != null && result != property) {
                // Unwrap one level: the result may itself be an operation, hence the loop.
                property = result;
            } else {
                return null;        // Self-referencing or absent result: cannot resolve.
            }
        } while (property instanceof AbstractOperation);
        return addProperty(property).setName(name);
    }
    return addProperty(property);
}
/**
 * Adds a property from the source feature type. The given property should be the result of a call to
 * {@code source().getProperty(sourceName)}. The call to {@code getProperty(…)} is left to the caller
 * because some callers need to wrap that call in a {@code try} block.
 *
 * @param property the property type, usually as one of the properties of {@link #source()}.
 * @param named whether the {@code property} name can be used as a default name.
 * @return handler for the given item, or {@code null} if {@code property} is null.
 */
public Item addSourceProperty(final AbstractIdentifiedType property, final boolean named) {
    if (property == null) {
        return null;
    }
    final PropertyTypeBuilder builder;
    final Collection<String> deferred;
    if (sourceIsDependency) {
        /*
         * Adding a property which is not defined in the feature type specified at construction time,
         * but which is defined at the tip of some XPath such as "a/b/c". This is not the same thing
         * as adding an association. This is rather adding a subset of an association. We do not add
         * dependency information because the dependencies are not directly in the source feature.
         */
        reserve(property.getName(), null);      // Reserve the name without assigning an owner yet.
        deferred = new ArrayList<>();
        builder = addPropertyResult(property, deferred);
    } else if (!operationResultAsAttribute && property instanceof AbstractOperation) {
        /*
         * For operations, remember the dependencies in order to determine (after we added all properties)
         * if we can keep the property as an operation or if we will need to copy the value in an attribute.
         */
        deferred = ((AbstractOperation) property).getDependencies();
        builder = addProperty(property);
    } else {
        // Plain property, or the caller requested operations to be materialized as attributes.
        deferred = new ArrayList<>();
        builder = addPropertyResult(property, deferred);
    }
    // NOTE(review): `builder` may be null if `addPropertyResult(…)` could not resolve the property,
    // in which case the returned item wraps a null builder — presumably callers check; verify.
    final var item = new Item(named ? property.getName() : null, builder);
    requested.add(item);
    // Many items may depend on the same source property, hence list values in the map.
    for (String dependency : deferred) {
        dependencies.computeIfAbsent(dependency, (key) -> new ArrayList<>(2)).add(item);
    }
    return item;
}
/**
 * Adds a property created by the caller rather than extracted from the source feature.
 * The given builder should have been created by a method of the {@link FeatureTypeBuilder} parent class.
 * The name of the builder is usually not the name of a property in the {@linkplain #source() source} feature.
 *
 * <h4>Assertions</h4>
 * When assertions are enabled, this method verifies that the given builder is a member of the
 * {@linkplain #properties() properties} collection and that no {@link Item} wraps it yet.
 *
 * @param builder builder for the computed property, or {@code null}.
 * @param named whether the {@code builder} name can be used as a default name.
 * @return handler for the given item, or {@code null} if the given builder was null.
 */
@ArgumentCheckByAssertion
public Item addComputedProperty(final PropertyTypeBuilder builder, final boolean named) {
    if (builder != null) {
        assert properties().contains(builder) : builder;
        assert requested.stream().noneMatch((item) -> item.builder == builder) : builder;
        final GenericName initialName = named ? builder.getName() : null;
        final Item item = new Item(initialName, builder);
        requested.add(item);
        return item;
    }
    return null;
}
/**
 * Handler for a property inherited from the source feature type. The property is initially unnamed.
 * A name can be specified explicitly after construction by a call to {@link #setName(GenericName)}.
 * If no name is specified, the default name will be the same as in the source feature type if that
 * name is available, or a default name otherwise.
 *
 * <p>Note: several methods of this class mutate the enclosing builder state
 * ({@code hasModifiedProperties}, {@code reservedNames}, {@code unnamedNumber}).</p>
 */
public final class Item {
    /**
     * The name that the property had in the {@linkplain #source() source} feature, or {@code null}.
     * The property built by the {@linkplain #builder} will often have the same name, but not always.
     */
    final GenericName sourceName;

    /**
     * The builder for configuring the property. May be replaced after construction,
     * for example when an operation is substituted by a stored attribute.
     */
    private PropertyTypeBuilder builder;

    /**
     * Whether this item got an explicit name. The specified name may be
     * identical to the name in the {@linkplain #source() source} feature.
     */
    private boolean isNamed;

    /**
     * Whether to keep the current name if it is available. This is set to {@code true} when the user did
     * not specify a name explicitly, but keeping the name of the source property would be a natural choice.
     * However, before using that name, we need to wait and see if that name will be explicitly used for
     * another property.
     */
    private boolean preferCurrentName;

    /**
     * Whether this property is an operation having at least one dependency which is not included
     * in the list of properties requested by the user. In such case, we cannot keep the operation
     * and need to replace it by a stored attribute.
     *
     * @see #replaceIfMissingDependency()
     */
    private boolean hasMissingDependency;

    /**
     * Expression for evaluating the attribute value from a source feature instance, or {@code null} if none.
     * This field should be non-null only if the value will be stored in an attribute. If the property is an
     * operation, then this field should be null (this is not the expression of the operation).
     *
     * @see #attributeValueGetter()
     */
    private Expression<? super AbstractFeature, ?> attributeValueGetter;

    /**
     * Creates a new handle for the property created by the given builder.
     *
     * @param sourceName the property name in the {@linkplain #source() source} feature, or {@code null}.
     * @param builder the builder for configuring the property.
     */
    private Item(final GenericName sourceName, final PropertyTypeBuilder builder) {
        this.sourceName = sourceName;
        this.builder = builder;
    }

    /**
     * Returns a string representation for debugging purposes.
     */
    @Override
    public String toString() {
        return Strings.toString(getClass(),
                "sourceName", (sourceName != null) ? sourceName.toString() : null,
                "targetName", isNamed ? getName() : null,
                "valueClass", (builder instanceof AttributeTypeBuilder<?>) ? ((AttributeTypeBuilder<?>) builder).getValueClass() : null,
                null, hasMissingDependency ? "hasMissingDependency" : null);
    }

    /**
     * Returns the property type builder wrapped by this item.
     * The following operations are allowed on the returned builder:
     *
     * <ul>
     *   <li>Set the cardinality (minimum and maximum occurrences).</li>
     *   <li>Build the {@code PropertyType}.</li>
     * </ul>
     *
     * The following operations should <em>not</em> be executed on the returned builder.
     * Use the dedicated methods in this class instead:
     *
     * <ul>
     *   <li>Set the name: use {@link #setName(GenericName)}.</li>
     *   <li>Set the value class: use {@link #replaceValueClass(UnaryOperator)}.</li>
     * </ul>
     *
     * @return the property type builder wrapped by this item.
     */
    public PropertyTypeBuilder builder() {
        hasModifiedProperties = true;       // Conservative because the caller may do anything on the builder.
        return builder;
    }

    /**
     * Replaces this property by a stored attribute if at least one dependency is not in the list of properties
     * requested by the user. This method should be invoked only for preparing the user requested feature type.
     * This method should not be invoked for preparing the feature type with dependencies, because the latter
     * should contain the missing dependencies.
     */
    private void replaceIfMissingDependency() {
        if (hasMissingDependency) {
            hasMissingDependency = false;
            hasModifiedProperties = true;
            final var old = builder;
            builder = addPropertyResult(old.build(), null);     // `old.build()` returns the existing operation.
            old.replaceBy(builder);
        }
    }

    /**
     * Sets the class of attribute values. If the builder is an instance of {@link AttributeTypeBuilder}
     * and if {@code type.apply(valueClass)} returns a non-null value ({@code valueClass} is the current
     * class of attribute values), then this method sets the new attribute value class to the specified
     * type and returns {@code true}. Otherwise, this method returns {@code false}.
     *
     * @param type a converter from current class to the new class of attribute values.
     * @return whether the value class has been set to the value returned by {@code type}.
     * @throws UnconvertibleObjectException if the default value cannot be converted to the given type.
     */
    public boolean replaceValueClass(final UnaryOperator<Class<?>> type) {
        if (builder instanceof AttributeTypeBuilder<?>) {
            final var ab = (AttributeTypeBuilder<?>) builder;
            final Class<?> r = type.apply(ab.getValueClass());
            if (r != null) {
                // `setValueClass(…)` may return a new builder instance; mark as modified only in that case.
                if (builder != (builder = ab.setValueClass(r))) {
                    hasModifiedProperties = true;
                }
                return true;
            }
        } else if (builder instanceof AssociationRoleBuilder) {
            // We do not yet have a special case for this one.
        } else {
            final var property = builder.build();
            if (property instanceof AbstractOperation) {
                /*
                 * Less common case where the caller wants to change the type of an operation.
                 * We cannot change the type of an operation (unless we replace the operation
                 * by a stored attribute). Therefore, we only check type compatibility.
                 */
                final var result = ((AbstractOperation) property).getResult();
                if (result instanceof DefaultAttributeType<?>) {
                    final Class<?> c = ((DefaultAttributeType<?>) result).getValueClass();
                    final Class<?> r = type.apply(c);
                    if (r != null) {
                        /*
                         * We can be lenient for link operation, but must be strict for other operations.
                         * Example: a link to a geometry, but relaxing the `Polygon` type to `Geometry`.
                         */
                        if (Features.getLinkTarget(property).isPresent() ? r.isAssignableFrom(c) : r.equals(c)) {
                            return true;
                        }
                        throw new UnconvertibleObjectException(Errors.forLocale(getLocale())
                                .getString(Errors.Keys.CanNotConvertFromType_2, c, r));
                    }
                }
            }
        }
        return false;
    }

    /**
     * Sets the expression to use for evaluating the property value.
     * If {@code stored} is {@code true} (the usual case), then the expression will be evaluated early
     * and its result will be stored as an attribute value, unless this property is not an attribute.
     * If {@code stored} is {@code false}, this method replaces the attribute by an operation wrapping
     * the given expression. In other words, the evaluation of the expression will be deferred.
     * The latter case is possible only if the {@code FeatureType} contains all dependencies
     * that the operation needs.
     *
     * @param expression the expression to be evaluated by the operation.
     * @param stored whether to store the expression result in an attribute (early evaluation).
     */
    public void setValueGetter(final Expression<? super AbstractFeature, ?> expression, final boolean stored) {
        if (builder instanceof AttributeTypeBuilder<?>) {
            if (stored) {
                attributeValueGetter = expression;
            } else {
                final var atb = (AttributeTypeBuilder<?>) builder;
                /*
                 * Optimization: we could compute `storedType = atb.build()` unconditionally,
                 * which creates an attribute with the final name in the target feature type.
                 * However, in the particular case of links, we are better to use the name of
                 * the property in the source feature type, because it allows an optimization
                 * in `ExpressionOperation.create(…)` (a replacement by a `LinkOperation`).
                 */
                DefaultAttributeType<?> storedType = null;
                if (expression instanceof ValueReference<?,?>) {
                    var candidate = source.getProperty(((ValueReference<?,?>) expression).getXPath());
                    if (candidate instanceof DefaultAttributeType<?>) {
                        storedType = (DefaultAttributeType<?>) candidate;
                    }
                }
                if (storedType == null) {
                    storedType = atb.build();   // Same name as in the `identification` map below.
                }
                final var identification = Map.of(AbstractOperation.NAME_KEY, builder.getName());
                builder = addProperty(FeatureOperations.expression(identification, expression, storedType));
                atb.replaceBy(builder);
                hasModifiedProperties = true;
            }
        } else {
            // The property is an operation, usually a link. Leave it as-is.
        }
    }

    /**
     * Returns the expression for evaluating the value to store in the attribute built by this item.
     * The expression may be {@code null} if the value is computed on-the-fly (i.e. the property is
     * an operation), or if the expression has not been specified.
     */
    final Expression<? super AbstractFeature, ?> attributeValueGetter() {
        return attributeValueGetter;
    }

    /**
     * Sets the coordinate reference system that characterizes the values of this attribute.
     * This is a no-op if the wrapped builder is not an {@link AttributeTypeBuilder}.
     *
     * @param crs coordinate reference system associated to attribute values, or {@code null}.
     * @return {@code this} for method calls chaining.
     */
    public Item setCRS(final CoordinateReferenceSystem crs) {
        if (builder instanceof AttributeTypeBuilder<?>) {
            builder = ((AttributeTypeBuilder<?>) builder).setCRS(crs);
            hasModifiedProperties = true;
        }
        return this;
    }

    /**
     * Returns whether the property built by this item is equivalent to the given property.
     * The caller should have verified that {@link #hasModifiedProperties} is {@code false}
     * before invoking this method, because the implementation performs a filtering based
     * on the property name only. This is that way for accepting differences in metadata.
     *
     * @param property the property to compare.
     * @return whether this item builds a property equivalent to the given one.
     *
     * @see #isIdentity()
     */
    private boolean equivalent(final AbstractIdentifiedType property) {
        return builder.getName().equals(property.getName());
    }

    /**
     * Returns the name of the projected property.
     * This is initially the name of the property given at construction time,
     * but can be changed later by a call to {@link #setName(GenericName)}.
     *
     * @return the name of the projected property.
     */
    public String getName() {
        return builder.getName().toString();
    }

    /**
     * Sets the name of the projected property. A {@code null} argument means that the name is unspecified,
     * in which case a different name may be generated later if the current name collides with other names.
     *
     * <p>This method should be invoked exactly once for each item, even if the argument is {@code null}.
     * The reason is because this method uses this information for recording which names to reserve.</p>
     *
     * @param targetName the desired name in the projected feature, or {@code null} if unspecified.
     */
    public void setName(final GenericName targetName) {
        if (targetName == null) {
            reserve(sourceName, null);      // Will use that name only if not owned by another item.
            preferCurrentName = true;
        } else if (targetName.equals(sourceName)) {
            reserve(sourceName, this);      // Take possession of that name.
            isNamed = true;
        } else {
            builder.setName(targetName);
            reserve(targetName, this);
            hasModifiedProperties = true;   // Because the name is different.
            isNamed = true;
        }
    }

    /**
     * If this item has not received an explicit name, infers a default name.
     * This method should be invoked only after {@link #setName(GenericName)}
     * has been invoked for all items, for allowing this class to know which
     * names are reserved.
     */
    private void validateName() {
        if (!isNamed) {
            final Item owner = reservedNames.get(sourceName);
            if (owner != this) {
                GenericName name = sourceName;
                if (owner != null || name == null || (!preferCurrentName && reservedNames.containsKey(name))) {
                    // Generate "Unnamed #n" style names until we find one that is not reserved.
                    do {
                        var text = Vocabulary.formatInternational(Vocabulary.Keys.Unnamed_1, ++unnamedNumber);
                        name = builder.setName(text).getName();     // Local name with the appropriate name space.
                    } while (reservedNames.containsKey(name));      // Reminder: the associated value may be null.
                }
                reserve(name, this);
            }
            isNamed = true;
        }
    }
}
/**
 * Declares the given name as reserved. If this class needs to generate a default name,
 * it will ensure that automatically generated names do not conflict with reserved names.
 * The {@linkplain GenericName#tip() tip} of the name is reserved as well when distinct.
 *
 * @param name name to reserve for a projected property type, or {@code null} if none.
 * @param owner the builder using that name, or {@code null} if none.
 */
private void reserve(GenericName name, final Item owner) {
    if (name == null) {
        return;
    }
    // By `putIfAbsent` method contract, non-null values have precedence over null values.
    reservedNames.putIfAbsent(name, owner);
    final GenericName tip = name.tip();
    if (tip != name) {                      // Identity shortcut for a majority of cases.
        reservedNames.putIfAbsent(tip, owner);
    }
}
/**
 * Adds dependencies. This method adds in the {@code deferred} list any transitive
 * dependencies which may need to be added in a second pass after this method call.
 * The elements added into {@code deferred} are {@linkplain #source} properties.
 *
 * <p>All entries of the {@link #dependencies} map are consumed (removed) by this method.
 * The map may be repopulated if the caller later adds the deferred properties, hence the
 * loop in {@code project()}.</p>
 *
 * @param deferred where to add missing transitive dependencies (source properties).
 * @throws UnsupportedOperationException if there is an attempt to rename a property which is used by an operation.
 */
private void resolveDependencies(final List<AbstractIdentifiedType> deferred) {
    final var it = dependencies.entrySet().iterator();
    while (it.hasNext()) {
        final Map.Entry<String, List<Item>> entry = it.next();
        final AbstractIdentifiedType property = source.getProperty(entry.getKey());
        final GenericName sourceName = property.getName();
        Item item = reservedNames.get(sourceName);
        if (item != null) {
            // The dependency is already present, but operations break if it was renamed.
            if (!sourceName.equals(item.sourceName)) {
                throw new UnsupportedOperationException(Resources.forLocale(getLocale())
                        .getString(Resources.Keys.CannotRenameDependency_2, item.sourceName, sourceName));
            }
        } else {
            // Dependency absent: flag all dependent items and defer the property addition.
            for (Item dependent : entry.getValue()) {
                dependent.hasMissingDependency = true;
            }
            deferred.add(property);
        }
        it.remove();
    }
}
/**
 * Returns {@code true} if the feature to be built should be equivalent to the source feature.
 * This is the case when no item has been modified and the requested items match the source
 * properties one-to-one, in the same order (comparison is by name only).
 *
 * @return whether the {@linkplain #source} feature type can be used directly.
 */
private boolean isIdentity() {
    if (hasModifiedProperties) {
        return false;
    }
    final Iterator<Item> items = requested.iterator();
    for (final AbstractIdentifiedType property : source.getProperties(true)) {
        if (!items.hasNext() || !items.next().equivalent(property)) {
            return false;       // Fewer items than properties, or a name mismatch.
        }
    }
    return !items.hasNext();    // Also fail if there are more items than source properties.
}
/**
 * Returns the feature type described by this builder. As an optimization, the
 * {@linkplain #source() source} type is returned directly when this projection
 * performs no operation (see {@link #isIdentity()}).
 */
@Override
public DefaultFeatureType build() {
    if (isIdentity()) {
        return source;
    }
    return super.build();
}
/**
 * Sets the default name of all anonymous properties, then builds the feature types.
 * Two feature types are built: one with only the requested properties, and another
 * type augmented with dependencies of operations such as links.
 *
 * <p>This method should be invoked exactly once.</p>
 *
 * <h4>Identity operation</h4>
 * If the result is a feature type with all the properties of the source feature,
 * with the same property names in the same order, and if the expressions are only
 * fetching the values (no computation), then this method returns an empty value
 * for meaning that this projection does nothing.
 *
 * @return the feature types with and without dependencies, or empty if there is no projection.
 * @throws UnsupportedOperationException if there is an attempt to rename a property which is used by an operation.
 */
public Optional<FeatureProjection> project() {
    requested.forEach(Item::validateName);
    /*
     * Add properties for all dependencies that are required by operations but are not already present.
     * If there is no need to add anything, `typeWithDependencies` will be directly the feature type to return.
     */
    final List<PropertyTypeBuilder> properties = properties();
    final int count = properties.size();        // Remember how many properties the user requested.
    final var deferred = new ArrayList<AbstractIdentifiedType>();
    resolveDependencies(deferred);
    /*
     * If there is no dependencies, the requested type and the type with dependencies are the same.
     * Otherwise, we need to resolve transitive dependencies before to build each type.
     */
    final DefaultFeatureType typeRequested, typeWithDependencies;
    if (deferred.isEmpty()) {
        typeRequested = typeWithDependencies = build();
    } else {
        // Loop because adding a dependency may itself introduce new (transitive) dependencies.
        do {
            for (AbstractIdentifiedType property : deferred) {
                final Item item = addSourceProperty(property, true);
                if (item != null) {
                    item.validateName();
                    item.setValueGetter(FeatureOperations.expressionOf(property), true);
                }
            }
            deferred.clear();
            resolveDependencies(deferred);
        } while (!deferred.isEmpty());
        typeWithDependencies = build();
        properties.subList(count, properties.size()).clear();   // Keep only the properties requested by user.
        requested.forEach(Item::replaceIfMissingDependency);
        typeRequested = build();
    }
    if (source.equals(typeRequested) && source.equals(typeWithDependencies)) {
        return Optional.empty();
    }
    return Optional.of(new FeatureProjection(typeRequested, typeWithDependencies, requested));
}
}
|
googleapis/google-cloud-java | 37,919 | java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/CreateTableRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/bigquery/biglake/v1alpha1/metastore.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.bigquery.biglake.v1alpha1;
/**
*
*
* <pre>
* Request message for the CreateTable method.
* </pre>
*
* Protobuf type {@code google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest}
*/
public final class CreateTableRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest)
CreateTableRequestOrBuilder {
// NOTE(review): protoc-generated code ("DO NOT EDIT" header above) — hand edits would be
// lost on regeneration; change the .proto and regenerate instead.
private static final long serialVersionUID = 0L;

// Use CreateTableRequest.newBuilder() to construct.
private CreateTableRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}

// No-arg constructor used by newInstance(); initializes string fields to empty defaults.
private CreateTableRequest() {
  parent_ = "";
  tableId_ = "";
}

@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  return new CreateTableRequest();
}

// Proto descriptor for this message type, resolved from the generated MetastoreProto registry.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto
      .internal_static_google_cloud_bigquery_biglake_v1alpha1_CreateTableRequest_descriptor;
}

@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto
      .internal_static_google_cloud_bigquery_biglake_v1alpha1_CreateTableRequest_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest.class,
          com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest.Builder.class);
}
// Bit 0 tracks presence of the `table` message field (see hasTable()).
private int bitField0_;

public static final int PARENT_FIELD_NUMBER = 1;

// Holds either a String or a ByteString; the accessor below lazily decodes and caches
// the String form (standard protobuf string-field idiom). Generated code — do not hand-edit.
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";

/**
 *
 *
 * <pre>
 * Required. The parent resource where this table will be created.
 * Format:
 * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
 * </pre>
 *
 * <code>
 * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return The parent.
 */
@java.lang.Override
public java.lang.String getParent() {
  java.lang.Object ref = parent_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    parent_ = s;
    return s;
  }
}

/**
 *
 *
 * <pre>
 * Required. The parent resource where this table will be created.
 * Format:
 * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
 * </pre>
 *
 * <code>
 * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return The bytes for parent.
 */
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
  java.lang.Object ref = parent_;
  if (ref instanceof java.lang.String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    parent_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
public static final int TABLE_FIELD_NUMBER = 2;

// Singular message field; presence is tracked via bitField0_ bit 0. Generated code — do not hand-edit.
private com.google.cloud.bigquery.biglake.v1alpha1.Table table_;

/**
 *
 *
 * <pre>
 * Required. The table to create. The `name` field does not need to be
 * provided for the table creation.
 * </pre>
 *
 * <code>
 * .google.cloud.bigquery.biglake.v1alpha1.Table table = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 *
 * @return Whether the table field is set.
 */
@java.lang.Override
public boolean hasTable() {
  return ((bitField0_ & 0x00000001) != 0);
}

/**
 *
 *
 * <pre>
 * Required. The table to create. The `name` field does not need to be
 * provided for the table creation.
 * </pre>
 *
 * <code>
 * .google.cloud.bigquery.biglake.v1alpha1.Table table = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 *
 * @return The table.
 */
@java.lang.Override
public com.google.cloud.bigquery.biglake.v1alpha1.Table getTable() {
  return table_ == null
      ? com.google.cloud.bigquery.biglake.v1alpha1.Table.getDefaultInstance()
      : table_;
}

/**
 *
 *
 * <pre>
 * Required. The table to create. The `name` field does not need to be
 * provided for the table creation.
 * </pre>
 *
 * <code>
 * .google.cloud.bigquery.biglake.v1alpha1.Table table = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
@java.lang.Override
public com.google.cloud.bigquery.biglake.v1alpha1.TableOrBuilder getTableOrBuilder() {
  return table_ == null
      ? com.google.cloud.bigquery.biglake.v1alpha1.Table.getDefaultInstance()
      : table_;
}
public static final int TABLE_ID_FIELD_NUMBER = 3;

// Same lazy String/ByteString caching idiom as parent_. Generated code — do not hand-edit.
@SuppressWarnings("serial")
private volatile java.lang.Object tableId_ = "";

/**
 *
 *
 * <pre>
 * Required. The ID to use for the table, which will become the final
 * component of the table's resource name.
 * </pre>
 *
 * <code>string table_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @return The tableId.
 */
@java.lang.Override
public java.lang.String getTableId() {
  java.lang.Object ref = tableId_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    tableId_ = s;
    return s;
  }
}

/**
 *
 *
 * <pre>
 * Required. The ID to use for the table, which will become the final
 * component of the table's resource name.
 * </pre>
 *
 * <code>string table_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @return The bytes for tableId.
 */
@java.lang.Override
public com.google.protobuf.ByteString getTableIdBytes() {
  java.lang.Object ref = tableId_;
  if (ref instanceof java.lang.String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    tableId_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
// Memoized initialization state: -1 = not computed, 0 = not initialized, 1 = initialized.
private byte memoizedIsInitialized = -1;

// Always true for this message: proto3 has no required wire-level fields. Generated code.
@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  memoizedIsInitialized = 1;
  return true;
}
// Serializes fields in ascending field-number order, skipping defaults. Generated code.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
  }
  if (((bitField0_ & 0x00000001) != 0)) {
    output.writeMessage(2, getTable());
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(tableId_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 3, tableId_);
  }
  getUnknownFields().writeTo(output);
}
  @java.lang.Override
  public int getSerializedSize() {
    // Size is computed once and memoized; mirrors the field checks in writeTo.
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getTable());
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(tableId_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, tableId_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    // Value equality over parent, table (presence + value), table_id, and unknown fields.
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest other =
        (com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest) obj;
    if (!getParent().equals(other.getParent())) return false;
    if (hasTable() != other.hasTable()) return false;
    if (hasTable()) {
      if (!getTable().equals(other.getTable())) return false;
    }
    if (!getTableId().equals(other.getTableId())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // Memoized hash over the same fields used by equals(); consistent with the contract.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    if (hasTable()) {
      hash = (37 * hash) + TABLE_FIELD_NUMBER;
      hash = (53 * hash) + getTable().hashCode();
    }
    hash = (37 * hash) + TABLE_ID_FIELD_NUMBER;
    hash = (53 * hash) + getTableId().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points; all delegate to PARSER / GeneratedMessageV3 helpers.
  public static com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Builder factory methods; toBuilder() avoids copying when called on the default instance.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Request message for the CreateTable method.
* </pre>
*
* Protobuf type {@code google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest}
*/
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest)
      com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto
          .internal_static_google_cloud_bigquery_biglake_v1alpha1_CreateTableRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto
          .internal_static_google_cloud_bigquery_biglake_v1alpha1_CreateTableRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest.class,
              com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest.Builder.class);
    }

    // Construct using com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    // Eagerly creates nested-message field builders when the runtime requires it.
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getTableFieldBuilder();
      }
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      parent_ = "";
      table_ = null;
      if (tableBuilder_ != null) {
        tableBuilder_.dispose();
        tableBuilder_ = null;
      }
      tableId_ = "";
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto
          .internal_static_google_cloud_bigquery_biglake_v1alpha1_CreateTableRequest_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest
        getDefaultInstanceForType() {
      return com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest build() {
      com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest buildPartial() {
      com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest result =
          new com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies only the fields whose presence bits are set into the new message.
    private void buildPartial0(
        com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.parent_ = parent_;
      }
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.table_ = tableBuilder_ == null ? table_ : tableBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.tableId_ = tableId_;
      }
      result.bitField0_ |= to_bitField0_;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest) {
        return mergeFrom((com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Field-by-field merge: set fields in `other` overwrite/merge into this builder.
    public Builder mergeFrom(com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest other) {
      if (other
          == com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest.getDefaultInstance())
        return this;
      if (!other.getParent().isEmpty()) {
        parent_ = other.parent_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (other.hasTable()) {
        mergeTable(other.getTable());
      }
      if (!other.getTableId().isEmpty()) {
        tableId_ = other.tableId_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    // Streaming parse loop: dispatches on wire-format tags for fields 1-3,
    // routing unrecognized tags into the unknown-field set.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                parent_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                input.readMessage(getTableFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 26:
              {
                tableId_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    // Presence bits: 0x1 = parent, 0x2 = table, 0x4 = table_id.
    private int bitField0_;

    private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The parent resource where this table will be created.
* Format:
* projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
    public java.lang.String getParent() {
      java.lang.Object ref = parent_;
      if (!(ref instanceof java.lang.String)) {
        // Stored as a ByteString: decode once and cache the String.
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        parent_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
/**
*
*
* <pre>
* Required. The parent resource where this table will be created.
* Format:
* projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
    public com.google.protobuf.ByteString getParentBytes() {
      java.lang.Object ref = parent_;
      if (ref instanceof String) {
        // Currently cached as a String: encode once and cache the ByteString.
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        parent_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
/**
*
*
* <pre>
* Required. The parent resource where this table will be created.
* Format:
* projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
    public Builder setParent(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      parent_ = value;
      bitField0_ |= 0x00000001; // mark parent as set
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* Required. The parent resource where this table will be created.
* Format:
* projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
    public Builder clearParent() {
      parent_ = getDefaultInstance().getParent();
      bitField0_ = (bitField0_ & ~0x00000001); // clear the presence bit
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* Required. The parent resource where this table will be created.
* Format:
* projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
*/
    public Builder setParentBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value); // proto3 strings must be valid UTF-8
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    // Either table_ (plain value) or tableBuilder_ (live sub-builder) holds the field,
    // never both; getTableFieldBuilder() switches to builder mode on demand.
    private com.google.cloud.bigquery.biglake.v1alpha1.Table table_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.bigquery.biglake.v1alpha1.Table,
            com.google.cloud.bigquery.biglake.v1alpha1.Table.Builder,
            com.google.cloud.bigquery.biglake.v1alpha1.TableOrBuilder>
        tableBuilder_;
/**
*
*
* <pre>
* Required. The table to create. The `name` field does not need to be
* provided for the table creation.
* </pre>
*
* <code>
* .google.cloud.bigquery.biglake.v1alpha1.Table table = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the table field is set.
*/
    public boolean hasTable() {
      return ((bitField0_ & 0x00000002) != 0); // presence bit for the table field
    }
/**
*
*
* <pre>
* Required. The table to create. The `name` field does not need to be
* provided for the table creation.
* </pre>
*
* <code>
* .google.cloud.bigquery.biglake.v1alpha1.Table table = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The table.
*/
    public com.google.cloud.bigquery.biglake.v1alpha1.Table getTable() {
      // Read from whichever representation is live: plain value or sub-builder.
      if (tableBuilder_ == null) {
        return table_ == null
            ? com.google.cloud.bigquery.biglake.v1alpha1.Table.getDefaultInstance()
            : table_;
      } else {
        return tableBuilder_.getMessage();
      }
    }
/**
*
*
* <pre>
* Required. The table to create. The `name` field does not need to be
* provided for the table creation.
* </pre>
*
* <code>
* .google.cloud.bigquery.biglake.v1alpha1.Table table = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    public Builder setTable(com.google.cloud.bigquery.biglake.v1alpha1.Table value) {
      if (tableBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        table_ = value;
      } else {
        tableBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* Required. The table to create. The `name` field does not need to be
* provided for the table creation.
* </pre>
*
* <code>
* .google.cloud.bigquery.biglake.v1alpha1.Table table = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    public Builder setTable(
        com.google.cloud.bigquery.biglake.v1alpha1.Table.Builder builderForValue) {
      // Builder overload: materialize the sub-builder's current state into the field.
      if (tableBuilder_ == null) {
        table_ = builderForValue.build();
      } else {
        tableBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* Required. The table to create. The `name` field does not need to be
* provided for the table creation.
* </pre>
*
* <code>
* .google.cloud.bigquery.biglake.v1alpha1.Table table = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    public Builder mergeTable(com.google.cloud.bigquery.biglake.v1alpha1.Table value) {
      if (tableBuilder_ == null) {
        // Merge field-wise only when a non-default value is already present;
        // otherwise just adopt the incoming message.
        if (((bitField0_ & 0x00000002) != 0)
            && table_ != null
            && table_ != com.google.cloud.bigquery.biglake.v1alpha1.Table.getDefaultInstance()) {
          getTableBuilder().mergeFrom(value);
        } else {
          table_ = value;
        }
      } else {
        tableBuilder_.mergeFrom(value);
      }
      if (table_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }
/**
*
*
* <pre>
* Required. The table to create. The `name` field does not need to be
* provided for the table creation.
* </pre>
*
* <code>
* .google.cloud.bigquery.biglake.v1alpha1.Table table = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    public Builder clearTable() {
      // Clear both representations (value and sub-builder) plus the presence bit.
      bitField0_ = (bitField0_ & ~0x00000002);
      table_ = null;
      if (tableBuilder_ != null) {
        tableBuilder_.dispose();
        tableBuilder_ = null;
      }
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* Required. The table to create. The `name` field does not need to be
* provided for the table creation.
* </pre>
*
* <code>
* .google.cloud.bigquery.biglake.v1alpha1.Table table = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    public com.google.cloud.bigquery.biglake.v1alpha1.Table.Builder getTableBuilder() {
      // Marks the field set: the caller may mutate the sub-builder directly.
      bitField0_ |= 0x00000002;
      onChanged();
      return getTableFieldBuilder().getBuilder();
    }
/**
*
*
* <pre>
* Required. The table to create. The `name` field does not need to be
* provided for the table creation.
* </pre>
*
* <code>
* .google.cloud.bigquery.biglake.v1alpha1.Table table = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    public com.google.cloud.bigquery.biglake.v1alpha1.TableOrBuilder getTableOrBuilder() {
      if (tableBuilder_ != null) {
        return tableBuilder_.getMessageOrBuilder();
      } else {
        return table_ == null
            ? com.google.cloud.bigquery.biglake.v1alpha1.Table.getDefaultInstance()
            : table_;
      }
    }
/**
*
*
* <pre>
* Required. The table to create. The `name` field does not need to be
* provided for the table creation.
* </pre>
*
* <code>
* .google.cloud.bigquery.biglake.v1alpha1.Table table = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    // Lazily switches the table field into sub-builder mode; after this, table_ is null
    // and tableBuilder_ owns the field's state.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.bigquery.biglake.v1alpha1.Table,
            com.google.cloud.bigquery.biglake.v1alpha1.Table.Builder,
            com.google.cloud.bigquery.biglake.v1alpha1.TableOrBuilder>
        getTableFieldBuilder() {
      if (tableBuilder_ == null) {
        tableBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.bigquery.biglake.v1alpha1.Table,
                com.google.cloud.bigquery.biglake.v1alpha1.Table.Builder,
                com.google.cloud.bigquery.biglake.v1alpha1.TableOrBuilder>(
                getTable(), getParentForChildren(), isClean());
        table_ = null;
      }
      return tableBuilder_;
    }

    // Holds either a String or a ByteString; converted lazily on access.
    private java.lang.Object tableId_ = "";
/**
*
*
* <pre>
* Required. The ID to use for the table, which will become the final
* component of the table's resource name.
* </pre>
*
* <code>string table_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The tableId.
*/
    public java.lang.String getTableId() {
      java.lang.Object ref = tableId_;
      if (!(ref instanceof java.lang.String)) {
        // Stored as a ByteString: decode once and cache the String.
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        tableId_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
/**
*
*
* <pre>
* Required. The ID to use for the table, which will become the final
* component of the table's resource name.
* </pre>
*
* <code>string table_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for tableId.
*/
    public com.google.protobuf.ByteString getTableIdBytes() {
      java.lang.Object ref = tableId_;
      if (ref instanceof String) {
        // Currently cached as a String: encode once and cache the ByteString.
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        tableId_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
/**
*
*
* <pre>
* Required. The ID to use for the table, which will become the final
* component of the table's resource name.
* </pre>
*
* <code>string table_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The tableId to set.
* @return This builder for chaining.
*/
    public Builder setTableId(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      tableId_ = value;
      bitField0_ |= 0x00000004; // mark table_id as set
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* Required. The ID to use for the table, which will become the final
* component of the table's resource name.
* </pre>
*
* <code>string table_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return This builder for chaining.
*/
    public Builder clearTableId() {
      tableId_ = getDefaultInstance().getTableId();
      bitField0_ = (bitField0_ & ~0x00000004); // clear the presence bit
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* Required. The ID to use for the table, which will become the final
* component of the table's resource name.
* </pre>
*
* <code>string table_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The bytes for tableId to set.
* @return This builder for chaining.
*/
    public Builder setTableIdBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value); // proto3 strings must be valid UTF-8
      tableId_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest)
  // Singleton default instance and the message parser used by all parseFrom entry points.
  private static final com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest
      DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest();
  }

  public static com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<CreateTableRequest> PARSER =
      new com.google.protobuf.AbstractParser<CreateTableRequest>() {
        @java.lang.Override
        public CreateTableRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<CreateTableRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<CreateTableRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jena.sparql.lang.sparql_10;
import java.math.BigInteger;
import java.util.*;
import org.apache.jena.atlas.AtlasException;
import org.apache.jena.atlas.lib.EscapeStr;
import org.apache.jena.atlas.logging.Log;
import org.apache.jena.datatypes.RDFDatatype;
import org.apache.jena.datatypes.TypeMapper;
import org.apache.jena.datatypes.xsd.XSDDatatype;
import org.apache.jena.graph.Node;
import org.apache.jena.graph.NodeFactory;
import org.apache.jena.graph.TextDirection;
import org.apache.jena.graph.Triple;
import org.apache.jena.irix.IRIException;
import org.apache.jena.irix.IRIx;
import org.apache.jena.irix.RelativeIRIException;
import org.apache.jena.query.ARQ;
import org.apache.jena.query.Query;
import org.apache.jena.query.QueryParseException;
import org.apache.jena.riot.lang.extra.LangParserLib;
import org.apache.jena.riot.system.Checker;
import org.apache.jena.riot.system.ErrorHandler;
import org.apache.jena.riot.system.ErrorHandlerFactory;
import org.apache.jena.riot.system.RiotLib;
import org.apache.jena.sparql.ARQInternalErrorException;
import org.apache.jena.sparql.core.Prologue;
import org.apache.jena.sparql.core.TriplePath;
import org.apache.jena.sparql.core.Var;
import org.apache.jena.sparql.engine.binding.Binding;
import org.apache.jena.sparql.engine.binding.BindingBuilder;
import org.apache.jena.sparql.expr.*;
import org.apache.jena.sparql.graph.NodeConst;
import org.apache.jena.sparql.lang.LabelToNodeMap;
import org.apache.jena.sparql.lang.SyntaxVarScope;
import org.apache.jena.sparql.modify.UpdateSink;
import org.apache.jena.sparql.modify.request.*;
import org.apache.jena.sparql.path.P_Link;
import org.apache.jena.sparql.path.Path;
import org.apache.jena.sparql.syntax.*;
import org.apache.jena.update.Update;
import org.apache.jena.vocabulary.RDF;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** Copies of classes so that the SPARQL 1.0 parser is isolated from later SPARQL parser development. */
class Legacy10 {
/** Base class parsers, mainly SPARQL related */
static class QueryParserBase {
        // Well-known RDF/XSD terms used while building triples during parsing.
        protected final Node XSD_TRUE = NodeConst.nodeTrue;
        protected final Node XSD_FALSE = NodeConst.nodeFalse;
        protected final Node nRDFtype = NodeConst.nodeRDFType;
        protected final Node nRDFnil = NodeConst.nodeNil;
        protected final Node nRDFfirst = NodeConst.nodeFirst;
        protected final Node nRDFrest = NodeConst.nodeRest;
        protected final Node nRDFsubject = RDF.Nodes.subject;
        protected final Node nRDFpredicate = RDF.Nodes.predicate;
        protected final Node nRDFobject = RDF.Nodes.object;
        protected final Node nRDFreifies = RDF.Nodes.reifies;
        // ----
        // Graph patterns, true; in templates, false.
        private boolean bNodesAreVariables = true;
        // In DELETE, false.
        private boolean bNodesAreAllowed = true;
        // label => bNode for construct templates patterns
        @SuppressWarnings("deprecation")
        final LabelToNodeMap bNodeLabels = LabelToNodeMap.createBNodeMap();
        // label => bNode (as variable) for graph patterns
        final LabelToNodeMap anonVarLabels = LabelToNodeMap.createVarMap();
        // This is the map used to allocate blank node labels during parsing.
        // 1/ It is different between CONSTRUCT and the query pattern
        // 2/ Each BasicGraphPattern is a scope for blank node labels so each
        // BGP causes the map to be cleared at the start of the BGP
        protected LabelToNodeMap activeLabelMap = anonVarLabels;
        // Labels seen in earlier BGP scopes (reuse across scopes is an error).
        protected Set<String> previousLabels = new HashSet<>();
        // Aggregates are only allowed in places where grouping can happen.
        // e.g. SELECT clause but not a FILTER.
        private boolean allowAggregatesInExpressions = false;
        // Nesting depth of aggregate expressions currently being parsed.
        private int aggregateDepth = 0;
        // LabelToNodeMap listLabelMap = new LabelToNodeMap(true, new VarAlloc("L"));
        // ----
        public QueryParserBase() {}
        // The prologue (BASE / PREFIX / VERSION declarations) being populated by the parser.
        protected Prologue prologue;
        // Install the prologue that subsequent BASE/PREFIX/VERSION declarations update.
        public void setPrologue(Prologue prologue) {
            this.prologue = prologue;
        }

        public Prologue getPrologue() {
            return prologue;
        }
        // Handle a BASE declaration: reject blank-node URI syntax, resolve the IRI
        // (isBNodeIRI/resolveIRI/throwParseException are defined later in this class),
        // then record it in the prologue.
        protected void setBase(String iriStr, int line, int column) {
            if ( isBNodeIRI(iriStr) )
                throwParseException("Blank node URI syntax used for BASE", line, column);
            iriStr = resolveIRI(iriStr, line, column);
            getPrologue().setBaseURI(iriStr);
        }
        // Handle a PREFIX declaration. Line/column are unused here because any prefix
        // fix-up should already have happened in the parser ("token to prefix" step).
        protected void setPrefix(String prefix, String uriStr, int line, int column) {
            // prefix = fixupPrefix(prefix, line, column);
            getPrologue().setPrefix(prefix, uriStr);
        }
        // Record a VERSION declaration; line/column are accepted for parser uniformity but unused.
        protected void declareVersion(String version, int line, int column) {
            getPrologue().setVersion(version);
        }
        // Inside a CONSTRUCT template, blank nodes are real blank nodes (not variables).
        protected void setInConstructTemplate(boolean b) {
            setBNodesAreVariables(!b);
        }
        // True when blank node labels in the current context map to variables (graph patterns).
        protected boolean getBNodesAreVariables() {
            return bNodesAreVariables;
        }
protected void setBNodesAreVariables(boolean bNodesAreVariables) {
this.bNodesAreVariables = bNodesAreVariables;
if ( bNodesAreVariables )
activeLabelMap = anonVarLabels;
else
activeLabelMap = bNodeLabels;
}
        // Whether blank nodes may appear at all in the current context (false inside DELETE).
        protected boolean getBNodesAreAllowed() {
            return bNodesAreAllowed;
        }

        protected void setBNodesAreAllowed(boolean bNodesAreAllowed) {
            this.bNodesAreAllowed = bNodesAreAllowed;
        }
        // Whether aggregate expressions are legal here (e.g. SELECT clause but not FILTER).
        protected boolean getAllowAggregatesInExpressions() {
            return allowAggregatesInExpressions;
        }

        protected void setAllowAggregatesInExpressions(boolean allowAggregatesInExpressions) {
            this.allowAggregatesInExpressions = allowAggregatesInExpressions;
        }
        // Tracking for nested aggregates: depth > 0 means the parser is inside an
        // aggregate expression (used to detect illegal aggregate nesting).
        protected void startAggregate() {
            aggregateDepth++;
        }

        protected int getAggregateDepth() {
            return aggregateDepth;
        }

        protected void finishAggregate() {
            aggregateDepth--;
        }
protected Element compressGroupOfOneGroup(ElementGroup elg) {
// remove group of one group.
if ( elg.size() == 1 ) {
Element e1 = elg.get(0);
if ( e1 instanceof ElementGroup )
return e1;
}
return elg;
}
        // Numeric literal constructors: wrap the lexical form with the matching XSD datatype.
        protected Node createLiteralInteger(String lexicalForm) {
            return NodeFactory.createLiteralDT(lexicalForm, XSDDatatype.XSDinteger);
        }

        protected Node createLiteralDouble(String lexicalForm) {
            return NodeFactory.createLiteralDT(lexicalForm, XSDDatatype.XSDdouble);
        }

        protected Node createLiteralDecimal(String lexicalForm) {
            return NodeFactory.createLiteralDT(lexicalForm, XSDDatatype.XSDdecimal);
        }
protected Node stripSign(Node node) {
if ( !node.isLiteral() )
return node;
String lex = node.getLiteralLexicalForm();
String lang = node.getLiteralLanguage();
RDFDatatype dt = node.getLiteralDatatype();
if ( !lex.startsWith("-") && !lex.startsWith("+") )
throw new ARQInternalErrorException("Literal does not start with a sign: " + lex);
lex = lex.substring(1);
return NodeFactory.createLiteral(lex, lang, dt);
}
// Because of Java (Java strings have surrogate pairs) we only detect singleton surrogates.
protected void checkString(String string, int line, int column) {
// Checks for bare surrogate pairs.
for ( int i = 0; i < string.length(); i++ ) {
// Not "codePointAt" which does surrogate processing.
char ch = string.charAt(i);
// Check surrogate pairs are in pairs. Pairs are high-low.
if ( Character.isLowSurrogate(ch) )
throw new QueryParseException("Bad surrogate pair (low surrogate without high surrogate)", line, column);
if ( Character.isHighSurrogate(ch) ) {
i++;
if ( i == string.length() )
throw new QueryParseException("Bad surrogate pair (end of string)", line, column);
char ch1 = string.charAt(i);
if ( !Character.isLowSurrogate(ch1) ) {
throw new QueryParseException("Bad surrogate pair (high surrogate not followed by low surrogate)", line, column);
}
}
}
}
        // ---- Literals
        // Strings, lang strings, dirlang strings and datatyped literals.
        protected Node createLiteralString(String lexicalForm, int line, int column) {
            return NodeFactory.createLiteralString(lexicalForm);
        }
        protected Node createLiteralDT(String lexicalForm, String datatypeURI, int line, int column) {
            // Can't have type and lang tag in parsing.
            return createLiteralAny(lexicalForm, null, null, datatypeURI, line, column);
        }
        // langTagDir is the raw token including the leading '@', possibly with "--dir" appended.
        protected Node createLiteralLang(String lexicalForm, String langTagDir, int line, int column) {
            // Can't have type and lang tag in parsing.
            return createLiteralAny(lexicalForm, langTagDir, null, null, line, column);
        }
        /**
         * Create a literal, given all possible component parts.
         * Exactly one outcome applies: datatyped literal, plain string,
         * language-tagged string, or language + base-direction string.
         *
         * @param langTag     raw token including the leading '@', may embed "--dir"
         * @param textDirStr  explicit base direction if already separated out, else null
         */
        private Node createLiteralAny(String lexicalForm, String langTag, String textDirStr, String datatypeURI, int line, int column) {
            Node n = null;
            // Can't have type and lang tag in parsing.
            if ( datatypeURI != null ) {
                if ( langTag != null || textDirStr != null )
                    throw new ARQInternalErrorException("Datatype with lang/langDir");
                RDFDatatype dType = TypeMapper.getInstance().getSafeTypeByName(datatypeURI);
                n = NodeFactory.createLiteralDT(lexicalForm, dType);
                return n;
            }
            // datatypeURI is null
            if ( langTag == null && textDirStr == null )
                return NodeFactory.createLiteralString(lexicalForm);
            // NOTE(review): from here langTag is assumed non-null - a null langTag
            // with a non-null textDirStr would NPE on the next line. The visible
            // callers never pass that combination; confirm if new callers appear.
            // Strip '@'
            langTag = langTag.substring(1);
            // See if we split langTag into language tag and base direction.
            String textDirStr2 = textDirStr;
            String langTag2 = langTag;
            if ( textDirStr == null ) {
                // "@lang--dir" form: split at the "--".
                int idx = langTag.indexOf("--");
                if ( idx >= 0 ) {
                    textDirStr2 = langTag.substring(idx+2);
                    langTag2 = langTag.substring(0, idx);
                }
            }
            if ( langTag2 != null && textDirStr2 != null ) {
                if ( ! TextDirection.isValid(textDirStr2) )
                    throw new QueryParseException("Illegal base direction: '"+textDirStr2+"'", line, column);
                return NodeFactory.createLiteralDirLang(lexicalForm, langTag2, textDirStr2);
            }
            // langTag != null, textDirStr == null.
            return NodeFactory.createLiteralLang(lexicalForm, langTag2);
        }
// protected String langFromToken(String image) {
// int idx = image.indexOf("--");
// if ( idx < 0 )
// // No direction; remove @
// return image.substring(1);
// return image.substring(1, idx);
// }
//
// protected String dirFromToken(String image) {
// int idx = image.indexOf("--");
// if ( idx < 0 )
// return null;
// // Not checked for value
// return image.substring(idx+2);
// }
protected long integerValue(String s) {
try {
if ( s.startsWith("+") )
s = s.substring(1);
if ( s.startsWith("0x") ) {
// Hex
s = s.substring(2);
return Long.parseLong(s, 16);
}
return Long.parseLong(s);
} catch (NumberFormatException ex) {
try {
// Possible too large for a long.
BigInteger integer = new BigInteger(s);
throwParseException("Number '" + s + "' is a valid number but can't not be stored in a long");
} catch (NumberFormatException ex2) {}
throw new QueryParseException(ex, -1, -1);
}
}
protected double doubleValue(String s) {
if ( s.startsWith("+") )
s = s.substring(1);
double valDouble = Double.parseDouble(s);
return valDouble;
}
        /** Remove first and last characters (e.g. ' or ") from a string. Callers guarantee length >= 2. */
        protected static String stripQuotes(String s) {
            return s.substring(1, s.length() - 1);
        }
        /** Remove first 3 and last 3 characters (e.g. ''' or \"\"\") from a string. Callers guarantee length >= 6. */
        protected static String stripQuotes3(String s) {
            return s.substring(3, s.length() - 3);
        }
        /** Remove the first n characters from the string. */
        protected static String stripChars(String s, int n) {
            return LangParserLib.stripChars(s, n);
        }
        /** Create a variable from a "?name" / "$name" token (marker character dropped). */
        protected Var createVariable(String s, int line, int column) {
            s = s.substring(1); // Drop the marker
            // This is done by the parser input stream nowadays.
            // s = unescapeCodePoint(s, line, column);
            // Check \ u did not put in any illegals.
            return Var.alloc(s);
        }
        // Build an RDF 1.2 triple term from its three components.
        protected Node createTripleTerm(Node s, Node p, Node o, int line, int column) {
            return NodeFactory.createTripleTerm(s, p, o);
        }
        // ---- IRIs and Nodes
        /** Process a quoted IRI token: strip angle brackets, apply escapes, validate, resolve. */
        protected String resolveQuotedIRI(String iriStr, int line, int column) {
            iriStr = stripQuotes(iriStr);
            iriStr = unescapeUnicode(iriStr, line, column);
            // Check for Unicode surrogates
            checkString(iriStr, line, column);
            return resolveIRI(iriStr, line, column);
        }
        public static final String ParserLoggerName = "SPARQL";
        // NOTE(review): public and mutable; presumably left non-final for tests or
        // legacy callers to swap the logger - consider making it final.
        public static Logger parserLog = LoggerFactory.getLogger(ParserLoggerName);
        private static final ErrorHandler errorHandler = ErrorHandlerFactory.errorHandlerStd(parserLog);
        /**
         * Resolve an IRI string against the prologue's base IRI, if one exists.
         * Skolemized bNode IRIs, and parsing with no prologue or no base, pass
         * through unchanged.
         */
        protected String resolveIRI(String iriStr, int line, int column) {
            if ( isBNodeIRI(iriStr) )
                return iriStr;
            if ( getPrologue() == null )
                return iriStr;
            if ( getPrologue().getBase() == null )
                return iriStr;
            IRIx irix = resolveIRIx(iriStr, line, column);
            return irix.toString();
        }
        // Resolve with error reporting: bad IRIs are passed to the error handler
        // and returned as-is (IRIx.createAny) so parsing can continue.
        private IRIx resolveIRIx(String iriStr, long line, long col) {
            // Aligns with ParserProfileStd.internalMakeIRI
            // Hard to do a meaningful DRY because SPARQL works in strings
            // whereas ParserProfile works in IRIx.
            if ( iriStr.contains(" ") ) {
                // Specific check for spaces.
                errorHandler.warning("Bad IRI: <" + iriStr + "> Spaces are not legal in URIs/IRIs.", line, col);
                return IRIx.createAny(iriStr);
            }
            try {
                IRIx resolvedIRIx = getPrologue().getBase().resolve(iriStr);
                return resolvedIRIx;
            } catch (RelativeIRIException ex) {
                // Could not be made absolute against the base.
                errorHandler.error("Relative IRI: " + iriStr, line, col);
                return IRIx.createAny(iriStr);
            } catch (IRIException ex) {
                // Same code as Checker.iriViolations
                String msg = ex.getMessage();
                Checker.iriViolationMessage(iriStr, true, msg, line, col, errorHandler);
                return IRIx.createAny(iriStr);
            }
        }
        /**
         * Expand a prefixed name ("ex:local") to a full IRI string, first applying
         * PN_LOCAL_ESC unescaping to the local part.
         *
         * @throws QueryParseException if the prefix is undeclared (unless fixup is enabled)
         */
        protected String resolvePName(String prefixedName, int line, int column) {
            // It's legal.
            int idx = prefixedName.indexOf(':');
            // -- Escapes in local name
            String prefix = prefixedName.substring(0, idx);
            String local = prefixedName.substring(idx + 1);
            local = unescapePName(local, line, column);
            prefixedName = prefix + ":" + local;
            // --
            String s = getPrologue().expandPrefixedName(prefixedName);
            if ( s == null ) {
                // Optionally repair undeclared prefixes instead of failing.
                if ( ARQ.isTrue(ARQ.fixupUndefinedPrefixes) )
                    return RiotLib.fixupPrefixes.apply(prefixedName);
                throwParseException("Unresolved prefixed name: " + prefixedName, line, column);
            }
            return s;
        }
private boolean skolomizedBNodes = ARQ.isTrue(ARQ.constantBNodeLabels);
protected Node createNode(String iri) {
if ( skolomizedBNodes )
return RiotLib.createIRIorBNode(iri);
else
return NodeFactory.createURI(iri);
}
protected boolean isBNodeIRI(String iri) {
return skolomizedBNodes && RiotLib.isBNodeIRI(iri);
}
        // -------- Basic Graph Patterns and Blank Node label scopes
        // A BasicGraphPattern is any sequence of TripleBlocks, separated by filters,
        // but not by other graph patterns.
        // Each BGP is a fresh scope for blank node labels.
        protected void startBasicGraphPattern() {
            activeLabelMap.clear();
        }
        // Labels used in this BGP become ineligible for reuse later
        // (enforced in createBNode(String, int, int)).
        protected void endBasicGraphPattern() {
            previousLabels.addAll(activeLabelMap.getLabels());
        }
        protected void startTriplesBlock() {}
        protected void endTriplesBlock() {}
        // On entry to a new group, the current BGP is ended.
        protected void startGroup(ElementGroup elg) {
            endBasicGraphPattern();
            startBasicGraphPattern();
        }
        protected void endGroup(ElementGroup elg) {
            endBasicGraphPattern();
        }
        // --------
        /** Reject non-concrete terms (e.g. variables) where a concrete RDF term is required. */
        protected void checkConcrete(Node n, int line, int column) {
            if ( !n.isConcrete() )
                throwParseException("Term is not concrete: " + n, line, column);
        }
        // BNode from a list
        // protected Node createListNode()
        // { return listLabelMap.allocNode(); }
        protected Node createListNode(int line, int column) {
            return createBNode(line, column);
        }
        // Unlabelled bNode.
        protected Node createBNode(int line, int column) {
            if ( !bNodesAreAllowed )
                throwParseException("Blank nodes not allowed in DELETE templates", line, column);
            return activeLabelMap.allocNode();
        }
        // Labelled bNode.
        // Within the active scope the same label maps to the same node;
        // reusing a label from an earlier, closed scope is an error.
        protected Node createBNode(String label, int line, int column) {
            if ( !bNodesAreAllowed )
                throwParseException("Blank nodes not allowed in DELETE templates: " + label, line, column);
            if ( previousLabels.contains(label) )
                throwParseException("Blank node label reuse not allowed at this point: " + label, line, column);
            // label = unescapeCodePoint(label, line, column);
            return activeLabelMap.asNode(label);
        }
        // Reifier syntax precondition: only a plain predicate or a single-link
        // property path is allowed; returns the predicate node to use.
        protected Node preConditionReifier(Node s, Node p, Path path, Node o, int line, int column) {
            if ( p != null )
                return p;
            if ( path instanceof P_Link )
                return ((P_Link)path).getNode();
            throwParseException("Only simple paths allowed with reifier syntax", line, column);
            return null;    // Not reached: throwParseException always throws.
        }
        /** Build an EXISTS expression over a graph pattern. */
        protected Expr createExprExists(Element element) {
            return new E_Exists(element);
        }
        /** Build a NOT EXISTS expression over a graph pattern. */
        protected Expr createExprNotExists(Element element) {
            // Could negate here.
            return new E_NotExists(element);
        }
        // Convert a parser token, which includes the final ":", to a prefix name.
        protected String fixupPrefix(String prefix, int line, int column) {
            // \ u processing!
            if ( prefix.endsWith(":") )
                prefix = prefix.substring(0, prefix.length() - 1);
            return prefix;
        }
        /** Set the graph name on a quad accumulator (GRAPH clause). */
        protected void setAccGraph(QuadAccSink acc, Node gn) {
            acc.setGraph(gn);
        }
        // Emit one triple into the collector.
        protected void insert(TripleCollector acc, Node s, Node p, Node o) {
            acc.addTriple(Triple.create(s, p, o));
        }
        // Emit one triple at a specific position in the collector.
        protected void insert(TripleCollectorMark acc, int index, Node s, Node p, Node o) {
            acc.addTriple(index, Triple.create(s, p, o));
        }
        // Path forms: a null predicate means "use the property path".
        protected void insert(TripleCollector acc, Node s, Node p, Path path, Node o) {
            if ( p == null )
                acc.addTriplePath(new TriplePath(s, path, o));
            else
                acc.addTriple(Triple.create(s, p, o));
        }
        protected void insert(TripleCollectorMark acc, int index, Node s, Node p, Path path, Node o) {
            if ( p == null )
                acc.addTriplePath(index, new TriplePath(s, path, o));
            else
                acc.addTriple(index, Triple.create(s, p, o));
        }
        // Copy every triple/path from a parsed pattern block into another collector.
        protected void insert(TripleCollector target, ElementPathBlock source) {
            for ( TriplePath path : source.getPattern() ) {
                if ( path.isTriple() ) {
                    target.addTriple(path.asTriple());
                } else {
                    target.addTriplePath(path);
                }
            }
        }
        /**
         * Emit the reification triple "reifierId rdf:reifies tripleTerm(s,p,o)",
         * allocating a fresh blank node when no id was supplied; returns the id used.
         */
        protected Node insertTripleReifier(TripleCollector acc, Node reifierId, Node s, Node p, Node o, int line, int column) {
            Node tripleTerm = createTripleTerm(s, p, o, line, column);
            if ( reifierId == null )
                reifierId = createBNode(line, column);
            Triple t = Triple.create(reifierId, nRDFreifies, tripleTerm);
            acc.addTriple(t);
            return reifierId;
        }
        // Reifier id set by annotation syntax; null when none is active.
        private Node annotationReifierId = null;
        protected void setReifierId(Node reifId) {
            annotationReifierId = reifId;
        }
        // Return the explicit annotation reifier id if set; otherwise allocate a
        // blank node and emit the reification triple for (s,p,o).
        protected Node getOrAllocReifierId(TripleCollector acc, Node s, Node p, Node o, int line, int column) {
            if ( annotationReifierId != null )
                return annotationReifierId;
            Node reifierId = createBNode(-1, -1);
            insertTripleReifier(acc, reifierId, s, p, o, line, column);
            return reifierId;
        }
        protected void clearReifierId() {
            annotationReifierId = null;
        }
        /** Wrap a parsed node as an expression (constant or variable). */
        protected Expr asExpr(Node n) {
            return ExprLib.nodeToExpr(n);
        }
        // Makers of functions that need more than just a simple "new E_...".
        // IRI(rel) - captures the query's base IRI for resolution at evaluation time.
        protected Expr makeFunction_IRI(Expr expr) {
            return new E_IRI(prologue.getBaseURI(), expr);
        }
        protected Expr makeFunction_URI(Expr expr) {
            return new E_URI(prologue.getBaseURI(), expr);
        }
        // IRI(base, rel) or IRI(rel, null)
        protected Expr makeFunction_IRI(Expr expr1, Expr expr2) {
            if ( expr2 == null )
                return makeFunction_IRI(expr1);
            return new E_IRI2(expr1, prologue.getBaseURI(), expr2);
        }
        protected Expr makeFunction_URI(Expr expr1, Expr expr2) {
            if ( expr2 == null )
                return makeFunction_URI(expr1);
            return new E_URI2(expr1, prologue.getBaseURI(), expr2);
        }
        // Create a E_BNode function: BNODE() / BNODE(expr).
        protected Expr makeFunction_BNode() {
            return E_BNode.create();
        }
        protected Expr makeFunction_BNode(Expr expr) {
            return E_BNode.create(expr);
        }
        // Utilities to remove escapes in strings.
        /* package-testing */ static String unescapeStr(String s) {
            return unescapeStr(s, -1, -1);
        }
        // Do we need the line/column versions?
        // Why not catch exceptions and convert to QueryParseException
        protected static String unescapeStr(String s, int line, int column) {
            return unescape(s, '\\', false, line, column);
        }
        /** Unescape unicode - no surrogate processing. */
        protected static String unescapeUnicode(String s, int line, int column) {
            return unescape(s, '\\', true, line, column);
        }
        // Worker function. Converts AtlasException from the escape engine into a
        // parse error at the given location.
        protected static String unescape(String s, char escape, boolean pointCodeOnly, int line, int column) {
            try {
                return EscapeStr.unescape(s, escape, pointCodeOnly);
            } catch (AtlasException ex) {
                throwParseException(ex.getMessage(), line, column);
                return null;    // Not reached: throwParseException always throws.
            }
        }
        /**
         * Undo PN_LOCAL_ESC escapes (backslash before a reserved punctuation
         * character) in the local part of a prefixed name, keeping line/column
         * updated across the scan for error reporting.
         */
        protected static String unescapePName(String s, int line, int column) {
            char escape = '\\';
            int idx = s.indexOf(escape);
            if ( idx == -1 )
                return s;   // Fast path: no escapes present.
            int len = s.length();
            StringBuilder sb = new StringBuilder();
            for ( int i = 0; i < len; i++ ) {
                char ch = s.charAt(i);
                // Keep line and column numbers.
                switch (ch) {
                    case '\n' :
                    case '\r' :
                        line++;
                        column = 1;
                        break;
                    default :
                        column++;
                        break;
                }
                if ( ch != escape ) {
                    sb.append(ch);
                    continue;
                }
                // Escape
                if ( i >= s.length() - 1 )
                    throwParseException("Illegal escape at end of string", line, column);
                char ch2 = s.charAt(i + 1);
                column = column + 1;
                i = i + 1;
                switch (ch2) { // PN_LOCAL_ESC
                    case '_' :
                    case '~' :
                    case '.' :
                    case '-' :
                    case '!' :
                    case '$' :
                    case '&' :
                    case '\'' :
                    case '(' :
                    case ')' :
                    case '*' :
                    case '+' :
                    case ',' :
                    case ';' :
                    case '=' :
                    case ':' :
                    case '/' :
                    case '?' :
                    case '#' :
                    case '@' :
                    case '%' :
                        sb.append(ch2);
                        break;
                    default :
                        throwParseException("Illegal prefix name escape: " + ch2, line, column);
                }
            }
            return sb.toString();
        }
        // Deprecation notices are logged, not fatal.
        protected void warnDeprecation(String msg) {
            Log.warn(this, msg);
        }
        /** Throw a QueryParseException with the location embedded in the message. */
        public static void throwParseException(String msg, int line, int column) {
            throw new QueryParseException("Line " + line + ", column " + column + ": " + msg, line, column);
        }
        /** Throw a QueryParseException with no location information. */
        public static void throwParseException(String msg) {
            throw new QueryParseException(msg, -1, -1);
        }
}
    /** Class that has all the parse event operations and other query/update specific things */
    static class SPARQLParserBase extends QueryParserBase {
        // Enclosing queries, pushed on entry to a subSELECT.
        private Deque<Query> stack = new ArrayDeque<>();
        // The query currently being built.
        protected Query query;
        protected SPARQLParserBase() {}
        public void setQuery(Query q) {
            query = q;
            setPrologue(q);
        }
        public Query getQuery() { return query; }
        // The ARQ parser is both query and update languages.
        // // ---- SPARQL/Update (Submission)
        // private UpdateRequest requestSubmission = null;
        //
        // protected UpdateRequest getUpdateRequestSubmission() { return requestSubmission; }
        // public void setUpdateRequest(UpdateRequest request)
        // {
        //     setPrologue(request);
        //     this.requestSubmission = request;
        //     // And create a query because we may have nested selects.
        //     this.query = new Query ();
        // }
        // Destination for parsed update operations.
        private UpdateSink sink = null;
        // Places to push settings across points where we reset.
        private boolean oldBNodesAreVariables;
        private boolean oldBNodesAreAllowed;
        // Count of subSelect nesting.
        // Level 0 is top level.
        // Level -1 is not in a pattern WHERE clause.
        private int queryLevel = -1;
        // Saved blank-node label state (see pushLabelState/popLabelState).
        private Deque<Set<String>> stackPreviousLabels = new ArrayDeque<>();
        private Deque<LabelToNodeMap> stackCurrentLabels = new ArrayDeque<>();
        /** Prepare for parsing an update request; a Query is still created for nested SELECTs. */
        public void setUpdate(Prologue prologue, UpdateSink sink) {
            this.sink = sink;
            this.query = new Query();
            setPrologue(prologue);
        }
        // Signal start/finish of units
        protected void startQuery() {}
        protected void finishQuery() {
            // Resolve "SELECT *" to the in-scope variables.
            query.ensureResultVars();
        }
        protected void startUpdateRequest() {}
        protected void finishUpdateRequest() {}
        // protected void startBasicGraphPattern()
        // { activeLabelMap.clear(); }
        //
        // protected void endBasicGraphPattern()
        // { oldLabels.addAll(activeLabelMap.getLabels()); }
        protected void startUpdateOperation() {}
        protected void finishUpdateOperation() {}
        protected void startModifyUpdate() { }
        protected void finishModifyUpdate() { }
        // INSERT DATA: blank node labels denote real blank nodes, not variables.
        protected void startDataInsert(QuadDataAccSink qd, int line, int col) {
            oldBNodesAreVariables = getBNodesAreVariables();
            setBNodesAreVariables(false);
            activeLabelMap.clear();
        }
        protected void finishDataInsert(QuadDataAccSink qd, int line, int col) {
            previousLabels.addAll(activeLabelMap.getLabels());
            activeLabelMap.clear();
            setBNodesAreVariables(oldBNodesAreVariables);
        }
        // DELETE DATA: blank nodes are not allowed at all.
        protected void startDataDelete(QuadDataAccSink qd, int line, int col) {
            oldBNodesAreAllowed = getBNodesAreAllowed();
            setBNodesAreAllowed(false);
        }
        protected void finishDataDelete(QuadDataAccSink qd, int line, int col) {
            setBNodesAreAllowed(oldBNodesAreAllowed);
        }
        // These can be nested with subSELECTs but subSELECTs share bNodeLabel state.
        protected void startWherePattern() {
            queryLevel += 1;
            if ( queryLevel == 0 ) {
                pushLabelState();
                clearLabelState();
            }
        }
        protected void finishWherePattern() {
            if ( queryLevel == 0 )
                popLabelState();
            queryLevel -= 1;
        }
        // This holds the accumulation of labels from earlier INSERT DATA
        // across template creation (bNode in templates get cloned before
        // going into the data).
        protected void startInsertTemplate(QuadAcc qd, int line, int col) {
            oldBNodesAreVariables = getBNodesAreVariables();
            setBNodesAreVariables(false);
            pushLabelState();
        }
        protected void finishInsertTemplate(QuadAcc qd, int line, int col) {
            // Restore accumulated labels.
            popLabelState();
            // This also set the bnode syntax to node functionality - must be after
            // popLabelState.
            setBNodesAreVariables(oldBNodesAreVariables);
        }
        // No bNodes in delete templates.
        protected void startDeleteTemplate(QuadAcc qd, int line, int col) {
            oldBNodesAreAllowed = getBNodesAreAllowed();
            setBNodesAreAllowed(false);
        }
        protected void finishDeleteTemplate(QuadAcc qd, int line, int col) {
            setBNodesAreAllowed(oldBNodesAreAllowed);
        }
        protected void emitUpdate(Update update) {
            // The parser can send null if it already performed an INSERT_DATA or
            // DELETE_DATA
            if ( null != update ) {
                // Verify each operation
                verifyUpdate(update);
                sink.send(update);
            }
        }
        // Shared, stateless visitor: checks variable scoping of MODIFY WHERE patterns.
        private static UpdateVisitor v = new UpdateVisitorBase() {
            @Override
            public void visit(UpdateModify mod) {
                SyntaxVarScope.checkElement(mod.getWherePattern());
            }
        };
        private void verifyUpdate(Update update) {
            update.visit(v);
        }
        protected QuadDataAccSink createInsertDataSink() {
            return sink.createInsertDataSink();
        }
        protected QuadDataAccSink createDeleteDataSink() {
            return sink.createDeleteDataSink();
        }
        // Save the current query before building a nested one.
        protected void pushQuery() {
            if ( query == null )
                throw new ARQInternalErrorException("Parser query object is null");
            stack.push(query);
        }
        protected void startSubSelect(int line, int col) {
            pushQuery();
            query = newSubQuery(getPrologue());
        }
        // NOTE(review): parameter name "progloue" is a typo for "prologue" (unused in body).
        protected Query newSubQuery(Prologue progloue) {
            // The parser uses the same prologue throughout the parsing process.
            Query subQuery = new Query();
            subQuery.setSyntax(query.getSyntax());
            return subQuery;
        }
        protected void popQuery() {
            query = stack.pop();
        }
        /** Finish a subSELECT: must be a SELECT query; resolves "SELECT *" and restores the outer query. */
        protected Query endSubSelect(int line, int column) {
            Query subQuery = query;
            if ( ! subQuery.isSelectType() )
                throwParseException("Subquery not a SELECT query", line, column);
            // Sort out SELECT *
            subQuery.ensureResultVars();
            popQuery();
            return subQuery;
        }
        // State for the VALUES block currently being parsed (trailing or inline).
        private List<Var> variables = null;
        private List<Binding> values = null;
        private BindingBuilder rowBuilder;
        private int currentColumn = -1;
        // Trailing VALUES.
        protected void startValuesClause(int line, int col) {
            variables = new ArrayList<>();
            values = new ArrayList<>();
            rowBuilder = Binding.builder();
        }
        protected void finishValuesClause(int line, int col)
        {
            getQuery().setValuesDataBlock(variables, values);
        }
        // ElementData. VALUES in the WHERE clause.
        protected void startInlineData(List<Var> vars, List<Binding> rows, int line, int col) {
            variables = vars;
            values = rows;
            rowBuilder = Binding.builder();
        }
        protected void finishInlineData(int line, int col)
        {}
        protected void emitDataBlockVariable(Var v) { variables.add(v); }
        protected void startDataBlockValueRow(int line, int col) {
            rowBuilder.reset();
            currentColumn = -1;
        }
        protected void emitDataBlockValue(Node n, int line, int col) {
            currentColumn++;
            if ( currentColumn >= variables.size() )
                // Exception will be thrown later when we have the complete row count.
                return;
            Var v = variables.get(currentColumn);
            if ( n != null && ! n.isConcrete() ) {
                String msg = QueryParseException.formatMessage("Term is not concrete: "+n, line, col);
                throw new QueryParseException(msg, line, col);
            }
            // A null node is UNDEF: leave the variable unbound in this row.
            if ( n != null )
                rowBuilder.add(v, n);
        }
        protected void finishDataBlockValueRow(int line, int col) {
            //if ( variables.size() != currentValueRow().size() )
            if ( currentColumn+1 != variables.size() )
            {
                String msg = String.format("Mismatch: %d variables but %d values",variables.size(), currentColumn+1);
                msg = QueryParseException.formatMessage(msg, line, col);
                throw new QueryParseException(msg, line , col);
            }
            values.add(rowBuilder.build());
        }
        private void pushLabelState() {
            // Hide used labels already tracked.
            stackPreviousLabels.push(previousLabels);
            stackCurrentLabels.push(activeLabelMap);
            previousLabels = new HashSet<>();
            activeLabelMap.clear();
        }
        private void popLabelState() {
            previousLabels = stackPreviousLabels.pop();
            activeLabelMap = stackCurrentLabels.pop();
        }
        private void clearLabelState() {
            activeLabelMap.clear();
            previousLabels.clear();
        }
    }
}
|
apache/jena | 38,073 | jena-arq/src/main/java/org/apache/jena/sparql/lang/sparql_11/Legacy11.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jena.sparql.lang.sparql_11;
import java.math.BigInteger;
import java.util.*;
import org.apache.jena.atlas.AtlasException;
import org.apache.jena.atlas.lib.EscapeStr;
import org.apache.jena.atlas.logging.Log;
import org.apache.jena.datatypes.RDFDatatype;
import org.apache.jena.datatypes.TypeMapper;
import org.apache.jena.datatypes.xsd.XSDDatatype;
import org.apache.jena.graph.Node;
import org.apache.jena.graph.NodeFactory;
import org.apache.jena.graph.TextDirection;
import org.apache.jena.graph.Triple;
import org.apache.jena.irix.IRIException;
import org.apache.jena.irix.IRIx;
import org.apache.jena.irix.RelativeIRIException;
import org.apache.jena.query.ARQ;
import org.apache.jena.query.Query;
import org.apache.jena.query.QueryParseException;
import org.apache.jena.riot.lang.extra.LangParserLib;
import org.apache.jena.riot.system.Checker;
import org.apache.jena.riot.system.ErrorHandler;
import org.apache.jena.riot.system.ErrorHandlerFactory;
import org.apache.jena.riot.system.RiotLib;
import org.apache.jena.sparql.ARQInternalErrorException;
import org.apache.jena.sparql.core.Prologue;
import org.apache.jena.sparql.core.TriplePath;
import org.apache.jena.sparql.core.Var;
import org.apache.jena.sparql.engine.binding.Binding;
import org.apache.jena.sparql.engine.binding.BindingBuilder;
import org.apache.jena.sparql.expr.*;
import org.apache.jena.sparql.graph.NodeConst;
import org.apache.jena.sparql.lang.LabelToNodeMap;
import org.apache.jena.sparql.lang.SyntaxVarScope;
import org.apache.jena.sparql.modify.UpdateSink;
import org.apache.jena.sparql.modify.request.*;
import org.apache.jena.sparql.path.P_Link;
import org.apache.jena.sparql.path.Path;
import org.apache.jena.sparql.syntax.*;
import org.apache.jena.update.Update;
import org.apache.jena.vocabulary.RDF;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** Copies of classes so that the SPARQL 1.0 parser is isolated from later SPARQL parser development. */
class Legacy11 {
/** Base class parsers, mainly SPARQL related */
static class QueryParserBase {
protected final Node XSD_TRUE = NodeConst.nodeTrue;
protected final Node XSD_FALSE = NodeConst.nodeFalse;
protected final Node nRDFtype = NodeConst.nodeRDFType;
protected final Node nRDFnil = NodeConst.nodeNil;
protected final Node nRDFfirst = NodeConst.nodeFirst;
protected final Node nRDFrest = NodeConst.nodeRest;
protected final Node nRDFsubject = RDF.Nodes.subject;
protected final Node nRDFpredicate = RDF.Nodes.predicate;
protected final Node nRDFobject = RDF.Nodes.object;
protected final Node nRDFreifies = RDF.Nodes.reifies;
// ----
// Graph patterns, true; in templates, false.
private boolean bNodesAreVariables = true;
// In DELETE, false.
private boolean bNodesAreAllowed = true;
// label => bNode for construct templates patterns
@SuppressWarnings("deprecation")
final LabelToNodeMap bNodeLabels = LabelToNodeMap.createBNodeMap();
// label => bNode (as variable) for graph patterns
final LabelToNodeMap anonVarLabels = LabelToNodeMap.createVarMap();
// This is the map used allocate blank node labels during parsing.
// 1/ It is different between CONSTRUCT and the query pattern
// 2/ Each BasicGraphPattern is a scope for blank node labels so each
// BGP causes the map to be cleared at the start of the BGP
protected LabelToNodeMap activeLabelMap = anonVarLabels;
protected Set<String> previousLabels = new HashSet<>();
// Aggregates are only allowed in places where grouping can happen.
// e.g. SELECT clause but not a FILTER.
private boolean allowAggregatesInExpressions = false;
private int aggregateDepth = 0;
// LabelToNodeMap listLabelMap = new LabelToNodeMap(true, new VarAlloc("L"));
// ----
public QueryParserBase() {}
protected Prologue prologue;
public void setPrologue(Prologue prologue) {
this.prologue = prologue;
}
public Prologue getPrologue() {
return prologue;
}
protected void setBase(String iriStr, int line, int column) {
if ( isBNodeIRI(iriStr) )
throwParseException("Blank node URI syntax used for BASE", line, column);
iriStr = resolveIRI(iriStr, line, column);
getPrologue().setBaseURI(iriStr);
}
protected void setPrefix(String prefix, String uriStr, int line, int column) {
// Should have happen in the parser because this step is "token to prefix".
// prefix = fixupPrefix(prefix, line, column);
getPrologue().setPrefix(prefix, uriStr);
}
protected void declareVersion(String version, int line, int column) {
getPrologue().setVersion(version);
}
protected void setInConstructTemplate(boolean b) {
setBNodesAreVariables(!b);
}
protected boolean getBNodesAreVariables() {
return bNodesAreVariables;
}
protected void setBNodesAreVariables(boolean bNodesAreVariables) {
this.bNodesAreVariables = bNodesAreVariables;
if ( bNodesAreVariables )
activeLabelMap = anonVarLabels;
else
activeLabelMap = bNodeLabels;
}
protected boolean getBNodesAreAllowed() {
return bNodesAreAllowed;
}
protected void setBNodesAreAllowed(boolean bNodesAreAllowed) {
this.bNodesAreAllowed = bNodesAreAllowed;
}
protected boolean getAllowAggregatesInExpressions() {
return allowAggregatesInExpressions;
}
protected void setAllowAggregatesInExpressions(boolean allowAggregatesInExpressions) {
this.allowAggregatesInExpressions = allowAggregatesInExpressions;
}
// Tracking for nested aggregates.
protected void startAggregate() {
aggregateDepth++;
}
protected int getAggregateDepth() {
return aggregateDepth;
}
protected void finishAggregate() {
aggregateDepth--;
}
protected Element compressGroupOfOneGroup(ElementGroup elg) {
// remove group of one group.
if ( elg.size() == 1 ) {
Element e1 = elg.get(0);
if ( e1 instanceof ElementGroup )
return e1;
}
return elg;
}
protected Node createLiteralInteger(String lexicalForm) {
return NodeFactory.createLiteralDT(lexicalForm, XSDDatatype.XSDinteger);
}
protected Node createLiteralDouble(String lexicalForm) {
return NodeFactory.createLiteralDT(lexicalForm, XSDDatatype.XSDdouble);
}
protected Node createLiteralDecimal(String lexicalForm) {
return NodeFactory.createLiteralDT(lexicalForm, XSDDatatype.XSDdecimal);
}
protected Node stripSign(Node node) {
if ( !node.isLiteral() )
return node;
String lex = node.getLiteralLexicalForm();
String lang = node.getLiteralLanguage();
RDFDatatype dt = node.getLiteralDatatype();
if ( !lex.startsWith("-") && !lex.startsWith("+") )
throw new ARQInternalErrorException("Literal does not start with a sign: " + lex);
lex = lex.substring(1);
return NodeFactory.createLiteral(lex, lang, dt);
}
// Because of Java (Java strings have surrogate pairs) we only detect singleton surrogates.
protected void checkString(String string, int line, int column) {
// Checks for bare surrogate pairs.
for ( int i = 0; i < string.length(); i++ ) {
// Not "codePointAt" which does surrogate processing.
char ch = string.charAt(i);
// Check surrogate pairs are in pairs. Pairs are high-low.
if ( Character.isLowSurrogate(ch) )
throw new QueryParseException("Bad surrogate pair (low surrogate without high surrogate)", line, column);
if ( Character.isHighSurrogate(ch) ) {
i++;
if ( i == string.length() )
throw new QueryParseException("Bad surrogate pair (end of string)", line, column);
char ch1 = string.charAt(i);
if ( !Character.isLowSurrogate(ch1) ) {
throw new QueryParseException("Bad surrogate pair (high surrogate not followed by low surrogate)", line, column);
}
}
}
}
// ---- Literals
// Strings, lang strings, dirlang strings and datatyped literals.
protected Node createLiteralString(String lexicalForm, int line, int column) {
return NodeFactory.createLiteralString(lexicalForm);
}
protected Node createLiteralDT(String lexicalForm, String datatypeURI, int line, int column) {
// Can't have type and lang tag in parsing.
return createLiteralAny(lexicalForm, null, null, datatypeURI, line, column);
}
protected Node createLiteralLang(String lexicalForm, String langTagDir, int line, int column) {
// Can't have type and lang tag in parsing.
return createLiteralAny(lexicalForm, langTagDir, null, null, line, column);
}
/**
 * Create a literal, given all possible component parts.
 * Exactly one of the following combinations is expected from callers:
 * a datatype URI (no lang tag / direction); a language tag (raw token with
 * leading '@', optionally with an embedded "--dir" suffix); or neither
 * (plain string literal).
 *
 * @param lexicalForm the literal's lexical form (escape processing already done)
 * @param langTag     raw language token including the leading '@', or null
 * @param textDirStr  explicit base direction if already separated, or null
 * @param datatypeURI datatype IRI, or null
 */
private Node createLiteralAny(String lexicalForm, String langTag, String textDirStr, String datatypeURI, int line, int column) {
Node n = null;
// Can't have type and lang tag in parsing.
if ( datatypeURI != null ) {
if ( langTag != null || textDirStr != null )
throw new ARQInternalErrorException("Datatype with lang/langDir");
// NOTE(review): presumably getSafeTypeByName tolerates unknown datatype IRIs - confirm.
RDFDatatype dType = TypeMapper.getInstance().getSafeTypeByName(datatypeURI);
n = NodeFactory.createLiteralDT(lexicalForm, dType);
return n;
}
// datatypeURI is null
if ( langTag == null && textDirStr == null )
return NodeFactory.createLiteralString(lexicalForm);
// Strip '@'
// NOTE(review): from here on langTag is assumed non-null (callers pass either a
// lang token or nothing); a null langTag with non-null textDirStr would NPE here.
langTag = langTag.substring(1);
// See if we split langTag into language tag and base direction.
String textDirStr2 = textDirStr;
String langTag2 = langTag;
if ( textDirStr == null ) {
// "lang--dir" form, e.g. "en--ltr": split at the first "--".
int idx = langTag.indexOf("--");
if ( idx >= 0 ) {
textDirStr2 = langTag.substring(idx+2);
langTag2 = langTag.substring(0, idx);
}
}
if ( langTag2 != null && textDirStr2 != null ) {
if ( ! TextDirection.isValid(textDirStr2) )
throw new QueryParseException("Illegal base direction: '"+textDirStr2+"'", line, column);
return NodeFactory.createLiteralDirLang(lexicalForm, langTag2, textDirStr2);
}
// langTag != null, textDirStr == null.
return NodeFactory.createLiteralLang(lexicalForm, langTag2);
}
// Retained for reference: earlier helpers that split the "@lang--dir" token.
// protected String langFromToken(String image) {
// int idx = image.indexOf("--");
// if ( idx < 0 )
// // No direction; remove @
// return image.substring(1);
// return image.substring(1, idx);
// }
//
// protected String dirFromToken(String image) {
// int idx = image.indexOf("--");
// if ( idx < 0 )
// return null;
// // Not checked for value
// return image.substring(idx+2);
// }
/**
 * Parse the lexical form of an integer token into a {@code long}.
 * Accepts an optional leading '+' and hexadecimal forms written "0x...".
 *
 * @throws QueryParseException if the text is not a number, or is a valid
 *         integer whose magnitude does not fit in a {@code long}.
 */
protected long integerValue(String s) {
    try {
        if ( s.startsWith("+") )
            s = s.substring(1);
        if ( s.startsWith("0x") ) {
            // Hex
            s = s.substring(2);
            return Long.parseLong(s, 16);
        }
        return Long.parseLong(s);
    } catch (NumberFormatException ex) {
        try {
            // Distinguish "not a number" from "number too large for a long":
            // if BigInteger accepts it, the only problem is the magnitude.
            new BigInteger(s);
            throwParseException("Number '" + s + "' is a valid number but cannot be stored in a long");
        } catch (NumberFormatException ex2) {}
        // Genuinely malformed: surface the original failure.
        throw new QueryParseException(ex, -1, -1);
    }
}
/**
 * Parse the lexical form of a double token.
 * A leading '+' is removed first, mirroring the handling in integerValue
 * (Double.parseDouble would accept it anyway).
 */
protected double doubleValue(String s) {
    String lexical = s.startsWith("+") ? s.substring(1) : s;
    return Double.parseDouble(lexical);
}
/** Remove the first and last characters (the surrounding ' or ") from a string. */
protected static String stripQuotes(String s) {
    int last = s.length() - 1;
    return s.substring(1, last);
}
/** Remove the first three and last three characters (surrounding ''' or """) from a string. */
protected static String stripQuotes3(String s) {
    int last = s.length() - 3;
    return s.substring(3, last);
}
/** Remove the first {@code n} characters from the string (delegates to LangParserLib). */
protected static String stripChars(String s, int n) {
return LangParserLib.stripChars(s, n);
}
/** Create a query variable from its token; drops the leading marker character (e.g. '?' or '$'). */
protected Var createVariable(String s, int line, int column) {
s = s.substring(1); // Drop the marker
// This is done by the parser input stream nowadays.
// s = unescapeCodePoint(s, line, column);
// Check \ u did not put in any illegals.
return Var.alloc(s);
}
/** Create an RDF triple term node from its components. */
protected Node createTripleTerm(Node s, Node p, Node o, int line, int column) {
return NodeFactory.createTripleTerm(s, p, o);
}
// ---- IRIs and Nodes
/**
 * Process a quoted IRI token: strip the surrounding delimiters, apply \ u
 * escape processing, reject bad surrogate pairs, then resolve against the
 * base IRI.
 */
protected String resolveQuotedIRI(String iriStr, int line, int column) {
iriStr = stripQuotes(iriStr);
iriStr = unescapeUnicode(iriStr, line, column);
// Check for Unicode surrogates
checkString(iriStr, line, column);
return resolveIRI(iriStr, line, column);
}
// Logger and error handler used for IRI warnings/errors raised during parsing.
public static final String ParserLoggerName = "SPARQL";
public static Logger parserLog = LoggerFactory.getLogger(ParserLoggerName);
private static final ErrorHandler errorHandler = ErrorHandlerFactory.errorHandlerStd(parserLog);
/**
 * Resolve an IRI string against the prologue's base IRI.
 * Returned unchanged for blank-node-encoding IRIs, or when there is no
 * prologue or no base to resolve against.
 */
protected String resolveIRI(String iriStr, int line, int column) {
    boolean resolvable = !isBNodeIRI(iriStr)
            && getPrologue() != null
            && getPrologue().getBase() != null;
    if ( !resolvable )
        return iriStr;
    return resolveIRIx(iriStr, line, column).toString();
}
/**
 * Resolve an IRI string against the prologue's base, reporting problems
 * through the shared {@code errorHandler} rather than throwing directly.
 * After reporting, falls back to an "any" IRIx (no validation) so parsing
 * can continue with the original string.
 */
private IRIx resolveIRIx(String iriStr, long line, long col) {
// Aligns with ParserProfileStd.internalMakeIRI
// Hard to do a meaning DRY because SPARQL works in strings
// where as ParserProfile works in IRix.
if ( iriStr.contains(" ") ) {
// Specific check for spaces.
errorHandler.warning("Bad IRI: <" + iriStr + "> Spaces are not legal in URIs/IRIs.", line, col);
return IRIx.createAny(iriStr);
}
try {
IRIx resolvedIRIx = getPrologue().getBase().resolve(iriStr);
return resolvedIRIx;
} catch (RelativeIRIException ex) {
// Could not be made absolute against the current base.
errorHandler.error("Relative IRI: " + iriStr, line, col);
return IRIx.createAny(iriStr);
} catch (IRIException ex) {
// Same code as Checker.iriViolations
String msg = ex.getMessage();
Checker.iriViolationMessage(iriStr, true, msg, line, col, errorHandler);
return IRIx.createAny(iriStr);
}
}
/**
 * Expand a prefixed name (e.g. "ex:local") to a full IRI string.
 * Undoes PN_LOCAL_ESC backslash escapes in the local part first, then
 * expands via the prologue's prefix map.
 *
 * @throws QueryParseException if the prefix is not declared (unless
 *         ARQ.fixupUndefinedPrefixes is enabled).
 */
protected String resolvePName(String prefixedName, int line, int column) {
// It's legal. (The grammar guarantees the token contains a ':', so idx >= 0.)
int idx = prefixedName.indexOf(':');
// -- Escapes in local name
String prefix = prefixedName.substring(0, idx);
String local = prefixedName.substring(idx + 1);
local = unescapePName(local, line, column);
prefixedName = prefix + ":" + local;
// --
String s = getPrologue().expandPrefixedName(prefixedName);
if ( s == null ) {
// Optionally turn undeclared prefixes into usable IRIs instead of failing.
if ( ARQ.isTrue(ARQ.fixupUndefinedPrefixes) )
return RiotLib.fixupPrefixes.apply(prefixedName);
throwParseException("Unresolved prefixed name: " + prefixedName, line, column);
}
return s;
}
// When true, IRIs that encode blank node labels are turned back into blank nodes.
// (Field spelling kept as-is: it is referenced by other methods in this class.)
private boolean skolomizedBNodes = ARQ.isTrue(ARQ.constantBNodeLabels);
/** Create a node for an IRI, mapping blank-node-encoding IRIs back to blank nodes when enabled. */
protected Node createNode(String iri) {
if ( skolomizedBNodes )
return RiotLib.createIRIorBNode(iri);
else
return NodeFactory.createURI(iri);
}
/** True iff the string is a blank-node-encoding IRI (per RiotLib) and that handling is enabled. */
protected boolean isBNodeIRI(String iri) {
return skolomizedBNodes && RiotLib.isBNodeIRI(iri);
}
// -------- Basic Graph Patterns and Blank Node label scopes
// A BasicGraphPattern is any sequence of TripleBlocks, separated by filters,
// but not by other graph patterns.
/** Start a BGP scope: forget the blank node labels allocated in the previous BGP. */
protected void startBasicGraphPattern() {
activeLabelMap.clear();
}
/** End a BGP scope: record its labels so later BGPs cannot reuse them. */
protected void endBasicGraphPattern() {
previousLabels.addAll(activeLabelMap.getLabels());
}
protected void startTriplesBlock() {}
protected void endTriplesBlock() {}
// On entry to a new group, the current BGP is ended.
protected void startGroup(ElementGroup elg) {
endBasicGraphPattern();
startBasicGraphPattern();
}
protected void endGroup(ElementGroup elg) {
endBasicGraphPattern();
}
// --------
/** Throw a parse error if the node is not a concrete RDF term. */
protected void checkConcrete(Node n, int line, int column) {
if ( !n.isConcrete() )
throwParseException("Term is not concrete: " + n, line, column);
}
// BNode from a list
// protected Node createListNode()
// { return listLabelMap.allocNode(); }
/** Fresh blank node used when expanding list syntax. */
protected Node createListNode(int line, int column) {
return createBNode(line, column);
}
// Unlabelled bNode.
/** Allocate a fresh, unlabelled blank node; illegal in DELETE templates. */
protected Node createBNode(int line, int column) {
if ( !bNodesAreAllowed )
throwParseException("Blank nodes not allowed in DELETE templates", line, column);
return activeLabelMap.allocNode();
}
// Labelled bNode.
/**
 * Get-or-create the blank node for a label.
 * Reusing a label from an earlier basic graph pattern is an error.
 */
protected Node createBNode(String label, int line, int column) {
if ( !bNodesAreAllowed )
throwParseException("Blank nodes not allowed in DELETE templates: " + label, line, column);
if ( previousLabels.contains(label) )
throwParseException("Blank node label reuse not allowed at this point: " + label, line, column);
// label = unescapeCodePoint(label, line, column);
return activeLabelMap.asNode(label);
}
/**
 * For reifier syntax, the predicate must be a simple IRI, not a property path.
 * Returns the predicate node: {@code p} itself, or the node inside a
 * single-link path.
 */
protected Node preConditionReifier(Node s, Node p, Path path, Node o, int line, int column) {
if ( p != null )
return p;
if ( path instanceof P_Link )
return ((P_Link)path).getNode();
throwParseException("Only simple paths allowed with reifier syntax", line, column);
return null;
}
/** Build an EXISTS(element) expression. */
protected Expr createExprExists(Element element) {
return new E_Exists(element);
}
/** Build a NOT EXISTS(element) expression. */
protected Expr createExprNotExists(Element element) {
// Could negate here.
return new E_NotExists(element);
}
// Convert a parser token, which includes the final ":", to a prefix name.
protected String fixupPrefix(String prefix, int line, int column) {
    // Note: \ u escape processing happens earlier, in the parser input stream.
    int n = prefix.length();
    if ( n > 0 && prefix.charAt(n - 1) == ':' )
        return prefix.substring(0, n - 1);
    return prefix;
}
/** Set the graph node on a quad accumulator. */
protected void setAccGraph(QuadAccSink acc, Node gn) {
acc.setGraph(gn);
}
// The insert(...) overloads funnel parsed triples and paths into a collector.
protected void insert(TripleCollector acc, Node s, Node p, Node o) {
acc.addTriple(Triple.create(s, p, o));
}
protected void insert(TripleCollectorMark acc, int index, Node s, Node p, Node o) {
acc.addTriple(index, Triple.create(s, p, o));
}
// If p is null, the predicate position held a property path.
protected void insert(TripleCollector acc, Node s, Node p, Path path, Node o) {
if ( p == null )
acc.addTriplePath(new TriplePath(s, path, o));
else
acc.addTriple(Triple.create(s, p, o));
}
protected void insert(TripleCollectorMark acc, int index, Node s, Node p, Path path, Node o) {
if ( p == null )
acc.addTriplePath(index, new TriplePath(s, path, o));
else
acc.addTriple(index, Triple.create(s, p, o));
}
/** Copy all triples/paths from a parsed path block into another collector. */
protected void insert(TripleCollector target, ElementPathBlock source) {
for ( TriplePath path : source.getPattern() ) {
if ( path.isTriple() ) {
target.addTriple(path.asTriple());
} else {
target.addTriplePath(path);
}
}
}
/**
 * Emit the reification triple (reifierId, rdf:reifies, tripleTerm) for the
 * triple (s p o). A fresh blank node is allocated when no reifier id was
 * written in the query.
 *
 * @return the reifier id actually used
 */
protected Node insertTripleReifier(TripleCollector acc, Node reifierId, Node s, Node p, Node o, int line, int column) {
Node tripleTerm = createTripleTerm(s, p, o, line, column);
if ( reifierId == null )
reifierId = createBNode(line, column);
Triple t = Triple.create(reifierId, nRDFreifies, tripleTerm);
acc.addTriple(t);
return reifierId;
}
// Reifier id set by the parser for annotation syntax; cleared after use.
private Node annotationReifierId = null;
protected void setReifierId(Node reifId) {
annotationReifierId = reifId;
}
/** Return the explicit reifier id if one was set, else allocate one and emit its reifies-triple. */
protected Node getOrAllocReifierId(TripleCollector acc, Node s, Node p, Node o, int line, int column) {
if ( annotationReifierId != null )
return annotationReifierId;
Node reifierId = createBNode(-1, -1);
insertTripleReifier(acc, reifierId, s, p, o, line, column);
return reifierId;
}
protected void clearReifierId() {
annotationReifierId = null;
}
/** Wrap a parsed node as an expression (see ExprLib.nodeToExpr). */
protected Expr asExpr(Node n) {
return ExprLib.nodeToExpr(n);
}
// Makers of functions that need more than just a simple "new E_...".
// IRI(rel) - resolved against the query's base URI.
protected Expr makeFunction_IRI(Expr expr) {
return new E_IRI(prologue.getBaseURI(), expr);
}
protected Expr makeFunction_URI(Expr expr) {
return new E_URI(prologue.getBaseURI(), expr);
}
// IRI(base, rel) or IRI(rel, null)
protected Expr makeFunction_IRI(Expr expr1, Expr expr2) {
if ( expr2 == null )
return makeFunction_IRI(expr1);
return new E_IRI2(expr1, prologue.getBaseURI(), expr2);
}
protected Expr makeFunction_URI(Expr expr1, Expr expr2) {
if ( expr2 == null )
return makeFunction_URI(expr1);
return new E_URI2(expr1, prologue.getBaseURI(), expr2);
}
// Create a E_BNode function.
protected Expr makeFunction_BNode() {
return E_BNode.create();
}
protected Expr makeFunction_BNode(Expr expr) {
return E_BNode.create(expr);
}
// Utilities to remove escapes in strings.
/* package-testing */ static String unescapeStr(String s) {
return unescapeStr(s, -1, -1);
}
// Do we need the line/column versions?
// Why not catch exceptions and convert to QueryParseException
/** Process backslash escapes in a string body (full escape set of EscapeStr). */
protected static String unescapeStr(String s, int line, int column) {
return unescape(s, '\\', false, line, column);
}
/** Unescape unicode - no surrogate processing: only code-point escapes are handled. */
protected static String unescapeUnicode(String s, int line, int column) {
return unescape(s, '\\', true, line, column);
}
// Worker function
/** Shared worker: delegates to EscapeStr and converts failures to parse exceptions. */
protected static String unescape(String s, char escape, boolean pointCodeOnly, int line, int column) {
try {
return EscapeStr.unescape(s, escape, pointCodeOnly);
} catch (AtlasException ex) {
throwParseException(ex.getMessage(), line, column);
return null; // Unreachable: throwParseException always throws.
}
}
/**
 * Undo PN_LOCAL_ESC escapes (backslash before '.', '-', '~', etc.) in the
 * local part of a prefixed name. Only the characters in the SPARQL
 * PN_LOCAL_ESC production may follow the backslash; anything else is a
 * parse error. Line/column are advanced while scanning so errors point at
 * the offending escape.
 */
protected static String unescapePName(String s, int line, int column) {
char escape = '\\';
int idx = s.indexOf(escape);
// Fast path: no backslash, nothing to unescape.
if ( idx == -1 )
return s;
int len = s.length();
StringBuilder sb = new StringBuilder();
for ( int i = 0; i < len; i++ ) {
char ch = s.charAt(i);
// Keep line and column numbers.
switch (ch) {
case '\n' :
case '\r' :
line++;
column = 1;
break;
default :
column++;
break;
}
if ( ch != escape ) {
sb.append(ch);
continue;
}
// Escape
if ( i >= s.length() - 1 )
throwParseException("Illegal escape at end of string", line, column);
char ch2 = s.charAt(i + 1);
column = column + 1;
i = i + 1;
switch (ch2) { // PN_LOCAL_ESC
case '_' :
case '~' :
case '.' :
case '-' :
case '!' :
case '$' :
case '&' :
case '\'' :
case '(' :
case ')' :
case '*' :
case '+' :
case ',' :
case ';' :
case '=' :
case ':' :
case '/' :
case '?' :
case '#' :
case '@' :
case '%' :
sb.append(ch2);
break;
default :
throwParseException("Illegal prefix name escape: " + ch2, line, column);
}
}
return sb.toString();
}
/** Log a warning about deprecated syntax; parsing continues. */
protected void warnDeprecation(String msg) {
Log.warn(this, msg);
}
/** Throw a QueryParseException with location information prepended to the message. */
public static void throwParseException(String msg, int line, int column) {
throw new QueryParseException("Line " + line + ", column " + column + ": " + msg, line, column);
}
/** Throw a QueryParseException with no location information. */
public static void throwParseException(String msg) {
throw new QueryParseException(msg, -1, -1);
}
}
/**
 * Class that has all the parse event operations and other query/update specific things:
 * sub-SELECT nesting, VALUES data blocks, the update sink, and blank-node
 * label scoping across data/template sections.
 */
static class SPARQLParserBase extends QueryParserBase {
// Enclosing queries while parsing nested sub-SELECTs; the current one is 'query'.
private Deque<Query> stack = new ArrayDeque<>();
protected Query query;
protected SPARQLParserBase() {}
public void setQuery(Query q) {
query = q;
setPrologue(q);
}
public Query getQuery() { return query; }
// The ARQ parser is both query and update languages.
// // ---- SPARQL/Update (Submission)
// private UpdateRequest requestSubmission = null;
//
// protected UpdateRequest getUpdateRequestSubmission() { return requestSubmission; }
// public void setUpdateRequest(UpdateRequest request)
// {
// setPrologue(request);
// this.requestSubmission = request;
// // And create a query because we may have nested selects.
// this.query = new Query ();
// }
// Destination for parsed update operations (null when parsing a query).
private UpdateSink sink = null;
// Places to push settings across points where we reset.
private boolean oldBNodesAreVariables;
private boolean oldBNodesAreAllowed;
// Count of subSelect nesting.
// Level 0 is top level.
// Level -1 is not in a pattern WHERE clause.
private int queryLevel = -1;
// Saved blank-node label scopes (see pushLabelState/popLabelState).
private Deque<Set<String>> stackPreviousLabels = new ArrayDeque<>();
private Deque<LabelToNodeMap> stackCurrentLabels = new ArrayDeque<>();
public void setUpdate(Prologue prologue, UpdateSink sink) {
this.sink = sink;
this.query = new Query();
setPrologue(prologue);
}
// Signal start/finish of units
protected void startQuery() {}
protected void finishQuery() {
// Expand "SELECT *" into the actual in-scope variables.
query.ensureResultVars();
}
protected void startUpdateRequest() {}
protected void finishUpdateRequest() {}
// protected void startBasicGraphPattern()
// { activeLabelMap.clear(); }
//
// protected void endBasicGraphPattern()
// { oldLabels.addAll(activeLabelMap.getLabels()); }
protected void startUpdateOperation() {}
protected void finishUpdateOperation() {}
protected void startModifyUpdate() { }
protected void finishModifyUpdate() { }
// INSERT DATA: blank node labels denote blank nodes, not variables.
protected void startDataInsert(QuadDataAccSink qd, int line, int col) {
oldBNodesAreVariables = getBNodesAreVariables();
setBNodesAreVariables(false);
activeLabelMap.clear();
}
protected void finishDataInsert(QuadDataAccSink qd, int line, int col) {
previousLabels.addAll(activeLabelMap.getLabels());
activeLabelMap.clear();
setBNodesAreVariables(oldBNodesAreVariables);
}
// DELETE DATA: blank nodes are not allowed at all.
protected void startDataDelete(QuadDataAccSink qd, int line, int col) {
oldBNodesAreAllowed = getBNodesAreAllowed();
setBNodesAreAllowed(false);
}
protected void finishDataDelete(QuadDataAccSink qd, int line, int col) {
setBNodesAreAllowed(oldBNodesAreAllowed);
}
// These can be nested with subSELECTs but subSELECTs share bNodeLabel state.
protected void startWherePattern() {
queryLevel += 1;
if ( queryLevel == 0 ) {
pushLabelState();
clearLabelState();
}
}
protected void finishWherePattern() {
if ( queryLevel == 0 )
popLabelState();
queryLevel -= 1;
}
// This holds the accumulation of labels from earlier INSERT DATA
// across template creation (bNode in templates get cloned before
// going into the data).
protected void startInsertTemplate(QuadAcc qd, int line, int col) {
oldBNodesAreVariables = getBNodesAreVariables();
setBNodesAreVariables(false);
pushLabelState();
}
protected void finishInsertTemplate(QuadAcc qd, int line, int col) {
// Restore accumulated labels.
popLabelState();
// This also set the bnode syntax to node functionality - must be after
// popLabelState.
setBNodesAreVariables(oldBNodesAreVariables);
}
// No bNodes in delete templates.
protected void startDeleteTemplate(QuadAcc qd, int line, int col) {
oldBNodesAreAllowed = getBNodesAreAllowed();
setBNodesAreAllowed(false);
}
protected void finishDeleteTemplate(QuadAcc qd, int line, int col) {
setBNodesAreAllowed(oldBNodesAreAllowed);
}
protected void emitUpdate(Update update) {
// The parser can send null if it already performed an INSERT_DATA or
// DELETE_DATA
if ( null != update ) {
// Verify each operation
verifyUpdate(update);
sink.send(update);
}
}
// Shared visitor: checks variable scoping of the WHERE pattern of modify updates.
private static UpdateVisitor v = new UpdateVisitorBase() {
@Override
public void visit(UpdateModify mod) {
SyntaxVarScope.checkElement(mod.getWherePattern());
}
};
private void verifyUpdate(Update update) {
update.visit(v);
}
protected QuadDataAccSink createInsertDataSink() {
return sink.createInsertDataSink();
}
protected QuadDataAccSink createDeleteDataSink() {
return sink.createDeleteDataSink();
}
/** Save the current query before descending into a sub-SELECT. */
protected void pushQuery() {
if ( query == null )
throw new ARQInternalErrorException("Parser query object is null");
stack.push(query);
}
protected void startSubSelect(int line, int col) {
pushQuery();
query = newSubQuery(getPrologue());
}
protected Query newSubQuery(Prologue progloue) {
// The parser uses the same prologue throughout the parsing process.
Query subQuery = new Query();
subQuery.setSyntax(query.getSyntax());
return subQuery;
}
protected void popQuery() {
query = stack.pop();
}
/** Finish a sub-SELECT: validate it, fix up SELECT *, and restore the outer query. */
protected Query endSubSelect(int line, int column) {
Query subQuery = query;
if ( ! subQuery.isSelectType() )
throwParseException("Subquery not a SELECT query", line, column);
// Sort out SELECT *
subQuery.ensureResultVars();
popQuery();
return subQuery;
}
// State for VALUES data blocks (both the trailing VALUES clause and inline data).
private List<Var> variables = null;
private List<Binding> values = null;
private BindingBuilder rowBuilder;
// Index of the value being read in the current row; -1 before the first value.
private int currentColumn = -1;
// Trailing VALUES.
protected void startValuesClause(int line, int col) {
variables = new ArrayList<>();
values = new ArrayList<>();
rowBuilder = Binding.builder();
}
protected void finishValuesClause(int line, int col)
{
getQuery().setValuesDataBlock(variables, values);
}
// ElementData. VALUES in the WHERE clause.
protected void startInlineData(List<Var> vars, List<Binding> rows, int line, int col) {
variables = vars;
values = rows;
rowBuilder = Binding.builder();
}
protected void finishInlineData(int line, int col)
{}
protected void emitDataBlockVariable(Var v) { variables.add(v); }
protected void startDataBlockValueRow(int line, int col) {
rowBuilder.reset();
currentColumn = -1;
}
protected void emitDataBlockValue(Node n, int line, int col) {
currentColumn++;
if ( currentColumn >= variables.size() )
// Exception will be thrown later when we have the complete row count.
return;
Var v = variables.get(currentColumn);
if ( n != null && ! n.isConcrete() ) {
String msg = QueryParseException.formatMessage("Term is not concrete: "+n, line, col);
throw new QueryParseException(msg, line, col);
}
// A null node (presumably UNDEF - confirm) leaves the variable unbound in this row.
if ( n != null )
rowBuilder.add(v, n);
}
protected void finishDataBlockValueRow(int line, int col) {
//if ( variables.size() != currentValueRow().size() )
if ( currentColumn+1 != variables.size() )
{
String msg = String.format("Mismatch: %d variables but %d values",variables.size(), currentColumn+1);
msg = QueryParseException.formatMessage(msg, line, col);
throw new QueryParseException(msg, line , col);
}
values.add(rowBuilder.build());
}
private void pushLabelState() {
// Hide used labels already tracked.
// NOTE(review): activeLabelMap is pushed and then cleared - the saved entry is
// the same (now cleared) object, so only previousLabels is truly snapshotted.
// Looks intentional (labels reset per scope) but worth confirming.
stackPreviousLabels.push(previousLabels);
stackCurrentLabels.push(activeLabelMap);
previousLabels = new HashSet<>();
activeLabelMap.clear();
}
private void popLabelState() {
previousLabels = stackPreviousLabels.pop();
activeLabelMap = stackCurrentLabels.pop();
}
private void clearLabelState() {
activeLabelMap.clear();
previousLabels.clear();
}
}
}
/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.drive.model;
/**
* Representation of a shared drive.
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the Drive API. For a detailed explanation see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public final class Drive extends com.google.api.client.json.GenericJson {
/**
* An image file and cropping parameters from which a background image for this shared drive is
* set. This is a write only field; it can only be set on drive.drives.update requests that don't
* set themeId. When specified, all fields of the backgroundImageFile must be set.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private BackgroundImageFile backgroundImageFile;
/**
* A short-lived link to this shared drive's background image.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String backgroundImageLink;
/**
* Capabilities the current user has on this shared drive.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private Capabilities capabilities;
/**
* The color of this shared drive as an RGB hex string. It can only be set on a
* drive.drives.update request that does not set themeId.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String colorRgb;
/**
* The time at which the shared drive was created (RFC 3339 date-time).
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private com.google.api.client.util.DateTime createdDate;
/**
* Whether the shared drive is hidden from default view.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean hidden;
/**
* The ID of this shared drive which is also the ID of the top level folder of this shared drive.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String id;
/**
* This is always drive#drive
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String kind;
/**
* The name of this shared drive.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String name;
/**
* The organizational unit of this shared drive. This field is only populated on drives.list
* responses when the useDomainAdminAccess parameter is set to true.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String orgUnitId;
/**
* A set of restrictions that apply to this shared drive or items inside this shared drive.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private Restrictions restrictions;
/**
* The ID of the theme from which the background image and color will be set. The set of possible
* driveThemes can be retrieved from a drive.about.get response. When not specified on a
* drive.drives.insert request, a random theme is chosen from which the background image and color
* are set. This is a write-only field; it can only be set on requests that don't set colorRgb or
* backgroundImageFile.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String themeId;
/**
* An image file and cropping parameters from which a background image for this shared drive is
* set. This is a write only field; it can only be set on drive.drives.update requests that don't
* set themeId. When specified, all fields of the backgroundImageFile must be set.
* @return value or {@code null} for none
*/
public BackgroundImageFile getBackgroundImageFile() {
return backgroundImageFile;
}
/**
* An image file and cropping parameters from which a background image for this shared drive is
* set. This is a write only field; it can only be set on drive.drives.update requests that don't
* set themeId. When specified, all fields of the backgroundImageFile must be set.
* @param backgroundImageFile backgroundImageFile or {@code null} for none
*/
public Drive setBackgroundImageFile(BackgroundImageFile backgroundImageFile) {
this.backgroundImageFile = backgroundImageFile;
return this;
}
/**
* A short-lived link to this shared drive's background image.
* @return value or {@code null} for none
*/
public java.lang.String getBackgroundImageLink() {
return backgroundImageLink;
}
/**
* A short-lived link to this shared drive's background image.
* @param backgroundImageLink backgroundImageLink or {@code null} for none
*/
public Drive setBackgroundImageLink(java.lang.String backgroundImageLink) {
this.backgroundImageLink = backgroundImageLink;
return this;
}
/**
* Capabilities the current user has on this shared drive.
* @return value or {@code null} for none
*/
public Capabilities getCapabilities() {
return capabilities;
}
/**
* Capabilities the current user has on this shared drive.
* @param capabilities capabilities or {@code null} for none
*/
public Drive setCapabilities(Capabilities capabilities) {
this.capabilities = capabilities;
return this;
}
/**
* The color of this shared drive as an RGB hex string. It can only be set on a
* drive.drives.update request that does not set themeId.
* @return value or {@code null} for none
*/
public java.lang.String getColorRgb() {
return colorRgb;
}
/**
* The color of this shared drive as an RGB hex string. It can only be set on a
* drive.drives.update request that does not set themeId.
* @param colorRgb colorRgb or {@code null} for none
*/
public Drive setColorRgb(java.lang.String colorRgb) {
this.colorRgb = colorRgb;
return this;
}
/**
* The time at which the shared drive was created (RFC 3339 date-time).
* @return value or {@code null} for none
*/
public com.google.api.client.util.DateTime getCreatedDate() {
return createdDate;
}
/**
* The time at which the shared drive was created (RFC 3339 date-time).
* @param createdDate createdDate or {@code null} for none
*/
public Drive setCreatedDate(com.google.api.client.util.DateTime createdDate) {
this.createdDate = createdDate;
return this;
}
/**
* Whether the shared drive is hidden from default view.
* @return value or {@code null} for none
*/
public java.lang.Boolean getHidden() {
return hidden;
}
/**
* Whether the shared drive is hidden from default view.
* @param hidden hidden or {@code null} for none
*/
public Drive setHidden(java.lang.Boolean hidden) {
this.hidden = hidden;
return this;
}
/**
* The ID of this shared drive which is also the ID of the top level folder of this shared drive.
* @return value or {@code null} for none
*/
public java.lang.String getId() {
return id;
}
/**
* The ID of this shared drive which is also the ID of the top level folder of this shared drive.
* @param id id or {@code null} for none
*/
public Drive setId(java.lang.String id) {
this.id = id;
return this;
}
/**
* This is always drive#drive
* @return value or {@code null} for none
*/
public java.lang.String getKind() {
return kind;
}
/**
* This is always drive#drive
* @param kind kind or {@code null} for none
*/
public Drive setKind(java.lang.String kind) {
this.kind = kind;
return this;
}
/**
* The name of this shared drive.
* @return value or {@code null} for none
*/
public java.lang.String getName() {
return name;
}
/**
* The name of this shared drive.
* @param name name or {@code null} for none
*/
public Drive setName(java.lang.String name) {
this.name = name;
return this;
}
/**
* The organizational unit of this shared drive. This field is only populated on drives.list
* responses when the useDomainAdminAccess parameter is set to true.
* @return value or {@code null} for none
*/
public java.lang.String getOrgUnitId() {
return orgUnitId;
}
/**
* The organizational unit of this shared drive. This field is only populated on drives.list
* responses when the useDomainAdminAccess parameter is set to true.
* @param orgUnitId orgUnitId or {@code null} for none
*/
public Drive setOrgUnitId(java.lang.String orgUnitId) {
this.orgUnitId = orgUnitId;
return this;
}
/**
* A set of restrictions that apply to this shared drive or items inside this shared drive.
* @return value or {@code null} for none
*/
public Restrictions getRestrictions() {
return restrictions;
}
/**
* A set of restrictions that apply to this shared drive or items inside this shared drive.
* @param restrictions restrictions or {@code null} for none
*/
public Drive setRestrictions(Restrictions restrictions) {
this.restrictions = restrictions;
return this;
}
/**
* The ID of the theme from which the background image and color will be set. The set of possible
* driveThemes can be retrieved from a drive.about.get response. When not specified on a
* drive.drives.insert request, a random theme is chosen from which the background image and color
* are set. This is a write-only field; it can only be set on requests that don't set colorRgb or
* backgroundImageFile.
* @return value or {@code null} for none
*/
public java.lang.String getThemeId() {
return themeId;
}
/**
* The ID of the theme from which the background image and color will be set. The set of possible
* driveThemes can be retrieved from a drive.about.get response. When not specified on a
* drive.drives.insert request, a random theme is chosen from which the background image and color
* are set. This is a write-only field; it can only be set on requests that don't set colorRgb or
* backgroundImageFile.
* @param themeId themeId or {@code null} for none
*/
public Drive setThemeId(java.lang.String themeId) {
this.themeId = themeId;
return this;
}
@Override
public Drive set(String fieldName, Object value) {
return (Drive) super.set(fieldName, value);
}
@Override
public Drive clone() {
return (Drive) super.clone();
}
/**
* An image file and cropping parameters from which a background image for this shared drive is set.
* This is a write only field; it can only be set on drive.drives.update requests that don't set
* themeId. When specified, all fields of the backgroundImageFile must be set.
*/
public static final class BackgroundImageFile extends com.google.api.client.json.GenericJson {
/**
* The ID of an image file in Google Drive to use for the background image.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String id;
/**
* The width of the cropped image in the closed range of 0 to 1. This value represents the width
* of the cropped image divided by the width of the entire image. The height is computed by
* applying a width to height aspect ratio of 80 to 9. The resulting image must be at least 1280
* pixels wide and 144 pixels high.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Float width;
/**
* The X coordinate of the upper left corner of the cropping area in the background image. This is
* a value in the closed range of 0 to 1. This value represents the horizontal distance from the
* left side of the entire image to the left side of the cropping area divided by the width of the
* entire image.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Float xCoordinate;
/**
* The Y coordinate of the upper left corner of the cropping area in the background image. This is
* a value in the closed range of 0 to 1. This value represents the vertical distance from the top
* side of the entire image to the top side of the cropping area divided by the height of the
* entire image.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Float yCoordinate;
/**
* The ID of an image file in Google Drive to use for the background image.
* @return value or {@code null} for none
*/
public java.lang.String getId() {
return id;
}
/**
* The ID of an image file in Google Drive to use for the background image.
* @param id id or {@code null} for none
*/
public BackgroundImageFile setId(java.lang.String id) {
this.id = id;
return this;
}
/**
* The width of the cropped image in the closed range of 0 to 1. This value represents the width
* of the cropped image divided by the width of the entire image. The height is computed by
* applying a width to height aspect ratio of 80 to 9. The resulting image must be at least 1280
* pixels wide and 144 pixels high.
* @return value or {@code null} for none
*/
public java.lang.Float getWidth() {
return width;
}
/**
* The width of the cropped image in the closed range of 0 to 1. This value represents the width
* of the cropped image divided by the width of the entire image. The height is computed by
* applying a width to height aspect ratio of 80 to 9. The resulting image must be at least 1280
* pixels wide and 144 pixels high.
* @param width width or {@code null} for none
*/
public BackgroundImageFile setWidth(java.lang.Float width) {
this.width = width;
return this;
}
/**
* The X coordinate of the upper left corner of the cropping area in the background image. This is
* a value in the closed range of 0 to 1. This value represents the horizontal distance from the
* left side of the entire image to the left side of the cropping area divided by the width of the
* entire image.
* @return value or {@code null} for none
*/
public java.lang.Float getXCoordinate() {
return xCoordinate;
}
/**
* The X coordinate of the upper left corner of the cropping area in the background image. This is
* a value in the closed range of 0 to 1. This value represents the horizontal distance from the
* left side of the entire image to the left side of the cropping area divided by the width of the
* entire image.
* @param xCoordinate xCoordinate or {@code null} for none
*/
public BackgroundImageFile setXCoordinate(java.lang.Float xCoordinate) {
this.xCoordinate = xCoordinate;
return this;
}
/**
* The Y coordinate of the upper left corner of the cropping area in the background image. This is
* a value in the closed range of 0 to 1. This value represents the vertical distance from the top
* side of the entire image to the top side of the cropping area divided by the height of the
* entire image.
* @return value or {@code null} for none
*/
public java.lang.Float getYCoordinate() {
return yCoordinate;
}
/**
* The Y coordinate of the upper left corner of the cropping area in the background image. This is
* a value in the closed range of 0 to 1. This value represents the vertical distance from the top
* side of the entire image to the top side of the cropping area divided by the height of the
* entire image.
* @param yCoordinate yCoordinate or {@code null} for none
*/
public BackgroundImageFile setYCoordinate(java.lang.Float yCoordinate) {
this.yCoordinate = yCoordinate;
return this;
}
@Override
public BackgroundImageFile set(String fieldName, Object value) {
return (BackgroundImageFile) super.set(fieldName, value);
}
@Override
public BackgroundImageFile clone() {
return (BackgroundImageFile) super.clone();
}
}
/**
* Capabilities the current user has on this shared drive.
*/
public static final class Capabilities extends com.google.api.client.json.GenericJson {
/**
* Whether the current user can add children to folders in this shared drive.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean canAddChildren;
/**
* Whether the current user can change the copyRequiresWriterPermission restriction of this shared
* drive.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean canChangeCopyRequiresWriterPermissionRestriction;
/**
* Whether the current user can change the domainUsersOnly restriction of this shared drive.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean canChangeDomainUsersOnlyRestriction;
/**
* Whether the current user can change the background of this shared drive.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean canChangeDriveBackground;
/**
* Whether the current user can change the driveMembersOnly restriction of this shared drive.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean canChangeDriveMembersOnlyRestriction;
/**
* Whether the current user can comment on files in this shared drive.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean canComment;
/**
* Whether the current user can copy files in this shared drive.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean canCopy;
/**
* Whether the current user can delete children from folders in this shared drive.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean canDeleteChildren;
/**
* Whether the current user can delete this shared drive. Attempting to delete the shared drive
* may still fail if there are untrashed items inside the shared drive.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean canDeleteDrive;
/**
* Whether the current user can download files in this shared drive.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean canDownload;
/**
* Whether the current user can edit files in this shared drive
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean canEdit;
/**
* Whether the current user can list the children of folders in this shared drive.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean canListChildren;
/**
* Whether the current user can add members to this shared drive or remove them or change their
* role.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean canManageMembers;
/**
* Whether the current user can read the revisions resource of files in this shared drive.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean canReadRevisions;
/**
* Whether the current user can rename files or folders in this shared drive.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean canRename;
/**
* Whether the current user can rename this shared drive.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean canRenameDrive;
/**
* Whether the current user can reset the shared drive restrictions to defaults.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean canResetDriveRestrictions;
/**
* Whether the current user can share files or folders in this shared drive.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean canShare;
/**
* Whether the current user can trash children from folders in this shared drive.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean canTrashChildren;
/**
* Whether the current user can add children to folders in this shared drive.
* @return value or {@code null} for none
*/
public java.lang.Boolean getCanAddChildren() {
return canAddChildren;
}
/**
* Whether the current user can add children to folders in this shared drive.
* @param canAddChildren canAddChildren or {@code null} for none
*/
public Capabilities setCanAddChildren(java.lang.Boolean canAddChildren) {
this.canAddChildren = canAddChildren;
return this;
}
/**
* Whether the current user can change the copyRequiresWriterPermission restriction of this shared
* drive.
* @return value or {@code null} for none
*/
public java.lang.Boolean getCanChangeCopyRequiresWriterPermissionRestriction() {
return canChangeCopyRequiresWriterPermissionRestriction;
}
/**
* Whether the current user can change the copyRequiresWriterPermission restriction of this shared
* drive.
* @param canChangeCopyRequiresWriterPermissionRestriction canChangeCopyRequiresWriterPermissionRestriction or {@code null} for none
*/
public Capabilities setCanChangeCopyRequiresWriterPermissionRestriction(java.lang.Boolean canChangeCopyRequiresWriterPermissionRestriction) {
this.canChangeCopyRequiresWriterPermissionRestriction = canChangeCopyRequiresWriterPermissionRestriction;
return this;
}
/**
* Whether the current user can change the domainUsersOnly restriction of this shared drive.
* @return value or {@code null} for none
*/
public java.lang.Boolean getCanChangeDomainUsersOnlyRestriction() {
return canChangeDomainUsersOnlyRestriction;
}
/**
* Whether the current user can change the domainUsersOnly restriction of this shared drive.
* @param canChangeDomainUsersOnlyRestriction canChangeDomainUsersOnlyRestriction or {@code null} for none
*/
public Capabilities setCanChangeDomainUsersOnlyRestriction(java.lang.Boolean canChangeDomainUsersOnlyRestriction) {
this.canChangeDomainUsersOnlyRestriction = canChangeDomainUsersOnlyRestriction;
return this;
}
/**
* Whether the current user can change the background of this shared drive.
* @return value or {@code null} for none
*/
public java.lang.Boolean getCanChangeDriveBackground() {
return canChangeDriveBackground;
}
/**
* Whether the current user can change the background of this shared drive.
* @param canChangeDriveBackground canChangeDriveBackground or {@code null} for none
*/
public Capabilities setCanChangeDriveBackground(java.lang.Boolean canChangeDriveBackground) {
this.canChangeDriveBackground = canChangeDriveBackground;
return this;
}
/**
* Whether the current user can change the driveMembersOnly restriction of this shared drive.
* @return value or {@code null} for none
*/
public java.lang.Boolean getCanChangeDriveMembersOnlyRestriction() {
return canChangeDriveMembersOnlyRestriction;
}
/**
* Whether the current user can change the driveMembersOnly restriction of this shared drive.
* @param canChangeDriveMembersOnlyRestriction canChangeDriveMembersOnlyRestriction or {@code null} for none
*/
public Capabilities setCanChangeDriveMembersOnlyRestriction(java.lang.Boolean canChangeDriveMembersOnlyRestriction) {
this.canChangeDriveMembersOnlyRestriction = canChangeDriveMembersOnlyRestriction;
return this;
}
/**
* Whether the current user can comment on files in this shared drive.
* @return value or {@code null} for none
*/
public java.lang.Boolean getCanComment() {
return canComment;
}
/**
* Whether the current user can comment on files in this shared drive.
* @param canComment canComment or {@code null} for none
*/
public Capabilities setCanComment(java.lang.Boolean canComment) {
this.canComment = canComment;
return this;
}
/**
* Whether the current user can copy files in this shared drive.
* @return value or {@code null} for none
*/
public java.lang.Boolean getCanCopy() {
return canCopy;
}
/**
* Whether the current user can copy files in this shared drive.
* @param canCopy canCopy or {@code null} for none
*/
public Capabilities setCanCopy(java.lang.Boolean canCopy) {
this.canCopy = canCopy;
return this;
}
/**
* Whether the current user can delete children from folders in this shared drive.
* @return value or {@code null} for none
*/
public java.lang.Boolean getCanDeleteChildren() {
return canDeleteChildren;
}
/**
* Whether the current user can delete children from folders in this shared drive.
* @param canDeleteChildren canDeleteChildren or {@code null} for none
*/
public Capabilities setCanDeleteChildren(java.lang.Boolean canDeleteChildren) {
this.canDeleteChildren = canDeleteChildren;
return this;
}
/**
* Whether the current user can delete this shared drive. Attempting to delete the shared drive
* may still fail if there are untrashed items inside the shared drive.
* @return value or {@code null} for none
*/
public java.lang.Boolean getCanDeleteDrive() {
return canDeleteDrive;
}
/**
* Whether the current user can delete this shared drive. Attempting to delete the shared drive
* may still fail if there are untrashed items inside the shared drive.
* @param canDeleteDrive canDeleteDrive or {@code null} for none
*/
public Capabilities setCanDeleteDrive(java.lang.Boolean canDeleteDrive) {
this.canDeleteDrive = canDeleteDrive;
return this;
}
/**
* Whether the current user can download files in this shared drive.
* @return value or {@code null} for none
*/
public java.lang.Boolean getCanDownload() {
return canDownload;
}
/**
* Whether the current user can download files in this shared drive.
* @param canDownload canDownload or {@code null} for none
*/
public Capabilities setCanDownload(java.lang.Boolean canDownload) {
this.canDownload = canDownload;
return this;
}
/**
* Whether the current user can edit files in this shared drive
* @return value or {@code null} for none
*/
public java.lang.Boolean getCanEdit() {
return canEdit;
}
/**
* Whether the current user can edit files in this shared drive
* @param canEdit canEdit or {@code null} for none
*/
public Capabilities setCanEdit(java.lang.Boolean canEdit) {
this.canEdit = canEdit;
return this;
}
/**
* Whether the current user can list the children of folders in this shared drive.
* @return value or {@code null} for none
*/
public java.lang.Boolean getCanListChildren() {
return canListChildren;
}
/**
* Whether the current user can list the children of folders in this shared drive.
* @param canListChildren canListChildren or {@code null} for none
*/
public Capabilities setCanListChildren(java.lang.Boolean canListChildren) {
this.canListChildren = canListChildren;
return this;
}
/**
* Whether the current user can add members to this shared drive or remove them or change their
* role.
* @return value or {@code null} for none
*/
public java.lang.Boolean getCanManageMembers() {
return canManageMembers;
}
/**
* Whether the current user can add members to this shared drive or remove them or change their
* role.
* @param canManageMembers canManageMembers or {@code null} for none
*/
public Capabilities setCanManageMembers(java.lang.Boolean canManageMembers) {
this.canManageMembers = canManageMembers;
return this;
}
/**
* Whether the current user can read the revisions resource of files in this shared drive.
* @return value or {@code null} for none
*/
public java.lang.Boolean getCanReadRevisions() {
return canReadRevisions;
}
/**
* Whether the current user can read the revisions resource of files in this shared drive.
* @param canReadRevisions canReadRevisions or {@code null} for none
*/
public Capabilities setCanReadRevisions(java.lang.Boolean canReadRevisions) {
this.canReadRevisions = canReadRevisions;
return this;
}
/**
* Whether the current user can rename files or folders in this shared drive.
* @return value or {@code null} for none
*/
public java.lang.Boolean getCanRename() {
return canRename;
}
/**
* Whether the current user can rename files or folders in this shared drive.
* @param canRename canRename or {@code null} for none
*/
public Capabilities setCanRename(java.lang.Boolean canRename) {
this.canRename = canRename;
return this;
}
/**
* Whether the current user can rename this shared drive.
* @return value or {@code null} for none
*/
public java.lang.Boolean getCanRenameDrive() {
return canRenameDrive;
}
/**
* Whether the current user can rename this shared drive.
* @param canRenameDrive canRenameDrive or {@code null} for none
*/
public Capabilities setCanRenameDrive(java.lang.Boolean canRenameDrive) {
this.canRenameDrive = canRenameDrive;
return this;
}
/**
* Whether the current user can reset the shared drive restrictions to defaults.
* @return value or {@code null} for none
*/
public java.lang.Boolean getCanResetDriveRestrictions() {
return canResetDriveRestrictions;
}
/**
* Whether the current user can reset the shared drive restrictions to defaults.
* @param canResetDriveRestrictions canResetDriveRestrictions or {@code null} for none
*/
public Capabilities setCanResetDriveRestrictions(java.lang.Boolean canResetDriveRestrictions) {
this.canResetDriveRestrictions = canResetDriveRestrictions;
return this;
}
/**
* Whether the current user can share files or folders in this shared drive.
* @return value or {@code null} for none
*/
public java.lang.Boolean getCanShare() {
return canShare;
}
/**
* Whether the current user can share files or folders in this shared drive.
* @param canShare canShare or {@code null} for none
*/
public Capabilities setCanShare(java.lang.Boolean canShare) {
this.canShare = canShare;
return this;
}
/**
* Whether the current user can trash children from folders in this shared drive.
* @return value or {@code null} for none
*/
public java.lang.Boolean getCanTrashChildren() {
return canTrashChildren;
}
/**
* Whether the current user can trash children from folders in this shared drive.
* @param canTrashChildren canTrashChildren or {@code null} for none
*/
public Capabilities setCanTrashChildren(java.lang.Boolean canTrashChildren) {
this.canTrashChildren = canTrashChildren;
return this;
}
@Override
public Capabilities set(String fieldName, Object value) {
return (Capabilities) super.set(fieldName, value);
}
@Override
public Capabilities clone() {
return (Capabilities) super.clone();
}
}
/**
* A set of restrictions that apply to this shared drive or items inside this shared drive.
*/
public static final class Restrictions extends com.google.api.client.json.GenericJson {
/**
* Whether administrative privileges on this shared drive are required to modify restrictions.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean adminManagedRestrictions;
/**
* Whether the options to copy, print, or download files inside this shared drive, should be
* disabled for readers and commenters. When this restriction is set to true, it will override the
* similarly named field to true for any file inside this shared drive.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean copyRequiresWriterPermission;
/**
* Whether access to this shared drive and items inside this shared drive is restricted to users
* of the domain to which this shared drive belongs. This restriction may be overridden by other
* sharing policies controlled outside of this shared drive.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean domainUsersOnly;
/**
* Whether access to items inside this shared drive is restricted to its members.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean driveMembersOnly;
/**
* Whether administrative privileges on this shared drive are required to modify restrictions.
* @return value or {@code null} for none
*/
public java.lang.Boolean getAdminManagedRestrictions() {
return adminManagedRestrictions;
}
/**
* Whether administrative privileges on this shared drive are required to modify restrictions.
* @param adminManagedRestrictions adminManagedRestrictions or {@code null} for none
*/
public Restrictions setAdminManagedRestrictions(java.lang.Boolean adminManagedRestrictions) {
this.adminManagedRestrictions = adminManagedRestrictions;
return this;
}
/**
* Whether the options to copy, print, or download files inside this shared drive, should be
* disabled for readers and commenters. When this restriction is set to true, it will override the
* similarly named field to true for any file inside this shared drive.
* @return value or {@code null} for none
*/
public java.lang.Boolean getCopyRequiresWriterPermission() {
return copyRequiresWriterPermission;
}
/**
* Whether the options to copy, print, or download files inside this shared drive, should be
* disabled for readers and commenters. When this restriction is set to true, it will override the
* similarly named field to true for any file inside this shared drive.
* @param copyRequiresWriterPermission copyRequiresWriterPermission or {@code null} for none
*/
public Restrictions setCopyRequiresWriterPermission(java.lang.Boolean copyRequiresWriterPermission) {
this.copyRequiresWriterPermission = copyRequiresWriterPermission;
return this;
}
/**
* Whether access to this shared drive and items inside this shared drive is restricted to users
* of the domain to which this shared drive belongs. This restriction may be overridden by other
* sharing policies controlled outside of this shared drive.
* @return value or {@code null} for none
*/
public java.lang.Boolean getDomainUsersOnly() {
return domainUsersOnly;
}
/**
* Whether access to this shared drive and items inside this shared drive is restricted to users
* of the domain to which this shared drive belongs. This restriction may be overridden by other
* sharing policies controlled outside of this shared drive.
* @param domainUsersOnly domainUsersOnly or {@code null} for none
*/
public Restrictions setDomainUsersOnly(java.lang.Boolean domainUsersOnly) {
this.domainUsersOnly = domainUsersOnly;
return this;
}
/**
* Whether access to items inside this shared drive is restricted to its members.
* @return value or {@code null} for none
*/
public java.lang.Boolean getDriveMembersOnly() {
return driveMembersOnly;
}
/**
* Whether access to items inside this shared drive is restricted to its members.
* @param driveMembersOnly driveMembersOnly or {@code null} for none
*/
public Restrictions setDriveMembersOnly(java.lang.Boolean driveMembersOnly) {
this.driveMembersOnly = driveMembersOnly;
return this;
}
@Override
public Restrictions set(String fieldName, Object value) {
return (Restrictions) super.set(fieldName, value);
}
@Override
public Restrictions clone() {
return (Restrictions) super.clone();
}
}
}
|
hibernate/hibernate-reactive | 35,249 | hibernate-reactive-core/src/main/java/org/hibernate/reactive/sql/results/graph/entity/internal/ReactiveEntityInitializerImpl.java | /* Hibernate, Relational Persistence for Idiomatic Java
*
* SPDX-License-Identifier: Apache-2.0
* Copyright: Red Hat Inc. and Hibernate Authors
*/
package org.hibernate.reactive.sql.results.graph.entity.internal;
import java.util.concurrent.CompletionStage;
import java.util.function.BiFunction;
import org.hibernate.Hibernate;
import org.hibernate.LockMode;
import org.hibernate.annotations.NotFoundAction;
import org.hibernate.bytecode.enhance.spi.interceptor.EnhancementAsProxyLazinessInterceptor;
import org.hibernate.engine.spi.EntityEntry;
import org.hibernate.engine.spi.EntityHolder;
import org.hibernate.engine.spi.EntityKey;
import org.hibernate.engine.spi.EntityUniqueKey;
import org.hibernate.engine.spi.PersistenceContext;
import org.hibernate.engine.spi.PersistentAttributeInterceptor;
import org.hibernate.engine.spi.SharedSessionContractImplementor;
import org.hibernate.engine.spi.Status;
import org.hibernate.metamodel.mapping.internal.ToOneAttributeMapping;
import org.hibernate.persister.entity.EntityPersister;
import org.hibernate.proxy.LazyInitializer;
import org.hibernate.proxy.map.MapProxy;
import org.hibernate.reactive.session.ReactiveQueryProducer;
import org.hibernate.reactive.sql.exec.spi.ReactiveRowProcessingState;
import org.hibernate.reactive.sql.results.graph.ReactiveDomainResultsAssembler;
import org.hibernate.reactive.sql.results.graph.ReactiveInitializer;
import org.hibernate.sql.results.graph.AssemblerCreationState;
import org.hibernate.sql.results.graph.DomainResult;
import org.hibernate.sql.results.graph.DomainResultAssembler;
import org.hibernate.sql.results.graph.Fetch;
import org.hibernate.sql.results.graph.Initializer;
import org.hibernate.sql.results.graph.InitializerData;
import org.hibernate.sql.results.graph.InitializerParent;
import org.hibernate.sql.results.graph.entity.EntityResultGraphNode;
import org.hibernate.sql.results.graph.entity.internal.EntityInitializerImpl;
import org.hibernate.sql.results.jdbc.spi.JdbcValuesSourceProcessingOptions;
import org.hibernate.sql.results.jdbc.spi.RowProcessingState;
import org.hibernate.stat.spi.StatisticsImplementor;
import org.hibernate.type.Type;
import static org.hibernate.bytecode.enhance.spi.LazyPropertyInitializer.UNFETCHED_PROPERTY;
import static org.hibernate.engine.internal.ManagedTypeHelper.asPersistentAttributeInterceptable;
import static org.hibernate.engine.internal.ManagedTypeHelper.isPersistentAttributeInterceptable;
import static org.hibernate.loader.internal.CacheLoadHelper.loadFromSecondLevelCache;
import static org.hibernate.metamodel.mapping.ForeignKeyDescriptor.Nature.TARGET;
import static org.hibernate.proxy.HibernateProxy.extractLazyInitializer;
import static org.hibernate.reactive.util.impl.CompletionStages.completedFuture;
import static org.hibernate.reactive.util.impl.CompletionStages.falseFuture;
import static org.hibernate.reactive.util.impl.CompletionStages.loop;
import static org.hibernate.reactive.util.impl.CompletionStages.trueFuture;
import static org.hibernate.reactive.util.impl.CompletionStages.voidFuture;
public class ReactiveEntityInitializerImpl extends EntityInitializerImpl
implements ReactiveInitializer<EntityInitializerImpl.EntityInitializerData> {
public static class ReactiveEntityInitializerData extends EntityInitializerData {
public ReactiveEntityInitializerData(EntityInitializerImpl initializer, RowProcessingState rowProcessingState) {
super( initializer, rowProcessingState );
}
public void setEntityInstanceForNotify(Object instance) {
super.entityInstanceForNotify = instance;
}
public Object getEntityInstanceForNotify() {
return super.entityInstanceForNotify;
}
public EntityPersister getConcreteDescriptor() {
return super.concreteDescriptor;
}
public void setConcreteDescriptor(EntityPersister entityPersister) {
super.concreteDescriptor = entityPersister;
}
public EntityHolder getEntityHolder() {
return super.entityHolder;
}
public void setEntityHolder(EntityHolder entityHolder) {
super.entityHolder = entityHolder;
}
public EntityKey getEntityKey() {
return super.entityKey;
}
public void setEntityKey(EntityKey entityKey) {
super.entityKey = entityKey;
}
public String getUniqueKeyAttributePath() {
return super.uniqueKeyAttributePath;
}
public Type[] getUniqueKeyPropertyTypes() {
return super.uniqueKeyPropertyTypes;
}
public boolean getShallowCached() {
return super.shallowCached;
}
public LockMode getLockMode() {
return super.lockMode;
}
}
	/**
	 * Creates a reactive entity initializer for the given result-graph node; delegates all
	 * construction to {@link EntityInitializerImpl}, only the reactive resolve/initialize
	 * behavior is overridden by this subclass.
	 */
	public ReactiveEntityInitializerImpl(
			EntityResultGraphNode resultDescriptor,
			String sourceAlias,
			Fetch identifierFetch,
			Fetch discriminatorFetch,
			DomainResult<?> keyResult,
			DomainResult<Object> rowIdResult,
			NotFoundAction notFoundAction,
			boolean affectedByFilter,
			InitializerParent<?> parent,
			boolean isResultInitializer,
			AssemblerCreationState creationState) {
		super(
				resultDescriptor,
				sourceAlias,
				identifierFetch ,
				discriminatorFetch,
				keyResult,
				rowIdResult,
				notFoundAction,
				affectedByFilter,
				parent,
				isResultInitializer,
				creationState
		);
	}
@Override
protected void resolveEntityKey(EntityInitializerData original, Object id) {
ReactiveEntityInitializerData data = (ReactiveEntityInitializerData) original;
if ( data.getConcreteDescriptor() == null ) {
data.setConcreteDescriptor( determineConcreteEntityDescriptor(
data.getRowProcessingState(),
getDiscriminatorAssembler(),
getEntityDescriptor()
) );
assert data.getConcreteDescriptor() != null;
}
data.setEntityKey( new EntityKey( id, data.getConcreteDescriptor() ) );
}
	/**
	 * Reactive counterpart of {@code resolveInstance(Object, EntityInitializerData)}: resolves
	 * this initializer's state from an externally supplied entity reference (a managed instance,
	 * a proxy, or {@code null}) instead of assembling it from the current row, then runs the
	 * reactive initialize stage and the appropriate sub-initializer pass.
	 *
	 * @param instance the entity reference to resolve against; {@code null} marks the data as missing
	 * @param original the per-row state, expected to be a {@link ReactiveEntityInitializerData}
	 * @return a stage completing when resolution (and any triggered loading) is done
	 */
	@Override
	public CompletionStage<Void> reactiveResolveInstance(Object instance, EntityInitializerData original) {
		ReactiveEntityInitializerData data = (ReactiveEntityInitializerData) original;
		if ( instance == null ) {
			setMissing( data );
			return voidFuture();
		}
		data.setInstance( instance );
		final LazyInitializer lazyInitializer = extractLazyInitializer( data.getInstance() );
		final RowProcessingState rowProcessingState = data.getRowProcessingState();
		final SharedSessionContractImplementor session = rowProcessingState.getSession();
		if ( lazyInitializer == null ) {
			// Not a proxy: entity is most probably initialized
			data.setEntityInstanceForNotify( data.getInstance() );
			data.setConcreteDescriptor( session.getEntityPersister( null, data.getInstance() ) );
			resolveEntityKey( data, data.getConcreteDescriptor().getIdentifier( data.getInstance(), session ) );
			data.setEntityHolder( session.getPersistenceContextInternal().getEntityHolder( data.getEntityKey() ) );
			if ( data.getEntityHolder() == null ) {
				// Entity was most probably removed in the same session without setting the reference to null;
				// fall back to key resolution, which is expected to end in the MISSING state (asserted below)
				return reactiveResolveKey( data )
						.thenRun( () -> {
							assert data.getState() == State.MISSING;
							assert getInitializedPart() instanceof ToOneAttributeMapping
									&& ( (ToOneAttributeMapping) getInitializedPart() ).getSideNature() == TARGET;
						} );
			}
			// If the entity initializer is null, we know the entity is fully initialized,
			// otherwise it will be initialized by some other initializer
			data.setState( data.getEntityHolder().getEntityInitializer() == null ? State.INITIALIZED : State.RESOLVED );
		}
		else if ( lazyInitializer.isUninitialized() ) {
			// Uninitialized proxy: resolve the implementation and attach it to the proxy
			data.setState( State.RESOLVED );
			// Read the discriminator from the result set if necessary
			EntityPersister persister = getDiscriminatorAssembler() == null
					? getEntityDescriptor()
					: determineConcreteEntityDescriptor( rowProcessingState, getDiscriminatorAssembler(), getEntityDescriptor() );
			data.setConcreteDescriptor( persister );
			assert data.getConcreteDescriptor() != null;
			resolveEntityKey( data, lazyInitializer.getIdentifier() );
			// Claim the holder so this initializer becomes responsible for loading the entity
			data.setEntityHolder( session.getPersistenceContextInternal().claimEntityHolderIfPossible(
					data.getEntityKey(),
					null,
					rowProcessingState.getJdbcValuesSourceProcessingState(),
					this
			) );
			// Resolve and potentially create the entity instance
			data.setEntityInstanceForNotify( resolveEntityInstance( data ) );
			lazyInitializer.setImplementation( data.getEntityInstanceForNotify() );
			registerLoadingEntity( data, data.getEntityInstanceForNotify() );
		}
		else {
			// Initialized proxy: use its implementation directly
			data.setState( State.INITIALIZED );
			data.setEntityInstanceForNotify( lazyInitializer.getImplementation() );
			data.setConcreteDescriptor( session.getEntityPersister( null, data.getEntityInstanceForNotify() ) );
			resolveEntityKey( data, lazyInitializer.getIdentifier() );
			data.setEntityHolder( session.getPersistenceContextInternal().getEntityHolder( data.getEntityKey() ) );
		}
		// Resolve the identifier's own initializer (possibly reactively), then finish this row
		return reactiveInitializeStage( data, rowProcessingState )
				.thenCompose( v -> {
					upgradeLockMode( data );
					if ( data.getState() == State.INITIALIZED ) {
						registerReloadedEntity( data );
						resolveInstanceSubInitializers( data );
						if ( rowProcessingState.needsResolveState() ) {
							// We need to read result set values to correctly populate the query cache
							resolveState( data );
						}
						return voidFuture();
					}
					else {
						return reactiveResolveKeySubInitializers( data );
					}
				} );
	}
private CompletionStage<Void> reactiveInitializeStage(
ReactiveEntityInitializerData data,
RowProcessingState rowProcessingState) {
if ( getIdentifierAssembler() != null ) {
final Initializer<?> initializer = getIdentifierAssembler().getInitializer();
if ( initializer != null ) {
if ( initializer instanceof ReactiveInitializer ) {
return ( (ReactiveInitializer<?>) initializer )
.reactiveResolveInstance( data.getEntityKey().getIdentifier(), rowProcessingState );
}
else {
initializer.resolveInstance( data.getEntityKey().getIdentifier(), rowProcessingState );
}
}
}
return voidFuture();
}
@Override
public CompletionStage<Void> reactiveResolveInstance(EntityInitializerData original) {
ReactiveEntityInitializerData data = (ReactiveEntityInitializerData) original;
if ( data.getState() != State.KEY_RESOLVED ) {
return voidFuture();
}
final RowProcessingState rowProcessingState = data.getRowProcessingState();
data.setState( State.RESOLVED );
if ( data.getEntityKey() == null ) {
return assembleId( rowProcessingState )
.thenCompose( id -> {
if ( id == null ) {
setMissing( data );
return voidFuture();
}
resolveEntityKey( data, id );
return postAssembleId( rowProcessingState, data );
} );
}
return postAssembleId( rowProcessingState, data );
}
	/**
	 * Continues instance resolution once the entity identifier is available: claims the
	 * {@code EntityHolder} for the key, then resolves the concrete entity instance and
	 * registers any unique-key entry for it.
	 */
	private CompletionStage<Void> postAssembleId(RowProcessingState rowProcessingState, ReactiveEntityInitializerData data) {
		final PersistenceContext persistenceContext = rowProcessingState.getSession().getPersistenceContextInternal();
		// Claim (or join) the holder so concurrent initializers agree on a single owner for this key
		data.setEntityHolder( persistenceContext.claimEntityHolderIfPossible(
				data.getEntityKey(),
				null,
				rowProcessingState.getJdbcValuesSourceProcessingState(),
				this
		) );
		if ( useEmbeddedIdentifierInstanceAsEntity( data ) ) {
			// The embedded identifier instance doubles as the entity instance itself
			data.setEntityInstanceForNotify( rowProcessingState.getEntityId() );
			data.setInstance( data.getEntityInstanceForNotify() );
			postResolveInstance( data );
			return voidFuture();
		}
		return reactiveResolveEntityInstance1( data )
				.thenAccept( v -> {
					if ( data.getUniqueKeyAttributePath() != null ) {
						// Register the instance under its unique key so later unique-key
						// lookups hit the persistence context instead of the database
						final SharedSessionContractImplementor session = rowProcessingState.getSession();
						final EntityPersister concreteDescriptor = getConcreteDescriptor( data );
						final EntityUniqueKey euk = new EntityUniqueKey(
								concreteDescriptor.getEntityName(),
								data.getUniqueKeyAttributePath(),
								rowProcessingState.getEntityUniqueKey(),
								data.getUniqueKeyPropertyTypes()[concreteDescriptor.getSubclassId()],
								session.getFactory()
						);
						session.getPersistenceContextInternal().addEntity( euk, data.getInstance() );
					}
					postResolveInstance( data );
				} );
	}
private CompletionStage<?> assembleId(RowProcessingState rowProcessingState) {
final DomainResultAssembler<?> identifierAssembler = getIdentifierAssembler();
assert identifierAssembler != null;
return identifierAssembler instanceof ReactiveDomainResultsAssembler<?> reactiveAssembler
? reactiveAssembler.reactiveAssemble( (ReactiveRowProcessingState) rowProcessingState )
: completedFuture( identifierAssembler.assemble( rowProcessingState ) );
}
// We could move this method in ORM
private void postResolveInstance(ReactiveEntityInitializerData data) {
if ( data.getInstance() != null ) {
upgradeLockMode( data );
if ( data.getState() == State.INITIALIZED ) {
registerReloadedEntity( data );
if ( data.getRowProcessingState().needsResolveState() ) {
// We need to read result set values to correctly populate the query cache
resolveState( data );
}
}
if ( data.getShallowCached() ) {
initializeSubInstancesFromParent( data );
}
}
}
@Override
public CompletionStage<Void> reactiveInitializeInstance(EntityInitializerData data) {
if ( data.getState() != State.RESOLVED ) {
return voidFuture();
}
if ( !skipInitialization( data ) ) {
assert consistentInstance( data );
return reactiveInitializeEntityInstance( (ReactiveEntityInitializerData) data );
}
data.setState( State.INITIALIZED );
return voidFuture();
}
	/**
	 * Fully initializes the entity instance for the current row: assembles the attribute
	 * state, injects the bytecode-enhancement interceptor when needed, registers the
	 * entity and its entry with the persistence context, updates caches, and records
	 * load statistics. The statement order mirrors ORM's initialization sequence and
	 * must not be rearranged.
	 */
	protected CompletionStage<Void> reactiveInitializeEntityInstance(ReactiveEntityInitializerData data) {
		final RowProcessingState rowProcessingState = data.getRowProcessingState();
		final Object entityIdentifier = data.getEntityKey().getIdentifier();
		final SharedSessionContractImplementor session = rowProcessingState.getSession();
		final PersistenceContext persistenceContext = session.getPersistenceContextInternal();
		return reactiveExtractConcreteTypeStateValues( data )
				.thenAccept( resolvedEntityState -> {
					preLoad( data, resolvedEntityState );
					if ( isPersistentAttributeInterceptable( data.getEntityInstanceForNotify() ) ) {
						final PersistentAttributeInterceptor persistentAttributeInterceptor =
								asPersistentAttributeInterceptable( data.getEntityInstanceForNotify() ).$$_hibernate_getInterceptor();
						if ( persistentAttributeInterceptor == null
								|| persistentAttributeInterceptor instanceof EnhancementAsProxyLazinessInterceptor ) {
							// if we do this after the entity has been initialized the
							// BytecodeLazyAttributeInterceptor#isAttributeLoaded(String fieldName) would return false;
							data.getConcreteDescriptor().getBytecodeEnhancementMetadata()
									.injectInterceptor( data.getEntityInstanceForNotify(), entityIdentifier, session );
						}
					}
					data.getConcreteDescriptor().setPropertyValues( data.getEntityInstanceForNotify(), resolvedEntityState );
					persistenceContext.addEntity( data.getEntityKey(), data.getEntityInstanceForNotify() );
					// Also register possible unique key entries
					registerPossibleUniqueKeyEntries( data, resolvedEntityState, session );
					final Object version = getVersionAssembler() != null ? getVersionAssembler().assemble( rowProcessingState ) : null;
					final Object rowId = getRowIdAssembler() != null ? getRowIdAssembler().assemble( rowProcessingState ) : null;
					// from the perspective of Hibernate, an entity is read locked as soon as it is read
					// so regardless of the requested lock mode, we upgrade to at least the read level
					final LockMode lockModeToAcquire = data.getLockMode() == LockMode.NONE ? LockMode.READ : data.getLockMode();
					final EntityEntry entityEntry = persistenceContext.addEntry(
							data.getEntityInstanceForNotify(),
							Status.LOADING,
							resolvedEntityState,
							rowId,
							data.getEntityKey().getIdentifier(),
							version,
							lockModeToAcquire,
							true,
							data.getConcreteDescriptor(),
							false
					);
					data.getEntityHolder().setEntityEntry( entityEntry );
					registerNaturalIdResolution( data, persistenceContext, resolvedEntityState );
					takeSnapshot( data, session, persistenceContext, entityEntry, resolvedEntityState );
					data.getConcreteDescriptor().afterInitialize( data.getEntityInstanceForNotify(), session );
					assert data.getConcreteDescriptor().getIdentifier( data.getEntityInstanceForNotify(), session ) != null;
					final StatisticsImplementor statistics = session.getFactory().getStatistics();
					if ( statistics.isStatisticsEnabled() ) {
						if ( !rowProcessingState.isQueryCacheHit() ) {
							// Only count a load when the row came from the database, not the query cache
							statistics.loadEntity( data.getConcreteDescriptor().getEntityName() );
						}
					}
					updateCaches(
							data,
							session,
							session.getPersistenceContextInternal(),
							resolvedEntityState,
							version
					);
				} );
	}
protected CompletionStage<Object[]> reactiveExtractConcreteTypeStateValues(ReactiveEntityInitializerData data) {
final RowProcessingState rowProcessingState = data.getRowProcessingState();
final Object[] values = new Object[data.getConcreteDescriptor().getNumberOfAttributeMappings()];
final DomainResultAssembler<?>[] concreteAssemblers = getAssemblers()[data.getConcreteDescriptor().getSubclassId()];
return loop( 0, values.length, i -> {
final DomainResultAssembler<?> assembler = concreteAssemblers[i];
if ( assembler instanceof ReactiveEntityAssembler ) {
return ( (ReactiveEntityAssembler) assembler )
.reactiveAssemble( (ReactiveRowProcessingState) rowProcessingState )
.thenAccept( assembled -> values[i] = assembled );
}
values[i] = assembler == null ? UNFETCHED_PROPERTY : assembler.assemble( rowProcessingState );
return voidFuture();
} ).thenApply( v -> values );
}
	/**
	 * Resolves the entity instance for the current key, deciding between an existing
	 * proxy, an entity already present in the persistence context, an entity supplied
	 * by the execution context (refresh), or a freshly resolved/instantiated entity.
	 * Branch order and the exact state transitions mirror ORM's logic.
	 */
	protected CompletionStage<Void> reactiveResolveEntityInstance1(ReactiveEntityInitializerData data) {
		final Object proxy = data.getEntityHolder().getProxy();
		// Unwrap the proxy only for lazy to-one mappings on enhanced entities
		final boolean unwrapProxy = proxy != null && getInitializedPart() instanceof ToOneAttributeMapping
				&& ( (ToOneAttributeMapping) getInitializedPart() ).isUnwrapProxy()
				&& getConcreteDescriptor( data ).getBytecodeEnhancementMetadata().isEnhancedForLazyLoading();
		final Object entityFromExecutionContext;
		if ( !unwrapProxy && isProxyInstance( proxy ) ) {
			if ( ( entityFromExecutionContext = getEntityFromExecutionContext( data ) ) != null ) {
				data.setEntityInstanceForNotify( entityFromExecutionContext );
				data.setInstance( data.getEntityInstanceForNotify() );
				// If the entity comes from the execution context, it is treated as not initialized
				// so that we can refresh the data as requested
				registerReloadedEntity( data );
			}
			else {
				data.setInstance( proxy );
				if ( Hibernate.isInitialized( data.getInstance() ) ) {
					data.setState( State.INITIALIZED );
					data.setEntityInstanceForNotify( Hibernate.unproxy( data.getInstance() ) );
				}
				else {
					// Uninitialized proxy: resolve the real instance and point the proxy at it
					final LazyInitializer lazyInitializer = extractLazyInitializer( data.getInstance() );
					assert lazyInitializer != null;
					return reactiveResolveEntityInstance2( data )
							.thenAccept( entityInstance -> {
								data.setEntityInstanceForNotify( entityInstance );
								lazyInitializer.setImplementation( data.getEntityInstanceForNotify() );
								ensureEntityIsInitialized( data );
							} );
				}
			}
		}
		else {
			final Object existingEntity = data.getEntityHolder().getEntity();
			if ( existingEntity != null ) {
				// The persistence context already has an instance for this key
				data.setEntityInstanceForNotify( existingEntity );
				data.setInstance( data.getEntityInstanceForNotify() );
				if ( data.getEntityHolder().getEntityInitializer() == null ) {
					assert data.getEntityHolder().isInitialized() == isExistingEntityInitialized( existingEntity );
					if ( data.getEntityHolder().isInitialized() ) {
						data.setState( State.INITIALIZED );
					}
					else if ( isResultInitializer() ) {
						registerLoadingEntity( data, data.getInstance() );
					}
				}
				else if ( data.getEntityHolder().getEntityInitializer() != this ) {
					// Another initializer owns the loading; treat as already initialized here
					data.setState( State.INITIALIZED );
				}
			}
			else if ( ( entityFromExecutionContext = getEntityFromExecutionContext( data ) ) != null ) {
				// This is the entity to refresh, so don't set the state to initialized
				data.setEntityInstanceForNotify( entityFromExecutionContext );
				data.setInstance( data.getEntityInstanceForNotify() );
				if ( isResultInitializer() ) {
					registerLoadingEntity( data, data.getInstance() );
				}
			}
			else {
				assert data.getEntityHolder().getEntityInitializer() == this;
				// look to see if another initializer from a parent load context or an earlier
				// initializer is already loading the entity
				return reactiveResolveEntityInstance2( data )
						.thenAccept( entityInstance -> {
							data.setEntityInstanceForNotify( entityInstance );
							data.setInstance( data.getEntityInstanceForNotify() );
							final Initializer<?> idInitializer;
							if ( data.getEntityHolder().getEntityInitializer() == this && data.getState() != State.INITIALIZED
									&& getIdentifierAssembler() != null
									&& ( idInitializer = getIdentifierAssembler().getInitializer() ) != null ) {
								// If this is the owning initializer and the returned object is not initialized,
								// this means that the entity instance was just instantiated.
								// In this case, we want to call "assemble" and hence "initializeInstance" on the initializer
								// for possibly non-aggregated identifier mappings, so inject the virtual id representation
								idInitializer.initializeInstance( data.getRowProcessingState() );
							}
							ensureEntityIsInitialized( data );
						} );
			}
		}
		ensureEntityIsInitialized( data );
		return voidFuture();
	}
private void ensureEntityIsInitialized(ReactiveEntityInitializerData data) {
// todo: ensure we initialize the entity
assert !data.getShallowCached() || data.getState() == State.INITIALIZED : "Forgot to initialize the entity";
}
protected CompletionStage<Object> reactiveResolveEntityInstance2(ReactiveEntityInitializerData data) {
if ( data.getEntityHolder().getEntityInitializer() == this ) {
assert data.getEntityHolder().getEntity() == null;
return reactiveResolveEntityInstance( data );
}
else {
// the entity is already being loaded elsewhere
return completedFuture( data.getEntityHolder().getEntity() );
}
}
	/**
	 * Produces the entity instance for this row: the optional instance supplied by the
	 * caller, a load through the session for shallow query-cache hits, a reference
	 * cache entry from the second-level cache, or a newly instantiated entity.
	 */
	protected CompletionStage<Object> reactiveResolveEntityInstance(ReactiveEntityInitializerData data) {
		final RowProcessingState rowProcessingState = data.getRowProcessingState();
		final Object resolved = resolveToOptionalInstance( data );
		if ( resolved != null ) {
			registerLoadingEntity( data, resolved );
			return completedFuture( resolved );
		}
		else {
			if ( rowProcessingState.isQueryCacheHit() && getEntityDescriptor().useShallowQueryCacheLayout() ) {
				// We must load the entity this way, because the query cache entry contains only the primary key
				data.setState( State.INITIALIZED );
				final SharedSessionContractImplementor session = rowProcessingState.getSession();
				assert data.getEntityHolder().getEntityInitializer() == this;
				// If this initializer owns the entity, we have to remove the entity holder,
				// because the subsequent loading process will claim the entity
				session.getPersistenceContextInternal().removeEntityHolder( data.getEntityKey() );
				return ( (ReactiveQueryProducer) session ).reactiveInternalLoad(
						data.getConcreteDescriptor().getEntityName(),
						data.getEntityKey().getIdentifier(),
						true,
						false
				);
			}
			// We have to query the second level cache if reference cache entries are used
			else if ( getEntityDescriptor().canUseReferenceCacheEntries() ) {
				final Object cached = resolveInstanceFromCache( data );
				if ( cached != null ) {
					// EARLY EXIT!!!
					// because the second level cache has reference cache entries, the entity is initialized
					data.setState( State.INITIALIZED );
					return completedFuture( cached );
				}
			}
			// Fall through: instantiate a fresh instance and register it as loading
			final Object instance = instantiateEntity( data );
			registerLoadingEntity( data, instance );
			return completedFuture( instance );
		}
	}
// FIXME: I could change the scope of this method in ORM
private Object resolveToOptionalInstance(ReactiveEntityInitializerData data) {
if ( isResultInitializer() ) {
// this isEntityReturn bit is just for entity loaders, not hql/criteria
final JdbcValuesSourceProcessingOptions processingOptions =
data.getRowProcessingState().getJdbcValuesSourceProcessingState().getProcessingOptions();
return matchesOptionalInstance( data, processingOptions ) ? processingOptions.getEffectiveOptionalObject() : null;
}
else {
return null;
}
}
// FIXME: I could change the scope of this method in ORM
private boolean isProxyInstance(Object proxy) {
return proxy != null
&& ( proxy instanceof MapProxy || getEntityDescriptor().getJavaType().getJavaTypeClass().isInstance( proxy ) );
}
// FIXME: I could change the scope of this method in ORM
private Object resolveInstanceFromCache(ReactiveEntityInitializerData data) {
return loadFromSecondLevelCache(
data.getRowProcessingState().getSession().asEventSource(),
null,
data.getLockMode(),
getEntityDescriptor(),
data.getEntityKey()
);
}
// FIXME: I could change the scope of this method in ORM
private boolean matchesOptionalInstance(
ReactiveEntityInitializerData data,
JdbcValuesSourceProcessingOptions processingOptions) {
final Object optionalEntityInstance = processingOptions.getEffectiveOptionalObject();
final Object requestedEntityId = processingOptions.getEffectiveOptionalId();
return requestedEntityId != null
&& optionalEntityInstance != null
&& requestedEntityId.equals( data.getEntityKey().getIdentifier() );
}
	// Thin wrapper kept so the assertion in reactiveResolveEntityInstance1 reads clearly.
	private boolean isExistingEntityInitialized(Object existingEntity) {
		return Hibernate.isInitialized( existingEntity );
	}
@Override
public CompletionStage<Void> reactiveResolveKey(EntityInitializerData data) {
return reactiveResolveKey( (ReactiveEntityInitializerData) data, false );
}
	/**
	 * Resolves the entity key for the current row, resetting all per-row state first.
	 * When {@code entityKeyOnly} is false the instance is resolved as well, and
	 * sub-initializers are activated according to the resulting state.
	 */
	protected CompletionStage<Void> reactiveResolveKey(ReactiveEntityInitializerData data, boolean entityKeyOnly) {
		// todo (6.0) : atm we do not handle sequential selects
		// - see AbstractEntityPersister#hasSequentialSelect and
		// AbstractEntityPersister#getSequentialSelect in 5.2
		if ( data.getState() != State.UNINITIALIZED ) {
			return voidFuture();
		}
		data.setState( State.KEY_RESOLVED );
		// reset row state
		data.setConcreteDescriptor( null );
		data.setEntityKey( null );
		data.setInstance( null );
		data.setEntityInstanceForNotify( null );
		data.setEntityHolder( null );
		// Single-element array used as an out-parameter for the async id resolution
		final Object[] id = new Object[1];
		return initializeId( data, id, entityKeyOnly )
				.thenCompose( initialized -> {
					if ( initialized ) {
						resolveEntityKey( data, id[0] );
						if ( !entityKeyOnly ) {
							// Resolve the entity instance early as we have no key many-to-one
							return reactiveResolveInstance( data )
									.thenCompose( v -> {
										if ( !data.getShallowCached() ) {
											if ( data.getState() == State.INITIALIZED ) {
												if ( data.getEntityHolder().getEntityInitializer() == null ) {
													// The entity is already part of the persistence context,
													// so let's figure out the loaded state and only run sub-initializers if necessary
													return reactiveResolveInstanceSubInitializers( data );
												}
												// If the entity is initialized and getEntityInitializer() == this,
												// we already processed a row for this entity before,
												// but we still have to call resolveKeySubInitializers to activate sub-initializers,
												// because a row might contain data that sub-initializers want to consume
												else {
													// todo: try to diff the eagerness of the sub-initializers to avoid further processing
													return reactiveResolveKeySubInitializers( data );
												}
											}
											else {
												return reactiveResolveKeySubInitializers( data );
											}
										}
										return voidFuture();
									} );
						}
					}
					return voidFuture();
				} );
	}
	/**
	 * Runs the sub-initializers for an entity that already exists in the persistence
	 * context, feeding each one the corresponding value from the entity's loaded state.
	 * Unfetched attributes fall back to key resolution only.
	 */
	protected CompletionStage<Void> reactiveResolveInstanceSubInitializers(ReactiveEntityInitializerData data) {
		final Initializer<?>[] initializers = getSubInitializers()[data.getConcreteDescriptor().getSubclassId()];
		if ( initializers.length == 0 ) {
			return voidFuture();
		}
		final EntityEntry entityEntry = data.getEntityHolder().getEntityEntry();
		final RowProcessingState rowProcessingState = data.getRowProcessingState();
		assert entityEntry == rowProcessingState.getSession()
				.getPersistenceContextInternal()
				.getEntry( data.getEntityInstanceForNotify() );
		final Object[] loadedState = entityEntry.getLoadedState();
		final Object[] state;
		if ( loadedState == null ) {
			if ( entityEntry.getStatus() == Status.READ_ONLY ) {
				// Read-only entities keep no loaded-state snapshot; extract current values instead
				state = data.getConcreteDescriptor().getValues( data.getEntityInstanceForNotify() );
			}
			else {
				// This branch is entered when a load happens while a cache entry is assembling.
				// The EntityEntry has the LOADING state, but the loaded state is still empty.
				assert entityEntry.getStatus() == Status.LOADING;
				// Just skip any initialization in this case as the cache entry assembling will take care of it
				return voidFuture();
			}
		}
		else {
			state = loadedState;
		}
		// state[i] lines up positionally with initializers[i]
		return loop( 0, initializers.length, i -> {
			final Initializer<?> initializer = initializers[i];
			if ( initializer != null ) {
				final Object subInstance = state[i];
				if ( subInstance == UNFETCHED_PROPERTY ) {
					if ( initializer instanceof ReactiveInitializer ) {
						return ( (ReactiveInitializer<?>) initializer )
								.reactiveResolveKey( rowProcessingState );
					}
					else {
						// Go through the normal initializer process
						initializer.resolveKey( rowProcessingState );
					}
				}
				else {
					if ( initializer instanceof ReactiveInitializer ) {
						return ( (ReactiveInitializer<?>) initializer )
								.reactiveResolveInstance( subInstance, rowProcessingState );
					}
					else {
						initializer.resolveInstance( subInstance, rowProcessingState );
					}
				}
			}
			return voidFuture();
		} );
	}
protected CompletionStage<Void> reactiveResolveKeySubInitializers(ReactiveEntityInitializerData data) {
final RowProcessingState rowProcessingState = data.getRowProcessingState();
return loop(
getSubInitializers()[data.getConcreteDescriptor().getSubclassId()],
initializer -> {
if ( initializer != null ) {
if ( initializer instanceof ReactiveInitializer ) {
return ( (ReactiveInitializer<?>) initializer ).reactiveResolveKey( rowProcessingState );
}
initializer.resolveKey( rowProcessingState );
}
return voidFuture();
}
);
}
	/**
	 * Return {@code true} if the identifier has been initialized.
	 * <p>
	 * Resolves the row's identifier into {@code id[0]} (single-element out-parameter).
	 * Returns {@code false} — after calling {@link #setMissing} where appropriate —
	 * when the id is absent, or when the mapping is a key many-to-one (which is
	 * resolved through its sub-initializers instead).
	 */
	private CompletionStage<Boolean> initializeId(ReactiveEntityInitializerData data, Object[] id, boolean entityKeyOnly) {
		final RowProcessingState rowProcessingState = data.getRowProcessingState();
		if ( getIdentifierAssembler() == null ) {
			// No assembler: the id comes directly from the processing state
			id[0] = rowProcessingState.getEntityId();
			assert id[0] != null : "Initializer requires a not null id for loading";
			return trueFuture();
		}
		else {
			//noinspection unchecked
			final Initializer<InitializerData> initializer = (Initializer<InitializerData>) getIdentifierAssembler().getInitializer();
			if ( initializer != null ) {
				final InitializerData subData = initializer.getData( rowProcessingState );
				// NOTE(review): assumes the identifier initializer is always a ReactiveInitializer
				// in this module — confirm; a plain ORM initializer here would throw ClassCastException
				return ( (ReactiveInitializer<InitializerData>) initializer )
						.reactiveResolveKey( subData )
						.thenCompose( v -> {
							if ( subData.getState() == State.MISSING ) {
								setMissing( data );
								return falseFuture();
							}
							else {
								data.setConcreteDescriptor( determineConcreteEntityDescriptor(
										rowProcessingState,
										getDiscriminatorAssembler(),
										getEntityDescriptor()
								) );
								assert data.getConcreteDescriptor() != null;
								if ( isKeyManyToOne() ) {
									// Key many-to-one: the key is produced by sub-initializers, not here
									if ( !data.getShallowCached() && !entityKeyOnly ) {
										resolveKeySubInitializers( data );
									}
									return falseFuture();
								}
							}
							id[0] = getIdentifierAssembler().assemble( rowProcessingState );
							if ( id[0] == null ) {
								setMissing( data );
								return falseFuture();
							}
							return trueFuture();
						} );
			}
			// No nested initializer: assemble the id synchronously
			id[0] = getIdentifierAssembler().assemble( rowProcessingState );
			if ( id[0] == null ) {
				setMissing( data );
				return falseFuture();
			}
			return trueFuture();
		}
	}
	@Override
	protected EntityInitializerData createInitializerData(RowProcessingState rowProcessingState) {
		// Use the reactive data subclass so the reactive resolve/initialize paths can cast safely
		return new ReactiveEntityInitializerData( this, rowProcessingState );
	}
@Override
public CompletionStage<Void> forEachReactiveSubInitializer(
BiFunction<ReactiveInitializer<?>, RowProcessingState, CompletionStage<Void>> consumer,
InitializerData data) {
final RowProcessingState rowProcessingState = data.getRowProcessingState();
return voidFuture()
.thenCompose( v -> {
if ( getKeyAssembler() != null ) {
final Initializer<?> initializer = getKeyAssembler().getInitializer();
if ( initializer != null ) {
return consumer.apply( (ReactiveInitializer<?>) initializer, rowProcessingState );
}
}
return voidFuture();
} )
.thenCompose( v -> {
if ( getIdentifierAssembler() != null ) {
final Initializer<?> initializer = getIdentifierAssembler().getInitializer();
if ( initializer != null ) {
consumer.apply( (ReactiveInitializer<?>) initializer, rowProcessingState );
}
}
return voidFuture();
} )
.thenCompose( v -> {
final ReactiveEntityInitializerDataAdaptor entityInitializerData = new ReactiveEntityInitializerDataAdaptor(
(EntityInitializerData) data );
if ( entityInitializerData.getConcreteDescriptor() == null ) {
return loop( getSubInitializers(), initializers ->
loop( initializers, initializer -> {
if ( initializer != null ) {
return consumer.apply( (ReactiveInitializer<?>) initializer, rowProcessingState );
}
return voidFuture();
} )
);
}
else {
Initializer<?>[] subInitializers = getSubInitializers()[entityInitializerData.getConcreteDescriptor()
.getSubclassId()];
return loop( subInitializers, initializer -> consumer
.apply( (ReactiveInitializer<?>) initializer, rowProcessingState )
);
}
} );
}
	/**
	 * Adaptor that wraps an {@link EntityInitializerData} solely to expose its
	 * protected {@code concreteDescriptor} field to this class.
	 */
	private static class ReactiveEntityInitializerDataAdaptor extends EntityInitializerData {
		public ReactiveEntityInitializerDataAdaptor(EntityInitializerData delegate) {
			super( delegate );
		}
		public EntityPersister getConcreteDescriptor() {
			// Widens access to the protected field inherited from EntityInitializerData
			return concreteDescriptor;
		}
	}
	@Override
	public Object getResolvedInstance(EntityInitializerData data) {
		// Pure delegation; override kept only to make the inherited behavior explicit here
		return super.getResolvedInstance( data );
	}
}
|
googleapis/google-cloud-java | 38,004 | java-certificate-manager/proto-google-cloud-certificate-manager-v1/src/main/java/com/google/cloud/certificatemanager/v1/UpdateCertificateMapEntryRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/certificatemanager/v1/certificate_manager.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.certificatemanager.v1;
/**
*
*
* <pre>
* Request for the `UpdateCertificateMapEntry` method.
* </pre>
*
* Protobuf type {@code google.cloud.certificatemanager.v1.UpdateCertificateMapEntryRequest}
*/
public final class UpdateCertificateMapEntryRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.certificatemanager.v1.UpdateCertificateMapEntryRequest)
UpdateCertificateMapEntryRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use UpdateCertificateMapEntryRequest.newBuilder() to construct.
private UpdateCertificateMapEntryRequest(
com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private UpdateCertificateMapEntryRequest() {}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new UpdateCertificateMapEntryRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.certificatemanager.v1.CertificateManagerProto
.internal_static_google_cloud_certificatemanager_v1_UpdateCertificateMapEntryRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.certificatemanager.v1.CertificateManagerProto
.internal_static_google_cloud_certificatemanager_v1_UpdateCertificateMapEntryRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.certificatemanager.v1.UpdateCertificateMapEntryRequest.class,
com.google.cloud.certificatemanager.v1.UpdateCertificateMapEntryRequest.Builder.class);
}
private int bitField0_;
public static final int CERTIFICATE_MAP_ENTRY_FIELD_NUMBER = 1;
private com.google.cloud.certificatemanager.v1.CertificateMapEntry certificateMapEntry_;
/**
*
*
* <pre>
* Required. A definition of the certificate map entry to create map entry.
* </pre>
*
* <code>
* .google.cloud.certificatemanager.v1.CertificateMapEntry certificate_map_entry = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the certificateMapEntry field is set.
*/
@java.lang.Override
public boolean hasCertificateMapEntry() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. A definition of the certificate map entry to create map entry.
* </pre>
*
* <code>
* .google.cloud.certificatemanager.v1.CertificateMapEntry certificate_map_entry = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The certificateMapEntry.
*/
@java.lang.Override
public com.google.cloud.certificatemanager.v1.CertificateMapEntry getCertificateMapEntry() {
return certificateMapEntry_ == null
? com.google.cloud.certificatemanager.v1.CertificateMapEntry.getDefaultInstance()
: certificateMapEntry_;
}
/**
*
*
* <pre>
* Required. A definition of the certificate map entry to create map entry.
* </pre>
*
* <code>
* .google.cloud.certificatemanager.v1.CertificateMapEntry certificate_map_entry = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.certificatemanager.v1.CertificateMapEntryOrBuilder
getCertificateMapEntryOrBuilder() {
return certificateMapEntry_ == null
? com.google.cloud.certificatemanager.v1.CertificateMapEntry.getDefaultInstance()
: certificateMapEntry_;
}
public static final int UPDATE_MASK_FIELD_NUMBER = 2;
private com.google.protobuf.FieldMask updateMask_;
/**
*
*
* <pre>
* Required. The update mask applies to the resource. For the `FieldMask`
* definition, see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the updateMask field is set.
*/
@java.lang.Override
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. The update mask applies to the resource. For the `FieldMask`
* definition, see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The updateMask.
*/
@java.lang.Override
public com.google.protobuf.FieldMask getUpdateMask() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
/**
*
*
* <pre>
* Required. The update mask applies to the resource. For the `FieldMask`
* definition, see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getCertificateMapEntry());
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeMessage(2, getUpdateMask());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getCertificateMapEntry());
}
if (((bitField0_ & 0x00000002) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.certificatemanager.v1.UpdateCertificateMapEntryRequest)) {
return super.equals(obj);
}
com.google.cloud.certificatemanager.v1.UpdateCertificateMapEntryRequest other =
(com.google.cloud.certificatemanager.v1.UpdateCertificateMapEntryRequest) obj;
if (hasCertificateMapEntry() != other.hasCertificateMapEntry()) return false;
if (hasCertificateMapEntry()) {
if (!getCertificateMapEntry().equals(other.getCertificateMapEntry())) return false;
}
if (hasUpdateMask() != other.hasUpdateMask()) return false;
if (hasUpdateMask()) {
if (!getUpdateMask().equals(other.getUpdateMask())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasCertificateMapEntry()) {
hash = (37 * hash) + CERTIFICATE_MAP_ENTRY_FIELD_NUMBER;
hash = (53 * hash) + getCertificateMapEntry().hashCode();
}
if (hasUpdateMask()) {
hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
hash = (53 * hash) + getUpdateMask().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
  // Standard generated parse entry points. All overloads delegate either directly to
  // PARSER (in-memory sources) or to the GeneratedMessageV3 stream helpers, which
  // adapt parser failures to the declared exception types.
  public static com.google.cloud.certificatemanager.v1.UpdateCertificateMapEntryRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.certificatemanager.v1.UpdateCertificateMapEntryRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.certificatemanager.v1.UpdateCertificateMapEntryRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.certificatemanager.v1.UpdateCertificateMapEntryRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.certificatemanager.v1.UpdateCertificateMapEntryRequest parseFrom(
      byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.certificatemanager.v1.UpdateCertificateMapEntryRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  // Stream-based overloads translate wrapped IOExceptions via parseWithIOException.
  public static com.google.cloud.certificatemanager.v1.UpdateCertificateMapEntryRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.certificatemanager.v1.UpdateCertificateMapEntryRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Delimited variants read a leading varint length prefix before the message bytes.
  public static com.google.cloud.certificatemanager.v1.UpdateCertificateMapEntryRequest
      parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.certificatemanager.v1.UpdateCertificateMapEntryRequest
      parseDelimitedFrom(
          java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.certificatemanager.v1.UpdateCertificateMapEntryRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.certificatemanager.v1.UpdateCertificateMapEntryRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Builder factory methods. newBuilder() starts from the (empty) default instance;
  // toBuilder() copies this message's fields into a fresh Builder via mergeFrom.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(
      com.google.cloud.certificatemanager.v1.UpdateCertificateMapEntryRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // Skip the merge when called on the default instance — nothing to copy.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Request for the `UpdateCertificateMapEntry` method.
   * </pre>
   *
   * Protobuf type {@code google.cloud.certificatemanager.v1.UpdateCertificateMapEntryRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.certificatemanager.v1.UpdateCertificateMapEntryRequest)
      com.google.cloud.certificatemanager.v1.UpdateCertificateMapEntryRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.certificatemanager.v1.CertificateManagerProto
          .internal_static_google_cloud_certificatemanager_v1_UpdateCertificateMapEntryRequest_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.certificatemanager.v1.CertificateManagerProto
          .internal_static_google_cloud_certificatemanager_v1_UpdateCertificateMapEntryRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.certificatemanager.v1.UpdateCertificateMapEntryRequest.class,
              com.google.cloud.certificatemanager.v1.UpdateCertificateMapEntryRequest.Builder
                  .class);
    }
    // Construct using
    // com.google.cloud.certificatemanager.v1.UpdateCertificateMapEntryRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    // Eagerly creates the nested field builders when the protobuf runtime requests it
    // (alwaysUseFieldBuilders); otherwise builders are created lazily on first access.
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getCertificateMapEntryFieldBuilder();
        getUpdateMaskFieldBuilder();
      }
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      certificateMapEntry_ = null;
      if (certificateMapEntryBuilder_ != null) {
        certificateMapEntryBuilder_.dispose();
        certificateMapEntryBuilder_ = null;
      }
      updateMask_ = null;
      if (updateMaskBuilder_ != null) {
        updateMaskBuilder_.dispose();
        updateMaskBuilder_ = null;
      }
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.certificatemanager.v1.CertificateManagerProto
          .internal_static_google_cloud_certificatemanager_v1_UpdateCertificateMapEntryRequest_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.certificatemanager.v1.UpdateCertificateMapEntryRequest
        getDefaultInstanceForType() {
      return com.google.cloud.certificatemanager.v1.UpdateCertificateMapEntryRequest
          .getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.certificatemanager.v1.UpdateCertificateMapEntryRequest build() {
      com.google.cloud.certificatemanager.v1.UpdateCertificateMapEntryRequest result =
          buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.certificatemanager.v1.UpdateCertificateMapEntryRequest buildPartial() {
      com.google.cloud.certificatemanager.v1.UpdateCertificateMapEntryRequest result =
          new com.google.cloud.certificatemanager.v1.UpdateCertificateMapEntryRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Copies only the fields whose presence bit is set into the result, reading from
    // the nested builder when one exists, and mirrors the presence bits on the message.
    private void buildPartial0(
        com.google.cloud.certificatemanager.v1.UpdateCertificateMapEntryRequest result) {
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.certificateMapEntry_ =
            certificateMapEntryBuilder_ == null
                ? certificateMapEntry_
                : certificateMapEntryBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
        to_bitField0_ |= 0x00000002;
      }
      result.bitField0_ |= to_bitField0_;
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other
          instanceof com.google.cloud.certificatemanager.v1.UpdateCertificateMapEntryRequest) {
        return mergeFrom(
            (com.google.cloud.certificatemanager.v1.UpdateCertificateMapEntryRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Field-wise merge: only fields present on `other` overwrite/merge into this builder.
    public Builder mergeFrom(
        com.google.cloud.certificatemanager.v1.UpdateCertificateMapEntryRequest other) {
      if (other
          == com.google.cloud.certificatemanager.v1.UpdateCertificateMapEntryRequest
              .getDefaultInstance()) return this;
      if (other.hasCertificateMapEntry()) {
        mergeCertificateMapEntry(other.getCertificateMapEntry());
      }
      if (other.hasUpdateMask()) {
        mergeUpdateMask(other.getUpdateMask());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Wire-format parse loop: tag 10 = field 1 (certificate_map_entry, length-delimited),
    // tag 18 = field 2 (update_mask, length-delimited); unknown tags go to the unknown-field set.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                input.readMessage(
                    getCertificateMapEntryFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Presence bits: 0x00000001 = certificate_map_entry, 0x00000002 = update_mask.
    private int bitField0_;
    private com.google.cloud.certificatemanager.v1.CertificateMapEntry certificateMapEntry_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.certificatemanager.v1.CertificateMapEntry,
            com.google.cloud.certificatemanager.v1.CertificateMapEntry.Builder,
            com.google.cloud.certificatemanager.v1.CertificateMapEntryOrBuilder>
        certificateMapEntryBuilder_;
    /**
     *
     *
     * <pre>
     * Required. A definition of the certificate map entry to create map entry.
     * </pre>
     *
     * <code>
     * .google.cloud.certificatemanager.v1.CertificateMapEntry certificate_map_entry = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return Whether the certificateMapEntry field is set.
     */
    public boolean hasCertificateMapEntry() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     *
     *
     * <pre>
     * Required. A definition of the certificate map entry to create map entry.
     * </pre>
     *
     * <code>
     * .google.cloud.certificatemanager.v1.CertificateMapEntry certificate_map_entry = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return The certificateMapEntry.
     */
    public com.google.cloud.certificatemanager.v1.CertificateMapEntry getCertificateMapEntry() {
      if (certificateMapEntryBuilder_ == null) {
        return certificateMapEntry_ == null
            ? com.google.cloud.certificatemanager.v1.CertificateMapEntry.getDefaultInstance()
            : certificateMapEntry_;
      } else {
        return certificateMapEntryBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * Required. A definition of the certificate map entry to create map entry.
     * </pre>
     *
     * <code>
     * .google.cloud.certificatemanager.v1.CertificateMapEntry certificate_map_entry = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setCertificateMapEntry(
        com.google.cloud.certificatemanager.v1.CertificateMapEntry value) {
      if (certificateMapEntryBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        certificateMapEntry_ = value;
      } else {
        certificateMapEntryBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. A definition of the certificate map entry to create map entry.
     * </pre>
     *
     * <code>
     * .google.cloud.certificatemanager.v1.CertificateMapEntry certificate_map_entry = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setCertificateMapEntry(
        com.google.cloud.certificatemanager.v1.CertificateMapEntry.Builder builderForValue) {
      if (certificateMapEntryBuilder_ == null) {
        certificateMapEntry_ = builderForValue.build();
      } else {
        certificateMapEntryBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. A definition of the certificate map entry to create map entry.
     * </pre>
     *
     * <code>
     * .google.cloud.certificatemanager.v1.CertificateMapEntry certificate_map_entry = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder mergeCertificateMapEntry(
        com.google.cloud.certificatemanager.v1.CertificateMapEntry value) {
      if (certificateMapEntryBuilder_ == null) {
        // Merge only when a non-default value is already set; otherwise plain assignment.
        if (((bitField0_ & 0x00000001) != 0)
            && certificateMapEntry_ != null
            && certificateMapEntry_
                != com.google.cloud.certificatemanager.v1.CertificateMapEntry
                    .getDefaultInstance()) {
          getCertificateMapEntryBuilder().mergeFrom(value);
        } else {
          certificateMapEntry_ = value;
        }
      } else {
        certificateMapEntryBuilder_.mergeFrom(value);
      }
      if (certificateMapEntry_ != null) {
        bitField0_ |= 0x00000001;
        onChanged();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. A definition of the certificate map entry to create map entry.
     * </pre>
     *
     * <code>
     * .google.cloud.certificatemanager.v1.CertificateMapEntry certificate_map_entry = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder clearCertificateMapEntry() {
      bitField0_ = (bitField0_ & ~0x00000001);
      certificateMapEntry_ = null;
      if (certificateMapEntryBuilder_ != null) {
        certificateMapEntryBuilder_.dispose();
        certificateMapEntryBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. A definition of the certificate map entry to create map entry.
     * </pre>
     *
     * <code>
     * .google.cloud.certificatemanager.v1.CertificateMapEntry certificate_map_entry = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.certificatemanager.v1.CertificateMapEntry.Builder
        getCertificateMapEntryBuilder() {
      bitField0_ |= 0x00000001;
      onChanged();
      return getCertificateMapEntryFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Required. A definition of the certificate map entry to create map entry.
     * </pre>
     *
     * <code>
     * .google.cloud.certificatemanager.v1.CertificateMapEntry certificate_map_entry = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.certificatemanager.v1.CertificateMapEntryOrBuilder
        getCertificateMapEntryOrBuilder() {
      if (certificateMapEntryBuilder_ != null) {
        return certificateMapEntryBuilder_.getMessageOrBuilder();
      } else {
        return certificateMapEntry_ == null
            ? com.google.cloud.certificatemanager.v1.CertificateMapEntry.getDefaultInstance()
            : certificateMapEntry_;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. A definition of the certificate map entry to create map entry.
     * </pre>
     *
     * <code>
     * .google.cloud.certificatemanager.v1.CertificateMapEntry certificate_map_entry = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.certificatemanager.v1.CertificateMapEntry,
            com.google.cloud.certificatemanager.v1.CertificateMapEntry.Builder,
            com.google.cloud.certificatemanager.v1.CertificateMapEntryOrBuilder>
        getCertificateMapEntryFieldBuilder() {
      if (certificateMapEntryBuilder_ == null) {
        certificateMapEntryBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.certificatemanager.v1.CertificateMapEntry,
                com.google.cloud.certificatemanager.v1.CertificateMapEntry.Builder,
                com.google.cloud.certificatemanager.v1.CertificateMapEntryOrBuilder>(
                getCertificateMapEntry(), getParentForChildren(), isClean());
        // Ownership transfers to the builder; the plain field is cleared to avoid
        // maintaining two sources of truth.
        certificateMapEntry_ = null;
      }
      return certificateMapEntryBuilder_;
    }
    private com.google.protobuf.FieldMask updateMask_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>
        updateMaskBuilder_;
    /**
     *
     *
     * <pre>
     * Required. The update mask applies to the resource. For the `FieldMask`
     * definition, see
     * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return Whether the updateMask field is set.
     */
    public boolean hasUpdateMask() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     *
     *
     * <pre>
     * Required. The update mask applies to the resource. For the `FieldMask`
     * definition, see
     * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return The updateMask.
     */
    public com.google.protobuf.FieldMask getUpdateMask() {
      if (updateMaskBuilder_ == null) {
        return updateMask_ == null
            ? com.google.protobuf.FieldMask.getDefaultInstance()
            : updateMask_;
      } else {
        return updateMaskBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The update mask applies to the resource. For the `FieldMask`
     * definition, see
     * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
      if (updateMaskBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        updateMask_ = value;
      } else {
        updateMaskBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The update mask applies to the resource. For the `FieldMask`
     * definition, see
     * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
      if (updateMaskBuilder_ == null) {
        updateMask_ = builderForValue.build();
      } else {
        updateMaskBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The update mask applies to the resource. For the `FieldMask`
     * definition, see
     * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
      if (updateMaskBuilder_ == null) {
        if (((bitField0_ & 0x00000002) != 0)
            && updateMask_ != null
            && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
          getUpdateMaskBuilder().mergeFrom(value);
        } else {
          updateMask_ = value;
        }
      } else {
        updateMaskBuilder_.mergeFrom(value);
      }
      if (updateMask_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The update mask applies to the resource. For the `FieldMask`
     * definition, see
     * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder clearUpdateMask() {
      bitField0_ = (bitField0_ & ~0x00000002);
      updateMask_ = null;
      if (updateMaskBuilder_ != null) {
        updateMaskBuilder_.dispose();
        updateMaskBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The update mask applies to the resource. For the `FieldMask`
     * definition, see
     * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
      bitField0_ |= 0x00000002;
      onChanged();
      return getUpdateMaskFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Required. The update mask applies to the resource. For the `FieldMask`
     * definition, see
     * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
      if (updateMaskBuilder_ != null) {
        return updateMaskBuilder_.getMessageOrBuilder();
      } else {
        return updateMask_ == null
            ? com.google.protobuf.FieldMask.getDefaultInstance()
            : updateMask_;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The update mask applies to the resource. For the `FieldMask`
     * definition, see
     * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>
        getUpdateMaskFieldBuilder() {
      if (updateMaskBuilder_ == null) {
        updateMaskBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.protobuf.FieldMask,
                com.google.protobuf.FieldMask.Builder,
                com.google.protobuf.FieldMaskOrBuilder>(
                getUpdateMask(), getParentForChildren(), isClean());
        updateMask_ = null;
      }
      return updateMaskBuilder_;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.cloud.certificatemanager.v1.UpdateCertificateMapEntryRequest)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.certificatemanager.v1.UpdateCertificateMapEntryRequest)
  // Shared immutable default instance; created once in the static initializer.
  private static final com.google.cloud.certificatemanager.v1.UpdateCertificateMapEntryRequest
      DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE =
        new com.google.cloud.certificatemanager.v1.UpdateCertificateMapEntryRequest();
  }
  public static com.google.cloud.certificatemanager.v1.UpdateCertificateMapEntryRequest
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser used by all parseFrom overloads. On failure it attaches the partially
  // parsed message to the thrown InvalidProtocolBufferException.
  private static final com.google.protobuf.Parser<UpdateCertificateMapEntryRequest> PARSER =
      new com.google.protobuf.AbstractParser<UpdateCertificateMapEntryRequest>() {
        @java.lang.Override
        public UpdateCertificateMapEntryRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<UpdateCertificateMapEntryRequest> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<UpdateCertificateMapEntryRequest> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.certificatemanager.v1.UpdateCertificateMapEntryRequest
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
hibernate/hibernate-orm | 36,043 | hibernate-core/src/main/java/org/hibernate/boot/model/process/spi/MetadataBuildingProcess.java | /*
* SPDX-License-Identifier: Apache-2.0
* Copyright Red Hat Inc. and Hibernate Authors
*/
package org.hibernate.boot.model.process.spi;
import java.io.InputStream;
import java.sql.Types;
import java.time.Duration;
import java.time.Instant;
import java.time.OffsetDateTime;
import java.time.OffsetTime;
import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.UUID;
import java.util.stream.Collectors;
import org.hibernate.AssertionFailure;
import org.hibernate.Internal;
import org.hibernate.boot.MetadataSources;
import org.hibernate.boot.internal.InFlightMetadataCollectorImpl;
import org.hibernate.boot.internal.MetadataBuildingContextRootImpl;
import org.hibernate.boot.internal.RootMappingDefaults;
import org.hibernate.boot.jaxb.Origin;
import org.hibernate.boot.jaxb.SourceType;
import org.hibernate.boot.jaxb.hbm.spi.JaxbHbmHibernateMapping;
import org.hibernate.boot.jaxb.internal.MappingBinder;
import org.hibernate.boot.jaxb.mapping.spi.JaxbEntityMappingsImpl;
import org.hibernate.boot.model.TypeContributions;
import org.hibernate.boot.model.TypeContributor;
import org.hibernate.boot.model.process.internal.ManagedResourcesImpl;
import org.hibernate.boot.model.process.internal.ScanningCoordinator;
import org.hibernate.boot.model.relational.AuxiliaryDatabaseObject;
import org.hibernate.boot.model.relational.Namespace;
import org.hibernate.boot.model.relational.Sequence;
import org.hibernate.boot.model.source.internal.annotations.AnnotationMetadataSourceProcessorImpl;
import org.hibernate.boot.model.source.internal.annotations.DomainModelSource;
import org.hibernate.boot.model.source.internal.hbm.EntityHierarchyBuilder;
import org.hibernate.boot.model.source.internal.hbm.HbmMetadataSourceProcessorImpl;
import org.hibernate.boot.model.source.internal.hbm.MappingDocument;
import org.hibernate.boot.model.source.internal.hbm.ModelBinder;
import org.hibernate.boot.model.source.spi.MetadataSourceProcessor;
import org.hibernate.boot.models.internal.DomainModelCategorizationCollector;
import org.hibernate.boot.models.xml.spi.XmlPreProcessor;
import org.hibernate.boot.models.xml.spi.XmlProcessor;
import org.hibernate.boot.registry.classloading.spi.ClassLoaderService;
import org.hibernate.boot.registry.classloading.spi.ClassLoadingException;
import org.hibernate.boot.spi.AdditionalMappingContributions;
import org.hibernate.boot.spi.AdditionalMappingContributor;
import org.hibernate.boot.spi.BootstrapContext;
import org.hibernate.boot.spi.EffectiveMappingDefaults;
import org.hibernate.boot.spi.InFlightMetadataCollector;
import org.hibernate.boot.spi.MappingDefaults;
import org.hibernate.boot.spi.MetadataBuildingOptions;
import org.hibernate.boot.spi.MetadataImplementor;
import org.hibernate.engine.config.spi.StandardConverters;
import org.hibernate.engine.jdbc.spi.JdbcServices;
import org.hibernate.internal.util.collections.CollectionHelper;
import org.hibernate.internal.util.ReflectHelper;
import org.hibernate.mapping.Table;
import org.hibernate.models.internal.MutableClassDetailsRegistry;
import org.hibernate.models.spi.ClassDetails;
import org.hibernate.models.spi.ClassDetailsRegistry;
import org.hibernate.type.BasicType;
import org.hibernate.type.SqlTypes;
import org.hibernate.type.StandardBasicTypes;
import org.hibernate.type.WrapperArrayHandling;
import org.hibernate.type.descriptor.java.ByteArrayJavaType;
import org.hibernate.type.descriptor.java.CharacterArrayJavaType;
import org.hibernate.type.descriptor.jdbc.JdbcType;
import org.hibernate.type.descriptor.jdbc.JdbcTypeConstructor;
import org.hibernate.type.descriptor.jdbc.JsonArrayJdbcTypeConstructor;
import org.hibernate.type.descriptor.jdbc.JsonAsStringArrayJdbcTypeConstructor;
import org.hibernate.type.descriptor.jdbc.JsonAsStringJdbcType;
import org.hibernate.type.descriptor.jdbc.XmlArrayJdbcTypeConstructor;
import org.hibernate.type.descriptor.jdbc.XmlAsStringArrayJdbcTypeConstructor;
import org.hibernate.type.descriptor.jdbc.XmlAsStringJdbcType;
import org.hibernate.type.descriptor.jdbc.UuidAsBinaryJdbcType;
import org.hibernate.type.descriptor.jdbc.spi.JdbcTypeRegistry;
import org.hibernate.type.descriptor.sql.internal.DdlTypeImpl;
import org.hibernate.type.internal.NamedBasicTypeImpl;
import org.hibernate.type.spi.TypeConfiguration;
import org.hibernate.usertype.CompositeUserType;
import jakarta.persistence.AttributeConverter;
import static org.hibernate.cfg.MappingSettings.XML_MAPPING_ENABLED;
import static org.hibernate.internal.util.collections.CollectionHelper.mutableJoin;
import static org.hibernate.internal.util.config.ConfigurationHelper.getPreferredSqlTypeCodeForArray;
import static org.hibernate.internal.util.config.ConfigurationHelper.getPreferredSqlTypeCodeForDuration;
import static org.hibernate.internal.util.config.ConfigurationHelper.getPreferredSqlTypeCodeForInstant;
import static org.hibernate.internal.util.config.ConfigurationHelper.getPreferredSqlTypeCodeForUuid;
/**
* Represents the process of transforming a {@link MetadataSources}
* reference into a {@link org.hibernate.boot.Metadata} reference. Allows for 2 different process paradigms:<ul>
* <li>
* Single step : as defined by the {@link #build} method; internally leverages the 2-step paradigm
* </li>
* <li>
* Two step : a first step coordinates resource scanning and some other preparation work; a second step
* builds the {@link org.hibernate.boot.Metadata}. A hugely important distinction in the need for the
* steps is that the first phase should strive to not load user entity/component classes so that we can still
* perform enhancement on them later. This approach caters to the 2-phase bootstrap we use in regard to
* WildFly Hibernate-JPA integration. The first step is defined by {@link #prepare} which returns
* a {@link ManagedResources} instance. The second step is defined by calling {@link #complete}
* </li>
* </ul>
*
* @author Steve Ebersole
*/
public class MetadataBuildingProcess {
/**
* Unified single phase for MetadataSources to Metadata process
*
* @param sources The MetadataSources
* @param options The building options
*
* @return The built Metadata
*/
public static MetadataImplementor build(
final MetadataSources sources,
final BootstrapContext bootstrapContext,
final MetadataBuildingOptions options) {
return complete( prepare( sources, bootstrapContext ), bootstrapContext, options );
}
/**
* First step of two-phase for MetadataSources to Metadata process
*
* @param sources The MetadataSources
* @param bootstrapContext The bootstrapContext
*
* @return Token/memento representing all known users resources (classes, packages, mapping files, etc).
*/
public static ManagedResources prepare(
final MetadataSources sources,
final BootstrapContext bootstrapContext) {
final var managedResources = ManagedResourcesImpl.baseline( sources, bootstrapContext );
final boolean xmlMappingEnabled =
bootstrapContext.getConfigurationService()
.getSetting( XML_MAPPING_ENABLED, StandardConverters.BOOLEAN, true );
ScanningCoordinator.INSTANCE.coordinateScan(
managedResources,
bootstrapContext,
xmlMappingEnabled ? sources.getXmlMappingBinderAccess() : null
);
return managedResources;
}
	/**
	 * Second step of two-phase for MetadataSources to Metadata process
	 *
	 * @param managedResources The token/memento from 1st phase
	 * @param bootstrapContext The bootstrap context
	 * @param options The building options
	 *
	 * @return The built Metadata
	 */
	public static MetadataImplementor complete(
			final ManagedResources managedResources,
			final BootstrapContext bootstrapContext,
			final MetadataBuildingOptions options) {
		final var metadataCollector = new InFlightMetadataCollectorImpl( bootstrapContext, options );
		// Contribute basic/JDBC types before any source processing so that bindings
		// can resolve them.
		handleTypes( bootstrapContext, options, metadataCollector );
		final var domainModelSource = processManagedResources(
				managedResources,
				metadataCollector,
				bootstrapContext,
				options.getMappingDefaults()
		);
		final var rootMetadataBuildingContext = new MetadataBuildingContextRootImpl(
				"orm",
				bootstrapContext,
				options,
				metadataCollector,
				domainModelSource.getEffectiveMappingDefaults()
		);
		managedResources.getAttributeConverterDescriptors().forEach( metadataCollector::addAttributeConverter );
		// The TypeConfiguration must be scoped to the root context before binding begins.
		bootstrapContext.getTypeConfiguration().scope( rootMetadataBuildingContext );
		// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
		// Set up the processors and start binding
		//		NOTE : this becomes even more simplified after we move purely
		// 		to unified model
		// final IndexView jandexView = domainModelSource.getJandexIndex();
		coordinateProcessors(
				managedResources,
				options,
				rootMetadataBuildingContext,
				domainModelSource,
				metadataCollector
		);
		final var classLoaderService = bootstrapContext.getClassLoaderService();
		// Additional mappings (AdditionalMappingContributor services) are applied after
		// the primary sources have been processed.
		processAdditionalMappingContributions( metadataCollector, options, classLoaderService, rootMetadataBuildingContext );
		applyExtraQueryImports( managedResources, metadataCollector );
		return metadataCollector.buildMetadataInstance( rootMetadataBuildingContext );
	}
	/**
	 * Drives the {@code MetadataSourceProcessor} lifecycle over both the
	 * {@code hbm.xml} sources (a no-op processor when XML mapping is disabled)
	 * and the annotation/mapping-XML sources. For every phase the hbm processor
	 * is invoked first, then the annotation processor.
	 *
	 * @param managedResources All known user resources (classes, packages, mapping files, etc.)
	 * @param options The building options
	 * @param rootMetadataBuildingContext The root metadata-building context
	 * @param domainModelSource Pre-processed domain model info (class details, registrations, ...)
	 * @param metadataCollector Collector for the in-flight metadata
	 */
	@Internal
	public static void coordinateProcessors(
			ManagedResources managedResources,
			MetadataBuildingOptions options,
			MetadataBuildingContextRootImpl rootMetadataBuildingContext,
			DomainModelSource domainModelSource,
			InFlightMetadataCollectorImpl metadataCollector) {
		// composite processor: delegates each phase to hbm first, then annotations
		final var processor = new MetadataSourceProcessor() {
			private final MetadataSourceProcessor hbmProcessor = options.isXmlMappingEnabled()
					? new HbmMetadataSourceProcessorImpl( managedResources, rootMetadataBuildingContext )
					: new NoOpMetadataSourceProcessorImpl();
			private final AnnotationMetadataSourceProcessorImpl annotationProcessor =
					new AnnotationMetadataSourceProcessorImpl(
							managedResources,
							domainModelSource,
							rootMetadataBuildingContext
					);
			@Override
			public void prepare() {
				hbmProcessor.prepare();
				annotationProcessor.prepare();
			}
			@Override
			public void processTypeDefinitions() {
				hbmProcessor.processTypeDefinitions();
				annotationProcessor.processTypeDefinitions();
			}
			@Override
			public void processQueryRenames() {
				hbmProcessor.processQueryRenames();
				annotationProcessor.processQueryRenames();
			}
			@Override
			public void processNamedQueries() {
				hbmProcessor.processNamedQueries();
				annotationProcessor.processNamedQueries();
			}
			@Override
			public void processAuxiliaryDatabaseObjectDefinitions() {
				hbmProcessor.processAuxiliaryDatabaseObjectDefinitions();
				annotationProcessor.processAuxiliaryDatabaseObjectDefinitions();
			}
			@Override
			public void processIdentifierGenerators() {
				hbmProcessor.processIdentifierGenerators();
				annotationProcessor.processIdentifierGenerators();
			}
			@Override
			public void processFilterDefinitions() {
				hbmProcessor.processFilterDefinitions();
				annotationProcessor.processFilterDefinitions();
			}
			@Override
			public void processFetchProfiles() {
				hbmProcessor.processFetchProfiles();
				annotationProcessor.processFetchProfiles();
			}
			@Override
			public void prepareForEntityHierarchyProcessing() {
				hbmProcessor.prepareForEntityHierarchyProcessing();
				annotationProcessor.prepareForEntityHierarchyProcessing();
			}
			@Override
			public void processEntityHierarchies(Set<String> processedEntityNames) {
				hbmProcessor.processEntityHierarchies( processedEntityNames );
				annotationProcessor.processEntityHierarchies( processedEntityNames );
			}
			@Override
			public void postProcessEntityHierarchies() {
				hbmProcessor.postProcessEntityHierarchies();
				annotationProcessor.postProcessEntityHierarchies();
			}
			@Override
			public void processResultSetMappings() {
				hbmProcessor.processResultSetMappings();
				annotationProcessor.processResultSetMappings();
			}
			@Override
			public void finishUp() {
				hbmProcessor.finishUp();
				annotationProcessor.finishUp();
			}
		};
		// execute the processing phases in a fixed order: globals (types, query
		// renames, aux objects, generators, filters, fetch profiles) first ...
		processor.prepare();
		processor.processTypeDefinitions();
		processor.processQueryRenames();
		processor.processAuxiliaryDatabaseObjectDefinitions();
		processor.processIdentifierGenerators();
		processor.processFilterDefinitions();
		processor.processFetchProfiles();
		// ... then the entity hierarchies themselves
		final Set<String> processedEntityNames = new HashSet<>();
		processor.prepareForEntityHierarchyProcessing();
		processor.processEntityHierarchies( processedEntityNames );
		processor.postProcessEntityHierarchies();
		processor.processResultSetMappings();
		metadataCollector.processSecondPasses( rootMetadataBuildingContext );
		// Make sure collections are fully bound before processing named queries as hbm result set mappings require it
		processor.processNamedQueries();
		processor.finishUp();
	}
	/**
	 * Pre-processes the managed resources into a {@link DomainModelSource}:
	 * pre-processes XML mappings, collects all known managed class names,
	 * categorizes them (root entities, mapped-superclasses, embeddables, ...)
	 * and finally applies the non-metadata-complete XML overlays.
	 *
	 * @param managedResources All known user resources (classes, packages, mapping files, etc.)
	 * @param metadataCollector Collector for the in-flight metadata
	 * @param bootstrapContext Access to bootstrap-level state and services
	 * @param optionDefaults Mapping defaults from the building options
	 *
	 * @return The pre-processed domain model info consumed by the source processors
	 */
	@Internal
	public static DomainModelSource processManagedResources(
			ManagedResources managedResources,
			InFlightMetadataCollector metadataCollector,
			BootstrapContext bootstrapContext,
			MappingDefaults optionDefaults) {
		// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
		//	- pre-process the XML
		//	- collect all known classes
		//	- resolve (possibly building) Jandex index
		//	- build the ModelsContext
		//
		// INPUTS:
		//		- serviceRegistry
		//		- managedResources
		//		- bootstrapContext (supplied Jandex index, if one)
		//
		// OUTPUTS:
		//		- xmlPreProcessingResult
		//		- allKnownClassNames (technically could be included in xmlPreProcessingResult)
		//		- ModelsContext
		final var aggregatedPersistenceUnitMetadata = metadataCollector.getPersistenceUnitMetadata();
		final var modelsContext = bootstrapContext.getModelsContext();
		final var xmlPreProcessingResult = XmlPreProcessor.preProcessXmlResources(
				managedResources,
				aggregatedPersistenceUnitMetadata
		);
		// union of annotated class references, annotated class names and XML-mapped classes
		final List<String> allKnownClassNames = mutableJoin(
				managedResources.getAnnotatedClassReferences().stream().map( Class::getName ).collect( Collectors.toList() ),
				managedResources.getAnnotatedClassNames(),
				xmlPreProcessingResult.getMappedClasses()
		);
		// include package-info for each annotated package, when one exists
		managedResources.getAnnotatedPackageNames().forEach( (packageName) -> {
			try {
				final var packageInfoClass = modelsContext.getClassLoading().classForName( packageName + ".package-info" );
				allKnownClassNames.add( packageInfoClass.getName() );
			}
			catch (ClassLoadingException classLoadingException) {
				// no package-info, so there can be no annotations... just skip it
			}
		} );
		// NOTE(review): the annotated class references were already folded into
		// allKnownClassNames by the mutableJoin above, so this re-add looks redundant.
		// Duplicates are tolerated later because categorization dedupes via
		// categorizedClassNames — confirm before removing.
		managedResources.getAnnotatedClassReferences().forEach( (clazz) -> allKnownClassNames.add( clazz.getName() ) );
		// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
		//	- process metadata-complete XML
		//	- collect overlay XML
		//	- process annotations (including those from metadata-complete XML)
		//	- apply overlay XML
		//
		// INPUTS:
		//		- "options" (areIdGeneratorsGlobal, etc)
		//		- xmlPreProcessingResult
		//		- ModelsContext
		//
		// OUTPUTS
		//		- rootEntities
		//		- mappedSuperClasses
		//  	- embeddables
		final var classDetailsRegistry = modelsContext.getClassDetailsRegistry();
		final var modelCategorizationCollector = new DomainModelCategorizationCollector(
				metadataCollector.getGlobalRegistrations(),
				modelsContext
		);
		final var rootMappingDefaults = new RootMappingDefaults(
				optionDefaults,
				aggregatedPersistenceUnitMetadata
		);
		final var xmlProcessingResult = XmlProcessor.processXml(
				xmlPreProcessingResult,
				aggregatedPersistenceUnitMetadata,
				modelCategorizationCollector::apply,
				modelsContext,
				bootstrapContext,
				rootMappingDefaults
		);
		// tracks which names have already been categorized (also guards recursion
		// up the class hierarchies inside applyKnownClass)
		final HashSet<String> categorizedClassNames = new HashSet<>();
		// apply known classes
		allKnownClassNames.forEach( (className) -> {
			if ( categorizedClassNames.add( className ) ) {
				// not known yet
				final ClassDetails classDetails = classDetailsRegistry.resolveClassDetails( className );
				applyKnownClass( classDetails, categorizedClassNames, classDetailsRegistry, modelCategorizationCollector );
			}
		} );
		// apply known "names" - generally this handles dynamic models
		xmlPreProcessingResult.getMappedNames().forEach( (mappedName) -> {
			if ( categorizedClassNames.add( mappedName ) ) {
				// not known yet
				final ClassDetails classDetails = classDetailsRegistry.resolveClassDetails( mappedName );
				applyKnownClass( classDetails, categorizedClassNames, classDetailsRegistry, modelCategorizationCollector );
			}
		} );
		// apply the overlay XML last
		xmlProcessingResult.apply();
		return new DomainModelSource(
				classDetailsRegistry,
				CollectionHelper.mutableJoin( allKnownClassNames, xmlPreProcessingResult.getMappedNames() ),
				modelCategorizationCollector.getGlobalRegistrations(),
				rootMappingDefaults,
				aggregatedPersistenceUnitMetadata
		);
	}
private static void applyKnownClass(
String className,
HashSet<String> categorizedClassNames,
ClassDetailsRegistry classDetailsRegistry,
DomainModelCategorizationCollector modelCategorizationCollector) {
if ( categorizedClassNames.add( className ) ) {
final ClassDetails classDetails = classDetailsRegistry.resolveClassDetails( className );
applyKnownClass( classDetails, categorizedClassNames,classDetailsRegistry, modelCategorizationCollector );
}
}
private static void applyKnownClass(
ClassDetails classDetails,
HashSet<String> categorizedClassNames,
ClassDetailsRegistry classDetailsRegistry,
DomainModelCategorizationCollector modelCategorizationCollector) {
modelCategorizationCollector.apply( classDetails );
final var superClass = classDetails.getSuperClass();
if ( superClass != null && superClass != ClassDetails.OBJECT_CLASS_DETAILS ) {
if ( categorizedClassNames.add( superClass.getClassName() ) ) {
applyKnownClass( superClass, categorizedClassNames, classDetailsRegistry, modelCategorizationCollector );
}
}
}
	/**
	 * Discovers all {@code AdditionalMappingContributor} services via the
	 * {@link ClassLoaderService} and lets each one contribute extra mappings
	 * (entities, class details, XML bindings, tables, sequences, ...), then
	 * applies the collected contributions.
	 */
	private static void processAdditionalMappingContributions(
			InFlightMetadataCollectorImpl metadataCollector,
			MetadataBuildingOptions options,
			ClassLoaderService classLoaderService,
			MetadataBuildingContextRootImpl rootMetadataBuildingContext) {
		final var contributions = new AdditionalMappingContributionsImpl(
				metadataCollector,
				options,
				// XML binding support only when XML mapping is enabled
				options.isXmlMappingEnabled() ? new MappingBinder( classLoaderService, () -> false ) : null,
				rootMetadataBuildingContext
		);
		final var additionalMappingContributors =
				classLoaderService.loadJavaServices( AdditionalMappingContributor.class );
		additionalMappingContributors.forEach( (contributor) -> {
			// track the current contributor name so contributed hbm documents are attributed to it
			contributions.setCurrentContributor( contributor.getContributorName() );
			try {
				contributor.contribute(
						contributions,
						metadataCollector,
						classLoaderService,
						rootMetadataBuildingContext
				);
			}
			finally {
				contributions.setCurrentContributor( null );
			}
		} );
		// bind everything that was contributed
		contributions.complete();
	}
	/**
	 * Collects contributions made by {@code AdditionalMappingContributor}
	 * services. Contributions are accumulated as the contributors run and
	 * applied at the end via {@link #complete()}.
	 */
	private static class AdditionalMappingContributionsImpl implements AdditionalMappingContributions {
		private final InFlightMetadataCollectorImpl metadataCollector;
		private final MetadataBuildingOptions options;
		// null when XML mapping is disabled
		private final MappingBinder mappingBinder;
		private final MetadataBuildingContextRootImpl rootMetadataBuildingContext;
		// accumulates contributed hbm.xml hierarchies for binding in complete()
		private final EntityHierarchyBuilder hierarchyBuilder = new EntityHierarchyBuilder();
		// lazily created lists of contributed entities / class details / orm.xml bindings
		private List<Class<?>> additionalEntityClasses;
		private List<ClassDetails> additionalClassDetails;
		private List<JaxbEntityMappingsImpl> additionalJaxbMappings;
		// true when at least one hbm.xml binding was contributed
		private boolean extraHbmXml = false;
		// name of the contributor currently running; "orm" when none
		private String currentContributor;
		public AdditionalMappingContributionsImpl(
				InFlightMetadataCollectorImpl metadataCollector,
				MetadataBuildingOptions options,
				MappingBinder mappingBinder,
				MetadataBuildingContextRootImpl rootMetadataBuildingContext) {
			this.metadataCollector = metadataCollector;
			this.options = options;
			this.mappingBinder = mappingBinder;
			this.rootMetadataBuildingContext = rootMetadataBuildingContext;
		}
		public void setCurrentContributor(String contributor) {
			// default to the "orm" contributor when cleared
			this.currentContributor = contributor == null ? "orm" : contributor;
		}
		@Override
		public void contributeEntity(Class<?> entityType) {
			if ( additionalEntityClasses == null ) {
				additionalEntityClasses = new ArrayList<>();
			}
			additionalEntityClasses.add( entityType );
		}
		@Override
		public void contributeManagedClass(ClassDetails classDetails) {
			if ( additionalClassDetails == null ) {
				additionalClassDetails = new ArrayList<>();
			}
			additionalClassDetails.add( classDetails );
			// also make the class details resolvable through the registry
			rootMetadataBuildingContext.getBootstrapContext()
					.getModelsContext()
					.getClassDetailsRegistry()
					.as( MutableClassDetailsRegistry.class )
					.addClassDetails( classDetails.getName(), classDetails );
		}
		@Override
		public void contributeBinding(InputStream xmlStream) {
			// NOTE(review): mappingBinder is null when XML mapping is disabled,
			// so calling this in that case would NPE — presumably contributors
			// only call it when XML is enabled. Confirm.
			final Origin origin = new Origin( SourceType.INPUT_STREAM, null );
			final var binding = mappingBinder.bind( xmlStream, origin );
			final var bindingRoot = binding.getRoot();
			// dispatch to the hbm.xml or mapping-xml overload based on the root type
			if ( bindingRoot instanceof JaxbHbmHibernateMapping hibernateMapping ) {
				contributeBinding( hibernateMapping );
			}
			else if ( bindingRoot instanceof JaxbEntityMappingsImpl entityMappings ) {
				contributeBinding( entityMappings );
			}
			else {
				throw new AssertionFailure( "Unexpected binding type" );
			}
		}
		@Override
		public void contributeBinding(JaxbEntityMappingsImpl mappingJaxbBinding) {
			// silently ignored when XML mapping is disabled
			if ( options.isXmlMappingEnabled() ) {
				if ( additionalJaxbMappings == null ) {
					additionalJaxbMappings = new ArrayList<>();
				}
				additionalJaxbMappings.add( mappingJaxbBinding );
			}
		}
		@Override
		public void contributeBinding(JaxbHbmHibernateMapping hbmJaxbBinding) {
			// silently ignored when XML mapping is disabled
			if ( options.isXmlMappingEnabled() ) {
				extraHbmXml = true;
				hierarchyBuilder.indexMappingDocument( new MappingDocument(
						currentContributor,
						hbmJaxbBinding,
						new Origin( SourceType.OTHER, null ),
						rootMetadataBuildingContext
				) );
			}
		}
		@Override
		public void contributeTable(Table table) {
			// register the table under its catalog/schema namespace
			final Namespace namespace = metadataCollector.getDatabase().locateNamespace(
					table.getCatalogIdentifier(),
					table.getSchemaIdentifier()
			);
			namespace.registerTable( table.getNameIdentifier(), table );
			metadataCollector.addTableNameBinding( table.getNameIdentifier(), table );
		}
		@Override
		public void contributeSequence(Sequence sequence) {
			// register the sequence under its catalog/schema namespace
			final Namespace namespace = metadataCollector.getDatabase().locateNamespace(
					sequence.getName().getCatalogName(),
					sequence.getName().getSchemaName()
			);
			namespace.registerSequence( sequence.getName().getSequenceName(), sequence );
		}
		@Override
		public void contributeAuxiliaryDatabaseObject(AuxiliaryDatabaseObject auxiliaryDatabaseObject) {
			metadataCollector.addAuxiliaryDatabaseObject( auxiliaryDatabaseObject );
		}
		@Override
		public EffectiveMappingDefaults getEffectiveMappingDefaults() {
			return rootMetadataBuildingContext.getEffectiveDefaults();
		}
		/**
		 * Applies everything that was contributed: annotation/orm.xml based
		 * contributions first, then any contributed hbm.xml hierarchies.
		 */
		public void complete() {
			// annotations / orm.xml
			if ( additionalEntityClasses != null || additionalClassDetails != null || additionalJaxbMappings != null ) {
				AnnotationMetadataSourceProcessorImpl.processAdditionalMappings(
						additionalEntityClasses,
						additionalClassDetails,
						additionalJaxbMappings,
						rootMetadataBuildingContext,
						options
				);
			}
			// hbm.xml
			if ( extraHbmXml ) {
				final var binder = ModelBinder.prepare( rootMetadataBuildingContext );
				for ( var entityHierarchySource : hierarchyBuilder.buildHierarchies() ) {
					binder.bindEntityHierarchy( entityHierarchySource );
				}
			}
		}
	}
private static void applyExtraQueryImports(
ManagedResources managedResources,
InFlightMetadataCollectorImpl metadataCollector) {
final var extraQueryImports = managedResources.getExtraQueryImports();
if ( extraQueryImports != null && !extraQueryImports.isEmpty() ) {
for ( var entry : extraQueryImports.entrySet() ) {
metadataCollector.addImport( entry.getKey(), entry.getValue().getName() );
}
}
}
// todo (7.0) : buildJandexInitializer
// private static JandexInitManager buildJandexInitializer(
// MetadataBuildingOptions options,
// ClassLoaderAccess classLoaderAccess) {
// final boolean autoIndexMembers = ConfigurationHelper.getBoolean(
// org.hibernate.cfg.AvailableSettings.ENABLE_AUTO_INDEX_MEMBER_TYPES,
// options.getServiceRegistry().getService( ConfigurationService.class ).getSettings(),
// false
// );
//
// return new JandexInitManager( options.getJandexView(), classLoaderAccess, autoIndexMembers );
// }
	/**
	 * Sets up the {@code TypeConfiguration} for the bootstrap: applies wrapper-array
	 * handling, lets the dialect and discovered {@code TypeContributor} services
	 * contribute types, installs fallback JDBC type descriptors, and finally applies
	 * preference-driven overrides (UUID, Duration, Instant, time-zone storage).
	 * Order matters: dialect/contributor registrations happen before fallbacks and
	 * preference overrides so the overrides see the final registrations.
	 */
	private static void handleTypes(
			BootstrapContext bootstrapContext,
			MetadataBuildingOptions options,
			InFlightMetadataCollector metadataCollector) {
		final var classLoaderService = bootstrapContext.getClassLoaderService();
		final var typeConfiguration = bootstrapContext.getTypeConfiguration();
		final var serviceRegistry = bootstrapContext.getServiceRegistry();
		final var jdbcTypeRegistry = typeConfiguration.getJdbcTypeRegistry();
		// adaptor handed to the dialect and TypeContributor services
		final var typeContributions = new TypeContributions() {
			@Override
			public TypeConfiguration getTypeConfiguration() {
				return typeConfiguration;
			}
			@Override
			public void contributeAttributeConverter(Class<? extends AttributeConverter<?,?>> converterClass) {
				metadataCollector.getConverterRegistry().addAttributeConverter( converterClass );
			}
			@Override
			public void contributeType(CompositeUserType<?> type) {
				options.getCompositeUserTypes().add( type );
			}
		};
		// legacy handling treats Byte[]/Character[] like byte[]/char[]
		if ( options.getWrapperArrayHandling() == WrapperArrayHandling.LEGACY ) {
			typeConfiguration.getJavaTypeRegistry().addDescriptor( ByteArrayJavaType.INSTANCE );
			typeConfiguration.getJavaTypeRegistry().addDescriptor( CharacterArrayJavaType.INSTANCE );
			final var basicTypeRegistry = typeConfiguration.getBasicTypeRegistry();
			basicTypeRegistry.addTypeReferenceRegistrationKey(
					StandardBasicTypes.CHARACTER_ARRAY.getName(),
					Character[].class.getName(), "Character[]"
			);
			basicTypeRegistry.addTypeReferenceRegistrationKey(
					StandardBasicTypes.BINARY_WRAPPER.getName(),
					Byte[].class.getName(), "Byte[]"
			);
		}
		// add Dialect contributed types
		final var dialect = options.getServiceRegistry().requireService( JdbcServices.class ).getDialect();
		dialect.contribute( typeContributions, options.getServiceRegistry() );
		// add TypeContributor contributed types.
		for ( var typeContributor : classLoaderService.loadJavaServices( TypeContributor.class ) ) {
			typeContributor.contribute( typeContributions, options.getServiceRegistry() );
		}
		// add fallback type descriptors
		final int preferredSqlTypeCodeForUuid = getPreferredSqlTypeCodeForUuid( serviceRegistry );
		if ( preferredSqlTypeCodeForUuid != SqlTypes.UUID ) {
			// re-map the UUID basic type to the preferred SQL type code
			adaptToPreferredSqlTypeCode(
					typeConfiguration,
					jdbcTypeRegistry,
					preferredSqlTypeCodeForUuid,
					UUID.class,
					StandardBasicTypes.UUID.getName(),
					"org.hibernate.type.PostgresUUIDType",
					"uuid",
					"pg-uuid"
			);
		}
		else {
			jdbcTypeRegistry.addDescriptorIfAbsent( UuidAsBinaryJdbcType.INSTANCE );
		}
		jdbcTypeRegistry.addDescriptorIfAbsent( JsonAsStringJdbcType.VARCHAR_INSTANCE );
		jdbcTypeRegistry.addDescriptorIfAbsent( XmlAsStringJdbcType.VARCHAR_INSTANCE );
		// pick JSON array constructor based on whether native JSON DDL is in use
		if ( jdbcTypeRegistry.getConstructor( SqlTypes.JSON_ARRAY ) == null ) {
			if ( jdbcTypeRegistry.getDescriptor( SqlTypes.JSON ).getDdlTypeCode() == SqlTypes.JSON ) {
				jdbcTypeRegistry.addTypeConstructor( JsonArrayJdbcTypeConstructor.INSTANCE );
			}
			else {
				jdbcTypeRegistry.addTypeConstructor( JsonAsStringArrayJdbcTypeConstructor.INSTANCE );
			}
		}
		// pick XML array constructor based on whether native SQLXML DDL is in use
		if ( jdbcTypeRegistry.getConstructor( SqlTypes.XML_ARRAY ) == null ) {
			if ( jdbcTypeRegistry.getDescriptor( SqlTypes.SQLXML ).getDdlTypeCode() == SqlTypes.SQLXML ) {
				jdbcTypeRegistry.addTypeConstructor( XmlArrayJdbcTypeConstructor.INSTANCE );
			}
			else {
				jdbcTypeRegistry.addTypeConstructor( XmlAsStringArrayJdbcTypeConstructor.INSTANCE );
			}
		}
		if ( jdbcTypeRegistry.getConstructor( SqlTypes.ARRAY ) == null ) {
			// Default the array constructor to e.g. JSON_ARRAY/XML_ARRAY if needed
			final JdbcTypeConstructor constructor =
					jdbcTypeRegistry.getConstructor( getPreferredSqlTypeCodeForArray( serviceRegistry ) );
			if ( constructor != null ) {
				jdbcTypeRegistry.addTypeConstructor( SqlTypes.ARRAY, constructor );
			}
		}
		final int preferredSqlTypeCodeForDuration = getPreferredSqlTypeCodeForDuration( serviceRegistry );
		if ( preferredSqlTypeCodeForDuration != SqlTypes.INTERVAL_SECOND ) {
			// re-map the Duration basic type to the preferred SQL type code
			adaptToPreferredSqlTypeCode(
					typeConfiguration,
					jdbcTypeRegistry,
					preferredSqlTypeCodeForDuration,
					Duration.class,
					StandardBasicTypes.DURATION.getName(),
					"org.hibernate.type.DurationType"
			);
		}
		else {
			addFallbackIfNecessary( jdbcTypeRegistry, SqlTypes.INTERVAL_SECOND, SqlTypes.DURATION );
		}
		// fallbacks for type codes a dialect may not have registered
		addFallbackIfNecessary( jdbcTypeRegistry, SqlTypes.INET, SqlTypes.VARBINARY );
		addFallbackIfNecessary( jdbcTypeRegistry, SqlTypes.GEOMETRY, SqlTypes.VARBINARY );
		addFallbackIfNecessary( jdbcTypeRegistry, SqlTypes.POINT, SqlTypes.VARBINARY );
		addFallbackIfNecessary( jdbcTypeRegistry, SqlTypes.GEOGRAPHY, SqlTypes.GEOMETRY );
		addFallbackIfNecessary( jdbcTypeRegistry, SqlTypes.MATERIALIZED_BLOB, SqlTypes.BLOB );
		addFallbackIfNecessary( jdbcTypeRegistry, SqlTypes.MATERIALIZED_CLOB, SqlTypes.CLOB );
		addFallbackIfNecessary( jdbcTypeRegistry, SqlTypes.MATERIALIZED_NCLOB, SqlTypes.NCLOB );
		final var ddlTypeRegistry = typeConfiguration.getDdlTypeRegistry();
		// Fallback to the geometry DdlType when geography is requested
		final var geometryType = ddlTypeRegistry.getDescriptor( SqlTypes.GEOMETRY );
		if ( geometryType != null ) {
			ddlTypeRegistry.addDescriptorIfAbsent(
					new DdlTypeImpl(
							SqlTypes.GEOGRAPHY,
							geometryType.getTypeName( (Long) null, (Integer) null, (Integer) null ),
							dialect
					)
			);
		}
		// add explicit application registered types
		typeConfiguration.addBasicTypeRegistrationContributions( options.getBasicTypeRegistrations() );
		for ( var compositeUserType : options.getCompositeUserTypes() ) {
			metadataCollector.registerCompositeUserType( compositeUserType.returnedClass(),
					ReflectHelper.getClass( compositeUserType.getClass() ) );
		}
		// time-zone storage overrides (NORMALIZE / NORMALIZE_UTC)
		final var timestampWithTimeZoneOverride = getTimestampWithTimeZoneOverride( options, jdbcTypeRegistry );
		if ( timestampWithTimeZoneOverride != null ) {
			adaptTimestampTypesToDefaultTimeZoneStorage( typeConfiguration, timestampWithTimeZoneOverride );
		}
		final var timeWithTimeZoneOverride = getTimeWithTimeZoneOverride( options, jdbcTypeRegistry );
		if ( timeWithTimeZoneOverride != null ) {
			adaptTimeTypesToDefaultTimeZoneStorage( typeConfiguration, timeWithTimeZoneOverride );
		}
		final int preferredSqlTypeCodeForInstant = getPreferredSqlTypeCodeForInstant( serviceRegistry );
		if ( preferredSqlTypeCodeForInstant != SqlTypes.TIMESTAMP_UTC ) {
			// re-map the Instant basic type to the preferred SQL type code
			adaptToPreferredSqlTypeCode(
					typeConfiguration,
					jdbcTypeRegistry,
					preferredSqlTypeCodeForInstant,
					Instant.class,
					StandardBasicTypes.INSTANT.getName(),
					"org.hibernate.type.InstantType",
					"instant"
			);
		}
	}
private static void adaptToPreferredSqlTypeCode(
JdbcTypeRegistry jdbcTypeRegistry,
JdbcType dialectUuidDescriptor,
int defaultSqlTypeCode,
int preferredSqlTypeCode) {
if ( jdbcTypeRegistry.findDescriptor( defaultSqlTypeCode ) == dialectUuidDescriptor ) {
jdbcTypeRegistry.addDescriptor(
defaultSqlTypeCode,
jdbcTypeRegistry.getDescriptor( preferredSqlTypeCode )
);
}
// else warning?
}
private static void adaptToPreferredSqlTypeCode(
TypeConfiguration typeConfiguration,
JdbcTypeRegistry jdbcTypeRegistry,
int preferredSqlTypeCode,
Class<?> javaType,
String name,
String... additionalKeys) {
final var javaTypeRegistry = typeConfiguration.getJavaTypeRegistry();
final var basicTypeRegistry = typeConfiguration.getBasicTypeRegistry();
final BasicType<?> basicType = new NamedBasicTypeImpl<>(
javaTypeRegistry.resolveDescriptor( javaType ),
jdbcTypeRegistry.getDescriptor( preferredSqlTypeCode ),
name
);
final String[] keys = Arrays.copyOf( additionalKeys, additionalKeys.length + 2 );
keys[additionalKeys.length] = javaType.getSimpleName();
keys[additionalKeys.length + 1] = javaType.getName();
basicTypeRegistry.register( basicType, keys );
}
private static void adaptTimeTypesToDefaultTimeZoneStorage(
TypeConfiguration typeConfiguration,
JdbcType timestampWithTimeZoneOverride) {
final var javaTypeRegistry = typeConfiguration.getJavaTypeRegistry();
final var basicTypeRegistry = typeConfiguration.getBasicTypeRegistry();
final BasicType<OffsetTime> offsetDateTimeType = new NamedBasicTypeImpl<>(
javaTypeRegistry.resolveDescriptor( OffsetTime.class ),
timestampWithTimeZoneOverride,
"OffsetTime"
);
basicTypeRegistry.register(
offsetDateTimeType,
"org.hibernate.type.OffsetTimeType",
OffsetTime.class.getSimpleName(),
OffsetTime.class.getName()
);
}
private static void adaptTimestampTypesToDefaultTimeZoneStorage(
TypeConfiguration typeConfiguration,
JdbcType timestampWithTimeZoneOverride) {
final var javaTypeRegistry = typeConfiguration.getJavaTypeRegistry();
final var basicTypeRegistry = typeConfiguration.getBasicTypeRegistry();
final BasicType<OffsetDateTime> offsetDateTimeType = new NamedBasicTypeImpl<>(
javaTypeRegistry.resolveDescriptor( OffsetDateTime.class ),
timestampWithTimeZoneOverride,
"OffsetDateTime"
);
final BasicType<ZonedDateTime> zonedDateTimeType = new NamedBasicTypeImpl<>(
javaTypeRegistry.resolveDescriptor( ZonedDateTime.class ),
timestampWithTimeZoneOverride,
"ZonedDateTime"
);
basicTypeRegistry.register(
offsetDateTimeType,
"org.hibernate.type.OffsetDateTimeType",
OffsetDateTime.class.getSimpleName(),
OffsetDateTime.class.getName()
);
basicTypeRegistry.register(
zonedDateTimeType,
"org.hibernate.type.ZonedDateTimeType",
ZonedDateTime.class.getSimpleName(),
ZonedDateTime.class.getName()
);
}
private static JdbcType getTimeWithTimeZoneOverride(MetadataBuildingOptions options, JdbcTypeRegistry jdbcTypeRegistry) {
return switch ( options.getDefaultTimeZoneStorage() ) {
case NORMALIZE ->
// For NORMALIZE, we replace the standard types that use TIME_WITH_TIMEZONE to use TIME
jdbcTypeRegistry.getDescriptor( Types.TIME );
case NORMALIZE_UTC ->
// For NORMALIZE_UTC, we replace the standard types that use TIME_WITH_TIMEZONE to use TIME_UTC
jdbcTypeRegistry.getDescriptor( SqlTypes.TIME_UTC );
default -> null;
};
}
private static JdbcType getTimestampWithTimeZoneOverride(MetadataBuildingOptions options, JdbcTypeRegistry jdbcTypeRegistry) {
return switch (options.getDefaultTimeZoneStorage()) {
case NORMALIZE ->
// For NORMALIZE, we replace the standard types that use TIMESTAMP_WITH_TIMEZONE to use TIMESTAMP
jdbcTypeRegistry.getDescriptor( Types.TIMESTAMP );
case NORMALIZE_UTC ->
// For NORMALIZE_UTC, we replace the standard types that use TIMESTAMP_WITH_TIMEZONE to use TIMESTAMP_UTC
jdbcTypeRegistry.getDescriptor( SqlTypes.TIMESTAMP_UTC );
default -> null;
};
}
private static void addFallbackIfNecessary(
JdbcTypeRegistry jdbcTypeRegistry,
int typeCode,
int fallbackTypeCode) {
if ( !jdbcTypeRegistry.hasRegisteredDescriptor( typeCode ) ) {
jdbcTypeRegistry.addDescriptor( typeCode, jdbcTypeRegistry.getDescriptor( fallbackTypeCode ) );
}
}
}
|
googleapis/google-cloud-java | 37,933 | java-kms/proto-google-cloud-kms-v1/src/main/java/com/google/cloud/kms/v1/ListKeyHandlesRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/kms/v1/autokey.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.kms.v1;
/**
*
*
* <pre>
* Request message for
* [Autokey.ListKeyHandles][google.cloud.kms.v1.Autokey.ListKeyHandles].
* </pre>
*
* Protobuf type {@code google.cloud.kms.v1.ListKeyHandlesRequest}
*/
public final class ListKeyHandlesRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.kms.v1.ListKeyHandlesRequest)
ListKeyHandlesRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListKeyHandlesRequest.newBuilder() to construct.
  // Builder-based constructor used by newBuilder()/build().
  private ListKeyHandlesRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Default-instance constructor; string fields start at their proto3 default ("").
  private ListKeyHandlesRequest() {
    parent_ = "";
    pageToken_ = "";
    filter_ = "";
  }
  // Used by the protobuf runtime to create new instances reflectively.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListKeyHandlesRequest();
  }
  // Message descriptor, generated from google/cloud/kms/v1/autokey.proto.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.kms.v1.AutokeyProto
        .internal_static_google_cloud_kms_v1_ListKeyHandlesRequest_descriptor;
  }
  // Maps descriptor fields to the generated accessors for reflection support.
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.kms.v1.AutokeyProto
        .internal_static_google_cloud_kms_v1_ListKeyHandlesRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.kms.v1.ListKeyHandlesRequest.class,
            com.google.cloud.kms.v1.ListKeyHandlesRequest.Builder.class);
  }
  public static final int PARENT_FIELD_NUMBER = 1;
  // Holds either a java.lang.String or a ByteString. The first String-typed
  // access decodes the UTF-8 bytes and caches the String back into the field
  // (and vice versa for ByteString access) — standard generated lazy caching.
  @SuppressWarnings("serial")
  private volatile java.lang.Object parent_ = "";
  /**
   *
   *
   * <pre>
   * Required. Name of the resource project and location from which to list
   * [KeyHandles][google.cloud.kms.v1.KeyHandle], e.g.
   * `projects/{PROJECT_ID}/locations/{LOCATION}`.
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The parent.
   */
  @java.lang.Override
  public java.lang.String getParent() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      parent_ = s; // cache the decoded form
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Required. Name of the resource project and location from which to list
   * [KeyHandles][google.cloud.kms.v1.KeyHandle], e.g.
   * `projects/{PROJECT_ID}/locations/{LOCATION}`.
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The bytes for parent.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getParentBytes() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      parent_ = b; // cache the encoded form
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int PAGE_SIZE_FIELD_NUMBER = 2;
  // Proto3 int32 field; 0 is the default and is omitted from the wire format.
  private int pageSize_ = 0;
  /**
   *
   *
   * <pre>
   * Optional. Optional limit on the number of
   * [KeyHandles][google.cloud.kms.v1.KeyHandle] to include in the response. The
   * service may return fewer than this value. Further
   * [KeyHandles][google.cloud.kms.v1.KeyHandle] can subsequently be obtained by
   * including the
   * [ListKeyHandlesResponse.next_page_token][google.cloud.kms.v1.ListKeyHandlesResponse.next_page_token]
   * in a subsequent request. If unspecified, at most 100
   * [KeyHandles][google.cloud.kms.v1.KeyHandle] will be returned.
   * </pre>
   *
   * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The pageSize.
   */
  @java.lang.Override
  public int getPageSize() {
    return pageSize_;
  }
  public static final int PAGE_TOKEN_FIELD_NUMBER = 3;
  // Holds either a java.lang.String or a ByteString; accessors decode/encode
  // lazily and cache the result (standard generated lazy caching).
  @SuppressWarnings("serial")
  private volatile java.lang.Object pageToken_ = "";
  /**
   *
   *
   * <pre>
   * Optional. Optional pagination token, returned earlier via
   * [ListKeyHandlesResponse.next_page_token][google.cloud.kms.v1.ListKeyHandlesResponse.next_page_token].
   * </pre>
   *
   * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The pageToken.
   */
  @java.lang.Override
  public java.lang.String getPageToken() {
    java.lang.Object ref = pageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      pageToken_ = s; // cache the decoded form
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Optional. Optional pagination token, returned earlier via
   * [ListKeyHandlesResponse.next_page_token][google.cloud.kms.v1.ListKeyHandlesResponse.next_page_token].
   * </pre>
   *
   * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The bytes for pageToken.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getPageTokenBytes() {
    java.lang.Object ref = pageToken_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      pageToken_ = b; // cache the encoded form
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int FILTER_FIELD_NUMBER = 4;
  // Holds either a java.lang.String or a ByteString; accessors decode/encode
  // lazily and cache the result (standard generated lazy caching).
  @SuppressWarnings("serial")
  private volatile java.lang.Object filter_ = "";
  /**
   *
   *
   * <pre>
   * Optional. Filter to apply when listing
   * [KeyHandles][google.cloud.kms.v1.KeyHandle], e.g.
   * `resource_type_selector="{SERVICE}.googleapis.com/{TYPE}"`.
   * </pre>
   *
   * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The filter.
   */
  @java.lang.Override
  public java.lang.String getFilter() {
    java.lang.Object ref = filter_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      filter_ = s; // cache the decoded form
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Optional. Filter to apply when listing
   * [KeyHandles][google.cloud.kms.v1.KeyHandle], e.g.
   * `resource_type_selector="{SERVICE}.googleapis.com/{TYPE}"`.
   * </pre>
   *
   * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The bytes for filter.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getFilterBytes() {
    java.lang.Object ref = filter_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      filter_ = b; // cache the encoded form
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Memoized initialization check: -1 = not computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  // This message has no required fields, so it is always initialized once checked.
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes fields in ascending field-number order (1..4), skipping fields
  // at their default value, then appends any unknown fields — the canonical
  // proto3 wire format produced by the generator.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    if (pageSize_ != 0) {
      output.writeInt32(2, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 4, filter_);
    }
    getUnknownFields().writeTo(output);
  }
  // Computes the serialized byte size using the same skip-defaults rules as
  // writeTo(), and caches the result in memoizedSize (-1 means "not computed").
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (pageSize_ != 0) {
      size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, filter_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Field-by-field value equality over all four fields plus unknown fields,
  // as generated; consistent with hashCode() below.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.kms.v1.ListKeyHandlesRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.kms.v1.ListKeyHandlesRequest other =
        (com.google.cloud.kms.v1.ListKeyHandlesRequest) obj;
    if (!getParent().equals(other.getParent())) return false;
    if (getPageSize() != other.getPageSize()) return false;
    if (!getPageToken().equals(other.getPageToken())) return false;
    if (!getFilter().equals(other.getFilter())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Memoized hash over the descriptor and each field, mixing with the
  // generator's fixed multipliers (37/53 per field, 19/29 at the ends).
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
    hash = (53 * hash) + getPageSize();
    hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getPageToken().hashCode();
    hash = (37 * hash) + FILTER_FIELD_NUMBER;
    hash = (53 * hash) + getFilter().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Generated static parse entry points: one overload per input source
  // (ByteBuffer, ByteString, byte[], InputStream, CodedInputStream), each with
  // and without an ExtensionRegistry; all delegate to PARSER.
  public static com.google.cloud.kms.v1.ListKeyHandlesRequest parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.kms.v1.ListKeyHandlesRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.kms.v1.ListKeyHandlesRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.kms.v1.ListKeyHandlesRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.kms.v1.ListKeyHandlesRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.kms.v1.ListKeyHandlesRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.kms.v1.ListKeyHandlesRequest parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.kms.v1.ListKeyHandlesRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Delimited variants read a varint length prefix before the message bytes.
  public static com.google.cloud.kms.v1.ListKeyHandlesRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.kms.v1.ListKeyHandlesRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.kms.v1.ListKeyHandlesRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.kms.v1.ListKeyHandlesRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Builder factory methods: a fresh builder is a copy of DEFAULT_INSTANCE;
  // toBuilder() avoids a redundant mergeFrom when called on the default.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.cloud.kms.v1.ListKeyHandlesRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Request message for
   * [Autokey.ListKeyHandles][google.cloud.kms.v1.Autokey.ListKeyHandles].
   * </pre>
   *
   * Protobuf type {@code google.cloud.kms.v1.ListKeyHandlesRequest}
   */
  // Generated mutable builder. bitField0_ tracks which fields were explicitly
  // set (bit 0 = parent, 1 = page_size, 2 = page_token, 3 = filter) so that
  // buildPartial0() only copies fields that were touched.
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.kms.v1.ListKeyHandlesRequest)
      com.google.cloud.kms.v1.ListKeyHandlesRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.kms.v1.AutokeyProto
          .internal_static_google_cloud_kms_v1_ListKeyHandlesRequest_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.kms.v1.AutokeyProto
          .internal_static_google_cloud_kms_v1_ListKeyHandlesRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.kms.v1.ListKeyHandlesRequest.class,
              com.google.cloud.kms.v1.ListKeyHandlesRequest.Builder.class);
    }
    // Construct using com.google.cloud.kms.v1.ListKeyHandlesRequest.newBuilder()
    private Builder() {}
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      parent_ = "";
      pageSize_ = 0;
      pageToken_ = "";
      filter_ = "";
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.kms.v1.AutokeyProto
          .internal_static_google_cloud_kms_v1_ListKeyHandlesRequest_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.kms.v1.ListKeyHandlesRequest getDefaultInstanceForType() {
      return com.google.cloud.kms.v1.ListKeyHandlesRequest.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.kms.v1.ListKeyHandlesRequest build() {
      com.google.cloud.kms.v1.ListKeyHandlesRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.kms.v1.ListKeyHandlesRequest buildPartial() {
      com.google.cloud.kms.v1.ListKeyHandlesRequest result =
          new com.google.cloud.kms.v1.ListKeyHandlesRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Copies only the fields whose presence bits are set into the message.
    private void buildPartial0(com.google.cloud.kms.v1.ListKeyHandlesRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.parent_ = parent_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.pageSize_ = pageSize_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.pageToken_ = pageToken_;
      }
      if (((from_bitField0_ & 0x00000008) != 0)) {
        result.filter_ = filter_;
      }
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.kms.v1.ListKeyHandlesRequest) {
        return mergeFrom((com.google.cloud.kms.v1.ListKeyHandlesRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    public Builder mergeFrom(com.google.cloud.kms.v1.ListKeyHandlesRequest other) {
      if (other == com.google.cloud.kms.v1.ListKeyHandlesRequest.getDefaultInstance()) return this;
      if (!other.getParent().isEmpty()) {
        parent_ = other.parent_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (other.getPageSize() != 0) {
        setPageSize(other.getPageSize());
      }
      if (!other.getPageToken().isEmpty()) {
        pageToken_ = other.pageToken_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      if (!other.getFilter().isEmpty()) {
        filter_ = other.filter_;
        bitField0_ |= 0x00000008;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Wire-format parse loop: each case value is the field's tag
    // (field number << 3 | wire type); tag 0 or an end-group terminates.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                parent_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 16:
              {
                pageSize_ = input.readInt32();
                bitField0_ |= 0x00000002;
                break;
              } // case 16
            case 26:
              {
                pageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            case 34:
              {
                filter_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000008;
                break;
              } // case 34
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    private int bitField0_;
    private java.lang.Object parent_ = "";
    /**
     *
     *
     * <pre>
     * Required. Name of the resource project and location from which to list
     * [KeyHandles][google.cloud.kms.v1.KeyHandle], e.g.
     * `projects/{PROJECT_ID}/locations/{LOCATION}`.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The parent.
     */
    public java.lang.String getParent() {
      java.lang.Object ref = parent_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        parent_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. Name of the resource project and location from which to list
     * [KeyHandles][google.cloud.kms.v1.KeyHandle], e.g.
     * `projects/{PROJECT_ID}/locations/{LOCATION}`.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The bytes for parent.
     */
    public com.google.protobuf.ByteString getParentBytes() {
      java.lang.Object ref = parent_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        parent_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. Name of the resource project and location from which to list
     * [KeyHandles][google.cloud.kms.v1.KeyHandle], e.g.
     * `projects/{PROJECT_ID}/locations/{LOCATION}`.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The parent to set.
     * @return This builder for chaining.
     */
    public Builder setParent(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Name of the resource project and location from which to list
     * [KeyHandles][google.cloud.kms.v1.KeyHandle], e.g.
     * `projects/{PROJECT_ID}/locations/{LOCATION}`.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearParent() {
      parent_ = getDefaultInstance().getParent();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Name of the resource project and location from which to list
     * [KeyHandles][google.cloud.kms.v1.KeyHandle], e.g.
     * `projects/{PROJECT_ID}/locations/{LOCATION}`.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The bytes for parent to set.
     * @return This builder for chaining.
     */
    public Builder setParentBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    private int pageSize_;
    /**
     *
     *
     * <pre>
     * Optional. Optional limit on the number of
     * [KeyHandles][google.cloud.kms.v1.KeyHandle] to include in the response. The
     * service may return fewer than this value. Further
     * [KeyHandles][google.cloud.kms.v1.KeyHandle] can subsequently be obtained by
     * including the
     * [ListKeyHandlesResponse.next_page_token][google.cloud.kms.v1.ListKeyHandlesResponse.next_page_token]
     * in a subsequent request. If unspecified, at most 100
     * [KeyHandles][google.cloud.kms.v1.KeyHandle] will be returned.
     * </pre>
     *
     * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The pageSize.
     */
    @java.lang.Override
    public int getPageSize() {
      return pageSize_;
    }
    /**
     *
     *
     * <pre>
     * Optional. Optional limit on the number of
     * [KeyHandles][google.cloud.kms.v1.KeyHandle] to include in the response. The
     * service may return fewer than this value. Further
     * [KeyHandles][google.cloud.kms.v1.KeyHandle] can subsequently be obtained by
     * including the
     * [ListKeyHandlesResponse.next_page_token][google.cloud.kms.v1.ListKeyHandlesResponse.next_page_token]
     * in a subsequent request. If unspecified, at most 100
     * [KeyHandles][google.cloud.kms.v1.KeyHandle] will be returned.
     * </pre>
     *
     * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The pageSize to set.
     * @return This builder for chaining.
     */
    public Builder setPageSize(int value) {
      pageSize_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Optional limit on the number of
     * [KeyHandles][google.cloud.kms.v1.KeyHandle] to include in the response. The
     * service may return fewer than this value. Further
     * [KeyHandles][google.cloud.kms.v1.KeyHandle] can subsequently be obtained by
     * including the
     * [ListKeyHandlesResponse.next_page_token][google.cloud.kms.v1.ListKeyHandlesResponse.next_page_token]
     * in a subsequent request. If unspecified, at most 100
     * [KeyHandles][google.cloud.kms.v1.KeyHandle] will be returned.
     * </pre>
     *
     * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearPageSize() {
      bitField0_ = (bitField0_ & ~0x00000002);
      pageSize_ = 0;
      onChanged();
      return this;
    }
    private java.lang.Object pageToken_ = "";
    /**
     *
     *
     * <pre>
     * Optional. Optional pagination token, returned earlier via
     * [ListKeyHandlesResponse.next_page_token][google.cloud.kms.v1.ListKeyHandlesResponse.next_page_token].
     * </pre>
     *
     * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The pageToken.
     */
    public java.lang.String getPageToken() {
      java.lang.Object ref = pageToken_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        pageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. Optional pagination token, returned earlier via
     * [ListKeyHandlesResponse.next_page_token][google.cloud.kms.v1.ListKeyHandlesResponse.next_page_token].
     * </pre>
     *
     * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The bytes for pageToken.
     */
    public com.google.protobuf.ByteString getPageTokenBytes() {
      java.lang.Object ref = pageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        pageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. Optional pagination token, returned earlier via
     * [ListKeyHandlesResponse.next_page_token][google.cloud.kms.v1.ListKeyHandlesResponse.next_page_token].
     * </pre>
     *
     * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The pageToken to set.
     * @return This builder for chaining.
     */
    public Builder setPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      pageToken_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Optional pagination token, returned earlier via
     * [ListKeyHandlesResponse.next_page_token][google.cloud.kms.v1.ListKeyHandlesResponse.next_page_token].
     * </pre>
     *
     * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearPageToken() {
      pageToken_ = getDefaultInstance().getPageToken();
      bitField0_ = (bitField0_ & ~0x00000004);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Optional pagination token, returned earlier via
     * [ListKeyHandlesResponse.next_page_token][google.cloud.kms.v1.ListKeyHandlesResponse.next_page_token].
     * </pre>
     *
     * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The bytes for pageToken to set.
     * @return This builder for chaining.
     */
    public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      pageToken_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    private java.lang.Object filter_ = "";
    /**
     *
     *
     * <pre>
     * Optional. Filter to apply when listing
     * [KeyHandles][google.cloud.kms.v1.KeyHandle], e.g.
     * `resource_type_selector="{SERVICE}.googleapis.com/{TYPE}"`.
     * </pre>
     *
     * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The filter.
     */
    public java.lang.String getFilter() {
      java.lang.Object ref = filter_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        filter_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. Filter to apply when listing
     * [KeyHandles][google.cloud.kms.v1.KeyHandle], e.g.
     * `resource_type_selector="{SERVICE}.googleapis.com/{TYPE}"`.
     * </pre>
     *
     * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The bytes for filter.
     */
    public com.google.protobuf.ByteString getFilterBytes() {
      java.lang.Object ref = filter_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        filter_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. Filter to apply when listing
     * [KeyHandles][google.cloud.kms.v1.KeyHandle], e.g.
     * `resource_type_selector="{SERVICE}.googleapis.com/{TYPE}"`.
     * </pre>
     *
     * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The filter to set.
     * @return This builder for chaining.
     */
    public Builder setFilter(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      filter_ = value;
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Filter to apply when listing
     * [KeyHandles][google.cloud.kms.v1.KeyHandle], e.g.
     * `resource_type_selector="{SERVICE}.googleapis.com/{TYPE}"`.
     * </pre>
     *
     * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearFilter() {
      filter_ = getDefaultInstance().getFilter();
      bitField0_ = (bitField0_ & ~0x00000008);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Filter to apply when listing
     * [KeyHandles][google.cloud.kms.v1.KeyHandle], e.g.
     * `resource_type_selector="{SERVICE}.googleapis.com/{TYPE}"`.
     * </pre>
     *
     * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The bytes for filter to set.
     * @return This builder for chaining.
     */
    public Builder setFilterBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      filter_ = value;
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.cloud.kms.v1.ListKeyHandlesRequest)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.kms.v1.ListKeyHandlesRequest)
  // Singleton default instance and the parser used by all parseFrom overloads;
  // parsePartialFrom preserves partial results on failure via
  // setUnfinishedMessage so callers can inspect what was decoded.
  private static final com.google.cloud.kms.v1.ListKeyHandlesRequest DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.kms.v1.ListKeyHandlesRequest();
  }
  public static com.google.cloud.kms.v1.ListKeyHandlesRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  private static final com.google.protobuf.Parser<ListKeyHandlesRequest> PARSER =
      new com.google.protobuf.AbstractParser<ListKeyHandlesRequest>() {
        @java.lang.Override
        public ListKeyHandlesRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<ListKeyHandlesRequest> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<ListKeyHandlesRequest> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.kms.v1.ListKeyHandlesRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
// ==== NOTE(review): concatenation artifact — the remaining content belongs to a
// different source file: apache/hadoop,
// hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogFormat.java ====
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.logaggregation;
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.DataOutput;
import java.io.DataOutputStream;
import java.io.EOFException;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.PrintStream;
import java.io.Writer;
import java.nio.charset.StandardCharsets;
import java.security.PrivilegedExceptionAction;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.regex.Pattern;
import org.apache.commons.io.input.BoundedInputStream;
import org.apache.commons.io.output.WriterOutputStream;
import org.apache.commons.math3.util.Pair;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceAudience.Public;
import org.apache.hadoop.classification.InterfaceStability.Evolving;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CreateFlag;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.Options;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hdfs.protocol.DSQuotaExceededException;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.SecureIOUtils;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.file.tfile.TFile;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.Sets;
import org.apache.hadoop.yarn.api.records.ApplicationAccessType;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.LogAggregationContext;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
import org.apache.hadoop.yarn.util.Times;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.classification.VisibleForTesting;
import org.apache.hadoop.thirdparty.com.google.common.collect.Iterables;
import static org.apache.hadoop.fs.Options.OpenFileOptions.FS_OPTION_OPENFILE_READ_POLICY;
import static org.apache.hadoop.fs.Options.OpenFileOptions.FS_OPTION_OPENFILE_READ_POLICY_SEQUENTIAL;
import static org.apache.hadoop.fs.Options.OpenFileOptions.FS_OPTION_OPENFILE_LENGTH;
import static org.apache.hadoop.util.functional.FutureIO.awaitFuture;
@Public
@Evolving
public class AggregatedLogFormat {
  private final static Logger LOG = LoggerFactory.getLogger(
      AggregatedLogFormat.class);
  // Reserved keys stored alongside per-container log entries in the TFile.
  private static final LogKey APPLICATION_ACL_KEY = new LogKey("APPLICATION_ACL");
  private static final LogKey APPLICATION_OWNER_KEY = new LogKey("APPLICATION_OWNER");
  private static final LogKey VERSION_KEY = new LogKey("VERSION");
  private static final Map<String, LogKey> RESERVED_KEYS;
  //Maybe write out the retention policy.
  //Maybe write out a list of containerLogs skipped by the retention policy.
  private static final int VERSION = 1;
  /**
   * Umask for the log file.
   */
  // XOR with 0777 yields the complement of the desired 0640 permissions,
  // i.e. the bits to mask OFF when creating the aggregated log file.
  private static final FsPermission APP_LOG_FILE_UMASK = FsPermission
      .createImmutable((short) (0640 ^ 0777));
  static {
    // Map reserved key strings back to their LogKey instances so readers can
    // recognize and skip the metadata entries.
    RESERVED_KEYS = new HashMap<String, AggregatedLogFormat.LogKey>();
    RESERVED_KEYS.put(APPLICATION_ACL_KEY.toString(), APPLICATION_ACL_KEY);
    RESERVED_KEYS.put(APPLICATION_OWNER_KEY.toString(), APPLICATION_OWNER_KEY);
    RESERVED_KEYS.put(VERSION_KEY.toString(), VERSION_KEY);
  }
@Public
public static class LogKey implements Writable {
private String keyString;
public LogKey() {
}
public LogKey(ContainerId containerId) {
this.keyString = containerId.toString();
}
public LogKey(String keyString) {
this.keyString = keyString;
}
@Override
public int hashCode() {
return keyString == null ? 0 : keyString.hashCode();
}
@Override
public boolean equals(Object obj) {
if (obj instanceof LogKey) {
LogKey other = (LogKey) obj;
if (this.keyString == null) {
return other.keyString == null;
}
return this.keyString.equals(other.keyString);
}
return false;
}
@Private
@Override
public void write(DataOutput out) throws IOException {
out.writeUTF(this.keyString);
}
@Private
@Override
public void readFields(DataInput in) throws IOException {
this.keyString = in.readUTF();
}
@Override
public String toString() {
return this.keyString;
}
}
@Private
public static class LogValue {
    // NodeManager-local log root directories scanned for this container's logs.
    private final List<String> rootLogDirs;
    // Container whose logs this value aggregates.
    private final ContainerId containerId;
    // Owner of the logs; used for secure file opening.
    private final String user;
    // App-supplied include/exclude patterns for which logs to aggregate
    // (may be null — TODO confirm nullability against callers).
    private final LogAggregationContext logAggregationContext;
    // Files successfully written out during this upload pass.
    private Set<File> uploadedFiles = new HashSet<File>();
    // Files uploaded by a previous pass, to avoid duplicating them.
    private final Set<String> alreadyUploadedLogFiles;
    // Metadata of every log file seen, uploaded or not.
    private Set<String> allExistingFileMeta = new HashSet<String>();
    // Whether the application / container has finished, which can affect
    // which logs are eligible for aggregation.
    private final boolean appFinished;
    private final boolean containerFinished;
    /**
     * The retention context to determine if log files are older than
     * the retention policy configured.
     */
    private final LogRetentionContext logRetentionContext;
    /**
     * The set of log files that are older than retention policy that will
     * not be uploaded but ready for deletion.
     */
    private final Set<File> obsoleteRetentionLogFiles = new HashSet<File>();
    // TODO Maybe add a version string here. Instead of changing the version of
    // the entire k-v format
    /**
     * Convenience constructor: no aggregation context, nothing previously
     * uploaded, no retention policy, and both app and container treated as
     * finished.
     */
    public LogValue(List<String> rootLogDirs, ContainerId containerId,
        String user) {
      this(rootLogDirs, containerId, user, null, new HashSet<String>(),
          null, true, true);
    }
    /**
     * Full constructor. Takes a defensive copy of {@code rootLogDirs} and
     * sorts it so logs are always processed in a deterministic order.
     */
    public LogValue(List<String> rootLogDirs, ContainerId containerId,
        String user, LogAggregationContext logAggregationContext,
        Set<String> alreadyUploadedLogFiles,
        LogRetentionContext retentionContext, boolean appFinished,
        boolean containerFinished) {
      this.rootLogDirs = new ArrayList<String>(rootLogDirs);
      this.containerId = containerId;
      this.user = user;
      // Ensure logs are processed in lexical order
      Collections.sort(this.rootLogDirs);
      this.logAggregationContext = logAggregationContext;
      this.alreadyUploadedLogFiles = alreadyUploadedLogFiles;
      this.appFinished = appFinished;
      this.containerFinished = containerFinished;
      this.logRetentionContext = retentionContext;
    }
@VisibleForTesting
public Set<File> getPendingLogFilesToUploadForThisContainer() {
Set<File> pendingUploadFiles = new HashSet<File>();
for (String rootLogDir : this.rootLogDirs) {
File appLogDir = new File(rootLogDir,
this.containerId.getApplicationAttemptId().
getApplicationId().toString());
File containerLogDir =
new File(appLogDir, this.containerId.toString());
if (!containerLogDir.isDirectory()) {
continue; // ContainerDir may have been deleted by the user.
}
pendingUploadFiles
.addAll(getPendingLogFilesToUpload(containerLogDir));
}
return pendingUploadFiles;
}
public void write(DataOutputStream out, Set<File> pendingUploadFiles)
throws IOException {
List<File> fileList = new ArrayList<File>(pendingUploadFiles);
Collections.sort(fileList);
for (File logFile : fileList) {
// We only aggregate top level files.
// Ignore anything inside sub-folders.
if (logFile.isDirectory()) {
LOG.warn(logFile.getAbsolutePath() + " is a directory. Ignore it.");
continue;
}
FileInputStream in = null;
try {
in = secureOpenFile(logFile);
} catch (IOException e) {
logErrorMessage(logFile, e);
IOUtils.cleanupWithLogger(LOG, in);
continue;
}
final long fileLength = logFile.length();
// Write the logFile Type
out.writeUTF(logFile.getName());
// Write the log length as UTF so that it is printable
out.writeUTF(String.valueOf(fileLength));
// Write the log itself
try {
byte[] buf = new byte[65535];
int len = 0;
long bytesLeft = fileLength;
while ((len = in.read(buf)) != -1) {
//If buffer contents within fileLength, write
if (len < bytesLeft) {
out.write(buf, 0, len);
bytesLeft-=len;
}
//else only write contents within fileLength, then exit early
else {
out.write(buf, 0, (int)bytesLeft);
break;
}
}
long newLength = logFile.length();
if(fileLength < newLength) {
LOG.warn("Aggregated logs truncated by approximately "+
(newLength-fileLength) +" bytes.");
}
this.uploadedFiles.add(logFile);
} catch (IOException e) {
String message = logErrorMessage(logFile, e);
out.write(message.getBytes(StandardCharsets.UTF_8));
} finally {
IOUtils.cleanupWithLogger(LOG, in);
}
}
}
@VisibleForTesting
public FileInputStream secureOpenFile(File logFile) throws IOException {
return SecureIOUtils.openForRead(logFile, getUser(), null);
}
private static String logErrorMessage(File logFile, Exception e) {
String message = "Error aggregating log file. Log file : "
+ logFile.getAbsolutePath() + ". " + e.getMessage();
LOG.error(message, e);
return message;
}
// Added for testing purpose.
public String getUser() {
return user;
}
private Set<File> getPendingLogFilesToUpload(File containerLogDir) {
if(containerLogDir == null) {
return new HashSet<>(0);
}
File[] filesList = containerLogDir.listFiles();
if (filesList == null) {
return new HashSet<>(0);
}
Set<File> candidates =
new HashSet<File>(Arrays.asList(filesList));
for (File logFile : candidates) {
this.allExistingFileMeta.add(getLogFileMetaData(logFile));
}
// if log files are older than retention policy, do not upload them.
// but schedule them for deletion.
if(logRetentionContext != null && !logRetentionContext.shouldRetainLog()){
obsoleteRetentionLogFiles.addAll(candidates);
candidates.clear();
return candidates;
}
Set<File> fileCandidates = new HashSet<File>(candidates);
if (this.logAggregationContext != null && candidates.size() > 0) {
fileCandidates = getFileCandidates(fileCandidates, this.appFinished);
if (!this.appFinished && this.containerFinished) {
Set<File> addition = new HashSet<File>(candidates);
addition = getFileCandidates(addition, true);
fileCandidates.addAll(addition);
}
}
return fileCandidates;
}
private Set<File> getFileCandidates(Set<File> candidates,
boolean useRegularPattern) {
filterFiles(
useRegularPattern ? this.logAggregationContext.getIncludePattern()
: this.logAggregationContext.getRolledLogsIncludePattern(),
candidates, false);
filterFiles(
useRegularPattern ? this.logAggregationContext.getExcludePattern()
: this.logAggregationContext.getRolledLogsExcludePattern(),
candidates, true);
Iterable<File> mask = Iterables.filter(candidates, (input) ->
!alreadyUploadedLogFiles
.contains(getLogFileMetaData(input)));
return Sets.newHashSet(mask);
}
private void filterFiles(String pattern, Set<File> candidates,
boolean exclusion) {
if (pattern != null && !pattern.isEmpty()) {
Pattern filterPattern = Pattern.compile(pattern);
for (Iterator<File> candidatesItr = candidates.iterator(); candidatesItr
.hasNext();) {
File candidate = candidatesItr.next();
boolean match = filterPattern.matcher(candidate.getName()).find();
if ((!match && !exclusion) || (match && exclusion)) {
candidatesItr.remove();
}
}
}
}
public Set<Path> getCurrentUpLoadedFilesPath() {
Set<Path> path = new HashSet<Path>();
for (File file : this.uploadedFiles) {
path.add(new Path(file.getAbsolutePath()));
}
return path;
}
public Set<String> getCurrentUpLoadedFileMeta() {
Set<String> info = new HashSet<String>();
for (File file : this.uploadedFiles) {
info.add(getLogFileMetaData(file));
}
return info;
}
public Set<Path> getObsoleteRetentionLogFiles() {
Set<Path> path = new HashSet<Path>();
for(File file: this.obsoleteRetentionLogFiles) {
path.add(new Path(file.getAbsolutePath()));
}
return path;
}
public Set<String> getAllExistingFilesMeta() {
return this.allExistingFileMeta;
}
private String getLogFileMetaData(File file) {
return containerId.toString() + "_" + file.getName() + "_"
+ file.lastModified();
}
}
/**
* A context for log retention to determine if files are older than
* the retention policy configured in YarnConfiguration.
*/
public static class LogRetentionContext {
/**
* The time used with logRetentionMillis, to determine ages of
* log files and if files need to be uploaded.
*/
private final long logInitedTimeMillis;
/**
* The numbers of milli seconds since a log file is created to determine
* if we should upload it. -1 if disabled.
* see YarnConfiguration.LOG_AGGREGATION_RETAIN_SECONDS for details.
*/
private final long logRetentionMillis;
public LogRetentionContext(long logInitedTimeMillis, long
logRetentionMillis) {
this.logInitedTimeMillis = logInitedTimeMillis;
this.logRetentionMillis = logRetentionMillis;
}
public boolean isDisabled() {
return logInitedTimeMillis < 0 || logRetentionMillis < 0;
}
public boolean shouldRetainLog() {
return isDisabled() ||
System.currentTimeMillis() - logInitedTimeMillis < logRetentionMillis;
}
}
/**
* The writer that writes out the aggregated logs.
*/
  @Private
  public static class LogWriter implements AutoCloseable {
    // Underlying HDFS output stream; created lazily in initialize().
    private FSDataOutputStream fsDataOStream;
    // TFile writer layered on top of fsDataOStream.
    private TFile.Writer writer;
    private FileContext fc;
    /**
     * Initialize the LogWriter.
     * Must be called just after the instance is created.
     * @param conf Configuration
     * @param remoteAppLogFile remote log file path
     * @param userUgi Ugi of the user
     * @throws IOException Failed to initialize
     */
    public void initialize(final Configuration conf,
        final Path remoteAppLogFile,
        UserGroupInformation userUgi) throws IOException {
      try {
        // Create the remote file as the application user, not the NM user.
        this.fsDataOStream =
            userUgi.doAs(new PrivilegedExceptionAction<FSDataOutputStream>() {
              @Override
              public FSDataOutputStream run() throws Exception {
                fc = FileContext.getFileContext(remoteAppLogFile.toUri(), conf);
                fc.setUMask(APP_LOG_FILE_UMASK);
                return fc.create(
                    remoteAppLogFile,
                    EnumSet.of(CreateFlag.CREATE, CreateFlag.OVERWRITE),
                    new Options.CreateOpts[] {});
              }
            });
      } catch (InterruptedException e) {
        throw new IOException(e);
      }
      // Keys are not sorted: null arg
      // 256KB minBlockSize : Expected log size for each container too
      this.writer =
          new TFile.Writer(this.fsDataOStream, 256 * 1024, conf.get(
              YarnConfiguration.NM_LOG_AGG_COMPRESSION_TYPE,
              YarnConfiguration.DEFAULT_NM_LOG_AGG_COMPRESSION_TYPE), null, conf);
      //Write the version string
      writeVersion();
    }
    @VisibleForTesting
    public TFile.Writer getWriter() {
      return this.writer;
    }
    // Appends the reserved (VERSION_KEY, VERSION) meta entry.
    private void writeVersion() throws IOException {
      try (DataOutputStream out = this.writer.prepareAppendKey(-1)) {
        VERSION_KEY.write(out);
      }
      try (DataOutputStream out = this.writer.prepareAppendValue(-1)) {
        out.writeInt(VERSION);
      }
    }
    /** Appends the reserved (APPLICATION_OWNER_KEY, user) meta entry. */
    public void writeApplicationOwner(String user) throws IOException {
      try (DataOutputStream out = this.writer.prepareAppendKey(-1)) {
        APPLICATION_OWNER_KEY.write(out);
      }
      try (DataOutputStream out = this.writer.prepareAppendValue(-1)) {
        out.writeUTF(user);
      }
    }
    /**
     * Appends the reserved ACL meta entry: alternating (accessType, acl)
     * UTF pairs; the reader consumes pairs until EOF of the value.
     */
    public void writeApplicationACLs(Map<ApplicationAccessType, String> appAcls)
        throws IOException {
      try (DataOutputStream out = this.writer.prepareAppendKey(-1)) {
        APPLICATION_ACL_KEY.write(out);
      }
      try (DataOutputStream out = this.writer.prepareAppendValue(-1)) {
        for (Entry<ApplicationAccessType, String> entry : appAcls.entrySet()) {
          out.writeUTF(entry.getKey().toString());
          out.writeUTF(entry.getValue());
        }
      }
    }
    /**
     * Appends one (key, container logs) entry; a no-op when the container
     * has nothing pending to upload.
     */
    public void append(LogKey logKey, LogValue logValue) throws IOException {
      Set<File> pendingUploadFiles =
          logValue.getPendingLogFilesToUploadForThisContainer();
      if (pendingUploadFiles.size() == 0) {
        return;
      }
      try (DataOutputStream out = this.writer.prepareAppendKey(-1)) {
        logKey.write(out);
      }
      try (DataOutputStream out = this.writer.prepareAppendValue(-1)) {
        logValue.write(out, pendingUploadFiles);
      }
    }
    /**
     * Closes writer then stream; only a quota violation on the stream is
     * propagated, every other close failure is logged and swallowed.
     * NOTE(review): assumes initialize() succeeded — fsDataOStream is
     * dereferenced unconditionally here; confirm callers never close an
     * uninitialized writer.
     */
    @Override
    public void close() throws DSQuotaExceededException {
      try {
        if (writer != null) {
          writer.close();
        }
      } catch (Exception e) {
        LOG.warn("Exception closing writer", e);
      } finally {
        try {
          this.fsDataOStream.close();
        } catch (DSQuotaExceededException e) {
          LOG.error("Exception in closing {}",
              this.fsDataOStream.getClass(), e);
          throw e;
        } catch (Throwable e) {
          LOG.error("Exception in closing {}",
              this.fsDataOStream.getClass(), e);
        }
      }
    }
  }
  /**
   * Reads an aggregated log file produced by {@link LogWriter}: a TFile of
   * (LogKey, container log bytes) entries plus reserved meta entries
   * (version, application owner, application ACLs).
   */
  @Public
  @Evolving
  public static class LogReader implements AutoCloseable {
    private final FSDataInputStream fsDataIStream;
    private final TFile.Reader.Scanner scanner;
    private final TFile.Reader reader;
    public LogReader(Configuration conf, Path remoteAppLogFile)
        throws IOException {
      try {
        FileContext fileContext =
            FileContext.getFileContext(remoteAppLogFile.toUri(), conf);
        FileStatus status = fileContext.getFileStatus(remoteAppLogFile);
        this.fsDataIStream = awaitFuture(
            fileContext.openFile(remoteAppLogFile)
                .opt(FS_OPTION_OPENFILE_READ_POLICY,
                    FS_OPTION_OPENFILE_READ_POLICY_SEQUENTIAL)
                .optLong(FS_OPTION_OPENFILE_LENGTH,
                    status.getLen()) // file length hint for object stores
                .build());
        reader = new TFile.Reader(this.fsDataIStream,
            status.getLen(), conf);
        this.scanner = reader.createScanner();
      } catch (IOException ioe) {
        // Release whatever was opened before the failure; close() tolerates
        // null fields.
        close();
        throw new IOException("Error in creating LogReader", ioe);
      }
    }
    // True until next() has been called once; the first call must not
    // advance past the scanner's initial entry.
    private boolean atBeginning = true;
    /**
     * Returns the owner of the application.
     *
     * @return the application owner.
     * @throws IOException if we can not get the application owner.
     */
    public String getApplicationOwner() throws IOException {
      TFile.Reader.Scanner ownerScanner = null;
      try {
        // Fresh scanner so the main scanner's position is untouched.
        ownerScanner = reader.createScanner();
        LogKey key = new LogKey();
        while (!ownerScanner.atEnd()) {
          TFile.Reader.Scanner.Entry entry = ownerScanner.entry();
          key.readFields(entry.getKeyStream());
          if (key.toString().equals(APPLICATION_OWNER_KEY.toString())) {
            DataInputStream valueStream = entry.getValueStream();
            return valueStream.readUTF();
          }
          ownerScanner.advance();
        }
        return null;
      } finally {
        IOUtils.cleanupWithLogger(LOG, ownerScanner);
      }
    }
    /**
     * Returns ACLs for the application. An empty map is returned if no ACLs are
     * found.
     *
     * @return a map of the Application ACLs.
     * @throws IOException if we can not get the application acls.
     */
    public Map<ApplicationAccessType, String> getApplicationAcls()
        throws IOException {
      // TODO Seek directly to the key once a comparator is specified.
      TFile.Reader.Scanner aclScanner = null;
      try {
        aclScanner = reader.createScanner();
        LogKey key = new LogKey();
        Map<ApplicationAccessType, String> acls =
            new HashMap<ApplicationAccessType, String>();
        while (!aclScanner.atEnd()) {
          TFile.Reader.Scanner.Entry entry = aclScanner.entry();
          key.readFields(entry.getKeyStream());
          if (key.toString().equals(APPLICATION_ACL_KEY.toString())) {
            DataInputStream valueStream = entry.getValueStream();
            // The value is a run of (accessType, acl) UTF pairs terminated
            // by end-of-stream; EOF before an accessType is the normal end,
            // EOF between the two halves of a pair is corruption.
            while (true) {
              String appAccessOp = null;
              String aclString = null;
              try {
                appAccessOp = valueStream.readUTF();
              } catch (EOFException e) {
                // Valid end of stream.
                break;
              }
              try {
                aclString = valueStream.readUTF();
              } catch (EOFException e) {
                throw new YarnRuntimeException("Error reading ACLs", e);
              }
              acls.put(ApplicationAccessType.valueOf(appAccessOp), aclString);
            }
          }
          aclScanner.advance();
        }
        return acls;
      } finally {
        IOUtils.cleanupWithLogger(LOG, aclScanner);
      }
    }
    /**
     * Read the next key and return the value-stream.
     *
     * @param key the log key
     * @return the valueStream if there are more keys or null otherwise
     * @throws IOException if we can not get the dataInputStream
     * for the next key
     */
    public DataInputStream next(LogKey key) throws IOException {
      if (!this.atBeginning) {
        this.scanner.advance();
      } else {
        this.atBeginning = false;
      }
      if (this.scanner.atEnd()) {
        return null;
      }
      TFile.Reader.Scanner.Entry entry = this.scanner.entry();
      key.readFields(entry.getKeyStream());
      // Skip META keys
      if (RESERVED_KEYS.containsKey(key.toString())) {
        // Recursion depth is bounded by the number of reserved entries.
        return next(key);
      }
      DataInputStream valueStream = entry.getValueStream();
      return valueStream;
    }
    /**
     * Get a ContainerLogsReader to read the logs for
     * the specified container.
     *
     * @param containerId the containerId
     * @return object to read the container's logs or null if the
     * logs could not be found
     * @throws IOException if we can not get the container log reader.
     */
    @Private
    public ContainerLogsReader getContainerLogsReader(
        ContainerId containerId) throws IOException {
      ContainerLogsReader logReader = null;
      final LogKey containerKey = new LogKey(containerId);
      LogKey key = new LogKey();
      DataInputStream valueStream = next(key);
      // Linear scan: keys are unsorted in the TFile (see LogWriter).
      while (valueStream != null && !key.equals(containerKey)) {
        valueStream = next(key);
      }
      if (valueStream != null) {
        logReader = new ContainerLogsReader(valueStream);
      }
      return logReader;
    }
    //TODO  Change Log format and interfaces to be containerId specific.
    // Avoid returning completeValueStreams.
    //    public List<String> getTypesForContainer(DataInputStream valueStream){}
    //    
    //    /**
    //     * @param valueStream
    //     *          The Log stream for the container.
    //     * @param fileType
    //     *          the log type required.
    //     * @return An InputStreamReader for the required log type or null if the
    //     *         type is not found.
    //     * @throws IOException
    //     */
    //    public InputStreamReader getLogStreamForType(DataInputStream valueStream,
    //        String fileType) throws IOException {
    //      valueStream.reset();
    //      try {
    //        while (true) {
    //          String ft = valueStream.readUTF();
    //          String fileLengthStr = valueStream.readUTF();
    //          long fileLength = Long.parseLong(fileLengthStr);
    //          if (ft.equals(fileType)) {
    //            BoundedInputStream bis =
    //                new BoundedInputStream(valueStream, fileLength);
    //            return new InputStreamReader(bis);
    //          } else {
    //            long totalSkipped = 0;
    //            long currSkipped = 0;
    //            while (currSkipped != -1 && totalSkipped < fileLength) {
    //              currSkipped = valueStream.skip(fileLength - totalSkipped);
    //              totalSkipped += currSkipped;
    //            }
    //            // TODO Verify skip behaviour.
    //            if (currSkipped == -1) {
    //              return null;
    //            }
    //          }
    //        }
    //      } catch (EOFException e) {
    //        return null;
    //      }
    //    }
    /**
     * Writes all logs for a single container to the provided writer.
     * @param valueStream the valueStream
     * @param writer the log writer
     * @param logUploadedTime the time stamp
     * @throws IOException if we can not read the container logs.
     */
    public static void readAcontainerLogs(DataInputStream valueStream,
        Writer writer, long logUploadedTime) throws IOException {
      OutputStream os = null;
      PrintStream ps = null;
      try {
        os = WriterOutputStream.builder()
            .setWriter(writer)
            .setCharset(StandardCharsets.UTF_8)
            .get();
        ps = new PrintStream(os);
        // One iteration per log file type; EOF marks the end of the value.
        while (true) {
          try {
            readContainerLogs(valueStream, ps, logUploadedTime, Long.MAX_VALUE);
          } catch (EOFException e) {
            // EndOfFile
            return;
          }
        }
      } finally {
        IOUtils.cleanupWithLogger(LOG, ps, os);
      }
    }
    /**
     * Writes all logs for a single container to the provided writer.
     * @param valueStream the value stream
     * @param writer the log writer
     * @throws IOException if we can not read the container logs.
     */
    public static void readAcontainerLogs(DataInputStream valueStream,
        Writer writer) throws IOException {
      readAcontainerLogs(valueStream, writer, -1);
    }
    /**
     * Prints one log file from the value stream with a header, limited to
     * {@code bytes}: a positive value prints the head of the file, a
     * negative value prints the tail (last |bytes|), and the stream is
     * always left positioned at the start of the next log file.
     */
    private static void readContainerLogs(DataInputStream valueStream,
        PrintStream out, long logUploadedTime, long bytes)
        throws IOException {
      byte[] buf = new byte[65535];
      String fileType = valueStream.readUTF();
      String fileLengthStr = valueStream.readUTF();
      long fileLength = Long.parseLong(fileLengthStr);
      out.print("LogType:");
      out.println(fileType);
      if (logUploadedTime != -1) {
        out.print("Log Upload Time:");
        out.println(Times.format(logUploadedTime));
      }
      out.print("LogLength:");
      out.println(fileLengthStr);
      out.println("Log Contents:");
      long toSkip = 0;
      long totalBytesToRead = fileLength;
      long skipAfterRead = 0;
      if (bytes < 0) {
        // Tail mode: skip to the last |bytes| of the file.
        long absBytes = Math.abs(bytes);
        if (absBytes < fileLength) {
          toSkip = fileLength - absBytes;
          totalBytesToRead = absBytes;
        }
        org.apache.hadoop.io.IOUtils.skipFully(
            valueStream, toSkip);
      } else {
        // Head mode: read at most `bytes`, skip the remainder afterwards.
        if (bytes < fileLength) {
          totalBytesToRead = bytes;
          skipAfterRead = fileLength - bytes;
        }
      }
      long curRead = 0;
      long pendingRead = totalBytesToRead - curRead;
      int toRead =
          pendingRead > buf.length ? buf.length : (int) pendingRead;
      int len = valueStream.read(buf, 0, toRead);
      while (len != -1 && curRead < totalBytesToRead) {
        out.write(buf, 0, len);
        curRead += len;
        pendingRead = totalBytesToRead - curRead;
        toRead =
            pendingRead > buf.length ? buf.length : (int) pendingRead;
        len = valueStream.read(buf, 0, toRead);
      }
      org.apache.hadoop.io.IOUtils.skipFully(
          valueStream, skipAfterRead);
      out.println("\nEnd of LogType:" + fileType);
      out.println("");
    }
    /**
     * Keep calling this till you get a {@link EOFException} for getting logs of
     * all types for a single container.
     *
     * @param valueStream the value stream
     * @param out the print stream
     * @param logUploadedTime the time stamp
     * @throws IOException if we can not read the container log by specifying
     * the container log type.
     */
    public static void readAContainerLogsForALogType(
        DataInputStream valueStream, PrintStream out, long logUploadedTime)
        throws IOException {
      readContainerLogs(valueStream, out, logUploadedTime, Long.MAX_VALUE);
    }
    /**
     * Keep calling this till you get a {@link EOFException} for getting logs of
     * all types for a single container for the specific bytes.
     *
     * @param valueStream the value stream
     * @param out the output print stream
     * @param logUploadedTime the log upload time stamp
     * @param bytes the output size of the log
     * @throws IOException if we can not read the container log
     */
    public static void readAContainerLogsForALogType(
        DataInputStream valueStream, PrintStream out, long logUploadedTime,
        long bytes) throws IOException {
      readContainerLogs(valueStream, out, logUploadedTime, bytes);
    }
    /**
     * Keep calling this till you get a {@link EOFException} for getting logs of
     * all types for a single container.
     *
     * @param valueStream the value stream
     * @param out the output print stream
     * @throws IOException if we can not read the container log
     */
    public static void readAContainerLogsForALogType(
        DataInputStream valueStream, PrintStream out)
        throws IOException {
      readAContainerLogsForALogType(valueStream, out, -1);
    }
    /**
     * Keep calling this till you get a {@link EOFException} for getting logs of
     * the specific types for a single container.
     * @param valueStream the value stream
     * @param out the output print stream
     * @param logUploadedTime the log uploaded time stamp
     * @param logType the given log type
     * @throws IOException if we can not read the container logs
     * @return If logType contains fileType, return 1, otherwise return 0.
     */
    public static int readContainerLogsForALogType(
        DataInputStream valueStream, PrintStream out, long logUploadedTime,
        List<String> logType) throws IOException {
      return readContainerLogsForALogType(valueStream, out, logUploadedTime,
          logType, Long.MAX_VALUE);
    }
    /**
     * Keep calling this till you get a {@link EOFException} for getting logs of
     * the specific types for a single container.
     * @param valueStream the value stream
     * @param out the output print stream
     * @param logUploadedTime the log uploaded time stamp
     * @param logType the given log type
     * @param bytes log bytes.
     * @throws IOException if we can not read the container logs
     * @return If logType contains fileType, return 1, otherwise return 0.
     */
    public static int readContainerLogsForALogType(
        DataInputStream valueStream, PrintStream out, long logUploadedTime,
        List<String> logType, long bytes) throws IOException {
      byte[] buf = new byte[65535];
      String fileType = valueStream.readUTF();
      String fileLengthStr = valueStream.readUTF();
      long fileLength = Long.parseLong(fileLengthStr);
      if (logType.contains(fileType)) {
        out.print("LogType:");
        out.println(fileType);
        if (logUploadedTime != -1) {
          out.print("Log Upload Time:");
          out.println(Times.format(logUploadedTime));
        }
        out.print("LogLength:");
        out.println(fileLengthStr);
        out.println("Log Contents:");
        long toSkip = 0;
        long totalBytesToRead = fileLength;
        long skipAfterRead = 0;
        if (bytes < 0) {
          // Tail mode: print the last |bytes| of the file.
          long absBytes = Math.abs(bytes);
          if (absBytes < fileLength) {
            toSkip = fileLength - absBytes;
            totalBytesToRead = absBytes;
          }
          org.apache.hadoop.io.IOUtils.skipFully(
              valueStream, toSkip);
        } else {
          // Head mode: print at most `bytes`, skip the remainder.
          if (bytes < fileLength) {
            totalBytesToRead = bytes;
            skipAfterRead = fileLength - bytes;
          }
        }
        long curRead = 0;
        long pendingRead = totalBytesToRead - curRead;
        int toRead = pendingRead > buf.length ? buf.length : (int) pendingRead;
        int len = valueStream.read(buf, 0, toRead);
        while (len != -1 && curRead < totalBytesToRead) {
          out.write(buf, 0, len);
          curRead += len;
          pendingRead = totalBytesToRead - curRead;
          toRead = pendingRead > buf.length ? buf.length : (int) pendingRead;
          len = valueStream.read(buf, 0, toRead);
        }
        org.apache.hadoop.io.IOUtils.skipFully(
            valueStream, skipAfterRead);
        out.println("\nEnd of LogType:" + fileType);
        out.println("");
        return 0;
      } else {
        // Not a requested type: consume the file's bytes without printing.
        long totalSkipped = 0;
        long currSkipped = 0;
        while (currSkipped != -1 && totalSkipped < fileLength) {
          currSkipped = valueStream.skip(fileLength - totalSkipped);
          totalSkipped += currSkipped;
        }
        return -1;
      }
    }
    /**
     * Reads one log file's (name, length) header from the value stream and
     * skips over its data, leaving the stream at the next log file.
     */
    @Private
    public static Pair<String, String> readContainerMetaDataAndSkipData(
        DataInputStream valueStream) throws IOException {
      String fileType = valueStream.readUTF();
      String fileLengthStr = valueStream.readUTF();
      long fileLength = Long.parseLong(fileLengthStr);
      Pair<String, String> logMeta = new Pair<String, String>(
          fileType, fileLengthStr);
      long totalSkipped = 0;
      long currSkipped = 0;
      while (currSkipped != -1 && totalSkipped < fileLength) {
        currSkipped = valueStream.skip(fileLength - totalSkipped);
        totalSkipped += currSkipped;
      }
      return logMeta;
    }
    // Null-safe cleanup; also used by the constructor on partial failure.
    public void close() {
      IOUtils.cleanupWithLogger(LOG, scanner, reader, fsDataIStream);
    }
  }
  /**
   * Iterates over the individual log files inside one container's value
   * stream. Call {@link #nextLog()} to position on a log file, then use the
   * read methods to consume at most {@code getCurrentLogLength()} bytes.
   */
  @Private
  public static class ContainerLogsReader extends InputStream {
    private DataInputStream valueStream;
    private String currentLogType = null;
    private long currentLogLength = 0;
    private BoundedInputStream currentLogData = null;
    private InputStreamReader currentLogISR;
    public ContainerLogsReader(DataInputStream stream) {
      valueStream = stream;
    }
    /**
     * Advances to the next log file in the value stream, draining any
     * unread remainder of the current one first.
     * @return the next log file's type name, or null at end of stream.
     */
    public String nextLog() throws IOException {
      if (currentLogData != null && currentLogLength > 0) {
        // seek to the end of the current log, relying on BoundedInputStream
        // to prevent seeking past the end of the current log
        do {
          if (currentLogData.skip(currentLogLength) < 0) {
            break;
          }
        } while (currentLogData.read() != -1);
      }
      currentLogType = null;
      currentLogLength = 0;
      currentLogData = null;
      currentLogISR = null;
      try {
        String logType = valueStream.readUTF();
        String logLengthStr = valueStream.readUTF();
        currentLogLength = Long.parseLong(logLengthStr);
        currentLogData =
            new BoundedInputStream(valueStream, currentLogLength);
        // Keep the shared valueStream open when this bounded view closes.
        currentLogData.setPropagateClose(false);
        currentLogISR = new InputStreamReader(currentLogData,
            StandardCharsets.UTF_8);
        currentLogType = logType;
      } catch (EOFException e) {
        // Deliberately ignored: EOF means no more log files; currentLogType
        // stays null to signal the end to the caller.
      }
      return currentLogType;
    }
    public String getCurrentLogType() {
      return currentLogType;
    }
    public long getCurrentLogLength() {
      return currentLogLength;
    }
    // NOTE(review): the following delegate to currentLogData/currentLogISR,
    // which are null until nextLog() succeeds — confirm callers always call
    // nextLog() first.
    public long skip(long n) throws IOException {
      return currentLogData.skip(n);
    }
    public int read() throws IOException {
      return currentLogData.read();
    }
    public int read(byte[] buf, int off, int len) throws IOException {
      return currentLogData.read(buf, off, len);
    }
    public int read(char[] buf, int off, int len) throws IOException {
      return currentLogISR.read(buf, off, len);
    }
  }
}
// ==== dataset concatenation boundary: the content below is a separate file ====
// repo: googleapis/google-cloud-java
// path: java-aiplatform/proto-google-cloud-aiplatform-v1/src/main/java/com/google/cloud/aiplatform/v1/CreateIndexOperationMetadata.java
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/aiplatform/v1/index_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.aiplatform.v1;
/**
*
*
* <pre>
* Runtime operation information for
* [IndexService.CreateIndex][google.cloud.aiplatform.v1.IndexService.CreateIndex].
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1.CreateIndexOperationMetadata}
*/
public final class CreateIndexOperationMetadata extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1.CreateIndexOperationMetadata)
CreateIndexOperationMetadataOrBuilder {
private static final long serialVersionUID = 0L;
// Use CreateIndexOperationMetadata.newBuilder() to construct.
  // Generated code: constructor invoked by Builder.build().
  private CreateIndexOperationMetadata(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Generated code: no-arg constructor backing the default instance.
  private CreateIndexOperationMetadata() {}
  // Generated code: fresh-instance factory used by protobuf internals.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new CreateIndexOperationMetadata();
  }
  // Generated code: message descriptor for reflection-based access.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.aiplatform.v1.IndexServiceProto
        .internal_static_google_cloud_aiplatform_v1_CreateIndexOperationMetadata_descriptor;
  }
  // Generated code: binds the descriptor to this class and its Builder.
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.aiplatform.v1.IndexServiceProto
        .internal_static_google_cloud_aiplatform_v1_CreateIndexOperationMetadata_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.aiplatform.v1.CreateIndexOperationMetadata.class,
            com.google.cloud.aiplatform.v1.CreateIndexOperationMetadata.Builder.class);
  }
private int bitField0_;
public static final int GENERIC_METADATA_FIELD_NUMBER = 1;
private com.google.cloud.aiplatform.v1.GenericOperationMetadata genericMetadata_;
/**
*
*
* <pre>
* The operation generic information.
* </pre>
*
* <code>.google.cloud.aiplatform.v1.GenericOperationMetadata generic_metadata = 1;</code>
*
* @return Whether the genericMetadata field is set.
*/
  // Generated code: presence bit 0 tracks the generic_metadata field.
  @java.lang.Override
  public boolean hasGenericMetadata() {
    return ((bitField0_ & 0x00000001) != 0);
  }
/**
*
*
* <pre>
* The operation generic information.
* </pre>
*
* <code>.google.cloud.aiplatform.v1.GenericOperationMetadata generic_metadata = 1;</code>
*
* @return The genericMetadata.
*/
  // Generated code: returns the field, or its default instance when unset.
  @java.lang.Override
  public com.google.cloud.aiplatform.v1.GenericOperationMetadata getGenericMetadata() {
    return genericMetadata_ == null
        ? com.google.cloud.aiplatform.v1.GenericOperationMetadata.getDefaultInstance()
        : genericMetadata_;
  }
/**
*
*
* <pre>
* The operation generic information.
* </pre>
*
* <code>.google.cloud.aiplatform.v1.GenericOperationMetadata generic_metadata = 1;</code>
*/
  // Generated code: OrBuilder view; same null-to-default behavior as the getter.
  @java.lang.Override
  public com.google.cloud.aiplatform.v1.GenericOperationMetadataOrBuilder
      getGenericMetadataOrBuilder() {
    return genericMetadata_ == null
        ? com.google.cloud.aiplatform.v1.GenericOperationMetadata.getDefaultInstance()
        : genericMetadata_;
  }
public static final int NEAREST_NEIGHBOR_SEARCH_OPERATION_METADATA_FIELD_NUMBER = 2;
private com.google.cloud.aiplatform.v1.NearestNeighborSearchOperationMetadata
nearestNeighborSearchOperationMetadata_;
/**
*
*
* <pre>
* The operation metadata with regard to Matching Engine Index operation.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1.NearestNeighborSearchOperationMetadata nearest_neighbor_search_operation_metadata = 2;
* </code>
*
* @return Whether the nearestNeighborSearchOperationMetadata field is set.
*/
  // Generated code: presence bit 1 tracks the field with number 2.
  @java.lang.Override
  public boolean hasNearestNeighborSearchOperationMetadata() {
    return ((bitField0_ & 0x00000002) != 0);
  }
/**
*
*
* <pre>
* The operation metadata with regard to Matching Engine Index operation.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1.NearestNeighborSearchOperationMetadata nearest_neighbor_search_operation_metadata = 2;
* </code>
*
* @return The nearestNeighborSearchOperationMetadata.
*/
  // Generated code: returns the field, or its default instance when unset.
  @java.lang.Override
  public com.google.cloud.aiplatform.v1.NearestNeighborSearchOperationMetadata
      getNearestNeighborSearchOperationMetadata() {
    return nearestNeighborSearchOperationMetadata_ == null
        ? com.google.cloud.aiplatform.v1.NearestNeighborSearchOperationMetadata.getDefaultInstance()
        : nearestNeighborSearchOperationMetadata_;
  }
/**
*
*
* <pre>
* The operation metadata with regard to Matching Engine Index operation.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1.NearestNeighborSearchOperationMetadata nearest_neighbor_search_operation_metadata = 2;
* </code>
*/
  // Generated code: OrBuilder view; same null-to-default behavior as the getter.
  @java.lang.Override
  public com.google.cloud.aiplatform.v1.NearestNeighborSearchOperationMetadataOrBuilder
      getNearestNeighborSearchOperationMetadataOrBuilder() {
    return nearestNeighborSearchOperationMetadata_ == null
        ? com.google.cloud.aiplatform.v1.NearestNeighborSearchOperationMetadata.getDefaultInstance()
        : nearestNeighborSearchOperationMetadata_;
  }
private byte memoizedIsInitialized = -1;
  // Generated code: no required fields, so the memoized result is always true.
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  // Generated code: serializes set fields in field-number order.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(1, getGenericMetadata());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      output.writeMessage(2, getNearestNeighborSearchOperationMetadata());
    }
    getUnknownFields().writeTo(output);
  }
  // Generated code: computes and memoizes the wire size (-1 means uncached).
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getGenericMetadata());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      size +=
          com.google.protobuf.CodedOutputStream.computeMessageSize(
              2, getNearestNeighborSearchOperationMetadata());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Generated code: field-by-field equality, including unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.aiplatform.v1.CreateIndexOperationMetadata)) {
      return super.equals(obj);
    }
    com.google.cloud.aiplatform.v1.CreateIndexOperationMetadata other =
        (com.google.cloud.aiplatform.v1.CreateIndexOperationMetadata) obj;
    if (hasGenericMetadata() != other.hasGenericMetadata()) return false;
    if (hasGenericMetadata()) {
      if (!getGenericMetadata().equals(other.getGenericMetadata())) return false;
    }
    if (hasNearestNeighborSearchOperationMetadata()
        != other.hasNearestNeighborSearchOperationMetadata()) return false;
    if (hasNearestNeighborSearchOperationMetadata()) {
      if (!getNearestNeighborSearchOperationMetadata()
          .equals(other.getNearestNeighborSearchOperationMetadata())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // Hash is memoized; 0 means "not yet computed" (a computed hash of 0 would
    // be recomputed, which is harmless since the message is immutable).
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    // Only set fields contribute, keeping hashCode consistent with equals().
    if (hasGenericMetadata()) {
      hash = (37 * hash) + GENERIC_METADATA_FIELD_NUMBER;
      hash = (53 * hash) + getGenericMetadata().hashCode();
    }
    if (hasNearestNeighborSearchOperationMetadata()) {
      hash = (37 * hash) + NEAREST_NEIGHBOR_SEARCH_OPERATION_METADATA_FIELD_NUMBER;
      hash = (53 * hash) + getNearestNeighborSearchOperationMetadata().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // ---------------------------------------------------------------------------
  // Static parse entry points. The byte[]/ByteString/ByteBuffer overloads throw
  // InvalidProtocolBufferException on malformed input; the stream overloads may
  // additionally surface IOException from the underlying stream. The
  // parseDelimitedFrom variants expect a varint length prefix before the
  // message bytes.
  // ---------------------------------------------------------------------------
  public static com.google.cloud.aiplatform.v1.CreateIndexOperationMetadata parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.aiplatform.v1.CreateIndexOperationMetadata parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.aiplatform.v1.CreateIndexOperationMetadata parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.aiplatform.v1.CreateIndexOperationMetadata parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.aiplatform.v1.CreateIndexOperationMetadata parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.aiplatform.v1.CreateIndexOperationMetadata parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.aiplatform.v1.CreateIndexOperationMetadata parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.aiplatform.v1.CreateIndexOperationMetadata parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.aiplatform.v1.CreateIndexOperationMetadata parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.aiplatform.v1.CreateIndexOperationMetadata parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.aiplatform.v1.CreateIndexOperationMetadata parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.aiplatform.v1.CreateIndexOperationMetadata parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  // Creates a fresh builder from the shared default instance.
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  // Creates a builder pre-populated with the given prototype's field values.
  public static Builder newBuilder(
      com.google.cloud.aiplatform.v1.CreateIndexOperationMetadata prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    // Avoid a redundant merge when this is already the default instance.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Runtime operation information for
   * [IndexService.CreateIndex][google.cloud.aiplatform.v1.IndexService.CreateIndex].
   * </pre>
   *
   * Protobuf type {@code google.cloud.aiplatform.v1.CreateIndexOperationMetadata}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1.CreateIndexOperationMetadata)
      com.google.cloud.aiplatform.v1.CreateIndexOperationMetadataOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.aiplatform.v1.IndexServiceProto
          .internal_static_google_cloud_aiplatform_v1_CreateIndexOperationMetadata_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.aiplatform.v1.IndexServiceProto
          .internal_static_google_cloud_aiplatform_v1_CreateIndexOperationMetadata_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.aiplatform.v1.CreateIndexOperationMetadata.class,
              com.google.cloud.aiplatform.v1.CreateIndexOperationMetadata.Builder.class);
    }

    // Construct using com.google.cloud.aiplatform.v1.CreateIndexOperationMetadata.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    // Eagerly creates the sub-message field builders when the runtime requests
    // it (alwaysUseFieldBuilders); otherwise they are created lazily on access.
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getGenericMetadataFieldBuilder();
        getNearestNeighborSearchOperationMetadataFieldBuilder();
      }
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      // Reset presence bits and release both sub-message fields/builders.
      bitField0_ = 0;
      genericMetadata_ = null;
      if (genericMetadataBuilder_ != null) {
        genericMetadataBuilder_.dispose();
        genericMetadataBuilder_ = null;
      }
      nearestNeighborSearchOperationMetadata_ = null;
      if (nearestNeighborSearchOperationMetadataBuilder_ != null) {
        nearestNeighborSearchOperationMetadataBuilder_.dispose();
        nearestNeighborSearchOperationMetadataBuilder_ = null;
      }
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.aiplatform.v1.IndexServiceProto
          .internal_static_google_cloud_aiplatform_v1_CreateIndexOperationMetadata_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.aiplatform.v1.CreateIndexOperationMetadata getDefaultInstanceForType() {
      return com.google.cloud.aiplatform.v1.CreateIndexOperationMetadata.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.aiplatform.v1.CreateIndexOperationMetadata build() {
      com.google.cloud.aiplatform.v1.CreateIndexOperationMetadata result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.aiplatform.v1.CreateIndexOperationMetadata buildPartial() {
      com.google.cloud.aiplatform.v1.CreateIndexOperationMetadata result =
          new com.google.cloud.aiplatform.v1.CreateIndexOperationMetadata(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies set fields from the builder into the result message, translating
    // the builder's presence bits into the message's bitField0_.
    private void buildPartial0(com.google.cloud.aiplatform.v1.CreateIndexOperationMetadata result) {
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.genericMetadata_ =
            genericMetadataBuilder_ == null ? genericMetadata_ : genericMetadataBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.nearestNeighborSearchOperationMetadata_ =
            nearestNeighborSearchOperationMetadataBuilder_ == null
                ? nearestNeighborSearchOperationMetadata_
                : nearestNeighborSearchOperationMetadataBuilder_.build();
        to_bitField0_ |= 0x00000002;
      }
      result.bitField0_ |= to_bitField0_;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      // Dispatch to the typed merge when possible; fall back to reflective merge.
      if (other instanceof com.google.cloud.aiplatform.v1.CreateIndexOperationMetadata) {
        return mergeFrom((com.google.cloud.aiplatform.v1.CreateIndexOperationMetadata) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.cloud.aiplatform.v1.CreateIndexOperationMetadata other) {
      if (other == com.google.cloud.aiplatform.v1.CreateIndexOperationMetadata.getDefaultInstance())
        return this;
      if (other.hasGenericMetadata()) {
        mergeGenericMetadata(other.getGenericMetadata());
      }
      if (other.hasNearestNeighborSearchOperationMetadata()) {
        mergeNearestNeighborSearchOperationMetadata(
            other.getNearestNeighborSearchOperationMetadata());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          // Wire tags: 10 = field 1 (length-delimited), 18 = field 2
          // (length-delimited); 0 signals end of input.
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                input.readMessage(getGenericMetadataFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                input.readMessage(
                    getNearestNeighborSearchOperationMetadataFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    // Presence bits: bit 0 = generic_metadata, bit 1 =
    // nearest_neighbor_search_operation_metadata.
    private int bitField0_;

    private com.google.cloud.aiplatform.v1.GenericOperationMetadata genericMetadata_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.aiplatform.v1.GenericOperationMetadata,
            com.google.cloud.aiplatform.v1.GenericOperationMetadata.Builder,
            com.google.cloud.aiplatform.v1.GenericOperationMetadataOrBuilder>
        genericMetadataBuilder_;

    /**
     *
     *
     * <pre>
     * The operation generic information.
     * </pre>
     *
     * <code>.google.cloud.aiplatform.v1.GenericOperationMetadata generic_metadata = 1;</code>
     *
     * @return Whether the genericMetadata field is set.
     */
    public boolean hasGenericMetadata() {
      return ((bitField0_ & 0x00000001) != 0);
    }

    /**
     *
     *
     * <pre>
     * The operation generic information.
     * </pre>
     *
     * <code>.google.cloud.aiplatform.v1.GenericOperationMetadata generic_metadata = 1;</code>
     *
     * @return The genericMetadata.
     */
    public com.google.cloud.aiplatform.v1.GenericOperationMetadata getGenericMetadata() {
      if (genericMetadataBuilder_ == null) {
        return genericMetadata_ == null
            ? com.google.cloud.aiplatform.v1.GenericOperationMetadata.getDefaultInstance()
            : genericMetadata_;
      } else {
        return genericMetadataBuilder_.getMessage();
      }
    }

    /**
     *
     *
     * <pre>
     * The operation generic information.
     * </pre>
     *
     * <code>.google.cloud.aiplatform.v1.GenericOperationMetadata generic_metadata = 1;</code>
     */
    public Builder setGenericMetadata(
        com.google.cloud.aiplatform.v1.GenericOperationMetadata value) {
      if (genericMetadataBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        genericMetadata_ = value;
      } else {
        genericMetadataBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * The operation generic information.
     * </pre>
     *
     * <code>.google.cloud.aiplatform.v1.GenericOperationMetadata generic_metadata = 1;</code>
     */
    public Builder setGenericMetadata(
        com.google.cloud.aiplatform.v1.GenericOperationMetadata.Builder builderForValue) {
      if (genericMetadataBuilder_ == null) {
        genericMetadata_ = builderForValue.build();
      } else {
        genericMetadataBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * The operation generic information.
     * </pre>
     *
     * <code>.google.cloud.aiplatform.v1.GenericOperationMetadata generic_metadata = 1;</code>
     */
    public Builder mergeGenericMetadata(
        com.google.cloud.aiplatform.v1.GenericOperationMetadata value) {
      if (genericMetadataBuilder_ == null) {
        // Reference (!=) comparison against the default instance is
        // intentional in generated code: merge only into a real existing value.
        if (((bitField0_ & 0x00000001) != 0)
            && genericMetadata_ != null
            && genericMetadata_
                != com.google.cloud.aiplatform.v1.GenericOperationMetadata.getDefaultInstance()) {
          getGenericMetadataBuilder().mergeFrom(value);
        } else {
          genericMetadata_ = value;
        }
      } else {
        genericMetadataBuilder_.mergeFrom(value);
      }
      if (genericMetadata_ != null) {
        bitField0_ |= 0x00000001;
        onChanged();
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The operation generic information.
     * </pre>
     *
     * <code>.google.cloud.aiplatform.v1.GenericOperationMetadata generic_metadata = 1;</code>
     */
    public Builder clearGenericMetadata() {
      bitField0_ = (bitField0_ & ~0x00000001);
      genericMetadata_ = null;
      if (genericMetadataBuilder_ != null) {
        genericMetadataBuilder_.dispose();
        genericMetadataBuilder_ = null;
      }
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * The operation generic information.
     * </pre>
     *
     * <code>.google.cloud.aiplatform.v1.GenericOperationMetadata generic_metadata = 1;</code>
     */
    public com.google.cloud.aiplatform.v1.GenericOperationMetadata.Builder
        getGenericMetadataBuilder() {
      bitField0_ |= 0x00000001;
      onChanged();
      return getGenericMetadataFieldBuilder().getBuilder();
    }

    /**
     *
     *
     * <pre>
     * The operation generic information.
     * </pre>
     *
     * <code>.google.cloud.aiplatform.v1.GenericOperationMetadata generic_metadata = 1;</code>
     */
    public com.google.cloud.aiplatform.v1.GenericOperationMetadataOrBuilder
        getGenericMetadataOrBuilder() {
      if (genericMetadataBuilder_ != null) {
        return genericMetadataBuilder_.getMessageOrBuilder();
      } else {
        return genericMetadata_ == null
            ? com.google.cloud.aiplatform.v1.GenericOperationMetadata.getDefaultInstance()
            : genericMetadata_;
      }
    }

    /**
     *
     *
     * <pre>
     * The operation generic information.
     * </pre>
     *
     * <code>.google.cloud.aiplatform.v1.GenericOperationMetadata generic_metadata = 1;</code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.aiplatform.v1.GenericOperationMetadata,
            com.google.cloud.aiplatform.v1.GenericOperationMetadata.Builder,
            com.google.cloud.aiplatform.v1.GenericOperationMetadataOrBuilder>
        getGenericMetadataFieldBuilder() {
      // Lazily creates the field builder; once created, the plain field is
      // nulled out and the builder becomes the single source of truth.
      if (genericMetadataBuilder_ == null) {
        genericMetadataBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.aiplatform.v1.GenericOperationMetadata,
                com.google.cloud.aiplatform.v1.GenericOperationMetadata.Builder,
                com.google.cloud.aiplatform.v1.GenericOperationMetadataOrBuilder>(
                getGenericMetadata(), getParentForChildren(), isClean());
        genericMetadata_ = null;
      }
      return genericMetadataBuilder_;
    }

    private com.google.cloud.aiplatform.v1.NearestNeighborSearchOperationMetadata
        nearestNeighborSearchOperationMetadata_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.aiplatform.v1.NearestNeighborSearchOperationMetadata,
            com.google.cloud.aiplatform.v1.NearestNeighborSearchOperationMetadata.Builder,
            com.google.cloud.aiplatform.v1.NearestNeighborSearchOperationMetadataOrBuilder>
        nearestNeighborSearchOperationMetadataBuilder_;

    /**
     *
     *
     * <pre>
     * The operation metadata with regard to Matching Engine Index operation.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1.NearestNeighborSearchOperationMetadata nearest_neighbor_search_operation_metadata = 2;
     * </code>
     *
     * @return Whether the nearestNeighborSearchOperationMetadata field is set.
     */
    public boolean hasNearestNeighborSearchOperationMetadata() {
      return ((bitField0_ & 0x00000002) != 0);
    }

    /**
     *
     *
     * <pre>
     * The operation metadata with regard to Matching Engine Index operation.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1.NearestNeighborSearchOperationMetadata nearest_neighbor_search_operation_metadata = 2;
     * </code>
     *
     * @return The nearestNeighborSearchOperationMetadata.
     */
    public com.google.cloud.aiplatform.v1.NearestNeighborSearchOperationMetadata
        getNearestNeighborSearchOperationMetadata() {
      if (nearestNeighborSearchOperationMetadataBuilder_ == null) {
        return nearestNeighborSearchOperationMetadata_ == null
            ? com.google.cloud.aiplatform.v1.NearestNeighborSearchOperationMetadata
                .getDefaultInstance()
            : nearestNeighborSearchOperationMetadata_;
      } else {
        return nearestNeighborSearchOperationMetadataBuilder_.getMessage();
      }
    }

    /**
     *
     *
     * <pre>
     * The operation metadata with regard to Matching Engine Index operation.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1.NearestNeighborSearchOperationMetadata nearest_neighbor_search_operation_metadata = 2;
     * </code>
     */
    public Builder setNearestNeighborSearchOperationMetadata(
        com.google.cloud.aiplatform.v1.NearestNeighborSearchOperationMetadata value) {
      if (nearestNeighborSearchOperationMetadataBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        nearestNeighborSearchOperationMetadata_ = value;
      } else {
        nearestNeighborSearchOperationMetadataBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * The operation metadata with regard to Matching Engine Index operation.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1.NearestNeighborSearchOperationMetadata nearest_neighbor_search_operation_metadata = 2;
     * </code>
     */
    public Builder setNearestNeighborSearchOperationMetadata(
        com.google.cloud.aiplatform.v1.NearestNeighborSearchOperationMetadata.Builder
            builderForValue) {
      if (nearestNeighborSearchOperationMetadataBuilder_ == null) {
        nearestNeighborSearchOperationMetadata_ = builderForValue.build();
      } else {
        nearestNeighborSearchOperationMetadataBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * The operation metadata with regard to Matching Engine Index operation.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1.NearestNeighborSearchOperationMetadata nearest_neighbor_search_operation_metadata = 2;
     * </code>
     */
    public Builder mergeNearestNeighborSearchOperationMetadata(
        com.google.cloud.aiplatform.v1.NearestNeighborSearchOperationMetadata value) {
      if (nearestNeighborSearchOperationMetadataBuilder_ == null) {
        // Reference (!=) comparison against the default instance is
        // intentional in generated code: merge only into a real existing value.
        if (((bitField0_ & 0x00000002) != 0)
            && nearestNeighborSearchOperationMetadata_ != null
            && nearestNeighborSearchOperationMetadata_
                != com.google.cloud.aiplatform.v1.NearestNeighborSearchOperationMetadata
                    .getDefaultInstance()) {
          getNearestNeighborSearchOperationMetadataBuilder().mergeFrom(value);
        } else {
          nearestNeighborSearchOperationMetadata_ = value;
        }
      } else {
        nearestNeighborSearchOperationMetadataBuilder_.mergeFrom(value);
      }
      if (nearestNeighborSearchOperationMetadata_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The operation metadata with regard to Matching Engine Index operation.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1.NearestNeighborSearchOperationMetadata nearest_neighbor_search_operation_metadata = 2;
     * </code>
     */
    public Builder clearNearestNeighborSearchOperationMetadata() {
      bitField0_ = (bitField0_ & ~0x00000002);
      nearestNeighborSearchOperationMetadata_ = null;
      if (nearestNeighborSearchOperationMetadataBuilder_ != null) {
        nearestNeighborSearchOperationMetadataBuilder_.dispose();
        nearestNeighborSearchOperationMetadataBuilder_ = null;
      }
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * The operation metadata with regard to Matching Engine Index operation.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1.NearestNeighborSearchOperationMetadata nearest_neighbor_search_operation_metadata = 2;
     * </code>
     */
    public com.google.cloud.aiplatform.v1.NearestNeighborSearchOperationMetadata.Builder
        getNearestNeighborSearchOperationMetadataBuilder() {
      bitField0_ |= 0x00000002;
      onChanged();
      return getNearestNeighborSearchOperationMetadataFieldBuilder().getBuilder();
    }

    /**
     *
     *
     * <pre>
     * The operation metadata with regard to Matching Engine Index operation.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1.NearestNeighborSearchOperationMetadata nearest_neighbor_search_operation_metadata = 2;
     * </code>
     */
    public com.google.cloud.aiplatform.v1.NearestNeighborSearchOperationMetadataOrBuilder
        getNearestNeighborSearchOperationMetadataOrBuilder() {
      if (nearestNeighborSearchOperationMetadataBuilder_ != null) {
        return nearestNeighborSearchOperationMetadataBuilder_.getMessageOrBuilder();
      } else {
        return nearestNeighborSearchOperationMetadata_ == null
            ? com.google.cloud.aiplatform.v1.NearestNeighborSearchOperationMetadata
                .getDefaultInstance()
            : nearestNeighborSearchOperationMetadata_;
      }
    }

    /**
     *
     *
     * <pre>
     * The operation metadata with regard to Matching Engine Index operation.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1.NearestNeighborSearchOperationMetadata nearest_neighbor_search_operation_metadata = 2;
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.aiplatform.v1.NearestNeighborSearchOperationMetadata,
            com.google.cloud.aiplatform.v1.NearestNeighborSearchOperationMetadata.Builder,
            com.google.cloud.aiplatform.v1.NearestNeighborSearchOperationMetadataOrBuilder>
        getNearestNeighborSearchOperationMetadataFieldBuilder() {
      // Lazily creates the field builder; once created, the plain field is
      // nulled out and the builder becomes the single source of truth.
      if (nearestNeighborSearchOperationMetadataBuilder_ == null) {
        nearestNeighborSearchOperationMetadataBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.aiplatform.v1.NearestNeighborSearchOperationMetadata,
                com.google.cloud.aiplatform.v1.NearestNeighborSearchOperationMetadata.Builder,
                com.google.cloud.aiplatform.v1.NearestNeighborSearchOperationMetadataOrBuilder>(
                getNearestNeighborSearchOperationMetadata(), getParentForChildren(), isClean());
        nearestNeighborSearchOperationMetadata_ = null;
      }
      return nearestNeighborSearchOperationMetadataBuilder_;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1.CreateIndexOperationMetadata)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1.CreateIndexOperationMetadata)
  // Immutable singleton default instance shared by all callers.
  private static final com.google.cloud.aiplatform.v1.CreateIndexOperationMetadata DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1.CreateIndexOperationMetadata();
  }

  public static com.google.cloud.aiplatform.v1.CreateIndexOperationMetadata getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Shared parser instance backing all static parseFrom/parseDelimitedFrom
  // entry points. On failure, the partially parsed message is attached to the
  // thrown exception as the "unfinished message".
  private static final com.google.protobuf.Parser<CreateIndexOperationMetadata> PARSER =
      new com.google.protobuf.AbstractParser<CreateIndexOperationMetadata>() {
        @java.lang.Override
        public CreateIndexOperationMetadata parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<CreateIndexOperationMetadata> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<CreateIndexOperationMetadata> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.aiplatform.v1.CreateIndexOperationMetadata getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 38,064 | java-aiplatform/proto-google-cloud-aiplatform-v1/src/main/java/com/google/cloud/aiplatform/v1/UpdateIndexOperationMetadata.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/aiplatform/v1/index_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.aiplatform.v1;
/**
*
*
* <pre>
* Runtime operation information for
* [IndexService.UpdateIndex][google.cloud.aiplatform.v1.IndexService.UpdateIndex].
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1.UpdateIndexOperationMetadata}
*/
public final class UpdateIndexOperationMetadata extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1.UpdateIndexOperationMetadata)
UpdateIndexOperationMetadataOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use UpdateIndexOperationMetadata.newBuilder() to construct.
  private UpdateIndexOperationMetadata(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // Private no-arg constructor: instances are created only via the builder,
  // the parser, or newInstance below.
  private UpdateIndexOperationMetadata() {}

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new UpdateIndexOperationMetadata();
  }
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.aiplatform.v1.IndexServiceProto
        .internal_static_google_cloud_aiplatform_v1_UpdateIndexOperationMetadata_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    // Binds the reflective field accessors to this message and builder class.
    return com.google.cloud.aiplatform.v1.IndexServiceProto
        .internal_static_google_cloud_aiplatform_v1_UpdateIndexOperationMetadata_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.aiplatform.v1.UpdateIndexOperationMetadata.class,
            com.google.cloud.aiplatform.v1.UpdateIndexOperationMetadata.Builder.class);
  }
  // Presence bits: bit 0 = generic_metadata, bit 1 =
  // nearest_neighbor_search_operation_metadata.
  private int bitField0_;

  public static final int GENERIC_METADATA_FIELD_NUMBER = 1;
  private com.google.cloud.aiplatform.v1.GenericOperationMetadata genericMetadata_;

  /**
   *
   *
   * <pre>
   * The operation generic information.
   * </pre>
   *
   * <code>.google.cloud.aiplatform.v1.GenericOperationMetadata generic_metadata = 1;</code>
   *
   * @return Whether the genericMetadata field is set.
   */
  @java.lang.Override
  public boolean hasGenericMetadata() {
    return ((bitField0_ & 0x00000001) != 0);
  }

  /**
   *
   *
   * <pre>
   * The operation generic information.
   * </pre>
   *
   * <code>.google.cloud.aiplatform.v1.GenericOperationMetadata generic_metadata = 1;</code>
   *
   * @return The genericMetadata.
   */
  @java.lang.Override
  public com.google.cloud.aiplatform.v1.GenericOperationMetadata getGenericMetadata() {
    // Never returns null: falls back to the default instance when unset.
    return genericMetadata_ == null
        ? com.google.cloud.aiplatform.v1.GenericOperationMetadata.getDefaultInstance()
        : genericMetadata_;
  }

  /**
   *
   *
   * <pre>
   * The operation generic information.
   * </pre>
   *
   * <code>.google.cloud.aiplatform.v1.GenericOperationMetadata generic_metadata = 1;</code>
   */
  @java.lang.Override
  public com.google.cloud.aiplatform.v1.GenericOperationMetadataOrBuilder
      getGenericMetadataOrBuilder() {
    return genericMetadata_ == null
        ? com.google.cloud.aiplatform.v1.GenericOperationMetadata.getDefaultInstance()
        : genericMetadata_;
  }
  public static final int NEAREST_NEIGHBOR_SEARCH_OPERATION_METADATA_FIELD_NUMBER = 2;
  private com.google.cloud.aiplatform.v1.NearestNeighborSearchOperationMetadata
      nearestNeighborSearchOperationMetadata_;

  /**
   *
   *
   * <pre>
   * The operation metadata with regard to Matching Engine Index operation.
   * </pre>
   *
   * <code>
   * .google.cloud.aiplatform.v1.NearestNeighborSearchOperationMetadata nearest_neighbor_search_operation_metadata = 2;
   * </code>
   *
   * @return Whether the nearestNeighborSearchOperationMetadata field is set.
   */
  @java.lang.Override
  public boolean hasNearestNeighborSearchOperationMetadata() {
    return ((bitField0_ & 0x00000002) != 0);
  }

  /**
   *
   *
   * <pre>
   * The operation metadata with regard to Matching Engine Index operation.
   * </pre>
   *
   * <code>
   * .google.cloud.aiplatform.v1.NearestNeighborSearchOperationMetadata nearest_neighbor_search_operation_metadata = 2;
   * </code>
   *
   * @return The nearestNeighborSearchOperationMetadata.
   */
  @java.lang.Override
  public com.google.cloud.aiplatform.v1.NearestNeighborSearchOperationMetadata
      getNearestNeighborSearchOperationMetadata() {
    // Never returns null: falls back to the default instance when unset.
    return nearestNeighborSearchOperationMetadata_ == null
        ? com.google.cloud.aiplatform.v1.NearestNeighborSearchOperationMetadata.getDefaultInstance()
        : nearestNeighborSearchOperationMetadata_;
  }

  /**
   *
   *
   * <pre>
   * The operation metadata with regard to Matching Engine Index operation.
   * </pre>
   *
   * <code>
   * .google.cloud.aiplatform.v1.NearestNeighborSearchOperationMetadata nearest_neighbor_search_operation_metadata = 2;
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.aiplatform.v1.NearestNeighborSearchOperationMetadataOrBuilder
      getNearestNeighborSearchOperationMetadataOrBuilder() {
    return nearestNeighborSearchOperationMetadata_ == null
        ? com.google.cloud.aiplatform.v1.NearestNeighborSearchOperationMetadata.getDefaultInstance()
        : nearestNeighborSearchOperationMetadata_;
  }
  // Memoized initialization state: -1 = not yet computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    // This message declares no required fields, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Each optional sub-message is written only when its presence bit in
    // bitField0_ is set (bit 0 -> field 1, bit 1 -> field 2).
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(1, getGenericMetadata());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      output.writeMessage(2, getNearestNeighborSearchOperationMetadata());
    }
    // Round-trip any fields that were unknown at parse time.
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // memoizedSize caches the computed size; -1 means "not yet computed".
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getGenericMetadata());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      size +=
          com.google.protobuf.CodedOutputStream.computeMessageSize(
              2, getNearestNeighborSearchOperationMetadata());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.aiplatform.v1.UpdateIndexOperationMetadata)) {
      return super.equals(obj);
    }
    com.google.cloud.aiplatform.v1.UpdateIndexOperationMetadata other =
        (com.google.cloud.aiplatform.v1.UpdateIndexOperationMetadata) obj;

    // Field-by-field comparison: presence must match, and when present the
    // values must be equal. Unknown fields participate in equality as well.
    if (hasGenericMetadata() != other.hasGenericMetadata()) return false;
    if (hasGenericMetadata()) {
      if (!getGenericMetadata().equals(other.getGenericMetadata())) return false;
    }
    if (hasNearestNeighborSearchOperationMetadata()
        != other.hasNearestNeighborSearchOperationMetadata()) return false;
    if (hasNearestNeighborSearchOperationMetadata()) {
      if (!getNearestNeighborSearchOperationMetadata()
          .equals(other.getNearestNeighborSearchOperationMetadata())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  /**
   * Memoized hash consistent with {@link #equals}: folds in the descriptor,
   * each set field (tagged by its field number), and the unknown fields, using
   * the generator's fixed 19/37/53/29 multipliers.
   */
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasGenericMetadata()) {
      hash = (37 * hash) + GENERIC_METADATA_FIELD_NUMBER;
      hash = (53 * hash) + getGenericMetadata().hashCode();
    }
    if (hasNearestNeighborSearchOperationMetadata()) {
      hash = (37 * hash) + NEAREST_NEIGHBOR_SEARCH_OPERATION_METADATA_FIELD_NUMBER;
      hash = (53 * hash) + getNearestNeighborSearchOperationMetadata().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points: one overload per supported input
  // source (ByteBuffer, ByteString, byte[], InputStream, CodedInputStream),
  // each with and without an extension registry, plus length-delimited forms.
  public static com.google.cloud.aiplatform.v1.UpdateIndexOperationMetadata parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.aiplatform.v1.UpdateIndexOperationMetadata parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.aiplatform.v1.UpdateIndexOperationMetadata parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.aiplatform.v1.UpdateIndexOperationMetadata parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.aiplatform.v1.UpdateIndexOperationMetadata parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.aiplatform.v1.UpdateIndexOperationMetadata parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.aiplatform.v1.UpdateIndexOperationMetadata parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.aiplatform.v1.UpdateIndexOperationMetadata parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Length-delimited variants: read a varint size prefix, then that many bytes.
  public static com.google.cloud.aiplatform.v1.UpdateIndexOperationMetadata parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.aiplatform.v1.UpdateIndexOperationMetadata parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.aiplatform.v1.UpdateIndexOperationMetadata parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.aiplatform.v1.UpdateIndexOperationMetadata parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Builder factory methods.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  /** Creates a fresh builder with all fields unset. */
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  /** Creates a builder pre-populated with {@code prototype}'s fields. */
  public static Builder newBuilder(
      com.google.cloud.aiplatform.v1.UpdateIndexOperationMetadata prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // The default instance yields an empty builder; any other instance seeds
    // the builder with its own field values.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Runtime operation information for
   * [IndexService.UpdateIndex][google.cloud.aiplatform.v1.IndexService.UpdateIndex].
   * </pre>
   *
   * Protobuf type {@code google.cloud.aiplatform.v1.UpdateIndexOperationMetadata}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1.UpdateIndexOperationMetadata)
      com.google.cloud.aiplatform.v1.UpdateIndexOperationMetadataOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.aiplatform.v1.IndexServiceProto
          .internal_static_google_cloud_aiplatform_v1_UpdateIndexOperationMetadata_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.aiplatform.v1.IndexServiceProto
          .internal_static_google_cloud_aiplatform_v1_UpdateIndexOperationMetadata_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.aiplatform.v1.UpdateIndexOperationMetadata.class,
              com.google.cloud.aiplatform.v1.UpdateIndexOperationMetadata.Builder.class);
    }
    // Construct using com.google.cloud.aiplatform.v1.UpdateIndexOperationMetadata.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    // Eagerly creates the nested field builders when the protobuf runtime
    // requests it (alwaysUseFieldBuilders is set in some runtime modes).
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getGenericMetadataFieldBuilder();
        getNearestNeighborSearchOperationMetadataFieldBuilder();
      }
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      genericMetadata_ = null;
      if (genericMetadataBuilder_ != null) {
        genericMetadataBuilder_.dispose();
        genericMetadataBuilder_ = null;
      }
      nearestNeighborSearchOperationMetadata_ = null;
      if (nearestNeighborSearchOperationMetadataBuilder_ != null) {
        nearestNeighborSearchOperationMetadataBuilder_.dispose();
        nearestNeighborSearchOperationMetadataBuilder_ = null;
      }
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.aiplatform.v1.IndexServiceProto
          .internal_static_google_cloud_aiplatform_v1_UpdateIndexOperationMetadata_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.aiplatform.v1.UpdateIndexOperationMetadata getDefaultInstanceForType() {
      return com.google.cloud.aiplatform.v1.UpdateIndexOperationMetadata.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.aiplatform.v1.UpdateIndexOperationMetadata build() {
      com.google.cloud.aiplatform.v1.UpdateIndexOperationMetadata result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.aiplatform.v1.UpdateIndexOperationMetadata buildPartial() {
      com.google.cloud.aiplatform.v1.UpdateIndexOperationMetadata result =
          new com.google.cloud.aiplatform.v1.UpdateIndexOperationMetadata(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Copies only the explicitly-set fields from the builder into the result,
    // mirroring each builder bit into the message's bitField0_.
    private void buildPartial0(com.google.cloud.aiplatform.v1.UpdateIndexOperationMetadata result) {
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.genericMetadata_ =
            genericMetadataBuilder_ == null ? genericMetadata_ : genericMetadataBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.nearestNeighborSearchOperationMetadata_ =
            nearestNeighborSearchOperationMetadataBuilder_ == null
                ? nearestNeighborSearchOperationMetadata_
                : nearestNeighborSearchOperationMetadataBuilder_.build();
        to_bitField0_ |= 0x00000002;
      }
      result.bitField0_ |= to_bitField0_;
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.aiplatform.v1.UpdateIndexOperationMetadata) {
        return mergeFrom((com.google.cloud.aiplatform.v1.UpdateIndexOperationMetadata) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    public Builder mergeFrom(com.google.cloud.aiplatform.v1.UpdateIndexOperationMetadata other) {
      if (other == com.google.cloud.aiplatform.v1.UpdateIndexOperationMetadata.getDefaultInstance())
        return this;
      if (other.hasGenericMetadata()) {
        mergeGenericMetadata(other.getGenericMetadata());
      }
      if (other.hasNearestNeighborSearchOperationMetadata()) {
        mergeNearestNeighborSearchOperationMetadata(
            other.getNearestNeighborSearchOperationMetadata());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Streaming parse: dispatches on wire tags (10 = field 1, 18 = field 2,
    // length-delimited); unrecognized fields are preserved as unknown fields.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                input.readMessage(getGenericMetadataFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                input.readMessage(
                    getNearestNeighborSearchOperationMetadataFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Bit 0 = genericMetadata set, bit 1 = nearestNeighborSearchOperationMetadata set.
    private int bitField0_;
    private com.google.cloud.aiplatform.v1.GenericOperationMetadata genericMetadata_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.aiplatform.v1.GenericOperationMetadata,
            com.google.cloud.aiplatform.v1.GenericOperationMetadata.Builder,
            com.google.cloud.aiplatform.v1.GenericOperationMetadataOrBuilder>
        genericMetadataBuilder_;
    /**
     *
     *
     * <pre>
     * The operation generic information.
     * </pre>
     *
     * <code>.google.cloud.aiplatform.v1.GenericOperationMetadata generic_metadata = 1;</code>
     *
     * @return Whether the genericMetadata field is set.
     */
    public boolean hasGenericMetadata() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     *
     *
     * <pre>
     * The operation generic information.
     * </pre>
     *
     * <code>.google.cloud.aiplatform.v1.GenericOperationMetadata generic_metadata = 1;</code>
     *
     * @return The genericMetadata.
     */
    public com.google.cloud.aiplatform.v1.GenericOperationMetadata getGenericMetadata() {
      if (genericMetadataBuilder_ == null) {
        return genericMetadata_ == null
            ? com.google.cloud.aiplatform.v1.GenericOperationMetadata.getDefaultInstance()
            : genericMetadata_;
      } else {
        return genericMetadataBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * The operation generic information.
     * </pre>
     *
     * <code>.google.cloud.aiplatform.v1.GenericOperationMetadata generic_metadata = 1;</code>
     */
    public Builder setGenericMetadata(
        com.google.cloud.aiplatform.v1.GenericOperationMetadata value) {
      if (genericMetadataBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        genericMetadata_ = value;
      } else {
        genericMetadataBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The operation generic information.
     * </pre>
     *
     * <code>.google.cloud.aiplatform.v1.GenericOperationMetadata generic_metadata = 1;</code>
     */
    public Builder setGenericMetadata(
        com.google.cloud.aiplatform.v1.GenericOperationMetadata.Builder builderForValue) {
      if (genericMetadataBuilder_ == null) {
        genericMetadata_ = builderForValue.build();
      } else {
        genericMetadataBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The operation generic information.
     * </pre>
     *
     * <code>.google.cloud.aiplatform.v1.GenericOperationMetadata generic_metadata = 1;</code>
     */
    public Builder mergeGenericMetadata(
        com.google.cloud.aiplatform.v1.GenericOperationMetadata value) {
      if (genericMetadataBuilder_ == null) {
        // Merge into the existing value only if one was already set and is not
        // the shared default instance; otherwise replace outright.
        if (((bitField0_ & 0x00000001) != 0)
            && genericMetadata_ != null
            && genericMetadata_
                != com.google.cloud.aiplatform.v1.GenericOperationMetadata.getDefaultInstance()) {
          getGenericMetadataBuilder().mergeFrom(value);
        } else {
          genericMetadata_ = value;
        }
      } else {
        genericMetadataBuilder_.mergeFrom(value);
      }
      if (genericMetadata_ != null) {
        bitField0_ |= 0x00000001;
        onChanged();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The operation generic information.
     * </pre>
     *
     * <code>.google.cloud.aiplatform.v1.GenericOperationMetadata generic_metadata = 1;</code>
     */
    public Builder clearGenericMetadata() {
      bitField0_ = (bitField0_ & ~0x00000001);
      genericMetadata_ = null;
      if (genericMetadataBuilder_ != null) {
        genericMetadataBuilder_.dispose();
        genericMetadataBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The operation generic information.
     * </pre>
     *
     * <code>.google.cloud.aiplatform.v1.GenericOperationMetadata generic_metadata = 1;</code>
     */
    public com.google.cloud.aiplatform.v1.GenericOperationMetadata.Builder
        getGenericMetadataBuilder() {
      bitField0_ |= 0x00000001;
      onChanged();
      return getGenericMetadataFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * The operation generic information.
     * </pre>
     *
     * <code>.google.cloud.aiplatform.v1.GenericOperationMetadata generic_metadata = 1;</code>
     */
    public com.google.cloud.aiplatform.v1.GenericOperationMetadataOrBuilder
        getGenericMetadataOrBuilder() {
      if (genericMetadataBuilder_ != null) {
        return genericMetadataBuilder_.getMessageOrBuilder();
      } else {
        return genericMetadata_ == null
            ? com.google.cloud.aiplatform.v1.GenericOperationMetadata.getDefaultInstance()
            : genericMetadata_;
      }
    }
    /**
     *
     *
     * <pre>
     * The operation generic information.
     * </pre>
     *
     * <code>.google.cloud.aiplatform.v1.GenericOperationMetadata generic_metadata = 1;</code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.aiplatform.v1.GenericOperationMetadata,
            com.google.cloud.aiplatform.v1.GenericOperationMetadata.Builder,
            com.google.cloud.aiplatform.v1.GenericOperationMetadataOrBuilder>
        getGenericMetadataFieldBuilder() {
      if (genericMetadataBuilder_ == null) {
        genericMetadataBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.aiplatform.v1.GenericOperationMetadata,
                com.google.cloud.aiplatform.v1.GenericOperationMetadata.Builder,
                com.google.cloud.aiplatform.v1.GenericOperationMetadataOrBuilder>(
                getGenericMetadata(), getParentForChildren(), isClean());
        // Ownership moves to the field builder once it exists.
        genericMetadata_ = null;
      }
      return genericMetadataBuilder_;
    }
    private com.google.cloud.aiplatform.v1.NearestNeighborSearchOperationMetadata
        nearestNeighborSearchOperationMetadata_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.aiplatform.v1.NearestNeighborSearchOperationMetadata,
            com.google.cloud.aiplatform.v1.NearestNeighborSearchOperationMetadata.Builder,
            com.google.cloud.aiplatform.v1.NearestNeighborSearchOperationMetadataOrBuilder>
        nearestNeighborSearchOperationMetadataBuilder_;
    /**
     *
     *
     * <pre>
     * The operation metadata with regard to Matching Engine Index operation.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1.NearestNeighborSearchOperationMetadata nearest_neighbor_search_operation_metadata = 2;
     * </code>
     *
     * @return Whether the nearestNeighborSearchOperationMetadata field is set.
     */
    public boolean hasNearestNeighborSearchOperationMetadata() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     *
     *
     * <pre>
     * The operation metadata with regard to Matching Engine Index operation.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1.NearestNeighborSearchOperationMetadata nearest_neighbor_search_operation_metadata = 2;
     * </code>
     *
     * @return The nearestNeighborSearchOperationMetadata.
     */
    public com.google.cloud.aiplatform.v1.NearestNeighborSearchOperationMetadata
        getNearestNeighborSearchOperationMetadata() {
      if (nearestNeighborSearchOperationMetadataBuilder_ == null) {
        return nearestNeighborSearchOperationMetadata_ == null
            ? com.google.cloud.aiplatform.v1.NearestNeighborSearchOperationMetadata
                .getDefaultInstance()
            : nearestNeighborSearchOperationMetadata_;
      } else {
        return nearestNeighborSearchOperationMetadataBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * The operation metadata with regard to Matching Engine Index operation.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1.NearestNeighborSearchOperationMetadata nearest_neighbor_search_operation_metadata = 2;
     * </code>
     */
    public Builder setNearestNeighborSearchOperationMetadata(
        com.google.cloud.aiplatform.v1.NearestNeighborSearchOperationMetadata value) {
      if (nearestNeighborSearchOperationMetadataBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        nearestNeighborSearchOperationMetadata_ = value;
      } else {
        nearestNeighborSearchOperationMetadataBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The operation metadata with regard to Matching Engine Index operation.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1.NearestNeighborSearchOperationMetadata nearest_neighbor_search_operation_metadata = 2;
     * </code>
     */
    public Builder setNearestNeighborSearchOperationMetadata(
        com.google.cloud.aiplatform.v1.NearestNeighborSearchOperationMetadata.Builder
            builderForValue) {
      if (nearestNeighborSearchOperationMetadataBuilder_ == null) {
        nearestNeighborSearchOperationMetadata_ = builderForValue.build();
      } else {
        nearestNeighborSearchOperationMetadataBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The operation metadata with regard to Matching Engine Index operation.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1.NearestNeighborSearchOperationMetadata nearest_neighbor_search_operation_metadata = 2;
     * </code>
     */
    public Builder mergeNearestNeighborSearchOperationMetadata(
        com.google.cloud.aiplatform.v1.NearestNeighborSearchOperationMetadata value) {
      if (nearestNeighborSearchOperationMetadataBuilder_ == null) {
        if (((bitField0_ & 0x00000002) != 0)
            && nearestNeighborSearchOperationMetadata_ != null
            && nearestNeighborSearchOperationMetadata_
                != com.google.cloud.aiplatform.v1.NearestNeighborSearchOperationMetadata
                    .getDefaultInstance()) {
          getNearestNeighborSearchOperationMetadataBuilder().mergeFrom(value);
        } else {
          nearestNeighborSearchOperationMetadata_ = value;
        }
      } else {
        nearestNeighborSearchOperationMetadataBuilder_.mergeFrom(value);
      }
      if (nearestNeighborSearchOperationMetadata_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The operation metadata with regard to Matching Engine Index operation.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1.NearestNeighborSearchOperationMetadata nearest_neighbor_search_operation_metadata = 2;
     * </code>
     */
    public Builder clearNearestNeighborSearchOperationMetadata() {
      bitField0_ = (bitField0_ & ~0x00000002);
      nearestNeighborSearchOperationMetadata_ = null;
      if (nearestNeighborSearchOperationMetadataBuilder_ != null) {
        nearestNeighborSearchOperationMetadataBuilder_.dispose();
        nearestNeighborSearchOperationMetadataBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The operation metadata with regard to Matching Engine Index operation.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1.NearestNeighborSearchOperationMetadata nearest_neighbor_search_operation_metadata = 2;
     * </code>
     */
    public com.google.cloud.aiplatform.v1.NearestNeighborSearchOperationMetadata.Builder
        getNearestNeighborSearchOperationMetadataBuilder() {
      bitField0_ |= 0x00000002;
      onChanged();
      return getNearestNeighborSearchOperationMetadataFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * The operation metadata with regard to Matching Engine Index operation.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1.NearestNeighborSearchOperationMetadata nearest_neighbor_search_operation_metadata = 2;
     * </code>
     */
    public com.google.cloud.aiplatform.v1.NearestNeighborSearchOperationMetadataOrBuilder
        getNearestNeighborSearchOperationMetadataOrBuilder() {
      if (nearestNeighborSearchOperationMetadataBuilder_ != null) {
        return nearestNeighborSearchOperationMetadataBuilder_.getMessageOrBuilder();
      } else {
        return nearestNeighborSearchOperationMetadata_ == null
            ? com.google.cloud.aiplatform.v1.NearestNeighborSearchOperationMetadata
                .getDefaultInstance()
            : nearestNeighborSearchOperationMetadata_;
      }
    }
    /**
     *
     *
     * <pre>
     * The operation metadata with regard to Matching Engine Index operation.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1.NearestNeighborSearchOperationMetadata nearest_neighbor_search_operation_metadata = 2;
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.aiplatform.v1.NearestNeighborSearchOperationMetadata,
            com.google.cloud.aiplatform.v1.NearestNeighborSearchOperationMetadata.Builder,
            com.google.cloud.aiplatform.v1.NearestNeighborSearchOperationMetadataOrBuilder>
        getNearestNeighborSearchOperationMetadataFieldBuilder() {
      if (nearestNeighborSearchOperationMetadataBuilder_ == null) {
        nearestNeighborSearchOperationMetadataBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.aiplatform.v1.NearestNeighborSearchOperationMetadata,
                com.google.cloud.aiplatform.v1.NearestNeighborSearchOperationMetadata.Builder,
                com.google.cloud.aiplatform.v1.NearestNeighborSearchOperationMetadataOrBuilder>(
                getNearestNeighborSearchOperationMetadata(), getParentForChildren(), isClean());
        nearestNeighborSearchOperationMetadata_ = null;
      }
      return nearestNeighborSearchOperationMetadataBuilder_;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1.UpdateIndexOperationMetadata)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1.UpdateIndexOperationMetadata)
  // Shared immutable default instance; unset message fields resolve to it.
  private static final com.google.cloud.aiplatform.v1.UpdateIndexOperationMetadata DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1.UpdateIndexOperationMetadata();
  }
  /** Returns the singleton default (all-fields-unset) instance. */
  public static com.google.cloud.aiplatform.v1.UpdateIndexOperationMetadata getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser used by all parseFrom overloads; on failure it attaches the
  // partially-parsed message to the thrown InvalidProtocolBufferException.
  private static final com.google.protobuf.Parser<UpdateIndexOperationMetadata> PARSER =
      new com.google.protobuf.AbstractParser<UpdateIndexOperationMetadata>() {
        @java.lang.Override
        public UpdateIndexOperationMetadata parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  /** Static accessor for the shared message parser. */
  public static com.google.protobuf.Parser<UpdateIndexOperationMetadata> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<UpdateIndexOperationMetadata> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.aiplatform.v1.UpdateIndexOperationMetadata getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 37,826 | java-appengine-admin/proto-google-cloud-appengine-admin-v1/src/main/java/com/google/appengine/v1/ApiConfigHandler.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/appengine/v1/app_yaml.proto
// Protobuf Java Version: 3.25.8
package com.google.appengine.v1;
/**
*
*
* <pre>
* [Google Cloud Endpoints](https://cloud.google.com/appengine/docs/python/endpoints/)
* configuration for API handlers.
* </pre>
*
* Protobuf type {@code google.appengine.v1.ApiConfigHandler}
*/
public final class ApiConfigHandler extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.appengine.v1.ApiConfigHandler)
ApiConfigHandlerOrBuilder {
private static final long serialVersionUID = 0L;
  // Use ApiConfigHandler.newBuilder() to construct.
  private ApiConfigHandler(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // No-arg constructor: initializes every field to its proto3 default
  // (enum wire value 0, empty string).
  private ApiConfigHandler() {
    authFailAction_ = 0;
    login_ = 0;
    script_ = "";
    securityLevel_ = 0;
    url_ = "";
  }
  // Runtime hook used by the protobuf library to create fresh instances.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ApiConfigHandler();
  }
  /** Returns the proto descriptor for {@code google.appengine.v1.ApiConfigHandler}. */
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.appengine.v1.AppYamlProto
        .internal_static_google_appengine_v1_ApiConfigHandler_descriptor;
  }
  // Binds the descriptor to the message/builder classes for reflective access.
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.appengine.v1.AppYamlProto
        .internal_static_google_appengine_v1_ApiConfigHandler_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.appengine.v1.ApiConfigHandler.class,
            com.google.appengine.v1.ApiConfigHandler.Builder.class);
  }
  public static final int AUTH_FAIL_ACTION_FIELD_NUMBER = 1;
  // Raw wire value of the enum; kept as int so unknown values survive round-trips.
  private int authFailAction_ = 0;
  /**
   *
   *
   * <pre>
   * Action to take when users access resources that require
   * authentication. Defaults to `redirect`.
   * </pre>
   *
   * <code>.google.appengine.v1.AuthFailAction auth_fail_action = 1;</code>
   *
   * @return The enum numeric value on the wire for authFailAction.
   */
  @java.lang.Override
  public int getAuthFailActionValue() {
    return authFailAction_;
  }
/**
*
*
* <pre>
* Action to take when users access resources that require
* authentication. Defaults to `redirect`.
* </pre>
*
* <code>.google.appengine.v1.AuthFailAction auth_fail_action = 1;</code>
*
* @return The authFailAction.
*/
@java.lang.Override
public com.google.appengine.v1.AuthFailAction getAuthFailAction() {
com.google.appengine.v1.AuthFailAction result =
com.google.appengine.v1.AuthFailAction.forNumber(authFailAction_);
return result == null ? com.google.appengine.v1.AuthFailAction.UNRECOGNIZED : result;
}
  public static final int LOGIN_FIELD_NUMBER = 2;
  // Raw wire value of the enum; kept as int so unknown values survive round-trips.
  private int login_ = 0;
  /**
   *
   *
   * <pre>
   * Level of login required to access this resource. Defaults to
   * `optional`.
   * </pre>
   *
   * <code>.google.appengine.v1.LoginRequirement login = 2;</code>
   *
   * @return The enum numeric value on the wire for login.
   */
  @java.lang.Override
  public int getLoginValue() {
    return login_;
  }
/**
*
*
* <pre>
* Level of login required to access this resource. Defaults to
* `optional`.
* </pre>
*
* <code>.google.appengine.v1.LoginRequirement login = 2;</code>
*
* @return The login.
*/
@java.lang.Override
public com.google.appengine.v1.LoginRequirement getLogin() {
com.google.appengine.v1.LoginRequirement result =
com.google.appengine.v1.LoginRequirement.forNumber(login_);
return result == null ? com.google.appengine.v1.LoginRequirement.UNRECOGNIZED : result;
}
  public static final int SCRIPT_FIELD_NUMBER = 3;
  // Holds either a String or a ByteString; decoded lazily and cached as String.
  @SuppressWarnings("serial")
  private volatile java.lang.Object script_ = "";
  /**
   *
   *
   * <pre>
   * Path to the script from the application root directory.
   * </pre>
   *
   * <code>string script = 3;</code>
   *
   * @return The script.
   */
  @java.lang.Override
  public java.lang.String getScript() {
    java.lang.Object ref = script_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      // Cache the decoded String so subsequent reads skip the UTF-8 decode.
      script_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Path to the script from the application root directory.
   * </pre>
   *
   * <code>string script = 3;</code>
   *
   * @return The bytes for script.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getScriptBytes() {
    java.lang.Object ref = script_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      // Cache the encoded ByteString; getScript() will decode it back on demand.
      script_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int SECURITY_LEVEL_FIELD_NUMBER = 4;
  // Raw wire value of the enum; kept as int so unknown values survive round-trips.
  private int securityLevel_ = 0;
  /**
   *
   *
   * <pre>
   * Security (HTTPS) enforcement for this URL.
   * </pre>
   *
   * <code>.google.appengine.v1.SecurityLevel security_level = 4;</code>
   *
   * @return The enum numeric value on the wire for securityLevel.
   */
  @java.lang.Override
  public int getSecurityLevelValue() {
    return securityLevel_;
  }
/**
*
*
* <pre>
* Security (HTTPS) enforcement for this URL.
* </pre>
*
* <code>.google.appengine.v1.SecurityLevel security_level = 4;</code>
*
* @return The securityLevel.
*/
@java.lang.Override
public com.google.appengine.v1.SecurityLevel getSecurityLevel() {
com.google.appengine.v1.SecurityLevel result =
com.google.appengine.v1.SecurityLevel.forNumber(securityLevel_);
return result == null ? com.google.appengine.v1.SecurityLevel.UNRECOGNIZED : result;
}
  public static final int URL_FIELD_NUMBER = 5;
  // Holds either a String or a ByteString; decoded lazily and cached as String.
  @SuppressWarnings("serial")
  private volatile java.lang.Object url_ = "";
  /**
   *
   *
   * <pre>
   * URL to serve the endpoint at.
   * </pre>
   *
   * <code>string url = 5;</code>
   *
   * @return The url.
   */
  @java.lang.Override
  public java.lang.String getUrl() {
    java.lang.Object ref = url_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      // Cache the decoded String so subsequent reads skip the UTF-8 decode.
      url_ = s;
      return s;
    }
  }
/**
*
*
* <pre>
* URL to serve the endpoint at.
* </pre>
*
* <code>string url = 5;</code>
*
* @return The bytes for url.
*/
@java.lang.Override
public com.google.protobuf.ByteString getUrlBytes() {
java.lang.Object ref = url_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
url_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
  // Memoized initialization state: -1 = unknown, 0 = not initialized, 1 = initialized.
  private byte memoizedIsInitialized = -1;

  /** Always true: this proto3 message has no required fields. */
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  /**
   * Serializes non-default fields in ascending tag order (1..5), then any
   * preserved unknown fields. Generated code — do not hand-edit.
   */
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (authFailAction_
        != com.google.appengine.v1.AuthFailAction.AUTH_FAIL_ACTION_UNSPECIFIED.getNumber()) {
      output.writeEnum(1, authFailAction_);
    }
    if (login_ != com.google.appengine.v1.LoginRequirement.LOGIN_UNSPECIFIED.getNumber()) {
      output.writeEnum(2, login_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(script_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, script_);
    }
    if (securityLevel_ != com.google.appengine.v1.SecurityLevel.SECURE_UNSPECIFIED.getNumber()) {
      output.writeEnum(4, securityLevel_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(url_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 5, url_);
    }
    getUnknownFields().writeTo(output);
  }
  /**
   * Computes (and memoizes in {@code memoizedSize}) the serialized byte size,
   * mirroring the field checks in {@link #writeTo}. Generated code.
   */
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (authFailAction_
        != com.google.appengine.v1.AuthFailAction.AUTH_FAIL_ACTION_UNSPECIFIED.getNumber()) {
      size += com.google.protobuf.CodedOutputStream.computeEnumSize(1, authFailAction_);
    }
    if (login_ != com.google.appengine.v1.LoginRequirement.LOGIN_UNSPECIFIED.getNumber()) {
      size += com.google.protobuf.CodedOutputStream.computeEnumSize(2, login_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(script_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, script_);
    }
    if (securityLevel_ != com.google.appengine.v1.SecurityLevel.SECURE_UNSPECIFIED.getNumber()) {
      size += com.google.protobuf.CodedOutputStream.computeEnumSize(4, securityLevel_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(url_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, url_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  /** Field-by-field structural equality, including unknown fields. Generated code. */
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.appengine.v1.ApiConfigHandler)) {
      return super.equals(obj);
    }
    com.google.appengine.v1.ApiConfigHandler other = (com.google.appengine.v1.ApiConfigHandler) obj;
    if (authFailAction_ != other.authFailAction_) return false;
    if (login_ != other.login_) return false;
    if (!getScript().equals(other.getScript())) return false;
    if (securityLevel_ != other.securityLevel_) return false;
    if (!getUrl().equals(other.getUrl())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  /** Memoized hash consistent with {@link #equals}. Generated code. */
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + AUTH_FAIL_ACTION_FIELD_NUMBER;
    hash = (53 * hash) + authFailAction_;
    hash = (37 * hash) + LOGIN_FIELD_NUMBER;
    hash = (53 * hash) + login_;
    hash = (37 * hash) + SCRIPT_FIELD_NUMBER;
    hash = (53 * hash) + getScript().hashCode();
    hash = (37 * hash) + SECURITY_LEVEL_FIELD_NUMBER;
    hash = (53 * hash) + securityLevel_;
    hash = (37 * hash) + URL_FIELD_NUMBER;
    hash = (53 * hash) + getUrl().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points for every supported input source.
  public static com.google.appengine.v1.ApiConfigHandler parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.appengine.v1.ApiConfigHandler parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.appengine.v1.ApiConfigHandler parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.appengine.v1.ApiConfigHandler parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.appengine.v1.ApiConfigHandler parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.appengine.v1.ApiConfigHandler parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.appengine.v1.ApiConfigHandler parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.appengine.v1.ApiConfigHandler parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // Delimited variants read a length-prefixed message from the stream.
  public static com.google.appengine.v1.ApiConfigHandler parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.appengine.v1.ApiConfigHandler parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.appengine.v1.ApiConfigHandler parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.appengine.v1.ApiConfigHandler parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  /** @return a fresh builder seeded from the default instance. */
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  /** @return a builder pre-populated with {@code prototype}'s field values. */
  public static Builder newBuilder(com.google.appengine.v1.ApiConfigHandler prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    // Avoid copying when this is the default instance.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * [Google Cloud Endpoints](https://cloud.google.com/appengine/docs/python/endpoints/)
   * configuration for API handlers.
   * </pre>
   *
   * Protobuf type {@code google.appengine.v1.ApiConfigHandler}
   */
  // NOTE(review): protoc-generated builder — do not hand-edit; regenerate from the .proto.
  // bitField0_ tracks which fields were explicitly set (one bit per field, tags 1..5).
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.appengine.v1.ApiConfigHandler)
      com.google.appengine.v1.ApiConfigHandlerOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.appengine.v1.AppYamlProto
          .internal_static_google_appengine_v1_ApiConfigHandler_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.appengine.v1.AppYamlProto
          .internal_static_google_appengine_v1_ApiConfigHandler_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.appengine.v1.ApiConfigHandler.class,
              com.google.appengine.v1.ApiConfigHandler.Builder.class);
    }

    // Construct using com.google.appengine.v1.ApiConfigHandler.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    /** Resets every field to its proto3 default and clears the set-bits. */
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      authFailAction_ = 0;
      login_ = 0;
      script_ = "";
      securityLevel_ = 0;
      url_ = "";
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.appengine.v1.AppYamlProto
          .internal_static_google_appengine_v1_ApiConfigHandler_descriptor;
    }

    @java.lang.Override
    public com.google.appengine.v1.ApiConfigHandler getDefaultInstanceForType() {
      return com.google.appengine.v1.ApiConfigHandler.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.appengine.v1.ApiConfigHandler build() {
      com.google.appengine.v1.ApiConfigHandler result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.appengine.v1.ApiConfigHandler buildPartial() {
      com.google.appengine.v1.ApiConfigHandler result =
          new com.google.appengine.v1.ApiConfigHandler(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies only the fields whose set-bit is on into the freshly built message.
    private void buildPartial0(com.google.appengine.v1.ApiConfigHandler result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.authFailAction_ = authFailAction_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.login_ = login_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.script_ = script_;
      }
      if (((from_bitField0_ & 0x00000008) != 0)) {
        result.securityLevel_ = securityLevel_;
      }
      if (((from_bitField0_ & 0x00000010) != 0)) {
        result.url_ = url_;
      }
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.appengine.v1.ApiConfigHandler) {
        return mergeFrom((com.google.appengine.v1.ApiConfigHandler) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    /** Merges non-default fields of {@code other} into this builder (proto3 merge semantics). */
    public Builder mergeFrom(com.google.appengine.v1.ApiConfigHandler other) {
      if (other == com.google.appengine.v1.ApiConfigHandler.getDefaultInstance()) return this;
      if (other.authFailAction_ != 0) {
        setAuthFailActionValue(other.getAuthFailActionValue());
      }
      if (other.login_ != 0) {
        setLoginValue(other.getLoginValue());
      }
      if (!other.getScript().isEmpty()) {
        script_ = other.script_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      if (other.securityLevel_ != 0) {
        setSecurityLevelValue(other.getSecurityLevelValue());
      }
      if (!other.getUrl().isEmpty()) {
        url_ = other.url_;
        bitField0_ |= 0x00000010;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    /** Parses fields from the wire, dispatching on each tag; unknown tags are preserved. */
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 8:
              {
                authFailAction_ = input.readEnum();
                bitField0_ |= 0x00000001;
                break;
              } // case 8
            case 16:
              {
                login_ = input.readEnum();
                bitField0_ |= 0x00000002;
                break;
              } // case 16
            case 26:
              {
                script_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            case 32:
              {
                securityLevel_ = input.readEnum();
                bitField0_ |= 0x00000008;
                break;
              } // case 32
            case 42:
              {
                url_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000010;
                break;
              } // case 42
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    private int bitField0_;

    private int authFailAction_ = 0;
    /**
     *
     *
     * <pre>
     * Action to take when users access resources that require
     * authentication. Defaults to `redirect`.
     * </pre>
     *
     * <code>.google.appengine.v1.AuthFailAction auth_fail_action = 1;</code>
     *
     * @return The enum numeric value on the wire for authFailAction.
     */
    @java.lang.Override
    public int getAuthFailActionValue() {
      return authFailAction_;
    }
    /**
     *
     *
     * <pre>
     * Action to take when users access resources that require
     * authentication. Defaults to `redirect`.
     * </pre>
     *
     * <code>.google.appengine.v1.AuthFailAction auth_fail_action = 1;</code>
     *
     * @param value The enum numeric value on the wire for authFailAction to set.
     * @return This builder for chaining.
     */
    public Builder setAuthFailActionValue(int value) {
      authFailAction_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Action to take when users access resources that require
     * authentication. Defaults to `redirect`.
     * </pre>
     *
     * <code>.google.appengine.v1.AuthFailAction auth_fail_action = 1;</code>
     *
     * @return The authFailAction.
     */
    @java.lang.Override
    public com.google.appengine.v1.AuthFailAction getAuthFailAction() {
      com.google.appengine.v1.AuthFailAction result =
          com.google.appengine.v1.AuthFailAction.forNumber(authFailAction_);
      return result == null ? com.google.appengine.v1.AuthFailAction.UNRECOGNIZED : result;
    }
    /**
     *
     *
     * <pre>
     * Action to take when users access resources that require
     * authentication. Defaults to `redirect`.
     * </pre>
     *
     * <code>.google.appengine.v1.AuthFailAction auth_fail_action = 1;</code>
     *
     * @param value The authFailAction to set.
     * @return This builder for chaining.
     */
    public Builder setAuthFailAction(com.google.appengine.v1.AuthFailAction value) {
      if (value == null) {
        throw new NullPointerException();
      }
      bitField0_ |= 0x00000001;
      authFailAction_ = value.getNumber();
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Action to take when users access resources that require
     * authentication. Defaults to `redirect`.
     * </pre>
     *
     * <code>.google.appengine.v1.AuthFailAction auth_fail_action = 1;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearAuthFailAction() {
      bitField0_ = (bitField0_ & ~0x00000001);
      authFailAction_ = 0;
      onChanged();
      return this;
    }

    private int login_ = 0;
    /**
     *
     *
     * <pre>
     * Level of login required to access this resource. Defaults to
     * `optional`.
     * </pre>
     *
     * <code>.google.appengine.v1.LoginRequirement login = 2;</code>
     *
     * @return The enum numeric value on the wire for login.
     */
    @java.lang.Override
    public int getLoginValue() {
      return login_;
    }
    /**
     *
     *
     * <pre>
     * Level of login required to access this resource. Defaults to
     * `optional`.
     * </pre>
     *
     * <code>.google.appengine.v1.LoginRequirement login = 2;</code>
     *
     * @param value The enum numeric value on the wire for login to set.
     * @return This builder for chaining.
     */
    public Builder setLoginValue(int value) {
      login_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Level of login required to access this resource. Defaults to
     * `optional`.
     * </pre>
     *
     * <code>.google.appengine.v1.LoginRequirement login = 2;</code>
     *
     * @return The login.
     */
    @java.lang.Override
    public com.google.appengine.v1.LoginRequirement getLogin() {
      com.google.appengine.v1.LoginRequirement result =
          com.google.appengine.v1.LoginRequirement.forNumber(login_);
      return result == null ? com.google.appengine.v1.LoginRequirement.UNRECOGNIZED : result;
    }
    /**
     *
     *
     * <pre>
     * Level of login required to access this resource. Defaults to
     * `optional`.
     * </pre>
     *
     * <code>.google.appengine.v1.LoginRequirement login = 2;</code>
     *
     * @param value The login to set.
     * @return This builder for chaining.
     */
    public Builder setLogin(com.google.appengine.v1.LoginRequirement value) {
      if (value == null) {
        throw new NullPointerException();
      }
      bitField0_ |= 0x00000002;
      login_ = value.getNumber();
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Level of login required to access this resource. Defaults to
     * `optional`.
     * </pre>
     *
     * <code>.google.appengine.v1.LoginRequirement login = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearLogin() {
      bitField0_ = (bitField0_ & ~0x00000002);
      login_ = 0;
      onChanged();
      return this;
    }

    private java.lang.Object script_ = "";
    /**
     *
     *
     * <pre>
     * Path to the script from the application root directory.
     * </pre>
     *
     * <code>string script = 3;</code>
     *
     * @return The script.
     */
    public java.lang.String getScript() {
      java.lang.Object ref = script_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        script_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Path to the script from the application root directory.
     * </pre>
     *
     * <code>string script = 3;</code>
     *
     * @return The bytes for script.
     */
    public com.google.protobuf.ByteString getScriptBytes() {
      java.lang.Object ref = script_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        script_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Path to the script from the application root directory.
     * </pre>
     *
     * <code>string script = 3;</code>
     *
     * @param value The script to set.
     * @return This builder for chaining.
     */
    public Builder setScript(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      script_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Path to the script from the application root directory.
     * </pre>
     *
     * <code>string script = 3;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearScript() {
      script_ = getDefaultInstance().getScript();
      bitField0_ = (bitField0_ & ~0x00000004);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Path to the script from the application root directory.
     * </pre>
     *
     * <code>string script = 3;</code>
     *
     * @param value The bytes for script to set.
     * @return This builder for chaining.
     */
    public Builder setScriptBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      script_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    private int securityLevel_ = 0;
    /**
     *
     *
     * <pre>
     * Security (HTTPS) enforcement for this URL.
     * </pre>
     *
     * <code>.google.appengine.v1.SecurityLevel security_level = 4;</code>
     *
     * @return The enum numeric value on the wire for securityLevel.
     */
    @java.lang.Override
    public int getSecurityLevelValue() {
      return securityLevel_;
    }
    /**
     *
     *
     * <pre>
     * Security (HTTPS) enforcement for this URL.
     * </pre>
     *
     * <code>.google.appengine.v1.SecurityLevel security_level = 4;</code>
     *
     * @param value The enum numeric value on the wire for securityLevel to set.
     * @return This builder for chaining.
     */
    public Builder setSecurityLevelValue(int value) {
      securityLevel_ = value;
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Security (HTTPS) enforcement for this URL.
     * </pre>
     *
     * <code>.google.appengine.v1.SecurityLevel security_level = 4;</code>
     *
     * @return The securityLevel.
     */
    @java.lang.Override
    public com.google.appengine.v1.SecurityLevel getSecurityLevel() {
      com.google.appengine.v1.SecurityLevel result =
          com.google.appengine.v1.SecurityLevel.forNumber(securityLevel_);
      return result == null ? com.google.appengine.v1.SecurityLevel.UNRECOGNIZED : result;
    }
    /**
     *
     *
     * <pre>
     * Security (HTTPS) enforcement for this URL.
     * </pre>
     *
     * <code>.google.appengine.v1.SecurityLevel security_level = 4;</code>
     *
     * @param value The securityLevel to set.
     * @return This builder for chaining.
     */
    public Builder setSecurityLevel(com.google.appengine.v1.SecurityLevel value) {
      if (value == null) {
        throw new NullPointerException();
      }
      bitField0_ |= 0x00000008;
      securityLevel_ = value.getNumber();
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Security (HTTPS) enforcement for this URL.
     * </pre>
     *
     * <code>.google.appengine.v1.SecurityLevel security_level = 4;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearSecurityLevel() {
      bitField0_ = (bitField0_ & ~0x00000008);
      securityLevel_ = 0;
      onChanged();
      return this;
    }

    private java.lang.Object url_ = "";
    /**
     *
     *
     * <pre>
     * URL to serve the endpoint at.
     * </pre>
     *
     * <code>string url = 5;</code>
     *
     * @return The url.
     */
    public java.lang.String getUrl() {
      java.lang.Object ref = url_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        url_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * URL to serve the endpoint at.
     * </pre>
     *
     * <code>string url = 5;</code>
     *
     * @return The bytes for url.
     */
    public com.google.protobuf.ByteString getUrlBytes() {
      java.lang.Object ref = url_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        url_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * URL to serve the endpoint at.
     * </pre>
     *
     * <code>string url = 5;</code>
     *
     * @param value The url to set.
     * @return This builder for chaining.
     */
    public Builder setUrl(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      url_ = value;
      bitField0_ |= 0x00000010;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * URL to serve the endpoint at.
     * </pre>
     *
     * <code>string url = 5;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearUrl() {
      url_ = getDefaultInstance().getUrl();
      bitField0_ = (bitField0_ & ~0x00000010);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * URL to serve the endpoint at.
     * </pre>
     *
     * <code>string url = 5;</code>
     *
     * @param value The bytes for url to set.
     * @return This builder for chaining.
     */
    public Builder setUrlBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      url_ = value;
      bitField0_ |= 0x00000010;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.appengine.v1.ApiConfigHandler)
  }
  // @@protoc_insertion_point(class_scope:google.appengine.v1.ApiConfigHandler)
  // Singleton default instance shared by all callers; also serves as the prototype for builders.
  private static final com.google.appengine.v1.ApiConfigHandler DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.appengine.v1.ApiConfigHandler();
  }

  public static com.google.appengine.v1.ApiConfigHandler getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Parser delegates to Builder.mergeFrom and attaches the partial message to any parse failure.
  private static final com.google.protobuf.Parser<ApiConfigHandler> PARSER =
      new com.google.protobuf.AbstractParser<ApiConfigHandler>() {
        @java.lang.Override
        public ApiConfigHandler parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<ApiConfigHandler> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<ApiConfigHandler> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.appengine.v1.ApiConfigHandler getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/usergrid | 37,984 | stack/rest/src/main/java/org/apache/usergrid/rest/applications/ServiceResource.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.usergrid.rest.applications;
import com.amazonaws.AmazonServiceException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.jaxrs.json.annotation.JSONP;
import com.google.cloud.storage.StorageException;
import org.apache.commons.lang.StringUtils;
import org.apache.usergrid.management.OrganizationConfig;
import org.apache.usergrid.management.OrganizationConfigProps;
import org.apache.usergrid.persistence.Entity;
import org.apache.usergrid.persistence.EntityManager;
import org.apache.usergrid.persistence.Query;
import org.apache.usergrid.persistence.QueryUtils;
import org.apache.usergrid.rest.AbstractContextResource;
import org.apache.usergrid.rest.ApiResponse;
import org.apache.usergrid.rest.RootResource;
import org.apache.usergrid.rest.applications.assets.AssetsResource;
import org.apache.usergrid.rest.security.annotations.CheckPermissionsForPath;
import org.apache.usergrid.security.oauth.AccessInfo;
import org.apache.usergrid.security.shiro.utils.SubjectUtils;
import org.apache.usergrid.services.*;
import org.apache.usergrid.services.assets.BinaryStoreFactory;
import org.apache.usergrid.services.assets.data.*;
import org.apache.usergrid.services.exceptions.AwsPropertiesNotFoundException;
import org.apache.usergrid.utils.JsonUtils;
import org.glassfish.jersey.media.multipart.BodyPart;
import org.glassfish.jersey.media.multipart.BodyPartEntity;
import org.glassfish.jersey.media.multipart.FormDataBodyPart;
import org.glassfish.jersey.media.multipart.FormDataMultiPart;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.BeanInfoFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Component;
import javax.security.auth.Subject;
import javax.ws.rs.*;
import javax.ws.rs.core.*;
import java.io.InputStream;
import java.util.*;
import static javax.ws.rs.core.MediaType.APPLICATION_JSON_TYPE;
import static org.apache.usergrid.management.AccountCreationProps.PROPERTIES_USERGRID_BINARY_UPLOADER;
@Component
@Scope("prototype")
@Produces({
MediaType.APPLICATION_JSON, "application/javascript", "application/x-javascript", "text/ecmascript",
"application/ecmascript", "text/jscript"
})
public class ServiceResource extends AbstractContextResource {
    protected static final Logger logger = LoggerFactory.getLogger( ServiceResource.class );

    // Multipart form field name expected to carry the uploaded file content.
    private static final String FILE_FIELD_NAME = "file";

    // Binary store used for asset uploads; resolved via binaryStoreFactory
    // (presumably per organization/config — confirm against usages later in this file).
    private BinaryStore binaryStore;

    @Autowired
    private BinaryStoreFactory binaryStoreFactory;

    // Service manager for the current application; inherited from the parent
    // ServiceResource when this resource is nested (see setParent).
    protected ServiceManager services;

    // Parameters accumulated while walking the request path; lazily created,
    // or delegated to the nearest ServiceResource ancestor.
    List<ServiceParameter> serviceParameters = null;

    /** No-arg constructor required by Jersey/Spring prototype-scoped instantiation. */
    public ServiceResource() {
    }
@Override
public void setParent( AbstractContextResource parent ) {
super.setParent( parent );
if ( parent instanceof ServiceResource ) {
services = ( ( ServiceResource ) parent ).services;
}
}
public ServiceResource getServiceResourceParent() {
if ( parent instanceof ServiceResource ) {
return ( ServiceResource ) parent;
}
return null;
}
    /** @return the service manager for this resource (set via setParent when nested). */
    public ServiceManager getServices() {
        return services;
    }

    /** @return the UUID of the application served by this resource. */
    public UUID getApplicationId() {
        return services.getApplicationId();
    }

    /** @return the name of the organization that owns the current application. */
    public String getOrganizationName() {
        return services.getApplication().getOrganizationName();
    }
public List<ServiceParameter> getServiceParameters() {
if ( serviceParameters != null ) {
return serviceParameters;
}
if ( getServiceResourceParent() != null ) {
return getServiceResourceParent().getServiceParameters();
}
serviceParameters = new ArrayList<>();
return serviceParameters;
}
public static List<ServiceParameter> addMatrixParams( List<ServiceParameter> parameters, UriInfo ui,
PathSegment ps ) throws Exception {
MultivaluedMap<String, String> params = ps.getMatrixParameters();
if ( params != null && params.size() > 0) {
Query query = Query.fromQueryParams( params );
if ( query != null ) {
parameters = ServiceParameter.addParameter( parameters, query );
}
}
return parameters;
}
public static List<ServiceParameter> addQueryParams( List<ServiceParameter> parameters, UriInfo ui )
throws Exception {
MultivaluedMap<String, String> params = ui.getQueryParameters();
if ( params != null && params.size() > 0) {
//TODO TN query parameters are not being correctly decoded here. The URL encoded strings
//aren't getting decoded properly
Query query = Query.fromQueryParams( params );
if(query == null && parameters.size() > 0 && parameters.get( 0 ).isId()){
query = Query.fromUUID( parameters.get( 0 ).getId() );
}
if ( query != null ) {
parameters = ServiceParameter.addParameter( parameters, query );
}
}
return parameters;
}
@Path("file")
public AbstractContextResource getFileResource( @Context UriInfo ui ) throws Exception {
if(logger.isTraceEnabled()){
logger.trace( "ServiceResource.getFileResource" );
}
ServiceParameter.addParameter( getServiceParameters(), "assets" );
PathSegment ps = getFirstPathSegment( "assets" );
if ( ps != null ) {
addMatrixParams( getServiceParameters(), ui, ps );
}
return getSubResource( AssetsResource.class );
}
@Path(RootResource.ENTITY_ID_PATH)
public AbstractContextResource addIdParameter( @Context UriInfo ui, @PathParam("entityId") PathSegment entityId )
throws Exception {
if(logger.isTraceEnabled()){
logger.trace( "ServiceResource.addIdParameter" );
}
UUID itemId = UUID.fromString( entityId.getPath() );
ServiceParameter.addParameter( getServiceParameters(), itemId );
addMatrixParams( getServiceParameters(), ui, entityId );
return getSubResource( ServiceResource.class );
}
/**
 * Sub-resource locator for named path segments. A segment that opens with
 * '{' is treated as an inline JSON query; anything else is recorded as a
 * plain name parameter. Matrix parameters on the segment are folded in as
 * well, then resolution continues in another ServiceResource.
 */
@Path("{itemName}")
public AbstractContextResource addNameParameter( @Context UriInfo ui, @PathParam("itemName") PathSegment itemName )
        throws Exception {

    if(logger.isTraceEnabled()){
        logger.trace( "ServiceResource.addNameParameter" );
        logger.trace( "Current segment is {}", itemName.getPath() );
    }

    String segment = itemName.getPath();
    if ( segment.startsWith( "{" ) ) {
        Query query = Query.fromJsonString( segment );
        if ( query != null ) {
            ServiceParameter.addParameter( getServiceParameters(), query );
        }
    }
    else {
        ServiceParameter.addParameter( getServiceParameters(), segment );
    }

    addMatrixParams( getServiceParameters(), ui, itemName );
    return getSubResource( ServiceResource.class );
}
/**
 * Executes a GET against a collection's settings via
 * {@link AbstractCollectionService#getCollectionSettings}, copying the
 * resulting data/metadata into the API response.
 *
 * <p>Fixes: the trace message previously logged "executeServiceRequest"
 * (copy-paste from the generic handler); an unused local reading the
 * "connections" query parameter and commented-out dead code were removed.
 *
 * @param ui       request URI info (query params are folded into the request)
 * @param response API response envelope to populate
 * @param action   the service action to execute
 * @param payload  request payload (may be null)
 * @return the service results, or null if the settings call produced none
 */
public ServiceResults executeServiceGetRequestForSettings(UriInfo ui, ApiResponse response, ServiceAction action,
                                                          ServicePayload payload) throws Exception {

    if(logger.isTraceEnabled()){
        logger.trace( "ServiceResource.executeServiceGetRequestForSettings" );
    }

    boolean tree = "true".equalsIgnoreCase( ui.getQueryParameters().getFirst( "tree" ) );

    // Settings requests always include both connection directions.
    boolean returnInboundConnections = true;
    boolean returnOutboundConnections = true;

    addQueryParams( getServiceParameters(), ui );

    ServiceRequest r = services.newRequest( action, tree, getServiceParameters(), payload,
        returnInboundConnections, returnOutboundConnections, false, false);
    response.setServiceRequest( r );

    AbstractCollectionService abstractCollectionService = new AbstractCollectionService();
    ServiceResults results = abstractCollectionService.getCollectionSettings( r );

    if ( results != null ) {
        if ( results.hasData() ) {
            response.setData( results.getData() );
        }
        if ( results.getServiceMetadata() != null ) {
            response.setMetadata( results.getServiceMetadata() );
        }
        Query query = r.getLastQuery();
        // Selection queries short-circuit: the list/count/next/path fields
        // replace the normal results payload.
        if ( query != null && query.hasSelectSubjects() ) {
            response.setList( QueryUtils.getSelectionResults( query, results ) );
            response.setCount( response.getList().size() );
            response.setNext( results.getNextResult() );
            response.setPath( results.getPath() );
            return results;
        }
        response.setResults( results );
    }

    httpServletRequest.setAttribute( "applicationId", services.getApplicationId() );
    return results;
}
/**
 * Executes a POST against a collection's settings via
 * {@link AbstractCollectionService#postCollectionSettings}, copying the
 * resulting data/metadata into the API response.
 *
 * <p>Fixes: the trace message previously logged "executeServiceRequest"
 * (copy-paste from the generic handler); an unused local reading the
 * "connections" query parameter and commented-out dead code were removed.
 *
 * @param ui       request URI info (query params are folded into the request)
 * @param response API response envelope to populate
 * @param action   the service action to execute
 * @param payload  request payload with the new settings
 * @return the service results, or null if the settings call produced none
 */
public ServiceResults executeServicePostRequestForSettings(UriInfo ui, ApiResponse response, ServiceAction action,
                                                           ServicePayload payload) throws Exception {

    if(logger.isTraceEnabled()){
        logger.trace( "ServiceResource.executeServicePostRequestForSettings" );
    }

    boolean tree = "true".equalsIgnoreCase( ui.getQueryParameters().getFirst( "tree" ) );

    // Settings requests always include both connection directions.
    boolean returnInboundConnections = true;
    boolean returnOutboundConnections = true;

    addQueryParams( getServiceParameters(), ui );

    ServiceRequest r = services.newRequest( action, tree, getServiceParameters(), payload,
        returnInboundConnections, returnOutboundConnections, false, false);
    response.setServiceRequest( r );

    AbstractCollectionService abstractCollectionService = new AbstractCollectionService();
    ServiceResults results = abstractCollectionService.postCollectionSettings( r );

    if ( results != null ) {
        if ( results.hasData() ) {
            response.setData( results.getData() );
        }
        if ( results.getServiceMetadata() != null ) {
            response.setMetadata( results.getServiceMetadata() );
        }
        Query query = r.getLastQuery();
        // Selection queries short-circuit: the list/count/next/path fields
        // replace the normal results payload.
        if ( query != null && query.hasSelectSubjects() ) {
            response.setList( QueryUtils.getSelectionResults( query, results ) );
            response.setCount( response.getList().size() );
            response.setNext( results.getNextResult() );
            response.setPath( results.getPath() );
            return results;
        }
        response.setResults( results );
    }

    httpServletRequest.setAttribute( "applicationId", services.getApplicationId() );
    return results;
}
/**
 * Core request pipeline: builds a {@link ServiceRequest} from the accumulated
 * service parameters plus query parameters, executes it, and copies
 * data/metadata/count/list information into the {@link ApiResponse}.
 *
 * <p>Fixes: parameterized SLF4J logging instead of {@code String.format};
 * {@code Boolean.parseBoolean} instead of boxing via {@code Boolean.valueOf};
 * null-safe comparison of the org-level default connection property (a
 * missing property previously threw a NullPointerException).
 *
 * @param ui       request URI info; "tree", "connections", "analyzeOnly" and
 *                 "returnQuery" query params tune the request
 * @param response API response envelope to populate
 * @param action   the service action (GET/POST/PUT/DELETE)
 * @param payload  request payload (may be null)
 * @return the executed results, or null if the request produced none
 */
public ServiceResults executeServiceRequest( UriInfo ui, ApiResponse response, ServiceAction action,
                                             ServicePayload payload ) throws Exception {

    if(logger.isTraceEnabled()){
        logger.trace( "ServiceResource.executeServiceRequest" );
    }

    boolean tree = "true".equalsIgnoreCase( ui.getQueryParameters().getFirst( "tree" ) );

    String connectionQueryParm = ui.getQueryParameters().getFirst("connections");
    boolean returnInboundConnections = true;
    boolean returnOutboundConnections = true;

    // connection info can be blocked only for GETs
    if (action == ServiceAction.GET) {
        if ("none".equalsIgnoreCase(connectionQueryParm)) {
            returnInboundConnections = false;
            returnOutboundConnections = false;
        } else if ("in".equalsIgnoreCase(connectionQueryParm)) {
            returnInboundConnections = true;
            returnOutboundConnections = false;
        } else if ("out".equalsIgnoreCase(connectionQueryParm)) {
            returnInboundConnections = false;
            returnOutboundConnections = true;
        } else if ("all".equalsIgnoreCase(connectionQueryParm)) {
            returnInboundConnections = true;
            returnOutboundConnections = true;
        } else {
            if (connectionQueryParm != null) {
                // unrecognized parameter
                logger.error("Invalid connections query parameter={}, ignoring.", connectionQueryParm);
            }
            // fall back to the org-configured default connection behavior
            OrganizationConfig orgConfig =
                management.getOrganizationConfigForApplication(services.getApplicationId());
            String defaultConnectionQueryParm =
                orgConfig.getProperty(OrganizationConfigProps.ORGPROPERTIES_DEFAULT_CONNECTION_PARAM);
            // constant-first equals is null-safe: a missing property now means
            // "no connections" instead of a NullPointerException
            returnInboundConnections =
                "in".equals(defaultConnectionQueryParm) || "all".equals(defaultConnectionQueryParm);
            returnOutboundConnections =
                "out".equals(defaultConnectionQueryParm) || "all".equals(defaultConnectionQueryParm);
        }
    }

    boolean analyzeQueryOnly = Boolean.parseBoolean(ui.getQueryParameters().getFirst("analyzeOnly"));

    boolean returnQuery = false;
    // currently only allow query return if service admin
    if (SubjectUtils.isServiceAdmin()) {
        returnQuery = Boolean.parseBoolean(ui.getQueryParameters().getFirst("returnQuery"));
    }

    // a bare "GET /collection" (single service parameter) also reports a count
    boolean collectionGet = false;
    if ( action == ServiceAction.GET ) {
        collectionGet = getServiceParameters().size() == 1;
    }
    addQueryParams( getServiceParameters(), ui );

    ServiceRequest r = services.newRequest( action, tree, getServiceParameters(), payload,
        returnInboundConnections, returnOutboundConnections, analyzeQueryOnly, returnQuery);
    response.setServiceRequest( r );
    ServiceResults results = r.execute();
    if ( results != null ) {
        if ( results.hasData() ) {
            response.setData( results.getData() );
        }
        if ( results.getServiceMetadata() != null ) {
            response.setMetadata( results.getServiceMetadata() );
        }
        Query query = r.getLastQuery();
        // Selection queries short-circuit: list/count/next/path replace the
        // normal results payload.
        if ( query != null && query.hasSelectSubjects() ) {
            response.setList( QueryUtils.getSelectionResults( query, results ) );
            response.setCount( response.getList().size() );
            response.setNext( results.getNextResult() );
            response.setPath( results.getPath() );
            return results;
        }
        if ( collectionGet ) {
            response.setCount( results.size() );
        }
        response.setResults( results );
    }

    httpServletRequest.setAttribute( "applicationId", services.getApplicationId() );
    return results;
}
/**
 * GET handler: builds the response envelope and runs a GET service request
 * with no payload.
 *
 * @param callback JSONP callback name (defaults to "callback")
 */
@CheckPermissionsForPath
@GET
@Produces({MediaType.APPLICATION_JSON, MediaType.TEXT_HTML, "application/javascript"})
@JSONP
public ApiResponse executeGet( @Context UriInfo ui,
                               @QueryParam("callback") @DefaultValue("callback") String callback )
        throws Exception {

    if(logger.isTraceEnabled()){
        logger.trace( "ServiceResource.executeGet" );
    }

    ApiResponse response = createApiResponse();
    response.setApplication( services.getApplication() );
    response.setParams( ui.getQueryParameters() );
    response.setAction( "get" );

    executeServiceRequest( ui, response, ServiceAction.GET, null );

    return response;
}
/**
 * Builds a {@link ServicePayload} from a normalized JSON tree:
 * a map becomes a single-entity payload, a list of UUIDs an id-list payload,
 * a list of maps a batch payload, and anything else an empty payload.
 */
@SuppressWarnings({ "unchecked" })
public ServicePayload getPayload( Object json ) {
    json = JsonUtils.normalizeJsonTree( json );
    if ( json instanceof Map ) {
        return ServicePayload.payload( ( Map<String, Object> ) json );
    }
    if ( json instanceof List ) {
        List<?> items = ( List<?> ) json;
        if ( !items.isEmpty() ) {
            Object first = items.get( 0 );
            if ( first instanceof UUID ) {
                return ServicePayload.idListPayload( ( List<UUID> ) items );
            }
            if ( first instanceof Map ) {
                return ServicePayload.batchPayload( ( List<Map<String, Object>> ) items );
            }
        }
    }
    // null, scalars, empty lists, and lists of other element types
    return new ServicePayload();
}
/**
 * Necessary to work around inexplicable problems with EntityHolder.
 * See above.
 *
 * <p>Fix: the trace message previously said "executePostWithMap" — a
 * copy-paste from the map-based variant — which made traces misleading.
 *
 * @param json already-parsed JSON body (map, list, or null)
 */
public ApiResponse executePostWithObject( @Context UriInfo ui, Object json,
                                          @QueryParam("callback") @DefaultValue("callback") String callback ) throws Exception {

    if(logger.isTraceEnabled()){
        logger.trace( "ServiceResource.executePostWithObject" );
    }

    ApiResponse response = createApiResponse();
    response.setAction( "post" );
    response.setApplication( services.getApplication() );
    response.setParams( ui.getQueryParameters() );

    ServicePayload payload = getPayload( json );

    executeServiceRequest( ui, response, ServiceAction.POST, payload );

    return response;
}
/**
 * Necessary to work around inexplicable problems with EntityHolder.
 * See above.
 *
 * @param json already-parsed JSON body as a map
 */
public ApiResponse executePutWithMap( @Context UriInfo ui, Map<String, Object> json,
                                      @QueryParam("callback") @DefaultValue("callback") String callback ) throws Exception {

    ApiResponse response = createApiResponse();
    response.setApplication( services.getApplication() );
    response.setParams( ui.getQueryParameters() );
    response.setAction( "put" );

    ServicePayload payload = getPayload( json );
    executeServiceRequest( ui, response, ServiceAction.PUT, payload );

    return response;
}
/**
 * POST handler: parses the raw body (empty body maps to a null payload) and
 * runs a POST service request.
 */
@CheckPermissionsForPath
@POST
@Consumes(MediaType.APPLICATION_JSON)
@JSONP
@Produces({MediaType.APPLICATION_JSON, "application/javascript"})
public ApiResponse executePost( @Context UriInfo ui, String body,
                                @QueryParam("callback") @DefaultValue("callback") String callback ) throws Exception {

    if(logger.isTraceEnabled()){
        logger.trace( "ServiceResource.executePost: body = {}", body );
    }

    Object json = StringUtils.isEmpty( body ) ? null : readJsonToObject( body );

    ApiResponse response = createApiResponse();
    response.setAction( "post" );
    response.setApplication( services.getApplication() );
    response.setParams( ui.getQueryParameters() );

    ServicePayload payload = getPayload( json );
    executeServiceRequest( ui, response, ServiceAction.POST, payload );

    return response;
}
/**
 * PUT handler: parses the JSON body into a map and delegates to the
 * map-based PUT handler.
 */
@CheckPermissionsForPath
@PUT
@Consumes(MediaType.APPLICATION_JSON)
@JSONP
@Produces({MediaType.APPLICATION_JSON, "application/javascript"})
public ApiResponse executePut( @Context UriInfo ui, String body,
                               @QueryParam("callback") @DefaultValue("callback") String callback )
        throws Exception {

    if(logger.isTraceEnabled()){
        logger.trace( "ServiceResource.executePut" );
    }

    // NOTE(review): an ObjectMapper is constructed per request; a shared
    // static final instance would avoid the repeated setup cost — confirm no
    // per-request configuration is needed before hoisting it to a field.
    ObjectMapper mapper = new ObjectMapper();
    Map<String, Object> json = mapper.readValue( body, mapTypeReference );
    return executePutWithMap( ui, json, callback );
}
/**
 * DELETE handler: executes the service delete and, when entities (not a
 * connection or collection) were removed, also deletes any binary asset data
 * attached to them.
 *
 * <p>Fix: {@code executeServiceRequest} can return null, which previously
 * caused a NullPointerException on the results-type check; a null result now
 * skips the asset cleanup.
 *
 * @param confirmAppDelete confirmation flag for application deletes
 */
@CheckPermissionsForPath
@DELETE
@JSONP
@Produces({MediaType.APPLICATION_JSON, "application/javascript"})
public ApiResponse executeDelete(
    @Context UriInfo ui,
    @QueryParam("callback") @DefaultValue("callback") String callback,
    @QueryParam("app_delete_confirm") String confirmAppDelete )
    throws Exception {

    if(logger.isTraceEnabled()){
        logger.trace( "ServiceResource.executeDelete" );
    }

    ApiResponse response = createApiResponse();
    response.setAction( "delete" );
    response.setApplication( services.getApplication() );
    response.setParams( ui.getQueryParameters() );

    ServiceResults sr = executeServiceRequest( ui, response, ServiceAction.DELETE, null );

    // if we deleted an entity (and not a connection or collection) then
    // we may need to clean up binary asset data associated with that entity
    if ( sr != null
        && !sr.getResultsType().equals( ServiceResults.Type.CONNECTION )
        && !sr.getResultsType().equals( ServiceResults.Type.COLLECTION )) {

        for ( Entity entity : sr.getEntities() ) {
            if ( entity.getProperty( AssetUtils.FILE_METADATA ) != null ) {
                try {
                    binaryStore.delete( services.getApplicationId(), entity );
                }catch(AwsPropertiesNotFoundException apnfe){
                    logger.error( "Amazon Property needed for this operation not found",apnfe );
                    response.setError( "500","Amazon Property needed for this operation not found",apnfe );
                }
            }
        }
    }
    return response;
}
// TODO Temporarily removed until we test further
// @Produces("text/csv")
// @GET
// @RequireApplicationAccess
// @Consumes("text/csv")
// public String executeGetCsv(@Context UriInfo ui,
// @QueryParam("callback") @DefaultValue("callback") String callback)
// throws Exception {
// ui.getQueryParameters().putSingle("pad", "true");
// JSONWithPadding jsonp = executeGet(ui, callback);
//
// StringBuilder builder = new StringBuilder();
// if ((jsonp != null) && (jsonp.getJsonSource() instanceof ApiResponse)) {
// ApiResponse apiResponse = (ApiResponse) jsonp.getJsonSource();
// if ((apiResponse.getCounters() != null)
// && (apiResponse.getCounters().size() > 0)) {
// List<AggregateCounterSet> counters = apiResponse.getCounters();
// int size = counters.get(0).getValues().size();
// List<AggregateCounter> firstCounterList = counters.get(0)
// .getValues();
// if (size > 0) {
// builder.append("timestamp");
// for (AggregateCounterSet counterSet : counters) {
// builder.append(",");
// builder.append(counterSet.getName());
// }
// builder.append("\n");
// SimpleDateFormat formatter = new SimpleDateFormat(
// "yyyy-MM-dd HH:mm:ss.SSS");
// for (int i = 0; i < size; i++) {
// // yyyy-mm-dd hh:mm:ss.000
// builder.append(formatter.format(new Date(
// firstCounterList.get(i).getTimestamp())));
// for (AggregateCounterSet counterSet : counters) {
// List<AggregateCounter> counterList = counterSet
// .getValues();
// builder.append(",");
// builder.append(counterList.get(i).getValue());
// }
// builder.append("\n");
// }
// }
// } else if ((apiResponse.getEntities() != null)
// && (apiResponse.getEntities().size() > 0)) {
// for (Entity entity : apiResponse.getEntities()) {
// builder.append(entity.getUuid());
// builder.append(",");
// builder.append(entity.getType());
// builder.append(",");
// builder.append(mapToJsonString(entity));
// }
//
// }
// }
// return builder.toString();
// }
/** Serializes the access info to JSON, then applies the JSONP callback wrapper. */
public static String wrapWithCallback( AccessInfo accessInfo, String callback ) {
    String json = JsonUtils.mapToJsonString( accessInfo );
    return wrapWithCallback( json, callback );
}
/** Wraps {@code json} in a JSONP invocation when a callback name is present; otherwise returns it unchanged. */
public static String wrapWithCallback( String json, String callback ) {
    return StringUtils.isNotBlank( callback ) ? callback + "(" + json + ")" : json;
}
/** Chooses the response media type: JSONP javascript when a callback is given, plain JSON otherwise. */
public static MediaType jsonMediaType( String callback ) {
    if ( StringUtils.isNotBlank( callback ) ) {
        return new MediaType( "application", "javascript" );
    }
    return APPLICATION_JSON_TYPE;
}
/** ************** the following is file attachment (Asset) support ********************* */
/**
 * Multipart POST handler for file attachments; only the service action
 * differs from the PUT variant, so both delegate to the shared
 * {@code executeMultiPart}.
 */
@CheckPermissionsForPath
@POST
@Consumes(MediaType.MULTIPART_FORM_DATA)
@JSONP
@Produces({MediaType.APPLICATION_JSON, "application/javascript"})
public ApiResponse executeMultiPartPost( @Context UriInfo ui,
                                         @QueryParam("callback") @DefaultValue("callback") String callback,
                                         FormDataMultiPart multiPart ) throws Exception {
    if(logger.isTraceEnabled()){
        logger.trace( "ServiceResource.executeMultiPartPost" );
    }
    return executeMultiPart( ui, callback, multiPart, ServiceAction.POST );
}
/**
 * Multipart PUT handler for file attachments; delegates to the shared
 * {@code executeMultiPart} with the PUT action.
 */
@CheckPermissionsForPath
@PUT
@Consumes(MediaType.MULTIPART_FORM_DATA)
@JSONP
@Produces({MediaType.APPLICATION_JSON, "application/javascript"})
public ApiResponse executeMultiPartPut( @Context UriInfo ui,
                                        @QueryParam("callback") @DefaultValue("callback") String callback,
                                        FormDataMultiPart multiPart ) throws Exception {
    if(logger.isTraceEnabled()){
        logger.trace( "ServiceResource.executeMultiPartPut" );
    }
    return executeMultiPart( ui, callback, multiPart, ServiceAction.PUT );
}
/**
 * Shared multipart implementation: collects text/plain form fields into the
 * entity payload, executes the service request, then streams the uploaded
 * file (if any) into the binary store for the resulting entity.
 *
 * <p>Fixes: raw {@code new HashMap()} replaced with a typed map; an unused
 * {@code EntityManager} lookup, a no-op entity reassignment, and
 * commented-out dead code were removed.
 *
 * <p>NOTE(review): the JAX-RS annotations on this private method are inert —
 * Jersey only dispatches to public methods — confirm before removing them.
 */
@JSONP
@Produces({MediaType.APPLICATION_JSON, "application/javascript"})
private ApiResponse executeMultiPart( UriInfo ui, String callback, FormDataMultiPart multiPart,
                                      ServiceAction serviceAction ) throws Exception {

    // needed for testing
    this.binaryStore = binaryStoreFactory.getBinaryStore( properties.getProperty(PROPERTIES_USERGRID_BINARY_UPLOADER) );

    // collect form data values: only text/plain parts become entity fields
    List<BodyPart> bodyParts = multiPart.getBodyParts();
    HashMap<String, Object> data = new HashMap<>();
    for ( BodyPart bp : bodyParts ) {
        FormDataBodyPart bodyPart = ( FormDataBodyPart ) bp;
        if ( bodyPart.getMediaType().equals( MediaType.TEXT_PLAIN_TYPE ) ) {
            data.put( bodyPart.getName(), bodyPart.getValue() );
        }
        else {
            if (logger.isTraceEnabled()) {
                logger.trace("skipping bodyPart {} of media type {}", bodyPart.getName(), bodyPart.getMediaType());
            }
        }
    }

    FormDataBodyPart fileBodyPart = multiPart.getField( FILE_FIELD_NAME );

    data.put( AssetUtils.FILE_METADATA, new HashMap<String, Object>() );

    // process entity
    ApiResponse response = createApiResponse();
    response.setAction( serviceAction.name().toLowerCase() );
    response.setApplication( services.getApplication() );
    response.setParams( ui.getQueryParameters() );

    //Updates entity with fields that are in text/plain as per loop above
    if(data.get( FILE_FIELD_NAME )==null){
        data.put( FILE_FIELD_NAME,null );
    }
    ServicePayload payload = getPayload( data );

    ServiceResults serviceResults = executeServiceRequest( ui, response, serviceAction, payload );

    // process file part
    if ( fileBodyPart != null ) {
        InputStream fileInput = ( (BodyPartEntity) fileBodyPart.getEntity() ).getInputStream();
        if ( fileInput != null ) {
            Entity entity = serviceResults.getEntity();
            try {
                binaryStore.write( getApplicationId(), entity, fileInput );
            }
            catch ( AwsPropertiesNotFoundException apnfe){
                logger.error( "Amazon Property needed for this operation not found",apnfe );
                response.setError( "500","Amazon Property needed for this operation not found",apnfe );
            }
            catch ( RuntimeException re){
                logger.error(re.getMessage());
                response.setError( "500", re );
            }
        }
    }
    return response;
}
/**
 * PUT variant of the raw octet-stream upload endpoint; delegates to
 * {@link #uploadDataStream(UriInfo, InputStream)} so PUT and POST share one
 * implementation.
 */
@CheckPermissionsForPath
@PUT
@Consumes(MediaType.APPLICATION_OCTET_STREAM)
public Response uploadDataStreamPut( @Context UriInfo ui, InputStream uploadedInputStream ) throws Exception {
    return uploadDataStream( ui, uploadedInputStream );
}
/**
 * Streams a raw request body into the binary store for the entity addressed
 * by the current path. The entity is first resolved via a GET service
 * request, then the stream is written and the entity re-persisted.
 *
 * @return 200 on success, 500 if the binary store write fails
 */
@CheckPermissionsForPath
@POST
@Consumes(MediaType.APPLICATION_OCTET_STREAM)
public Response uploadDataStream( @Context UriInfo ui, InputStream uploadedInputStream ) throws Exception {

    //needed for testing
    this.binaryStore = binaryStoreFactory.getBinaryStore( properties.getProperty(PROPERTIES_USERGRID_BINARY_UPLOADER) );

    // resolve the target entity with a GET against the current path
    ApiResponse response = createApiResponse();
    response.setAction( "get" );
    response.setApplication( services.getApplication() );
    response.setParams( ui.getQueryParameters() );

    ServiceResults serviceResults = executeServiceRequest( ui, response, ServiceAction.GET, null );

    // NOTE(review): serviceResults / entity may be null if the path did not
    // resolve, which would NPE in binaryStore.write — confirm upstream
    // guarantees before relying on this.
    Entity entity = serviceResults.getEntity();
    try {
        // the write may mutate the entity's file metadata; em.update below persists it
        binaryStore.write( getApplicationId(), entity, uploadedInputStream );
    }catch(AwsPropertiesNotFoundException apnfe){
        logger.error( "Amazon Property needed for this operation not found",apnfe );
        return Response.status(Response.Status.INTERNAL_SERVER_ERROR).build();
    }catch ( RuntimeException re ){
        logger.error(re.getMessage());
        return Response.status(Response.Status.INTERNAL_SERVER_ERROR).build();
    }

    EntityManager em = emf.getEntityManager( getApplicationId() );
    em.update( entity );
    return Response.status( 200 ).build();
}
/**
 * Streams the binary asset attached to the addressed entity, honoring
 * If-Modified-Since and a single byte-range request.
 *
 * @param entityId      path segment naming the entity (used for tracing here;
 *                      resolution happens via the service request)
 * @param rangeHeader   optional "bytes=..." Range header
 * @param modifiedSince optional If-Modified-Since header value
 * @return 304 when unmodified, 404 when no asset stream exists, 500 on
 *         binary-store failures, otherwise 200/partial content
 */
@CheckPermissionsForPath
@GET
@Produces(MediaType.WILDCARD)
public Response executeStreamGet( @Context UriInfo ui, @PathParam("entityId") PathSegment entityId,
                                  @HeaderParam("range") String rangeHeader,
                                  @HeaderParam("if-modified-since") String modifiedSince ) throws Exception {

    if(logger.isTraceEnabled()){
        logger.trace( "ServiceResource.executeStreamGet" );
    }

    // needed for testing
    this.binaryStore = binaryStoreFactory.getBinaryStore( properties.getProperty(PROPERTIES_USERGRID_BINARY_UPLOADER) );

    // resolve the target entity with a GET against the current path
    ApiResponse response = createApiResponse();
    response.setAction( "get" );
    response.setApplication( services.getApplication() );
    response.setParams( ui.getQueryParameters() );
    ServiceResults serviceResults = executeServiceRequest( ui, response, ServiceAction.GET, null );
    Entity entity = serviceResults.getEntity();

    if(logger.isTraceEnabled()){
        logger.trace( "In ServiceResource.executeStreamGet with id: {}, range: {}, modifiedSince: {}",
            entityId, rangeHeader, modifiedSince );
    }

    Map<String, Object> fileMetadata = AssetUtils.getFileMetadata( entity );

    // return a 304 (Not Modified) when the stored asset predates the client's copy
    // NOTE(review): this unboxes LAST_MODIFIED and will NPE if that metadata
    // key is absent; also the strict '<' excludes the exact-match case that
    // HTTP treats as not-modified — confirm intended semantics.
    Date modified = AssetUtils.fromIfModifiedSince( modifiedSince );
    if ( modified != null ) {
        Long lastModified = ( Long ) fileMetadata.get( AssetUtils.LAST_MODIFIED );
        if ( lastModified - modified.getTime() < 0 ) {
            return Response.status( Response.Status.NOT_MODIFIED ).build();
        }
    }

    boolean range = StringUtils.isNotBlank( rangeHeader );
    long start = 0, end = 0, contentLength = 0;
    InputStream inputStream;

    if ( range ) { // honor range request, calculate start & end
        String rangeValue = rangeHeader.trim().substring( "bytes=".length() );
        contentLength = ( Long ) fileMetadata.get( AssetUtils.CONTENT_LENGTH );
        // 'end' is the inclusive index of the last byte to serve
        end = contentLength - 1;
        if ( rangeValue.startsWith( "-" ) ) {
            // suffix range "bytes=-N": serve the final N bytes
            // NOTE(review): RFC 7233 implies start = contentLength - N; the
            // extra -1 here looks like an off-by-one — confirm against the
            // binary store's read semantics.
            start = contentLength - 1 - Long.parseLong( rangeValue.substring( "-".length() ) );
        }
        else {
            String[] startEnd = rangeValue.split( "-" );
            long parsedStart = Long.parseLong( startEnd[0] );
            if ( parsedStart > start && parsedStart < end ) {
                start = parsedStart;
            }
            if ( startEnd.length > 1 ) {
                long parsedEnd = Long.parseLong( startEnd[1] );
                if ( parsedEnd > start && parsedEnd < end ) {
                    end = parsedEnd;
                }
            }
        }
        try {
            // NOTE(review): with an inclusive 'end' the byte count is
            // end - start + 1, not end - start — confirm the length parameter
            // expected by binaryStore.read.
            inputStream = binaryStore.read( getApplicationId(), entity, start, end - start );
        }catch(AwsPropertiesNotFoundException apnfe){
            logger.error( "Amazon Property needed for this operation not found",apnfe );
            return Response.status(Response.Status.INTERNAL_SERVER_ERROR).build();
        }catch(RuntimeException re){
            logger.error(re.getMessage());
            return Response.status(Response.Status.INTERNAL_SERVER_ERROR).build();
        }
    }
    else { // no range
        try {
            inputStream = binaryStore.read( getApplicationId(), entity );
        }catch(AwsPropertiesNotFoundException apnfe){
            logger.error( "Amazon Property needed for this operation not found",apnfe );
            return Response.status(Response.Status.INTERNAL_SERVER_ERROR).build();
        }
        catch(AmazonServiceException ase){
            // server-side (5xx) S3 errors are logged as errors; client errors at debug
            if( ase.getStatusCode() > 499 ){
                logger.error(ase.getMessage());
            }else if(logger.isDebugEnabled()){
                logger.debug(ase.getMessage());
            }
            return Response.status(ase.getStatusCode()).build();
        }
        catch (StorageException se){
            if( se.getCode() > 499 ){
                logger.error(se.getMessage());
            }else if(logger.isDebugEnabled()){
                logger.debug(se.getMessage());
            }
            return Response.status(se.getCode()).build();
        }
        catch(RuntimeException re){
            logger.error(re.getMessage());
            return Response.status(Response.Status.INTERNAL_SERVER_ERROR).build();
        }
    }

    // return 404 if not found
    if ( inputStream == null ) {
        return Response.status( Response.Status.NOT_FOUND ).build();
    }

    // build the success response with content type, Last-Modified, and optional ETag
    Long lastModified = ( Long ) fileMetadata.get( AssetUtils.LAST_MODIFIED );
    Response.ResponseBuilder responseBuilder =
        Response.ok( inputStream ).type( ( String ) fileMetadata.get( AssetUtils.CONTENT_TYPE ) )
            .lastModified( new Date( lastModified ) );
    if ( fileMetadata.get( AssetUtils.E_TAG ) != null ) {
        responseBuilder.tag( ( String ) fileMetadata.get( AssetUtils.E_TAG ) );
    }
    if ( range ) {
        responseBuilder.header( "Content-Range", "bytes " + start + "-" + end + "/" + contentLength );
    }
    return responseBuilder.build();
}
}
|
googleapis/google-cloud-java | 37,794 | java-edgenetwork/proto-google-cloud-edgenetwork-v1/src/main/java/com/google/cloud/edgenetwork/v1/ListZonesRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/edgenetwork/v1/service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.edgenetwork.v1;
/**
*
*
* <pre>
* Deprecated: not implemented.
* Message for requesting list of Zones
* </pre>
*
* Protobuf type {@code google.cloud.edgenetwork.v1.ListZonesRequest}
*/
@java.lang.Deprecated
public final class ListZonesRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.edgenetwork.v1.ListZonesRequest)
ListZonesRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListZonesRequest.newBuilder() to construct.
private ListZonesRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListZonesRequest() {
parent_ = "";
pageToken_ = "";
filter_ = "";
orderBy_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListZonesRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.edgenetwork.v1.ServiceProto
.internal_static_google_cloud_edgenetwork_v1_ListZonesRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.edgenetwork.v1.ServiceProto
.internal_static_google_cloud_edgenetwork_v1_ListZonesRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.edgenetwork.v1.ListZonesRequest.class,
com.google.cloud.edgenetwork.v1.ListZonesRequest.Builder.class);
}
public static final int PARENT_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. Parent value for ListZonesRequest
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
@java.lang.Override
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. Parent value for ListZonesRequest
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int PAGE_SIZE_FIELD_NUMBER = 2;
private int pageSize_ = 0;
/**
*
*
* <pre>
* Requested page size. Server may return fewer items than requested.
* If unspecified, server will pick an appropriate default.
* </pre>
*
* <code>int32 page_size = 2;</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
public static final int PAGE_TOKEN_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return The pageToken.
*/
@java.lang.Override
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return The bytes for pageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int FILTER_FIELD_NUMBER = 4;
@SuppressWarnings("serial")
private volatile java.lang.Object filter_ = "";
/**
*
*
* <pre>
* Filtering results
* </pre>
*
* <code>string filter = 4;</code>
*
* @return The filter.
*/
@java.lang.Override
public java.lang.String getFilter() {
java.lang.Object ref = filter_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
filter_ = s;
return s;
}
}
/**
*
*
* <pre>
* Filtering results
* </pre>
*
* <code>string filter = 4;</code>
*
* @return The bytes for filter.
*/
@java.lang.Override
public com.google.protobuf.ByteString getFilterBytes() {
java.lang.Object ref = filter_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
filter_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int ORDER_BY_FIELD_NUMBER = 5;
@SuppressWarnings("serial")
private volatile java.lang.Object orderBy_ = "";
/**
*
*
* <pre>
* Hint for how to order the results
* </pre>
*
* <code>string order_by = 5;</code>
*
* @return The orderBy.
*/
@java.lang.Override
public java.lang.String getOrderBy() {
java.lang.Object ref = orderBy_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
orderBy_ = s;
return s;
}
}
/**
*
*
* <pre>
* Hint for how to order the results
* </pre>
*
* <code>string order_by = 5;</code>
*
* @return The bytes for orderBy.
*/
@java.lang.Override
public com.google.protobuf.ByteString getOrderByBytes() {
java.lang.Object ref = orderBy_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
orderBy_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
}
if (pageSize_ != 0) {
output.writeInt32(2, pageSize_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 4, filter_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(orderBy_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 5, orderBy_);
}
getUnknownFields().writeTo(output);
}
// Computes (and memoizes in memoizedSize; -1 = not computed) the exact
// serialized byte size, mirroring the field-skipping logic of writeTo().
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
}
if (pageSize_ != 0) {
size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, filter_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(orderBy_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, orderBy_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
// Value equality: same type, all five fields equal, and identical unknown
// field sets.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.edgenetwork.v1.ListZonesRequest)) {
return super.equals(obj);
}
com.google.cloud.edgenetwork.v1.ListZonesRequest other =
(com.google.cloud.edgenetwork.v1.ListZonesRequest) obj;
if (!getParent().equals(other.getParent())) return false;
if (getPageSize() != other.getPageSize()) return false;
if (!getPageToken().equals(other.getPageToken())) return false;
if (!getFilter().equals(other.getFilter())) return false;
if (!getOrderBy().equals(other.getOrderBy())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
// Hash over descriptor, all five fields, and unknown fields; memoized in
// memoizedHashCode (0 = not yet computed). Consistent with equals().
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + PARENT_FIELD_NUMBER;
hash = (53 * hash) + getParent().hashCode();
hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
hash = (53 * hash) + getPageSize();
hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getPageToken().hashCode();
hash = (37 * hash) + FILTER_FIELD_NUMBER;
hash = (53 * hash) + getFilter().hashCode();
hash = (37 * hash) + ORDER_BY_FIELD_NUMBER;
hash = (53 * hash) + getOrderBy().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Static parsing entry points. All overloads delegate to the shared PARSER;
// byte-oriented overloads throw InvalidProtocolBufferException, stream
// overloads throw IOException. parseDelimitedFrom reads a varint
// length-prefixed message.
public static com.google.cloud.edgenetwork.v1.ListZonesRequest parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.edgenetwork.v1.ListZonesRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.edgenetwork.v1.ListZonesRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.edgenetwork.v1.ListZonesRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.edgenetwork.v1.ListZonesRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.edgenetwork.v1.ListZonesRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.edgenetwork.v1.ListZonesRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.edgenetwork.v1.ListZonesRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.edgenetwork.v1.ListZonesRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.edgenetwork.v1.ListZonesRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.edgenetwork.v1.ListZonesRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.edgenetwork.v1.ListZonesRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
// Builder factory methods: newBuilder() starts from the default instance;
// newBuilder(prototype) starts pre-populated from an existing message;
// toBuilder() avoids a redundant mergeFrom when called on the default
// instance.
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.edgenetwork.v1.ListZonesRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
 *
 *
 * <pre>
 * Deprecated: not implemented.
 * Message for requesting list of Zones
 * </pre>
 *
 * Protobuf type {@code google.cloud.edgenetwork.v1.ListZonesRequest}
 */
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.edgenetwork.v1.ListZonesRequest)
com.google.cloud.edgenetwork.v1.ListZonesRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.edgenetwork.v1.ServiceProto
.internal_static_google_cloud_edgenetwork_v1_ListZonesRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.edgenetwork.v1.ServiceProto
.internal_static_google_cloud_edgenetwork_v1_ListZonesRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.edgenetwork.v1.ListZonesRequest.class,
com.google.cloud.edgenetwork.v1.ListZonesRequest.Builder.class);
}
// Construct using com.google.cloud.edgenetwork.v1.ListZonesRequest.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
// Resets every field to its proto3 default and clears the has-bits.
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
parent_ = "";
pageSize_ = 0;
pageToken_ = "";
filter_ = "";
orderBy_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.edgenetwork.v1.ServiceProto
.internal_static_google_cloud_edgenetwork_v1_ListZonesRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.edgenetwork.v1.ListZonesRequest getDefaultInstanceForType() {
return com.google.cloud.edgenetwork.v1.ListZonesRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.edgenetwork.v1.ListZonesRequest build() {
com.google.cloud.edgenetwork.v1.ListZonesRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.edgenetwork.v1.ListZonesRequest buildPartial() {
com.google.cloud.edgenetwork.v1.ListZonesRequest result =
new com.google.cloud.edgenetwork.v1.ListZonesRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
// Copies only the fields whose has-bit is set into the result message.
private void buildPartial0(com.google.cloud.edgenetwork.v1.ListZonesRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.parent_ = parent_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.pageSize_ = pageSize_;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.pageToken_ = pageToken_;
}
if (((from_bitField0_ & 0x00000008) != 0)) {
result.filter_ = filter_;
}
if (((from_bitField0_ & 0x00000010) != 0)) {
result.orderBy_ = orderBy_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.edgenetwork.v1.ListZonesRequest) {
return mergeFrom((com.google.cloud.edgenetwork.v1.ListZonesRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
// Field-wise merge: non-default values in `other` overwrite this builder's
// values; defaults in `other` leave this builder untouched.
public Builder mergeFrom(com.google.cloud.edgenetwork.v1.ListZonesRequest other) {
if (other == com.google.cloud.edgenetwork.v1.ListZonesRequest.getDefaultInstance())
return this;
if (!other.getParent().isEmpty()) {
parent_ = other.parent_;
bitField0_ |= 0x00000001;
onChanged();
}
if (other.getPageSize() != 0) {
setPageSize(other.getPageSize());
}
if (!other.getPageToken().isEmpty()) {
pageToken_ = other.pageToken_;
bitField0_ |= 0x00000004;
onChanged();
}
if (!other.getFilter().isEmpty()) {
filter_ = other.filter_;
bitField0_ |= 0x00000008;
onChanged();
}
if (!other.getOrderBy().isEmpty()) {
orderBy_ = other.orderBy_;
bitField0_ |= 0x00000010;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
// Wire-format merge: dispatches on each tag (field number << 3 | wire type);
// tag 0 and end-group tags terminate the loop, unrecognized tags are kept
// as unknown fields.
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
parent_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 16:
{
pageSize_ = input.readInt32();
bitField0_ |= 0x00000002;
break;
} // case 16
case 26:
{
pageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000004;
break;
} // case 26
case 34:
{
filter_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000008;
break;
} // case 34
case 42:
{
orderBy_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000010;
break;
} // case 42
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
// Has-bits: 0x01 parent, 0x02 pageSize, 0x04 pageToken, 0x08 filter,
// 0x10 orderBy.
private int bitField0_;
private java.lang.Object parent_ = "";
/**
 *
 *
 * <pre>
 * Required. Parent value for ListZonesRequest
 * </pre>
 *
 * <code>
 * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return The parent.
 */
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
 *
 *
 * <pre>
 * Required. Parent value for ListZonesRequest
 * </pre>
 *
 * <code>
 * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return The bytes for parent.
 */
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
 *
 *
 * <pre>
 * Required. Parent value for ListZonesRequest
 * </pre>
 *
 * <code>
 * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @param value The parent to set.
 * @return This builder for chaining.
 */
public Builder setParent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * Required. Parent value for ListZonesRequest
 * </pre>
 *
 * <code>
 * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return This builder for chaining.
 */
public Builder clearParent() {
parent_ = getDefaultInstance().getParent();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * Required. Parent value for ListZonesRequest
 * </pre>
 *
 * <code>
 * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @param value The bytes for parent to set.
 * @return This builder for chaining.
 */
public Builder setParentBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private int pageSize_;
/**
 *
 *
 * <pre>
 * Requested page size. Server may return fewer items than requested.
 * If unspecified, server will pick an appropriate default.
 * </pre>
 *
 * <code>int32 page_size = 2;</code>
 *
 * @return The pageSize.
 */
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
/**
 *
 *
 * <pre>
 * Requested page size. Server may return fewer items than requested.
 * If unspecified, server will pick an appropriate default.
 * </pre>
 *
 * <code>int32 page_size = 2;</code>
 *
 * @param value The pageSize to set.
 * @return This builder for chaining.
 */
public Builder setPageSize(int value) {
pageSize_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * Requested page size. Server may return fewer items than requested.
 * If unspecified, server will pick an appropriate default.
 * </pre>
 *
 * <code>int32 page_size = 2;</code>
 *
 * @return This builder for chaining.
 */
public Builder clearPageSize() {
bitField0_ = (bitField0_ & ~0x00000002);
pageSize_ = 0;
onChanged();
return this;
}
private java.lang.Object pageToken_ = "";
/**
 *
 *
 * <pre>
 * A token identifying a page of results the server should return.
 * </pre>
 *
 * <code>string page_token = 3;</code>
 *
 * @return The pageToken.
 */
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
 *
 *
 * <pre>
 * A token identifying a page of results the server should return.
 * </pre>
 *
 * <code>string page_token = 3;</code>
 *
 * @return The bytes for pageToken.
 */
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
 *
 *
 * <pre>
 * A token identifying a page of results the server should return.
 * </pre>
 *
 * <code>string page_token = 3;</code>
 *
 * @param value The pageToken to set.
 * @return This builder for chaining.
 */
public Builder setPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
pageToken_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * A token identifying a page of results the server should return.
 * </pre>
 *
 * <code>string page_token = 3;</code>
 *
 * @return This builder for chaining.
 */
public Builder clearPageToken() {
pageToken_ = getDefaultInstance().getPageToken();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * A token identifying a page of results the server should return.
 * </pre>
 *
 * <code>string page_token = 3;</code>
 *
 * @param value The bytes for pageToken to set.
 * @return This builder for chaining.
 */
public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
pageToken_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
private java.lang.Object filter_ = "";
/**
 *
 *
 * <pre>
 * Filtering results
 * </pre>
 *
 * <code>string filter = 4;</code>
 *
 * @return The filter.
 */
public java.lang.String getFilter() {
java.lang.Object ref = filter_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
filter_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
 *
 *
 * <pre>
 * Filtering results
 * </pre>
 *
 * <code>string filter = 4;</code>
 *
 * @return The bytes for filter.
 */
public com.google.protobuf.ByteString getFilterBytes() {
java.lang.Object ref = filter_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
filter_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
 *
 *
 * <pre>
 * Filtering results
 * </pre>
 *
 * <code>string filter = 4;</code>
 *
 * @param value The filter to set.
 * @return This builder for chaining.
 */
public Builder setFilter(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
filter_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * Filtering results
 * </pre>
 *
 * <code>string filter = 4;</code>
 *
 * @return This builder for chaining.
 */
public Builder clearFilter() {
filter_ = getDefaultInstance().getFilter();
bitField0_ = (bitField0_ & ~0x00000008);
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * Filtering results
 * </pre>
 *
 * <code>string filter = 4;</code>
 *
 * @param value The bytes for filter to set.
 * @return This builder for chaining.
 */
public Builder setFilterBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
filter_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
private java.lang.Object orderBy_ = "";
/**
 *
 *
 * <pre>
 * Hint for how to order the results
 * </pre>
 *
 * <code>string order_by = 5;</code>
 *
 * @return The orderBy.
 */
public java.lang.String getOrderBy() {
java.lang.Object ref = orderBy_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
orderBy_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
 *
 *
 * <pre>
 * Hint for how to order the results
 * </pre>
 *
 * <code>string order_by = 5;</code>
 *
 * @return The bytes for orderBy.
 */
public com.google.protobuf.ByteString getOrderByBytes() {
java.lang.Object ref = orderBy_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
orderBy_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
 *
 *
 * <pre>
 * Hint for how to order the results
 * </pre>
 *
 * <code>string order_by = 5;</code>
 *
 * @param value The orderBy to set.
 * @return This builder for chaining.
 */
public Builder setOrderBy(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
orderBy_ = value;
bitField0_ |= 0x00000010;
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * Hint for how to order the results
 * </pre>
 *
 * <code>string order_by = 5;</code>
 *
 * @return This builder for chaining.
 */
public Builder clearOrderBy() {
orderBy_ = getDefaultInstance().getOrderBy();
bitField0_ = (bitField0_ & ~0x00000010);
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * Hint for how to order the results
 * </pre>
 *
 * <code>string order_by = 5;</code>
 *
 * @param value The bytes for orderBy to set.
 * @return This builder for chaining.
 */
public Builder setOrderByBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
orderBy_ = value;
bitField0_ |= 0x00000010;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.edgenetwork.v1.ListZonesRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.edgenetwork.v1.ListZonesRequest)
// Singleton default instance (all fields at proto3 defaults), created once
// at class initialization.
private static final com.google.cloud.edgenetwork.v1.ListZonesRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.edgenetwork.v1.ListZonesRequest();
}
public static com.google.cloud.edgenetwork.v1.ListZonesRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Shared parser backing all parseFrom overloads; parses via a fresh Builder
// and attaches the partially-built message to any parse exception.
private static final com.google.protobuf.Parser<ListZonesRequest> PARSER =
new com.google.protobuf.AbstractParser<ListZonesRequest>() {
@java.lang.Override
public ListZonesRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
// Accessors for the shared parser and default instance.
public static com.google.protobuf.Parser<ListZonesRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListZonesRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.edgenetwork.v1.ListZonesRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
// ----- File boundary (concatenation artifact): the following content is from
// java-dialogflow/proto-google-cloud-dialogflow-v2/src/main/java/com/google/cloud/dialogflow/v2/SearchKnowledgeResponse.java -----
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/dialogflow/v2/conversation.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.dialogflow.v2;
/**
*
*
* <pre>
* The response message for
* [Conversations.SearchKnowledge][google.cloud.dialogflow.v2.Conversations.SearchKnowledge].
* </pre>
*
* Protobuf type {@code google.cloud.dialogflow.v2.SearchKnowledgeResponse}
*/
public final class SearchKnowledgeResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.dialogflow.v2.SearchKnowledgeResponse)
SearchKnowledgeResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use SearchKnowledgeResponse.newBuilder() to construct.
private SearchKnowledgeResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
// Default-instance constructor: initializes fields to proto3 defaults.
private SearchKnowledgeResponse() {
answers_ = java.util.Collections.emptyList();
rewrittenQuery_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new SearchKnowledgeResponse();
}
// Reflection plumbing: descriptor and field-accessor table for this message,
// sourced from the generated ConversationProto holder class.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.dialogflow.v2.ConversationProto
.internal_static_google_cloud_dialogflow_v2_SearchKnowledgeResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.dialogflow.v2.ConversationProto
.internal_static_google_cloud_dialogflow_v2_SearchKnowledgeResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.dialogflow.v2.SearchKnowledgeResponse.class,
com.google.cloud.dialogflow.v2.SearchKnowledgeResponse.Builder.class);
}
// Repeated field `answers` (field number 2) and its read-only accessors.
public static final int ANSWERS_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.dialogflow.v2.SearchKnowledgeAnswer> answers_;
/**
 *
 *
 * <pre>
 * Most relevant snippets extracted from articles in the given knowledge base,
 * ordered by confidence.
 * </pre>
 *
 * <code>repeated .google.cloud.dialogflow.v2.SearchKnowledgeAnswer answers = 2;</code>
 */
@java.lang.Override
public java.util.List<com.google.cloud.dialogflow.v2.SearchKnowledgeAnswer> getAnswersList() {
return answers_;
}
/**
 *
 *
 * <pre>
 * Most relevant snippets extracted from articles in the given knowledge base,
 * ordered by confidence.
 * </pre>
 *
 * <code>repeated .google.cloud.dialogflow.v2.SearchKnowledgeAnswer answers = 2;</code>
 */
@java.lang.Override
public java.util.List<? extends com.google.cloud.dialogflow.v2.SearchKnowledgeAnswerOrBuilder>
getAnswersOrBuilderList() {
return answers_;
}
/**
 *
 *
 * <pre>
 * Most relevant snippets extracted from articles in the given knowledge base,
 * ordered by confidence.
 * </pre>
 *
 * <code>repeated .google.cloud.dialogflow.v2.SearchKnowledgeAnswer answers = 2;</code>
 */
@java.lang.Override
public int getAnswersCount() {
return answers_.size();
}
/**
 *
 *
 * <pre>
 * Most relevant snippets extracted from articles in the given knowledge base,
 * ordered by confidence.
 * </pre>
 *
 * <code>repeated .google.cloud.dialogflow.v2.SearchKnowledgeAnswer answers = 2;</code>
 */
@java.lang.Override
public com.google.cloud.dialogflow.v2.SearchKnowledgeAnswer getAnswers(int index) {
return answers_.get(index);
}
/**
 *
 *
 * <pre>
 * Most relevant snippets extracted from articles in the given knowledge base,
 * ordered by confidence.
 * </pre>
 *
 * <code>repeated .google.cloud.dialogflow.v2.SearchKnowledgeAnswer answers = 2;</code>
 */
@java.lang.Override
public com.google.cloud.dialogflow.v2.SearchKnowledgeAnswerOrBuilder getAnswersOrBuilder(
int index) {
return answers_.get(index);
}
// String field `rewritten_query` (field number 3). The backing field holds
// either a String or a ByteString; accessors lazily convert and cache.
public static final int REWRITTEN_QUERY_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object rewrittenQuery_ = "";
/**
 *
 *
 * <pre>
 * The rewritten query used to search knowledge.
 * </pre>
 *
 * <code>string rewritten_query = 3;</code>
 *
 * @return The rewrittenQuery.
 */
@java.lang.Override
public java.lang.String getRewrittenQuery() {
java.lang.Object ref = rewrittenQuery_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
rewrittenQuery_ = s;
return s;
}
}
/**
 *
 *
 * <pre>
 * The rewritten query used to search knowledge.
 * </pre>
 *
 * <code>string rewritten_query = 3;</code>
 *
 * @return The bytes for rewrittenQuery.
 */
@java.lang.Override
public com.google.protobuf.ByteString getRewrittenQueryBytes() {
java.lang.Object ref = rewrittenQuery_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
rewrittenQuery_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// Memo for isInitialized(): -1 = not yet computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
// No required fields in this proto3 message, so it is always initialized.
memoizedIsInitialized = 1;
return true;
}
// Serializes the repeated `answers` (tag 2) and non-empty `rewritten_query`
// (tag 3), then any unknown fields.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < answers_.size(); i++) {
output.writeMessage(2, answers_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(rewrittenQuery_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 3, rewrittenQuery_);
}
getUnknownFields().writeTo(output);
}
// Computes (and memoizes in memoizedSize; -1 = not computed) the exact
// serialized byte size, mirroring writeTo().
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < answers_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, answers_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(rewrittenQuery_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, rewrittenQuery_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
// Value equality: same type, equal answers list, equal rewritten query, and
// identical unknown field sets.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.dialogflow.v2.SearchKnowledgeResponse)) {
return super.equals(obj);
}
com.google.cloud.dialogflow.v2.SearchKnowledgeResponse other =
(com.google.cloud.dialogflow.v2.SearchKnowledgeResponse) obj;
if (!getAnswersList().equals(other.getAnswersList())) return false;
if (!getRewrittenQuery().equals(other.getRewrittenQuery())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
// Hash over descriptor, fields, and unknown fields; memoized in
// memoizedHashCode (0 = not yet computed). Empty repeated field is skipped,
// keeping hashes stable across default instances.
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getAnswersCount() > 0) {
hash = (37 * hash) + ANSWERS_FIELD_NUMBER;
hash = (53 * hash) + getAnswersList().hashCode();
}
hash = (37 * hash) + REWRITTEN_QUERY_FIELD_NUMBER;
hash = (53 * hash) + getRewrittenQuery().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Static parsing entry points; all overloads delegate to the shared PARSER.
public static com.google.cloud.dialogflow.v2.SearchKnowledgeResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.v2.SearchKnowledgeResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.v2.SearchKnowledgeResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.v2.SearchKnowledgeResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.v2.SearchKnowledgeResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.v2.SearchKnowledgeResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.v2.SearchKnowledgeResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.v2.SearchKnowledgeResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.dialogflow.v2.SearchKnowledgeResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.v2.SearchKnowledgeResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.dialogflow.v2.SearchKnowledgeResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.v2.SearchKnowledgeResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
  // Builder factory methods (generated).
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(
      com.google.cloud.dialogflow.v2.SearchKnowledgeResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // Avoid an unnecessary mergeFrom when this is the default instance.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * The response message for
   * [Conversations.SearchKnowledge][google.cloud.dialogflow.v2.Conversations.SearchKnowledge].
   * </pre>
   *
   * Protobuf type {@code google.cloud.dialogflow.v2.SearchKnowledgeResponse}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.dialogflow.v2.SearchKnowledgeResponse)
      com.google.cloud.dialogflow.v2.SearchKnowledgeResponseOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.dialogflow.v2.ConversationProto
          .internal_static_google_cloud_dialogflow_v2_SearchKnowledgeResponse_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.dialogflow.v2.ConversationProto
          .internal_static_google_cloud_dialogflow_v2_SearchKnowledgeResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.dialogflow.v2.SearchKnowledgeResponse.class,
              com.google.cloud.dialogflow.v2.SearchKnowledgeResponse.Builder.class);
    }
    // Construct using com.google.cloud.dialogflow.v2.SearchKnowledgeResponse.newBuilder()
    private Builder() {}
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    // Resets all fields to their defaults and clears bitField0_ state.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (answersBuilder_ == null) {
        answers_ = java.util.Collections.emptyList();
      } else {
        answers_ = null;
        answersBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      rewrittenQuery_ = "";
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.dialogflow.v2.ConversationProto
          .internal_static_google_cloud_dialogflow_v2_SearchKnowledgeResponse_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.dialogflow.v2.SearchKnowledgeResponse getDefaultInstanceForType() {
      return com.google.cloud.dialogflow.v2.SearchKnowledgeResponse.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.dialogflow.v2.SearchKnowledgeResponse build() {
      com.google.cloud.dialogflow.v2.SearchKnowledgeResponse result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.dialogflow.v2.SearchKnowledgeResponse buildPartial() {
      com.google.cloud.dialogflow.v2.SearchKnowledgeResponse result =
          new com.google.cloud.dialogflow.v2.SearchKnowledgeResponse(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Transfers the repeated answers field; once handed to a message the list
    // is made unmodifiable and the "mutable" bit is cleared.
    private void buildPartialRepeatedFields(
        com.google.cloud.dialogflow.v2.SearchKnowledgeResponse result) {
      if (answersBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)) {
          answers_ = java.util.Collections.unmodifiableList(answers_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.answers_ = answers_;
      } else {
        result.answers_ = answersBuilder_.build();
      }
    }
    // Copies scalar fields guarded by bitField0_ (0x2 = rewrittenQuery set).
    private void buildPartial0(com.google.cloud.dialogflow.v2.SearchKnowledgeResponse result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.rewrittenQuery_ = rewrittenQuery_;
      }
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.dialogflow.v2.SearchKnowledgeResponse) {
        return mergeFrom((com.google.cloud.dialogflow.v2.SearchKnowledgeResponse) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Merges another SearchKnowledgeResponse into this builder. The repeated
    // field has two storage modes: a plain list (answersBuilder_ == null) or a
    // RepeatedFieldBuilderV3; each branch below handles one mode.
    public Builder mergeFrom(com.google.cloud.dialogflow.v2.SearchKnowledgeResponse other) {
      if (other == com.google.cloud.dialogflow.v2.SearchKnowledgeResponse.getDefaultInstance())
        return this;
      if (answersBuilder_ == null) {
        if (!other.answers_.isEmpty()) {
          if (answers_.isEmpty()) {
            answers_ = other.answers_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureAnswersIsMutable();
            answers_.addAll(other.answers_);
          }
          onChanged();
        }
      } else {
        if (!other.answers_.isEmpty()) {
          if (answersBuilder_.isEmpty()) {
            answersBuilder_.dispose();
            answersBuilder_ = null;
            answers_ = other.answers_;
            bitField0_ = (bitField0_ & ~0x00000001);
            answersBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getAnswersFieldBuilder()
                    : null;
          } else {
            answersBuilder_.addAllMessages(other.answers_);
          }
        }
      }
      if (!other.getRewrittenQuery().isEmpty()) {
        rewrittenQuery_ = other.rewrittenQuery_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Wire-format parse loop; tag 18 = answers (field 2, length-delimited),
    // tag 26 = rewritten_query (field 3, length-delimited).
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 18:
              {
                com.google.cloud.dialogflow.v2.SearchKnowledgeAnswer m =
                    input.readMessage(
                        com.google.cloud.dialogflow.v2.SearchKnowledgeAnswer.parser(),
                        extensionRegistry);
                if (answersBuilder_ == null) {
                  ensureAnswersIsMutable();
                  answers_.add(m);
                } else {
                  answersBuilder_.addMessage(m);
                }
                break;
              } // case 18
            case 26:
              {
                rewrittenQuery_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 26
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Bit 0x00000001 = answers_ list is locally owned/mutable;
    // bit 0x00000002 = rewrittenQuery_ has been explicitly set.
    private int bitField0_;
    private java.util.List<com.google.cloud.dialogflow.v2.SearchKnowledgeAnswer> answers_ =
        java.util.Collections.emptyList();
    // Copy-on-write: replaces a shared/immutable list with a private ArrayList.
    private void ensureAnswersIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        answers_ =
            new java.util.ArrayList<com.google.cloud.dialogflow.v2.SearchKnowledgeAnswer>(answers_);
        bitField0_ |= 0x00000001;
      }
    }
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.dialogflow.v2.SearchKnowledgeAnswer,
            com.google.cloud.dialogflow.v2.SearchKnowledgeAnswer.Builder,
            com.google.cloud.dialogflow.v2.SearchKnowledgeAnswerOrBuilder>
        answersBuilder_;
    /**
     *
     *
     * <pre>
     * Most relevant snippets extracted from articles in the given knowledge base,
     * ordered by confidence.
     * </pre>
     *
     * <code>repeated .google.cloud.dialogflow.v2.SearchKnowledgeAnswer answers = 2;</code>
     */
    public java.util.List<com.google.cloud.dialogflow.v2.SearchKnowledgeAnswer> getAnswersList() {
      if (answersBuilder_ == null) {
        return java.util.Collections.unmodifiableList(answers_);
      } else {
        return answersBuilder_.getMessageList();
      }
    }
    /**
     *
     *
     * <pre>
     * Most relevant snippets extracted from articles in the given knowledge base,
     * ordered by confidence.
     * </pre>
     *
     * <code>repeated .google.cloud.dialogflow.v2.SearchKnowledgeAnswer answers = 2;</code>
     */
    public int getAnswersCount() {
      if (answersBuilder_ == null) {
        return answers_.size();
      } else {
        return answersBuilder_.getCount();
      }
    }
    /**
     *
     *
     * <pre>
     * Most relevant snippets extracted from articles in the given knowledge base,
     * ordered by confidence.
     * </pre>
     *
     * <code>repeated .google.cloud.dialogflow.v2.SearchKnowledgeAnswer answers = 2;</code>
     */
    public com.google.cloud.dialogflow.v2.SearchKnowledgeAnswer getAnswers(int index) {
      if (answersBuilder_ == null) {
        return answers_.get(index);
      } else {
        return answersBuilder_.getMessage(index);
      }
    }
    /**
     *
     *
     * <pre>
     * Most relevant snippets extracted from articles in the given knowledge base,
     * ordered by confidence.
     * </pre>
     *
     * <code>repeated .google.cloud.dialogflow.v2.SearchKnowledgeAnswer answers = 2;</code>
     */
    public Builder setAnswers(
        int index, com.google.cloud.dialogflow.v2.SearchKnowledgeAnswer value) {
      if (answersBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureAnswersIsMutable();
        answers_.set(index, value);
        onChanged();
      } else {
        answersBuilder_.setMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Most relevant snippets extracted from articles in the given knowledge base,
     * ordered by confidence.
     * </pre>
     *
     * <code>repeated .google.cloud.dialogflow.v2.SearchKnowledgeAnswer answers = 2;</code>
     */
    public Builder setAnswers(
        int index, com.google.cloud.dialogflow.v2.SearchKnowledgeAnswer.Builder builderForValue) {
      if (answersBuilder_ == null) {
        ensureAnswersIsMutable();
        answers_.set(index, builderForValue.build());
        onChanged();
      } else {
        answersBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Most relevant snippets extracted from articles in the given knowledge base,
     * ordered by confidence.
     * </pre>
     *
     * <code>repeated .google.cloud.dialogflow.v2.SearchKnowledgeAnswer answers = 2;</code>
     */
    public Builder addAnswers(com.google.cloud.dialogflow.v2.SearchKnowledgeAnswer value) {
      if (answersBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureAnswersIsMutable();
        answers_.add(value);
        onChanged();
      } else {
        answersBuilder_.addMessage(value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Most relevant snippets extracted from articles in the given knowledge base,
     * ordered by confidence.
     * </pre>
     *
     * <code>repeated .google.cloud.dialogflow.v2.SearchKnowledgeAnswer answers = 2;</code>
     */
    public Builder addAnswers(
        int index, com.google.cloud.dialogflow.v2.SearchKnowledgeAnswer value) {
      if (answersBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureAnswersIsMutable();
        answers_.add(index, value);
        onChanged();
      } else {
        answersBuilder_.addMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Most relevant snippets extracted from articles in the given knowledge base,
     * ordered by confidence.
     * </pre>
     *
     * <code>repeated .google.cloud.dialogflow.v2.SearchKnowledgeAnswer answers = 2;</code>
     */
    public Builder addAnswers(
        com.google.cloud.dialogflow.v2.SearchKnowledgeAnswer.Builder builderForValue) {
      if (answersBuilder_ == null) {
        ensureAnswersIsMutable();
        answers_.add(builderForValue.build());
        onChanged();
      } else {
        answersBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Most relevant snippets extracted from articles in the given knowledge base,
     * ordered by confidence.
     * </pre>
     *
     * <code>repeated .google.cloud.dialogflow.v2.SearchKnowledgeAnswer answers = 2;</code>
     */
    public Builder addAnswers(
        int index, com.google.cloud.dialogflow.v2.SearchKnowledgeAnswer.Builder builderForValue) {
      if (answersBuilder_ == null) {
        ensureAnswersIsMutable();
        answers_.add(index, builderForValue.build());
        onChanged();
      } else {
        answersBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Most relevant snippets extracted from articles in the given knowledge base,
     * ordered by confidence.
     * </pre>
     *
     * <code>repeated .google.cloud.dialogflow.v2.SearchKnowledgeAnswer answers = 2;</code>
     */
    public Builder addAllAnswers(
        java.lang.Iterable<? extends com.google.cloud.dialogflow.v2.SearchKnowledgeAnswer> values) {
      if (answersBuilder_ == null) {
        ensureAnswersIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, answers_);
        onChanged();
      } else {
        answersBuilder_.addAllMessages(values);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Most relevant snippets extracted from articles in the given knowledge base,
     * ordered by confidence.
     * </pre>
     *
     * <code>repeated .google.cloud.dialogflow.v2.SearchKnowledgeAnswer answers = 2;</code>
     */
    public Builder clearAnswers() {
      if (answersBuilder_ == null) {
        answers_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        answersBuilder_.clear();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Most relevant snippets extracted from articles in the given knowledge base,
     * ordered by confidence.
     * </pre>
     *
     * <code>repeated .google.cloud.dialogflow.v2.SearchKnowledgeAnswer answers = 2;</code>
     */
    public Builder removeAnswers(int index) {
      if (answersBuilder_ == null) {
        ensureAnswersIsMutable();
        answers_.remove(index);
        onChanged();
      } else {
        answersBuilder_.remove(index);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Most relevant snippets extracted from articles in the given knowledge base,
     * ordered by confidence.
     * </pre>
     *
     * <code>repeated .google.cloud.dialogflow.v2.SearchKnowledgeAnswer answers = 2;</code>
     */
    public com.google.cloud.dialogflow.v2.SearchKnowledgeAnswer.Builder getAnswersBuilder(
        int index) {
      return getAnswersFieldBuilder().getBuilder(index);
    }
    /**
     *
     *
     * <pre>
     * Most relevant snippets extracted from articles in the given knowledge base,
     * ordered by confidence.
     * </pre>
     *
     * <code>repeated .google.cloud.dialogflow.v2.SearchKnowledgeAnswer answers = 2;</code>
     */
    public com.google.cloud.dialogflow.v2.SearchKnowledgeAnswerOrBuilder getAnswersOrBuilder(
        int index) {
      if (answersBuilder_ == null) {
        return answers_.get(index);
      } else {
        return answersBuilder_.getMessageOrBuilder(index);
      }
    }
    /**
     *
     *
     * <pre>
     * Most relevant snippets extracted from articles in the given knowledge base,
     * ordered by confidence.
     * </pre>
     *
     * <code>repeated .google.cloud.dialogflow.v2.SearchKnowledgeAnswer answers = 2;</code>
     */
    public java.util.List<? extends com.google.cloud.dialogflow.v2.SearchKnowledgeAnswerOrBuilder>
        getAnswersOrBuilderList() {
      if (answersBuilder_ != null) {
        return answersBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(answers_);
      }
    }
    /**
     *
     *
     * <pre>
     * Most relevant snippets extracted from articles in the given knowledge base,
     * ordered by confidence.
     * </pre>
     *
     * <code>repeated .google.cloud.dialogflow.v2.SearchKnowledgeAnswer answers = 2;</code>
     */
    public com.google.cloud.dialogflow.v2.SearchKnowledgeAnswer.Builder addAnswersBuilder() {
      return getAnswersFieldBuilder()
          .addBuilder(com.google.cloud.dialogflow.v2.SearchKnowledgeAnswer.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * Most relevant snippets extracted from articles in the given knowledge base,
     * ordered by confidence.
     * </pre>
     *
     * <code>repeated .google.cloud.dialogflow.v2.SearchKnowledgeAnswer answers = 2;</code>
     */
    public com.google.cloud.dialogflow.v2.SearchKnowledgeAnswer.Builder addAnswersBuilder(
        int index) {
      return getAnswersFieldBuilder()
          .addBuilder(
              index, com.google.cloud.dialogflow.v2.SearchKnowledgeAnswer.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * Most relevant snippets extracted from articles in the given knowledge base,
     * ordered by confidence.
     * </pre>
     *
     * <code>repeated .google.cloud.dialogflow.v2.SearchKnowledgeAnswer answers = 2;</code>
     */
    public java.util.List<com.google.cloud.dialogflow.v2.SearchKnowledgeAnswer.Builder>
        getAnswersBuilderList() {
      return getAnswersFieldBuilder().getBuilderList();
    }
    // Lazily switches the answers field from list storage to a field builder;
    // after this call answers_ is null and answersBuilder_ owns the data.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.dialogflow.v2.SearchKnowledgeAnswer,
            com.google.cloud.dialogflow.v2.SearchKnowledgeAnswer.Builder,
            com.google.cloud.dialogflow.v2.SearchKnowledgeAnswerOrBuilder>
        getAnswersFieldBuilder() {
      if (answersBuilder_ == null) {
        answersBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloud.dialogflow.v2.SearchKnowledgeAnswer,
                com.google.cloud.dialogflow.v2.SearchKnowledgeAnswer.Builder,
                com.google.cloud.dialogflow.v2.SearchKnowledgeAnswerOrBuilder>(
                answers_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
        answers_ = null;
      }
      return answersBuilder_;
    }
    // Stored as Object: either a String or a ByteString (lazy UTF-8 decode).
    private java.lang.Object rewrittenQuery_ = "";
    /**
     *
     *
     * <pre>
     * The rewritten query used to search knowledge.
     * </pre>
     *
     * <code>string rewritten_query = 3;</code>
     *
     * @return The rewrittenQuery.
     */
    public java.lang.String getRewrittenQuery() {
      java.lang.Object ref = rewrittenQuery_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        rewrittenQuery_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * The rewritten query used to search knowledge.
     * </pre>
     *
     * <code>string rewritten_query = 3;</code>
     *
     * @return The bytes for rewrittenQuery.
     */
    public com.google.protobuf.ByteString getRewrittenQueryBytes() {
      java.lang.Object ref = rewrittenQuery_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        rewrittenQuery_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * The rewritten query used to search knowledge.
     * </pre>
     *
     * <code>string rewritten_query = 3;</code>
     *
     * @param value The rewrittenQuery to set.
     * @return This builder for chaining.
     */
    public Builder setRewrittenQuery(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      rewrittenQuery_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The rewritten query used to search knowledge.
     * </pre>
     *
     * <code>string rewritten_query = 3;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearRewrittenQuery() {
      rewrittenQuery_ = getDefaultInstance().getRewrittenQuery();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The rewritten query used to search knowledge.
     * </pre>
     *
     * <code>string rewritten_query = 3;</code>
     *
     * @param value The bytes for rewrittenQuery to set.
     * @return This builder for chaining.
     */
    public Builder setRewrittenQueryBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      rewrittenQuery_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.cloud.dialogflow.v2.SearchKnowledgeResponse)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.dialogflow.v2.SearchKnowledgeResponse)
  // Shared singleton for the all-defaults message (messages are immutable).
  private static final com.google.cloud.dialogflow.v2.SearchKnowledgeResponse DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.dialogflow.v2.SearchKnowledgeResponse();
  }
  public static com.google.cloud.dialogflow.v2.SearchKnowledgeResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Wire-format parser; delegates to Builder.mergeFrom and attaches the
  // partially built message to any parse exception for diagnostics.
  private static final com.google.protobuf.Parser<SearchKnowledgeResponse> PARSER =
      new com.google.protobuf.AbstractParser<SearchKnowledgeResponse>() {
        @java.lang.Override
        public SearchKnowledgeResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<SearchKnowledgeResponse> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<SearchKnowledgeResponse> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.dialogflow.v2.SearchKnowledgeResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/aiplatform/v1/tensorboard_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.aiplatform.v1;
/**
*
*
* <pre>
* Response message for
* [TensorboardService.ListTensorboards][google.cloud.aiplatform.v1.TensorboardService.ListTensorboards].
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1.ListTensorboardsResponse}
*/
public final class ListTensorboardsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1.ListTensorboardsResponse)
ListTensorboardsResponseOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use ListTensorboardsResponse.newBuilder() to construct.
  private ListTensorboardsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Default-instance constructor: initializes fields to proto3 defaults.
  private ListTensorboardsResponse() {
    tensorboards_ = java.util.Collections.emptyList();
    nextPageToken_ = "";
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListTensorboardsResponse();
  }
  // Descriptor/reflection plumbing generated from tensorboard_service.proto.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.aiplatform.v1.TensorboardServiceProto
        .internal_static_google_cloud_aiplatform_v1_ListTensorboardsResponse_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.aiplatform.v1.TensorboardServiceProto
        .internal_static_google_cloud_aiplatform_v1_ListTensorboardsResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.aiplatform.v1.ListTensorboardsResponse.class,
            com.google.cloud.aiplatform.v1.ListTensorboardsResponse.Builder.class);
  }
  public static final int TENSORBOARDS_FIELD_NUMBER = 1;
  @SuppressWarnings("serial")
  private java.util.List<com.google.cloud.aiplatform.v1.Tensorboard> tensorboards_;
  /**
   *
   *
   * <pre>
   * The Tensorboards matching the request.
   * </pre>
   *
   * <code>repeated .google.cloud.aiplatform.v1.Tensorboard tensorboards = 1;</code>
   */
  @java.lang.Override
  public java.util.List<com.google.cloud.aiplatform.v1.Tensorboard> getTensorboardsList() {
    return tensorboards_;
  }
  /**
   *
   *
   * <pre>
   * The Tensorboards matching the request.
   * </pre>
   *
   * <code>repeated .google.cloud.aiplatform.v1.Tensorboard tensorboards = 1;</code>
   */
  @java.lang.Override
  public java.util.List<? extends com.google.cloud.aiplatform.v1.TensorboardOrBuilder>
      getTensorboardsOrBuilderList() {
    return tensorboards_;
  }
  /**
   *
   *
   * <pre>
   * The Tensorboards matching the request.
   * </pre>
   *
   * <code>repeated .google.cloud.aiplatform.v1.Tensorboard tensorboards = 1;</code>
   */
  @java.lang.Override
  public int getTensorboardsCount() {
    return tensorboards_.size();
  }
  /**
   *
   *
   * <pre>
   * The Tensorboards matching the request.
   * </pre>
   *
   * <code>repeated .google.cloud.aiplatform.v1.Tensorboard tensorboards = 1;</code>
   */
  @java.lang.Override
  public com.google.cloud.aiplatform.v1.Tensorboard getTensorboards(int index) {
    return tensorboards_.get(index);
  }
  /**
   *
   *
   * <pre>
   * The Tensorboards matching the request.
   * </pre>
   *
   * <code>repeated .google.cloud.aiplatform.v1.Tensorboard tensorboards = 1;</code>
   */
  @java.lang.Override
  public com.google.cloud.aiplatform.v1.TensorboardOrBuilder getTensorboardsOrBuilder(int index) {
    return tensorboards_.get(index);
  }
  public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
  // Stored as Object: either a String or a ByteString (lazy UTF-8 decode).
  @SuppressWarnings("serial")
  private volatile java.lang.Object nextPageToken_ = "";
  /**
   *
   *
   * <pre>
   * A token, which can be sent as
   * [ListTensorboardsRequest.page_token][google.cloud.aiplatform.v1.ListTensorboardsRequest.page_token]
   * to retrieve the next page. If this field is omitted, there are no
   * subsequent pages.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The nextPageToken.
   */
  @java.lang.Override
  public java.lang.String getNextPageToken() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // First access after parsing: decode the ByteString once and cache it.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      nextPageToken_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * A token, which can be sent as
   * [ListTensorboardsRequest.page_token][google.cloud.aiplatform.v1.ListTensorboardsRequest.page_token]
   * to retrieve the next page. If this field is omitted, there are no
   * subsequent pages.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The bytes for nextPageToken.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getNextPageTokenBytes() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      nextPageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // -1 = unknown, 0 = not initialized, 1 = initialized (memoized).
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes fields in field-number order, skipping proto3 defaults.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    for (int i = 0; i < tensorboards_.size(); i++) {
      output.writeMessage(1, tensorboards_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // Memoized: -1 means "not yet computed" (generated-code convention).
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    for (int i = 0; i < tensorboards_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, tensorboards_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.aiplatform.v1.ListTensorboardsResponse)) {
      return super.equals(obj);
    }
    com.google.cloud.aiplatform.v1.ListTensorboardsResponse other =
        (com.google.cloud.aiplatform.v1.ListTensorboardsResponse) obj;
    // Field-by-field value equality; unknown fields are included in the comparison.
    if (!getTensorboardsList().equals(other.getTensorboardsList())) return false;
    if (!getNextPageToken().equals(other.getNextPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // Memoized: 0 means "not yet computed" (generated-code convention).
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    // The repeated field only contributes when non-empty.
    if (getTensorboardsCount() > 0) {
      hash = (37 * hash) + TENSORBOARDS_FIELD_NUMBER;
      hash = (53 * hash) + getTensorboardsList().hashCode();
    }
    hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getNextPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points; all overloads delegate to PARSER
  // or to the GeneratedMessageV3 stream helpers.
  public static com.google.cloud.aiplatform.v1.ListTensorboardsResponse parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.aiplatform.v1.ListTensorboardsResponse parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.aiplatform.v1.ListTensorboardsResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.aiplatform.v1.ListTensorboardsResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.aiplatform.v1.ListTensorboardsResponse parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.aiplatform.v1.ListTensorboardsResponse parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.aiplatform.v1.ListTensorboardsResponse parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.aiplatform.v1.ListTensorboardsResponse parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Delimited variant reads a varint length prefix before the message body.
  public static com.google.cloud.aiplatform.v1.ListTensorboardsResponse parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
public static com.google.cloud.aiplatform.v1.ListTensorboardsResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.ListTensorboardsResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1.ListTensorboardsResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
  // --- Builder factories ----------------------------------------------------------------
  // Generated code ("DO NOT EDIT") — comments only, logic unchanged.

  /** Returns a fresh builder seeded from the default instance. */
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  /** Returns a new empty builder. */
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  /** Returns a new builder pre-populated with {@code prototype}'s fields. */
  public static Builder newBuilder(
      com.google.cloud.aiplatform.v1.ListTensorboardsResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  /** Empty builder for the default instance; otherwise a builder merged from this message. */
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  /** Parent-aware builder used internally for nested-builder change propagation. */
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Response message for
   * [TensorboardService.ListTensorboards][google.cloud.aiplatform.v1.TensorboardService.ListTensorboards].
   * </pre>
   *
   * Protobuf type {@code google.cloud.aiplatform.v1.ListTensorboardsResponse}
   *
   * <p>Generated builder ("DO NOT EDIT"): comments only, logic unchanged. Field presence is
   * tracked in {@code bitField0_}: bit 0x00000001 marks the {@code tensorboards_} list as
   * locally mutable, bit 0x00000002 marks {@code nextPageToken_} as explicitly set. The
   * repeated field operates in one of two modes: a plain {@code java.util.List} when
   * {@code tensorboardsBuilder_} is null, or a {@code RepeatedFieldBuilderV3} once nested
   * builders have been requested.
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1.ListTensorboardsResponse)
      com.google.cloud.aiplatform.v1.ListTensorboardsResponseOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.aiplatform.v1.TensorboardServiceProto
          .internal_static_google_cloud_aiplatform_v1_ListTensorboardsResponse_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.aiplatform.v1.TensorboardServiceProto
          .internal_static_google_cloud_aiplatform_v1_ListTensorboardsResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.aiplatform.v1.ListTensorboardsResponse.class,
              com.google.cloud.aiplatform.v1.ListTensorboardsResponse.Builder.class);
    }

    // Construct using com.google.cloud.aiplatform.v1.ListTensorboardsResponse.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    /** Resets every field to its default and clears all presence bits. */
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (tensorboardsBuilder_ == null) {
        tensorboards_ = java.util.Collections.emptyList();
      } else {
        tensorboards_ = null;
        tensorboardsBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      nextPageToken_ = "";
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.aiplatform.v1.TensorboardServiceProto
          .internal_static_google_cloud_aiplatform_v1_ListTensorboardsResponse_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.aiplatform.v1.ListTensorboardsResponse getDefaultInstanceForType() {
      return com.google.cloud.aiplatform.v1.ListTensorboardsResponse.getDefaultInstance();
    }

    /** Builds the message, throwing if required fields are unset (none here). */
    @java.lang.Override
    public com.google.cloud.aiplatform.v1.ListTensorboardsResponse build() {
      com.google.cloud.aiplatform.v1.ListTensorboardsResponse result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    /** Builds without the initialization check; copies set fields into the result. */
    @java.lang.Override
    public com.google.cloud.aiplatform.v1.ListTensorboardsResponse buildPartial() {
      com.google.cloud.aiplatform.v1.ListTensorboardsResponse result =
          new com.google.cloud.aiplatform.v1.ListTensorboardsResponse(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    private void buildPartialRepeatedFields(
        com.google.cloud.aiplatform.v1.ListTensorboardsResponse result) {
      if (tensorboardsBuilder_ == null) {
        // Freeze the locally-built list so the immutable message can share it safely.
        if (((bitField0_ & 0x00000001) != 0)) {
          tensorboards_ = java.util.Collections.unmodifiableList(tensorboards_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.tensorboards_ = tensorboards_;
      } else {
        result.tensorboards_ = tensorboardsBuilder_.build();
      }
    }

    private void buildPartial0(com.google.cloud.aiplatform.v1.ListTensorboardsResponse result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.nextPageToken_ = nextPageToken_;
      }
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.aiplatform.v1.ListTensorboardsResponse) {
        return mergeFrom((com.google.cloud.aiplatform.v1.ListTensorboardsResponse) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    /** Merges fields from {@code other}: list elements are appended, strings overwritten if set. */
    public Builder mergeFrom(com.google.cloud.aiplatform.v1.ListTensorboardsResponse other) {
      if (other == com.google.cloud.aiplatform.v1.ListTensorboardsResponse.getDefaultInstance())
        return this;
      if (tensorboardsBuilder_ == null) {
        if (!other.tensorboards_.isEmpty()) {
          if (tensorboards_.isEmpty()) {
            // Adopt other's (immutable) list directly; clear the mutable bit accordingly.
            tensorboards_ = other.tensorboards_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureTensorboardsIsMutable();
            tensorboards_.addAll(other.tensorboards_);
          }
          onChanged();
        }
      } else {
        if (!other.tensorboards_.isEmpty()) {
          if (tensorboardsBuilder_.isEmpty()) {
            // Dispose the empty field builder and adopt other's list, re-creating the
            // builder only when alwaysUseFieldBuilders is set.
            tensorboardsBuilder_.dispose();
            tensorboardsBuilder_ = null;
            tensorboards_ = other.tensorboards_;
            bitField0_ = (bitField0_ & ~0x00000001);
            tensorboardsBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getTensorboardsFieldBuilder()
                    : null;
          } else {
            tensorboardsBuilder_.addAllMessages(other.tensorboards_);
          }
        }
      }
      if (!other.getNextPageToken().isEmpty()) {
        nextPageToken_ = other.nextPageToken_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    /** Wire-format merge: tag 10 = tensorboards element, tag 18 = next_page_token. */
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                com.google.cloud.aiplatform.v1.Tensorboard m =
                    input.readMessage(
                        com.google.cloud.aiplatform.v1.Tensorboard.parser(), extensionRegistry);
                if (tensorboardsBuilder_ == null) {
                  ensureTensorboardsIsMutable();
                  tensorboards_.add(m);
                } else {
                  tensorboardsBuilder_.addMessage(m);
                }
                break;
              } // case 10
            case 18:
              {
                nextPageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    private int bitField0_;

    private java.util.List<com.google.cloud.aiplatform.v1.Tensorboard> tensorboards_ =
        java.util.Collections.emptyList();

    // Copy-on-write: replace the shared/immutable list with a private ArrayList once.
    private void ensureTensorboardsIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        tensorboards_ =
            new java.util.ArrayList<com.google.cloud.aiplatform.v1.Tensorboard>(tensorboards_);
        bitField0_ |= 0x00000001;
      }
    }

    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.aiplatform.v1.Tensorboard,
            com.google.cloud.aiplatform.v1.Tensorboard.Builder,
            com.google.cloud.aiplatform.v1.TensorboardOrBuilder>
        tensorboardsBuilder_;

    /**
     *
     *
     * <pre>
     * The Tensorboards matching the request.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Tensorboard tensorboards = 1;</code>
     */
    public java.util.List<com.google.cloud.aiplatform.v1.Tensorboard> getTensorboardsList() {
      if (tensorboardsBuilder_ == null) {
        return java.util.Collections.unmodifiableList(tensorboards_);
      } else {
        return tensorboardsBuilder_.getMessageList();
      }
    }

    /**
     *
     *
     * <pre>
     * The Tensorboards matching the request.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Tensorboard tensorboards = 1;</code>
     */
    public int getTensorboardsCount() {
      if (tensorboardsBuilder_ == null) {
        return tensorboards_.size();
      } else {
        return tensorboardsBuilder_.getCount();
      }
    }

    /**
     *
     *
     * <pre>
     * The Tensorboards matching the request.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Tensorboard tensorboards = 1;</code>
     */
    public com.google.cloud.aiplatform.v1.Tensorboard getTensorboards(int index) {
      if (tensorboardsBuilder_ == null) {
        return tensorboards_.get(index);
      } else {
        return tensorboardsBuilder_.getMessage(index);
      }
    }

    /**
     *
     *
     * <pre>
     * The Tensorboards matching the request.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Tensorboard tensorboards = 1;</code>
     */
    public Builder setTensorboards(int index, com.google.cloud.aiplatform.v1.Tensorboard value) {
      if (tensorboardsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureTensorboardsIsMutable();
        tensorboards_.set(index, value);
        onChanged();
      } else {
        tensorboardsBuilder_.setMessage(index, value);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The Tensorboards matching the request.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Tensorboard tensorboards = 1;</code>
     */
    public Builder setTensorboards(
        int index, com.google.cloud.aiplatform.v1.Tensorboard.Builder builderForValue) {
      if (tensorboardsBuilder_ == null) {
        ensureTensorboardsIsMutable();
        tensorboards_.set(index, builderForValue.build());
        onChanged();
      } else {
        tensorboardsBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The Tensorboards matching the request.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Tensorboard tensorboards = 1;</code>
     */
    public Builder addTensorboards(com.google.cloud.aiplatform.v1.Tensorboard value) {
      if (tensorboardsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureTensorboardsIsMutable();
        tensorboards_.add(value);
        onChanged();
      } else {
        tensorboardsBuilder_.addMessage(value);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The Tensorboards matching the request.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Tensorboard tensorboards = 1;</code>
     */
    public Builder addTensorboards(int index, com.google.cloud.aiplatform.v1.Tensorboard value) {
      if (tensorboardsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureTensorboardsIsMutable();
        tensorboards_.add(index, value);
        onChanged();
      } else {
        tensorboardsBuilder_.addMessage(index, value);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The Tensorboards matching the request.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Tensorboard tensorboards = 1;</code>
     */
    public Builder addTensorboards(
        com.google.cloud.aiplatform.v1.Tensorboard.Builder builderForValue) {
      if (tensorboardsBuilder_ == null) {
        ensureTensorboardsIsMutable();
        tensorboards_.add(builderForValue.build());
        onChanged();
      } else {
        tensorboardsBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The Tensorboards matching the request.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Tensorboard tensorboards = 1;</code>
     */
    public Builder addTensorboards(
        int index, com.google.cloud.aiplatform.v1.Tensorboard.Builder builderForValue) {
      if (tensorboardsBuilder_ == null) {
        ensureTensorboardsIsMutable();
        tensorboards_.add(index, builderForValue.build());
        onChanged();
      } else {
        tensorboardsBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The Tensorboards matching the request.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Tensorboard tensorboards = 1;</code>
     */
    public Builder addAllTensorboards(
        java.lang.Iterable<? extends com.google.cloud.aiplatform.v1.Tensorboard> values) {
      if (tensorboardsBuilder_ == null) {
        ensureTensorboardsIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, tensorboards_);
        onChanged();
      } else {
        tensorboardsBuilder_.addAllMessages(values);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The Tensorboards matching the request.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Tensorboard tensorboards = 1;</code>
     */
    public Builder clearTensorboards() {
      if (tensorboardsBuilder_ == null) {
        tensorboards_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        tensorboardsBuilder_.clear();
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The Tensorboards matching the request.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Tensorboard tensorboards = 1;</code>
     */
    public Builder removeTensorboards(int index) {
      if (tensorboardsBuilder_ == null) {
        ensureTensorboardsIsMutable();
        tensorboards_.remove(index);
        onChanged();
      } else {
        tensorboardsBuilder_.remove(index);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The Tensorboards matching the request.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Tensorboard tensorboards = 1;</code>
     */
    public com.google.cloud.aiplatform.v1.Tensorboard.Builder getTensorboardsBuilder(int index) {
      return getTensorboardsFieldBuilder().getBuilder(index);
    }

    /**
     *
     *
     * <pre>
     * The Tensorboards matching the request.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Tensorboard tensorboards = 1;</code>
     */
    public com.google.cloud.aiplatform.v1.TensorboardOrBuilder getTensorboardsOrBuilder(int index) {
      if (tensorboardsBuilder_ == null) {
        return tensorboards_.get(index);
      } else {
        return tensorboardsBuilder_.getMessageOrBuilder(index);
      }
    }

    /**
     *
     *
     * <pre>
     * The Tensorboards matching the request.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Tensorboard tensorboards = 1;</code>
     */
    public java.util.List<? extends com.google.cloud.aiplatform.v1.TensorboardOrBuilder>
        getTensorboardsOrBuilderList() {
      if (tensorboardsBuilder_ != null) {
        return tensorboardsBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(tensorboards_);
      }
    }

    /**
     *
     *
     * <pre>
     * The Tensorboards matching the request.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Tensorboard tensorboards = 1;</code>
     */
    public com.google.cloud.aiplatform.v1.Tensorboard.Builder addTensorboardsBuilder() {
      return getTensorboardsFieldBuilder()
          .addBuilder(com.google.cloud.aiplatform.v1.Tensorboard.getDefaultInstance());
    }

    /**
     *
     *
     * <pre>
     * The Tensorboards matching the request.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Tensorboard tensorboards = 1;</code>
     */
    public com.google.cloud.aiplatform.v1.Tensorboard.Builder addTensorboardsBuilder(int index) {
      return getTensorboardsFieldBuilder()
          .addBuilder(index, com.google.cloud.aiplatform.v1.Tensorboard.getDefaultInstance());
    }

    /**
     *
     *
     * <pre>
     * The Tensorboards matching the request.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Tensorboard tensorboards = 1;</code>
     */
    public java.util.List<com.google.cloud.aiplatform.v1.Tensorboard.Builder>
        getTensorboardsBuilderList() {
      return getTensorboardsFieldBuilder().getBuilderList();
    }

    // Lazily switches the repeated field into field-builder mode; after this,
    // tensorboards_ is nulled and the builder owns the data.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.aiplatform.v1.Tensorboard,
            com.google.cloud.aiplatform.v1.Tensorboard.Builder,
            com.google.cloud.aiplatform.v1.TensorboardOrBuilder>
        getTensorboardsFieldBuilder() {
      if (tensorboardsBuilder_ == null) {
        tensorboardsBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloud.aiplatform.v1.Tensorboard,
                com.google.cloud.aiplatform.v1.Tensorboard.Builder,
                com.google.cloud.aiplatform.v1.TensorboardOrBuilder>(
                tensorboards_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
        tensorboards_ = null;
      }
      return tensorboardsBuilder_;
    }

    // Stored as String or ByteString; lazily converted on access (see getters below).
    private java.lang.Object nextPageToken_ = "";

    /**
     *
     *
     * <pre>
     * A token, which can be sent as
     * [ListTensorboardsRequest.page_token][google.cloud.aiplatform.v1.ListTensorboardsRequest.page_token]
     * to retrieve the next page. If this field is omitted, there are no
     * subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The nextPageToken.
     */
    public java.lang.String getNextPageToken() {
      java.lang.Object ref = nextPageToken_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        nextPageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * A token, which can be sent as
     * [ListTensorboardsRequest.page_token][google.cloud.aiplatform.v1.ListTensorboardsRequest.page_token]
     * to retrieve the next page. If this field is omitted, there are no
     * subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The bytes for nextPageToken.
     */
    public com.google.protobuf.ByteString getNextPageTokenBytes() {
      java.lang.Object ref = nextPageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        nextPageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * A token, which can be sent as
     * [ListTensorboardsRequest.page_token][google.cloud.aiplatform.v1.ListTensorboardsRequest.page_token]
     * to retrieve the next page. If this field is omitted, there are no
     * subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * A token, which can be sent as
     * [ListTensorboardsRequest.page_token][google.cloud.aiplatform.v1.ListTensorboardsRequest.page_token]
     * to retrieve the next page. If this field is omitted, there are no
     * subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearNextPageToken() {
      nextPageToken_ = getDefaultInstance().getNextPageToken();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * A token, which can be sent as
     * [ListTensorboardsRequest.page_token][google.cloud.aiplatform.v1.ListTensorboardsRequest.page_token]
     * to retrieve the next page. If this field is omitted, there are no
     * subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The bytes for nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1.ListTensorboardsResponse)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1.ListTensorboardsResponse)
  // Shared singleton default instance (all fields at default values); generated code.
  private static final com.google.cloud.aiplatform.v1.ListTensorboardsResponse DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1.ListTensorboardsResponse();
  }

  public static com.google.cloud.aiplatform.v1.ListTensorboardsResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Wire parser: delegates to Builder.mergeFrom and attaches the partially-built
  // message to any InvalidProtocolBufferException so callers can inspect it.
  // Generated code ("DO NOT EDIT") — comments only, logic unchanged.
  private static final com.google.protobuf.Parser<ListTensorboardsResponse> PARSER =
      new com.google.protobuf.AbstractParser<ListTensorboardsResponse>() {
        @java.lang.Override
        public ListTensorboardsResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O failures so the parser API surfaces a single exception type.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  /** Static accessor for the singleton wire parser. */
  public static com.google.protobuf.Parser<ListTensorboardsResponse> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<ListTensorboardsResponse> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.aiplatform.v1.ListTensorboardsResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
oracle/graalpython | 38,357 | graalpython/com.oracle.graal.python/src/com/oracle/graal/python/builtins/modules/cext/PythonCextObjectBuiltins.java | /*
* Copyright (c) 2021, 2025, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* The Universal Permissive License (UPL), Version 1.0
*
* Subject to the condition set forth below, permission is hereby granted to any
* person obtaining a copy of this software, associated documentation and/or
* data (collectively the "Software"), free of charge and under any and all
* copyright rights in the Software, and any and all patent rights owned or
* freely licensable by each licensor hereunder covering either (i) the
* unmodified Software as contributed to or provided by such licensor, or (ii)
* the Larger Works (as defined below), to deal in both
*
* (a) the Software, and
*
* (b) any piece of software and/or hardware listed in the lrgrwrks.txt file if
* one is included with the Software each a "Larger Work" to which the Software
* is contributed by such licensors),
*
* without restriction, including without limitation the rights to copy, create
* derivative works of, display, perform, and distribute the Software and make,
* use, sell, offer for sale, import, export, have made, and have sold the
* Software and the Larger Work(s), and to sublicense the foregoing rights on
* either these or other terms.
*
* This license is subject to the following condition:
*
* The above copyright notice and either this complete permission notice or at a
* minimum a reference to the UPL must be included in all copies or substantial
* portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.oracle.graal.python.builtins.modules.cext;
import static com.oracle.graal.python.builtins.PythonBuiltinClassType.NotImplementedError;
import static com.oracle.graal.python.builtins.PythonBuiltinClassType.TypeError;
import static com.oracle.graal.python.builtins.modules.cext.PythonCextBuiltins.CApiCallPath.Direct;
import static com.oracle.graal.python.builtins.modules.cext.PythonCextBuiltins.CApiCallPath.Ignored;
import static com.oracle.graal.python.builtins.objects.cext.capi.transitions.ArgDescriptor.ConstCharPtrAsTruffleString;
import static com.oracle.graal.python.builtins.objects.cext.capi.transitions.ArgDescriptor.Int;
import static com.oracle.graal.python.builtins.objects.cext.capi.transitions.ArgDescriptor.Pointer;
import static com.oracle.graal.python.builtins.objects.cext.capi.transitions.ArgDescriptor.PyObject;
import static com.oracle.graal.python.builtins.objects.cext.capi.transitions.ArgDescriptor.PyObjectConstPtr;
import static com.oracle.graal.python.builtins.objects.cext.capi.transitions.ArgDescriptor.PyObjectRawPointer;
import static com.oracle.graal.python.builtins.objects.cext.capi.transitions.ArgDescriptor.PyObjectTransfer;
import static com.oracle.graal.python.builtins.objects.cext.capi.transitions.ArgDescriptor.PyObjectWrapper;
import static com.oracle.graal.python.builtins.objects.cext.capi.transitions.ArgDescriptor.PyThreadState;
import static com.oracle.graal.python.builtins.objects.cext.capi.transitions.ArgDescriptor.PyVarObject;
import static com.oracle.graal.python.builtins.objects.cext.capi.transitions.ArgDescriptor.Py_hash_t;
import static com.oracle.graal.python.builtins.objects.cext.capi.transitions.ArgDescriptor.Py_ssize_t;
import static com.oracle.graal.python.builtins.objects.cext.capi.transitions.ArgDescriptor.VA_LIST_PTR;
import static com.oracle.graal.python.builtins.objects.cext.capi.transitions.ArgDescriptor.Void;
import static com.oracle.graal.python.builtins.objects.ints.PInt.intValue;
import static com.oracle.graal.python.nodes.ErrorMessages.UNHASHABLE_TYPE_P;
import static com.oracle.graal.python.nodes.SpecialMethodNames.T___BYTES__;
import static com.oracle.graal.python.nodes.StringLiterals.T_JAVA;
import static com.oracle.graal.python.util.PythonUtils.TS_ENCODING;
import java.io.PrintWriter;
import com.oracle.graal.python.PythonLanguage;
import com.oracle.graal.python.builtins.PythonBuiltinClassType;
import com.oracle.graal.python.builtins.modules.BuiltinFunctions.FormatNode;
import com.oracle.graal.python.builtins.modules.BuiltinFunctions.IsInstanceNode;
import com.oracle.graal.python.builtins.modules.BuiltinFunctions.IsSubClassNode;
import com.oracle.graal.python.builtins.modules.cext.PythonCextBuiltins.CApi5BuiltinNode;
import com.oracle.graal.python.builtins.modules.cext.PythonCextBuiltins.CApiBinaryBuiltinNode;
import com.oracle.graal.python.builtins.modules.cext.PythonCextBuiltins.CApiBuiltin;
import com.oracle.graal.python.builtins.modules.cext.PythonCextBuiltins.CApiNullaryBuiltinNode;
import com.oracle.graal.python.builtins.modules.cext.PythonCextBuiltins.CApiQuaternaryBuiltinNode;
import com.oracle.graal.python.builtins.modules.cext.PythonCextBuiltins.CApiTernaryBuiltinNode;
import com.oracle.graal.python.builtins.modules.cext.PythonCextBuiltins.CApiUnaryBuiltinNode;
import com.oracle.graal.python.builtins.modules.cext.PythonCextBuiltins.CastArgsNode;
import com.oracle.graal.python.builtins.modules.cext.PythonCextBuiltins.CastKwargsNode;
import com.oracle.graal.python.builtins.objects.PNone;
import com.oracle.graal.python.builtins.objects.PNotImplemented;
import com.oracle.graal.python.builtins.objects.bytes.BytesNodes;
import com.oracle.graal.python.builtins.objects.bytes.BytesUtils;
import com.oracle.graal.python.builtins.objects.bytes.PBytes;
import com.oracle.graal.python.builtins.objects.cext.capi.CApiGuards;
import com.oracle.graal.python.builtins.objects.cext.capi.CExtNodes.ResolvePointerNode;
import com.oracle.graal.python.builtins.objects.cext.capi.PythonNativeWrapper;
import com.oracle.graal.python.builtins.objects.cext.capi.PythonNativeWrapper.PythonAbstractObjectNativeWrapper;
import com.oracle.graal.python.builtins.objects.cext.capi.transitions.CApiTransitions;
import com.oracle.graal.python.builtins.objects.cext.capi.transitions.CApiTransitions.HandlePointerConverter;
import com.oracle.graal.python.builtins.objects.cext.capi.transitions.CApiTransitions.NativeToPythonNode;
import com.oracle.graal.python.builtins.objects.cext.capi.transitions.CApiTransitions.PythonToNativeNode;
import com.oracle.graal.python.builtins.objects.cext.capi.transitions.CApiTransitions.ToPythonWrapperNode;
import com.oracle.graal.python.builtins.objects.cext.capi.transitions.CApiTransitions.UpdateStrongRefNode;
import com.oracle.graal.python.builtins.objects.cext.common.GetNextVaArgNode;
import com.oracle.graal.python.builtins.objects.cext.structs.CFields;
import com.oracle.graal.python.builtins.objects.cext.structs.CStructAccess;
import com.oracle.graal.python.builtins.objects.common.SequenceNodes;
import com.oracle.graal.python.builtins.objects.common.SequenceStorageNodes;
import com.oracle.graal.python.builtins.objects.dict.PDict;
import com.oracle.graal.python.builtins.objects.function.PKeyword;
import com.oracle.graal.python.builtins.objects.object.ObjectBuiltins.GetAttributeNode;
import com.oracle.graal.python.builtins.objects.object.ObjectBuiltins.SetattrNode;
import com.oracle.graal.python.builtins.objects.tuple.PTuple;
import com.oracle.graal.python.builtins.objects.type.TypeNodes;
import com.oracle.graal.python.lib.PyBytesCheckNode;
import com.oracle.graal.python.lib.PyCallableCheckNode;
import com.oracle.graal.python.lib.PyLongCheckNode;
import com.oracle.graal.python.lib.PyObjectAsFileDescriptor;
import com.oracle.graal.python.lib.PyObjectAsciiNode;
import com.oracle.graal.python.lib.PyObjectCallMethodObjArgs;
import com.oracle.graal.python.lib.PyObjectDelItem;
import com.oracle.graal.python.lib.PyObjectDir;
import com.oracle.graal.python.lib.PyObjectGetAttrO;
import com.oracle.graal.python.lib.PyObjectGetIter;
import com.oracle.graal.python.lib.PyObjectHashNode;
import com.oracle.graal.python.lib.PyObjectIsTrueNode;
import com.oracle.graal.python.lib.PyObjectLookupAttrO;
import com.oracle.graal.python.lib.PyObjectReprAsObjectNode;
import com.oracle.graal.python.lib.PyObjectSetItem;
import com.oracle.graal.python.lib.PyObjectStrAsObjectNode;
import com.oracle.graal.python.nodes.BuiltinNames;
import com.oracle.graal.python.nodes.ErrorMessages;
import com.oracle.graal.python.nodes.PRaiseNode;
import com.oracle.graal.python.nodes.StringLiterals;
import com.oracle.graal.python.nodes.argument.keywords.ExpandKeywordStarargsNode;
import com.oracle.graal.python.nodes.call.CallNode;
import com.oracle.graal.python.nodes.call.special.CallUnaryMethodNode;
import com.oracle.graal.python.nodes.call.special.LookupSpecialMethodNode;
import com.oracle.graal.python.nodes.object.GetClassNode;
import com.oracle.graal.python.nodes.object.GetOrCreateDictNode;
import com.oracle.graal.python.nodes.object.IsNode;
import com.oracle.graal.python.nodes.util.CannotCastException;
import com.oracle.graal.python.nodes.util.CastToJavaStringNode;
import com.oracle.graal.python.nodes.util.CastToTruffleStringNode;
import com.oracle.graal.python.runtime.PosixSupportLibrary;
import com.oracle.graal.python.runtime.PythonContext;
import com.oracle.graal.python.runtime.exception.PException;
import com.oracle.graal.python.runtime.object.PFactory;
import com.oracle.graal.python.runtime.sequence.PSequence;
import com.oracle.graal.python.runtime.sequence.storage.ArrayBasedSequenceStorage;
import com.oracle.graal.python.runtime.sequence.storage.EmptySequenceStorage;
import com.oracle.graal.python.runtime.sequence.storage.NativeSequenceStorage;
import com.oracle.graal.python.runtime.sequence.storage.SequenceStorage;
import com.oracle.graal.python.util.PythonUtils;
import com.oracle.truffle.api.CompilerDirectives;
import com.oracle.truffle.api.CompilerDirectives.TruffleBoundary;
import com.oracle.truffle.api.dsl.Bind;
import com.oracle.truffle.api.dsl.Cached;
import com.oracle.truffle.api.dsl.Fallback;
import com.oracle.truffle.api.dsl.Specialization;
import com.oracle.truffle.api.interop.InteropException;
import com.oracle.truffle.api.interop.InteropLibrary;
import com.oracle.truffle.api.library.CachedLibrary;
import com.oracle.truffle.api.nodes.Node;
import com.oracle.truffle.api.profiles.InlinedBranchProfile;
import com.oracle.truffle.api.profiles.InlinedConditionProfile;
import com.oracle.truffle.api.strings.TruffleString;
public abstract class PythonCextObjectBuiltins {
    private PythonCextObjectBuiltins() {
        // Non-instantiable: this class only groups C API builtin node definitions.
    }
    /**
     * Upcall invoked when the native refcount of a single object stub changed. Updates the
     * strong/weak state of the managed wrapper so it stays alive while native references exist.
     */
    @CApiBuiltin(ret = Void, args = {PyObjectWrapper, Py_ssize_t}, call = Ignored)
    abstract static class GraalPyPrivate_NotifyRefCount extends CApiBinaryBuiltinNode {
        @Specialization
        static Object doGeneric(PythonAbstractObjectNativeWrapper wrapper, long refCount,
                        @Bind Node inliningTarget,
                        @Cached UpdateStrongRefNode updateRefNode) {
            // Sanity check: the reported count must match what is stored in the native stub.
            assert CApiTransitions.readNativeRefCount(HandlePointerConverter.pointerToStub(wrapper.getNativePointer())) == refCount;
            // refcounting on an immortal object should be a NOP
            assert refCount != PythonAbstractObjectNativeWrapper.IMMORTAL_REFCNT;
            updateRefNode.execute(inliningTarget, wrapper, refCount);
            return PNone.NO_VALUE;
        }
    }
    /**
     * Bulk variant of {@code GraalPyPrivate_NotifyRefCount}: processes an array of {@code len}
     * native pointers and updates the strong/weak state of each resolvable object wrapper.
     */
    @CApiBuiltin(ret = Void, args = {Pointer, Int}, call = Ignored)
    abstract static class GraalPyPrivate_BulkNotifyRefCount extends CApiBinaryBuiltinNode {
        @Specialization
        static Object doGeneric(Object arrayPointer, int len,
                        @Bind Node inliningTarget,
                        @Cached UpdateStrongRefNode updateRefNode,
                        @Cached CStructAccess.ReadPointerNode readPointerNode,
                        @Cached ToPythonWrapperNode toPythonWrapperNode) {
            /*
             * It may happen that due to several inc- and decrefs applied to a borrowed reference,
             * that the same pointer is in the list several times. To avoid crashes, we do the
             * processing in two phases: first, we resolve the pointers to wrappers and second, we
             * update the reference counts. In this way, we avoid that a reference is made weak when
             * processed the first time and may then be invalid if processed the second time.
             */
            // Phase 1: resolve all pointers to wrappers (non-wrappers resolve to null).
            PythonNativeWrapper[] resolved = new PythonNativeWrapper[len];
            for (int i = 0; i < resolved.length; i++) {
                Object elem = readPointerNode.readArrayElement(arrayPointer, i);
                resolved[i] = toPythonWrapperNode.executeWrapper(elem, false);
            }
            // Phase 2: re-read each wrapper's current native refcount and apply the update.
            for (int i = 0; i < resolved.length; i++) {
                if (resolved[i] instanceof PythonAbstractObjectNativeWrapper objectNativeWrapper) {
                    long refCount = CApiTransitions.readNativeRefCount(HandlePointerConverter.pointerToStub(objectNativeWrapper.getNativePointer()));
                    // refcounting on an immortal object should be a NOP
                    assert refCount != PythonAbstractObjectNativeWrapper.IMMORTAL_REFCNT;
                    updateRefNode.execute(inliningTarget, objectNativeWrapper, refCount);
                }
            }
            return PNone.NO_VALUE;
        }
    }
    /**
     * Calls {@code callable} with positional arguments and optional keyword arguments. If
     * {@code singleArg != 0}, {@code argsObj} is used as the one and only positional argument;
     * otherwise it is unpacked into an argument array.
     */
    @CApiBuiltin(ret = PyObjectTransfer, args = {PyObject, PyObject, PyObject, Int}, call = Ignored)
    abstract static class GraalPyPrivate_Object_Call1 extends CApiQuaternaryBuiltinNode {
        @Specialization
        static Object doGeneric(Object callable, Object argsObj, Object kwargsObj, int singleArg,
                        @Bind Node inliningTarget,
                        @Cached CastArgsNode castArgsNode,
                        @Cached CastKwargsNode castKwargsNode,
                        @Cached CallNode callNode) {
            Object[] args;
            if (singleArg != 0) {
                args = new Object[]{argsObj};
            } else {
                args = castArgsNode.execute(null, inliningTarget, argsObj);
            }
            PKeyword[] keywords = castKwargsNode.execute(inliningTarget, kwargsObj);
            return callNode.execute(null, callable, args, keywords);
        }
    }
    /**
     * Implements {@code PyObject_CallFunctionObjArgs}: collects 'PyObject *' arguments from a
     * NULL-terminated va_list and calls {@code callable} with them.
     */
    @CApiBuiltin(ret = PyObjectTransfer, args = {PyObject, VA_LIST_PTR}, call = Ignored)
    abstract static class GraalPyPrivate_Object_CallFunctionObjArgs extends CApiBinaryBuiltinNode {
        @Specialization
        static Object doFunction(Object callable, Object vaList,
                        @Bind Node inliningTarget,
                        @Cached GetNextVaArgNode getVaArgs,
                        @CachedLibrary(limit = "2") InteropLibrary argLib,
                        @Cached CallNode callNode,
                        @Cached NativeToPythonNode toJavaNode) {
            return callFunction(inliningTarget, callable, vaList, getVaArgs, argLib, callNode, toJavaNode);
        }
        // Shared with GraalPyPrivate_Object_CallMethodObjArgs below.
        static Object callFunction(Node inliningTarget, Object callable, Object vaList,
                        GetNextVaArgNode getVaArgs,
                        InteropLibrary argLib,
                        CallNode callNode,
                        NativeToPythonNode toJavaNode) {
            /*
             * Function 'PyObject_CallFunctionObjArgs' expects a va_list that contains just
             * 'PyObject *' and is terminated by 'NULL'.
             */
            // Start with a small array and grow geometrically; trimmed to 'filled' at the end.
            Object[] args = new Object[4];
            int filled = 0;
            while (true) {
                Object object;
                try {
                    object = getVaArgs.execute(inliningTarget, vaList);
                } catch (InteropException e) {
                    throw CompilerDirectives.shouldNotReachHere();
                }
                if (argLib.isNull(object)) {
                    // NULL terminates the argument list.
                    break;
                }
                if (filled >= args.length) {
                    args = PythonUtils.arrayCopyOf(args, args.length * 2);
                }
                args[filled++] = toJavaNode.execute(object);
            }
            if (filled < args.length) {
                args = PythonUtils.arrayCopyOf(args, filled);
            }
            return callNode.executeWithoutFrame(callable, args);
        }
    }
    /**
     * Implements {@code PyObject_CallMethodObjArgs}: looks up {@code methodName} on
     * {@code receiver}, then calls it with the NULL-terminated va_list of 'PyObject *' arguments.
     */
    @CApiBuiltin(ret = PyObjectTransfer, args = {PyObject, PyObject, VA_LIST_PTR}, call = Ignored)
    abstract static class GraalPyPrivate_Object_CallMethodObjArgs extends CApiTernaryBuiltinNode {
        @Specialization
        static Object doMethod(Object receiver, Object methodName, Object vaList,
                        @Bind Node inliningTarget,
                        @Cached GetNextVaArgNode getVaArgs,
                        @CachedLibrary(limit = "2") InteropLibrary argLib,
                        @Cached CallNode callNode,
                        @Cached PyObjectGetAttrO getAnyAttributeNode,
                        @Cached NativeToPythonNode toJavaNode) {
            // Note: the bound method is resolved first, so 'receiver' is not passed again below.
            Object method = getAnyAttributeNode.execute(null, inliningTarget, receiver, methodName);
            return GraalPyPrivate_Object_CallFunctionObjArgs.callFunction(inliningTarget, method, vaList, getVaArgs, argLib, callNode, toJavaNode);
        }
    }
    /**
     * Calls method {@code methodName} on {@code receiver}. If {@code singleArg != 0},
     * {@code argsObj} is the one and only positional argument; otherwise it is unpacked.
     */
    @CApiBuiltin(ret = PyObjectTransfer, args = {PyObject, ConstCharPtrAsTruffleString, PyObject, Int}, call = Ignored)
    abstract static class GraalPyPrivate_Object_CallMethod1 extends CApiQuaternaryBuiltinNode {
        @Specialization
        static Object doGeneric(Object receiver, TruffleString methodName, Object argsObj, int singleArg,
                        @Bind Node inliningTarget,
                        @Cached PyObjectCallMethodObjArgs callMethod,
                        @Cached CastArgsNode castArgsNode) {
            Object[] args;
            if (singleArg != 0) {
                args = new Object[]{argsObj};
            } else {
                args = castArgsNode.execute(null, inliningTarget, argsObj);
            }
            return callMethod.execute(null, inliningTarget, receiver, methodName, args);
        }
    }
    // directly called without landing function
    /**
     * Implements CPython's {@code _PyObject_MakeTpCall}: invokes {@code callable} with
     * {@code nargs} positional arguments read from the native argument array. Keywords may be
     * passed either as a dict, or as a tuple of names whose corresponding values follow the
     * positional arguments in the same native array.
     */
    @CApiBuiltin(ret = PyObjectTransfer, args = {PyThreadState, PyObject, PyObjectConstPtr, Py_ssize_t, PyObject}, call = Direct)
    abstract static class _PyObject_MakeTpCall extends CApi5BuiltinNode {
        @Specialization
        static Object doGeneric(@SuppressWarnings("unused") Object threadState, Object callable, Object argsArray, long nargs, Object kwargs,
                        @Cached CStructAccess.ReadObjectNode readNode,
                        @Bind Node inliningTarget,
                        @Cached CStructAccess.ReadObjectNode readKwNode,
                        @Cached ExpandKeywordStarargsNode castKwargsNode,
                        @Cached SequenceStorageNodes.GetItemScalarNode getItemScalarNode,
                        @Cached CallNode callNode,
                        @Cached CastToTruffleStringNode castToTruffleStringNode) {
            try {
                // We assume 'nargs' fits into an int, i.e., there won't be more than
                // Integer.MAX_VALUE arguments.
                Object[] args = readNode.readPyObjectArray(argsArray, (int) nargs);
                PKeyword[] keywords;
                if (kwargs instanceof PNone) {
                    keywords = PKeyword.EMPTY_KEYWORDS;
                } else if (kwargs instanceof PDict) {
                    keywords = castKwargsNode.execute(inliningTarget, kwargs);
                } else if (kwargs instanceof PTuple) {
                    // We have a tuple with kw names and an array with kw values
                    PTuple kwTuple = (PTuple) kwargs;
                    SequenceStorage storage = kwTuple.getSequenceStorage();
                    int kwcount = storage.length();
                    // Keyword values are stored right after the positionals in 'argsArray'.
                    Object[] kwValues = readKwNode.readPyObjectArray(argsArray, kwcount, (int) nargs);
                    keywords = new PKeyword[kwcount];
                    for (int i = 0; i < kwcount; i++) {
                        TruffleString name = castToTruffleStringNode.execute(inliningTarget, getItemScalarNode.execute(inliningTarget, storage, i));
                        keywords[i] = new PKeyword(name, kwValues[i]);
                    }
                } else {
                    throw CompilerDirectives.shouldNotReachHere("_PyObject_MakeTpCall: keywords must be NULL, a tuple or a dict");
                }
                return callNode.execute(null, callable, args, keywords);
            } catch (CannotCastException e) {
                // Thrown by castToTruffleStringNode if a keyword name is not a string; that would
                // violate the C API contract, so treat it as unreachable.
                throw CompilerDirectives.shouldNotReachHere(e);
            }
        }
    }
    /**
     * Implements {@code PyObject_Str}. A missing value (NULL on the C side) yields the literal
     * string {@code "<NULL>"}, matching CPython's behavior for NULL arguments.
     */
    @CApiBuiltin(ret = PyObjectTransfer, args = {PyObject}, call = Direct)
    abstract static class PyObject_Str extends CApiUnaryBuiltinNode {
        @Specialization(guards = "!isNoValue(obj)")
        Object doGeneric(Object obj,
                        @Bind Node inliningTarget,
                        @Cached PyObjectStrAsObjectNode strNode) {
            return strNode.execute(inliningTarget, obj);
        }
        @Specialization(guards = "isNoValue(obj)")
        static TruffleString asciiNone(@SuppressWarnings("unused") PNone obj) {
            return StringLiterals.T_NULL_RESULT;
        }
    }
    /**
     * Implements {@code PyObject_Repr}. A missing value (NULL on the C side) yields the literal
     * string {@code "<NULL>"}.
     */
    @CApiBuiltin(ret = PyObjectTransfer, args = {PyObject}, call = Direct)
    abstract static class PyObject_Repr extends CApiUnaryBuiltinNode {
        @Specialization(guards = "!isNoValue(obj)")
        Object doGeneric(Object obj,
                        @Bind Node inliningTarget,
                        @Cached PyObjectReprAsObjectNode reprNode) {
            return reprNode.execute(null, inliningTarget, obj);
        }
        @Specialization(guards = "isNoValue(obj)")
        static TruffleString asciiNone(@SuppressWarnings("unused") PNone obj) {
            return StringLiterals.T_NULL_RESULT;
        }
    }
@CApiBuiltin(ret = Int, args = {PyObject, PyObject}, call = Direct)
abstract static class PyObject_DelItem extends CApiBinaryBuiltinNode {
@Specialization
static Object doGeneric(Object obj, Object k,
@Bind Node inliningTarget,
@Cached PyObjectDelItem delNode) {
delNode.execute(null, inliningTarget, obj, k);
return 0;
}
}
@CApiBuiltin(ret = Int, args = {PyObject, PyObject, PyObject}, call = Direct)
abstract static class PyObject_SetItem extends CApiTernaryBuiltinNode {
@Specialization
static Object doGeneric(Object obj, Object k, Object v,
@Bind Node inliningTarget,
@Cached PyObjectSetItem setItemNode) {
setItemNode.execute(null, inliningTarget, obj, k, v);
return 0;
}
}
    /** Implements {@code PyObject_IsInstance}: returns 1 if {@code obj} is an instance of {@code typ}, else 0. */
    @CApiBuiltin(ret = Int, args = {PyObject, PyObject}, call = Direct)
    abstract static class PyObject_IsInstance extends CApiBinaryBuiltinNode {
        @Specialization
        static int doGeneric(Object obj, Object typ,
                        @Cached IsInstanceNode isInstanceNode) {
            return intValue((boolean) isInstanceNode.execute(null, obj, typ));
        }
    }
    /** Implements {@code PyObject_IsSubclass}: returns 1 if {@code obj} is a subclass of {@code typ}, else 0. */
    @CApiBuiltin(ret = Int, args = {PyObject, PyObject}, call = Direct)
    abstract static class PyObject_IsSubclass extends CApiBinaryBuiltinNode {
        @Specialization
        static int doGeneric(Object obj, Object typ,
                        @Cached IsSubClassNode isSubclassNode) {
            return intValue((boolean) isSubclassNode.execute(null, obj, typ));
        }
    }
    /**
     * Implements {@code PyObject_AsFileDescriptor}. On the emulated (Java) POSIX backend this is
     * only supported for Python ints, because 'fileno' values of other objects are likely fakes
     * that do not correspond to real operating-system descriptors.
     */
    @CApiBuiltin(ret = Int, args = {PyObject}, call = Direct)
    abstract static class PyObject_AsFileDescriptor extends CApiUnaryBuiltinNode {
        @Specialization
        static Object asFileDescriptor(Object obj,
                        @Bind Node inliningTarget,
                        @Cached PyLongCheckNode longCheckNode,
                        @CachedLibrary(limit = "1") PosixSupportLibrary posixLib,
                        @Cached TruffleString.EqualNode eqNode,
                        @Cached PyObjectAsFileDescriptorNode asFileDescriptorNode,
                        @Cached PRaiseNode raiseNode) {
            if (!longCheckNode.execute(inliningTarget, obj)) {
                Object posixSupport = PythonContext.get(inliningTarget).getPosixSupport();
                // T_JAVA / TS_ENCODING come from static imports outside this chunk.
                if (eqNode.execute(T_JAVA, posixLib.getBackend(posixSupport), TS_ENCODING)) {
                    /*
                     * For non Python 'int' objects, we refuse to hand out the fileno field when
                     * using the emulated Posix backend, because it is likely a fake.
                     */
                    throw raiseNode.raise(inliningTarget, NotImplementedError, ErrorMessages.S_NOT_SUPPORTED_ON_JAVA_POSIX_BACKEND, "PyObject_AsFileDescriptor");
                }
            }
            return asFileDescriptorNode.execute(null, inliningTarget, obj);
        }
    }
@CApiBuiltin(ret = PyObjectTransfer, args = {PyObject, PyObject}, call = Ignored)
abstract static class GraalPyPrivate_Object_GenericGetAttr extends CApiBinaryBuiltinNode {
@Specialization
static Object getAttr(Object obj, Object attr,
@Cached GetAttributeNode getAttrNode) {
return getAttrNode.execute(null, obj, attr);
}
}
@CApiBuiltin(ret = Int, args = {PyObject, PyObject, PyObject}, call = Ignored)
abstract static class GraalPyPrivate_Object_GenericSetAttr extends CApiTernaryBuiltinNode {
@Specialization
static int setAttr(Object obj, Object attr, Object value,
@Cached SetattrNode setAttrNode) {
setAttrNode.execute(null, obj, attr, value);
return 0;
}
}
@CApiBuiltin(ret = Int, args = {PyObject, PyObject}, call = Direct)
@CApiBuiltin(name = "PyObject_HasAttrString", ret = Int, args = {PyObject, ConstCharPtrAsTruffleString}, call = Direct)
abstract static class PyObject_HasAttr extends CApiBinaryBuiltinNode {
@Specialization
static int hasAttr(Object obj, Object attr,
@Bind Node inliningTarget,
@Cached PyObjectLookupAttrO lookupAttrNode,
@Cached InlinedBranchProfile exceptioBranchProfile) {
try {
return lookupAttrNode.execute(null, inliningTarget, obj, attr) != PNone.NO_VALUE ? 1 : 0;
} catch (PException e) {
exceptioBranchProfile.enter(inliningTarget);
return 0;
}
}
}
    /**
     * Implements {@code PyObject_HashNotImplemented}: unconditionally raises TypeError, used as
     * the tp_hash slot of unhashable types.
     */
    @CApiBuiltin(ret = Py_hash_t, args = {PyObject}, call = Direct)
    abstract static class PyObject_HashNotImplemented extends CApiUnaryBuiltinNode {
        @Specialization
        static Object unhashable(Object obj,
                        @Bind Node inliningTarget) {
            // UNHASHABLE_TYPE_P is a statically imported error message constant.
            throw PRaiseNode.raiseStatic(inliningTarget, PythonBuiltinClassType.TypeError, UNHASHABLE_TYPE_P, obj);
        }
    }
@CApiBuiltin(ret = Int, args = {PyObject}, call = Direct)
abstract static class PyObject_IsTrue extends CApiUnaryBuiltinNode {
@Specialization
static int isTrue(Object obj,
@Cached PyObjectIsTrueNode isTrueNode) {
return isTrueNode.execute(null, obj) ? 1 : 0;
}
}
    /**
     * Implements {@code PyObject_Bytes}: builtin bytes objects are returned as-is; NULL yields
     * bytes {@code "<NULL>"}; otherwise __bytes__ is tried before falling back to the generic
     * bytes conversion (e.g. from buffers or iterables of ints).
     */
    @CApiBuiltin(ret = PyObjectTransfer, args = {PyObject}, call = Direct)
    abstract static class PyObject_Bytes extends CApiUnaryBuiltinNode {
        @Specialization(guards = "isBuiltinBytes(bytes)")
        static Object bytes(PBytes bytes) {
            // Exact bytes objects are passed through unchanged.
            return bytes;
        }
        @Specialization(guards = "isNoValue(obj)")
        static Object bytesNoValue(@SuppressWarnings("unused") Object obj,
                        @Bind PythonLanguage language) {
            /*
             * Note: CPython calls PyBytes_FromString("<NULL>") but we do not directly have it.
             * Therefore, we directly create the bytes object with string "<NULL>" here.
             */
            return PFactory.createBytes(language, BytesUtils.NULL_STRING);
        }
        @Fallback
        static Object doGeneric(Object obj,
                        @Bind Node inliningTarget,
                        @Cached GetClassNode getClassNode,
                        @Cached InlinedConditionProfile hasBytes,
                        @Cached("create(T___BYTES__)") LookupSpecialMethodNode lookupBytes,
                        @Cached CallUnaryMethodNode callBytes,
                        @Cached PyBytesCheckNode check,
                        @Cached BytesNodes.BytesFromObject fromObject,
                        @Cached PRaiseNode raiseNode) {
            // Prefer a user-defined __bytes__ method if present on the type.
            Object bytesMethod = lookupBytes.execute(null, getClassNode.execute(inliningTarget, obj), obj);
            if (hasBytes.profile(inliningTarget, bytesMethod != PNone.NO_VALUE)) {
                Object bytes = callBytes.executeObject(null, bytesMethod, obj);
                if (check.execute(inliningTarget, bytes)) {
                    return bytes;
                } else {
                    // __bytes__ must return a bytes object.
                    throw raiseNode.raise(inliningTarget, TypeError, ErrorMessages.RETURNED_NONBYTES, T___BYTES__, bytes);
                }
            }
            // No __bytes__: use the generic conversion path.
            byte[] bytes = fromObject.execute(null, obj);
            return PFactory.createBytes(PythonLanguage.get(inliningTarget), bytes);
        }
    }
@CApiBuiltin(ret = PyObjectTransfer, call = Ignored)
abstract static class GraalPyPrivate_NotImplemented extends CApiNullaryBuiltinNode {
@Specialization
static Object run() {
return PNotImplemented.NOT_IMPLEMENTED;
}
}
@CApiBuiltin(ret = PyObjectTransfer, call = Ignored)
abstract static class GraalPyPrivate_NoValue extends CApiNullaryBuiltinNode {
@Specialization
static PNone doNoValue() {
return PNone.NO_VALUE;
}
}
@CApiBuiltin(ret = PyObjectTransfer, call = Ignored)
abstract static class GraalPyPrivate_None extends CApiNullaryBuiltinNode {
@Specialization
static PNone doNativeNone() {
return PNone.NONE;
}
}
    /**
     * Backing implementation for the {@code Py_SET_SIZE} macro on sequences: directly overwrites
     * the logical length of the underlying sequence storage without touching the items.
     */
    @CApiBuiltin(ret = Void, args = {PyVarObject, Py_ssize_t}, call = Ignored)
    abstract static class GraalPyPrivate_SET_SIZE extends CApiBinaryBuiltinNode {
        @Specialization
        static PNone set(PSequence obj, long size,
                        @Bind Node inliningTarget,
                        @Cached SequenceNodes.GetSequenceStorageNode getSequenceStorageNode,
                        @Cached InlinedBranchProfile basicProfile,
                        @Cached InlinedBranchProfile nativeProfile) {
            SequenceStorage storage = getSequenceStorageNode.execute(inliningTarget, obj);
            // Can't use SetLenNode as that decrefs items for native storages when shrinking
            if (storage instanceof ArrayBasedSequenceStorage basicStorage) {
                basicProfile.enter(inliningTarget);
                basicStorage.setNewLength((int) size);
            } else if (storage instanceof NativeSequenceStorage nativeStorage) {
                nativeProfile.enter(inliningTarget);
                nativeStorage.setNewLength((int) size);
            } else if (storage instanceof EmptySequenceStorage) {
                // An empty storage can only stay at size 0; growing it this way is invalid.
                if (size > 0) {
                    throw CompilerDirectives.shouldNotReachHere("invalid Py_SET_SIZE call");
                }
            } else {
                throw CompilerDirectives.shouldNotReachHere("unhandled storage type");
            }
            return PNone.NO_VALUE;
        }
    }
@CApiBuiltin(ret = Int, args = {PyObjectRawPointer}, call = Ignored)
abstract static class GraalPyPrivate_Object_IsFreed extends CApiUnaryBuiltinNode {
@Specialization
int doGeneric(Object pointer,
@Cached ToPythonWrapperNode toPythonWrapperNode) {
return toPythonWrapperNode.executeWrapper(pointer, false) == null ? 1 : 0;
}
}
    /**
     * Debug helper backing {@code _PyObject_Dump}: prints address, refcount, type, type name and
     * repr of the object behind {@code ptrObject} to the context's stderr. Writes the safest
     * fields first so a partial dump is still produced if a later step crashes.
     */
    @CApiBuiltin(ret = Void, args = {PyObjectWrapper}, call = Ignored)
    abstract static class GraalPyPrivate_Object_Dump extends CApiUnaryBuiltinNode {
        // Non-static on purpose: uses the node's getContext().
        @Specialization
        @TruffleBoundary
        int doGeneric(Object ptrObject,
                        @Cached CStructAccess.ReadI64Node readI64) {
            PythonContext context = getContext();
            PrintWriter stderr = new PrintWriter(context.getStandardErr());
            // There are three cases we need to distinguish:
            // 1) The pointer object is a native pointer and is NOT a handle
            // 2) The pointer object is a native pointer and is a handle
            // 3) The pointer object is one of our native wrappers
            boolean isWrapper = CApiGuards.isNativeWrapper(ptrObject);
            /*
             * At this point we don't know if the pointer is invalid, so we try to resolve it to an
             * object.
             */
            Object resolved = isWrapper ? ptrObject : ResolvePointerNode.executeUncached(ptrObject);
            Object pythonObject;
            long refCnt;
            // We need again check if 'resolved' is a wrapper in case we resolved a handle.
            if (resolved instanceof PythonAbstractObjectNativeWrapper objectNativeWrapper) {
                if (objectNativeWrapper.isNative()) {
                    refCnt = objectNativeWrapper.getRefCount();
                } else {
                    refCnt = PythonAbstractObjectNativeWrapper.MANAGED_REFCNT;
                }
            } else {
                // Not a wrapper: read ob_refcnt directly from the native PyObject struct.
                refCnt = readI64.read(PythonToNativeNode.executeUncached(resolved), CFields.PyObject__ob_refcnt);
            }
            pythonObject = NativeToPythonNode.executeUncached(ptrObject);
            // first, write fields which are the least likely to crash
            stderr.println("ptrObject address : " + ptrObject);
            stderr.println("ptrObject refcount : " + refCnt);
            stderr.flush();
            Object type = GetClassNode.executeUncached(pythonObject);
            stderr.println("object type : " + type);
            stderr.println("object type name: " + TypeNodes.GetNameNode.executeUncached(type));
            // the most dangerous part
            stderr.println("object repr : ");
            stderr.flush();
            try {
                Object reprObj = PyObjectCallMethodObjArgs.executeUncached(context.getBuiltins(), BuiltinNames.T_REPR, pythonObject);
                stderr.println(CastToJavaStringNode.getUncached().execute(reprObj));
            } catch (PException | CannotCastException e) {
                // errors are ignored at this point
            }
            stderr.flush();
            return 0;
        }
    }
    /**
     * Implements {@code PyObject_ASCII}: like repr() but escapes non-ASCII characters. A missing
     * value (NULL on the C side) yields the literal string {@code "<NULL>"}.
     */
    @CApiBuiltin(ret = PyObjectTransfer, args = {PyObject}, call = Direct)
    abstract static class PyObject_ASCII extends CApiUnaryBuiltinNode {
        @Specialization(guards = "!isNoValue(obj)")
        static TruffleString ascii(Object obj,
                        @Bind Node inliningTarget,
                        @Cached PyObjectAsciiNode asciiNode) {
            return asciiNode.execute(null, inliningTarget, obj);
        }
        @Specialization(guards = "isNoValue(obj)")
        static TruffleString asciiNone(@SuppressWarnings("unused") PNone obj) {
            return StringLiterals.T_NULL_RESULT;
        }
    }
@CApiBuiltin(ret = PyObjectTransfer, args = {PyObject, PyObject}, call = Direct)
abstract static class PyObject_Format extends CApiBinaryBuiltinNode {
@Specialization
static Object ascii(Object obj, Object spec,
@Cached FormatNode format) {
return format.execute(null, obj, spec);
}
}
@CApiBuiltin(ret = PyObjectTransfer, args = {PyObject}, call = Direct)
abstract static class PyObject_GetIter extends CApiUnaryBuiltinNode {
@Specialization
static Object iter(Object object,
@Bind Node inliningTarget,
@Cached PyObjectGetIter getIter) {
return getIter.execute(null, inliningTarget, object);
}
}
@CApiBuiltin(ret = Py_hash_t, args = {PyObject}, call = Direct)
abstract static class PyObject_Hash extends CApiUnaryBuiltinNode {
@Specialization
static long hash(Object object,
@Bind Node inliningTarget,
@Cached PyObjectHashNode hashNode) {
return hashNode.execute(null, inliningTarget, object);
}
}
@CApiBuiltin(ret = Int, args = {PyObject}, call = Direct)
abstract static class PyCallable_Check extends CApiUnaryBuiltinNode {
@Specialization
static int doGeneric(Object object,
@Bind Node inliningTarget,
@Cached PyCallableCheckNode callableCheck) {
return intValue(callableCheck.execute(inliningTarget, object));
}
}
@CApiBuiltin(ret = PyObjectTransfer, args = {PyObject}, call = Direct)
abstract static class PyObject_Dir extends CApiUnaryBuiltinNode {
@Specialization
static Object dir(Object object,
@Bind Node inliningTarget,
@Cached PyObjectDir dir) {
return dir.execute(null, inliningTarget, object);
}
}
@CApiBuiltin(ret = PyObjectTransfer, args = {PyObject}, call = Ignored)
abstract static class GraalPyPrivate_Object_GenericGetDict extends CApiUnaryBuiltinNode {
@Specialization
static Object getDict(Object object,
@Bind Node inliningTarget,
@Cached GetOrCreateDictNode getDict) {
return getDict.execute(inliningTarget, object);
}
}
@CApiBuiltin(ret = Int, args = {PyObject, PyObject}, call = Ignored)
abstract static class GraalPyPrivate_Is extends CApiBinaryBuiltinNode {
@Specialization
static int isTrue(Object a, Object b,
@Cached IsNode isNode) {
return isNode.execute(a, b) ? 1 : 0;
}
}
}
/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.compute.model;
/**
* Represents an Image resource.
*
* You can use images to create boot disks for your VM instances. For more information, read Images.
* (== resource_for beta.images ==) (== resource_for v1.images ==)
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the Compute Engine API. For a detailed explanation see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public final class Image extends com.google.api.client.json.GenericJson {
/**
* Size of the image tar.gz archive stored in Google Cloud Storage (in bytes).
* The value may be {@code null}.
*/
@com.google.api.client.util.Key @com.google.api.client.json.JsonString
private java.lang.Long archiveSizeBytes;
/**
* [Output Only] Creation timestamp in RFC3339 text format.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String creationTimestamp;
/**
* The deprecation status associated with this image.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private DeprecationStatus deprecated;
/**
* An optional description of this resource. Provide this property when you create the resource.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String description;
/**
* Size of the image when restored onto a persistent disk (in GB).
* The value may be {@code null}.
*/
@com.google.api.client.util.Key @com.google.api.client.json.JsonString
private java.lang.Long diskSizeGb;
/**
* The name of the image family to which this image belongs. You can create disks by specifying an
* image family instead of a specific image name. The image family always returns its latest image
* that is not deprecated. The name of the image family must comply with RFC1035.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String family;
/**
* A list of features to enable on the guest operating system. Applicable only for bootable
* images. Read Enabling guest operating system features to see a list of available options.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<GuestOsFeature> guestOsFeatures;
static {
// hack to force ProGuard to consider GuestOsFeature used, since otherwise it would be stripped out
// see https://github.com/google/google-api-java-client/issues/543
com.google.api.client.util.Data.nullOf(GuestOsFeature.class);
}
/**
* [Output Only] The unique identifier for the resource. This identifier is defined by the server.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key @com.google.api.client.json.JsonString
private java.math.BigInteger id;
/**
* Encrypts the image using a customer-supplied encryption key.
*
* After you encrypt an image with a customer-supplied key, you must provide the same key if you
* use the image later (e.g. to create a disk from the image).
*
* Customer-supplied encryption keys do not protect access to metadata of the disk.
*
* If you do not provide an encryption key when creating the image, then the disk will be
* encrypted using an automatically generated key and you do not need to provide a key to use the
* image later.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private CustomerEncryptionKey imageEncryptionKey;
/**
* [Output Only] Type of the resource. Always compute#image for images.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String kind;
/**
* A fingerprint for the labels being applied to this image, which is essentially a hash of the
* labels used for optimistic locking. The fingerprint is initially generated by Compute Engine
* and changes after every request to modify or update labels. You must always provide an up-to-
* date fingerprint hash in order to update or change labels, otherwise the request will fail with
* error 412 conditionNotMet.
*
* To see the latest fingerprint, make a get() request to retrieve an image.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String labelFingerprint;
/**
* Labels to apply to this image. These can be later modified by the setLabels method.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.Map<String, java.lang.String> labels;
/**
* Integer license codes indicating which licenses are attached to this image.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key @com.google.api.client.json.JsonString
private java.util.List<java.lang.Long> licenseCodes;
/**
* Any applicable license URI.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<java.lang.String> licenses;
/**
* Name of the resource; provided by the client when the resource is created. The name must be
* 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters
* long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first
* character must be a lowercase letter, and all following characters must be a dash, lowercase
* letter, or digit, except the last character, which cannot be a dash.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String name;
/**
* The parameters of the raw disk image.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private RawDisk rawDisk;
  // NOTE(review): the fields below appear to be (de)serialized from JSON by the enclosing
  // GenericJson machinery via the @Key annotation — field names presumably match the wire
  // property names exactly, so they must not be renamed; confirm against google-http-client.
  /**
   * [Output Only] Server-defined URL for the resource.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String selfLink;

  /**
   * [Output Only] Server-defined URL for this resource's resource id.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String selfLinkWithId;

  /**
   * Set the secure boot keys of shielded instance.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private InitialStateConfig shieldedInstanceInitialState;

  /**
   * URL of the source disk used to create this image. This can be a full or valid partial URL. You
   * must provide either this property or the rawDisk.source property but not both to create an
   * image. For example, the following are valid values: -
   * https://www.googleapis.com/compute/v1/projects/project/zones/zone/disks/disk -
   * projects/project/zones/zone/disks/disk - zones/zone/disks/disk
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String sourceDisk;

  /**
   * The customer-supplied encryption key of the source disk. Required if the source disk is
   * protected by a customer-supplied encryption key.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private CustomerEncryptionKey sourceDiskEncryptionKey;

  /**
   * [Output Only] The ID value of the disk used to create this image. This value may be used to
   * determine whether the image was taken from the current or a previous instance of a given disk
   * name.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String sourceDiskId;

  /**
   * URL of the source image used to create this image. This can be a full or valid partial URL. You
   * must provide exactly one of: - this property, or - the rawDisk.source property, or - the
   * sourceDisk property in order to create an image.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String sourceImage;

  /**
   * The customer-supplied encryption key of the source image. Required if the source image is
   * protected by a customer-supplied encryption key.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private CustomerEncryptionKey sourceImageEncryptionKey;

  /**
   * [Output Only] The ID value of the image used to create this image. This value may be used to
   * determine whether the image was taken from the current or a previous instance of a given image
   * name.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String sourceImageId;

  /**
   * URL of the source snapshot used to create this image. This can be a full or valid partial URL.
   * You must provide exactly one of: - this property, or - the sourceImage property, or - the
   * rawDisk.source property, or - the sourceDisk property in order to create an image.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String sourceSnapshot;

  /**
   * The customer-supplied encryption key of the source snapshot. Required if the source snapshot is
   * protected by a customer-supplied encryption key.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private CustomerEncryptionKey sourceSnapshotEncryptionKey;

  /**
   * [Output Only] The ID value of the snapshot used to create this image. This value may be used to
   * determine whether the snapshot was taken from the current or a previous instance of a given
   * snapshot name.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String sourceSnapshotId;

  /**
   * The type of the image used to create this disk. The default and only value is RAW
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String sourceType;

  /**
   * [Output Only] The status of the image. An image can be used to create other resources, such as
   * instances, only after the image has been successfully created and the status is set to READY.
   * Possible values are FAILED, PENDING, or READY.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String status;

  /**
   * Cloud Storage bucket storage location of the image (regional or multi-regional).
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<java.lang.String> storageLocations;
/**
* Size of the image tar.gz archive stored in Google Cloud Storage (in bytes).
* @return value or {@code null} for none
*/
public java.lang.Long getArchiveSizeBytes() {
return archiveSizeBytes;
}
/**
* Size of the image tar.gz archive stored in Google Cloud Storage (in bytes).
* @param archiveSizeBytes archiveSizeBytes or {@code null} for none
*/
public Image setArchiveSizeBytes(java.lang.Long archiveSizeBytes) {
this.archiveSizeBytes = archiveSizeBytes;
return this;
}
/**
* [Output Only] Creation timestamp in RFC3339 text format.
* @return value or {@code null} for none
*/
public java.lang.String getCreationTimestamp() {
return creationTimestamp;
}
/**
* [Output Only] Creation timestamp in RFC3339 text format.
* @param creationTimestamp creationTimestamp or {@code null} for none
*/
public Image setCreationTimestamp(java.lang.String creationTimestamp) {
this.creationTimestamp = creationTimestamp;
return this;
}
/**
* The deprecation status associated with this image.
* @return value or {@code null} for none
*/
public DeprecationStatus getDeprecated() {
return deprecated;
}
/**
* The deprecation status associated with this image.
* @param deprecated deprecated or {@code null} for none
*/
public Image setDeprecated(DeprecationStatus deprecated) {
this.deprecated = deprecated;
return this;
}
/**
* An optional description of this resource. Provide this property when you create the resource.
* @return value or {@code null} for none
*/
public java.lang.String getDescription() {
return description;
}
/**
* An optional description of this resource. Provide this property when you create the resource.
* @param description description or {@code null} for none
*/
public Image setDescription(java.lang.String description) {
this.description = description;
return this;
}
/**
* Size of the image when restored onto a persistent disk (in GB).
* @return value or {@code null} for none
*/
public java.lang.Long getDiskSizeGb() {
return diskSizeGb;
}
/**
* Size of the image when restored onto a persistent disk (in GB).
* @param diskSizeGb diskSizeGb or {@code null} for none
*/
public Image setDiskSizeGb(java.lang.Long diskSizeGb) {
this.diskSizeGb = diskSizeGb;
return this;
}
/**
* The name of the image family to which this image belongs. You can create disks by specifying an
* image family instead of a specific image name. The image family always returns its latest image
* that is not deprecated. The name of the image family must comply with RFC1035.
* @return value or {@code null} for none
*/
public java.lang.String getFamily() {
return family;
}
/**
* The name of the image family to which this image belongs. You can create disks by specifying an
* image family instead of a specific image name. The image family always returns its latest image
* that is not deprecated. The name of the image family must comply with RFC1035.
* @param family family or {@code null} for none
*/
public Image setFamily(java.lang.String family) {
this.family = family;
return this;
}
/**
* A list of features to enable on the guest operating system. Applicable only for bootable
* images. Read Enabling guest operating system features to see a list of available options.
* @return value or {@code null} for none
*/
public java.util.List<GuestOsFeature> getGuestOsFeatures() {
return guestOsFeatures;
}
/**
* A list of features to enable on the guest operating system. Applicable only for bootable
* images. Read Enabling guest operating system features to see a list of available options.
* @param guestOsFeatures guestOsFeatures or {@code null} for none
*/
public Image setGuestOsFeatures(java.util.List<GuestOsFeature> guestOsFeatures) {
this.guestOsFeatures = guestOsFeatures;
return this;
}
/**
* [Output Only] The unique identifier for the resource. This identifier is defined by the server.
* @return value or {@code null} for none
*/
public java.math.BigInteger getId() {
return id;
}
/**
* [Output Only] The unique identifier for the resource. This identifier is defined by the server.
* @param id id or {@code null} for none
*/
public Image setId(java.math.BigInteger id) {
this.id = id;
return this;
}
/**
* Encrypts the image using a customer-supplied encryption key.
*
* After you encrypt an image with a customer-supplied key, you must provide the same key if you
* use the image later (e.g. to create a disk from the image).
*
* Customer-supplied encryption keys do not protect access to metadata of the disk.
*
* If you do not provide an encryption key when creating the image, then the disk will be
* encrypted using an automatically generated key and you do not need to provide a key to use the
* image later.
* @return value or {@code null} for none
*/
public CustomerEncryptionKey getImageEncryptionKey() {
return imageEncryptionKey;
}
/**
* Encrypts the image using a customer-supplied encryption key.
*
* After you encrypt an image with a customer-supplied key, you must provide the same key if you
* use the image later (e.g. to create a disk from the image).
*
* Customer-supplied encryption keys do not protect access to metadata of the disk.
*
* If you do not provide an encryption key when creating the image, then the disk will be
* encrypted using an automatically generated key and you do not need to provide a key to use the
* image later.
* @param imageEncryptionKey imageEncryptionKey or {@code null} for none
*/
public Image setImageEncryptionKey(CustomerEncryptionKey imageEncryptionKey) {
this.imageEncryptionKey = imageEncryptionKey;
return this;
}
/**
* [Output Only] Type of the resource. Always compute#image for images.
* @return value or {@code null} for none
*/
public java.lang.String getKind() {
return kind;
}
/**
* [Output Only] Type of the resource. Always compute#image for images.
* @param kind kind or {@code null} for none
*/
public Image setKind(java.lang.String kind) {
this.kind = kind;
return this;
}
/**
* A fingerprint for the labels being applied to this image, which is essentially a hash of the
* labels used for optimistic locking. The fingerprint is initially generated by Compute Engine
* and changes after every request to modify or update labels. You must always provide an up-to-
* date fingerprint hash in order to update or change labels, otherwise the request will fail with
* error 412 conditionNotMet.
*
* To see the latest fingerprint, make a get() request to retrieve an image.
* @see #decodeLabelFingerprint()
* @return value or {@code null} for none
*/
public java.lang.String getLabelFingerprint() {
return labelFingerprint;
}
/**
* A fingerprint for the labels being applied to this image, which is essentially a hash of the
* labels used for optimistic locking. The fingerprint is initially generated by Compute Engine
* and changes after every request to modify or update labels. You must always provide an up-to-
* date fingerprint hash in order to update or change labels, otherwise the request will fail with
* error 412 conditionNotMet.
*
* To see the latest fingerprint, make a get() request to retrieve an image.
* @see #getLabelFingerprint()
* @return Base64 decoded value or {@code null} for none
*
* @since 1.14
*/
public byte[] decodeLabelFingerprint() {
return com.google.api.client.util.Base64.decodeBase64(labelFingerprint);
}
/**
* A fingerprint for the labels being applied to this image, which is essentially a hash of the
* labels used for optimistic locking. The fingerprint is initially generated by Compute Engine
* and changes after every request to modify or update labels. You must always provide an up-to-
* date fingerprint hash in order to update or change labels, otherwise the request will fail with
* error 412 conditionNotMet.
*
* To see the latest fingerprint, make a get() request to retrieve an image.
* @see #encodeLabelFingerprint()
* @param labelFingerprint labelFingerprint or {@code null} for none
*/
public Image setLabelFingerprint(java.lang.String labelFingerprint) {
this.labelFingerprint = labelFingerprint;
return this;
}
/**
* A fingerprint for the labels being applied to this image, which is essentially a hash of the
* labels used for optimistic locking. The fingerprint is initially generated by Compute Engine
* and changes after every request to modify or update labels. You must always provide an up-to-
* date fingerprint hash in order to update or change labels, otherwise the request will fail with
* error 412 conditionNotMet.
*
* To see the latest fingerprint, make a get() request to retrieve an image.
* @see #setLabelFingerprint()
*
* <p>
* The value is encoded Base64 or {@code null} for none.
* </p>
*
* @since 1.14
*/
public Image encodeLabelFingerprint(byte[] labelFingerprint) {
this.labelFingerprint = com.google.api.client.util.Base64.encodeBase64URLSafeString(labelFingerprint);
return this;
}
/**
* Labels to apply to this image. These can be later modified by the setLabels method.
* @return value or {@code null} for none
*/
public java.util.Map<String, java.lang.String> getLabels() {
return labels;
}
/**
* Labels to apply to this image. These can be later modified by the setLabels method.
* @param labels labels or {@code null} for none
*/
public Image setLabels(java.util.Map<String, java.lang.String> labels) {
this.labels = labels;
return this;
}
/**
* Integer license codes indicating which licenses are attached to this image.
* @return value or {@code null} for none
*/
public java.util.List<java.lang.Long> getLicenseCodes() {
return licenseCodes;
}
/**
* Integer license codes indicating which licenses are attached to this image.
* @param licenseCodes licenseCodes or {@code null} for none
*/
public Image setLicenseCodes(java.util.List<java.lang.Long> licenseCodes) {
this.licenseCodes = licenseCodes;
return this;
}
/**
* Any applicable license URI.
* @return value or {@code null} for none
*/
public java.util.List<java.lang.String> getLicenses() {
return licenses;
}
/**
* Any applicable license URI.
* @param licenses licenses or {@code null} for none
*/
public Image setLicenses(java.util.List<java.lang.String> licenses) {
this.licenses = licenses;
return this;
}
/**
* Name of the resource; provided by the client when the resource is created. The name must be
* 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters
* long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first
* character must be a lowercase letter, and all following characters must be a dash, lowercase
* letter, or digit, except the last character, which cannot be a dash.
* @return value or {@code null} for none
*/
public java.lang.String getName() {
return name;
}
/**
* Name of the resource; provided by the client when the resource is created. The name must be
* 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters
* long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first
* character must be a lowercase letter, and all following characters must be a dash, lowercase
* letter, or digit, except the last character, which cannot be a dash.
* @param name name or {@code null} for none
*/
public Image setName(java.lang.String name) {
this.name = name;
return this;
}
/**
* The parameters of the raw disk image.
* @return value or {@code null} for none
*/
public RawDisk getRawDisk() {
return rawDisk;
}
/**
* The parameters of the raw disk image.
* @param rawDisk rawDisk or {@code null} for none
*/
public Image setRawDisk(RawDisk rawDisk) {
this.rawDisk = rawDisk;
return this;
}
/**
* [Output Only] Server-defined URL for the resource.
* @return value or {@code null} for none
*/
public java.lang.String getSelfLink() {
return selfLink;
}
/**
* [Output Only] Server-defined URL for the resource.
* @param selfLink selfLink or {@code null} for none
*/
public Image setSelfLink(java.lang.String selfLink) {
this.selfLink = selfLink;
return this;
}
/**
* [Output Only] Server-defined URL for this resource's resource id.
* @return value or {@code null} for none
*/
public java.lang.String getSelfLinkWithId() {
return selfLinkWithId;
}
/**
* [Output Only] Server-defined URL for this resource's resource id.
* @param selfLinkWithId selfLinkWithId or {@code null} for none
*/
public Image setSelfLinkWithId(java.lang.String selfLinkWithId) {
this.selfLinkWithId = selfLinkWithId;
return this;
}
/**
* Set the secure boot keys of shielded instance.
* @return value or {@code null} for none
*/
public InitialStateConfig getShieldedInstanceInitialState() {
return shieldedInstanceInitialState;
}
/**
* Set the secure boot keys of shielded instance.
* @param shieldedInstanceInitialState shieldedInstanceInitialState or {@code null} for none
*/
public Image setShieldedInstanceInitialState(InitialStateConfig shieldedInstanceInitialState) {
this.shieldedInstanceInitialState = shieldedInstanceInitialState;
return this;
}
/**
* URL of the source disk used to create this image. This can be a full or valid partial URL. You
* must provide either this property or the rawDisk.source property but not both to create an
* image. For example, the following are valid values: -
* https://www.googleapis.com/compute/v1/projects/project/zones/zone/disks/disk -
* projects/project/zones/zone/disks/disk - zones/zone/disks/disk
* @return value or {@code null} for none
*/
public java.lang.String getSourceDisk() {
return sourceDisk;
}
/**
* URL of the source disk used to create this image. This can be a full or valid partial URL. You
* must provide either this property or the rawDisk.source property but not both to create an
* image. For example, the following are valid values: -
* https://www.googleapis.com/compute/v1/projects/project/zones/zone/disks/disk -
* projects/project/zones/zone/disks/disk - zones/zone/disks/disk
* @param sourceDisk sourceDisk or {@code null} for none
*/
public Image setSourceDisk(java.lang.String sourceDisk) {
this.sourceDisk = sourceDisk;
return this;
}
/**
* The customer-supplied encryption key of the source disk. Required if the source disk is
* protected by a customer-supplied encryption key.
* @return value or {@code null} for none
*/
public CustomerEncryptionKey getSourceDiskEncryptionKey() {
return sourceDiskEncryptionKey;
}
/**
* The customer-supplied encryption key of the source disk. Required if the source disk is
* protected by a customer-supplied encryption key.
* @param sourceDiskEncryptionKey sourceDiskEncryptionKey or {@code null} for none
*/
public Image setSourceDiskEncryptionKey(CustomerEncryptionKey sourceDiskEncryptionKey) {
this.sourceDiskEncryptionKey = sourceDiskEncryptionKey;
return this;
}
/**
* [Output Only] The ID value of the disk used to create this image. This value may be used to
* determine whether the image was taken from the current or a previous instance of a given disk
* name.
* @return value or {@code null} for none
*/
public java.lang.String getSourceDiskId() {
return sourceDiskId;
}
/**
* [Output Only] The ID value of the disk used to create this image. This value may be used to
* determine whether the image was taken from the current or a previous instance of a given disk
* name.
* @param sourceDiskId sourceDiskId or {@code null} for none
*/
public Image setSourceDiskId(java.lang.String sourceDiskId) {
this.sourceDiskId = sourceDiskId;
return this;
}
/**
* URL of the source image used to create this image. This can be a full or valid partial URL. You
* must provide exactly one of: - this property, or - the rawDisk.source property, or - the
* sourceDisk property in order to create an image.
* @return value or {@code null} for none
*/
public java.lang.String getSourceImage() {
return sourceImage;
}
/**
* URL of the source image used to create this image. This can be a full or valid partial URL. You
* must provide exactly one of: - this property, or - the rawDisk.source property, or - the
* sourceDisk property in order to create an image.
* @param sourceImage sourceImage or {@code null} for none
*/
public Image setSourceImage(java.lang.String sourceImage) {
this.sourceImage = sourceImage;
return this;
}
/**
* The customer-supplied encryption key of the source image. Required if the source image is
* protected by a customer-supplied encryption key.
* @return value or {@code null} for none
*/
public CustomerEncryptionKey getSourceImageEncryptionKey() {
return sourceImageEncryptionKey;
}
/**
* The customer-supplied encryption key of the source image. Required if the source image is
* protected by a customer-supplied encryption key.
* @param sourceImageEncryptionKey sourceImageEncryptionKey or {@code null} for none
*/
public Image setSourceImageEncryptionKey(CustomerEncryptionKey sourceImageEncryptionKey) {
this.sourceImageEncryptionKey = sourceImageEncryptionKey;
return this;
}
/**
* [Output Only] The ID value of the image used to create this image. This value may be used to
* determine whether the image was taken from the current or a previous instance of a given image
* name.
* @return value or {@code null} for none
*/
public java.lang.String getSourceImageId() {
return sourceImageId;
}
/**
* [Output Only] The ID value of the image used to create this image. This value may be used to
* determine whether the image was taken from the current or a previous instance of a given image
* name.
* @param sourceImageId sourceImageId or {@code null} for none
*/
public Image setSourceImageId(java.lang.String sourceImageId) {
this.sourceImageId = sourceImageId;
return this;
}
/**
* URL of the source snapshot used to create this image. This can be a full or valid partial URL.
* You must provide exactly one of: - this property, or - the sourceImage property, or - the
* rawDisk.source property, or - the sourceDisk property in order to create an image.
* @return value or {@code null} for none
*/
public java.lang.String getSourceSnapshot() {
return sourceSnapshot;
}
/**
* URL of the source snapshot used to create this image. This can be a full or valid partial URL.
* You must provide exactly one of: - this property, or - the sourceImage property, or - the
* rawDisk.source property, or - the sourceDisk property in order to create an image.
* @param sourceSnapshot sourceSnapshot or {@code null} for none
*/
public Image setSourceSnapshot(java.lang.String sourceSnapshot) {
this.sourceSnapshot = sourceSnapshot;
return this;
}
/**
* The customer-supplied encryption key of the source snapshot. Required if the source snapshot is
* protected by a customer-supplied encryption key.
* @return value or {@code null} for none
*/
public CustomerEncryptionKey getSourceSnapshotEncryptionKey() {
return sourceSnapshotEncryptionKey;
}
/**
* The customer-supplied encryption key of the source snapshot. Required if the source snapshot is
* protected by a customer-supplied encryption key.
* @param sourceSnapshotEncryptionKey sourceSnapshotEncryptionKey or {@code null} for none
*/
public Image setSourceSnapshotEncryptionKey(CustomerEncryptionKey sourceSnapshotEncryptionKey) {
this.sourceSnapshotEncryptionKey = sourceSnapshotEncryptionKey;
return this;
}
/**
* [Output Only] The ID value of the snapshot used to create this image. This value may be used to
* determine whether the snapshot was taken from the current or a previous instance of a given
* snapshot name.
* @return value or {@code null} for none
*/
public java.lang.String getSourceSnapshotId() {
return sourceSnapshotId;
}
/**
* [Output Only] The ID value of the snapshot used to create this image. This value may be used to
* determine whether the snapshot was taken from the current or a previous instance of a given
* snapshot name.
* @param sourceSnapshotId sourceSnapshotId or {@code null} for none
*/
public Image setSourceSnapshotId(java.lang.String sourceSnapshotId) {
this.sourceSnapshotId = sourceSnapshotId;
return this;
}
/**
* The type of the image used to create this disk. The default and only value is RAW
* @return value or {@code null} for none
*/
public java.lang.String getSourceType() {
return sourceType;
}
/**
* The type of the image used to create this disk. The default and only value is RAW
* @param sourceType sourceType or {@code null} for none
*/
public Image setSourceType(java.lang.String sourceType) {
this.sourceType = sourceType;
return this;
}
/**
* [Output Only] The status of the image. An image can be used to create other resources, such as
* instances, only after the image has been successfully created and the status is set to READY.
* Possible values are FAILED, PENDING, or READY.
* @return value or {@code null} for none
*/
public java.lang.String getStatus() {
return status;
}
/**
* [Output Only] The status of the image. An image can be used to create other resources, such as
* instances, only after the image has been successfully created and the status is set to READY.
* Possible values are FAILED, PENDING, or READY.
* @param status status or {@code null} for none
*/
public Image setStatus(java.lang.String status) {
this.status = status;
return this;
}
/**
* Cloud Storage bucket storage location of the image (regional or multi-regional).
* @return value or {@code null} for none
*/
public java.util.List<java.lang.String> getStorageLocations() {
return storageLocations;
}
/**
* Cloud Storage bucket storage location of the image (regional or multi-regional).
* @param storageLocations storageLocations or {@code null} for none
*/
public Image setStorageLocations(java.util.List<java.lang.String> storageLocations) {
this.storageLocations = storageLocations;
return this;
}
@Override
public Image set(String fieldName, Object value) {
return (Image) super.set(fieldName, value);
}
@Override
public Image clone() {
return (Image) super.clone();
}
/**
* The parameters of the raw disk image.
*/
public static final class RawDisk extends com.google.api.client.json.GenericJson {
/**
* The format used to encode and transmit the block device, which should be TAR. This is just a
* container and transmission format and not a runtime format. Provided by the client when the
* disk image is created.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String containerType;
/**
* [Deprecated] This field is deprecated. An optional SHA1 checksum of the disk image before
* unpackaging provided by the client when the disk image is created.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String sha1Checksum;
/**
* The full Google Cloud Storage URL where the disk image is stored. You must provide either this
* property or the sourceDisk property but not both.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String source;
/**
* The format used to encode and transmit the block device, which should be TAR. This is just a
* container and transmission format and not a runtime format. Provided by the client when the
* disk image is created.
* @return value or {@code null} for none
*/
public java.lang.String getContainerType() {
return containerType;
}
/**
* The format used to encode and transmit the block device, which should be TAR. This is just a
* container and transmission format and not a runtime format. Provided by the client when the
* disk image is created.
* @param containerType containerType or {@code null} for none
*/
public RawDisk setContainerType(java.lang.String containerType) {
this.containerType = containerType;
return this;
}
/**
* [Deprecated] This field is deprecated. An optional SHA1 checksum of the disk image before
* unpackaging provided by the client when the disk image is created.
* @return value or {@code null} for none
*/
public java.lang.String getSha1Checksum() {
return sha1Checksum;
}
/**
* [Deprecated] This field is deprecated. An optional SHA1 checksum of the disk image before
* unpackaging provided by the client when the disk image is created.
* @param sha1Checksum sha1Checksum or {@code null} for none
*/
public RawDisk setSha1Checksum(java.lang.String sha1Checksum) {
this.sha1Checksum = sha1Checksum;
return this;
}
/**
* The full Google Cloud Storage URL where the disk image is stored. You must provide either this
* property or the sourceDisk property but not both.
* @return value or {@code null} for none
*/
public java.lang.String getSource() {
return source;
}
/**
* The full Google Cloud Storage URL where the disk image is stored. You must provide either this
* property or the sourceDisk property but not both.
* @param source source or {@code null} for none
*/
public RawDisk setSource(java.lang.String source) {
this.source = source;
return this;
}
@Override
public RawDisk set(String fieldName, Object value) {
return (RawDisk) super.set(fieldName, value);
}
@Override
public RawDisk clone() {
return (RawDisk) super.clone();
}
}
}
/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.compute.model;
/**
* Represents an Image resource.
*
* You can use images to create boot disks for your VM instances. For more information, read Images.
* (== resource_for beta.images ==) (== resource_for v1.images ==)
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the Compute Engine API. For a detailed explanation see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public final class Image extends com.google.api.client.json.GenericJson {
/**
* Size of the image tar.gz archive stored in Google Cloud Storage (in bytes).
* The value may be {@code null}.
*/
@com.google.api.client.util.Key @com.google.api.client.json.JsonString
private java.lang.Long archiveSizeBytes;
/**
* [Output Only] Creation timestamp in RFC3339 text format.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String creationTimestamp;
/**
* The deprecation status associated with this image.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private DeprecationStatus deprecated;
/**
* An optional description of this resource. Provide this property when you create the resource.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String description;
/**
* Size of the image when restored onto a persistent disk (in GB).
* The value may be {@code null}.
*/
@com.google.api.client.util.Key @com.google.api.client.json.JsonString
private java.lang.Long diskSizeGb;
/**
* The name of the image family to which this image belongs. You can create disks by specifying an
* image family instead of a specific image name. The image family always returns its latest image
* that is not deprecated. The name of the image family must comply with RFC1035.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String family;
/**
* A list of features to enable on the guest operating system. Applicable only for bootable
* images. Read Enabling guest operating system features to see a list of available options.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<GuestOsFeature> guestOsFeatures;
static {
// hack to force ProGuard to consider GuestOsFeature used, since otherwise it would be stripped out
// see https://github.com/google/google-api-java-client/issues/543
com.google.api.client.util.Data.nullOf(GuestOsFeature.class);
}
/**
* [Output Only] The unique identifier for the resource. This identifier is defined by the server.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key @com.google.api.client.json.JsonString
private java.math.BigInteger id;
/**
* Encrypts the image using a customer-supplied encryption key.
*
* After you encrypt an image with a customer-supplied key, you must provide the same key if you
* use the image later (e.g. to create a disk from the image).
*
* Customer-supplied encryption keys do not protect access to metadata of the disk.
*
* If you do not provide an encryption key when creating the image, then the disk will be
* encrypted using an automatically generated key and you do not need to provide a key to use the
* image later.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private CustomerEncryptionKey imageEncryptionKey;
/**
* [Output Only] Type of the resource. Always compute#image for images.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String kind;
/**
* A fingerprint for the labels being applied to this image, which is essentially a hash of the
* labels used for optimistic locking. The fingerprint is initially generated by Compute Engine
* and changes after every request to modify or update labels. You must always provide an up-to-
* date fingerprint hash in order to update or change labels, otherwise the request will fail with
* error 412 conditionNotMet.
*
* To see the latest fingerprint, make a get() request to retrieve an image.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String labelFingerprint;
/**
* Labels to apply to this image. These can be later modified by the setLabels method.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.Map<String, java.lang.String> labels;
/**
* Integer license codes indicating which licenses are attached to this image.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key @com.google.api.client.json.JsonString
private java.util.List<java.lang.Long> licenseCodes;
/**
* Any applicable license URI.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<java.lang.String> licenses;
/**
* Name of the resource; provided by the client when the resource is created. The name must be
* 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters
* long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first
* character must be a lowercase letter, and all following characters must be a dash, lowercase
* letter, or digit, except the last character, which cannot be a dash.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String name;
/**
* The parameters of the raw disk image.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private RawDisk rawDisk;
/**
* [Output Only] Server-defined URL for the resource.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String selfLink;
/**
* [Output Only] Server-defined URL for this resource's resource id.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String selfLinkWithId;
/**
* Set the secure boot keys of shielded instance.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private InitialStateConfig shieldedInstanceInitialState;
/**
* URL of the source disk used to create this image. This can be a full or valid partial URL. You
* must provide either this property or the rawDisk.source property but not both to create an
* image. For example, the following are valid values: -
* https://www.googleapis.com/compute/v1/projects/project/zones/zone/disks/disk -
* projects/project/zones/zone/disks/disk - zones/zone/disks/disk
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String sourceDisk;
/**
* The customer-supplied encryption key of the source disk. Required if the source disk is
* protected by a customer-supplied encryption key.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private CustomerEncryptionKey sourceDiskEncryptionKey;
/**
* [Output Only] The ID value of the disk used to create this image. This value may be used to
* determine whether the image was taken from the current or a previous instance of a given disk
* name.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String sourceDiskId;
/**
* URL of the source image used to create this image. This can be a full or valid partial URL. You
* must provide exactly one of: - this property, or - the rawDisk.source property, or - the
* sourceDisk property in order to create an image.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String sourceImage;
/**
* The customer-supplied encryption key of the source image. Required if the source image is
* protected by a customer-supplied encryption key.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private CustomerEncryptionKey sourceImageEncryptionKey;
/**
* [Output Only] The ID value of the image used to create this image. This value may be used to
* determine whether the image was taken from the current or a previous instance of a given image
* name.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String sourceImageId;
/**
* URL of the source snapshot used to create this image. This can be a full or valid partial URL.
* You must provide exactly one of: - this property, or - the sourceImage property, or - the
* rawDisk.source property, or - the sourceDisk property in order to create an image.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String sourceSnapshot;
/**
* The customer-supplied encryption key of the source snapshot. Required if the source snapshot is
* protected by a customer-supplied encryption key.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private CustomerEncryptionKey sourceSnapshotEncryptionKey;
/**
* [Output Only] The ID value of the snapshot used to create this image. This value may be used to
* determine whether the snapshot was taken from the current or a previous instance of a given
* snapshot name.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String sourceSnapshotId;
/**
* The type of the image used to create this disk. The default and only value is RAW
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String sourceType;
/**
* [Output Only] The status of the image. An image can be used to create other resources, such as
* instances, only after the image has been successfully created and the status is set to READY.
* Possible values are FAILED, PENDING, or READY.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String status;
/**
* Cloud Storage bucket storage location of the image (regional or multi-regional).
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<java.lang.String> storageLocations;
/**
* Size of the image tar.gz archive stored in Google Cloud Storage (in bytes).
* @return value or {@code null} for none
*/
public java.lang.Long getArchiveSizeBytes() {
return archiveSizeBytes;
}
/**
* Size of the image tar.gz archive stored in Google Cloud Storage (in bytes).
* @param archiveSizeBytes archiveSizeBytes or {@code null} for none
*/
public Image setArchiveSizeBytes(java.lang.Long archiveSizeBytes) {
this.archiveSizeBytes = archiveSizeBytes;
return this;
}
/**
* [Output Only] Creation timestamp in RFC3339 text format.
* @return value or {@code null} for none
*/
public java.lang.String getCreationTimestamp() {
return creationTimestamp;
}
/**
* [Output Only] Creation timestamp in RFC3339 text format.
* @param creationTimestamp creationTimestamp or {@code null} for none
*/
public Image setCreationTimestamp(java.lang.String creationTimestamp) {
this.creationTimestamp = creationTimestamp;
return this;
}
/**
* The deprecation status associated with this image.
* @return value or {@code null} for none
*/
public DeprecationStatus getDeprecated() {
return deprecated;
}
/**
* The deprecation status associated with this image.
* @param deprecated deprecated or {@code null} for none
*/
public Image setDeprecated(DeprecationStatus deprecated) {
this.deprecated = deprecated;
return this;
}
/**
* An optional description of this resource. Provide this property when you create the resource.
* @return value or {@code null} for none
*/
public java.lang.String getDescription() {
return description;
}
/**
* An optional description of this resource. Provide this property when you create the resource.
* @param description description or {@code null} for none
*/
public Image setDescription(java.lang.String description) {
this.description = description;
return this;
}
/**
* Size of the image when restored onto a persistent disk (in GB).
* @return value or {@code null} for none
*/
public java.lang.Long getDiskSizeGb() {
return diskSizeGb;
}
/**
* Size of the image when restored onto a persistent disk (in GB).
* @param diskSizeGb diskSizeGb or {@code null} for none
*/
public Image setDiskSizeGb(java.lang.Long diskSizeGb) {
this.diskSizeGb = diskSizeGb;
return this;
}
/**
* The name of the image family to which this image belongs. You can create disks by specifying an
* image family instead of a specific image name. The image family always returns its latest image
* that is not deprecated. The name of the image family must comply with RFC1035.
* @return value or {@code null} for none
*/
public java.lang.String getFamily() {
return family;
}
/**
* The name of the image family to which this image belongs. You can create disks by specifying an
* image family instead of a specific image name. The image family always returns its latest image
* that is not deprecated. The name of the image family must comply with RFC1035.
* @param family family or {@code null} for none
*/
public Image setFamily(java.lang.String family) {
this.family = family;
return this;
}
/**
* A list of features to enable on the guest operating system. Applicable only for bootable
* images. Read Enabling guest operating system features to see a list of available options.
* @return value or {@code null} for none
*/
public java.util.List<GuestOsFeature> getGuestOsFeatures() {
return guestOsFeatures;
}
/**
* A list of features to enable on the guest operating system. Applicable only for bootable
* images. Read Enabling guest operating system features to see a list of available options.
* @param guestOsFeatures guestOsFeatures or {@code null} for none
*/
public Image setGuestOsFeatures(java.util.List<GuestOsFeature> guestOsFeatures) {
this.guestOsFeatures = guestOsFeatures;
return this;
}
/**
* [Output Only] The unique identifier for the resource. This identifier is defined by the server.
* @return value or {@code null} for none
*/
public java.math.BigInteger getId() {
return id;
}
/**
* [Output Only] The unique identifier for the resource. This identifier is defined by the server.
* @param id id or {@code null} for none
*/
public Image setId(java.math.BigInteger id) {
this.id = id;
return this;
}
/**
* Encrypts the image using a customer-supplied encryption key.
*
* After you encrypt an image with a customer-supplied key, you must provide the same key if you
* use the image later (e.g. to create a disk from the image).
*
* Customer-supplied encryption keys do not protect access to metadata of the disk.
*
* If you do not provide an encryption key when creating the image, then the disk will be
* encrypted using an automatically generated key and you do not need to provide a key to use the
* image later.
* @return value or {@code null} for none
*/
public CustomerEncryptionKey getImageEncryptionKey() {
return imageEncryptionKey;
}
/**
* Encrypts the image using a customer-supplied encryption key.
*
* After you encrypt an image with a customer-supplied key, you must provide the same key if you
* use the image later (e.g. to create a disk from the image).
*
* Customer-supplied encryption keys do not protect access to metadata of the disk.
*
* If you do not provide an encryption key when creating the image, then the disk will be
* encrypted using an automatically generated key and you do not need to provide a key to use the
* image later.
* @param imageEncryptionKey imageEncryptionKey or {@code null} for none
*/
public Image setImageEncryptionKey(CustomerEncryptionKey imageEncryptionKey) {
this.imageEncryptionKey = imageEncryptionKey;
return this;
}
/**
* [Output Only] Type of the resource. Always compute#image for images.
* @return value or {@code null} for none
*/
public java.lang.String getKind() {
return kind;
}
/**
* [Output Only] Type of the resource. Always compute#image for images.
* @param kind kind or {@code null} for none
*/
public Image setKind(java.lang.String kind) {
this.kind = kind;
return this;
}
  /**
   * A fingerprint (essentially a hash) of the labels applied to this image, used for
   * optimistic locking. Compute Engine regenerates the fingerprint after every request that
   * modifies labels; an up-to-date fingerprint must accompany any label update, otherwise the
   * request fails with error 412 conditionNotMet. To get the latest fingerprint, make a
   * get() request for the image.
   *
   * @see #decodeLabelFingerprint()
   * @return value or {@code null} for none
   */
  public java.lang.String getLabelFingerprint() {
    return labelFingerprint;
  }
  /**
   * Base64-decodes the stored label fingerprint. See {@link #getLabelFingerprint()} for the
   * optimistic-locking semantics of the fingerprint.
   *
   * @see #getLabelFingerprint()
   * @return Base64 decoded value or {@code null} for none
   *
   * @since 1.14
   */
  public byte[] decodeLabelFingerprint() {
    return com.google.api.client.util.Base64.decodeBase64(labelFingerprint);
  }
  /**
   * Sets the label fingerprint from an already Base64-encoded string. See
   * {@link #getLabelFingerprint()} for the optimistic-locking semantics of the fingerprint.
   *
   * @see #encodeLabelFingerprint(byte[])
   * @param labelFingerprint labelFingerprint or {@code null} for none
   */
  public Image setLabelFingerprint(java.lang.String labelFingerprint) {
    this.labelFingerprint = labelFingerprint;
    return this;
  }
  /**
   * Sets the label fingerprint from raw bytes, storing it as a URL-safe Base64-encoded
   * string. See {@link #getLabelFingerprint()} for the optimistic-locking semantics of the
   * fingerprint.
   *
   * @see #setLabelFingerprint(java.lang.String)
   * @param labelFingerprint raw fingerprint bytes, or {@code null} for none
   * @return this {@code Image}, for call chaining
   *
   * @since 1.14
   */
  public Image encodeLabelFingerprint(byte[] labelFingerprint) {
    this.labelFingerprint = com.google.api.client.util.Base64.encodeBase64URLSafeString(labelFingerprint);
    return this;
  }
/**
* Labels to apply to this image. These can be later modified by the setLabels method.
* @return value or {@code null} for none
*/
public java.util.Map<String, java.lang.String> getLabels() {
return labels;
}
/**
* Labels to apply to this image. These can be later modified by the setLabels method.
* @param labels labels or {@code null} for none
*/
public Image setLabels(java.util.Map<String, java.lang.String> labels) {
this.labels = labels;
return this;
}
/**
* Integer license codes indicating which licenses are attached to this image.
* @return value or {@code null} for none
*/
public java.util.List<java.lang.Long> getLicenseCodes() {
return licenseCodes;
}
/**
* Integer license codes indicating which licenses are attached to this image.
* @param licenseCodes licenseCodes or {@code null} for none
*/
public Image setLicenseCodes(java.util.List<java.lang.Long> licenseCodes) {
this.licenseCodes = licenseCodes;
return this;
}
/**
* Any applicable license URI.
* @return value or {@code null} for none
*/
public java.util.List<java.lang.String> getLicenses() {
return licenses;
}
/**
* Any applicable license URI.
* @param licenses licenses or {@code null} for none
*/
public Image setLicenses(java.util.List<java.lang.String> licenses) {
this.licenses = licenses;
return this;
}
/**
* Name of the resource; provided by the client when the resource is created. The name must be
* 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters
* long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first
* character must be a lowercase letter, and all following characters must be a dash, lowercase
* letter, or digit, except the last character, which cannot be a dash.
* @return value or {@code null} for none
*/
public java.lang.String getName() {
return name;
}
/**
* Name of the resource; provided by the client when the resource is created. The name must be
* 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters
* long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first
* character must be a lowercase letter, and all following characters must be a dash, lowercase
* letter, or digit, except the last character, which cannot be a dash.
* @param name name or {@code null} for none
*/
public Image setName(java.lang.String name) {
this.name = name;
return this;
}
/**
* The parameters of the raw disk image.
* @return value or {@code null} for none
*/
public RawDisk getRawDisk() {
return rawDisk;
}
/**
* The parameters of the raw disk image.
* @param rawDisk rawDisk or {@code null} for none
*/
public Image setRawDisk(RawDisk rawDisk) {
this.rawDisk = rawDisk;
return this;
}
/**
* [Output Only] Server-defined URL for the resource.
* @return value or {@code null} for none
*/
public java.lang.String getSelfLink() {
return selfLink;
}
/**
* [Output Only] Server-defined URL for the resource.
* @param selfLink selfLink or {@code null} for none
*/
public Image setSelfLink(java.lang.String selfLink) {
this.selfLink = selfLink;
return this;
}
/**
* [Output Only] Server-defined URL for this resource's resource id.
* @return value or {@code null} for none
*/
public java.lang.String getSelfLinkWithId() {
return selfLinkWithId;
}
/**
* [Output Only] Server-defined URL for this resource's resource id.
* @param selfLinkWithId selfLinkWithId or {@code null} for none
*/
public Image setSelfLinkWithId(java.lang.String selfLinkWithId) {
this.selfLinkWithId = selfLinkWithId;
return this;
}
/**
* Set the secure boot keys of shielded instance.
* @return value or {@code null} for none
*/
public InitialStateConfig getShieldedInstanceInitialState() {
return shieldedInstanceInitialState;
}
/**
* Set the secure boot keys of shielded instance.
* @param shieldedInstanceInitialState shieldedInstanceInitialState or {@code null} for none
*/
public Image setShieldedInstanceInitialState(InitialStateConfig shieldedInstanceInitialState) {
this.shieldedInstanceInitialState = shieldedInstanceInitialState;
return this;
}
/**
* URL of the source disk used to create this image. This can be a full or valid partial URL. You
* must provide either this property or the rawDisk.source property but not both to create an
* image. For example, the following are valid values: -
* https://www.googleapis.com/compute/v1/projects/project/zones/zone/disks/disk -
* projects/project/zones/zone/disks/disk - zones/zone/disks/disk
* @return value or {@code null} for none
*/
public java.lang.String getSourceDisk() {
return sourceDisk;
}
/**
* URL of the source disk used to create this image. This can be a full or valid partial URL. You
* must provide either this property or the rawDisk.source property but not both to create an
* image. For example, the following are valid values: -
* https://www.googleapis.com/compute/v1/projects/project/zones/zone/disks/disk -
* projects/project/zones/zone/disks/disk - zones/zone/disks/disk
* @param sourceDisk sourceDisk or {@code null} for none
*/
public Image setSourceDisk(java.lang.String sourceDisk) {
this.sourceDisk = sourceDisk;
return this;
}
/**
* The customer-supplied encryption key of the source disk. Required if the source disk is
* protected by a customer-supplied encryption key.
* @return value or {@code null} for none
*/
public CustomerEncryptionKey getSourceDiskEncryptionKey() {
return sourceDiskEncryptionKey;
}
/**
* The customer-supplied encryption key of the source disk. Required if the source disk is
* protected by a customer-supplied encryption key.
* @param sourceDiskEncryptionKey sourceDiskEncryptionKey or {@code null} for none
*/
public Image setSourceDiskEncryptionKey(CustomerEncryptionKey sourceDiskEncryptionKey) {
this.sourceDiskEncryptionKey = sourceDiskEncryptionKey;
return this;
}
/**
* [Output Only] The ID value of the disk used to create this image. This value may be used to
* determine whether the image was taken from the current or a previous instance of a given disk
* name.
* @return value or {@code null} for none
*/
public java.lang.String getSourceDiskId() {
return sourceDiskId;
}
/**
* [Output Only] The ID value of the disk used to create this image. This value may be used to
* determine whether the image was taken from the current or a previous instance of a given disk
* name.
* @param sourceDiskId sourceDiskId or {@code null} for none
*/
public Image setSourceDiskId(java.lang.String sourceDiskId) {
this.sourceDiskId = sourceDiskId;
return this;
}
/**
* URL of the source image used to create this image. This can be a full or valid partial URL. You
* must provide exactly one of: - this property, or - the rawDisk.source property, or - the
* sourceDisk property in order to create an image.
* @return value or {@code null} for none
*/
public java.lang.String getSourceImage() {
return sourceImage;
}
/**
* URL of the source image used to create this image. This can be a full or valid partial URL. You
* must provide exactly one of: - this property, or - the rawDisk.source property, or - the
* sourceDisk property in order to create an image.
* @param sourceImage sourceImage or {@code null} for none
*/
public Image setSourceImage(java.lang.String sourceImage) {
this.sourceImage = sourceImage;
return this;
}
/**
* The customer-supplied encryption key of the source image. Required if the source image is
* protected by a customer-supplied encryption key.
* @return value or {@code null} for none
*/
public CustomerEncryptionKey getSourceImageEncryptionKey() {
return sourceImageEncryptionKey;
}
/**
* The customer-supplied encryption key of the source image. Required if the source image is
* protected by a customer-supplied encryption key.
* @param sourceImageEncryptionKey sourceImageEncryptionKey or {@code null} for none
*/
public Image setSourceImageEncryptionKey(CustomerEncryptionKey sourceImageEncryptionKey) {
this.sourceImageEncryptionKey = sourceImageEncryptionKey;
return this;
}
/**
* [Output Only] The ID value of the image used to create this image. This value may be used to
* determine whether the image was taken from the current or a previous instance of a given image
* name.
* @return value or {@code null} for none
*/
public java.lang.String getSourceImageId() {
return sourceImageId;
}
/**
* [Output Only] The ID value of the image used to create this image. This value may be used to
* determine whether the image was taken from the current or a previous instance of a given image
* name.
* @param sourceImageId sourceImageId or {@code null} for none
*/
public Image setSourceImageId(java.lang.String sourceImageId) {
this.sourceImageId = sourceImageId;
return this;
}
/**
* URL of the source snapshot used to create this image. This can be a full or valid partial URL.
* You must provide exactly one of: - this property, or - the sourceImage property, or - the
* rawDisk.source property, or - the sourceDisk property in order to create an image.
* @return value or {@code null} for none
*/
public java.lang.String getSourceSnapshot() {
return sourceSnapshot;
}
/**
* URL of the source snapshot used to create this image. This can be a full or valid partial URL.
* You must provide exactly one of: - this property, or - the sourceImage property, or - the
* rawDisk.source property, or - the sourceDisk property in order to create an image.
* @param sourceSnapshot sourceSnapshot or {@code null} for none
*/
public Image setSourceSnapshot(java.lang.String sourceSnapshot) {
this.sourceSnapshot = sourceSnapshot;
return this;
}
/**
* The customer-supplied encryption key of the source snapshot. Required if the source snapshot is
* protected by a customer-supplied encryption key.
* @return value or {@code null} for none
*/
public CustomerEncryptionKey getSourceSnapshotEncryptionKey() {
return sourceSnapshotEncryptionKey;
}
/**
* The customer-supplied encryption key of the source snapshot. Required if the source snapshot is
* protected by a customer-supplied encryption key.
* @param sourceSnapshotEncryptionKey sourceSnapshotEncryptionKey or {@code null} for none
*/
public Image setSourceSnapshotEncryptionKey(CustomerEncryptionKey sourceSnapshotEncryptionKey) {
this.sourceSnapshotEncryptionKey = sourceSnapshotEncryptionKey;
return this;
}
/**
* [Output Only] The ID value of the snapshot used to create this image. This value may be used to
* determine whether the snapshot was taken from the current or a previous instance of a given
* snapshot name.
* @return value or {@code null} for none
*/
public java.lang.String getSourceSnapshotId() {
return sourceSnapshotId;
}
/**
* [Output Only] The ID value of the snapshot used to create this image. This value may be used to
* determine whether the snapshot was taken from the current or a previous instance of a given
* snapshot name.
* @param sourceSnapshotId sourceSnapshotId or {@code null} for none
*/
public Image setSourceSnapshotId(java.lang.String sourceSnapshotId) {
this.sourceSnapshotId = sourceSnapshotId;
return this;
}
/**
* The type of the image used to create this disk. The default and only value is RAW
* @return value or {@code null} for none
*/
public java.lang.String getSourceType() {
return sourceType;
}
/**
* The type of the image used to create this disk. The default and only value is RAW
* @param sourceType sourceType or {@code null} for none
*/
public Image setSourceType(java.lang.String sourceType) {
this.sourceType = sourceType;
return this;
}
/**
* [Output Only] The status of the image. An image can be used to create other resources, such as
* instances, only after the image has been successfully created and the status is set to READY.
* Possible values are FAILED, PENDING, or READY.
* @return value or {@code null} for none
*/
public java.lang.String getStatus() {
return status;
}
/**
* [Output Only] The status of the image. An image can be used to create other resources, such as
* instances, only after the image has been successfully created and the status is set to READY.
* Possible values are FAILED, PENDING, or READY.
* @param status status or {@code null} for none
*/
public Image setStatus(java.lang.String status) {
this.status = status;
return this;
}
/**
* Cloud Storage bucket storage location of the image (regional or multi-regional).
* @return value or {@code null} for none
*/
public java.util.List<java.lang.String> getStorageLocations() {
return storageLocations;
}
/**
* Cloud Storage bucket storage location of the image (regional or multi-regional).
* @param storageLocations storageLocations or {@code null} for none
*/
public Image setStorageLocations(java.util.List<java.lang.String> storageLocations) {
this.storageLocations = storageLocations;
return this;
}
  @Override
  public Image set(String fieldName, Object value) {
    // Covariant override: GenericJson.set stores the key/value pair; narrowing the return
    // type keeps fluent call chains typed as Image.
    return (Image) super.set(fieldName, value);
  }
  @Override
  public Image clone() {
    // Covariant override: delegates copying to GenericJson.clone, narrowing the return type.
    return (Image) super.clone();
  }
/**
* The parameters of the raw disk image.
*/
public static final class RawDisk extends com.google.api.client.json.GenericJson {
/**
* The format used to encode and transmit the block device, which should be TAR. This is just a
* container and transmission format and not a runtime format. Provided by the client when the
* disk image is created.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String containerType;
/**
* [Deprecated] This field is deprecated. An optional SHA1 checksum of the disk image before
* unpackaging provided by the client when the disk image is created.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String sha1Checksum;
/**
* The full Google Cloud Storage URL where the disk image is stored. You must provide either this
* property or the sourceDisk property but not both.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String source;
/**
* The format used to encode and transmit the block device, which should be TAR. This is just a
* container and transmission format and not a runtime format. Provided by the client when the
* disk image is created.
* @return value or {@code null} for none
*/
public java.lang.String getContainerType() {
return containerType;
}
/**
* The format used to encode and transmit the block device, which should be TAR. This is just a
* container and transmission format and not a runtime format. Provided by the client when the
* disk image is created.
* @param containerType containerType or {@code null} for none
*/
public RawDisk setContainerType(java.lang.String containerType) {
this.containerType = containerType;
return this;
}
/**
* [Deprecated] This field is deprecated. An optional SHA1 checksum of the disk image before
* unpackaging provided by the client when the disk image is created.
* @return value or {@code null} for none
*/
public java.lang.String getSha1Checksum() {
return sha1Checksum;
}
/**
* [Deprecated] This field is deprecated. An optional SHA1 checksum of the disk image before
* unpackaging provided by the client when the disk image is created.
* @param sha1Checksum sha1Checksum or {@code null} for none
*/
public RawDisk setSha1Checksum(java.lang.String sha1Checksum) {
this.sha1Checksum = sha1Checksum;
return this;
}
/**
* The full Google Cloud Storage URL where the disk image is stored. You must provide either this
* property or the sourceDisk property but not both.
* @return value or {@code null} for none
*/
public java.lang.String getSource() {
return source;
}
/**
* The full Google Cloud Storage URL where the disk image is stored. You must provide either this
* property or the sourceDisk property but not both.
* @param source source or {@code null} for none
*/
public RawDisk setSource(java.lang.String source) {
this.source = source;
return this;
}
@Override
public RawDisk set(String fieldName, Object value) {
return (RawDisk) super.set(fieldName, value);
}
@Override
public RawDisk clone() {
return (RawDisk) super.clone();
}
}
}
|
googleapis/google-cloud-java | 37,921 | java-shopping-merchant-accounts/proto-google-shopping-merchant-accounts-v1beta/src/main/java/com/google/shopping/merchant/accounts/v1beta/ListAccountsResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/shopping/merchant/accounts/v1beta/accounts.proto
// Protobuf Java Version: 3.25.8
package com.google.shopping.merchant.accounts.v1beta;
/**
*
*
* <pre>
* Response message for the `ListAccounts` method.
* </pre>
*
* Protobuf type {@code google.shopping.merchant.accounts.v1beta.ListAccountsResponse}
*/
public final class ListAccountsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.shopping.merchant.accounts.v1beta.ListAccountsResponse)
ListAccountsResponseOrBuilder {
private static final long serialVersionUID = 0L;
  // Use ListAccountsResponse.newBuilder() to construct.
  // Invoked only by the generated Builder; copies the builder's state via the superclass.
  private ListAccountsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Initializes the default instance: no accounts and an empty next-page token.
  private ListAccountsResponse() {
    accounts_ = java.util.Collections.emptyList();
    nextPageToken_ = "";
  }
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListAccountsResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.shopping.merchant.accounts.v1beta.AccountsProto
.internal_static_google_shopping_merchant_accounts_v1beta_ListAccountsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.shopping.merchant.accounts.v1beta.AccountsProto
.internal_static_google_shopping_merchant_accounts_v1beta_ListAccountsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.shopping.merchant.accounts.v1beta.ListAccountsResponse.class,
com.google.shopping.merchant.accounts.v1beta.ListAccountsResponse.Builder.class);
}
public static final int ACCOUNTS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.shopping.merchant.accounts.v1beta.Account> accounts_;
/**
*
*
* <pre>
* The accounts matching the `ListAccountsRequest`.
* </pre>
*
* <code>repeated .google.shopping.merchant.accounts.v1beta.Account accounts = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.shopping.merchant.accounts.v1beta.Account> getAccountsList() {
return accounts_;
}
/**
*
*
* <pre>
* The accounts matching the `ListAccountsRequest`.
* </pre>
*
* <code>repeated .google.shopping.merchant.accounts.v1beta.Account accounts = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.shopping.merchant.accounts.v1beta.AccountOrBuilder>
getAccountsOrBuilderList() {
return accounts_;
}
/**
*
*
* <pre>
* The accounts matching the `ListAccountsRequest`.
* </pre>
*
* <code>repeated .google.shopping.merchant.accounts.v1beta.Account accounts = 1;</code>
*/
@java.lang.Override
public int getAccountsCount() {
return accounts_.size();
}
/**
*
*
* <pre>
* The accounts matching the `ListAccountsRequest`.
* </pre>
*
* <code>repeated .google.shopping.merchant.accounts.v1beta.Account accounts = 1;</code>
*/
@java.lang.Override
public com.google.shopping.merchant.accounts.v1beta.Account getAccounts(int index) {
return accounts_.get(index);
}
/**
*
*
* <pre>
* The accounts matching the `ListAccountsRequest`.
* </pre>
*
* <code>repeated .google.shopping.merchant.accounts.v1beta.Account accounts = 1;</code>
*/
@java.lang.Override
public com.google.shopping.merchant.accounts.v1beta.AccountOrBuilder getAccountsOrBuilder(
int index) {
return accounts_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < accounts_.size(); i++) {
output.writeMessage(1, accounts_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < accounts_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, accounts_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.shopping.merchant.accounts.v1beta.ListAccountsResponse)) {
return super.equals(obj);
}
com.google.shopping.merchant.accounts.v1beta.ListAccountsResponse other =
(com.google.shopping.merchant.accounts.v1beta.ListAccountsResponse) obj;
if (!getAccountsList().equals(other.getAccountsList())) return false;
if (!getNextPageToken().equals(other.getNextPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getAccountsCount() > 0) {
hash = (37 * hash) + ACCOUNTS_FIELD_NUMBER;
hash = (53 * hash) + getAccountsList().hashCode();
}
hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getNextPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.shopping.merchant.accounts.v1beta.ListAccountsResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.shopping.merchant.accounts.v1beta.ListAccountsResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.shopping.merchant.accounts.v1beta.ListAccountsResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.shopping.merchant.accounts.v1beta.ListAccountsResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.shopping.merchant.accounts.v1beta.ListAccountsResponse parseFrom(
byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.shopping.merchant.accounts.v1beta.ListAccountsResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.shopping.merchant.accounts.v1beta.ListAccountsResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.shopping.merchant.accounts.v1beta.ListAccountsResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.shopping.merchant.accounts.v1beta.ListAccountsResponse
parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.shopping.merchant.accounts.v1beta.ListAccountsResponse
parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.shopping.merchant.accounts.v1beta.ListAccountsResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.shopping.merchant.accounts.v1beta.ListAccountsResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.shopping.merchant.accounts.v1beta.ListAccountsResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Response message for the `ListAccounts` method.
* </pre>
*
* Protobuf type {@code google.shopping.merchant.accounts.v1beta.ListAccountsResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.shopping.merchant.accounts.v1beta.ListAccountsResponse)
com.google.shopping.merchant.accounts.v1beta.ListAccountsResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.shopping.merchant.accounts.v1beta.AccountsProto
.internal_static_google_shopping_merchant_accounts_v1beta_ListAccountsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.shopping.merchant.accounts.v1beta.AccountsProto
.internal_static_google_shopping_merchant_accounts_v1beta_ListAccountsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.shopping.merchant.accounts.v1beta.ListAccountsResponse.class,
com.google.shopping.merchant.accounts.v1beta.ListAccountsResponse.Builder.class);
}
// Construct using
// com.google.shopping.merchant.accounts.v1beta.ListAccountsResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (accountsBuilder_ == null) {
accounts_ = java.util.Collections.emptyList();
} else {
accounts_ = null;
accountsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.shopping.merchant.accounts.v1beta.AccountsProto
.internal_static_google_shopping_merchant_accounts_v1beta_ListAccountsResponse_descriptor;
}
@java.lang.Override
public com.google.shopping.merchant.accounts.v1beta.ListAccountsResponse
getDefaultInstanceForType() {
return com.google.shopping.merchant.accounts.v1beta.ListAccountsResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.shopping.merchant.accounts.v1beta.ListAccountsResponse build() {
com.google.shopping.merchant.accounts.v1beta.ListAccountsResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.shopping.merchant.accounts.v1beta.ListAccountsResponse buildPartial() {
com.google.shopping.merchant.accounts.v1beta.ListAccountsResponse result =
new com.google.shopping.merchant.accounts.v1beta.ListAccountsResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.shopping.merchant.accounts.v1beta.ListAccountsResponse result) {
if (accountsBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
accounts_ = java.util.Collections.unmodifiableList(accounts_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.accounts_ = accounts_;
} else {
result.accounts_ = accountsBuilder_.build();
}
}
private void buildPartial0(
com.google.shopping.merchant.accounts.v1beta.ListAccountsResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.shopping.merchant.accounts.v1beta.ListAccountsResponse) {
return mergeFrom((com.google.shopping.merchant.accounts.v1beta.ListAccountsResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(
com.google.shopping.merchant.accounts.v1beta.ListAccountsResponse other) {
if (other
== com.google.shopping.merchant.accounts.v1beta.ListAccountsResponse.getDefaultInstance())
return this;
if (accountsBuilder_ == null) {
if (!other.accounts_.isEmpty()) {
if (accounts_.isEmpty()) {
accounts_ = other.accounts_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureAccountsIsMutable();
accounts_.addAll(other.accounts_);
}
onChanged();
}
} else {
if (!other.accounts_.isEmpty()) {
if (accountsBuilder_.isEmpty()) {
accountsBuilder_.dispose();
accountsBuilder_ = null;
accounts_ = other.accounts_;
bitField0_ = (bitField0_ & ~0x00000001);
accountsBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getAccountsFieldBuilder()
: null;
} else {
accountsBuilder_.addAllMessages(other.accounts_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.shopping.merchant.accounts.v1beta.Account m =
input.readMessage(
com.google.shopping.merchant.accounts.v1beta.Account.parser(),
extensionRegistry);
if (accountsBuilder_ == null) {
ensureAccountsIsMutable();
accounts_.add(m);
} else {
accountsBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.shopping.merchant.accounts.v1beta.Account> accounts_ =
java.util.Collections.emptyList();
private void ensureAccountsIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
accounts_ =
new java.util.ArrayList<com.google.shopping.merchant.accounts.v1beta.Account>(
accounts_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.shopping.merchant.accounts.v1beta.Account,
com.google.shopping.merchant.accounts.v1beta.Account.Builder,
com.google.shopping.merchant.accounts.v1beta.AccountOrBuilder>
accountsBuilder_;
/**
*
*
* <pre>
* The accounts matching the `ListAccountsRequest`.
* </pre>
*
* <code>repeated .google.shopping.merchant.accounts.v1beta.Account accounts = 1;</code>
*/
public java.util.List<com.google.shopping.merchant.accounts.v1beta.Account> getAccountsList() {
if (accountsBuilder_ == null) {
return java.util.Collections.unmodifiableList(accounts_);
} else {
return accountsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* The accounts matching the `ListAccountsRequest`.
* </pre>
*
* <code>repeated .google.shopping.merchant.accounts.v1beta.Account accounts = 1;</code>
*/
public int getAccountsCount() {
if (accountsBuilder_ == null) {
return accounts_.size();
} else {
return accountsBuilder_.getCount();
}
}
/**
*
*
* <pre>
* The accounts matching the `ListAccountsRequest`.
* </pre>
*
* <code>repeated .google.shopping.merchant.accounts.v1beta.Account accounts = 1;</code>
*/
public com.google.shopping.merchant.accounts.v1beta.Account getAccounts(int index) {
if (accountsBuilder_ == null) {
return accounts_.get(index);
} else {
return accountsBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* The accounts matching the `ListAccountsRequest`.
* </pre>
*
* <code>repeated .google.shopping.merchant.accounts.v1beta.Account accounts = 1;</code>
*/
public Builder setAccounts(
int index, com.google.shopping.merchant.accounts.v1beta.Account value) {
if (accountsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureAccountsIsMutable();
accounts_.set(index, value);
onChanged();
} else {
accountsBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The accounts matching the `ListAccountsRequest`.
* </pre>
*
* <code>repeated .google.shopping.merchant.accounts.v1beta.Account accounts = 1;</code>
*/
public Builder setAccounts(
int index, com.google.shopping.merchant.accounts.v1beta.Account.Builder builderForValue) {
if (accountsBuilder_ == null) {
ensureAccountsIsMutable();
accounts_.set(index, builderForValue.build());
onChanged();
} else {
accountsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The accounts matching the `ListAccountsRequest`.
* </pre>
*
* <code>repeated .google.shopping.merchant.accounts.v1beta.Account accounts = 1;</code>
*/
public Builder addAccounts(com.google.shopping.merchant.accounts.v1beta.Account value) {
if (accountsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureAccountsIsMutable();
accounts_.add(value);
onChanged();
} else {
accountsBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* The accounts matching the `ListAccountsRequest`.
* </pre>
*
* <code>repeated .google.shopping.merchant.accounts.v1beta.Account accounts = 1;</code>
*/
public Builder addAccounts(
int index, com.google.shopping.merchant.accounts.v1beta.Account value) {
if (accountsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureAccountsIsMutable();
accounts_.add(index, value);
onChanged();
} else {
accountsBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The accounts matching the `ListAccountsRequest`.
* </pre>
*
* <code>repeated .google.shopping.merchant.accounts.v1beta.Account accounts = 1;</code>
*/
public Builder addAccounts(
com.google.shopping.merchant.accounts.v1beta.Account.Builder builderForValue) {
if (accountsBuilder_ == null) {
ensureAccountsIsMutable();
accounts_.add(builderForValue.build());
onChanged();
} else {
accountsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The accounts matching the `ListAccountsRequest`.
* </pre>
*
* <code>repeated .google.shopping.merchant.accounts.v1beta.Account accounts = 1;</code>
*/
public Builder addAccounts(
int index, com.google.shopping.merchant.accounts.v1beta.Account.Builder builderForValue) {
if (accountsBuilder_ == null) {
ensureAccountsIsMutable();
accounts_.add(index, builderForValue.build());
onChanged();
} else {
accountsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The accounts matching the `ListAccountsRequest`.
* </pre>
*
* <code>repeated .google.shopping.merchant.accounts.v1beta.Account accounts = 1;</code>
*/
public Builder addAllAccounts(
java.lang.Iterable<? extends com.google.shopping.merchant.accounts.v1beta.Account> values) {
if (accountsBuilder_ == null) {
ensureAccountsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, accounts_);
onChanged();
} else {
accountsBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* The accounts matching the `ListAccountsRequest`.
* </pre>
*
* <code>repeated .google.shopping.merchant.accounts.v1beta.Account accounts = 1;</code>
*/
public Builder clearAccounts() {
if (accountsBuilder_ == null) {
accounts_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
accountsBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* The accounts matching the `ListAccountsRequest`.
* </pre>
*
* <code>repeated .google.shopping.merchant.accounts.v1beta.Account accounts = 1;</code>
*/
public Builder removeAccounts(int index) {
if (accountsBuilder_ == null) {
ensureAccountsIsMutable();
accounts_.remove(index);
onChanged();
} else {
accountsBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* The accounts matching the `ListAccountsRequest`.
* </pre>
*
* <code>repeated .google.shopping.merchant.accounts.v1beta.Account accounts = 1;</code>
*/
public com.google.shopping.merchant.accounts.v1beta.Account.Builder getAccountsBuilder(
int index) {
return getAccountsFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* The accounts matching the `ListAccountsRequest`.
* </pre>
*
* <code>repeated .google.shopping.merchant.accounts.v1beta.Account accounts = 1;</code>
*/
public com.google.shopping.merchant.accounts.v1beta.AccountOrBuilder getAccountsOrBuilder(
int index) {
if (accountsBuilder_ == null) {
return accounts_.get(index);
} else {
return accountsBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* The accounts matching the `ListAccountsRequest`.
* </pre>
*
* <code>repeated .google.shopping.merchant.accounts.v1beta.Account accounts = 1;</code>
*/
public java.util.List<? extends com.google.shopping.merchant.accounts.v1beta.AccountOrBuilder>
getAccountsOrBuilderList() {
if (accountsBuilder_ != null) {
return accountsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(accounts_);
}
}
/**
*
*
* <pre>
* The accounts matching the `ListAccountsRequest`.
* </pre>
*
* <code>repeated .google.shopping.merchant.accounts.v1beta.Account accounts = 1;</code>
*/
public com.google.shopping.merchant.accounts.v1beta.Account.Builder addAccountsBuilder() {
return getAccountsFieldBuilder()
.addBuilder(com.google.shopping.merchant.accounts.v1beta.Account.getDefaultInstance());
}
/**
*
*
* <pre>
* The accounts matching the `ListAccountsRequest`.
* </pre>
*
* <code>repeated .google.shopping.merchant.accounts.v1beta.Account accounts = 1;</code>
*/
public com.google.shopping.merchant.accounts.v1beta.Account.Builder addAccountsBuilder(
int index) {
return getAccountsFieldBuilder()
.addBuilder(
index, com.google.shopping.merchant.accounts.v1beta.Account.getDefaultInstance());
}
/**
*
*
* <pre>
* The accounts matching the `ListAccountsRequest`.
* </pre>
*
* <code>repeated .google.shopping.merchant.accounts.v1beta.Account accounts = 1;</code>
*/
public java.util.List<com.google.shopping.merchant.accounts.v1beta.Account.Builder>
getAccountsBuilderList() {
return getAccountsFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.shopping.merchant.accounts.v1beta.Account,
com.google.shopping.merchant.accounts.v1beta.Account.Builder,
com.google.shopping.merchant.accounts.v1beta.AccountOrBuilder>
getAccountsFieldBuilder() {
if (accountsBuilder_ == null) {
accountsBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.shopping.merchant.accounts.v1beta.Account,
com.google.shopping.merchant.accounts.v1beta.Account.Builder,
com.google.shopping.merchant.accounts.v1beta.AccountOrBuilder>(
accounts_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
accounts_ = null;
}
return accountsBuilder_;
}
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.shopping.merchant.accounts.v1beta.ListAccountsResponse)
}
// @@protoc_insertion_point(class_scope:google.shopping.merchant.accounts.v1beta.ListAccountsResponse)
private static final com.google.shopping.merchant.accounts.v1beta.ListAccountsResponse
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.shopping.merchant.accounts.v1beta.ListAccountsResponse();
}
public static com.google.shopping.merchant.accounts.v1beta.ListAccountsResponse
getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ListAccountsResponse> PARSER =
new com.google.protobuf.AbstractParser<ListAccountsResponse>() {
@java.lang.Override
public ListAccountsResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ListAccountsResponse> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListAccountsResponse> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.shopping.merchant.accounts.v1beta.ListAccountsResponse
getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 37,940 | java-datacatalog/proto-google-cloud-datacatalog-v1/src/main/java/com/google/cloud/datacatalog/v1/CreateEntryGroupRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/datacatalog/v1/datacatalog.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.datacatalog.v1;
/**
*
*
* <pre>
* Request message for
* [CreateEntryGroup][google.cloud.datacatalog.v1.DataCatalog.CreateEntryGroup].
* </pre>
*
* Protobuf type {@code google.cloud.datacatalog.v1.CreateEntryGroupRequest}
*/
// NOTE(review): protoc-generated message class — the file header's "DO NOT EDIT" applies.
// Any hand edits will be overwritten the next time the protobuf code generator runs;
// behavioral changes belong in google/cloud/datacatalog/v1/datacatalog.proto instead.
public final class CreateEntryGroupRequest extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.datacatalog.v1.CreateEntryGroupRequest)
    CreateEntryGroupRequestOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use CreateEntryGroupRequest.newBuilder() to construct.
  private CreateEntryGroupRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  private CreateEntryGroupRequest() {
    parent_ = "";
    entryGroupId_ = "";
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new CreateEntryGroupRequest();
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.datacatalog.v1.Datacatalog
        .internal_static_google_cloud_datacatalog_v1_CreateEntryGroupRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.datacatalog.v1.Datacatalog
        .internal_static_google_cloud_datacatalog_v1_CreateEntryGroupRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.datacatalog.v1.CreateEntryGroupRequest.class,
            com.google.cloud.datacatalog.v1.CreateEntryGroupRequest.Builder.class);
  }

  // Presence bits for optional message fields; bit 0x00000001 = entry_group is set
  // (see hasEntryGroup() and writeTo() below).
  private int bitField0_;

  public static final int PARENT_FIELD_NUMBER = 1;

  @SuppressWarnings("serial")
  private volatile java.lang.Object parent_ = "";
  /**
   *
   *
   * <pre>
   * Required. The names of the project and location that the new entry group
   * belongs to.
   *
   * Note: The entry group itself and its child resources might not be
   * stored in the location specified in its name.
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The parent.
   */
  @java.lang.Override
  public java.lang.String getParent() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Field was stored as a ByteString (lazy UTF-8 decode); decode once and cache the String.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      parent_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Required. The names of the project and location that the new entry group
   * belongs to.
   *
   * Note: The entry group itself and its child resources might not be
   * stored in the location specified in its name.
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The bytes for parent.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getParentBytes() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      parent_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int ENTRY_GROUP_ID_FIELD_NUMBER = 3;

  @SuppressWarnings("serial")
  private volatile java.lang.Object entryGroupId_ = "";
  /**
   *
   *
   * <pre>
   * Required. The ID of the entry group to create.
   *
   * The ID must contain only letters (a-z, A-Z), numbers (0-9),
   * underscores (_), and must start with a letter or underscore.
   * The maximum size is 64 bytes when encoded in UTF-8.
   * </pre>
   *
   * <code>string entry_group_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The entryGroupId.
   */
  @java.lang.Override
  public java.lang.String getEntryGroupId() {
    java.lang.Object ref = entryGroupId_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Lazy UTF-8 decode of the ByteString form; result is cached back into the field.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      entryGroupId_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Required. The ID of the entry group to create.
   *
   * The ID must contain only letters (a-z, A-Z), numbers (0-9),
   * underscores (_), and must start with a letter or underscore.
   * The maximum size is 64 bytes when encoded in UTF-8.
   * </pre>
   *
   * <code>string entry_group_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The bytes for entryGroupId.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getEntryGroupIdBytes() {
    java.lang.Object ref = entryGroupId_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      entryGroupId_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int ENTRY_GROUP_FIELD_NUMBER = 2;
  private com.google.cloud.datacatalog.v1.EntryGroup entryGroup_;
  /**
   *
   *
   * <pre>
   * The entry group to create. Defaults to empty.
   * </pre>
   *
   * <code>.google.cloud.datacatalog.v1.EntryGroup entry_group = 2;</code>
   *
   * @return Whether the entryGroup field is set.
   */
  @java.lang.Override
  public boolean hasEntryGroup() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   *
   *
   * <pre>
   * The entry group to create. Defaults to empty.
   * </pre>
   *
   * <code>.google.cloud.datacatalog.v1.EntryGroup entry_group = 2;</code>
   *
   * @return The entryGroup.
   */
  @java.lang.Override
  public com.google.cloud.datacatalog.v1.EntryGroup getEntryGroup() {
    return entryGroup_ == null
        ? com.google.cloud.datacatalog.v1.EntryGroup.getDefaultInstance()
        : entryGroup_;
  }
  /**
   *
   *
   * <pre>
   * The entry group to create. Defaults to empty.
   * </pre>
   *
   * <code>.google.cloud.datacatalog.v1.EntryGroup entry_group = 2;</code>
   */
  @java.lang.Override
  public com.google.cloud.datacatalog.v1.EntryGroupOrBuilder getEntryGroupOrBuilder() {
    return entryGroup_ == null
        ? com.google.cloud.datacatalog.v1.EntryGroup.getDefaultInstance()
        : entryGroup_;
  }

  // Memoized isInitialized() result: -1 = not yet computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    // No required proto2-style fields in this message, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Fields are written in field-number order: parent (1), entry_group (2), entry_group_id (3).
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(2, getEntryGroup());
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(entryGroupId_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, entryGroupId_);
    }
    getUnknownFields().writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    // Serialized size is computed once and memoized in memoizedSize (inherited field).
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getEntryGroup());
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(entryGroupId_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, entryGroupId_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.datacatalog.v1.CreateEntryGroupRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.datacatalog.v1.CreateEntryGroupRequest other =
        (com.google.cloud.datacatalog.v1.CreateEntryGroupRequest) obj;

    if (!getParent().equals(other.getParent())) return false;
    if (!getEntryGroupId().equals(other.getEntryGroupId())) return false;
    if (hasEntryGroup() != other.hasEntryGroup()) return false;
    if (hasEntryGroup()) {
      if (!getEntryGroup().equals(other.getEntryGroup())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    hash = (37 * hash) + ENTRY_GROUP_ID_FIELD_NUMBER;
    hash = (53 * hash) + getEntryGroupId().hashCode();
    if (hasEntryGroup()) {
      hash = (37 * hash) + ENTRY_GROUP_FIELD_NUMBER;
      hash = (53 * hash) + getEntryGroup().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static com.google.cloud.datacatalog.v1.CreateEntryGroupRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.datacatalog.v1.CreateEntryGroupRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.datacatalog.v1.CreateEntryGroupRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.datacatalog.v1.CreateEntryGroupRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.datacatalog.v1.CreateEntryGroupRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.datacatalog.v1.CreateEntryGroupRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.datacatalog.v1.CreateEntryGroupRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.datacatalog.v1.CreateEntryGroupRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.datacatalog.v1.CreateEntryGroupRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.datacatalog.v1.CreateEntryGroupRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.datacatalog.v1.CreateEntryGroupRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.datacatalog.v1.CreateEntryGroupRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.cloud.datacatalog.v1.CreateEntryGroupRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Request message for
   * [CreateEntryGroup][google.cloud.datacatalog.v1.DataCatalog.CreateEntryGroup].
   * </pre>
   *
   * Protobuf type {@code google.cloud.datacatalog.v1.CreateEntryGroupRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.datacatalog.v1.CreateEntryGroupRequest)
      com.google.cloud.datacatalog.v1.CreateEntryGroupRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.datacatalog.v1.Datacatalog
          .internal_static_google_cloud_datacatalog_v1_CreateEntryGroupRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.datacatalog.v1.Datacatalog
          .internal_static_google_cloud_datacatalog_v1_CreateEntryGroupRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.datacatalog.v1.CreateEntryGroupRequest.class,
              com.google.cloud.datacatalog.v1.CreateEntryGroupRequest.Builder.class);
    }

    // Construct using com.google.cloud.datacatalog.v1.CreateEntryGroupRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getEntryGroupFieldBuilder();
      }
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      parent_ = "";
      entryGroupId_ = "";
      entryGroup_ = null;
      if (entryGroupBuilder_ != null) {
        entryGroupBuilder_.dispose();
        entryGroupBuilder_ = null;
      }
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.datacatalog.v1.Datacatalog
          .internal_static_google_cloud_datacatalog_v1_CreateEntryGroupRequest_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.datacatalog.v1.CreateEntryGroupRequest getDefaultInstanceForType() {
      return com.google.cloud.datacatalog.v1.CreateEntryGroupRequest.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.datacatalog.v1.CreateEntryGroupRequest build() {
      com.google.cloud.datacatalog.v1.CreateEntryGroupRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.datacatalog.v1.CreateEntryGroupRequest buildPartial() {
      com.google.cloud.datacatalog.v1.CreateEntryGroupRequest result =
          new com.google.cloud.datacatalog.v1.CreateEntryGroupRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies only the builder fields flagged in bitField0_ into the result message,
    // translating builder-side presence bits into the message's own bitField0_.
    private void buildPartial0(com.google.cloud.datacatalog.v1.CreateEntryGroupRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.parent_ = parent_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.entryGroupId_ = entryGroupId_;
      }
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.entryGroup_ = entryGroupBuilder_ == null ? entryGroup_ : entryGroupBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      result.bitField0_ |= to_bitField0_;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.datacatalog.v1.CreateEntryGroupRequest) {
        return mergeFrom((com.google.cloud.datacatalog.v1.CreateEntryGroupRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.cloud.datacatalog.v1.CreateEntryGroupRequest other) {
      if (other == com.google.cloud.datacatalog.v1.CreateEntryGroupRequest.getDefaultInstance())
        return this;
      if (!other.getParent().isEmpty()) {
        parent_ = other.parent_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (!other.getEntryGroupId().isEmpty()) {
        entryGroupId_ = other.entryGroupId_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (other.hasEntryGroup()) {
        mergeEntryGroup(other.getEntryGroup());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          // Wire tag = (field_number << 3) | wire_type; all three fields use
          // wire type 2 (length-delimited): 10 -> parent(1), 18 -> entry_group(2),
          // 26 -> entry_group_id(3).
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                parent_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                input.readMessage(getEntryGroupFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000004;
                break;
              } // case 18
            case 26:
              {
                entryGroupId_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 26
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    // Builder-side presence bits: 0x1 = parent, 0x2 = entryGroupId, 0x4 = entryGroup.
    private int bitField0_;

    private java.lang.Object parent_ = "";
    /**
     *
     *
     * <pre>
     * Required. The names of the project and location that the new entry group
     * belongs to.
     *
     * Note: The entry group itself and its child resources might not be
     * stored in the location specified in its name.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The parent.
     */
    public java.lang.String getParent() {
      java.lang.Object ref = parent_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        parent_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The names of the project and location that the new entry group
     * belongs to.
     *
     * Note: The entry group itself and its child resources might not be
     * stored in the location specified in its name.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The bytes for parent.
     */
    public com.google.protobuf.ByteString getParentBytes() {
      java.lang.Object ref = parent_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        parent_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The names of the project and location that the new entry group
     * belongs to.
     *
     * Note: The entry group itself and its child resources might not be
     * stored in the location specified in its name.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The parent to set.
     * @return This builder for chaining.
     */
    public Builder setParent(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The names of the project and location that the new entry group
     * belongs to.
     *
     * Note: The entry group itself and its child resources might not be
     * stored in the location specified in its name.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearParent() {
      parent_ = getDefaultInstance().getParent();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The names of the project and location that the new entry group
     * belongs to.
     *
     * Note: The entry group itself and its child resources might not be
     * stored in the location specified in its name.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The bytes for parent to set.
     * @return This builder for chaining.
     */
    public Builder setParentBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    private java.lang.Object entryGroupId_ = "";
    /**
     *
     *
     * <pre>
     * Required. The ID of the entry group to create.
     *
     * The ID must contain only letters (a-z, A-Z), numbers (0-9),
     * underscores (_), and must start with a letter or underscore.
     * The maximum size is 64 bytes when encoded in UTF-8.
     * </pre>
     *
     * <code>string entry_group_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return The entryGroupId.
     */
    public java.lang.String getEntryGroupId() {
      java.lang.Object ref = entryGroupId_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        entryGroupId_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The ID of the entry group to create.
     *
     * The ID must contain only letters (a-z, A-Z), numbers (0-9),
     * underscores (_), and must start with a letter or underscore.
     * The maximum size is 64 bytes when encoded in UTF-8.
     * </pre>
     *
     * <code>string entry_group_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return The bytes for entryGroupId.
     */
    public com.google.protobuf.ByteString getEntryGroupIdBytes() {
      java.lang.Object ref = entryGroupId_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        entryGroupId_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The ID of the entry group to create.
     *
     * The ID must contain only letters (a-z, A-Z), numbers (0-9),
     * underscores (_), and must start with a letter or underscore.
     * The maximum size is 64 bytes when encoded in UTF-8.
     * </pre>
     *
     * <code>string entry_group_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @param value The entryGroupId to set.
     * @return This builder for chaining.
     */
    public Builder setEntryGroupId(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      entryGroupId_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The ID of the entry group to create.
     *
     * The ID must contain only letters (a-z, A-Z), numbers (0-9),
     * underscores (_), and must start with a letter or underscore.
     * The maximum size is 64 bytes when encoded in UTF-8.
     * </pre>
     *
     * <code>string entry_group_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearEntryGroupId() {
      entryGroupId_ = getDefaultInstance().getEntryGroupId();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The ID of the entry group to create.
     *
     * The ID must contain only letters (a-z, A-Z), numbers (0-9),
     * underscores (_), and must start with a letter or underscore.
     * The maximum size is 64 bytes when encoded in UTF-8.
     * </pre>
     *
     * <code>string entry_group_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @param value The bytes for entryGroupId to set.
     * @return This builder for chaining.
     */
    public Builder setEntryGroupIdBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      entryGroupId_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    // entryGroup_ holds the plain message until a nested builder is requested;
    // once entryGroupBuilder_ exists it becomes the single source of truth.
    private com.google.cloud.datacatalog.v1.EntryGroup entryGroup_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.datacatalog.v1.EntryGroup,
            com.google.cloud.datacatalog.v1.EntryGroup.Builder,
            com.google.cloud.datacatalog.v1.EntryGroupOrBuilder>
        entryGroupBuilder_;
    /**
     *
     *
     * <pre>
     * The entry group to create. Defaults to empty.
     * </pre>
     *
     * <code>.google.cloud.datacatalog.v1.EntryGroup entry_group = 2;</code>
     *
     * @return Whether the entryGroup field is set.
     */
    public boolean hasEntryGroup() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     *
     *
     * <pre>
     * The entry group to create. Defaults to empty.
     * </pre>
     *
     * <code>.google.cloud.datacatalog.v1.EntryGroup entry_group = 2;</code>
     *
     * @return The entryGroup.
     */
    public com.google.cloud.datacatalog.v1.EntryGroup getEntryGroup() {
      if (entryGroupBuilder_ == null) {
        return entryGroup_ == null
            ? com.google.cloud.datacatalog.v1.EntryGroup.getDefaultInstance()
            : entryGroup_;
      } else {
        return entryGroupBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * The entry group to create. Defaults to empty.
     * </pre>
     *
     * <code>.google.cloud.datacatalog.v1.EntryGroup entry_group = 2;</code>
     */
    public Builder setEntryGroup(com.google.cloud.datacatalog.v1.EntryGroup value) {
      if (entryGroupBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        entryGroup_ = value;
      } else {
        entryGroupBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The entry group to create. Defaults to empty.
     * </pre>
     *
     * <code>.google.cloud.datacatalog.v1.EntryGroup entry_group = 2;</code>
     */
    public Builder setEntryGroup(
        com.google.cloud.datacatalog.v1.EntryGroup.Builder builderForValue) {
      if (entryGroupBuilder_ == null) {
        entryGroup_ = builderForValue.build();
      } else {
        entryGroupBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The entry group to create. Defaults to empty.
     * </pre>
     *
     * <code>.google.cloud.datacatalog.v1.EntryGroup entry_group = 2;</code>
     */
    public Builder mergeEntryGroup(com.google.cloud.datacatalog.v1.EntryGroup value) {
      if (entryGroupBuilder_ == null) {
        // Merge field-by-field only when a non-default value is already present;
        // otherwise replace wholesale.
        if (((bitField0_ & 0x00000004) != 0)
            && entryGroup_ != null
            && entryGroup_ != com.google.cloud.datacatalog.v1.EntryGroup.getDefaultInstance()) {
          getEntryGroupBuilder().mergeFrom(value);
        } else {
          entryGroup_ = value;
        }
      } else {
        entryGroupBuilder_.mergeFrom(value);
      }
      if (entryGroup_ != null) {
        bitField0_ |= 0x00000004;
        onChanged();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The entry group to create. Defaults to empty.
     * </pre>
     *
     * <code>.google.cloud.datacatalog.v1.EntryGroup entry_group = 2;</code>
     */
    public Builder clearEntryGroup() {
      bitField0_ = (bitField0_ & ~0x00000004);
      entryGroup_ = null;
      if (entryGroupBuilder_ != null) {
        entryGroupBuilder_.dispose();
        entryGroupBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The entry group to create. Defaults to empty.
     * </pre>
     *
     * <code>.google.cloud.datacatalog.v1.EntryGroup entry_group = 2;</code>
     */
    public com.google.cloud.datacatalog.v1.EntryGroup.Builder getEntryGroupBuilder() {
      bitField0_ |= 0x00000004;
      onChanged();
      return getEntryGroupFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * The entry group to create. Defaults to empty.
     * </pre>
     *
     * <code>.google.cloud.datacatalog.v1.EntryGroup entry_group = 2;</code>
     */
    public com.google.cloud.datacatalog.v1.EntryGroupOrBuilder getEntryGroupOrBuilder() {
      if (entryGroupBuilder_ != null) {
        return entryGroupBuilder_.getMessageOrBuilder();
      } else {
        return entryGroup_ == null
            ? com.google.cloud.datacatalog.v1.EntryGroup.getDefaultInstance()
            : entryGroup_;
      }
    }
    /**
     *
     *
     * <pre>
     * The entry group to create. Defaults to empty.
     * </pre>
     *
     * <code>.google.cloud.datacatalog.v1.EntryGroup entry_group = 2;</code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.datacatalog.v1.EntryGroup,
            com.google.cloud.datacatalog.v1.EntryGroup.Builder,
            com.google.cloud.datacatalog.v1.EntryGroupOrBuilder>
        getEntryGroupFieldBuilder() {
      if (entryGroupBuilder_ == null) {
        entryGroupBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.datacatalog.v1.EntryGroup,
                com.google.cloud.datacatalog.v1.EntryGroup.Builder,
                com.google.cloud.datacatalog.v1.EntryGroupOrBuilder>(
                getEntryGroup(), getParentForChildren(), isClean());
        entryGroup_ = null;
      }
      return entryGroupBuilder_;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.datacatalog.v1.CreateEntryGroupRequest)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.datacatalog.v1.CreateEntryGroupRequest)
  private static final com.google.cloud.datacatalog.v1.CreateEntryGroupRequest DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.datacatalog.v1.CreateEntryGroupRequest();
  }

  public static com.google.cloud.datacatalog.v1.CreateEntryGroupRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Shared stateless parser instance backing all parseFrom(...) entry points.
  private static final com.google.protobuf.Parser<CreateEntryGroupRequest> PARSER =
      new com.google.protobuf.AbstractParser<CreateEntryGroupRequest>() {
        @java.lang.Override
        public CreateEntryGroupRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            // Attach the partially parsed message so callers can inspect what was read.
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<CreateEntryGroupRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<CreateEntryGroupRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.datacatalog.v1.CreateEntryGroupRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/ignite | 38,157 | modules/core/src/main/java/org/apache/ignite/internal/processors/cache/GridCacheEntryEx.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.cache;
import java.util.Collection;
import java.util.UUID;
import javax.cache.Cache;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.cache.eviction.EvictableEntry;
import org.apache.ignite.internal.processors.affinity.AffinityTopologyVersion;
import org.apache.ignite.internal.processors.cache.distributed.GridDistributedLockCancelledException;
import org.apache.ignite.internal.processors.cache.distributed.dht.atomic.GridDhtAtomicAbstractUpdateFuture;
import org.apache.ignite.internal.processors.cache.distributed.dht.topology.GridDhtLocalPartition;
import org.apache.ignite.internal.processors.cache.persistence.CacheDataRow;
import org.apache.ignite.internal.processors.cache.transactions.IgniteInternalTx;
import org.apache.ignite.internal.processors.cache.transactions.IgniteTxKey;
import org.apache.ignite.internal.processors.cache.version.GridCacheVersion;
import org.apache.ignite.internal.processors.cache.version.GridCacheVersionedEntryEx;
import org.apache.ignite.internal.processors.dr.GridDrType;
import org.apache.ignite.internal.processors.query.schema.SchemaIndexCacheVisitorClosure;
import org.jetbrains.annotations.Nullable;
/**
 * Internal API for cache entry ({@code 'Ex'} stands for extended).
 */
public interface GridCacheEntryEx {
    /**
     * @return Memory size.
     * @throws IgniteCheckedException If failed.
     */
    public int memorySize() throws IgniteCheckedException;
    /**
     * @return {@code True} if entry is internal cache entry.
     */
    public boolean isInternal();
    /**
     * @return {@code True} if DHT.
     */
    public boolean isDht();
    /**
     * @return {@code True} if near.
     */
    public boolean isNear();
    /**
     * @return {@code True} if replicated.
     */
    public boolean isReplicated();
    /**
     * @return {@code True} if local.
     */
    public boolean isLocal();
    /**
     * @return {@code False} if entry belongs to cache map, {@code true} if this entry was created in colocated
     *      cache and node is not primary for this key.
     */
    public boolean detached();
    /**
     * Note: this method works only for cache configured in ATOMIC mode or for cache that is
     * data center replication target.
     *
     * @return {@code True} if entry has been already deleted.
     */
    public boolean deleted();
    /**
     * @return Context.
     */
    public <K, V> GridCacheContext<K, V> context();
    /**
     * @return Partition ID.
     */
    public int partition();
    /**
     * @return Key.
     */
    public KeyCacheObject key();
    /**
     * @return Transaction key.
     */
    public IgniteTxKey txKey();
    /**
     * @return Value.
     */
    public CacheObject rawGet();
    /**
     * @return {@code True} if has value or value bytes.
     */
    public boolean hasValue();
    /**
     * @param val New value.
     * @param ttl Time to live.
     * @return Old value.
     */
    public CacheObject rawPut(CacheObject val, long ttl);
    /**
     * Wraps this map entry into cache entry.
     *
     * @return Wrapped entry.
     */
    public <K, V> Cache.Entry<K, V> wrap();
    /**
     * Wraps entry to an entry with lazy value get.
     * @param keepBinary Keep binary flag.
     *
     * @return Entry.
     */
    public <K, V> Cache.Entry<K, V> wrapLazyValue(boolean keepBinary);
    /**
     * Peeks value provided to public API entries and to entry filters.
     *
     * @return Value.
     */
    @Nullable public CacheObject peekVisibleValue();
    /**
     * @return Entry which is safe to pass into eviction policy.
     */
    public <K, V> EvictableEntry<K, V> wrapEviction();
    /**
     * @return Entry which holds key and version (no value, since entry
     *      is intended to be used in sync evictions checks).
     */
    public <K, V> CacheEntryImplEx<K, V> wrapVersioned();
    /**
     * @return Not-null version if entry is obsolete.
     */
    public GridCacheVersion obsoleteVersion();
    /**
     * @return {@code True} if entry is obsolete.
     */
    public boolean obsolete();
    /**
     * @return {@code True} if entry is obsolete or deleted.
     * @see #deleted()
     */
    public boolean obsoleteOrDeleted();
    /**
     * @param exclude Obsolete version to ignore.
     * @return {@code True} if obsolete version is not {@code null} and is not the
     *      passed in version.
     */
    public boolean obsolete(GridCacheVersion exclude);
    /**
     * @return Entry info.
     */
    @Nullable public GridCacheEntryInfo info();
    /**
     * Invalidates this entry.
     *
     * @param newVer New version to set.
     * @return {@code true} if entry is obsolete.
     * @throws IgniteCheckedException If swap could not be released.
     */
    public boolean invalidate(GridCacheVersion newVer) throws IgniteCheckedException;
    /**
     * @param obsoleteVer Version for eviction.
     * @param filter Optional filter.
     * @param evictOffheap Evict offheap value flag.
     * @return {@code True} if entry could be evicted.
     * @throws IgniteCheckedException In case of error.
     */
    public boolean evictInternal(GridCacheVersion obsoleteVer, @Nullable CacheEntryPredicate[] filter,
        boolean evictOffheap) throws IgniteCheckedException;
    /**
     * This method should be called each time entry is marked obsolete
     * other than by calling {@link #markObsolete(GridCacheVersion)}.
     */
    public void onMarkedObsolete();
    /**
     * Checks if entry is new assuming lock is held externally.
     *
     * @return {@code True} if entry is new.
     * @throws GridCacheEntryRemovedException If entry was removed.
     */
    public boolean isNew() throws GridCacheEntryRemovedException;
    /**
     * Checks if entry is new while holding lock.
     *
     * @return {@code True} if entry is new.
     * @throws GridCacheEntryRemovedException If entry was removed.
     */
    public boolean isNewLocked() throws GridCacheEntryRemovedException;
    /**
     * @param topVer Topology version where validation should be performed.
     *  When negative the latest available topology should be used.
     *
     * @return Checks if value is valid.
     */
    public boolean valid(AffinityTopologyVersion topVer);
    /**
     * @return {@code True} if partition is in valid state.
     */
    public boolean partitionValid();
    /**
     * @param ver Cache version to set. The version will be used on updating entry instead of generated one.
     * @param tx Ongoing transaction (possibly null).
     * @param readThrough Flag indicating whether to read through.
     * @param updateMetrics If {@code true} then metrics should be updated.
     * @param evt Flag to signal event notification.
     * @param transformClo Transform closure to record event.
     * @param taskName Task name.
     * @param expiryPlc Expiry policy.
     * @param keepBinary Keep binary flag.
     * @return Cached value.
     * @throws IgniteCheckedException If loading value failed.
     * @throws GridCacheEntryRemovedException If entry was removed.
     */
    public CacheObject innerGet(@Nullable GridCacheVersion ver,
        @Nullable IgniteInternalTx tx,
        boolean readThrough,
        boolean updateMetrics,
        boolean evt,
        Object transformClo,
        String taskName,
        @Nullable IgniteCacheExpiryPolicy expiryPlc,
        boolean keepBinary)
        throws IgniteCheckedException, GridCacheEntryRemovedException;
    /**
     * @param ver Cache version to set. The version will be used on updating entry instead of generated one.
     * @param tx Cache transaction.
     * @param updateMetrics If {@code true} then metrics should be updated.
     * @param evt Flag to signal event notification.
     * @param transformClo Transform closure to record event.
     * @param taskName Task name.
     * @param expiryPlc Expiry policy.
     * @param keepBinary Keep binary flag.
     * @param readerArgs Reader will be added if not null.
     * @return Cached value and entry version.
     * @throws IgniteCheckedException If loading value failed.
     * @throws GridCacheEntryRemovedException If entry was removed.
     */
    public EntryGetResult innerGetVersioned(
        @Nullable GridCacheVersion ver,
        IgniteInternalTx tx,
        boolean updateMetrics,
        boolean evt,
        Object transformClo,
        String taskName,
        @Nullable IgniteCacheExpiryPolicy expiryPlc,
        boolean keepBinary,
        @Nullable ReaderArguments readerArgs)
        throws IgniteCheckedException, GridCacheEntryRemovedException;
    /**
     * @param updateMetrics If {@code true} then metrics should be updated.
     * @param evt Flag to signal event notification.
     * @param taskName Task name.
     * @param expiryPlc Expiry policy.
     * @param keepBinary Keep binary flag.
     * @param readerArgs Reader will be added if not null.
     * @throws IgniteCheckedException If loading value failed.
     * @throws GridCacheEntryRemovedException If entry was removed.
     * @return Cached value, entry version and flag indicating if entry was reserved.
     */
    public EntryGetResult innerGetAndReserveForLoad(boolean updateMetrics,
        boolean evt,
        String taskName,
        @Nullable IgniteCacheExpiryPolicy expiryPlc,
        boolean keepBinary,
        @Nullable ReaderArguments readerArgs) throws IgniteCheckedException, GridCacheEntryRemovedException;
    /**
     * @param ver Expected entry version.
     */
    public void clearReserveForLoad(GridCacheVersion ver);
    /**
     * Reloads entry from underlying storage.
     *
     * @return Reloaded value.
     * @throws IgniteCheckedException If reload failed.
     * @throws GridCacheEntryRemovedException If entry has been removed.
     */
    @Nullable public CacheObject innerReload() throws IgniteCheckedException, GridCacheEntryRemovedException;
    /**
     * @param tx Cache transaction.
     * @param evtNodeId ID of node responsible for this change.
     * @param affNodeId Partitioned node ID.
     * @param val Value to set.
     * @param writeThrough If {@code true} then persist to storage.
     * @param retval {@code True} if value should be returned (and unmarshalled if needed).
     * @param ttl Time to live.
     * @param evt Flag to signal event notification.
     * @param metrics Flag to signal metrics update.
     * @param keepBinary Keep binary flag.
     * @param oldValPresent {@code True} if oldValue present.
     * @param oldVal Old value.
     * @param topVer Topology version.
     * @param drType DR type.
     * @param drExpireTime DR expire time (if any).
     * @param explicitVer Explicit version (if any).
     * @param taskName Task name.
     * @param dhtVer Dht version for near cache entry.
     * @param updateCntr Update counter.
     * @return Tuple containing success flag and old value. If success is {@code false},
     *      then value is {@code null}.
     * @throws IgniteCheckedException If storing value failed.
     * @throws GridCacheEntryRemovedException If entry has been removed.
     */
    public GridCacheUpdateTxResult innerSet(
        @Nullable IgniteInternalTx tx,
        UUID evtNodeId,
        UUID affNodeId,
        @Nullable CacheObject val,
        boolean writeThrough,
        boolean retval,
        long ttl,
        boolean evt,
        boolean metrics,
        boolean keepBinary,
        boolean oldValPresent,
        @Nullable CacheObject oldVal,
        AffinityTopologyVersion topVer,
        GridDrType drType,
        long drExpireTime,
        @Nullable GridCacheVersion explicitVer,
        String taskName,
        @Nullable GridCacheVersion dhtVer,
        @Nullable Long updateCntr
    ) throws IgniteCheckedException, GridCacheEntryRemovedException;
    /**
     * @param tx Cache transaction.
     * @param evtNodeId ID of node responsible for this change.
     * @param affNodeId Partitioned node ID.
     * @param retval {@code True} if value should be returned (and unmarshalled if needed).
     * @param evt Flag to signal event notification.
     * @param metrics Flag to signal metrics notification.
     * @param keepBinary Keep binary flag.
     * @param oldValPresent {@code True} if oldValue present.
     * @param oldVal Old value.
     * @param topVer Topology version.
     * @param drType DR type.
     * @param explicitVer Explicit version (if any).
     * @param taskName Task name.
     * @param dhtVer Dht version for near cache entry.
     * @param updateCntr Update counter.
     * @return Tuple containing success flag and old value. If success is {@code false},
     *      then value is {@code null}.
     * @throws IgniteCheckedException If remove failed.
     * @throws GridCacheEntryRemovedException If entry has been removed.
     */
    public GridCacheUpdateTxResult innerRemove(
        @Nullable IgniteInternalTx tx,
        UUID evtNodeId,
        UUID affNodeId,
        boolean retval,
        boolean evt,
        boolean metrics,
        boolean keepBinary,
        boolean oldValPresent,
        @Nullable CacheObject oldVal,
        AffinityTopologyVersion topVer,
        GridDrType drType,
        @Nullable GridCacheVersion explicitVer,
        String taskName,
        @Nullable GridCacheVersion dhtVer,
        @Nullable Long updateCntr
    ) throws IgniteCheckedException, GridCacheEntryRemovedException;
    /**
     * @param ver Cache version to set. Entry will be updated only if current version is less then passed version.
     * @param evtNodeId Event node ID.
     * @param affNodeId Affinity node ID.
     * @param op Update operation.
     * @param val Value. Type depends on operation.
     * @param invokeArgs Optional arguments for entry processor.
     * @param writeThrough Write through flag.
     * @param readThrough Read through flag.
     * @param retval Return value flag.
     * @param keepBinary Keep binary flag.
     * @param expiryPlc Expiry policy.
     * @param evt Event flag.
     * @param metrics Metrics update flag.
     * @param primary If update is performed on primary node (the one which assigns version).
     * @param checkVer Whether update should check current version and ignore update if current version is
     *      greater than passed in.
     * @param readRepairRecovery Recovery on Read Repair.
     * @param topVer Topology version.
     * @param filter Optional filter to check.
     * @param drType DR type.
     * @param conflictTtl Conflict TTL (if any).
     * @param conflictExpireTime Conflict expire time (if any).
     * @param conflictVer DR version (if any).
     * @param conflictResolve If {@code true} then performs conflicts resolution.
     * @param intercept If {@code true} then calls cache interceptor.
     * @param taskName Task name.
     * @param prevVal Previous value, if any.
     * @param updateCntr Update counter.
     * @param fut Dht atomic future.
     * @param transformOp {@code True} if transform operation caused update.
     * @return Tuple where first value is flag showing whether operation succeeded,
     *      second value is old entry value if return value is requested, third is updated entry value,
     *      fourth is the version to enqueue for deferred delete the fifth is DR conflict context
     *      or {@code null} if conflict resolution was not performed, the last boolean - whether update should be
     *      propagated to backups or not.
     * @throws IgniteCheckedException If update failed.
     * @throws GridCacheEntryRemovedException If entry is obsolete.
     */
    public GridCacheUpdateAtomicResult innerUpdate(
        GridCacheVersion ver,
        UUID evtNodeId,
        UUID affNodeId,
        GridCacheOperation op,
        @Nullable Object val,
        @Nullable Object[] invokeArgs,
        boolean writeThrough,
        boolean readThrough,
        boolean retval,
        boolean keepBinary,
        @Nullable IgniteCacheExpiryPolicy expiryPlc,
        boolean evt,
        boolean metrics,
        boolean primary,
        boolean checkVer,
        boolean readRepairRecovery,
        AffinityTopologyVersion topVer,
        @Nullable CacheEntryPredicate[] filter,
        GridDrType drType,
        long conflictTtl,
        long conflictExpireTime,
        @Nullable GridCacheVersion conflictVer,
        boolean conflictResolve,
        boolean intercept,
        String taskName,
        @Nullable CacheObject prevVal,
        @Nullable Long updateCntr,
        @Nullable GridDhtAtomicAbstractUpdateFuture fut,
        boolean transformOp
    ) throws IgniteCheckedException, GridCacheEntryRemovedException;
    /**
     * Marks entry as obsolete and, if possible or required, removes it
     * from swap storage.
     *
     * @param ver Obsolete version.
     * @param readers Flag to clear readers as well.
     * @throws IgniteCheckedException If failed to remove from swap.
     * @return {@code True} if entry was not being used, passed the filter and could be removed.
     */
    public boolean clear(GridCacheVersion ver, boolean readers) throws IgniteCheckedException;
    /**
     * This lock is called by transaction manager during prepare step
     * for optimistic transactions.
     *
     * @param tx Cache transaction.
     * @param timeout Timeout for lock acquisition.
     * @param serOrder Version for serializable transactions ordering.
     * @param serReadVer Optional read entry version for optimistic serializable transaction.
     * @param read Read lock flag.
     * @return {@code True} if lock was acquired, {@code false} otherwise.
     * @throws GridCacheEntryRemovedException If this entry is obsolete.
     * @throws GridDistributedLockCancelledException If lock has been cancelled.
     */
    public boolean tmLock(IgniteInternalTx tx,
        long timeout,
        @Nullable GridCacheVersion serOrder,
        @Nullable GridCacheVersion serReadVer,
        boolean read
    ) throws GridCacheEntryRemovedException, GridDistributedLockCancelledException;
    /**
     * Unlocks acquired lock.
     *
     * @param tx Cache transaction.
     * @throws GridCacheEntryRemovedException If this entry has been removed from cache.
     */
    public void txUnlock(IgniteInternalTx tx) throws GridCacheEntryRemovedException;
    /**
     * @param ver Version of the lock to remove.
     * @return {@code True} If lock has been removed.
     * @throws GridCacheEntryRemovedException If this entry has been removed from cache.
     */
    public boolean removeLock(GridCacheVersion ver) throws GridCacheEntryRemovedException;
    /**
     * Sets obsolete flag if possible.
     *
     * @param ver Version to set as obsolete.
     * @return {@code True} if entry is obsolete, {@code false} if
     *      entry is still used by other threads or nodes.
     */
    public boolean markObsolete(GridCacheVersion ver);
    /**
     * Sets obsolete flag if entry value is {@code null} or entry is expired and no
     * locks are held.
     *
     * @param ver Version to set as obsolete.
     * @return {@code True} if entry was marked obsolete.
     * @throws IgniteCheckedException If failed.
     */
    public boolean markObsoleteIfEmpty(@Nullable GridCacheVersion ver) throws IgniteCheckedException;
    /**
     * Sets obsolete flag if entry version equals to {@code ver}.
     *
     * @param ver Version to compare with.
     * @return {@code True} if marked obsolete.
     */
    public boolean markObsoleteVersion(GridCacheVersion ver);
    /**
     * @return Version.
     * @throws GridCacheEntryRemovedException If entry has been removed.
     */
    public GridCacheVersion version() throws GridCacheEntryRemovedException;
    /**
     * Checks if there was read/write conflict in serializable transaction.
     *
     * @param serReadVer Version read in serializable transaction.
     * @return {@code True} if version check passed.
     * @throws GridCacheEntryRemovedException If entry has been removed.
     */
    public boolean checkSerializableReadVersion(GridCacheVersion serReadVer) throws GridCacheEntryRemovedException;
    /**
     * Peeks into entry without loading value or updating statistics.
     *
     * @param heap Read from heap flag.
     * @param offheap Read from offheap flag.
     * @param topVer Topology version.
     * @param plc Expiry policy if TTL should be updated.
     * @return Value.
     * @throws GridCacheEntryRemovedException If entry has been removed.
     * @throws IgniteCheckedException If failed.
     */
    @Nullable public CacheObject peek(boolean heap,
        boolean offheap,
        AffinityTopologyVersion topVer,
        @Nullable IgniteCacheExpiryPolicy plc)
        throws GridCacheEntryRemovedException, IgniteCheckedException;
    /**
     * Peeks into entry without loading value or updating statistics.
     *
     * @return Value.
     * @throws GridCacheEntryRemovedException If entry has been removed.
     * @throws IgniteCheckedException If failed.
     */
    @Nullable public CacheObject peek()
        throws GridCacheEntryRemovedException, IgniteCheckedException;
    /**
     * Sets new value if current version is {@code 0}
     *
     * @param val New value.
     * @param ver Version to use.
     * @param ttl Time to live.
     * @param expireTime Expiration time.
     * @param preload Flag indicating whether entry is being preloaded.
     * @param topVer Topology version.
     * @param drType DR type.
     * @param fromStore {@code True} if value was loaded from store.
     * @param primary {@code True} if current node is primary for partition.
     * @return {@code True} if initial value was set.
     * @throws IgniteCheckedException In case of error.
     * @throws GridCacheEntryRemovedException If entry was removed.
     */
    default boolean initialValue(CacheObject val,
        GridCacheVersion ver,
        long ttl,
        long expireTime,
        boolean preload,
        AffinityTopologyVersion topVer,
        GridDrType drType,
        boolean fromStore,
        boolean primary) throws IgniteCheckedException, GridCacheEntryRemovedException {
        return initialValue(val, ver, ttl, expireTime, preload, topVer, drType, fromStore, primary, null);
    }
    /**
     * Sets new value if current version is {@code 0}
     *
     * @param val New value.
     * @param ver Version to use.
     * @param ttl Time to live.
     * @param expireTime Expiration time.
     * @param preload Flag indicating whether entry is being preloaded.
     * @param topVer Topology version.
     * @param drType DR type.
     * @param fromStore {@code True} if value was loaded from store.
     * @param primary {@code True} if current node is primary for partition.
     * @param row Pre-created data row, associated with this cache entry.
     * @return {@code True} if initial value was set.
     * @throws IgniteCheckedException In case of error.
     * @throws GridCacheEntryRemovedException If entry was removed.
     */
    public boolean initialValue(CacheObject val,
        GridCacheVersion ver,
        long ttl,
        long expireTime,
        boolean preload,
        AffinityTopologyVersion topVer,
        GridDrType drType,
        boolean fromStore,
        boolean primary,
        @Nullable CacheDataRow row) throws IgniteCheckedException, GridCacheEntryRemovedException;
    /**
     * Create versioned entry for this cache entry.
     *
     * @param keepBinary Keep binary flag.
     * @return Versioned entry.
     * @throws IgniteCheckedException In case of error.
     * @throws GridCacheEntryRemovedException If entry was removed.
     */
    public <K, V> GridCacheVersionedEntryEx<K, V> versionedEntry(final boolean keepBinary)
        throws IgniteCheckedException, GridCacheEntryRemovedException;
    /**
     * Sets new value if passed in version matches the current version
     * (used for read-through only).
     *
     * @param val New value.
     * @param curVer Version to match or {@code null} if match is not required.
     * @param newVer Version to set.
     * @param loadExpiryPlc Expiry policy if entry is loaded from store.
     * @param readerArgs Reader will be added if not null.
     * @return Current version and value.
     * @throws IgniteCheckedException If index could not be updated.
     * @throws GridCacheEntryRemovedException If entry was removed.
     */
    public EntryGetResult versionedValue(CacheObject val,
        @Nullable GridCacheVersion curVer,
        @Nullable GridCacheVersion newVer,
        @Nullable IgniteCacheExpiryPolicy loadExpiryPlc,
        @Nullable ReaderArguments readerArgs)
        throws IgniteCheckedException, GridCacheEntryRemovedException;
    /**
     * Checks if the candidate is either owner or pending.
     *
     * @param ver Candidate version to check.
     * @return {@code True} if the candidate is either owner or pending.
     * @throws GridCacheEntryRemovedException If entry was removed.
     */
    public boolean hasLockCandidate(GridCacheVersion ver) throws GridCacheEntryRemovedException;
    /**
     * Checks if the candidate is either owner or pending.
     *
     * @param threadId ThreadId.
     * @return {@code True} if the candidate is either owner or pending.
     * @throws GridCacheEntryRemovedException If entry was removed.
     */
    public boolean hasLockCandidate(long threadId) throws GridCacheEntryRemovedException;
    /**
     * @param exclude Exclude versions.
     * @return {@code True} if lock is owned by any thread or node.
     * @throws GridCacheEntryRemovedException If entry was removed.
     */
    public boolean lockedByAny(GridCacheVersion... exclude) throws GridCacheEntryRemovedException;
    /**
     * @return {@code True} if lock is owned by current thread.
     * @throws GridCacheEntryRemovedException If entry was removed.
     */
    public boolean lockedByThread() throws GridCacheEntryRemovedException;
    /**
     * @param lockVer Lock ID.
     * @param threadId Thread ID.
     * @return {@code True} if locked either locally or by thread.
     * @throws GridCacheEntryRemovedException If removed.
     */
    public boolean lockedLocallyByIdOrThread(GridCacheVersion lockVer, long threadId)
        throws GridCacheEntryRemovedException;
    /**
     *
     * @param lockVer Lock ID to check.
     * @return {@code True} if lock is owned by candidate.
     * @throws GridCacheEntryRemovedException If entry was removed.
     */
    public boolean lockedLocally(GridCacheVersion lockVer) throws GridCacheEntryRemovedException;
    /**
     * @param threadId Thread ID to check.
     * @param exclude Version to exclude from check.
     * @return {@code True} if lock is owned by given thread.
     * @throws GridCacheEntryRemovedException If entry was removed.
     */
    public boolean lockedByThread(long threadId, GridCacheVersion exclude) throws GridCacheEntryRemovedException;
    /**
     * @param threadId Thread ID to check.
     * @return {@code True} if lock is owned by given thread.
     * @throws GridCacheEntryRemovedException If entry was removed.
     */
    public boolean lockedByThread(long threadId) throws GridCacheEntryRemovedException;
    /**
     * @param ver Version to check for ownership.
     * @return {@code True} if owner has the specified version.
     * @throws GridCacheEntryRemovedException If entry was removed.
     */
    public boolean lockedBy(GridCacheVersion ver) throws GridCacheEntryRemovedException;
    /**
     * Will not fail for removed entries.
     *
     * @param threadId Thread ID to check.
     * @return {@code True} if lock is owned by given thread.
     */
    public boolean lockedByThreadUnsafe(long threadId);
    /**
     * @param ver Version to check for ownership.
     * @return {@code True} if owner has the specified version.
     */
    public boolean lockedByUnsafe(GridCacheVersion ver);
    /**
     *
     * @param lockVer Lock ID to check.
     * @return {@code True} if lock is owned by candidate.
     */
    public boolean lockedLocallyUnsafe(GridCacheVersion lockVer);
    /**
     * @param ver Lock version to check.
     * @return {@code True} if has candidate with given lock ID.
     */
    public boolean hasLockCandidateUnsafe(GridCacheVersion ver);
    /**
     * @param threadId Thread ID.
     * @return Local candidate.
     * @throws GridCacheEntryRemovedException If entry was removed.
     */
    @Nullable public GridCacheMvccCandidate localCandidate(long threadId) throws GridCacheEntryRemovedException;
    /**
     * Gets all local candidates.
     *
     * @param exclude Versions to exclude from check.
     * @return All local candidates.
     * @throws GridCacheEntryRemovedException If entry was removed.
     */
    public Collection<GridCacheMvccCandidate> localCandidates(@Nullable GridCacheVersion... exclude)
        throws GridCacheEntryRemovedException;
    /**
     * Gets all remote versions.
     *
     * @param exclude Exclude version.
     * @return All remote versions minus the excluded ones, if any.
     */
    public Collection<GridCacheMvccCandidate> remoteMvccSnapshot(GridCacheVersion... exclude);
    /**
     * Gets lock candidate for given lock ID.
     *
     * @param ver Lock version.
     * @return Lock candidate for given ID.
     * @throws GridCacheEntryRemovedException If entry was removed.
     */
    @Nullable public GridCacheMvccCandidate candidate(GridCacheVersion ver) throws GridCacheEntryRemovedException;
    /**
     * @param nodeId Node ID.
     * @param threadId Thread ID.
     * @return Candidate.
     * @throws GridCacheEntryRemovedException If entry was removed.
     */
    @Nullable public GridCacheMvccCandidate candidate(UUID nodeId, long threadId)
        throws GridCacheEntryRemovedException;
    /**
     * @return Local owner.
     * @throws GridCacheEntryRemovedException If entry was removed.
     */
    @Nullable public GridCacheMvccCandidate localOwner() throws GridCacheEntryRemovedException;
    /**
     * @return Value bytes.
     * @throws GridCacheEntryRemovedException If entry was removed.
     */
    public CacheObject valueBytes() throws GridCacheEntryRemovedException;
    /**
     * Gets cached serialized value bytes.
     *
     * @param ver Version for which to get value bytes.
     * @return Serialized value bytes.
     * @throws IgniteCheckedException If serialization failed.
     * @throws GridCacheEntryRemovedException If entry was removed.
     */
    @Nullable public CacheObject valueBytes(@Nullable GridCacheVersion ver)
        throws IgniteCheckedException, GridCacheEntryRemovedException;
    /**
     * Update index from within entry lock, passing key, value, and expiration time to provided closure.
     *
     * @param clo Closure to apply to key, value, and expiration time.
     * @throws IgniteCheckedException If failed.
     * @throws GridCacheEntryRemovedException If entry was removed.
     */
    public void updateIndex(
        SchemaIndexCacheVisitorClosure clo
    ) throws IgniteCheckedException, GridCacheEntryRemovedException;
    /**
     * @return Expire time, without accounting for transactions or removals.
     */
    public long rawExpireTime();
    /**
     * @return Expiration time.
     * @throws GridCacheEntryRemovedException If entry was removed.
     */
    public long expireTime() throws GridCacheEntryRemovedException;
    /**
     * @return Expiration time. Does not check for entry obsolete flag.
     */
    public long expireTimeUnlocked();
    /**
     * Callback from ttl processor to cache entry indicating that entry is expired.
     *
     * @param obsoleteVer Version to set obsolete if entry is expired.
     * @throws GridCacheEntryRemovedException If entry was removed.
     * @return {@code True} if this entry was expired as a result of this call.
     */
    public boolean onTtlExpired(GridCacheVersion obsoleteVer) throws GridCacheEntryRemovedException;
    /**
     * @return Time to live, without accounting for transactions or removals.
     */
    public long rawTtl();
    /**
     * @return Time to live.
     * @throws GridCacheEntryRemovedException If entry was removed.
     */
    public long ttl() throws GridCacheEntryRemovedException;
    /**
     * @param ver Version.
     * @param expiryPlc Expiry policy.
     * @throws GridCacheEntryRemovedException If entry was removed.
     */
    public void updateTtl(GridCacheVersion ver, IgniteCacheExpiryPolicy expiryPlc) throws GridCacheEntryRemovedException;
    /**
     * @param ver Version.
     * @param ttl Time to live.
     */
    public void updateTtl(@Nullable GridCacheVersion ver, long ttl) throws GridCacheEntryRemovedException;
    /**
     * @return Value.
     * @throws IgniteCheckedException If failed to read from swap storage.
     * @throws GridCacheEntryRemovedException If entry was removed.
     */
    @Nullable public CacheObject unswap()
        throws IgniteCheckedException, GridCacheEntryRemovedException;
    /**
     * @param row Already extracted value.
     * @return Value.
     * @throws IgniteCheckedException If failed to read from swap storage.
     * @throws GridCacheEntryRemovedException If entry was removed.
     */
    @Nullable public CacheObject unswap(CacheDataRow row)
        throws IgniteCheckedException, GridCacheEntryRemovedException;
    /**
     * Unswap ignoring flags.
     *
     * @param needVal If {@code false} then do not need to deserialize value during unswap.
     * @return Value.
     * @throws IgniteCheckedException If failed.
     * @throws GridCacheEntryRemovedException If entry was removed.
     */
    @Nullable public CacheObject unswap(boolean needVal)
        throws IgniteCheckedException, GridCacheEntryRemovedException;
    /**
     * Tests whether or not given metadata is set.
     *
     * @param key Key of the metadata to test.
     * @return Whether or not given metadata is set.
     */
    public boolean hasMeta(int key);
    /**
     * Gets metadata by key.
     *
     * @param key Metadata key.
     * @param <V> Type of the value.
     * @return Metadata value or {@code null}.
     */
    @Nullable public <V> V meta(int key);
    /**
     * Adds a new metadata.
     *
     * @param key Metadata key.
     * @param val Metadata value.
     * @param <V> Type of the value.
     * @return Metadata previously associated with given name, or
     *      {@code null} if there was none.
     */
    @Nullable public <V> V addMeta(int key, V val);
    /**
     * Adds given metadata value only if it was absent.
     *
     * @param key Metadata key.
     * @param val Value to add if it's not attached already.
     * @param <V> Type of the value.
     * @return {@code null} if new value was put, or current value if put didn't happen.
     */
    @Nullable public <V> V putMetaIfAbsent(int key, V val);
    /**
     * Replaces given metadata with new {@code newVal} value only if its current value
     * is equal to {@code curVal}. Otherwise, it is no-op.
     *
     * @param key Key of the metadata.
     * @param curVal Current value to check.
     * @param newVal New value.
     * @return {@code true} if replacement occurred, {@code false} otherwise.
     */
    public <V> boolean replaceMeta(int key, V curVal, V newVal);
    /**
     * Removes metadata by key.
     *
     * @param key Key of the metadata to remove.
     * @param <V> Type of the value.
     * @return Value of removed metadata or {@code null}.
     */
    @Nullable public <V> V removeMeta(int key);
    /**
     * Removes metadata only if its current value is equal to {@code val} passed in.
     *
     * @param key key of metadata attribute.
     * @param val Value to compare.
     * @param <V> Value type.
     * @return {@code True} if value was removed, {@code false} otherwise.
     */
    public <V> boolean removeMeta(int key, V val);
    /**
     * Calls {@link GridDhtLocalPartition#onUnlock()} for this entry's partition.
     */
    public void onUnlock();
    /**
     * Locks entry to protect from concurrent access.
     * Intended to be used instead of inherent java synchronization.
     * This allows to separate locking from unlocking in time and/or code units.
     *
     * @see GridCacheEntryEx#unlockEntry()
     */
    public void lockEntry();
    /**
     * Unlocks entry previously locked by {@link GridCacheEntryEx#lockEntry()}.
     */
    public void unlockEntry();
    /**
     * Locks entry to protect from concurrent access. Intended to be used instead of inherent java synchronization. This
     * allows to separate locking from unlocking in time and/or code units.
     *
     * @param timeout period of waiting in millis;
     * @return {@code true} if the lock was free and was acquired by the current thread, or the lock was already held by
     *      the current thread; and {@code false} if the waiting time elapsed before the lock could be acquired
     * @see GridCacheEntryEx#unlockEntry()
     */
    public boolean tryLockEntry(long timeout);
    /**
     * Tests whether the entry is locked currently.
     *
     * @see GridCacheEntryEx#lockEntry()
     * @see GridCacheEntryEx#unlockEntry()
     *
     * @return {@code True} if the entry is locked.
     */
    public boolean lockedByCurrentThread();
    /**
     * Touch this entry in its context's eviction manager.
     */
    public void touch();
}
|
googleapis/google-cloud-java | 37,958 | java-document-ai/proto-google-cloud-document-ai-v1/src/main/java/com/google/cloud/documentai/v1/ListProcessorVersionsResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/documentai/v1/document_processor_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.documentai.v1;
/**
*
*
* <pre>
* Response message for the
* [ListProcessorVersions][google.cloud.documentai.v1.DocumentProcessorService.ListProcessorVersions]
* method.
* </pre>
*
* Protobuf type {@code google.cloud.documentai.v1.ListProcessorVersionsResponse}
*/
public final class ListProcessorVersionsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.documentai.v1.ListProcessorVersionsResponse)
ListProcessorVersionsResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListProcessorVersionsResponse.newBuilder() to construct.
private ListProcessorVersionsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListProcessorVersionsResponse() {
processorVersions_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListProcessorVersionsResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.documentai.v1.DocumentAiProcessorService
.internal_static_google_cloud_documentai_v1_ListProcessorVersionsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.documentai.v1.DocumentAiProcessorService
.internal_static_google_cloud_documentai_v1_ListProcessorVersionsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.documentai.v1.ListProcessorVersionsResponse.class,
com.google.cloud.documentai.v1.ListProcessorVersionsResponse.Builder.class);
}
public static final int PROCESSOR_VERSIONS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.documentai.v1.ProcessorVersion> processorVersions_;
/**
*
*
* <pre>
* The list of processors.
* </pre>
*
* <code>repeated .google.cloud.documentai.v1.ProcessorVersion processor_versions = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.documentai.v1.ProcessorVersion>
getProcessorVersionsList() {
return processorVersions_;
}
/**
*
*
* <pre>
* The list of processors.
* </pre>
*
* <code>repeated .google.cloud.documentai.v1.ProcessorVersion processor_versions = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.documentai.v1.ProcessorVersionOrBuilder>
getProcessorVersionsOrBuilderList() {
return processorVersions_;
}
/**
*
*
* <pre>
* The list of processors.
* </pre>
*
* <code>repeated .google.cloud.documentai.v1.ProcessorVersion processor_versions = 1;</code>
*/
@java.lang.Override
public int getProcessorVersionsCount() {
return processorVersions_.size();
}
/**
*
*
* <pre>
* The list of processors.
* </pre>
*
* <code>repeated .google.cloud.documentai.v1.ProcessorVersion processor_versions = 1;</code>
*/
@java.lang.Override
public com.google.cloud.documentai.v1.ProcessorVersion getProcessorVersions(int index) {
return processorVersions_.get(index);
}
/**
*
*
* <pre>
* The list of processors.
* </pre>
*
* <code>repeated .google.cloud.documentai.v1.ProcessorVersion processor_versions = 1;</code>
*/
@java.lang.Override
public com.google.cloud.documentai.v1.ProcessorVersionOrBuilder getProcessorVersionsOrBuilder(
int index) {
return processorVersions_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* Points to the next processor, otherwise empty.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* Points to the next processor, otherwise empty.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < processorVersions_.size(); i++) {
output.writeMessage(1, processorVersions_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < processorVersions_.size(); i++) {
size +=
com.google.protobuf.CodedOutputStream.computeMessageSize(1, processorVersions_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.documentai.v1.ListProcessorVersionsResponse)) {
return super.equals(obj);
}
com.google.cloud.documentai.v1.ListProcessorVersionsResponse other =
(com.google.cloud.documentai.v1.ListProcessorVersionsResponse) obj;
if (!getProcessorVersionsList().equals(other.getProcessorVersionsList())) return false;
if (!getNextPageToken().equals(other.getNextPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getProcessorVersionsCount() > 0) {
hash = (37 * hash) + PROCESSOR_VERSIONS_FIELD_NUMBER;
hash = (53 * hash) + getProcessorVersionsList().hashCode();
}
hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getNextPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.documentai.v1.ListProcessorVersionsResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.documentai.v1.ListProcessorVersionsResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.documentai.v1.ListProcessorVersionsResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.documentai.v1.ListProcessorVersionsResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.documentai.v1.ListProcessorVersionsResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.documentai.v1.ListProcessorVersionsResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.documentai.v1.ListProcessorVersionsResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.documentai.v1.ListProcessorVersionsResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.documentai.v1.ListProcessorVersionsResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.documentai.v1.ListProcessorVersionsResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.documentai.v1.ListProcessorVersionsResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.documentai.v1.ListProcessorVersionsResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.documentai.v1.ListProcessorVersionsResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Response message for the
* [ListProcessorVersions][google.cloud.documentai.v1.DocumentProcessorService.ListProcessorVersions]
* method.
* </pre>
*
* Protobuf type {@code google.cloud.documentai.v1.ListProcessorVersionsResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.documentai.v1.ListProcessorVersionsResponse)
com.google.cloud.documentai.v1.ListProcessorVersionsResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.documentai.v1.DocumentAiProcessorService
.internal_static_google_cloud_documentai_v1_ListProcessorVersionsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.documentai.v1.DocumentAiProcessorService
.internal_static_google_cloud_documentai_v1_ListProcessorVersionsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.documentai.v1.ListProcessorVersionsResponse.class,
com.google.cloud.documentai.v1.ListProcessorVersionsResponse.Builder.class);
}
// Construct using com.google.cloud.documentai.v1.ListProcessorVersionsResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (processorVersionsBuilder_ == null) {
processorVersions_ = java.util.Collections.emptyList();
} else {
processorVersions_ = null;
processorVersionsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.documentai.v1.DocumentAiProcessorService
.internal_static_google_cloud_documentai_v1_ListProcessorVersionsResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.documentai.v1.ListProcessorVersionsResponse
getDefaultInstanceForType() {
return com.google.cloud.documentai.v1.ListProcessorVersionsResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.documentai.v1.ListProcessorVersionsResponse build() {
com.google.cloud.documentai.v1.ListProcessorVersionsResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.documentai.v1.ListProcessorVersionsResponse buildPartial() {
com.google.cloud.documentai.v1.ListProcessorVersionsResponse result =
new com.google.cloud.documentai.v1.ListProcessorVersionsResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.cloud.documentai.v1.ListProcessorVersionsResponse result) {
if (processorVersionsBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
processorVersions_ = java.util.Collections.unmodifiableList(processorVersions_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.processorVersions_ = processorVersions_;
} else {
result.processorVersions_ = processorVersionsBuilder_.build();
}
}
private void buildPartial0(
com.google.cloud.documentai.v1.ListProcessorVersionsResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.documentai.v1.ListProcessorVersionsResponse) {
return mergeFrom((com.google.cloud.documentai.v1.ListProcessorVersionsResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.documentai.v1.ListProcessorVersionsResponse other) {
if (other
== com.google.cloud.documentai.v1.ListProcessorVersionsResponse.getDefaultInstance())
return this;
if (processorVersionsBuilder_ == null) {
if (!other.processorVersions_.isEmpty()) {
if (processorVersions_.isEmpty()) {
processorVersions_ = other.processorVersions_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureProcessorVersionsIsMutable();
processorVersions_.addAll(other.processorVersions_);
}
onChanged();
}
} else {
if (!other.processorVersions_.isEmpty()) {
if (processorVersionsBuilder_.isEmpty()) {
processorVersionsBuilder_.dispose();
processorVersionsBuilder_ = null;
processorVersions_ = other.processorVersions_;
bitField0_ = (bitField0_ & ~0x00000001);
processorVersionsBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getProcessorVersionsFieldBuilder()
: null;
} else {
processorVersionsBuilder_.addAllMessages(other.processorVersions_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.cloud.documentai.v1.ProcessorVersion m =
input.readMessage(
com.google.cloud.documentai.v1.ProcessorVersion.parser(),
extensionRegistry);
if (processorVersionsBuilder_ == null) {
ensureProcessorVersionsIsMutable();
processorVersions_.add(m);
} else {
processorVersionsBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.cloud.documentai.v1.ProcessorVersion> processorVersions_ =
java.util.Collections.emptyList();
private void ensureProcessorVersionsIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
processorVersions_ =
new java.util.ArrayList<com.google.cloud.documentai.v1.ProcessorVersion>(
processorVersions_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.documentai.v1.ProcessorVersion,
com.google.cloud.documentai.v1.ProcessorVersion.Builder,
com.google.cloud.documentai.v1.ProcessorVersionOrBuilder>
processorVersionsBuilder_;
/**
*
*
* <pre>
* The list of processors.
* </pre>
*
* <code>repeated .google.cloud.documentai.v1.ProcessorVersion processor_versions = 1;</code>
*/
public java.util.List<com.google.cloud.documentai.v1.ProcessorVersion>
getProcessorVersionsList() {
if (processorVersionsBuilder_ == null) {
return java.util.Collections.unmodifiableList(processorVersions_);
} else {
return processorVersionsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* The list of processors.
* </pre>
*
* <code>repeated .google.cloud.documentai.v1.ProcessorVersion processor_versions = 1;</code>
*/
public int getProcessorVersionsCount() {
if (processorVersionsBuilder_ == null) {
return processorVersions_.size();
} else {
return processorVersionsBuilder_.getCount();
}
}
/**
*
*
* <pre>
* The list of processors.
* </pre>
*
* <code>repeated .google.cloud.documentai.v1.ProcessorVersion processor_versions = 1;</code>
*/
public com.google.cloud.documentai.v1.ProcessorVersion getProcessorVersions(int index) {
if (processorVersionsBuilder_ == null) {
return processorVersions_.get(index);
} else {
return processorVersionsBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* The list of processors.
* </pre>
*
* <code>repeated .google.cloud.documentai.v1.ProcessorVersion processor_versions = 1;</code>
*/
public Builder setProcessorVersions(
int index, com.google.cloud.documentai.v1.ProcessorVersion value) {
if (processorVersionsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureProcessorVersionsIsMutable();
processorVersions_.set(index, value);
onChanged();
} else {
processorVersionsBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The list of processors.
* </pre>
*
* <code>repeated .google.cloud.documentai.v1.ProcessorVersion processor_versions = 1;</code>
*/
public Builder setProcessorVersions(
int index, com.google.cloud.documentai.v1.ProcessorVersion.Builder builderForValue) {
if (processorVersionsBuilder_ == null) {
ensureProcessorVersionsIsMutable();
processorVersions_.set(index, builderForValue.build());
onChanged();
} else {
processorVersionsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of processors.
* </pre>
*
* <code>repeated .google.cloud.documentai.v1.ProcessorVersion processor_versions = 1;</code>
*/
public Builder addProcessorVersions(com.google.cloud.documentai.v1.ProcessorVersion value) {
if (processorVersionsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureProcessorVersionsIsMutable();
processorVersions_.add(value);
onChanged();
} else {
processorVersionsBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* The list of processors.
* </pre>
*
* <code>repeated .google.cloud.documentai.v1.ProcessorVersion processor_versions = 1;</code>
*/
public Builder addProcessorVersions(
int index, com.google.cloud.documentai.v1.ProcessorVersion value) {
if (processorVersionsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureProcessorVersionsIsMutable();
processorVersions_.add(index, value);
onChanged();
} else {
processorVersionsBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The list of processors.
* </pre>
*
* <code>repeated .google.cloud.documentai.v1.ProcessorVersion processor_versions = 1;</code>
*/
public Builder addProcessorVersions(
com.google.cloud.documentai.v1.ProcessorVersion.Builder builderForValue) {
if (processorVersionsBuilder_ == null) {
ensureProcessorVersionsIsMutable();
processorVersions_.add(builderForValue.build());
onChanged();
} else {
processorVersionsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of processors.
* </pre>
*
* <code>repeated .google.cloud.documentai.v1.ProcessorVersion processor_versions = 1;</code>
*/
public Builder addProcessorVersions(
int index, com.google.cloud.documentai.v1.ProcessorVersion.Builder builderForValue) {
if (processorVersionsBuilder_ == null) {
ensureProcessorVersionsIsMutable();
processorVersions_.add(index, builderForValue.build());
onChanged();
} else {
processorVersionsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of processors.
* </pre>
*
* <code>repeated .google.cloud.documentai.v1.ProcessorVersion processor_versions = 1;</code>
*/
public Builder addAllProcessorVersions(
java.lang.Iterable<? extends com.google.cloud.documentai.v1.ProcessorVersion> values) {
if (processorVersionsBuilder_ == null) {
ensureProcessorVersionsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, processorVersions_);
onChanged();
} else {
processorVersionsBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* The list of processors.
* </pre>
*
* <code>repeated .google.cloud.documentai.v1.ProcessorVersion processor_versions = 1;</code>
*/
public Builder clearProcessorVersions() {
if (processorVersionsBuilder_ == null) {
processorVersions_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
processorVersionsBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* The list of processors.
* </pre>
*
* <code>repeated .google.cloud.documentai.v1.ProcessorVersion processor_versions = 1;</code>
*/
public Builder removeProcessorVersions(int index) {
if (processorVersionsBuilder_ == null) {
ensureProcessorVersionsIsMutable();
processorVersions_.remove(index);
onChanged();
} else {
processorVersionsBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* The list of processors.
* </pre>
*
* <code>repeated .google.cloud.documentai.v1.ProcessorVersion processor_versions = 1;</code>
*/
public com.google.cloud.documentai.v1.ProcessorVersion.Builder getProcessorVersionsBuilder(
int index) {
return getProcessorVersionsFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* The list of processors.
* </pre>
*
* <code>repeated .google.cloud.documentai.v1.ProcessorVersion processor_versions = 1;</code>
*/
public com.google.cloud.documentai.v1.ProcessorVersionOrBuilder getProcessorVersionsOrBuilder(
int index) {
if (processorVersionsBuilder_ == null) {
return processorVersions_.get(index);
} else {
return processorVersionsBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* The list of processors.
* </pre>
*
* <code>repeated .google.cloud.documentai.v1.ProcessorVersion processor_versions = 1;</code>
*/
public java.util.List<? extends com.google.cloud.documentai.v1.ProcessorVersionOrBuilder>
getProcessorVersionsOrBuilderList() {
if (processorVersionsBuilder_ != null) {
return processorVersionsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(processorVersions_);
}
}
/**
*
*
* <pre>
* The list of processors.
* </pre>
*
* <code>repeated .google.cloud.documentai.v1.ProcessorVersion processor_versions = 1;</code>
*/
public com.google.cloud.documentai.v1.ProcessorVersion.Builder addProcessorVersionsBuilder() {
return getProcessorVersionsFieldBuilder()
.addBuilder(com.google.cloud.documentai.v1.ProcessorVersion.getDefaultInstance());
}
/**
*
*
* <pre>
* The list of processors.
* </pre>
*
* <code>repeated .google.cloud.documentai.v1.ProcessorVersion processor_versions = 1;</code>
*/
public com.google.cloud.documentai.v1.ProcessorVersion.Builder addProcessorVersionsBuilder(
int index) {
return getProcessorVersionsFieldBuilder()
.addBuilder(index, com.google.cloud.documentai.v1.ProcessorVersion.getDefaultInstance());
}
/**
*
*
* <pre>
* The list of processors.
* </pre>
*
* <code>repeated .google.cloud.documentai.v1.ProcessorVersion processor_versions = 1;</code>
*/
public java.util.List<com.google.cloud.documentai.v1.ProcessorVersion.Builder>
getProcessorVersionsBuilderList() {
return getProcessorVersionsFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.documentai.v1.ProcessorVersion,
com.google.cloud.documentai.v1.ProcessorVersion.Builder,
com.google.cloud.documentai.v1.ProcessorVersionOrBuilder>
getProcessorVersionsFieldBuilder() {
if (processorVersionsBuilder_ == null) {
processorVersionsBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.documentai.v1.ProcessorVersion,
com.google.cloud.documentai.v1.ProcessorVersion.Builder,
com.google.cloud.documentai.v1.ProcessorVersionOrBuilder>(
processorVersions_,
((bitField0_ & 0x00000001) != 0),
getParentForChildren(),
isClean());
processorVersions_ = null;
}
return processorVersionsBuilder_;
}
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* Points to the next processor, otherwise empty.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Points to the next processor, otherwise empty.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Points to the next processor, otherwise empty.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Points to the next processor, otherwise empty.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* Points to the next processor, otherwise empty.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.documentai.v1.ListProcessorVersionsResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.documentai.v1.ListProcessorVersionsResponse)
private static final com.google.cloud.documentai.v1.ListProcessorVersionsResponse
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.documentai.v1.ListProcessorVersionsResponse();
}
public static com.google.cloud.documentai.v1.ListProcessorVersionsResponse getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ListProcessorVersionsResponse> PARSER =
new com.google.protobuf.AbstractParser<ListProcessorVersionsResponse>() {
@java.lang.Override
public ListProcessorVersionsResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ListProcessorVersionsResponse> parser() {
return PARSER;
}
  // Instance-level accessor required by the Message interface; returns the same
  // shared parser as parser().
  @java.lang.Override
  public com.google.protobuf.Parser<ListProcessorVersionsResponse> getParserForType() {
    return PARSER;
  }
  // Instance-level accessor required by the Message interface; returns the
  // shared default instance.
  @java.lang.Override
  public com.google.cloud.documentai.v1.ListProcessorVersionsResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 37,969 | java-gkehub/proto-google-cloud-gkehub-v1beta1/src/main/java/com/google/cloud/gkehub/v1beta1/ResourceOptions.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/gkehub/v1beta1/membership.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.gkehub.v1beta1;
/**
*
*
* <pre>
* ResourceOptions represent options for Kubernetes resource generation.
* </pre>
*
* Protobuf type {@code google.cloud.gkehub.v1beta1.ResourceOptions}
*/
public final class ResourceOptions extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.gkehub.v1beta1.ResourceOptions)
ResourceOptionsOrBuilder {
private static final long serialVersionUID = 0L;
// Use ResourceOptions.newBuilder() to construct.
private ResourceOptions(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ResourceOptions() {
connectVersion_ = "";
k8SVersion_ = "";
k8SGitVersion_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ResourceOptions();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.gkehub.v1beta1.MembershipOuterClass
.internal_static_google_cloud_gkehub_v1beta1_ResourceOptions_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.gkehub.v1beta1.MembershipOuterClass
.internal_static_google_cloud_gkehub_v1beta1_ResourceOptions_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.gkehub.v1beta1.ResourceOptions.class,
com.google.cloud.gkehub.v1beta1.ResourceOptions.Builder.class);
}
public static final int CONNECT_VERSION_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object connectVersion_ = "";
/**
*
*
* <pre>
* Optional. The Connect agent version to use for connect_resources. Defaults
* to the latest GKE Connect version. The version must be a currently
* supported version, obsolete versions will be rejected.
* </pre>
*
* <code>string connect_version = 1 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The connectVersion.
*/
@java.lang.Override
public java.lang.String getConnectVersion() {
java.lang.Object ref = connectVersion_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
connectVersion_ = s;
return s;
}
}
/**
*
*
* <pre>
* Optional. The Connect agent version to use for connect_resources. Defaults
* to the latest GKE Connect version. The version must be a currently
* supported version, obsolete versions will be rejected.
* </pre>
*
* <code>string connect_version = 1 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for connectVersion.
*/
@java.lang.Override
public com.google.protobuf.ByteString getConnectVersionBytes() {
java.lang.Object ref = connectVersion_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
connectVersion_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int V1BETA1_CRD_FIELD_NUMBER = 2;
private boolean v1Beta1Crd_ = false;
/**
*
*
* <pre>
* Optional. Use `apiextensions/v1beta1` instead of `apiextensions/v1` for
* CustomResourceDefinition resources.
* This option should be set for clusters with Kubernetes apiserver versions
* <1.16.
* </pre>
*
* <code>bool v1beta1_crd = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The v1beta1Crd.
*/
@java.lang.Override
public boolean getV1Beta1Crd() {
return v1Beta1Crd_;
}
public static final int K8S_VERSION_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object k8SVersion_ = "";
/**
*
*
* <pre>
* Optional. Major and minor version of the Kubernetes cluster. This is only
* used to determine which version to use for the CustomResourceDefinition
* resources, `apiextensions/v1beta1` or`apiextensions/v1`.
* </pre>
*
* <code>string k8s_version = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The k8sVersion.
*/
@java.lang.Override
public java.lang.String getK8SVersion() {
java.lang.Object ref = k8SVersion_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
k8SVersion_ = s;
return s;
}
}
/**
*
*
* <pre>
* Optional. Major and minor version of the Kubernetes cluster. This is only
* used to determine which version to use for the CustomResourceDefinition
* resources, `apiextensions/v1beta1` or`apiextensions/v1`.
* </pre>
*
* <code>string k8s_version = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for k8sVersion.
*/
@java.lang.Override
public com.google.protobuf.ByteString getK8SVersionBytes() {
java.lang.Object ref = k8SVersion_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
k8SVersion_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int K8S_GIT_VERSION_FIELD_NUMBER = 4;
@SuppressWarnings("serial")
private volatile java.lang.Object k8SGitVersion_ = "";
/**
*
*
* <pre>
* Optional. Git version of the Kubernetes cluster. This is only used to gate
* the Connect Agent migration to svc.id.goog on GDC-SO 1.33.100 patch and
* above.
* </pre>
*
* <code>string k8s_git_version = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The k8sGitVersion.
*/
@java.lang.Override
public java.lang.String getK8SGitVersion() {
java.lang.Object ref = k8SGitVersion_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
k8SGitVersion_ = s;
return s;
}
}
/**
*
*
* <pre>
* Optional. Git version of the Kubernetes cluster. This is only used to gate
* the Connect Agent migration to svc.id.goog on GDC-SO 1.33.100 patch and
* above.
* </pre>
*
* <code>string k8s_git_version = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for k8sGitVersion.
*/
@java.lang.Override
public com.google.protobuf.ByteString getK8SGitVersionBytes() {
java.lang.Object ref = k8SGitVersion_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
k8SGitVersion_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(connectVersion_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, connectVersion_);
}
if (v1Beta1Crd_ != false) {
output.writeBool(2, v1Beta1Crd_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(k8SVersion_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 3, k8SVersion_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(k8SGitVersion_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 4, k8SGitVersion_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(connectVersion_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, connectVersion_);
}
if (v1Beta1Crd_ != false) {
size += com.google.protobuf.CodedOutputStream.computeBoolSize(2, v1Beta1Crd_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(k8SVersion_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, k8SVersion_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(k8SGitVersion_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, k8SGitVersion_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.gkehub.v1beta1.ResourceOptions)) {
return super.equals(obj);
}
com.google.cloud.gkehub.v1beta1.ResourceOptions other =
(com.google.cloud.gkehub.v1beta1.ResourceOptions) obj;
if (!getConnectVersion().equals(other.getConnectVersion())) return false;
if (getV1Beta1Crd() != other.getV1Beta1Crd()) return false;
if (!getK8SVersion().equals(other.getK8SVersion())) return false;
if (!getK8SGitVersion().equals(other.getK8SGitVersion())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + CONNECT_VERSION_FIELD_NUMBER;
hash = (53 * hash) + getConnectVersion().hashCode();
hash = (37 * hash) + V1BETA1_CRD_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getV1Beta1Crd());
hash = (37 * hash) + K8S_VERSION_FIELD_NUMBER;
hash = (53 * hash) + getK8SVersion().hashCode();
hash = (37 * hash) + K8S_GIT_VERSION_FIELD_NUMBER;
hash = (53 * hash) + getK8SGitVersion().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.gkehub.v1beta1.ResourceOptions parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.gkehub.v1beta1.ResourceOptions parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.gkehub.v1beta1.ResourceOptions parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.gkehub.v1beta1.ResourceOptions parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.gkehub.v1beta1.ResourceOptions parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.gkehub.v1beta1.ResourceOptions parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.gkehub.v1beta1.ResourceOptions parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.gkehub.v1beta1.ResourceOptions parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.gkehub.v1beta1.ResourceOptions parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.gkehub.v1beta1.ResourceOptions parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.gkehub.v1beta1.ResourceOptions parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.gkehub.v1beta1.ResourceOptions parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.gkehub.v1beta1.ResourceOptions prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* ResourceOptions represent options for Kubernetes resource generation.
* </pre>
*
* Protobuf type {@code google.cloud.gkehub.v1beta1.ResourceOptions}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.gkehub.v1beta1.ResourceOptions)
com.google.cloud.gkehub.v1beta1.ResourceOptionsOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.gkehub.v1beta1.MembershipOuterClass
.internal_static_google_cloud_gkehub_v1beta1_ResourceOptions_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.gkehub.v1beta1.MembershipOuterClass
.internal_static_google_cloud_gkehub_v1beta1_ResourceOptions_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.gkehub.v1beta1.ResourceOptions.class,
com.google.cloud.gkehub.v1beta1.ResourceOptions.Builder.class);
}
// Construct using com.google.cloud.gkehub.v1beta1.ResourceOptions.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
connectVersion_ = "";
v1Beta1Crd_ = false;
k8SVersion_ = "";
k8SGitVersion_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.gkehub.v1beta1.MembershipOuterClass
.internal_static_google_cloud_gkehub_v1beta1_ResourceOptions_descriptor;
}
@java.lang.Override
public com.google.cloud.gkehub.v1beta1.ResourceOptions getDefaultInstanceForType() {
return com.google.cloud.gkehub.v1beta1.ResourceOptions.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.gkehub.v1beta1.ResourceOptions build() {
com.google.cloud.gkehub.v1beta1.ResourceOptions result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.gkehub.v1beta1.ResourceOptions buildPartial() {
com.google.cloud.gkehub.v1beta1.ResourceOptions result =
new com.google.cloud.gkehub.v1beta1.ResourceOptions(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.gkehub.v1beta1.ResourceOptions result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.connectVersion_ = connectVersion_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.v1Beta1Crd_ = v1Beta1Crd_;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.k8SVersion_ = k8SVersion_;
}
if (((from_bitField0_ & 0x00000008) != 0)) {
result.k8SGitVersion_ = k8SGitVersion_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.gkehub.v1beta1.ResourceOptions) {
return mergeFrom((com.google.cloud.gkehub.v1beta1.ResourceOptions) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.gkehub.v1beta1.ResourceOptions other) {
if (other == com.google.cloud.gkehub.v1beta1.ResourceOptions.getDefaultInstance())
return this;
if (!other.getConnectVersion().isEmpty()) {
connectVersion_ = other.connectVersion_;
bitField0_ |= 0x00000001;
onChanged();
}
if (other.getV1Beta1Crd() != false) {
setV1Beta1Crd(other.getV1Beta1Crd());
}
if (!other.getK8SVersion().isEmpty()) {
k8SVersion_ = other.k8SVersion_;
bitField0_ |= 0x00000004;
onChanged();
}
if (!other.getK8SGitVersion().isEmpty()) {
k8SGitVersion_ = other.k8SGitVersion_;
bitField0_ |= 0x00000008;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
connectVersion_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 16:
{
v1Beta1Crd_ = input.readBool();
bitField0_ |= 0x00000002;
break;
} // case 16
case 26:
{
k8SVersion_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000004;
break;
} // case 26
case 34:
{
k8SGitVersion_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000008;
break;
} // case 34
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object connectVersion_ = "";
/**
*
*
* <pre>
* Optional. The Connect agent version to use for connect_resources. Defaults
* to the latest GKE Connect version. The version must be a currently
* supported version, obsolete versions will be rejected.
* </pre>
*
* <code>string connect_version = 1 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The connectVersion.
*/
public java.lang.String getConnectVersion() {
java.lang.Object ref = connectVersion_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
connectVersion_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Optional. The Connect agent version to use for connect_resources. Defaults
* to the latest GKE Connect version. The version must be a currently
* supported version, obsolete versions will be rejected.
* </pre>
*
* <code>string connect_version = 1 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for connectVersion.
*/
public com.google.protobuf.ByteString getConnectVersionBytes() {
java.lang.Object ref = connectVersion_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
connectVersion_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Optional. The Connect agent version to use for connect_resources. Defaults
* to the latest GKE Connect version. The version must be a currently
* supported version, obsolete versions will be rejected.
* </pre>
*
* <code>string connect_version = 1 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The connectVersion to set.
* @return This builder for chaining.
*/
public Builder setConnectVersion(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
connectVersion_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. The Connect agent version to use for connect_resources. Defaults
* to the latest GKE Connect version. The version must be a currently
* supported version, obsolete versions will be rejected.
* </pre>
*
* <code>string connect_version = 1 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearConnectVersion() {
connectVersion_ = getDefaultInstance().getConnectVersion();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. The Connect agent version to use for connect_resources. Defaults
* to the latest GKE Connect version. The version must be a currently
* supported version, obsolete versions will be rejected.
* </pre>
*
* <code>string connect_version = 1 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The bytes for connectVersion to set.
* @return This builder for chaining.
*/
public Builder setConnectVersionBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
connectVersion_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private boolean v1Beta1Crd_;
/**
*
*
* <pre>
* Optional. Use `apiextensions/v1beta1` instead of `apiextensions/v1` for
* CustomResourceDefinition resources.
* This option should be set for clusters with Kubernetes apiserver versions
* <1.16.
* </pre>
*
* <code>bool v1beta1_crd = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The v1beta1Crd.
*/
@java.lang.Override
public boolean getV1Beta1Crd() {
return v1Beta1Crd_;
}
/**
*
*
* <pre>
* Optional. Use `apiextensions/v1beta1` instead of `apiextensions/v1` for
* CustomResourceDefinition resources.
* This option should be set for clusters with Kubernetes apiserver versions
* <1.16.
* </pre>
*
* <code>bool v1beta1_crd = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The v1beta1Crd to set.
* @return This builder for chaining.
*/
public Builder setV1Beta1Crd(boolean value) {
v1Beta1Crd_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. Use `apiextensions/v1beta1` instead of `apiextensions/v1` for
* CustomResourceDefinition resources.
* This option should be set for clusters with Kubernetes apiserver versions
* <1.16.
* </pre>
*
* <code>bool v1beta1_crd = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearV1Beta1Crd() {
bitField0_ = (bitField0_ & ~0x00000002);
v1Beta1Crd_ = false;
onChanged();
return this;
}
private java.lang.Object k8SVersion_ = "";
/**
*
*
* <pre>
* Optional. Major and minor version of the Kubernetes cluster. This is only
* used to determine which version to use for the CustomResourceDefinition
* resources, `apiextensions/v1beta1` or`apiextensions/v1`.
* </pre>
*
* <code>string k8s_version = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The k8sVersion.
*/
public java.lang.String getK8SVersion() {
java.lang.Object ref = k8SVersion_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
k8SVersion_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Optional. Major and minor version of the Kubernetes cluster. This is only
* used to determine which version to use for the CustomResourceDefinition
* resources, `apiextensions/v1beta1` or`apiextensions/v1`.
* </pre>
*
* <code>string k8s_version = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for k8sVersion.
*/
public com.google.protobuf.ByteString getK8SVersionBytes() {
java.lang.Object ref = k8SVersion_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
k8SVersion_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Optional. Major and minor version of the Kubernetes cluster. This is only
* used to determine which version to use for the CustomResourceDefinition
* resources, `apiextensions/v1beta1` or`apiextensions/v1`.
* </pre>
*
* <code>string k8s_version = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The k8sVersion to set.
* @return This builder for chaining.
*/
public Builder setK8SVersion(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
k8SVersion_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. Major and minor version of the Kubernetes cluster. This is only
* used to determine which version to use for the CustomResourceDefinition
* resources, `apiextensions/v1beta1` or`apiextensions/v1`.
* </pre>
*
* <code>string k8s_version = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearK8SVersion() {
k8SVersion_ = getDefaultInstance().getK8SVersion();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. Major and minor version of the Kubernetes cluster. This is only
* used to determine which version to use for the CustomResourceDefinition
* resources, `apiextensions/v1beta1` or`apiextensions/v1`.
* </pre>
*
* <code>string k8s_version = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The bytes for k8sVersion to set.
* @return This builder for chaining.
*/
public Builder setK8SVersionBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
k8SVersion_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
private java.lang.Object k8SGitVersion_ = "";
/**
*
*
* <pre>
* Optional. Git version of the Kubernetes cluster. This is only used to gate
* the Connect Agent migration to svc.id.goog on GDC-SO 1.33.100 patch and
* above.
* </pre>
*
* <code>string k8s_git_version = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The k8sGitVersion.
*/
public java.lang.String getK8SGitVersion() {
java.lang.Object ref = k8SGitVersion_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
k8SGitVersion_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Optional. Git version of the Kubernetes cluster. This is only used to gate
* the Connect Agent migration to svc.id.goog on GDC-SO 1.33.100 patch and
* above.
* </pre>
*
* <code>string k8s_git_version = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for k8sGitVersion.
*/
public com.google.protobuf.ByteString getK8SGitVersionBytes() {
java.lang.Object ref = k8SGitVersion_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
k8SGitVersion_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Optional. Git version of the Kubernetes cluster. This is only used to gate
* the Connect Agent migration to svc.id.goog on GDC-SO 1.33.100 patch and
* above.
* </pre>
*
* <code>string k8s_git_version = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The k8sGitVersion to set.
* @return This builder for chaining.
*/
public Builder setK8SGitVersion(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
k8SGitVersion_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. Git version of the Kubernetes cluster. This is only used to gate
* the Connect Agent migration to svc.id.goog on GDC-SO 1.33.100 patch and
* above.
* </pre>
*
* <code>string k8s_git_version = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearK8SGitVersion() {
k8SGitVersion_ = getDefaultInstance().getK8SGitVersion();
bitField0_ = (bitField0_ & ~0x00000008);
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. Git version of the Kubernetes cluster. This is only used to gate
* the Connect Agent migration to svc.id.goog on GDC-SO 1.33.100 patch and
* above.
* </pre>
*
* <code>string k8s_git_version = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The bytes for k8sGitVersion to set.
* @return This builder for chaining.
*/
public Builder setK8SGitVersionBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
k8SGitVersion_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.gkehub.v1beta1.ResourceOptions)
}
// @@protoc_insertion_point(class_scope:google.cloud.gkehub.v1beta1.ResourceOptions)
private static final com.google.cloud.gkehub.v1beta1.ResourceOptions DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.gkehub.v1beta1.ResourceOptions();
}
public static com.google.cloud.gkehub.v1beta1.ResourceOptions getDefaultInstance() {
return DEFAULT_INSTANCE;
}
  // Parser used by the runtime; every failure path attaches the partially-built
  // message via setUnfinishedMessage so callers can inspect what was parsed.
  private static final com.google.protobuf.Parser<ResourceOptions> PARSER =
      new com.google.protobuf.AbstractParser<ResourceOptions>() {
        @java.lang.Override
        public ResourceOptions parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O errors in the protobuf-specific exception type.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  // Static accessor for the singleton parser.
  public static com.google.protobuf.Parser<ResourceOptions> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<ResourceOptions> getParserForType() {
    // Instance-level accessor required by the Message interface.
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.gkehub.v1beta1.ResourceOptions getDefaultInstanceForType() {
    // Instance-level accessor required by the Message interface.
    return DEFAULT_INSTANCE;
  }
}
|
google-wallet/rest-samples | 37,836 | java/src/main/java/com/google/developers/wallet/rest/DemoLoyalty.java | /*
* Copyright 2022 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.developers.wallet.rest;
// [START setup]
// [START imports]
import com.auth0.jwt.JWT;
import com.auth0.jwt.algorithms.Algorithm;
import com.google.api.client.googleapis.batch.BatchRequest;
import com.google.api.client.googleapis.batch.json.JsonBatchCallback;
import com.google.api.client.googleapis.javanet.GoogleNetHttpTransport;
import com.google.api.client.googleapis.json.GoogleJsonError;
import com.google.api.client.googleapis.json.GoogleJsonResponseException;
import com.google.api.client.http.*;
import com.google.api.client.json.gson.GsonFactory;
import com.google.api.services.walletobjects.*;
import com.google.api.services.walletobjects.model.*;
import com.google.auth.http.HttpCredentialsAdapter;
import com.google.auth.oauth2.GoogleCredentials;
import com.google.auth.oauth2.ServiceAccountCredentials;
import java.io.*;
import java.security.interfaces.RSAPrivateKey;
import java.util.*;
// [END imports]
/** Demo class for creating and managing Loyalty cards in Google Wallet. */
public class DemoLoyalty {
  /**
   * Path to service account key file from Google Cloud Console. Environment variable:
   * GOOGLE_APPLICATION_CREDENTIALS.
   */
  public static String keyFilePath;

  /** Service account credentials for Google Wallet APIs. */
  public static GoogleCredentials credentials;

  /** Google Wallet service client. */
  public static Walletobjects service;

  /**
   * Reads the service account key path from the environment (falling back to a placeholder)
   * and authenticates the Wallet API client.
   *
   * @throws Exception if the key file cannot be read or the HTTP transport fails to initialize.
   */
  public DemoLoyalty() throws Exception {
    keyFilePath =
        System.getenv().getOrDefault("GOOGLE_APPLICATION_CREDENTIALS", "/path/to/key.json");
    auth();
  }
// [END setup]
// [START auth]
  /**
   * Create authenticated HTTP client using a service account file.
   *
   * <p>Populates the static {@code credentials} and {@code service} fields used by every other
   * method in this class.
   *
   * @throws Exception if the key file cannot be read or a trusted transport cannot be created.
   */
  public void auth() throws Exception {
    credentials =
        GoogleCredentials.fromStream(new FileInputStream(keyFilePath))
            .createScoped(List.of(WalletobjectsScopes.WALLET_OBJECT_ISSUER));
    credentials.refresh();
    HttpTransport httpTransport = GoogleNetHttpTransport.newTrustedTransport();
    // Initialize Google Wallet API service
    service =
        new Walletobjects.Builder(
                httpTransport,
                GsonFactory.getDefaultInstance(),
                new HttpCredentialsAdapter(credentials))
            .setApplicationName("APPLICATION_NAME")
            .build();
  }
// [END auth]
// [START createClass]
  /**
   * Create a class.
   *
   * @param issuerId The issuer ID being used for this request.
   * @param classSuffix Developer-defined unique ID for this pass class.
   * @return The pass class ID: "{issuerId}.{classSuffix}"
   * @throws IOException if the insert request fails for a non-HTTP-error reason.
   */
  public String createClass(String issuerId, String classSuffix) throws IOException {
    // Check if the class exists
    try {
      service.loyaltyclass().get(String.format("%s.%s", issuerId, classSuffix)).execute();
      System.out.printf("Class %s.%s already exists!%n", issuerId, classSuffix);
      return String.format("%s.%s", issuerId, classSuffix);
    } catch (GoogleJsonResponseException ex) {
      if (ex.getStatusCode() != 404) {
        // Something else went wrong...
        // NOTE(review): non-404 errors are swallowed and the ID is returned as if the class
        // existed — acceptable for sample code, but callers get no failure signal.
        ex.printStackTrace();
        return String.format("%s.%s", issuerId, classSuffix);
      }
    }
    // Reaching here means the GET returned 404: the class does not exist yet, so create it.
    // See link below for more information on required properties
    // https://developers.google.com/wallet/retail/loyalty-cards/rest/v1/loyaltyclass
    LoyaltyClass newClass =
        new LoyaltyClass()
            .setId(String.format("%s.%s", issuerId, classSuffix))
            .setIssuerName("Issuer name")
            .setReviewStatus("UNDER_REVIEW")
            .setProgramName("Program name")
            .setProgramLogo(
                new Image()
                    .setSourceUri(
                        new ImageUri()
                            .setUri(
                                "https://farm4.staticflickr.com/3723/11177041115_6e6a3b6f49_o.jpg"))
                    .setContentDescription(
                        new LocalizedString()
                            .setDefaultValue(
                                new TranslatedString()
                                    .setLanguage("en-US")
                                    .setValue("Logo description"))));
    LoyaltyClass response = service.loyaltyclass().insert(newClass).execute();
    System.out.println("Class insert response");
    System.out.println(response.toPrettyString());
    return response.getId();
  }
// [END createClass]
// [START updateClass]
/**
* Update a class.
*
* <p><strong>Warning:</strong> This replaces all existing class attributes!
*
* @param issuerId The issuer ID being used for this request.
* @param classSuffix Developer-defined unique ID for this pass class.
* @return The pass class ID: "{issuerId}.{classSuffix}"
*/
public String updateClass(String issuerId, String classSuffix) throws IOException {
LoyaltyClass updatedClass;
// Check if the class exists
try {
updatedClass =
service.loyaltyclass().get(String.format("%s.%s", issuerId, classSuffix)).execute();
} catch (GoogleJsonResponseException ex) {
if (ex.getStatusCode() == 404) {
// Class does not exist
System.out.printf("Class %s.%s not found!%n", issuerId, classSuffix);
return String.format("%s.%s", issuerId, classSuffix);
} else {
// Something else went wrong...
ex.printStackTrace();
return String.format("%s.%s", issuerId, classSuffix);
}
}
// Class exists
// Update the class by adding a homepage
updatedClass.setHomepageUri(
new Uri()
.setUri("https://developers.google.com/wallet")
.setDescription("Homepage description"));
// Note: reviewStatus must be 'UNDER_REVIEW' or 'DRAFT' for updates
updatedClass.setReviewStatus("UNDER_REVIEW");
LoyaltyClass response =
service
.loyaltyclass()
.update(String.format("%s.%s", issuerId, classSuffix), updatedClass)
.execute();
System.out.println("Class update response");
System.out.println(response.toPrettyString());
return response.getId();
}
// [END updateClass]
// [START patchClass]
  /**
   * Patch a class.
   *
   * <p>The PATCH method supports patch semantics.
   *
   * @param issuerId The issuer ID being used for this request.
   * @param classSuffix Developer-defined unique ID for this pass class.
   * @return The pass class ID: "{issuerId}.{classSuffix}"
   * @throws IOException if the patch request fails for a non-HTTP-error reason.
   */
  public String patchClass(String issuerId, String classSuffix) throws IOException {
    // Check if the class exists
    try {
      service.loyaltyclass().get(String.format("%s.%s", issuerId, classSuffix)).execute();
    } catch (GoogleJsonResponseException ex) {
      if (ex.getStatusCode() == 404) {
        // Class does not exist
        System.out.printf("Class %s.%s not found!%n", issuerId, classSuffix);
        return String.format("%s.%s", issuerId, classSuffix);
      } else {
        // Something else went wrong...
        ex.printStackTrace();
        return String.format("%s.%s", issuerId, classSuffix);
      }
    }
    // Class exists
    // Patch the class by adding a homepage
    // Only the fields present in the patch body are modified; everything else is preserved.
    LoyaltyClass patchBody =
        new LoyaltyClass()
            .setHomepageUri(
                new Uri()
                    .setUri("https://developers.google.com/wallet")
                    .setDescription("Homepage description"))
            // Note: reviewStatus must be 'UNDER_REVIEW' or 'DRAFT' for updates
            .setReviewStatus("UNDER_REVIEW");
    LoyaltyClass response =
        service
            .loyaltyclass()
            .patch(String.format("%s.%s", issuerId, classSuffix), patchBody)
            .execute();
    System.out.println("Class patch response");
    System.out.println(response.toPrettyString());
    return response.getId();
  }
// [END patchClass]
// [START addMessageClass]
  /**
   * Add a message to a pass class.
   *
   * @param issuerId The issuer ID being used for this request.
   * @param classSuffix Developer-defined unique ID for this pass class.
   * @param header The message header.
   * @param body The message body.
   * @return The pass class ID: "{issuerId}.{classSuffix}"
   * @throws IOException if the addMessage request fails for a non-HTTP-error reason.
   */
  public String addClassMessage(String issuerId, String classSuffix, String header, String body)
      throws IOException {
    // Check if the class exists
    try {
      service.loyaltyclass().get(String.format("%s.%s", issuerId, classSuffix)).execute();
    } catch (GoogleJsonResponseException ex) {
      if (ex.getStatusCode() == 404) {
        // Class does not exist
        System.out.printf("Class %s.%s not found!%n", issuerId, classSuffix);
        return String.format("%s.%s", issuerId, classSuffix);
      } else {
        // Something else went wrong...
        ex.printStackTrace();
        return String.format("%s.%s", issuerId, classSuffix);
      }
    }
    // The message is shown on every pass object that belongs to this class.
    AddMessageRequest message =
        new AddMessageRequest().setMessage(new Message().setHeader(header).setBody(body));
    LoyaltyClassAddMessageResponse response =
        service
            .loyaltyclass()
            .addmessage(String.format("%s.%s", issuerId, classSuffix), message)
            .execute();
    System.out.println("Class addMessage response");
    System.out.println(response.toPrettyString());
    return String.format("%s.%s", issuerId, classSuffix);
  }
// [END addMessageClass]
// [START createObject]
  /**
   * Create an object.
   *
   * @param issuerId The issuer ID being used for this request.
   * @param classSuffix Developer-defined unique ID for this pass class.
   * @param objectSuffix Developer-defined unique ID for this pass object.
   * @return The pass object ID: "{issuerId}.{objectSuffix}"
   * @throws IOException if the insert request fails for a non-HTTP-error reason.
   */
  public String createObject(String issuerId, String classSuffix, String objectSuffix)
      throws IOException {
    // Check if the object exists
    try {
      service.loyaltyobject().get(String.format("%s.%s", issuerId, objectSuffix)).execute();
      System.out.printf("Object %s.%s already exists!%n", issuerId, objectSuffix);
      return String.format("%s.%s", issuerId, objectSuffix);
    } catch (GoogleJsonResponseException ex) {
      if (ex.getStatusCode() != 404) {
        // Something else went wrong...
        ex.printStackTrace();
        return String.format("%s.%s", issuerId, objectSuffix);
      }
    }
    // GET returned 404: the object does not exist yet, so build and insert it.
    // See link below for more information on required properties
    // https://developers.google.com/wallet/retail/loyalty-cards/rest/v1/loyaltyobject
    LoyaltyObject newObject =
        new LoyaltyObject()
            .setId(String.format("%s.%s", issuerId, objectSuffix))
            .setClassId(String.format("%s.%s", issuerId, classSuffix))
            .setState("ACTIVE")
            // Banner image shown at the top of the pass.
            .setHeroImage(
                new Image()
                    .setSourceUri(
                        new ImageUri()
                            .setUri(
                                "https://farm4.staticflickr.com/3723/11177041115_6e6a3b6f49_o.jpg"))
                    .setContentDescription(
                        new LocalizedString()
                            .setDefaultValue(
                                new TranslatedString()
                                    .setLanguage("en-US")
                                    .setValue("Hero image description"))))
            .setTextModulesData(
                List.of(
                    new TextModuleData()
                        .setHeader("Text module header")
                        .setBody("Text module body")
                        .setId("TEXT_MODULE_ID")))
            // Extra links rendered below the pass details.
            .setLinksModuleData(
                new LinksModuleData()
                    .setUris(
                        Arrays.asList(
                            new Uri()
                                .setUri("http://maps.google.com/")
                                .setDescription("Link module URI description")
                                .setId("LINK_MODULE_URI_ID"),
                            new Uri()
                                .setUri("tel:6505555555")
                                .setDescription("Link module tel description")
                                .setId("LINK_MODULE_TEL_ID"))))
            .setImageModulesData(
                List.of(
                    new ImageModuleData()
                        .setMainImage(
                            new Image()
                                .setSourceUri(
                                    new ImageUri()
                                        .setUri(
                                            "http://farm4.staticflickr.com/3738/12440799783_3dc3c20606_b.jpg"))
                                .setContentDescription(
                                    new LocalizedString()
                                        .setDefaultValue(
                                            new TranslatedString()
                                                .setLanguage("en-US")
                                                .setValue("Image module description"))))
                        .setId("IMAGE_MODULE_ID")))
            .setBarcode(new Barcode().setType("QR_CODE").setValue("QR code value"))
            .setLocations(
                List.of(
                    new LatLongPoint()
                        .setLatitude(37.424015499999996)
                        .setLongitude(-122.09259560000001)))
            .setAccountId("Account ID")
            .setAccountName("Account name")
            .setLoyaltyPoints(
                new LoyaltyPoints()
                    .setLabel("Points")
                    .setBalance(new LoyaltyPointsBalance().setInt(800)));
    LoyaltyObject response = service.loyaltyobject().insert(newObject).execute();
    System.out.println("Object insert response");
    System.out.println(response.toPrettyString());
    return response.getId();
  }
// [END createObject]
// [START updateObject]
  /**
   * Update an object.
   *
   * <p><strong>Warning:</strong> This replaces all existing object attributes!
   *
   * @param issuerId The issuer ID being used for this request.
   * @param objectSuffix Developer-defined unique ID for this pass object.
   * @return The pass object ID: "{issuerId}.{objectSuffix}"
   * @throws IOException if the update request fails for a non-HTTP-error reason.
   */
  public String updateObject(String issuerId, String objectSuffix) throws IOException {
    LoyaltyObject updatedObject;
    // Check if the object exists
    try {
      updatedObject =
          service.loyaltyobject().get(String.format("%s.%s", issuerId, objectSuffix)).execute();
    } catch (GoogleJsonResponseException ex) {
      if (ex.getStatusCode() == 404) {
        // Object does not exist
        System.out.printf("Object %s.%s not found!%n", issuerId, objectSuffix);
        return String.format("%s.%s", issuerId, objectSuffix);
      } else {
        // Something else went wrong...
        ex.printStackTrace();
        return String.format("%s.%s", issuerId, objectSuffix);
      }
    }
    // Object exists
    // Update the object by adding a link
    Uri newLink =
        new Uri()
            .setUri("https://developers.google.com/wallet")
            .setDescription("New link description");
    if (updatedObject.getLinksModuleData() == null) {
      // LinksModuleData was not set on the original object
      updatedObject.setLinksModuleData(new LinksModuleData().setUris(List.of(newLink)));
    } else {
      // NOTE(review): assumes getUris() returns a mutable list — confirm against the client
      // library's deserialization behavior before relying on in-place mutation here.
      updatedObject.getLinksModuleData().getUris().add(newLink);
    }
    LoyaltyObject response =
        service
            .loyaltyobject()
            .update(String.format("%s.%s", issuerId, objectSuffix), updatedObject)
            .execute();
    System.out.println("Object update response");
    System.out.println(response.toPrettyString());
    return response.getId();
  }
// [END updateObject]
// [START patchObject]
  /**
   * Patch an object.
   *
   * @param issuerId The issuer ID being used for this request.
   * @param objectSuffix Developer-defined unique ID for this pass object.
   * @return The pass object ID: "{issuerId}.{objectSuffix}"
   * @throws IOException if the patch request fails for a non-HTTP-error reason.
   */
  public String patchObject(String issuerId, String objectSuffix) throws IOException {
    LoyaltyObject existingObject;
    // Check if the object exists
    try {
      existingObject =
          service.loyaltyobject().get(String.format("%s.%s", issuerId, objectSuffix)).execute();
    } catch (GoogleJsonResponseException ex) {
      if (ex.getStatusCode() == 404) {
        // Object does not exist
        System.out.printf("Object %s.%s not found!%n", issuerId, objectSuffix);
        return String.format("%s.%s", issuerId, objectSuffix);
      } else {
        // Something else went wrong...
        ex.printStackTrace();
        return String.format("%s.%s", issuerId, objectSuffix);
      }
    }
    // Object exists
    // Patch the object by adding a link
    Uri newLink =
        new Uri()
            .setUri("https://developers.google.com/wallet")
            .setDescription("New link description");
    // The patch body carries the full links list (existing links plus the new one),
    // since PATCH replaces the linksModuleData field as a whole.
    LoyaltyObject patchBody = new LoyaltyObject();
    if (existingObject.getLinksModuleData() == null) {
      // LinksModuleData was not set on the original object
      patchBody.setLinksModuleData(new LinksModuleData().setUris(new ArrayList<Uri>()));
    } else {
      patchBody.setLinksModuleData(existingObject.getLinksModuleData());
    }
    patchBody.getLinksModuleData().getUris().add(newLink);
    LoyaltyObject response =
        service
            .loyaltyobject()
            .patch(String.format("%s.%s", issuerId, objectSuffix), patchBody)
            .execute();
    System.out.println("Object patch response");
    System.out.println(response.toPrettyString());
    return response.getId();
  }
// [END patchObject]
// [START expireObject]
/**
* Expire an object.
*
* <p>Sets the object's state to Expired. If the valid time interval is already set, the pass will
* expire automatically up to 24 hours after.
*
* @param issuerId The issuer ID being used for this request.
* @param objectSuffix Developer-defined unique ID for this pass object.
* @return The pass object ID: "{issuerId}.{objectSuffix}"
*/
public String expireObject(String issuerId, String objectSuffix) throws IOException {
// Check if the object exists
try {
service.loyaltyobject().get(String.format("%s.%s", issuerId, objectSuffix)).execute();
} catch (GoogleJsonResponseException ex) {
if (ex.getStatusCode() == 404) {
// Object does not exist
System.out.printf("Object %s.%s not found!%n", issuerId, objectSuffix);
return String.format("%s.%s", issuerId, objectSuffix);
} else {
// Something else went wrong...
ex.printStackTrace();
return String.format("%s.%s", issuerId, objectSuffix);
}
}
// Patch the object, setting the pass as expired
LoyaltyObject patchBody = new LoyaltyObject().setState("EXPIRED");
LoyaltyObject response =
service
.loyaltyobject()
.patch(String.format("%s.%s", issuerId, objectSuffix), patchBody)
.execute();
System.out.println("Object expiration response");
System.out.println(response.toPrettyString());
return response.getId();
}
// [END expireObject]
// [START addMessageObject]
/**
* Add a message to a pass object.
*
* @param issuerId The issuer ID being used for this request.
* @param objectSuffix Developer-defined unique ID for this pass object.
* @param header The message header.
* @param body The message body.
* @return The pass object ID: "{issuerId}.{objectSuffix}"
*/
public String addObjectMessage(String issuerId, String objectSuffix, String header, String body)
throws IOException {
// Check if the object exists
try {
service.loyaltyobject().get(String.format("%s.%s", issuerId, objectSuffix)).execute();
} catch (GoogleJsonResponseException ex) {
if (ex.getStatusCode() == 404) {
// Object does not exist
System.out.printf("Object %s.%s not found!%n", issuerId, objectSuffix);
return String.format("%s.%s", issuerId, objectSuffix);
} else {
// Something else went wrong...
ex.printStackTrace();
return String.format("%s.%s", issuerId, objectSuffix);
}
}
AddMessageRequest message =
new AddMessageRequest().setMessage(new Message().setHeader(header).setBody(body));
LoyaltyObjectAddMessageResponse response =
service
.loyaltyobject()
.addmessage(String.format("%s.%s", issuerId, objectSuffix), message)
.execute();
System.out.println("Object addMessage response");
System.out.println(response.toPrettyString());
return String.format("%s.%s", issuerId, objectSuffix);
}
// [END addMessageObject]
// [START jwtNew]
  /**
   * Generate a signed JWT that creates a new pass class and object.
   *
   * <p>When the user opens the "Add to Google Wallet" URL and saves the pass to their wallet, the
   * pass class and object defined in the JWT are created. This allows you to create multiple pass
   * classes and objects in one API call when the user saves the pass to their wallet.
   *
   * @param issuerId The issuer ID being used for this request.
   * @param classSuffix Developer-defined unique ID for this pass class.
   * @param objectSuffix Developer-defined unique ID for the pass object.
   * @return An "Add to Google Wallet" link.
   */
  public String createJWTNewObjects(String issuerId, String classSuffix, String objectSuffix) {
    // See link below for more information on required properties
    // https://developers.google.com/wallet/retail/loyalty-cards/rest/v1/loyaltyclass
    LoyaltyClass newClass =
        new LoyaltyClass()
            .setId(String.format("%s.%s", issuerId, classSuffix))
            .setIssuerName("Issuer name")
            .setReviewStatus("UNDER_REVIEW")
            .setProgramName("Program name")
            .setProgramLogo(
                new Image()
                    .setSourceUri(
                        new ImageUri()
                            .setUri(
                                "https://farm4.staticflickr.com/3723/11177041115_6e6a3b6f49_o.jpg"))
                    .setContentDescription(
                        new LocalizedString()
                            .setDefaultValue(
                                new TranslatedString()
                                    .setLanguage("en-US")
                                    .setValue("Logo description"))));
    // See link below for more information on required properties
    // https://developers.google.com/wallet/retail/loyalty-cards/rest/v1/loyaltyobject
    LoyaltyObject newObject =
        new LoyaltyObject()
            .setId(String.format("%s.%s", issuerId, objectSuffix))
            .setClassId(String.format("%s.%s", issuerId, classSuffix))
            .setState("ACTIVE")
            .setHeroImage(
                new Image()
                    .setSourceUri(
                        new ImageUri()
                            .setUri(
                                "https://farm4.staticflickr.com/3723/11177041115_6e6a3b6f49_o.jpg"))
                    .setContentDescription(
                        new LocalizedString()
                            .setDefaultValue(
                                new TranslatedString()
                                    .setLanguage("en-US")
                                    .setValue("Hero image description"))))
            .setTextModulesData(
                List.of(
                    new TextModuleData()
                        .setHeader("Text module header")
                        .setBody("Text module body")
                        .setId("TEXT_MODULE_ID")))
            .setLinksModuleData(
                new LinksModuleData()
                    .setUris(
                        Arrays.asList(
                            new Uri()
                                .setUri("http://maps.google.com/")
                                .setDescription("Link module URI description")
                                .setId("LINK_MODULE_URI_ID"),
                            new Uri()
                                .setUri("tel:6505555555")
                                .setDescription("Link module tel description")
                                .setId("LINK_MODULE_TEL_ID"))))
            .setImageModulesData(
                List.of(
                    new ImageModuleData()
                        .setMainImage(
                            new Image()
                                .setSourceUri(
                                    new ImageUri()
                                        .setUri(
                                            "http://farm4.staticflickr.com/3738/12440799783_3dc3c20606_b.jpg"))
                                .setContentDescription(
                                    new LocalizedString()
                                        .setDefaultValue(
                                            new TranslatedString()
                                                .setLanguage("en-US")
                                                .setValue("Image module description"))))
                        .setId("IMAGE_MODULE_ID")))
            .setBarcode(new Barcode().setType("QR_CODE").setValue("QR code value"))
            .setLocations(
                List.of(
                    new LatLongPoint()
                        .setLatitude(37.424015499999996)
                        .setLongitude(-122.09259560000001)))
            .setAccountId("Account ID")
            .setAccountName("Account name")
            .setLoyaltyPoints(
                new LoyaltyPoints()
                    .setLabel("Points")
                    .setBalance(new LoyaltyPointsBalance().setInt(800)));
    // Create the JWT as a HashMap object
    HashMap<String, Object> claims = new HashMap<String, Object>();
    // NOTE(review): requires credentials to be ServiceAccountCredentials — the cast below
    // will throw ClassCastException for other GoogleCredentials subtypes.
    claims.put("iss", ((ServiceAccountCredentials) credentials).getClientEmail());
    claims.put("aud", "google");
    claims.put("origins", List.of("www.example.com"));
    claims.put("typ", "savetowallet");
    // Create the Google Wallet payload and add to the JWT
    HashMap<String, Object> payload = new HashMap<String, Object>();
    payload.put("loyaltyClasses", List.of(newClass));
    payload.put("loyaltyObjects", List.of(newObject));
    claims.put("payload", payload);
    // The service account credentials are used to sign the JWT
    Algorithm algorithm =
        Algorithm.RSA256(
            null, (RSAPrivateKey) ((ServiceAccountCredentials) credentials).getPrivateKey());
    String token = JWT.create().withPayload(claims).sign(algorithm);
    System.out.println("Add to Google Wallet link");
    System.out.printf("https://pay.google.com/gp/v/save/%s%n", token);
    return String.format("https://pay.google.com/gp/v/save/%s", token);
  }
// [END jwtNew]
// [START jwtExisting]
/**
* Generate a signed JWT that references an existing pass object.
*
* <p>When the user opens the "Add to Google Wallet" URL and saves the pass to their wallet, the
* pass objects defined in the JWT are added to the user's Google Wallet app. This allows the user
* to save multiple pass objects in one API call.
*
* <p>The objects to add must follow the below format:
*
* <p>{ 'id': 'ISSUER_ID.OBJECT_SUFFIX', 'classId': 'ISSUER_ID.CLASS_SUFFIX' }
*
* @param issuerId The issuer ID being used for this request.
* @return An "Add to Google Wallet" link.
*/
public String createJWTExistingObjects(String issuerId) {
// Multiple pass types can be added at the same time
// At least one type must be specified in the JWT claims
// Note: Make sure to replace the placeholder class and object suffixes
HashMap<String, Object> objectsToAdd = new HashMap<String, Object>();
// Event tickets
objectsToAdd.put(
"eventTicketObjects",
List.of(
new EventTicketObject()
.setId(String.format("%s.%s", issuerId, "EVENT_OBJECT_SUFFIX"))
.setClassId(String.format("%s.%s", issuerId, "EVENT_CLASS_SUFFIX"))));
// Boarding passes
objectsToAdd.put(
"flightObjects",
List.of(
new FlightObject()
.setId(String.format("%s.%s", issuerId, "FLIGHT_OBJECT_SUFFIX"))
.setClassId(String.format("%s.%s", issuerId, "FLIGHT_CLASS_SUFFIX"))));
// Generic passes
objectsToAdd.put(
"genericObjects",
List.of(
new GenericObject()
.setId(String.format("%s.%s", issuerId, "GENERIC_OBJECT_SUFFIX"))
.setClassId(String.format("%s.%s", issuerId, "GENERIC_CLASS_SUFFIX"))));
// Gift cards
objectsToAdd.put(
"giftCardObjects",
List.of(
new GiftCardObject()
.setId(String.format("%s.%s", issuerId, "GIFT_CARD_OBJECT_SUFFIX"))
.setClassId(String.format("%s.%s", issuerId, "GIFT_CARD_CLASS_SUFFIX"))));
// Loyalty cards
objectsToAdd.put(
"loyaltyObjects",
List.of(
new LoyaltyObject()
.setId(String.format("%s.%s", issuerId, "LOYALTY_OBJECT_SUFFIX"))
.setClassId(String.format("%s.%s", issuerId, "LOYALTY_CLASS_SUFFIX"))));
// Offers
objectsToAdd.put(
"offerObjects",
List.of(
new OfferObject()
.setId(String.format("%s.%s", issuerId, "OFFER_OBJECT_SUFFIX"))
.setClassId(String.format("%s.%s", issuerId, "OFFER_CLASS_SUFFIX"))));
// Transit passes
objectsToAdd.put(
"transitObjects",
List.of(
new TransitObject()
.setId(String.format("%s.%s", issuerId, "TRANSIT_OBJECT_SUFFIX"))
.setClassId(String.format("%s.%s", issuerId, "TRANSIT_CLASS_SUFFIX"))));
// Create the JWT as a HashMap object
HashMap<String, Object> claims = new HashMap<String, Object>();
claims.put("iss", ((ServiceAccountCredentials) credentials).getClientEmail());
claims.put("aud", "google");
claims.put("origins", List.of("www.example.com"));
claims.put("typ", "savetowallet");
claims.put("payload", objectsToAdd);
// The service account credentials are used to sign the JWT
Algorithm algorithm =
Algorithm.RSA256(
null, (RSAPrivateKey) ((ServiceAccountCredentials) credentials).getPrivateKey());
String token = JWT.create().withPayload(claims).sign(algorithm);
System.out.println("Add to Google Wallet link");
System.out.printf("https://pay.google.com/gp/v/save/%s%n", token);
return String.format("https://pay.google.com/gp/v/save/%s", token);
}
// [END jwtExisting]
// [START batch]
/**
* Batch create Google Wallet objects from an existing class.
*
* @param issuerId The issuer ID being used for this request.
* @param classSuffix Developer-defined unique ID for this pass class.
*/
public void BatchCreateObjects(String issuerId, String classSuffix) throws IOException {
// Create the batch request client
BatchRequest batch = service.batch(new HttpCredentialsAdapter(credentials));
// The callback will be invoked for each request in the batch
JsonBatchCallback<LoyaltyObject> callback =
new JsonBatchCallback<LoyaltyObject>() {
// Invoked if the request was successful
public void onSuccess(LoyaltyObject response, HttpHeaders responseHeaders) {
System.out.println("Batch insert response");
System.out.println(response.toString());
}
// Invoked if the request failed
public void onFailure(GoogleJsonError e, HttpHeaders responseHeaders) {
System.out.println("Error Message: " + e.getMessage());
}
};
// Example: Generate three new pass objects
for (int i = 0; i < 3; i++) {
// Generate a random object suffix
String objectSuffix = UUID.randomUUID().toString().replaceAll("[^\\w.-]", "_");
// See link below for more information on required properties
// https://developers.google.com/wallet/retail/loyalty-cards/rest/v1/loyaltyobject
LoyaltyObject batchObject =
new LoyaltyObject()
.setId(String.format("%s.%s", issuerId, objectSuffix))
.setClassId(String.format("%s.%s", issuerId, classSuffix))
.setState("ACTIVE")
.setHeroImage(
new Image()
.setSourceUri(
new ImageUri()
.setUri(
"https://farm4.staticflickr.com/3723/11177041115_6e6a3b6f49_o.jpg"))
.setContentDescription(
new LocalizedString()
.setDefaultValue(
new TranslatedString()
.setLanguage("en-US")
.setValue("Hero image description"))))
.setTextModulesData(
List.of(
new TextModuleData()
.setHeader("Text module header")
.setBody("Text module body")
.setId("TEXT_MODULE_ID")))
.setLinksModuleData(
new LinksModuleData()
.setUris(
Arrays.asList(
new Uri()
.setUri("http://maps.google.com/")
.setDescription("Link module URI description")
.setId("LINK_MODULE_URI_ID"),
new Uri()
.setUri("tel:6505555555")
.setDescription("Link module tel description")
.setId("LINK_MODULE_TEL_ID"))))
.setImageModulesData(
List.of(
new ImageModuleData()
.setMainImage(
new Image()
.setSourceUri(
new ImageUri()
.setUri(
"http://farm4.staticflickr.com/3738/12440799783_3dc3c20606_b.jpg"))
.setContentDescription(
new LocalizedString()
.setDefaultValue(
new TranslatedString()
.setLanguage("en-US")
.setValue("Image module description"))))
.setId("IMAGE_MODULE_ID")))
.setBarcode(new Barcode().setType("QR_CODE").setValue("QR code value"))
.setLocations(
List.of(
new LatLongPoint()
.setLatitude(37.424015499999996)
.setLongitude(-122.09259560000001)))
.setAccountId("Account ID")
.setAccountName("Account name")
.setLoyaltyPoints(
new LoyaltyPoints()
.setLabel("Points")
.setBalance(new LoyaltyPointsBalance().setInt(800)));
service.loyaltyobject().insert(batchObject).queue(batch, callback);
}
// Invoke the batch API calls
batch.execute();
}
// [END batch]
}
|
apache/james-project | 38,285 | mailbox/cassandra/src/test/java/org/apache/james/mailbox/cassandra/CassandraMailboxManagerConsistencyTest.java | /****************************************************************
* Licensed to the Apache Software Foundation (ASF) under one *
* or more contributor license agreements. See the NOTICE file *
* distributed with this work for additional information *
* regarding copyright ownership. The ASF licenses this file *
* to you under the Apache License, Version 2.0 (the *
* "License"); you may not use this file except in compliance *
* with the License. You may obtain a copy of the License at *
* *
* http://www.apache.org/licenses/LICENSE-2.0 *
* *
* Unless required by applicable law or agreed to in writing, *
* software distributed under the License is distributed on an *
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY *
* KIND, either express or implied. See the License for the *
* specific language governing permissions and limitations *
* under the License. *
****************************************************************/
package org.apache.james.mailbox.cassandra;
import static org.apache.james.backends.cassandra.Scenario.Builder.fail;
import static org.apache.james.backends.cassandra.Scenario.NOTHING;
import static org.assertj.core.api.Assertions.assertThat;
import org.apache.james.backends.cassandra.CassandraCluster;
import org.apache.james.backends.cassandra.CassandraClusterExtension;
import org.apache.james.backends.cassandra.init.configuration.CassandraConfiguration;
import org.apache.james.core.Username;
import org.apache.james.junit.categories.Unstable;
import org.apache.james.mailbox.MailboxSession;
import org.apache.james.mailbox.cassandra.mail.MailboxAggregateModule;
import org.apache.james.mailbox.model.MailboxId;
import org.apache.james.mailbox.model.MailboxPath;
import org.apache.james.mailbox.model.search.MailboxQuery;
import org.apache.james.mailbox.model.search.Wildcard;
import org.apache.james.mailbox.store.PreDeletionHooks;
import org.assertj.core.api.SoftAssertions;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Nested;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.RegisterExtension;
import com.github.fge.lambdas.Throwing;
import com.github.fge.lambdas.runnable.ThrowingRunnable;
class CassandraMailboxManagerConsistencyTest {
    private static final Username USER = Username.of("user");
    private static final String INBOX = "INBOX";
    private static final String INBOX_RENAMED = "INBOX_RENAMED";
    // Passed to Scenario fail().times(...): how many executions of the instrumented
    // query are forced to fail in each test.
    private static final int TRY_COUNT_BEFORE_FAILURE = 6;

    @RegisterExtension
    static CassandraClusterExtension cassandra = new CassandraClusterExtension(MailboxAggregateModule.MODULE_WITH_QUOTA);

    // Per-test state, initialized in setUp().
    private CassandraMailboxManager testee;
    private MailboxSession mailboxSession;
    private MailboxPath inboxPath;
    private MailboxPath inboxPathRenamed;
    private MailboxQuery.UserBound allMailboxesSearchQuery;
    @BeforeEach
    void setUp(CassandraCluster cassandra) {
        // mailboxReadRepair(0): no read repair, so tests observe raw (possibly
        // inconsistent) state left behind by injected failures.
        testee = CassandraMailboxManagerProvider.provideMailboxManager(
            cassandra,
            PreDeletionHooks.NO_PRE_DELETION_HOOK,
            CassandraConfiguration.builder()
                .mailboxReadRepair(0)
                .build());

        mailboxSession = testee.createSystemSession(USER);
        inboxPath = MailboxPath.forUser(USER, INBOX);
        inboxPathRenamed = MailboxPath.forUser(USER, INBOX_RENAMED);
        // Query matching every mailbox in USER's namespace.
        allMailboxesSearchQuery = MailboxQuery.builder()
            .userAndNamespaceFrom(inboxPath)
            .expression(Wildcard.INSTANCE)
            .build()
            .asUserBound();
    }
@Nested
class FailuresDuringCreation {
@Disabled("For performance reasons we don't validate path reads against mailbox table")
@Test
void createMailboxShouldBeConsistentWhenMailboxDaoFails(CassandraCluster cassandra) {
cassandra.getConf().registerScenario(fail()
.times(TRY_COUNT_BEFORE_FAILURE)
.whenQueryStartsWith("INSERT INTO mailbox (id,name,uidvalidity,mailboxbase) VALUES (:id,:name,:uidvalidity,:mailboxbase)"));
doQuietly(() -> testee.createMailbox(inboxPath, mailboxSession));
cassandra.getConf().registerScenario(NOTHING);
SoftAssertions.assertSoftly(Throwing.consumer(softly -> {
softly.assertThat(testee.search(allMailboxesSearchQuery, mailboxSession).toStream())
.isEmpty();
softly.assertThat(testee.list(mailboxSession))
.isEmpty();
}));
}
@Test
void createMailboxShouldBeConsistentWhenMailboxPathDaoFails(CassandraCluster cassandra) {
cassandra.getConf().registerScenario(fail()
.times(TRY_COUNT_BEFORE_FAILURE)
.whenQueryStartsWith("INSERT INTO mailboxpathv3"));
doQuietly(() -> testee.createMailbox(inboxPath, mailboxSession));
cassandra.getConf().registerScenario(NOTHING);
SoftAssertions.assertSoftly(Throwing.consumer(softly -> {
softly.assertThat(testee.search(allMailboxesSearchQuery, mailboxSession).toStream())
.isEmpty();
softly.assertThat(testee.list(mailboxSession))
.isEmpty();
}));
}
@Disabled("JAMES-3056 createMailbox() doesn't return mailboxId while it's supposed to")
@Test
void createMailboxAfterAFailedCreationShouldCreateTheMailboxWhenMailboxDaoFails(CassandraCluster cassandra) throws Exception {
cassandra.getConf().registerScenario(fail()
.times(TRY_COUNT_BEFORE_FAILURE)
.whenQueryStartsWith("INSERT INTO mailbox (id,name,uidvalidity,mailboxbase) VALUES (:id,:name,:uidvalidity,:mailboxbase)"));
doQuietly(() -> testee.createMailbox(inboxPath, mailboxSession));
assertThat(testee.createMailbox(inboxPath, mailboxSession))
.isNotEmpty();
}
@Test
void createMailboxAfterAFailedCreationShouldCreateTheMailboxWhenMailboxPathDaoFails(CassandraCluster cassandra) throws Exception {
cassandra.getConf().registerScenario(fail()
.times(TRY_COUNT_BEFORE_FAILURE)
.whenQueryStartsWith("INSERT INTO mailboxpathv3"));
doQuietly(() -> testee.createMailbox(inboxPath, mailboxSession));
cassandra.getConf().registerScenario(NOTHING);
MailboxId inboxId = testee.createMailbox(inboxPath, mailboxSession)
.get();
SoftAssertions.assertSoftly(Throwing.consumer(softly -> {
softly.assertThat(testee.search(allMailboxesSearchQuery, mailboxSession).toStream())
.hasOnlyOneElementSatisfying(mailboxMetaData -> {
softly.assertThat(mailboxMetaData.getId()).isEqualTo(inboxId);
softly.assertThat(mailboxMetaData.getPath()).isEqualTo(inboxPath);
});
softly.assertThat(testee.list(mailboxSession))
.containsExactly(inboxPath);
}));
}
@Disabled("JAMES-3056 createMailbox() doesn't return mailboxId while it's supposed to")
@Test
void createMailboxAfterDeletingShouldCreateTheMailboxWhenMailboxDaoFails(CassandraCluster cassandra) throws Exception {
cassandra.getConf().registerScenario(fail()
.times(TRY_COUNT_BEFORE_FAILURE)
.whenQueryStartsWith("INSERT INTO mailbox (id,name,uidvalidity,mailboxbase) VALUES (:id,:name,:uidvalidity,:mailboxbase)"));
doQuietly(() -> testee.createMailbox(inboxPath, mailboxSession));
cassandra.getConf().registerScenario(NOTHING);
doQuietly(() -> testee.deleteMailbox(inboxPath, mailboxSession));
assertThat(testee.createMailbox(inboxPath, mailboxSession))
.isNotEmpty();
}
@Test
void createMailboxAfterDeletingShouldCreateTheMailboxWhenMailboxPathDaoFails(CassandraCluster cassandra) throws Exception {
cassandra.getConf().registerScenario(fail()
.times(TRY_COUNT_BEFORE_FAILURE)
.whenQueryStartsWith("INSERT INTO mailboxPathv2"));
doQuietly(() -> testee.createMailbox(inboxPath, mailboxSession));
cassandra.getConf().registerScenario(NOTHING);
doQuietly(() -> testee.deleteMailbox(inboxPath, mailboxSession));
MailboxId inboxId = testee.createMailbox(inboxPath, mailboxSession)
.get();
SoftAssertions.assertSoftly(Throwing.consumer(softly -> {
softly.assertThat(testee.search(allMailboxesSearchQuery, mailboxSession).toStream())
.hasOnlyOneElementSatisfying(mailboxMetaData -> {
softly.assertThat(mailboxMetaData.getId()).isEqualTo(inboxId);
softly.assertThat(mailboxMetaData.getPath()).isEqualTo(inboxPath);
});
softly.assertThat(testee.list(mailboxSession))
.containsExactly(inboxPath);
}));
}
}
@Nested
class FailuresDuringRenaming {
@Disabled("For performance reasons we don't validate path reads against mailbox table")
@Test
void renameShouldBeConsistentWhenMailboxDaoFails(CassandraCluster cassandra) throws Exception {
MailboxId inboxId = testee.createMailbox(inboxPath, mailboxSession)
.get();
cassandra.getConf().registerScenario(fail()
.times(TRY_COUNT_BEFORE_FAILURE)
.whenQueryStartsWith("INSERT INTO mailbox (id,name,uidvalidity,mailboxbase) VALUES (:id,:name,:uidvalidity,:mailboxbase)"));
doQuietly(() -> testee.renameMailbox(inboxPath, inboxPathRenamed, mailboxSession));
cassandra.getConf().registerScenario(NOTHING);
SoftAssertions.assertSoftly(Throwing.consumer(softly -> {
softly.assertThat(testee.search(allMailboxesSearchQuery, mailboxSession).toStream())
.hasOnlyOneElementSatisfying(mailboxMetaData -> {
softly.assertThat(mailboxMetaData.getId()).isEqualTo(inboxId);
softly.assertThat(mailboxMetaData.getPath()).isEqualTo(inboxPath);
});
softly.assertThat(testee.list(mailboxSession))
.containsExactly(inboxPath);
}));
}
@Test
void renameShouldBeConsistentWhenMailboxPathDaoFails(CassandraCluster cassandra) throws Exception {
MailboxId inboxId = testee.createMailbox(inboxPath, mailboxSession)
.get();
cassandra.getConf().registerScenario(fail()
.times(TRY_COUNT_BEFORE_FAILURE)
.whenQueryStartsWith("INSERT INTO mailboxpathv3"));
doQuietly(() -> testee.renameMailbox(inboxPath, inboxPathRenamed, mailboxSession));
cassandra.getConf().registerScenario(NOTHING);
SoftAssertions.assertSoftly(Throwing.consumer(softly -> {
softly.assertThat(testee.search(allMailboxesSearchQuery, mailboxSession).toStream())
.hasOnlyOneElementSatisfying(mailboxMetaData -> {
softly.assertThat(mailboxMetaData.getId()).isEqualTo(inboxId);
softly.assertThat(mailboxMetaData.getPath()).isEqualTo(inboxPath);
});
softly.assertThat(testee.list(mailboxSession))
.containsExactly(inboxPath);
}));
}
@Disabled("JAMES-3056 cannot create a new mailbox because 'INBOX_RENAMED' already exists")
@Test
void createNewMailboxAfterAFailedRenameShouldCreateThatMailboxWhenMailboxDaoFails(CassandraCluster cassandra) throws Exception {
MailboxId inboxId = testee.createMailbox(inboxPath, mailboxSession)
.get();
cassandra.getConf().registerScenario(fail()
.times(TRY_COUNT_BEFORE_FAILURE)
.whenQueryStartsWith("INSERT INTO mailbox (id,name,uidvalidity,mailboxbase) VALUES (:id,:name,:uidvalidity,:mailboxbase)"));
doQuietly(() -> testee.renameMailbox(inboxPath, inboxPathRenamed, mailboxSession));
cassandra.getConf().registerScenario(NOTHING);
MailboxId newMailboxId = testee.createMailbox(inboxPathRenamed, mailboxSession)
.get();
SoftAssertions.assertSoftly(Throwing.consumer(softly -> {
softly.assertThat(testee.search(allMailboxesSearchQuery, mailboxSession).toStream())
.hasSize(2)
.anySatisfy(mailboxMetaData -> {
softly.assertThat(mailboxMetaData.getId()).isEqualTo(inboxId);
softly.assertThat(mailboxMetaData.getPath()).isEqualTo(inboxPath);
})
.anySatisfy(mailboxMetaData -> {
softly.assertThat(mailboxMetaData.getId()).isEqualTo(newMailboxId);
softly.assertThat(mailboxMetaData.getPath()).isEqualTo(inboxPathRenamed);
});
softly.assertThat(testee.list(mailboxSession))
.containsExactlyInAnyOrder(inboxPath, inboxPathRenamed);
}));
}
@Test
void createNewMailboxAfterAFailedRenameShouldCreateThatMailboxWhenMailboxPathDaoFails(CassandraCluster cassandra) throws Exception {
MailboxId inboxId = testee.createMailbox(inboxPath, mailboxSession)
.get();
cassandra.getConf().registerScenario(fail()
.times(TRY_COUNT_BEFORE_FAILURE)
.whenQueryStartsWith("INSERT INTO mailboxpathv3"));
doQuietly(() -> testee.renameMailbox(inboxPath, inboxPathRenamed, mailboxSession));
cassandra.getConf().registerScenario(NOTHING);
MailboxId newMailboxId = testee.createMailbox(inboxPathRenamed, mailboxSession)
.get();
SoftAssertions.assertSoftly(Throwing.consumer(softly -> {
softly.assertThat(testee.search(allMailboxesSearchQuery, mailboxSession).toStream())
.hasSize(2)
.anySatisfy(mailboxMetaData -> {
softly.assertThat(mailboxMetaData.getId()).isEqualTo(inboxId);
softly.assertThat(mailboxMetaData.getPath()).isEqualTo(inboxPath);
})
.anySatisfy(mailboxMetaData -> {
softly.assertThat(mailboxMetaData.getId()).isEqualTo(newMailboxId);
softly.assertThat(mailboxMetaData.getPath()).isEqualTo(inboxPathRenamed);
});
softly.assertThat(testee.list(mailboxSession))
.containsExactlyInAnyOrder(inboxPath, inboxPathRenamed);
}));
}
@Disabled("JAMES-3056 creating mailbox returns an empty Optional")
@Test
void deleteMailboxAfterAFailedRenameShouldCreateThatMailboxWhenMailboxDaoFails(CassandraCluster cassandra) throws Exception {
MailboxId inboxId = testee.createMailbox(inboxPath, mailboxSession)
.get();
cassandra.getConf().registerScenario(fail()
.times(TRY_COUNT_BEFORE_FAILURE)
.whenQueryStartsWith("INSERT INTO mailbox (id,name,uidvalidity,mailboxbase) VALUES (:id,:name,:uidvalidity,:mailboxbase)"));
doQuietly(() -> testee.renameMailbox(inboxPath, inboxPathRenamed, mailboxSession));
cassandra.getConf().registerScenario(NOTHING);
testee.deleteMailbox(inboxId, mailboxSession);
assertThat(testee.createMailbox(inboxPathRenamed, mailboxSession))
.isNotEmpty();
}
@Test
void deleteMailboxAfterAFailedRenameShouldCreateThatMailboxWhenMailboxPathDaoFails(CassandraCluster cassandra) throws Exception {
MailboxId inboxId = testee.createMailbox(inboxPath, mailboxSession)
.get();
cassandra.getConf().registerScenario(fail()
.times(TRY_COUNT_BEFORE_FAILURE)
.whenQueryStartsWith("INSERT INTO mailboxPathV2"));
doQuietly(() -> testee.renameMailbox(inboxPath, inboxPathRenamed, mailboxSession));
cassandra.getConf().registerScenario(NOTHING);
testee.deleteMailbox(inboxId, mailboxSession);
MailboxId newMailboxId = testee.createMailbox(inboxPathRenamed, mailboxSession)
.get();
SoftAssertions.assertSoftly(Throwing.consumer(softly -> {
softly.assertThat(testee.search(allMailboxesSearchQuery, mailboxSession).toStream())
.hasOnlyOneElementSatisfying(mailboxMetaData -> {
softly.assertThat(mailboxMetaData.getId()).isEqualTo(newMailboxId);
softly.assertThat(mailboxMetaData.getPath()).isEqualTo(inboxPathRenamed);
});
softly.assertThat(testee.list(mailboxSession))
.containsExactlyInAnyOrder(inboxPathRenamed);
}));
}
}
@Nested
class FailuresOnDeletion {
@Nested
class DeleteOnce {
@Disabled("JAMES-3056 allMailboxesSearchQuery returns empty list")
@Test
void deleteMailboxByPathShouldBeConsistentWhenMailboxDaoFails(CassandraCluster cassandra) throws Exception {
MailboxId inboxId = testee.createMailbox(inboxPath, mailboxSession)
.get();
cassandra.getConf().registerScenario(fail()
.times(TRY_COUNT_BEFORE_FAILURE)
.whenQueryStartsWith("DELETE FROM mailbox WHERE id=:id;"));
doQuietly(() -> testee.deleteMailbox(inboxPath, mailboxSession));
cassandra.getConf().registerScenario(NOTHING);
SoftAssertions.assertSoftly(Throwing.consumer(softly -> {
softly.assertThat(testee.search(allMailboxesSearchQuery, mailboxSession).toStream())
.hasOnlyOneElementSatisfying(mailboxMetaData -> {
softly.assertThat(mailboxMetaData.getId()).isEqualTo(inboxId);
softly.assertThat(mailboxMetaData.getPath()).isEqualTo(inboxPath);
});
softly.assertThat(testee.list(mailboxSession))
.containsExactly(inboxPath);
}));
}
@Disabled("JAMES-3056 allMailboxesSearchQuery returns empty list")
@Test
void deleteMailboxByIdShouldBeConsistentWhenMailboxDaoFails(CassandraCluster cassandra) throws Exception {
MailboxId inboxId = testee.createMailbox(inboxPath, mailboxSession)
.get();
cassandra.getConf().registerScenario(fail()
.times(TRY_COUNT_BEFORE_FAILURE)
.whenQueryStartsWith("DELETE FROM mailbox WHERE id=:id;"));
doQuietly(() -> testee.deleteMailbox(inboxId, mailboxSession));
cassandra.getConf().registerScenario(NOTHING);
SoftAssertions.assertSoftly(Throwing.consumer(softly -> {
softly.assertThat(testee.search(allMailboxesSearchQuery, mailboxSession).toStream())
.hasOnlyOneElementSatisfying(mailboxMetaData -> {
softly.assertThat(mailboxMetaData.getId()).isEqualTo(inboxId);
softly.assertThat(mailboxMetaData.getPath()).isEqualTo(inboxPath);
});
softly.assertThat(testee.list(mailboxSession))
.containsExactly(inboxPath);
}));
}
@Test
@Tag(Unstable.TAG)
//see https://builds.apache.org/blue/organizations/jenkins/james%2FApacheJames/detail/PR-268/32/tests
void deleteMailboxByPathShouldBeConsistentWhenMailboxPathDaoFails(CassandraCluster cassandra) throws Exception {
MailboxId inboxId = testee.createMailbox(inboxPath, mailboxSession)
.get();
cassandra.getConf().registerScenario(fail()
.times(TRY_COUNT_BEFORE_FAILURE)
.whenQueryStartsWith("DELETE FROM mailboxpathv3"));
doQuietly(() -> testee.deleteMailbox(inboxPath, mailboxSession));
cassandra.getConf().registerScenario(NOTHING);
SoftAssertions.assertSoftly(Throwing.consumer(softly -> {
softly.assertThat(testee.search(allMailboxesSearchQuery, mailboxSession).toStream())
.hasOnlyOneElementSatisfying(mailboxMetaData -> {
softly.assertThat(mailboxMetaData.getId()).isEqualTo(inboxId);
softly.assertThat(mailboxMetaData.getPath()).isEqualTo(inboxPath);
});
softly.assertThat(testee.list(mailboxSession))
.containsExactly(inboxPath);
}));
}
@Test
void deleteMailboxByIdShouldBeConsistentWhenMailboxPathDaoFails(CassandraCluster cassandra) throws Exception {
MailboxId inboxId = testee.createMailbox(inboxPath, mailboxSession)
.get();
cassandra.getConf().registerScenario(fail()
.times(TRY_COUNT_BEFORE_FAILURE)
.whenQueryStartsWith("DELETE FROM mailboxpathv3"));
doQuietly(() -> testee.deleteMailbox(inboxId, mailboxSession));
cassandra.getConf().registerScenario(NOTHING);
SoftAssertions.assertSoftly(Throwing.consumer(softly -> {
softly.assertThat(testee.search(allMailboxesSearchQuery, mailboxSession).toStream())
.hasOnlyOneElementSatisfying(mailboxMetaData -> {
softly.assertThat(mailboxMetaData.getId()).isEqualTo(inboxId);
softly.assertThat(mailboxMetaData.getPath()).isEqualTo(inboxPath);
});
softly.assertThat(testee.list(mailboxSession))
.containsExactly(inboxPath);
}));
}
}
@Nested
class DeleteOnceThenCreate {
@Test
void createMailboxShouldCreateWhenMailboxDaoFailsOnDeleteByPath(CassandraCluster cassandra) throws Exception {
testee.createMailbox(inboxPath, mailboxSession);
cassandra.getConf().registerScenario(fail()
.times(TRY_COUNT_BEFORE_FAILURE)
.whenQueryStartsWith("DELETE FROM mailbox WHERE id=:id;"));
doQuietly(() -> testee.deleteMailbox(inboxPath, mailboxSession));
cassandra.getConf().registerScenario(NOTHING);
MailboxId inboxId = testee.createMailbox(inboxPath, mailboxSession)
.get();
SoftAssertions.assertSoftly(Throwing.consumer(softly -> {
softly.assertThat(testee.search(allMailboxesSearchQuery, mailboxSession).toStream())
.hasOnlyOneElementSatisfying(mailboxMetaData -> {
softly.assertThat(mailboxMetaData.getId()).isEqualTo(inboxId);
softly.assertThat(mailboxMetaData.getPath()).isEqualTo(inboxPath);
});
softly.assertThat(testee.list(mailboxSession))
.containsExactly(inboxPath);
}));
}
@Test
void createMailboxShouldCreateWhenMailboxDaoFailsOnDeleteById(CassandraCluster cassandra) throws Exception {
MailboxId inboxId = testee.createMailbox(inboxPath, mailboxSession)
.get();
cassandra.getConf().registerScenario(fail()
.times(TRY_COUNT_BEFORE_FAILURE)
.whenQueryStartsWith("DELETE FROM mailbox WHERE id=:id;"));
doQuietly(() -> testee.deleteMailbox(inboxId, mailboxSession));
cassandra.getConf().registerScenario(NOTHING);
MailboxId inboxNewId = testee.createMailbox(inboxPath, mailboxSession)
.get();
SoftAssertions.assertSoftly(Throwing.consumer(softly -> {
softly.assertThat(testee.search(allMailboxesSearchQuery, mailboxSession).toStream())
.hasOnlyOneElementSatisfying(mailboxMetaData -> {
softly.assertThat(mailboxMetaData.getId()).isEqualTo(inboxNewId);
softly.assertThat(mailboxMetaData.getPath()).isEqualTo(inboxPath);
});
softly.assertThat(testee.list(mailboxSession))
.containsExactly(inboxPath);
}));
}
@Disabled("JAMES-3056 cannot create because mailbox already exists")
@Test
void createMailboxShouldCreateWhenMailboxPathDaoFailsOnDeleteByPath(CassandraCluster cassandra) throws Exception {
testee.createMailbox(inboxPath, mailboxSession);
cassandra.getConf().registerScenario(fail()
.times(TRY_COUNT_BEFORE_FAILURE)
.whenQueryStartsWith("DELETE FROM mailboxPathV2"));
doQuietly(() -> testee.deleteMailbox(inboxPath, mailboxSession));
cassandra.getConf().registerScenario(NOTHING);
MailboxId inboxNewId = testee.createMailbox(inboxPath, mailboxSession)
.get();
SoftAssertions.assertSoftly(Throwing.consumer(softly -> {
softly.assertThat(testee.search(allMailboxesSearchQuery, mailboxSession).toStream())
.hasOnlyOneElementSatisfying(mailboxMetaData -> {
softly.assertThat(mailboxMetaData.getId()).isEqualTo(inboxNewId);
softly.assertThat(mailboxMetaData.getPath()).isEqualTo(inboxPath);
});
softly.assertThat(testee.list(mailboxSession))
.containsExactly(inboxPath);
}));
}
@Disabled("JAMES-3056 cannot create because mailbox already exists")
@Test
void createMailboxShouldCreateWhenMailboxPathDaoFailsOnDeleteById(CassandraCluster cassandra) throws Exception {
MailboxId inboxId = testee.createMailbox(inboxPath, mailboxSession)
.get();
cassandra.getConf().registerScenario(fail()
.times(TRY_COUNT_BEFORE_FAILURE)
.whenQueryStartsWith("DELETE FROM mailboxPathV2"));
doQuietly(() -> testee.deleteMailbox(inboxId, mailboxSession));
cassandra.getConf().registerScenario(NOTHING);
MailboxId inboxNewId = testee.createMailbox(inboxPath, mailboxSession)
.get();
SoftAssertions.assertSoftly(Throwing.consumer(softly -> {
softly.assertThat(testee.search(allMailboxesSearchQuery, mailboxSession).toStream())
.hasOnlyOneElementSatisfying(mailboxMetaData -> {
softly.assertThat(mailboxMetaData.getId()).isEqualTo(inboxNewId);
softly.assertThat(mailboxMetaData.getPath()).isEqualTo(inboxPath);
});
softly.assertThat(testee.list(mailboxSession))
.containsExactly(inboxPath);
}));
}
}
@Nested
class DeleteTwice {
@Disabled("JAMES-3056 list() returns one element with inboxPath")
@Test
void deleteMailboxByPathShouldDeleteWhenMailboxDaoFails(CassandraCluster cassandra) throws Exception {
testee.createMailbox(inboxPath, mailboxSession);
cassandra.getConf().registerScenario(fail()
.times(TRY_COUNT_BEFORE_FAILURE)
.whenQueryStartsWith("DELETE FROM mailbox WHERE id=:id;"));
doQuietly(() -> testee.deleteMailbox(inboxPath, mailboxSession));
cassandra.getConf().registerScenario(NOTHING);
doQuietly(() -> testee.deleteMailbox(inboxPath, mailboxSession));
SoftAssertions.assertSoftly(Throwing.consumer(softly -> {
softly.assertThat(testee.search(allMailboxesSearchQuery, mailboxSession).toStream())
.isEmpty();
softly.assertThat(testee.list(mailboxSession))
.isEmpty();
}));
}
@Test
void deleteMailboxByIdShouldDeleteWhenMailboxDaoFails(CassandraCluster cassandra) throws Exception {
MailboxId inboxId = testee.createMailbox(inboxPath, mailboxSession)
.get();
cassandra.getConf().registerScenario(fail()
.times(TRY_COUNT_BEFORE_FAILURE)
.whenQueryStartsWith("DELETE FROM mailbox WHERE id=:id;"));
doQuietly(() -> testee.deleteMailbox(inboxId, mailboxSession));
cassandra.getConf().registerScenario(NOTHING);
doQuietly(() -> testee.deleteMailbox(inboxId, mailboxSession));
SoftAssertions.assertSoftly(Throwing.consumer(softly -> {
softly.assertThat(testee.search(allMailboxesSearchQuery, mailboxSession).toStream())
.isEmpty();
softly.assertThat(testee.list(mailboxSession))
.isEmpty();
}));
}
@Test
void deleteMailboxByPathShouldDeleteWhenMailboxPathDaoFails(CassandraCluster cassandra) throws Exception {
testee.createMailbox(inboxPath, mailboxSession);
cassandra.getConf().registerScenario(fail()
.times(TRY_COUNT_BEFORE_FAILURE)
.whenQueryStartsWith("DELETE FROM mailboxPathV3"));
doQuietly(() -> testee.deleteMailbox(inboxPath, mailboxSession));
cassandra.getConf().registerScenario(NOTHING);
doQuietly(() -> testee.deleteMailbox(inboxPath, mailboxSession));
SoftAssertions.assertSoftly(Throwing.consumer(softly -> {
softly.assertThat(testee.search(allMailboxesSearchQuery, mailboxSession).toStream())
.isEmpty();
softly.assertThat(testee.list(mailboxSession))
.isEmpty();
}));
}
@Test
void deleteMailboxByIdShouldDeleteWhenMailboxPathDaoFails(CassandraCluster cassandra) throws Exception {
MailboxId inboxId = testee.createMailbox(inboxPath, mailboxSession)
.get();
cassandra.getConf().registerScenario(fail()
.times(TRY_COUNT_BEFORE_FAILURE)
.whenQueryStartsWith("DELETE FROM mailboxPathV2"));
doQuietly(() -> testee.deleteMailbox(inboxId, mailboxSession));
cassandra.getConf().registerScenario(NOTHING);
doQuietly(() -> testee.deleteMailbox(inboxId, mailboxSession));
SoftAssertions.assertSoftly(Throwing.consumer(softly -> {
softly.assertThat(testee.search(allMailboxesSearchQuery, mailboxSession).toStream())
.isEmpty();
softly.assertThat(testee.list(mailboxSession))
.isEmpty();
}));
}
}
@Nested
class DeleteTwiceThenCreate {
@Test
void createMailboxShouldCreateWhenMailboxDaoFailsOnDeleteByPath(CassandraCluster cassandra) throws Exception {
testee.createMailbox(inboxPath, mailboxSession);
cassandra.getConf().registerScenario(fail()
.times(TRY_COUNT_BEFORE_FAILURE)
.whenQueryStartsWith("DELETE FROM mailbox WHERE id=:id;"));
doQuietly(() -> testee.deleteMailbox(inboxPath, mailboxSession));
cassandra.getConf().registerScenario(NOTHING);
doQuietly(() -> testee.deleteMailbox(inboxPath, mailboxSession));
MailboxId inboxId = testee.createMailbox(inboxPath, mailboxSession)
.get();
SoftAssertions.assertSoftly(Throwing.consumer(softly -> {
softly.assertThat(testee.search(allMailboxesSearchQuery, mailboxSession).toStream())
.hasOnlyOneElementSatisfying(mailboxMetaData -> {
softly.assertThat(mailboxMetaData.getId()).isEqualTo(inboxId);
softly.assertThat(mailboxMetaData.getPath()).isEqualTo(inboxPath);
});
softly.assertThat(testee.list(mailboxSession))
.containsExactly(inboxPath);
}));
}
@Test
void createMailboxShouldCreateWhenMailboxDaoFailsOnDeleteById(CassandraCluster cassandra) throws Exception {
MailboxId inboxId = testee.createMailbox(inboxPath, mailboxSession)
.get();
cassandra.getConf().registerScenario(fail()
.times(TRY_COUNT_BEFORE_FAILURE)
.whenQueryStartsWith("DELETE FROM mailbox WHERE id=:id;"));
doQuietly(() -> testee.deleteMailbox(inboxId, mailboxSession));
cassandra.getConf().registerScenario(NOTHING);
doQuietly(() -> testee.deleteMailbox(inboxId, mailboxSession));
MailboxId inboxNewId = testee.createMailbox(inboxPath, mailboxSession)
.get();
SoftAssertions.assertSoftly(Throwing.consumer(softly -> {
softly.assertThat(testee.search(allMailboxesSearchQuery, mailboxSession).toStream())
.hasOnlyOneElementSatisfying(mailboxMetaData -> {
softly.assertThat(mailboxMetaData.getId()).isEqualTo(inboxNewId);
softly.assertThat(mailboxMetaData.getPath()).isEqualTo(inboxPath);
});
softly.assertThat(testee.list(mailboxSession))
.containsExactly(inboxPath);
}));
}
@Tag(Unstable.TAG)
@Test
void createMailboxShouldCreateWhenMailboxPathDaoFailsOnDeleteByPath(CassandraCluster cassandra) throws Exception {
testee.createMailbox(inboxPath, mailboxSession);
cassandra.getConf().registerScenario(fail()
.times(TRY_COUNT_BEFORE_FAILURE)
.whenQueryStartsWith("DELETE FROM mailboxPathV2"));
doQuietly(() -> testee.deleteMailbox(inboxPath, mailboxSession));
cassandra.getConf().registerScenario(NOTHING);
doQuietly(() -> testee.deleteMailbox(inboxPath, mailboxSession));
MailboxId inboxNewId = testee.createMailbox(inboxPath, mailboxSession)
.get();
SoftAssertions.assertSoftly(Throwing.consumer(softly -> {
softly.assertThat(testee.search(allMailboxesSearchQuery, mailboxSession).toStream())
.hasOnlyOneElementSatisfying(mailboxMetaData -> {
softly.assertThat(mailboxMetaData.getId()).isEqualTo(inboxNewId);
softly.assertThat(mailboxMetaData.getPath()).isEqualTo(inboxPath);
});
softly.assertThat(testee.list(mailboxSession))
.containsExactly(inboxPath);
}));
}
@Test
void createMailboxShouldCreateWhenMailboxPathDaoFailsOnDeleteById(CassandraCluster cassandra) throws Exception {
MailboxId inboxId = testee.createMailbox(inboxPath, mailboxSession)
.get();
cassandra.getConf().registerScenario(fail()
.times(TRY_COUNT_BEFORE_FAILURE)
.whenQueryStartsWith("DELETE FROM mailboxPathV2"));
doQuietly(() -> testee.deleteMailbox(inboxId, mailboxSession));
cassandra.getConf().registerScenario(NOTHING);
doQuietly(() -> testee.deleteMailbox(inboxId, mailboxSession));
MailboxId inboxNewId = testee.createMailbox(inboxPath, mailboxSession)
.get();
SoftAssertions.assertSoftly(Throwing.consumer(softly -> {
softly.assertThat(testee.search(allMailboxesSearchQuery, mailboxSession).toStream())
.hasOnlyOneElementSatisfying(mailboxMetaData -> {
softly.assertThat(mailboxMetaData.getId()).isEqualTo(inboxNewId);
softly.assertThat(mailboxMetaData.getPath()).isEqualTo(inboxPath);
});
softly.assertThat(testee.list(mailboxSession))
.containsExactly(inboxPath);
}));
}
}
}
private void doQuietly(ThrowingRunnable runnable) {
try {
runnable.run();
} catch (Throwable th) {
// ignore
}
}
}
|
googleapis/google-cloud-java | 37,921 | java-cloudsecuritycompliance/proto-google-cloud-cloudsecuritycompliance-v1/src/main/java/com/google/cloud/cloudsecuritycompliance/v1/ListFrameworksResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/cloudsecuritycompliance/v1/config.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.cloudsecuritycompliance.v1;
/**
*
*
* <pre>
* Response message for listing Frameworks.
* Contains a paginated list of Framework resources.
* </pre>
*
* Protobuf type {@code google.cloud.cloudsecuritycompliance.v1.ListFrameworksResponse}
*/
public final class ListFrameworksResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.cloudsecuritycompliance.v1.ListFrameworksResponse)
ListFrameworksResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListFrameworksResponse.newBuilder() to construct.
// Builder-based constructor used by newBuilder()/build(); field storage is
// handled by the generated-message infrastructure.
private ListFrameworksResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
// Default-instance constructor: initializes the repeated and string fields to
// their canonical empty values so the shared default instance is immutable.
private ListFrameworksResponse() {
frameworks_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
// Invoked reflectively by the protobuf runtime to allocate fresh instances.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListFrameworksResponse();
}
// Returns the protobuf descriptor for this message type (used by reflection
// and dynamic-message APIs).
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.cloudsecuritycompliance.v1.ConfigProto
.internal_static_google_cloud_cloudsecuritycompliance_v1_ListFrameworksResponse_descriptor;
}
// Binds the generated field accessors to this class and its Builder.
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.cloudsecuritycompliance.v1.ConfigProto
.internal_static_google_cloud_cloudsecuritycompliance_v1_ListFrameworksResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.cloudsecuritycompliance.v1.ListFrameworksResponse.class,
com.google.cloud.cloudsecuritycompliance.v1.ListFrameworksResponse.Builder.class);
}
public static final int FRAMEWORKS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
// Backing list for the repeated 'frameworks' field: Collections.emptyList()
// for the default instance, otherwise populated by the Builder.
private java.util.List<com.google.cloud.cloudsecuritycompliance.v1.Framework> frameworks_;
/**
 *
 *
 * <pre>
 * The list of Framework resources.
 * </pre>
 *
 * <code>repeated .google.cloud.cloudsecuritycompliance.v1.Framework frameworks = 1;</code>
 */
@java.lang.Override
public java.util.List<com.google.cloud.cloudsecuritycompliance.v1.Framework> getFrameworksList() {
return frameworks_;
}
/**
 *
 *
 * <pre>
 * The list of Framework resources.
 * </pre>
 *
 * <code>repeated .google.cloud.cloudsecuritycompliance.v1.Framework frameworks = 1;</code>
 */
@java.lang.Override
public java.util.List<? extends com.google.cloud.cloudsecuritycompliance.v1.FrameworkOrBuilder>
getFrameworksOrBuilderList() {
// Same backing list viewed through the OrBuilder interface.
return frameworks_;
}
/**
 *
 *
 * <pre>
 * The list of Framework resources.
 * </pre>
 *
 * <code>repeated .google.cloud.cloudsecuritycompliance.v1.Framework frameworks = 1;</code>
 */
@java.lang.Override
public int getFrameworksCount() {
return frameworks_.size();
}
/**
 *
 *
 * <pre>
 * The list of Framework resources.
 * </pre>
 *
 * <code>repeated .google.cloud.cloudsecuritycompliance.v1.Framework frameworks = 1;</code>
 */
@java.lang.Override
public com.google.cloud.cloudsecuritycompliance.v1.Framework getFrameworks(int index) {
return frameworks_.get(index);
}
/**
 *
 *
 * <pre>
 * The list of Framework resources.
 * </pre>
 *
 * <code>repeated .google.cloud.cloudsecuritycompliance.v1.Framework frameworks = 1;</code>
 */
@java.lang.Override
public com.google.cloud.cloudsecuritycompliance.v1.FrameworkOrBuilder getFrameworksOrBuilder(
int index) {
return frameworks_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
// Holds either a java.lang.String or a ByteString; decoded lazily and cached
// on first access (standard protobuf benign-race idiom: a concurrent access
// may at worst decode redundantly, never observe an inconsistent value).
private volatile java.lang.Object nextPageToken_ = "";
/**
 *
 *
 * <pre>
 * A pagination token. To retrieve the next page of results, call the method
 * again with this token.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return The nextPageToken.
 */
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// Cache the decoded String so subsequent calls return it directly.
nextPageToken_ = s;
return s;
}
}
/**
 *
 *
 * <pre>
 * A pagination token. To retrieve the next page of results, call the method
 * again with this token.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return The bytes for nextPageToken.
 */
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
// Cache the encoded ByteString for subsequent byte-level access.
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// Memoization flag: -1 = not yet computed, 0 = not initialized, 1 = initialized.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
// This message declares no required fields, so initialization always succeeds;
// the result is cached after the first call.
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
// Serializes fields in field-number order: frameworks (1), then
// next_page_token (2), then any unknown fields preserved from parsing.
for (int i = 0; i < frameworks_.size(); i++) {
output.writeMessage(1, frameworks_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
// Memoized in memoizedSize (-1 = not yet computed); caching is safe because
// the message is immutable once built. Must mirror writeTo() exactly.
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < frameworks_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, frameworks_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  // Identity fast path.
  if (obj == this) {
    return true;
  }
  // Non-ListFrameworksResponse instances defer to the generated superclass.
  if (!(obj instanceof com.google.cloud.cloudsecuritycompliance.v1.ListFrameworksResponse)) {
    return super.equals(obj);
  }
  com.google.cloud.cloudsecuritycompliance.v1.ListFrameworksResponse that =
      (com.google.cloud.cloudsecuritycompliance.v1.ListFrameworksResponse) obj;
  // Field-by-field comparison, including unknown fields, in declaration order.
  return getFrameworksList().equals(that.getFrameworksList())
      && getNextPageToken().equals(that.getNextPageToken())
      && getUnknownFields().equals(that.getUnknownFields());
}
@java.lang.Override
public int hashCode() {
  // Messages are immutable, so the hash is computed once and memoized
  // (0 is the sentinel for "not yet computed").
  int cached = memoizedHashCode;
  if (cached != 0) {
    return cached;
  }
  // The 41/19/37/53/29 multipliers follow the protoc-generated hashing scheme
  // and must not change, or hashes diverge from other generated copies.
  int result = 41;
  result = (19 * result) + getDescriptor().hashCode();
  if (getFrameworksCount() > 0) {
    result = (37 * result) + FRAMEWORKS_FIELD_NUMBER;
    result = (53 * result) + getFrameworksList().hashCode();
  }
  result = (37 * result) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
  result = (53 * result) + getNextPageToken().hashCode();
  result = (29 * result) + getUnknownFields().hashCode();
  memoizedHashCode = result;
  return result;
}
// Standard generated parse entry points. Buffer/byte[]/ByteString overloads
// delegate straight to PARSER; stream overloads route through the
// GeneratedMessageV3 helpers so IOExceptions propagate unwrapped.
public static com.google.cloud.cloudsecuritycompliance.v1.ListFrameworksResponse parseFrom(
    java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.cloudsecuritycompliance.v1.ListFrameworksResponse parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.cloudsecuritycompliance.v1.ListFrameworksResponse parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.cloudsecuritycompliance.v1.ListFrameworksResponse parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.cloudsecuritycompliance.v1.ListFrameworksResponse parseFrom(
    byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.cloudsecuritycompliance.v1.ListFrameworksResponse parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.cloudsecuritycompliance.v1.ListFrameworksResponse parseFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.cloudsecuritycompliance.v1.ListFrameworksResponse parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

// Delimited variants read a length-prefixed message, for streams holding
// multiple messages back to back.
public static com.google.cloud.cloudsecuritycompliance.v1.ListFrameworksResponse
    parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}

public static com.google.cloud.cloudsecuritycompliance.v1.ListFrameworksResponse
    parseDelimitedFrom(
        java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.cloud.cloudsecuritycompliance.v1.ListFrameworksResponse parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.cloudsecuritycompliance.v1.ListFrameworksResponse parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
// Builder factory methods. All builders start from DEFAULT_INSTANCE so that
// unset fields carry proto3 defaults.
@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}

public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}

// Returns a builder pre-populated with {@code prototype}'s field values.
public static Builder newBuilder(
    com.google.cloud.cloudsecuritycompliance.v1.ListFrameworksResponse prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}

@java.lang.Override
public Builder toBuilder() {
  // The default instance has nothing to merge, so skip the copy.
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}

@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
 *
 *
 * <pre>
 * Response message for listing Frameworks.
 * Contains a paginated list of Framework resources.
 * </pre>
 *
 * Protobuf type {@code google.cloud.cloudsecuritycompliance.v1.ListFrameworksResponse}
 */
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
    implements
    // @@protoc_insertion_point(builder_implements:google.cloud.cloudsecuritycompliance.v1.ListFrameworksResponse)
    com.google.cloud.cloudsecuritycompliance.v1.ListFrameworksResponseOrBuilder {
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.cloudsecuritycompliance.v1.ConfigProto
        .internal_static_google_cloud_cloudsecuritycompliance_v1_ListFrameworksResponse_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.cloudsecuritycompliance.v1.ConfigProto
        .internal_static_google_cloud_cloudsecuritycompliance_v1_ListFrameworksResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.cloudsecuritycompliance.v1.ListFrameworksResponse.class,
            com.google.cloud.cloudsecuritycompliance.v1.ListFrameworksResponse.Builder.class);
  }

  // Construct using
  // com.google.cloud.cloudsecuritycompliance.v1.ListFrameworksResponse.newBuilder()
  private Builder() {}

  private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    super(parent);
  }

  // Resets all fields to their proto3 defaults and clears the dirty bits.
  @java.lang.Override
  public Builder clear() {
    super.clear();
    bitField0_ = 0;
    if (frameworksBuilder_ == null) {
      frameworks_ = java.util.Collections.emptyList();
    } else {
      frameworks_ = null;
      frameworksBuilder_.clear();
    }
    bitField0_ = (bitField0_ & ~0x00000001);
    nextPageToken_ = "";
    return this;
  }

  @java.lang.Override
  public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
    return com.google.cloud.cloudsecuritycompliance.v1.ConfigProto
        .internal_static_google_cloud_cloudsecuritycompliance_v1_ListFrameworksResponse_descriptor;
  }

  @java.lang.Override
  public com.google.cloud.cloudsecuritycompliance.v1.ListFrameworksResponse
      getDefaultInstanceForType() {
    return com.google.cloud.cloudsecuritycompliance.v1.ListFrameworksResponse
        .getDefaultInstance();
  }

  @java.lang.Override
  public com.google.cloud.cloudsecuritycompliance.v1.ListFrameworksResponse build() {
    com.google.cloud.cloudsecuritycompliance.v1.ListFrameworksResponse result = buildPartial();
    if (!result.isInitialized()) {
      throw newUninitializedMessageException(result);
    }
    return result;
  }

  @java.lang.Override
  public com.google.cloud.cloudsecuritycompliance.v1.ListFrameworksResponse buildPartial() {
    com.google.cloud.cloudsecuritycompliance.v1.ListFrameworksResponse result =
        new com.google.cloud.cloudsecuritycompliance.v1.ListFrameworksResponse(this);
    buildPartialRepeatedFields(result);
    if (bitField0_ != 0) {
      buildPartial0(result);
    }
    onBuilt();
    return result;
  }

  // Transfers the repeated frameworks field into the message, freezing the
  // builder-owned list (unmodifiable) so the built message stays immutable.
  private void buildPartialRepeatedFields(
      com.google.cloud.cloudsecuritycompliance.v1.ListFrameworksResponse result) {
    if (frameworksBuilder_ == null) {
      if (((bitField0_ & 0x00000001) != 0)) {
        frameworks_ = java.util.Collections.unmodifiableList(frameworks_);
        bitField0_ = (bitField0_ & ~0x00000001);
      }
      result.frameworks_ = frameworks_;
    } else {
      result.frameworks_ = frameworksBuilder_.build();
    }
  }

  // Copies singular fields that were explicitly set (per bitField0_ bits).
  private void buildPartial0(
      com.google.cloud.cloudsecuritycompliance.v1.ListFrameworksResponse result) {
    int from_bitField0_ = bitField0_;
    if (((from_bitField0_ & 0x00000002) != 0)) {
      result.nextPageToken_ = nextPageToken_;
    }
  }

  @java.lang.Override
  public Builder clone() {
    return super.clone();
  }

  @java.lang.Override
  public Builder setField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.setField(field, value);
  }

  @java.lang.Override
  public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
    return super.clearField(field);
  }

  @java.lang.Override
  public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
    return super.clearOneof(oneof);
  }

  @java.lang.Override
  public Builder setRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
    return super.setRepeatedField(field, index, value);
  }

  @java.lang.Override
  public Builder addRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.addRepeatedField(field, value);
  }

  @java.lang.Override
  public Builder mergeFrom(com.google.protobuf.Message other) {
    if (other instanceof com.google.cloud.cloudsecuritycompliance.v1.ListFrameworksResponse) {
      return mergeFrom(
          (com.google.cloud.cloudsecuritycompliance.v1.ListFrameworksResponse) other);
    } else {
      super.mergeFrom(other);
      return this;
    }
  }

  // Merges another ListFrameworksResponse: repeated fields are concatenated,
  // singular fields overwrite only when set on {@code other}.
  public Builder mergeFrom(
      com.google.cloud.cloudsecuritycompliance.v1.ListFrameworksResponse other) {
    if (other
        == com.google.cloud.cloudsecuritycompliance.v1.ListFrameworksResponse
            .getDefaultInstance()) return this;
    if (frameworksBuilder_ == null) {
      if (!other.frameworks_.isEmpty()) {
        if (frameworks_.isEmpty()) {
          // Adopt other's (immutable) list directly; it will be copied on first mutation.
          frameworks_ = other.frameworks_;
          bitField0_ = (bitField0_ & ~0x00000001);
        } else {
          ensureFrameworksIsMutable();
          frameworks_.addAll(other.frameworks_);
        }
        onChanged();
      }
    } else {
      if (!other.frameworks_.isEmpty()) {
        if (frameworksBuilder_.isEmpty()) {
          frameworksBuilder_.dispose();
          frameworksBuilder_ = null;
          frameworks_ = other.frameworks_;
          bitField0_ = (bitField0_ & ~0x00000001);
          frameworksBuilder_ =
              com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                  ? getFrameworksFieldBuilder()
                  : null;
        } else {
          frameworksBuilder_.addAllMessages(other.frameworks_);
        }
      }
    }
    if (!other.getNextPageToken().isEmpty()) {
      nextPageToken_ = other.nextPageToken_;
      bitField0_ |= 0x00000002;
      onChanged();
    }
    this.mergeUnknownFields(other.getUnknownFields());
    onChanged();
    return this;
  }

  @java.lang.Override
  public final boolean isInitialized() {
    return true;
  }

  // Parses fields from the wire, tag by tag, until EOF or an end-group tag.
  @java.lang.Override
  public Builder mergeFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 10:
            {
              com.google.cloud.cloudsecuritycompliance.v1.Framework m =
                  input.readMessage(
                      com.google.cloud.cloudsecuritycompliance.v1.Framework.parser(),
                      extensionRegistry);
              if (frameworksBuilder_ == null) {
                ensureFrameworksIsMutable();
                frameworks_.add(m);
              } else {
                frameworksBuilder_.addMessage(m);
              }
              break;
            } // case 10
          case 18:
            {
              nextPageToken_ = input.readStringRequireUtf8();
              bitField0_ |= 0x00000002;
              break;
            } // case 18
          default:
            {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
        } // switch (tag)
      } // while (!done)
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.unwrapIOException();
    } finally {
      onChanged();
    } // finally
    return this;
  }

  // Dirty-bit tracking: 0x00000001 = frameworks_ list is builder-owned/mutable,
  // 0x00000002 = nextPageToken_ has been explicitly set.
  private int bitField0_;

  // The frameworks field lives in exactly one of two representations:
  // this plain list, or frameworksBuilder_ below (once a nested builder is requested).
  private java.util.List<com.google.cloud.cloudsecuritycompliance.v1.Framework> frameworks_ =
      java.util.Collections.emptyList();

  // Copy-on-write: promote the (possibly shared/immutable) list to a private
  // ArrayList before the first mutation.
  private void ensureFrameworksIsMutable() {
    if (!((bitField0_ & 0x00000001) != 0)) {
      frameworks_ =
          new java.util.ArrayList<com.google.cloud.cloudsecuritycompliance.v1.Framework>(
              frameworks_);
      bitField0_ |= 0x00000001;
    }
  }

  private com.google.protobuf.RepeatedFieldBuilderV3<
          com.google.cloud.cloudsecuritycompliance.v1.Framework,
          com.google.cloud.cloudsecuritycompliance.v1.Framework.Builder,
          com.google.cloud.cloudsecuritycompliance.v1.FrameworkOrBuilder>
      frameworksBuilder_;

  /**
   *
   *
   * <pre>
   * The list of Framework resources.
   * </pre>
   *
   * <code>repeated .google.cloud.cloudsecuritycompliance.v1.Framework frameworks = 1;</code>
   */
  public java.util.List<com.google.cloud.cloudsecuritycompliance.v1.Framework>
      getFrameworksList() {
    if (frameworksBuilder_ == null) {
      return java.util.Collections.unmodifiableList(frameworks_);
    } else {
      return frameworksBuilder_.getMessageList();
    }
  }

  /**
   *
   *
   * <pre>
   * The list of Framework resources.
   * </pre>
   *
   * <code>repeated .google.cloud.cloudsecuritycompliance.v1.Framework frameworks = 1;</code>
   */
  public int getFrameworksCount() {
    if (frameworksBuilder_ == null) {
      return frameworks_.size();
    } else {
      return frameworksBuilder_.getCount();
    }
  }

  /**
   *
   *
   * <pre>
   * The list of Framework resources.
   * </pre>
   *
   * <code>repeated .google.cloud.cloudsecuritycompliance.v1.Framework frameworks = 1;</code>
   */
  public com.google.cloud.cloudsecuritycompliance.v1.Framework getFrameworks(int index) {
    if (frameworksBuilder_ == null) {
      return frameworks_.get(index);
    } else {
      return frameworksBuilder_.getMessage(index);
    }
  }

  /**
   *
   *
   * <pre>
   * The list of Framework resources.
   * </pre>
   *
   * <code>repeated .google.cloud.cloudsecuritycompliance.v1.Framework frameworks = 1;</code>
   */
  public Builder setFrameworks(
      int index, com.google.cloud.cloudsecuritycompliance.v1.Framework value) {
    if (frameworksBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureFrameworksIsMutable();
      frameworks_.set(index, value);
      onChanged();
    } else {
      frameworksBuilder_.setMessage(index, value);
    }
    return this;
  }

  /**
   *
   *
   * <pre>
   * The list of Framework resources.
   * </pre>
   *
   * <code>repeated .google.cloud.cloudsecuritycompliance.v1.Framework frameworks = 1;</code>
   */
  public Builder setFrameworks(
      int index, com.google.cloud.cloudsecuritycompliance.v1.Framework.Builder builderForValue) {
    if (frameworksBuilder_ == null) {
      ensureFrameworksIsMutable();
      frameworks_.set(index, builderForValue.build());
      onChanged();
    } else {
      frameworksBuilder_.setMessage(index, builderForValue.build());
    }
    return this;
  }

  /**
   *
   *
   * <pre>
   * The list of Framework resources.
   * </pre>
   *
   * <code>repeated .google.cloud.cloudsecuritycompliance.v1.Framework frameworks = 1;</code>
   */
  public Builder addFrameworks(com.google.cloud.cloudsecuritycompliance.v1.Framework value) {
    if (frameworksBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureFrameworksIsMutable();
      frameworks_.add(value);
      onChanged();
    } else {
      frameworksBuilder_.addMessage(value);
    }
    return this;
  }

  /**
   *
   *
   * <pre>
   * The list of Framework resources.
   * </pre>
   *
   * <code>repeated .google.cloud.cloudsecuritycompliance.v1.Framework frameworks = 1;</code>
   */
  public Builder addFrameworks(
      int index, com.google.cloud.cloudsecuritycompliance.v1.Framework value) {
    if (frameworksBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureFrameworksIsMutable();
      frameworks_.add(index, value);
      onChanged();
    } else {
      frameworksBuilder_.addMessage(index, value);
    }
    return this;
  }

  /**
   *
   *
   * <pre>
   * The list of Framework resources.
   * </pre>
   *
   * <code>repeated .google.cloud.cloudsecuritycompliance.v1.Framework frameworks = 1;</code>
   */
  public Builder addFrameworks(
      com.google.cloud.cloudsecuritycompliance.v1.Framework.Builder builderForValue) {
    if (frameworksBuilder_ == null) {
      ensureFrameworksIsMutable();
      frameworks_.add(builderForValue.build());
      onChanged();
    } else {
      frameworksBuilder_.addMessage(builderForValue.build());
    }
    return this;
  }

  /**
   *
   *
   * <pre>
   * The list of Framework resources.
   * </pre>
   *
   * <code>repeated .google.cloud.cloudsecuritycompliance.v1.Framework frameworks = 1;</code>
   */
  public Builder addFrameworks(
      int index, com.google.cloud.cloudsecuritycompliance.v1.Framework.Builder builderForValue) {
    if (frameworksBuilder_ == null) {
      ensureFrameworksIsMutable();
      frameworks_.add(index, builderForValue.build());
      onChanged();
    } else {
      frameworksBuilder_.addMessage(index, builderForValue.build());
    }
    return this;
  }

  /**
   *
   *
   * <pre>
   * The list of Framework resources.
   * </pre>
   *
   * <code>repeated .google.cloud.cloudsecuritycompliance.v1.Framework frameworks = 1;</code>
   */
  public Builder addAllFrameworks(
      java.lang.Iterable<? extends com.google.cloud.cloudsecuritycompliance.v1.Framework>
          values) {
    if (frameworksBuilder_ == null) {
      ensureFrameworksIsMutable();
      com.google.protobuf.AbstractMessageLite.Builder.addAll(values, frameworks_);
      onChanged();
    } else {
      frameworksBuilder_.addAllMessages(values);
    }
    return this;
  }

  /**
   *
   *
   * <pre>
   * The list of Framework resources.
   * </pre>
   *
   * <code>repeated .google.cloud.cloudsecuritycompliance.v1.Framework frameworks = 1;</code>
   */
  public Builder clearFrameworks() {
    if (frameworksBuilder_ == null) {
      frameworks_ = java.util.Collections.emptyList();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
    } else {
      frameworksBuilder_.clear();
    }
    return this;
  }

  /**
   *
   *
   * <pre>
   * The list of Framework resources.
   * </pre>
   *
   * <code>repeated .google.cloud.cloudsecuritycompliance.v1.Framework frameworks = 1;</code>
   */
  public Builder removeFrameworks(int index) {
    if (frameworksBuilder_ == null) {
      ensureFrameworksIsMutable();
      frameworks_.remove(index);
      onChanged();
    } else {
      frameworksBuilder_.remove(index);
    }
    return this;
  }

  /**
   *
   *
   * <pre>
   * The list of Framework resources.
   * </pre>
   *
   * <code>repeated .google.cloud.cloudsecuritycompliance.v1.Framework frameworks = 1;</code>
   */
  public com.google.cloud.cloudsecuritycompliance.v1.Framework.Builder getFrameworksBuilder(
      int index) {
    return getFrameworksFieldBuilder().getBuilder(index);
  }

  /**
   *
   *
   * <pre>
   * The list of Framework resources.
   * </pre>
   *
   * <code>repeated .google.cloud.cloudsecuritycompliance.v1.Framework frameworks = 1;</code>
   */
  public com.google.cloud.cloudsecuritycompliance.v1.FrameworkOrBuilder getFrameworksOrBuilder(
      int index) {
    if (frameworksBuilder_ == null) {
      return frameworks_.get(index);
    } else {
      return frameworksBuilder_.getMessageOrBuilder(index);
    }
  }

  /**
   *
   *
   * <pre>
   * The list of Framework resources.
   * </pre>
   *
   * <code>repeated .google.cloud.cloudsecuritycompliance.v1.Framework frameworks = 1;</code>
   */
  public java.util.List<? extends com.google.cloud.cloudsecuritycompliance.v1.FrameworkOrBuilder>
      getFrameworksOrBuilderList() {
    if (frameworksBuilder_ != null) {
      return frameworksBuilder_.getMessageOrBuilderList();
    } else {
      return java.util.Collections.unmodifiableList(frameworks_);
    }
  }

  /**
   *
   *
   * <pre>
   * The list of Framework resources.
   * </pre>
   *
   * <code>repeated .google.cloud.cloudsecuritycompliance.v1.Framework frameworks = 1;</code>
   */
  public com.google.cloud.cloudsecuritycompliance.v1.Framework.Builder addFrameworksBuilder() {
    return getFrameworksFieldBuilder()
        .addBuilder(com.google.cloud.cloudsecuritycompliance.v1.Framework.getDefaultInstance());
  }

  /**
   *
   *
   * <pre>
   * The list of Framework resources.
   * </pre>
   *
   * <code>repeated .google.cloud.cloudsecuritycompliance.v1.Framework frameworks = 1;</code>
   */
  public com.google.cloud.cloudsecuritycompliance.v1.Framework.Builder addFrameworksBuilder(
      int index) {
    return getFrameworksFieldBuilder()
        .addBuilder(
            index, com.google.cloud.cloudsecuritycompliance.v1.Framework.getDefaultInstance());
  }

  /**
   *
   *
   * <pre>
   * The list of Framework resources.
   * </pre>
   *
   * <code>repeated .google.cloud.cloudsecuritycompliance.v1.Framework frameworks = 1;</code>
   */
  public java.util.List<com.google.cloud.cloudsecuritycompliance.v1.Framework.Builder>
      getFrameworksBuilderList() {
    return getFrameworksFieldBuilder().getBuilderList();
  }

  // Lazily switches the field to the RepeatedFieldBuilderV3 representation;
  // after this, frameworks_ is null and the builder owns the data.
  private com.google.protobuf.RepeatedFieldBuilderV3<
          com.google.cloud.cloudsecuritycompliance.v1.Framework,
          com.google.cloud.cloudsecuritycompliance.v1.Framework.Builder,
          com.google.cloud.cloudsecuritycompliance.v1.FrameworkOrBuilder>
      getFrameworksFieldBuilder() {
    if (frameworksBuilder_ == null) {
      frameworksBuilder_ =
          new com.google.protobuf.RepeatedFieldBuilderV3<
              com.google.cloud.cloudsecuritycompliance.v1.Framework,
              com.google.cloud.cloudsecuritycompliance.v1.Framework.Builder,
              com.google.cloud.cloudsecuritycompliance.v1.FrameworkOrBuilder>(
              frameworks_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
      frameworks_ = null;
    }
    return frameworksBuilder_;
  }

  // String-or-ByteString, same lazy-decode scheme as the message field.
  private java.lang.Object nextPageToken_ = "";

  /**
   *
   *
   * <pre>
   * A pagination token. To retrieve the next page of results, call the method
   * again with this token.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The nextPageToken.
   */
  public java.lang.String getNextPageToken() {
    java.lang.Object ref = nextPageToken_;
    if (!(ref instanceof java.lang.String)) {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      nextPageToken_ = s;
      return s;
    } else {
      return (java.lang.String) ref;
    }
  }

  /**
   *
   *
   * <pre>
   * A pagination token. To retrieve the next page of results, call the method
   * again with this token.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The bytes for nextPageToken.
   */
  public com.google.protobuf.ByteString getNextPageTokenBytes() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      nextPageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  /**
   *
   *
   * <pre>
   * A pagination token. To retrieve the next page of results, call the method
   * again with this token.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @param value The nextPageToken to set.
   * @return This builder for chaining.
   */
  public Builder setNextPageToken(java.lang.String value) {
    if (value == null) {
      throw new NullPointerException();
    }
    nextPageToken_ = value;
    bitField0_ |= 0x00000002;
    onChanged();
    return this;
  }

  /**
   *
   *
   * <pre>
   * A pagination token. To retrieve the next page of results, call the method
   * again with this token.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return This builder for chaining.
   */
  public Builder clearNextPageToken() {
    nextPageToken_ = getDefaultInstance().getNextPageToken();
    bitField0_ = (bitField0_ & ~0x00000002);
    onChanged();
    return this;
  }

  /**
   *
   *
   * <pre>
   * A pagination token. To retrieve the next page of results, call the method
   * again with this token.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @param value The bytes for nextPageToken to set.
   * @return This builder for chaining.
   */
  public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
    if (value == null) {
      throw new NullPointerException();
    }
    checkByteStringIsUtf8(value);
    nextPageToken_ = value;
    bitField0_ |= 0x00000002;
    onChanged();
    return this;
  }

  @java.lang.Override
  public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.setUnknownFields(unknownFields);
  }

  @java.lang.Override
  public final Builder mergeUnknownFields(
      final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.mergeUnknownFields(unknownFields);
  }

  // @@protoc_insertion_point(builder_scope:google.cloud.cloudsecuritycompliance.v1.ListFrameworksResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.cloudsecuritycompliance.v1.ListFrameworksResponse)
// Singleton default instance; all unset fields read from this object.
private static final com.google.cloud.cloudsecuritycompliance.v1.ListFrameworksResponse
    DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.cloudsecuritycompliance.v1.ListFrameworksResponse();
}

public static com.google.cloud.cloudsecuritycompliance.v1.ListFrameworksResponse
    getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Wire-format parser. Delegates to Builder.mergeFrom and attaches the
// partially-built message to any parse exception for diagnostics.
private static final com.google.protobuf.Parser<ListFrameworksResponse> PARSER =
    new com.google.protobuf.AbstractParser<ListFrameworksResponse>() {
      @java.lang.Override
      public ListFrameworksResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
// Public accessors for the shared parser and default instance.
public static com.google.protobuf.Parser<ListFrameworksResponse> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<ListFrameworksResponse> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.cloud.cloudsecuritycompliance.v1.ListFrameworksResponse
    getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
|
google/ExoPlayer | 38,190 | library/core/src/main/java/com/google/android/exoplayer2/audio/MediaCodecAudioRenderer.java | /*
* Copyright (C) 2016 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.audio;
import static com.google.android.exoplayer2.decoder.DecoderReuseEvaluation.DISCARD_REASON_MAX_INPUT_SIZE_EXCEEDED;
import static com.google.android.exoplayer2.decoder.DecoderReuseEvaluation.REUSE_RESULT_NO;
import static com.google.android.exoplayer2.util.Assertions.checkNotNull;
import static com.google.common.base.MoreObjects.firstNonNull;
import static java.lang.Math.max;
import android.annotation.SuppressLint;
import android.content.Context;
import android.media.AudioDeviceInfo;
import android.media.AudioFormat;
import android.media.MediaCodec;
import android.media.MediaCrypto;
import android.media.MediaFormat;
import android.os.Handler;
import androidx.annotation.CallSuper;
import androidx.annotation.DoNotInline;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.ExoPlaybackException;
import com.google.android.exoplayer2.ExoPlayer;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.FormatHolder;
import com.google.android.exoplayer2.PlaybackException;
import com.google.android.exoplayer2.PlaybackParameters;
import com.google.android.exoplayer2.PlayerMessage.Target;
import com.google.android.exoplayer2.RendererCapabilities;
import com.google.android.exoplayer2.audio.AudioRendererEventListener.EventDispatcher;
import com.google.android.exoplayer2.audio.AudioSink.InitializationException;
import com.google.android.exoplayer2.audio.AudioSink.WriteException;
import com.google.android.exoplayer2.decoder.DecoderInputBuffer;
import com.google.android.exoplayer2.decoder.DecoderReuseEvaluation;
import com.google.android.exoplayer2.decoder.DecoderReuseEvaluation.DecoderDiscardReasons;
import com.google.android.exoplayer2.mediacodec.MediaCodecAdapter;
import com.google.android.exoplayer2.mediacodec.MediaCodecInfo;
import com.google.android.exoplayer2.mediacodec.MediaCodecRenderer;
import com.google.android.exoplayer2.mediacodec.MediaCodecSelector;
import com.google.android.exoplayer2.mediacodec.MediaCodecUtil;
import com.google.android.exoplayer2.mediacodec.MediaCodecUtil.DecoderQueryException;
import com.google.android.exoplayer2.util.Log;
import com.google.android.exoplayer2.util.MediaClock;
import com.google.android.exoplayer2.util.MediaFormatUtil;
import com.google.android.exoplayer2.util.MimeTypes;
import com.google.android.exoplayer2.util.Util;
import com.google.common.collect.ImmutableList;
import java.nio.ByteBuffer;
import java.util.List;
/**
* Decodes and renders audio using {@link MediaCodec} and an {@link AudioSink}.
*
* <p>This renderer accepts the following messages sent via {@link ExoPlayer#createMessage(Target)}
* on the playback thread:
*
* <ul>
* <li>Message with type {@link #MSG_SET_VOLUME} to set the volume. The message payload should be
* a {@link Float} with 0 being silence and 1 being unity gain.
* <li>Message with type {@link #MSG_SET_AUDIO_ATTRIBUTES} to set the audio attributes. The
* message payload should be an {@link AudioAttributes} instance that will configure the
* underlying audio track.
* <li>Message with type {@link #MSG_SET_AUX_EFFECT_INFO} to set the auxiliary effect. The message
* payload should be an {@link AuxEffectInfo} instance that will configure the underlying
* audio track.
* <li>Message with type {@link #MSG_SET_SKIP_SILENCE_ENABLED} to enable or disable skipping
* silences. The message payload should be a {@link Boolean}.
* <li>Message with type {@link #MSG_SET_AUDIO_SESSION_ID} to set the audio session ID. The
* message payload should be a session ID {@link Integer} that will be attached to the
* underlying audio track.
* </ul>
*
* @deprecated com.google.android.exoplayer2 is deprecated. Please migrate to androidx.media3 (which
* contains the same ExoPlayer code). See <a
* href="https://developer.android.com/guide/topics/media/media3/getting-started/migration-guide">the
* migration guide</a> for more details, including a script to help with the migration.
*/
@Deprecated
public class MediaCodecAudioRenderer extends MediaCodecRenderer implements MediaClock {
private static final String TAG = "MediaCodecAudioRenderer";

/**
 * Custom key used to indicate bits per sample by some decoders on Vivo devices. For example
 * OMX.vivo.alac.decoder on the Vivo Z1 Pro.
 */
private static final String VIVO_BITS_PER_SAMPLE_KEY = "v-bits-per-sample";

private final Context context;
// Dispatches renderer events to the optional AudioRendererEventListener.
private final EventDispatcher eventDispatcher;
// Destination for decoded audio (e.g. a DefaultAudioSink); all constructors funnel into one.
private final AudioSink audioSink;

// Max input buffer size for the current codec — presumably used when configuring
// MediaFormat and for DISCARD_REASON_MAX_INPUT_SIZE_EXCEEDED checks; confirm in class body.
private int codecMaxInputSize;
// Device/decoder workaround flag; exact trigger conditions are set elsewhere in the class.
private boolean codecNeedsDiscardChannelsWorkaround;
@Nullable private Format inputFormat;

/** Codec used for DRM decryption only in passthrough and offload. */
@Nullable private Format decryptOnlyCodecFormat;

// Playback position bookkeeping for the MediaClock implementation.
private long currentPositionUs;
private boolean allowFirstBufferPositionDiscontinuity;
private boolean allowPositionDiscontinuity;
// When true, the AudioSink must be fully reset before reuse.
private boolean audioSinkNeedsReset;
private boolean experimentalKeepAudioTrackOnSeek;

@Nullable private WakeupListener wakeupListener;
/**
 * Creates a renderer with no event listener.
 *
 * @param context A context.
 * @param mediaCodecSelector A decoder selector.
 */
public MediaCodecAudioRenderer(Context context, MediaCodecSelector mediaCodecSelector) {
  // Delegate to the handler/listener overload with event delivery disabled.
  this(context, mediaCodecSelector, /* eventHandler= */ null, /* eventListener= */ null);
}
/**
 * Creates a renderer that reports events but assumes default audio capabilities.
 *
 * @param context A context.
 * @param mediaCodecSelector A decoder selector.
 * @param eventHandler A handler to use when delivering events to {@code eventListener}. May be
 *     null if delivery of events is not required.
 * @param eventListener A listener of events. May be null if delivery of events is not required.
 */
public MediaCodecAudioRenderer(
    Context context,
    MediaCodecSelector mediaCodecSelector,
    @Nullable Handler eventHandler,
    @Nullable AudioRendererEventListener eventListener) {
  // Delegate with DEFAULT_AUDIO_CAPABILITIES (no encoded-audio passthrough assumed).
  this(
      context,
      mediaCodecSelector,
      eventHandler,
      eventListener,
      AudioCapabilities.DEFAULT_AUDIO_CAPABILITIES);
}
/**
* @param context A context.
* @param mediaCodecSelector A decoder selector.
* @param eventHandler A handler to use when delivering events to {@code eventListener}. May be
* null if delivery of events is not required.
* @param eventListener A listener of events. May be null if delivery of events is not required.
* @param audioCapabilities The audio capabilities for playback on this device. Use {@link
* AudioCapabilities#DEFAULT_AUDIO_CAPABILITIES} if default capabilities (no encoded audio
* passthrough support) should be assumed.
* @param audioProcessors Optional {@link AudioProcessor}s that will process PCM audio before
* output.
*/
public MediaCodecAudioRenderer(
Context context,
MediaCodecSelector mediaCodecSelector,
@Nullable Handler eventHandler,
@Nullable AudioRendererEventListener eventListener,
AudioCapabilities audioCapabilities,
AudioProcessor... audioProcessors) {
this(
context,
mediaCodecSelector,
eventHandler,
eventListener,
new DefaultAudioSink.Builder()
.setAudioCapabilities( // For backward compatibility, null == default.
firstNonNull(audioCapabilities, AudioCapabilities.DEFAULT_AUDIO_CAPABILITIES))
.setAudioProcessors(audioProcessors)
.build());
}
/**
* @param context A context.
* @param mediaCodecSelector A decoder selector.
* @param eventHandler A handler to use when delivering events to {@code eventListener}. May be
* null if delivery of events is not required.
* @param eventListener A listener of events. May be null if delivery of events is not required.
* @param audioSink The sink to which audio will be output.
*/
public MediaCodecAudioRenderer(
Context context,
MediaCodecSelector mediaCodecSelector,
@Nullable Handler eventHandler,
@Nullable AudioRendererEventListener eventListener,
AudioSink audioSink) {
this(
context,
MediaCodecAdapter.Factory.DEFAULT,
mediaCodecSelector,
/* enableDecoderFallback= */ false,
eventHandler,
eventListener,
audioSink);
}
/**
* @param context A context.
* @param mediaCodecSelector A decoder selector.
* @param enableDecoderFallback Whether to enable fallback to lower-priority decoders if decoder
* initialization fails. This may result in using a decoder that is slower/less efficient than
* the primary decoder.
* @param eventHandler A handler to use when delivering events to {@code eventListener}. May be
* null if delivery of events is not required.
* @param eventListener A listener of events. May be null if delivery of events is not required.
* @param audioSink The sink to which audio will be output.
*/
public MediaCodecAudioRenderer(
Context context,
MediaCodecSelector mediaCodecSelector,
boolean enableDecoderFallback,
@Nullable Handler eventHandler,
@Nullable AudioRendererEventListener eventListener,
AudioSink audioSink) {
this(
context,
MediaCodecAdapter.Factory.DEFAULT,
mediaCodecSelector,
enableDecoderFallback,
eventHandler,
eventListener,
audioSink);
}
/**
* Creates a new instance.
*
* @param context A context.
* @param codecAdapterFactory The {@link MediaCodecAdapter.Factory} used to create {@link
* MediaCodecAdapter} instances.
* @param mediaCodecSelector A decoder selector.
* @param enableDecoderFallback Whether to enable fallback to lower-priority decoders if decoder
* initialization fails. This may result in using a decoder that is slower/less efficient than
* the primary decoder.
* @param eventHandler A handler to use when delivering events to {@code eventListener}. May be
* null if delivery of events is not required.
* @param eventListener A listener of events. May be null if delivery of events is not required.
* @param audioSink The sink to which audio will be output.
*/
public MediaCodecAudioRenderer(
Context context,
MediaCodecAdapter.Factory codecAdapterFactory,
MediaCodecSelector mediaCodecSelector,
boolean enableDecoderFallback,
@Nullable Handler eventHandler,
@Nullable AudioRendererEventListener eventListener,
AudioSink audioSink) {
super(
C.TRACK_TYPE_AUDIO,
codecAdapterFactory,
mediaCodecSelector,
enableDecoderFallback,
/* assumedMinimumCodecOperatingRate= */ 44100);
context = context.getApplicationContext();
this.context = context;
this.audioSink = audioSink;
eventDispatcher = new EventDispatcher(eventHandler, eventListener);
audioSink.setListener(new AudioSinkListener());
}
  @Override
  public String getName() {
    // The renderer name used in log messages and diagnostics.
    return TAG;
  }
  /**
   * Sets whether to enable the experimental feature that keeps and flushes the {@link
   * android.media.AudioTrack} when a seek occurs, as opposed to releasing and reinitialising. Off
   * by default.
   *
   * <p>This method is experimental, and will be renamed or removed in a future release.
   *
   * @param enableKeepAudioTrackOnSeek Whether to keep the {@link android.media.AudioTrack} on seek.
   */
  public void experimentalSetEnableKeepAudioTrackOnSeek(boolean enableKeepAudioTrackOnSeek) {
    // Read in onPositionReset to choose between flush-without-release and full flush.
    this.experimentalKeepAudioTrackOnSeek = enableKeepAudioTrackOnSeek;
  }
  @Override
  protected @Capabilities int supportsFormat(MediaCodecSelector mediaCodecSelector, Format format)
      throws DecoderQueryException {
    // Non-audio formats can never be handled by this renderer.
    if (!MimeTypes.isAudio(format.sampleMimeType)) {
      return RendererCapabilities.create(C.FORMAT_UNSUPPORTED_TYPE);
    }
    @TunnelingSupport
    int tunnelingSupport = Util.SDK_INT >= 21 ? TUNNELING_SUPPORTED : TUNNELING_NOT_SUPPORTED;
    boolean formatHasDrm = format.cryptoType != C.CRYPTO_TYPE_NONE;
    boolean supportsFormatDrm = supportsFormatDrm(format);
    // In direct mode, if the format has DRM then we need to use a decoder that only decrypts.
    // Else we don't need a decoder at all.
    if (supportsFormatDrm
        && audioSink.supportsFormat(format)
        && (!formatHasDrm || MediaCodecUtil.getDecryptOnlyDecoderInfo() != null)) {
      return RendererCapabilities.create(C.FORMAT_HANDLED, ADAPTIVE_NOT_SEAMLESS, tunnelingSupport);
    }
    // If the input is PCM then it will be passed directly to the sink. Hence the sink must support
    // the input format directly.
    if (MimeTypes.AUDIO_RAW.equals(format.sampleMimeType) && !audioSink.supportsFormat(format)) {
      return RendererCapabilities.create(C.FORMAT_UNSUPPORTED_SUBTYPE);
    }
    // For all other input formats, we expect the decoder to output 16-bit PCM.
    if (!audioSink.supportsFormat(
        Util.getPcmFormat(C.ENCODING_PCM_16BIT, format.channelCount, format.sampleRate))) {
      return RendererCapabilities.create(C.FORMAT_UNSUPPORTED_SUBTYPE);
    }
    List<MediaCodecInfo> decoderInfos =
        getDecoderInfos(mediaCodecSelector, format, /* requiresSecureDecoder= */ false, audioSink);
    if (decoderInfos.isEmpty()) {
      return RendererCapabilities.create(C.FORMAT_UNSUPPORTED_SUBTYPE);
    }
    // A decoder exists but the DRM scheme is unsupported; report that distinctly.
    if (!supportsFormatDrm) {
      return RendererCapabilities.create(C.FORMAT_UNSUPPORTED_DRM);
    }
    // Check whether the first decoder supports the format. This is the preferred decoder for the
    // format's MIME type, according to the MediaCodecSelector.
    MediaCodecInfo decoderInfo = decoderInfos.get(0);
    boolean isFormatSupported = decoderInfo.isFormatSupported(format);
    boolean isPreferredDecoder = true;
    if (!isFormatSupported) {
      // Check whether any of the other decoders support the format.
      for (int i = 1; i < decoderInfos.size(); i++) {
        MediaCodecInfo otherDecoderInfo = decoderInfos.get(i);
        if (otherDecoderInfo.isFormatSupported(format)) {
          decoderInfo = otherDecoderInfo;
          isFormatSupported = true;
          isPreferredDecoder = false;
          break;
        }
      }
    }
    @C.FormatSupport
    int formatSupport = isFormatSupported ? C.FORMAT_HANDLED : C.FORMAT_EXCEEDS_CAPABILITIES;
    @AdaptiveSupport
    int adaptiveSupport =
        isFormatSupported && decoderInfo.isSeamlessAdaptationSupported(format)
            ? ADAPTIVE_SEAMLESS
            : ADAPTIVE_NOT_SEAMLESS;
    @HardwareAccelerationSupport
    int hardwareAccelerationSupport =
        decoderInfo.hardwareAccelerated
            ? HARDWARE_ACCELERATION_SUPPORTED
            : HARDWARE_ACCELERATION_NOT_SUPPORTED;
    @DecoderSupport
    int decoderSupport = isPreferredDecoder ? DECODER_SUPPORT_PRIMARY : DECODER_SUPPORT_FALLBACK;
    return RendererCapabilities.create(
        formatSupport,
        adaptiveSupport,
        tunnelingSupport,
        hardwareAccelerationSupport,
        decoderSupport);
  }
  @Override
  protected List<MediaCodecInfo> getDecoderInfos(
      MediaCodecSelector mediaCodecSelector, Format format, boolean requiresSecureDecoder)
      throws DecoderQueryException {
    // Delegate to the static helper (which knows about the sink), then sort so that decoders
    // fully supporting the format come first.
    return MediaCodecUtil.getDecoderInfosSortedByFormatSupport(
        getDecoderInfos(mediaCodecSelector, format, requiresSecureDecoder, audioSink), format);
  }
  /**
   * Returns a list of decoders that can decode media in the specified format, in the priority order
   * specified by the {@link MediaCodecSelector}. Note that since the {@link MediaCodecSelector}
   * only has access to {@link Format#sampleMimeType}, the list is not ordered to account for
   * whether each decoder supports the details of the format (e.g., taking into account the format's
   * profile, level, channel count and so on). {@link
   * MediaCodecUtil#getDecoderInfosSortedByFormatSupport} can be used to further sort the list into
   * an order where decoders that fully support the format come first.
   *
   * @param mediaCodecSelector The decoder selector.
   * @param format The {@link Format} for which a decoder is required.
   * @param requiresSecureDecoder Whether a secure decoder is required.
   * @param audioSink The {@link AudioSink} to which audio will be output.
   * @return A list of {@link MediaCodecInfo}s corresponding to decoders. May be empty.
   * @throws DecoderQueryException Thrown if there was an error querying decoders.
   */
  private static List<MediaCodecInfo> getDecoderInfos(
      MediaCodecSelector mediaCodecSelector,
      Format format,
      boolean requiresSecureDecoder,
      AudioSink audioSink)
      throws DecoderQueryException {
    @Nullable String mimeType = format.sampleMimeType;
    if (mimeType == null) {
      // No MIME type means there is nothing to query decoders for.
      return ImmutableList.of();
    }
    if (audioSink.supportsFormat(format)) {
      // The format is supported directly, so a codec is only needed for decryption.
      @Nullable MediaCodecInfo codecInfo = MediaCodecUtil.getDecryptOnlyDecoderInfo();
      if (codecInfo != null) {
        return ImmutableList.of(codecInfo);
      }
    }
    // Fall back to a full decode path, allowing soft (e.g. E-AC3 for AC3) MIME-type matches.
    return MediaCodecUtil.getDecoderInfosSoftMatch(
        mediaCodecSelector, format, requiresSecureDecoder, /* requiresTunnelingDecoder= */ false);
  }
  @Override
  protected boolean shouldUseBypass(Format format) {
    // Bypass (no decoder) is possible whenever the sink accepts the input format directly.
    return audioSink.supportsFormat(format);
  }
  @Override
  protected MediaCodecAdapter.Configuration getMediaCodecConfiguration(
      MediaCodecInfo codecInfo,
      Format format,
      @Nullable MediaCrypto crypto,
      float codecOperatingRate) {
    // Cache per-codec state used later by canReuseCodec and onOutputFormatChanged.
    codecMaxInputSize = getCodecMaxInputSize(codecInfo, format, getStreamFormats());
    codecNeedsDiscardChannelsWorkaround = codecNeedsDiscardChannelsWorkaround(codecInfo.name);
    MediaFormat mediaFormat =
        getMediaFormat(format, codecInfo.codecMimeType, codecMaxInputSize, codecOperatingRate);
    // Store the input MIME type if we're only using the codec for decryption.
    boolean decryptOnlyCodecEnabled =
        MimeTypes.AUDIO_RAW.equals(codecInfo.mimeType)
            && !MimeTypes.AUDIO_RAW.equals(format.sampleMimeType);
    decryptOnlyCodecFormat = decryptOnlyCodecEnabled ? format : null;
    return MediaCodecAdapter.Configuration.createForAudioDecoding(
        codecInfo, mediaFormat, format, crypto);
  }
  @Override
  protected DecoderReuseEvaluation canReuseCodec(
      MediaCodecInfo codecInfo, Format oldFormat, Format newFormat) {
    DecoderReuseEvaluation evaluation = codecInfo.canReuseCodec(oldFormat, newFormat);
    @DecoderDiscardReasons int discardReasons = evaluation.discardReasons;
    if (isBypassPossible(newFormat)) {
      // We prefer direct audio playback so that for multi-channel tracks the audio is not downmixed
      // to stereo.
      discardReasons |= DecoderReuseEvaluation.DISCARD_REASON_AUDIO_BYPASS_POSSIBLE;
    }
    if (getCodecMaxInputSize(codecInfo, newFormat) > codecMaxInputSize) {
      // The existing codec's input buffers are too small for the new format.
      discardReasons |= DISCARD_REASON_MAX_INPUT_SIZE_EXCEEDED;
    }
    return new DecoderReuseEvaluation(
        codecInfo.name,
        oldFormat,
        newFormat,
        discardReasons != 0 ? REUSE_RESULT_NO : evaluation.result,
        discardReasons);
  }
  @Override
  @Nullable
  public MediaClock getMediaClock() {
    // This renderer acts as the playback clock, driven by the audio sink's position.
    return this;
  }
@Override
protected float getCodecOperatingRateV23(
float targetPlaybackSpeed, Format format, Format[] streamFormats) {
// Use the highest known stream sample-rate up front, to avoid having to reconfigure the codec
// should an adaptive switch to that stream occur.
int maxSampleRate = -1;
for (Format streamFormat : streamFormats) {
int streamSampleRate = streamFormat.sampleRate;
if (streamSampleRate != Format.NO_VALUE) {
maxSampleRate = max(maxSampleRate, streamSampleRate);
}
}
return maxSampleRate == -1 ? CODEC_OPERATING_RATE_UNSET : (maxSampleRate * targetPlaybackSpeed);
}
  @Override
  protected void onCodecInitialized(
      String name,
      MediaCodecAdapter.Configuration configuration,
      long initializedTimestampMs,
      long initializationDurationMs) {
    // Forward decoder-initialized timing to the analytics/event listener.
    eventDispatcher.decoderInitialized(name, initializedTimestampMs, initializationDurationMs);
  }
  @Override
  protected void onCodecReleased(String name) {
    eventDispatcher.decoderReleased(name);
  }
  @Override
  protected void onCodecError(Exception codecError) {
    Log.e(TAG, "Audio codec error", codecError);
    eventDispatcher.audioCodecError(codecError);
  }
  @Override
  @Nullable
  protected DecoderReuseEvaluation onInputFormatChanged(FormatHolder formatHolder)
      throws ExoPlaybackException {
    // Remember the input format for error attribution in processOutputBuffer.
    inputFormat = checkNotNull(formatHolder.format);
    @Nullable DecoderReuseEvaluation evaluation = super.onInputFormatChanged(formatHolder);
    eventDispatcher.inputFormatChanged(inputFormat, evaluation);
    return evaluation;
  }
  @Override
  protected void onOutputFormatChanged(Format format, @Nullable MediaFormat mediaFormat)
      throws ExoPlaybackException {
    // Derive the format to feed the sink, which differs per decode mode (decrypt-only
    // passthrough, bypass, or full decode).
    Format audioSinkInputFormat;
    @Nullable int[] channelMap = null;
    if (decryptOnlyCodecFormat != null) { // Direct playback with a codec for decryption.
      audioSinkInputFormat = decryptOnlyCodecFormat;
    } else if (getCodec() == null) { // Direct playback with codec bypass.
      audioSinkInputFormat = format;
    } else {
      // NOTE(review): this branch dereferences the @Nullable mediaFormat; it appears to rely on
      // MediaCodecRenderer providing a non-null MediaFormat whenever a codec is present — confirm.
      @C.PcmEncoding int pcmEncoding;
      if (MimeTypes.AUDIO_RAW.equals(format.sampleMimeType)) {
        // For PCM streams, the encoder passes through int samples despite set to float mode.
        pcmEncoding = format.pcmEncoding;
      } else if (Util.SDK_INT >= 24 && mediaFormat.containsKey(MediaFormat.KEY_PCM_ENCODING)) {
        pcmEncoding = mediaFormat.getInteger(MediaFormat.KEY_PCM_ENCODING);
      } else if (mediaFormat.containsKey(VIVO_BITS_PER_SAMPLE_KEY)) {
        // Some Vivo decoders report bits per sample under a vendor key instead.
        pcmEncoding = Util.getPcmEncoding(mediaFormat.getInteger(VIVO_BITS_PER_SAMPLE_KEY));
      } else {
        // If the format is anything other than PCM then we assume that the audio decoder will
        // output 16-bit PCM.
        pcmEncoding = C.ENCODING_PCM_16BIT;
      }
      audioSinkInputFormat =
          new Format.Builder()
              .setSampleMimeType(MimeTypes.AUDIO_RAW)
              .setPcmEncoding(pcmEncoding)
              .setEncoderDelay(format.encoderDelay)
              .setEncoderPadding(format.encoderPadding)
              .setChannelCount(mediaFormat.getInteger(MediaFormat.KEY_CHANNEL_COUNT))
              .setSampleRate(mediaFormat.getInteger(MediaFormat.KEY_SAMPLE_RATE))
              .build();
      if (codecNeedsDiscardChannelsWorkaround
          && audioSinkInputFormat.channelCount == 6
          && format.channelCount < 6) {
        // The decoder upmixed to 6 channels; map back to the original channel count by
        // keeping only the first format.channelCount channels.
        channelMap = new int[format.channelCount];
        for (int i = 0; i < format.channelCount; i++) {
          channelMap[i] = i;
        }
      }
    }
    try {
      audioSink.configure(audioSinkInputFormat, /* specifiedBufferSize= */ 0, channelMap);
    } catch (AudioSink.ConfigurationException e) {
      throw createRendererException(
          e, e.format, PlaybackException.ERROR_CODE_AUDIO_TRACK_INIT_FAILED);
    }
  }
  /** See {@link AudioSink.Listener#onPositionDiscontinuity()}. */
  @CallSuper
  protected void onPositionDiscontinuity() {
    // We are out of sync so allow currentPositionUs to jump backwards.
    allowPositionDiscontinuity = true;
  }
  @Override
  protected void onEnabled(boolean joining, boolean mayRenderStartOfStream)
      throws ExoPlaybackException {
    super.onEnabled(joining, mayRenderStartOfStream);
    eventDispatcher.enabled(decoderCounters);
    // Tunneling is decided per playback configuration, so apply it on every enable.
    if (getConfiguration().tunneling) {
      audioSink.enableTunnelingV21();
    } else {
      audioSink.disableTunneling();
    }
    audioSink.setPlayerId(getPlayerId());
  }
  @Override
  protected void onPositionReset(long positionUs, boolean joining) throws ExoPlaybackException {
    super.onPositionReset(positionUs, joining);
    if (experimentalKeepAudioTrackOnSeek) {
      // Keep the platform AudioTrack across the seek; only flush its queued data.
      audioSink.experimentalFlushWithoutAudioTrackRelease();
    } else {
      audioSink.flush();
    }
    currentPositionUs = positionUs;
    // The next presentable buffer and the next sink position may both legitimately jump.
    allowFirstBufferPositionDiscontinuity = true;
    allowPositionDiscontinuity = true;
  }
  @Override
  protected void onStarted() {
    super.onStarted();
    audioSink.play();
  }
  @Override
  protected void onStopped() {
    // Snapshot the position before pausing so getPositionUs stays accurate while stopped.
    updateCurrentPosition();
    audioSink.pause();
    super.onStopped();
  }
  @Override
  protected void onDisabled() {
    audioSinkNeedsReset = true;
    inputFormat = null;
    // Nested finally blocks guarantee super.onDisabled() and the disabled event both run even
    // if flushing the sink throws.
    try {
      audioSink.flush();
    } finally {
      try {
        super.onDisabled();
      } finally {
        eventDispatcher.disabled(decoderCounters);
      }
    }
  }
  @Override
  protected void onReset() {
    try {
      super.onReset();
    } finally {
      // Reset the sink exactly once per disable/reset cycle.
      if (audioSinkNeedsReset) {
        audioSinkNeedsReset = false;
        audioSink.reset();
      }
    }
  }
  @Override
  protected void onRelease() {
    audioSink.release();
  }
  @Override
  public boolean isEnded() {
    // Ended only once both the renderer and the sink have drained.
    return super.isEnded() && audioSink.isEnded();
  }
  @Override
  public boolean isReady() {
    // Ready if the sink still has audio to play, even if no more input is available.
    return audioSink.hasPendingData() || super.isReady();
  }
  @Override
  public long getPositionUs() {
    if (getState() == STATE_STARTED) {
      updateCurrentPosition();
    }
    return currentPositionUs;
  }
  @Override
  public void setPlaybackParameters(PlaybackParameters playbackParameters) {
    // Speed/pitch are applied by the sink, which owns audio timing.
    audioSink.setPlaybackParameters(playbackParameters);
  }
  @Override
  public PlaybackParameters getPlaybackParameters() {
    return audioSink.getPlaybackParameters();
  }
  @Override
  protected void onQueueInputBuffer(DecoderInputBuffer buffer) {
    if (allowFirstBufferPositionDiscontinuity && !buffer.isDecodeOnly()) {
      // TODO: Remove this hack once we have a proper fix for [Internal: b/71876314].
      // Allow the position to jump if the first presentable input buffer has a timestamp that
      // differs significantly from what was expected.
      if (Math.abs(buffer.timeUs - currentPositionUs) > 500000) {
        currentPositionUs = buffer.timeUs;
      }
      allowFirstBufferPositionDiscontinuity = false;
    }
  }
  @Override
  protected void onProcessedStreamChange() {
    super.onProcessedStreamChange();
    // A stream change is an intentional timestamp discontinuity for the sink.
    audioSink.handleDiscontinuity();
  }
  @Override
  protected boolean processOutputBuffer(
      long positionUs,
      long elapsedRealtimeUs,
      @Nullable MediaCodecAdapter codec,
      @Nullable ByteBuffer buffer,
      int bufferIndex,
      int bufferFlags,
      int sampleCount,
      long bufferPresentationTimeUs,
      boolean isDecodeOnlyBuffer,
      boolean isLastBuffer,
      Format format)
      throws ExoPlaybackException {
    checkNotNull(buffer);
    if (decryptOnlyCodecFormat != null
        && (bufferFlags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
      // Discard output buffers from the passthrough (raw) decoder containing codec specific data.
      checkNotNull(codec).releaseOutputBuffer(bufferIndex, false);
      return true;
    }
    if (isDecodeOnlyBuffer) {
      // Skip buffers that precede the playback start position; codec may be null in bypass mode.
      if (codec != null) {
        codec.releaseOutputBuffer(bufferIndex, false);
      }
      decoderCounters.skippedOutputBufferCount += sampleCount;
      audioSink.handleDiscontinuity();
      return true;
    }
    boolean fullyConsumed;
    try {
      fullyConsumed = audioSink.handleBuffer(buffer, bufferPresentationTimeUs, sampleCount);
    } catch (InitializationException e) {
      // NOTE(review): initialization failures are attributed to the stream's input format while
      // write failures use the buffer's format — presumably intentional; confirm upstream.
      throw createRendererException(
          e, inputFormat, e.isRecoverable, PlaybackException.ERROR_CODE_AUDIO_TRACK_INIT_FAILED);
    } catch (WriteException e) {
      throw createRendererException(
          e, format, e.isRecoverable, PlaybackException.ERROR_CODE_AUDIO_TRACK_WRITE_FAILED);
    }
    if (fullyConsumed) {
      if (codec != null) {
        codec.releaseOutputBuffer(bufferIndex, false);
      }
      decoderCounters.renderedOutputBufferCount += sampleCount;
      return true;
    }
    // Sink could not accept the whole buffer yet; the caller will retry with the same buffer.
    return false;
  }
  @Override
  protected void renderToEndOfStream() throws ExoPlaybackException {
    try {
      audioSink.playToEndOfStream();
    } catch (AudioSink.WriteException e) {
      throw createRendererException(
          e, e.format, e.isRecoverable, PlaybackException.ERROR_CODE_AUDIO_TRACK_WRITE_FAILED);
    }
  }
  @Override
  protected void onOutputStreamOffsetUsChanged(long outputStreamOffsetUs) {
    // Keep the sink's timestamp domain aligned with the renderer's output stream offset.
    audioSink.setOutputStreamOffsetUs(outputStreamOffsetUs);
  }
  @Override
  public void handleMessage(@MessageType int messageType, @Nullable Object message)
      throws ExoPlaybackException {
    // Audio-specific messages are applied to the sink; everything else goes to the superclass.
    switch (messageType) {
      case MSG_SET_VOLUME:
        audioSink.setVolume((Float) message);
        break;
      case MSG_SET_AUDIO_ATTRIBUTES:
        AudioAttributes audioAttributes = (AudioAttributes) message;
        audioSink.setAudioAttributes(audioAttributes);
        break;
      case MSG_SET_AUX_EFFECT_INFO:
        AuxEffectInfo auxEffectInfo = (AuxEffectInfo) message;
        audioSink.setAuxEffectInfo(auxEffectInfo);
        break;
      case MSG_SET_PREFERRED_AUDIO_DEVICE:
        // AudioDeviceInfo requires API 23; the Api23 holder keeps the class reference off
        // older devices.
        if (Util.SDK_INT >= 23) {
          Api23.setAudioSinkPreferredDevice(audioSink, message);
        }
        break;
      case MSG_SET_SKIP_SILENCE_ENABLED:
        audioSink.setSkipSilenceEnabled((Boolean) message);
        break;
      case MSG_SET_AUDIO_SESSION_ID:
        audioSink.setAudioSessionId((Integer) message);
        break;
      case MSG_SET_WAKEUP_LISTENER:
        this.wakeupListener = (WakeupListener) message;
        break;
      // Video-related messages intentionally fall through to the default superclass handling.
      case MSG_SET_CAMERA_MOTION_LISTENER:
      case MSG_SET_CHANGE_FRAME_RATE_STRATEGY:
      case MSG_SET_SCALING_MODE:
      case MSG_SET_VIDEO_FRAME_METADATA_LISTENER:
      case MSG_SET_VIDEO_OUTPUT:
      default:
        super.handleMessage(messageType, message);
        break;
    }
  }
/**
* Returns a maximum input size suitable for configuring a codec for {@code format} in a way that
* will allow possible adaptation to other compatible formats in {@code streamFormats}.
*
* @param codecInfo A {@link MediaCodecInfo} describing the decoder.
* @param format The {@link Format} for which the codec is being configured.
* @param streamFormats The possible stream formats.
* @return A suitable maximum input size.
*/
protected int getCodecMaxInputSize(
MediaCodecInfo codecInfo, Format format, Format[] streamFormats) {
int maxInputSize = getCodecMaxInputSize(codecInfo, format);
if (streamFormats.length == 1) {
// The single entry in streamFormats must correspond to the format for which the codec is
// being configured.
return maxInputSize;
}
for (Format streamFormat : streamFormats) {
if (codecInfo.canReuseCodec(format, streamFormat).result != REUSE_RESULT_NO) {
maxInputSize = max(maxInputSize, getCodecMaxInputSize(codecInfo, streamFormat));
}
}
return maxInputSize;
}
  /**
   * Returns a maximum input buffer size for a given {@link Format}.
   *
   * @param codecInfo A {@link MediaCodecInfo} describing the decoder.
   * @param format The {@link Format}.
   * @return A maximum input buffer size in bytes, or {@link Format#NO_VALUE} if a maximum could not
   *     be determined.
   */
  private int getCodecMaxInputSize(MediaCodecInfo codecInfo, Format format) {
    if ("OMX.google.raw.decoder".equals(codecInfo.name)) {
      // OMX.google.raw.decoder didn't resize its output buffers correctly prior to N, except on
      // Android TV running M, so there's no point requesting a non-default input size. Doing so may
      // cause a native crash, whereas not doing so will cause a more controlled failure when
      // attempting to fill an input buffer. See: https://github.com/google/ExoPlayer/issues/4057.
      if (Util.SDK_INT < 24 && !(Util.SDK_INT == 23 && Util.isTv(context))) {
        return Format.NO_VALUE;
      }
    }
    // For all other decoders, trust the size declared by the format itself.
    return format.maxInputSize;
  }
  /**
   * Returns the framework {@link MediaFormat} that can be used to configure a {@link MediaCodec}
   * for decoding the given {@link Format} for playback.
   *
   * @param format The {@link Format} of the media.
   * @param codecMimeType The MIME type handled by the codec.
   * @param codecMaxInputSize The maximum input size supported by the codec.
   * @param codecOperatingRate The codec operating rate, or {@link #CODEC_OPERATING_RATE_UNSET} if
   *     no codec operating rate should be set.
   * @return The framework {@link MediaFormat}.
   */
  @SuppressLint("InlinedApi")
  protected MediaFormat getMediaFormat(
      Format format, String codecMimeType, int codecMaxInputSize, float codecOperatingRate) {
    MediaFormat mediaFormat = new MediaFormat();
    // Set format parameters that should always be set.
    mediaFormat.setString(MediaFormat.KEY_MIME, codecMimeType);
    mediaFormat.setInteger(MediaFormat.KEY_CHANNEL_COUNT, format.channelCount);
    mediaFormat.setInteger(MediaFormat.KEY_SAMPLE_RATE, format.sampleRate);
    MediaFormatUtil.setCsdBuffers(mediaFormat, format.initializationData);
    // Set codec max values.
    MediaFormatUtil.maybeSetInteger(mediaFormat, MediaFormat.KEY_MAX_INPUT_SIZE, codecMaxInputSize);
    // Set codec configuration values.
    if (Util.SDK_INT >= 23) {
      mediaFormat.setInteger(MediaFormat.KEY_PRIORITY, 0 /* realtime priority */);
      if (codecOperatingRate != CODEC_OPERATING_RATE_UNSET && !deviceDoesntSupportOperatingRate()) {
        mediaFormat.setFloat(MediaFormat.KEY_OPERATING_RATE, codecOperatingRate);
      }
    }
    if (Util.SDK_INT <= 28 && MimeTypes.AUDIO_AC4.equals(format.sampleMimeType)) {
      // On some older builds, the AC-4 decoder expects to receive samples formatted as raw frames
      // not sync frames. Set a format key to override this.
      mediaFormat.setInteger("ac4-is-sync", 1);
    }
    if (Util.SDK_INT >= 24
        && audioSink.getFormatSupport(
                Util.getPcmFormat(C.ENCODING_PCM_FLOAT, format.channelCount, format.sampleRate))
            == AudioSink.SINK_FORMAT_SUPPORTED_DIRECTLY) {
      // The sink can take float PCM directly, so ask the decoder for float output.
      mediaFormat.setInteger(MediaFormat.KEY_PCM_ENCODING, AudioFormat.ENCODING_PCM_FLOAT);
    }
    if (Util.SDK_INT >= 32) {
      // NOTE(review): 99 presumably lifts the platform's default output channel cap so
      // multi-channel audio is not downmixed — confirm against the MediaFormat docs.
      mediaFormat.setInteger(MediaFormat.KEY_MAX_OUTPUT_CHANNEL_COUNT, 99);
    }
    return mediaFormat;
  }
  /** Refreshes {@link #currentPositionUs} from the audio sink's playback position. */
  private void updateCurrentPosition() {
    long newCurrentPositionUs = audioSink.getCurrentPositionUs(isEnded());
    if (newCurrentPositionUs != AudioSink.CURRENT_POSITION_NOT_SET) {
      // The position is normally monotonic; it may only jump backwards right after a
      // discontinuity or position reset.
      currentPositionUs =
          allowPositionDiscontinuity
              ? newCurrentPositionUs
              : max(currentPositionUs, newCurrentPositionUs);
      allowPositionDiscontinuity = false;
    }
  }
/**
* Returns whether the device's decoders are known to not support setting the codec operating
* rate.
*
* <p>See <a href="https://github.com/google/ExoPlayer/issues/5821">GitHub issue #5821</a>.
*/
private static boolean deviceDoesntSupportOperatingRate() {
return Util.SDK_INT == 23
&& ("ZTE B2017G".equals(Util.MODEL) || "AXON 7 mini".equals(Util.MODEL));
}
/**
* Returns whether the decoder is known to output six audio channels when provided with input with
* fewer than six channels.
*
* <p>See [Internal: b/35655036].
*/
private static boolean codecNeedsDiscardChannelsWorkaround(String codecName) {
// The workaround applies to Samsung Galaxy S6 and Samsung Galaxy S7.
return Util.SDK_INT < 24
&& "OMX.SEC.aac.dec".equals(codecName)
&& "samsung".equals(Util.MANUFACTURER)
&& (Util.DEVICE.startsWith("zeroflte")
|| Util.DEVICE.startsWith("herolte")
|| Util.DEVICE.startsWith("heroqlte"));
}
  /** Forwards {@link AudioSink} callbacks to the renderer and its event dispatcher. */
  private final class AudioSinkListener implements AudioSink.Listener {
    @Override
    public void onPositionDiscontinuity() {
      // Let the renderer allow a backwards position jump.
      MediaCodecAudioRenderer.this.onPositionDiscontinuity();
    }
    @Override
    public void onPositionAdvancing(long playoutStartSystemTimeMs) {
      eventDispatcher.positionAdvancing(playoutStartSystemTimeMs);
    }
    @Override
    public void onUnderrun(int bufferSize, long bufferSizeMs, long elapsedSinceLastFeedMs) {
      eventDispatcher.underrun(bufferSize, bufferSizeMs, elapsedSinceLastFeedMs);
    }
    @Override
    public void onSkipSilenceEnabledChanged(boolean skipSilenceEnabled) {
      eventDispatcher.skipSilenceEnabledChanged(skipSilenceEnabled);
    }
    @Override
    public void onOffloadBufferEmptying() {
      // Wake the player up to feed more offload data, if a listener is registered.
      if (wakeupListener != null) {
        wakeupListener.onWakeup();
      }
    }
    @Override
    public void onOffloadBufferFull() {
      // The offload buffer is full; the player may sleep until it empties.
      if (wakeupListener != null) {
        wakeupListener.onSleep();
      }
    }
    @Override
    public void onAudioSinkError(Exception audioSinkError) {
      Log.e(TAG, "Audio sink error", audioSinkError);
      eventDispatcher.audioSinkError(audioSinkError);
    }
    @Override
    public void onAudioCapabilitiesChanged() {
      // E.g. an HDMI/Bluetooth route change; re-evaluate renderer capabilities.
      MediaCodecAudioRenderer.this.onRendererCapabilitiesChanged();
    }
  }
  /** Holder for API 23+ calls, so older devices never load {@link AudioDeviceInfo} references. */
  @RequiresApi(23)
  private static final class Api23 {
    private Api23() {}
    @DoNotInline
    public static void setAudioSinkPreferredDevice(
        AudioSink audioSink, @Nullable Object messagePayload) {
      // A null payload clears the preferred device.
      @Nullable AudioDeviceInfo audioDeviceInfo = (AudioDeviceInfo) messagePayload;
      audioSink.setPreferredDevice(audioDeviceInfo);
    }
  }
}
|
googleapis/google-cloud-java | 37,927 | java-visionai/proto-google-cloud-visionai-v1/src/main/java/com/google/cloud/visionai/v1/UserSpecifiedAnnotation.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/visionai/v1/warehouse.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.visionai.v1;
/**
*
*
* <pre>
* Annotation provided by users.
* </pre>
*
* Protobuf type {@code google.cloud.visionai.v1.UserSpecifiedAnnotation}
*/
public final class UserSpecifiedAnnotation extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.visionai.v1.UserSpecifiedAnnotation)
UserSpecifiedAnnotationOrBuilder {
private static final long serialVersionUID = 0L;
  // NOTE: this file is generated by protoc (DO NOT EDIT); hand changes will be lost on
  // regeneration.
  // Use UserSpecifiedAnnotation.newBuilder() to construct.
  private UserSpecifiedAnnotation(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Default instance constructor; initializes string fields to empty.
  private UserSpecifiedAnnotation() {
    key_ = "";
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new UserSpecifiedAnnotation();
  }
  /** Returns the protobuf descriptor for this message type. */
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.visionai.v1.WarehouseProto
        .internal_static_google_cloud_visionai_v1_UserSpecifiedAnnotation_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.visionai.v1.WarehouseProto
        .internal_static_google_cloud_visionai_v1_UserSpecifiedAnnotation_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.visionai.v1.UserSpecifiedAnnotation.class,
            com.google.cloud.visionai.v1.UserSpecifiedAnnotation.Builder.class);
  }
  // Presence bits for the optional message fields (value_, partition_).
  private int bitField0_;
  public static final int KEY_FIELD_NUMBER = 1;
  // Holds either a String or a ByteString; lazily converted and cached as String on first read.
  @SuppressWarnings("serial")
  private volatile java.lang.Object key_ = "";
  /**
   *
   *
   * <pre>
   * Required. Key of the annotation. The key must be set with type by
   * CreateDataSchema.
   * </pre>
   *
   * <code>string key = 1 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The key.
   */
  @java.lang.Override
  public java.lang.String getKey() {
    java.lang.Object ref = key_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Decode the wire-format ByteString once and cache the String for subsequent reads.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      key_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Required. Key of the annotation. The key must be set with type by
   * CreateDataSchema.
   * </pre>
   *
   * <code>string key = 1 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The bytes for key.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getKeyBytes() {
    java.lang.Object ref = key_;
    if (ref instanceof java.lang.String) {
      // Encode the cached String and store the ByteString for subsequent byte reads.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      key_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int VALUE_FIELD_NUMBER = 2;
  // Lazily-populated message field; presence tracked via bitField0_ bit 0.
  private com.google.cloud.visionai.v1.AnnotationValue value_;
  /**
   *
   *
   * <pre>
   * Value of the annotation. The value must be able to convert
   * to the type according to the data schema.
   * </pre>
   *
   * <code>.google.cloud.visionai.v1.AnnotationValue value = 2;</code>
   *
   * @return Whether the value field is set.
   */
  @java.lang.Override
  public boolean hasValue() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   *
   *
   * <pre>
   * Value of the annotation. The value must be able to convert
   * to the type according to the data schema.
   * </pre>
   *
   * <code>.google.cloud.visionai.v1.AnnotationValue value = 2;</code>
   *
   * @return The value.
   */
  @java.lang.Override
  public com.google.cloud.visionai.v1.AnnotationValue getValue() {
    // Never returns null; falls back to the default instance when unset.
    return value_ == null
        ? com.google.cloud.visionai.v1.AnnotationValue.getDefaultInstance()
        : value_;
  }
  /**
   *
   *
   * <pre>
   * Value of the annotation. The value must be able to convert
   * to the type according to the data schema.
   * </pre>
   *
   * <code>.google.cloud.visionai.v1.AnnotationValue value = 2;</code>
   */
  @java.lang.Override
  public com.google.cloud.visionai.v1.AnnotationValueOrBuilder getValueOrBuilder() {
    return value_ == null
        ? com.google.cloud.visionai.v1.AnnotationValue.getDefaultInstance()
        : value_;
  }
  public static final int PARTITION_FIELD_NUMBER = 3;
  // Presence tracked via bit 0x2 of bitField0_; see hasPartition().
  private com.google.cloud.visionai.v1.Partition partition_;
  /**
   *
   *
   * <pre>
   * Partition information in time and space for the sub-asset level annotation.
   * </pre>
   *
   * <code>.google.cloud.visionai.v1.Partition partition = 3;</code>
   *
   * @return Whether the partition field is set.
   */
  @java.lang.Override
  public boolean hasPartition() {
    return ((bitField0_ & 0x00000002) != 0);
  }
  /**
   *
   *
   * <pre>
   * Partition information in time and space for the sub-asset level annotation.
   * </pre>
   *
   * <code>.google.cloud.visionai.v1.Partition partition = 3;</code>
   *
   * @return The partition.
   */
  @java.lang.Override
  public com.google.cloud.visionai.v1.Partition getPartition() {
    // Never returns null: absent field yields the immutable default instance.
    return partition_ == null
        ? com.google.cloud.visionai.v1.Partition.getDefaultInstance()
        : partition_;
  }
  /**
   *
   *
   * <pre>
   * Partition information in time and space for the sub-asset level annotation.
   * </pre>
   *
   * <code>.google.cloud.visionai.v1.Partition partition = 3;</code>
   */
  @java.lang.Override
  public com.google.cloud.visionai.v1.PartitionOrBuilder getPartitionOrBuilder() {
    return partition_ == null
        ? com.google.cloud.visionai.v1.Partition.getDefaultInstance()
        : partition_;
  }
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Fields are emitted in ascending tag order, skipping absent/default values,
    // per the proto3 wire format.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(key_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, key_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(2, getValue());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      output.writeMessage(3, getPartition());
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // Memoized: -1 means "not yet computed". Must mirror writeTo() exactly.
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(key_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, key_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getValue());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getPartition());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.visionai.v1.UserSpecifiedAnnotation)) {
      return super.equals(obj);
    }
    com.google.cloud.visionai.v1.UserSpecifiedAnnotation other =
        (com.google.cloud.visionai.v1.UserSpecifiedAnnotation) obj;
    // Field-wise comparison: presence must match before comparing message fields.
    if (!getKey().equals(other.getKey())) return false;
    if (hasValue() != other.hasValue()) return false;
    if (hasValue()) {
      if (!getValue().equals(other.getValue())) return false;
    }
    if (hasPartition() != other.hasPartition()) return false;
    if (hasPartition()) {
      if (!getPartition().equals(other.getPartition())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    // Standard protobuf-generated scheme: seed 41, mix descriptor, then each set
    // field as (37*h + fieldNumber) followed by (53*h + valueHash).
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + KEY_FIELD_NUMBER;
    hash = (53 * hash) + getKey().hashCode();
    if (hasValue()) {
      hash = (37 * hash) + VALUE_FIELD_NUMBER;
      hash = (53 * hash) + getValue().hashCode();
    }
    if (hasPartition()) {
      hash = (37 * hash) + PARTITION_FIELD_NUMBER;
      hash = (53 * hash) + getPartition().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Static parse entry points: one overload per input source (ByteBuffer,
  // ByteString, byte[], InputStream, CodedInputStream), each with and without an
  // extension registry. All delegate to PARSER / GeneratedMessageV3 helpers.
  public static com.google.cloud.visionai.v1.UserSpecifiedAnnotation parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.visionai.v1.UserSpecifiedAnnotation parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.visionai.v1.UserSpecifiedAnnotation parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.visionai.v1.UserSpecifiedAnnotation parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.visionai.v1.UserSpecifiedAnnotation parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.visionai.v1.UserSpecifiedAnnotation parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.visionai.v1.UserSpecifiedAnnotation parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.visionai.v1.UserSpecifiedAnnotation parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Delimited variants read a varint length prefix before the message bytes.
  public static com.google.cloud.visionai.v1.UserSpecifiedAnnotation parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.visionai.v1.UserSpecifiedAnnotation parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.visionai.v1.UserSpecifiedAnnotation parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.visionai.v1.UserSpecifiedAnnotation parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  // Fresh builder with all fields cleared.
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  // Builder pre-populated from an existing message.
  public static Builder newBuilder(com.google.cloud.visionai.v1.UserSpecifiedAnnotation prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Annotation provided by users.
* </pre>
*
* Protobuf type {@code google.cloud.visionai.v1.UserSpecifiedAnnotation}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.visionai.v1.UserSpecifiedAnnotation)
com.google.cloud.visionai.v1.UserSpecifiedAnnotationOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.visionai.v1.WarehouseProto
          .internal_static_google_cloud_visionai_v1_UserSpecifiedAnnotation_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.visionai.v1.WarehouseProto
          .internal_static_google_cloud_visionai_v1_UserSpecifiedAnnotation_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.visionai.v1.UserSpecifiedAnnotation.class,
              com.google.cloud.visionai.v1.UserSpecifiedAnnotation.Builder.class);
    }
    // Construct using com.google.cloud.visionai.v1.UserSpecifiedAnnotation.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    // Eagerly creates the sub-message field builders when the runtime requires it
    // (so parent/child change notifications are wired up).
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getValueFieldBuilder();
        getPartitionFieldBuilder();
      }
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      // Reset presence bits and all fields to their defaults, disposing any
      // live sub-message builders so they detach from this parent.
      bitField0_ = 0;
      key_ = "";
      value_ = null;
      if (valueBuilder_ != null) {
        valueBuilder_.dispose();
        valueBuilder_ = null;
      }
      partition_ = null;
      if (partitionBuilder_ != null) {
        partitionBuilder_.dispose();
        partitionBuilder_ = null;
      }
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.visionai.v1.WarehouseProto
          .internal_static_google_cloud_visionai_v1_UserSpecifiedAnnotation_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.visionai.v1.UserSpecifiedAnnotation getDefaultInstanceForType() {
      return com.google.cloud.visionai.v1.UserSpecifiedAnnotation.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.visionai.v1.UserSpecifiedAnnotation build() {
      com.google.cloud.visionai.v1.UserSpecifiedAnnotation result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.visionai.v1.UserSpecifiedAnnotation buildPartial() {
      com.google.cloud.visionai.v1.UserSpecifiedAnnotation result =
          new com.google.cloud.visionai.v1.UserSpecifiedAnnotation(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Copies set fields into the message. Note the bit remap: builder bits are
    // 0x1 key / 0x2 value / 0x4 partition, while the message tracks only the two
    // message fields as 0x1 / 0x2.
    private void buildPartial0(com.google.cloud.visionai.v1.UserSpecifiedAnnotation result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.key_ = key_;
      }
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.value_ = valueBuilder_ == null ? value_ : valueBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.partition_ = partitionBuilder_ == null ? partition_ : partitionBuilder_.build();
        to_bitField0_ |= 0x00000002;
      }
      result.bitField0_ |= to_bitField0_;
    }
    // Reflection-based mutators: covariant overrides that narrow the return type
    // to this Builder; behavior is inherited from GeneratedMessageV3.Builder.
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.visionai.v1.UserSpecifiedAnnotation) {
        return mergeFrom((com.google.cloud.visionai.v1.UserSpecifiedAnnotation) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Proto merge semantics: non-empty scalars overwrite, sub-messages merge
    // field-by-field, unknown fields are concatenated.
    public Builder mergeFrom(com.google.cloud.visionai.v1.UserSpecifiedAnnotation other) {
      if (other == com.google.cloud.visionai.v1.UserSpecifiedAnnotation.getDefaultInstance())
        return this;
      if (!other.getKey().isEmpty()) {
        key_ = other.key_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (other.hasValue()) {
        mergeValue(other.getValue());
      }
      if (other.hasPartition()) {
        mergePartition(other.getPartition());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        // Tag-dispatch parse loop; tag = (field_number << 3) | wire_type, so
        // 10/18/26 are fields 1..3 with length-delimited wire type 2.
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                key_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                input.readMessage(getValueFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 26:
              {
                input.readMessage(getPartitionFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Builder-local presence bits: 0x1 key, 0x2 value, 0x4 partition.
    private int bitField0_;
    // Either a java.lang.String or a ByteString; decoded form cached lazily.
    private java.lang.Object key_ = "";
    /**
     *
     *
     * <pre>
     * Required. Key of the annotation. The key must be set with type by
     * CreateDataSchema.
     * </pre>
     *
     * <code>string key = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return The key.
     */
    public java.lang.String getKey() {
      java.lang.Object ref = key_;
      if (!(ref instanceof java.lang.String)) {
        // Decode the ByteString once and cache the String representation.
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        key_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
/**
*
*
* <pre>
* Required. Key of the annotation. The key must be set with type by
* CreateDataSchema.
* </pre>
*
* <code>string key = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for key.
*/
public com.google.protobuf.ByteString getKeyBytes() {
java.lang.Object ref = key_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
key_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
    /**
     *
     *
     * <pre>
     * Required. Key of the annotation. The key must be set with type by
     * CreateDataSchema.
     * </pre>
     *
     * <code>string key = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @param value The key to set.
     * @return This builder for chaining.
     */
    public Builder setKey(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      key_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Key of the annotation. The key must be set with type by
     * CreateDataSchema.
     * </pre>
     *
     * <code>string key = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearKey() {
      // Restores the proto default ("") and drops the presence bit.
      key_ = getDefaultInstance().getKey();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Key of the annotation. The key must be set with type by
     * CreateDataSchema.
     * </pre>
     *
     * <code>string key = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @param value The bytes for key to set.
     * @return This builder for chaining.
     */
    public Builder setKeyBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      // Rejects invalid UTF-8 up front, matching readStringRequireUtf8 on parse.
      checkByteStringIsUtf8(value);
      key_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    // Message field is held either directly (value_) or via a SingleFieldBuilderV3
    // (valueBuilder_) once a nested builder has been requested; never both.
    private com.google.cloud.visionai.v1.AnnotationValue value_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.visionai.v1.AnnotationValue,
            com.google.cloud.visionai.v1.AnnotationValue.Builder,
            com.google.cloud.visionai.v1.AnnotationValueOrBuilder>
        valueBuilder_;
    /**
     *
     *
     * <pre>
     * Value of the annotation. The value must be able to convert
     * to the type according to the data schema.
     * </pre>
     *
     * <code>.google.cloud.visionai.v1.AnnotationValue value = 2;</code>
     *
     * @return Whether the value field is set.
     */
    public boolean hasValue() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     *
     *
     * <pre>
     * Value of the annotation. The value must be able to convert
     * to the type according to the data schema.
     * </pre>
     *
     * <code>.google.cloud.visionai.v1.AnnotationValue value = 2;</code>
     *
     * @return The value.
     */
    public com.google.cloud.visionai.v1.AnnotationValue getValue() {
      if (valueBuilder_ == null) {
        return value_ == null
            ? com.google.cloud.visionai.v1.AnnotationValue.getDefaultInstance()
            : value_;
      } else {
        return valueBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * Value of the annotation. The value must be able to convert
     * to the type according to the data schema.
     * </pre>
     *
     * <code>.google.cloud.visionai.v1.AnnotationValue value = 2;</code>
     */
    public Builder setValue(com.google.cloud.visionai.v1.AnnotationValue value) {
      if (valueBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        value_ = value;
      } else {
        valueBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Value of the annotation. The value must be able to convert
     * to the type according to the data schema.
     * </pre>
     *
     * <code>.google.cloud.visionai.v1.AnnotationValue value = 2;</code>
     */
    public Builder setValue(com.google.cloud.visionai.v1.AnnotationValue.Builder builderForValue) {
      if (valueBuilder_ == null) {
        value_ = builderForValue.build();
      } else {
        valueBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Value of the annotation. The value must be able to convert
     * to the type according to the data schema.
     * </pre>
     *
     * <code>.google.cloud.visionai.v1.AnnotationValue value = 2;</code>
     */
    public Builder mergeValue(com.google.cloud.visionai.v1.AnnotationValue value) {
      if (valueBuilder_ == null) {
        // Merge into the existing message only if one was already set and is not
        // the shared default instance; otherwise replace outright.
        if (((bitField0_ & 0x00000002) != 0)
            && value_ != null
            && value_ != com.google.cloud.visionai.v1.AnnotationValue.getDefaultInstance()) {
          getValueBuilder().mergeFrom(value);
        } else {
          value_ = value;
        }
      } else {
        valueBuilder_.mergeFrom(value);
      }
      if (value_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Value of the annotation. The value must be able to convert
     * to the type according to the data schema.
     * </pre>
     *
     * <code>.google.cloud.visionai.v1.AnnotationValue value = 2;</code>
     */
    public Builder clearValue() {
      bitField0_ = (bitField0_ & ~0x00000002);
      value_ = null;
      if (valueBuilder_ != null) {
        valueBuilder_.dispose();
        valueBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Value of the annotation. The value must be able to convert
     * to the type according to the data schema.
     * </pre>
     *
     * <code>.google.cloud.visionai.v1.AnnotationValue value = 2;</code>
     */
    public com.google.cloud.visionai.v1.AnnotationValue.Builder getValueBuilder() {
      // Requesting a nested builder implies the field is now "set".
      bitField0_ |= 0x00000002;
      onChanged();
      return getValueFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Value of the annotation. The value must be able to convert
     * to the type according to the data schema.
     * </pre>
     *
     * <code>.google.cloud.visionai.v1.AnnotationValue value = 2;</code>
     */
    public com.google.cloud.visionai.v1.AnnotationValueOrBuilder getValueOrBuilder() {
      if (valueBuilder_ != null) {
        return valueBuilder_.getMessageOrBuilder();
      } else {
        return value_ == null
            ? com.google.cloud.visionai.v1.AnnotationValue.getDefaultInstance()
            : value_;
      }
    }
    /**
     *
     *
     * <pre>
     * Value of the annotation. The value must be able to convert
     * to the type according to the data schema.
     * </pre>
     *
     * <code>.google.cloud.visionai.v1.AnnotationValue value = 2;</code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.visionai.v1.AnnotationValue,
            com.google.cloud.visionai.v1.AnnotationValue.Builder,
            com.google.cloud.visionai.v1.AnnotationValueOrBuilder>
        getValueFieldBuilder() {
      // Lazily swaps the plain message for a field builder; value_ is nulled so
      // the builder becomes the single source of truth.
      if (valueBuilder_ == null) {
        valueBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.visionai.v1.AnnotationValue,
                com.google.cloud.visionai.v1.AnnotationValue.Builder,
                com.google.cloud.visionai.v1.AnnotationValueOrBuilder>(
                getValue(), getParentForChildren(), isClean());
        value_ = null;
      }
      return valueBuilder_;
    }
    // Same direct-message-or-field-builder scheme as value_ / valueBuilder_.
    private com.google.cloud.visionai.v1.Partition partition_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.visionai.v1.Partition,
            com.google.cloud.visionai.v1.Partition.Builder,
            com.google.cloud.visionai.v1.PartitionOrBuilder>
        partitionBuilder_;
    /**
     *
     *
     * <pre>
     * Partition information in time and space for the sub-asset level annotation.
     * </pre>
     *
     * <code>.google.cloud.visionai.v1.Partition partition = 3;</code>
     *
     * @return Whether the partition field is set.
     */
    public boolean hasPartition() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     *
     *
     * <pre>
     * Partition information in time and space for the sub-asset level annotation.
     * </pre>
     *
     * <code>.google.cloud.visionai.v1.Partition partition = 3;</code>
     *
     * @return The partition.
     */
    public com.google.cloud.visionai.v1.Partition getPartition() {
      if (partitionBuilder_ == null) {
        return partition_ == null
            ? com.google.cloud.visionai.v1.Partition.getDefaultInstance()
            : partition_;
      } else {
        return partitionBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * Partition information in time and space for the sub-asset level annotation.
     * </pre>
     *
     * <code>.google.cloud.visionai.v1.Partition partition = 3;</code>
     */
    public Builder setPartition(com.google.cloud.visionai.v1.Partition value) {
      if (partitionBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        partition_ = value;
      } else {
        partitionBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Partition information in time and space for the sub-asset level annotation.
     * </pre>
     *
     * <code>.google.cloud.visionai.v1.Partition partition = 3;</code>
     */
    public Builder setPartition(com.google.cloud.visionai.v1.Partition.Builder builderForValue) {
      if (partitionBuilder_ == null) {
        partition_ = builderForValue.build();
      } else {
        partitionBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Partition information in time and space for the sub-asset level annotation.
     * </pre>
     *
     * <code>.google.cloud.visionai.v1.Partition partition = 3;</code>
     */
    public Builder mergePartition(com.google.cloud.visionai.v1.Partition value) {
      if (partitionBuilder_ == null) {
        // Merge into the existing message only if one was already set and is not
        // the shared default instance; otherwise replace outright.
        if (((bitField0_ & 0x00000004) != 0)
            && partition_ != null
            && partition_ != com.google.cloud.visionai.v1.Partition.getDefaultInstance()) {
          getPartitionBuilder().mergeFrom(value);
        } else {
          partition_ = value;
        }
      } else {
        partitionBuilder_.mergeFrom(value);
      }
      if (partition_ != null) {
        bitField0_ |= 0x00000004;
        onChanged();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Partition information in time and space for the sub-asset level annotation.
     * </pre>
     *
     * <code>.google.cloud.visionai.v1.Partition partition = 3;</code>
     */
    public Builder clearPartition() {
      bitField0_ = (bitField0_ & ~0x00000004);
      partition_ = null;
      if (partitionBuilder_ != null) {
        partitionBuilder_.dispose();
        partitionBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Partition information in time and space for the sub-asset level annotation.
     * </pre>
     *
     * <code>.google.cloud.visionai.v1.Partition partition = 3;</code>
     */
    public com.google.cloud.visionai.v1.Partition.Builder getPartitionBuilder() {
      // Requesting a nested builder implies the field is now "set".
      bitField0_ |= 0x00000004;
      onChanged();
      return getPartitionFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Partition information in time and space for the sub-asset level annotation.
     * </pre>
     *
     * <code>.google.cloud.visionai.v1.Partition partition = 3;</code>
     */
    public com.google.cloud.visionai.v1.PartitionOrBuilder getPartitionOrBuilder() {
      if (partitionBuilder_ != null) {
        return partitionBuilder_.getMessageOrBuilder();
      } else {
        return partition_ == null
            ? com.google.cloud.visionai.v1.Partition.getDefaultInstance()
            : partition_;
      }
    }
    /**
     *
     *
     * <pre>
     * Partition information in time and space for the sub-asset level annotation.
     * </pre>
     *
     * <code>.google.cloud.visionai.v1.Partition partition = 3;</code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.visionai.v1.Partition,
            com.google.cloud.visionai.v1.Partition.Builder,
            com.google.cloud.visionai.v1.PartitionOrBuilder>
        getPartitionFieldBuilder() {
      // Lazily swaps the plain message for a field builder; partition_ is nulled
      // so the builder becomes the single source of truth.
      if (partitionBuilder_ == null) {
        partitionBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.visionai.v1.Partition,
                com.google.cloud.visionai.v1.Partition.Builder,
                com.google.cloud.visionai.v1.PartitionOrBuilder>(
                getPartition(), getParentForChildren(), isClean());
        partition_ = null;
      }
      return partitionBuilder_;
    }
    // Covariant overrides narrowing the return type; behavior inherited.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.visionai.v1.UserSpecifiedAnnotation)
}
// @@protoc_insertion_point(class_scope:google.cloud.visionai.v1.UserSpecifiedAnnotation)
  // Singleton all-defaults instance; also serves as the identity for merges.
  private static final com.google.cloud.visionai.v1.UserSpecifiedAnnotation DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.visionai.v1.UserSpecifiedAnnotation();
  }
  public static com.google.cloud.visionai.v1.UserSpecifiedAnnotation getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser delegates to Builder.mergeFrom and attaches the partially-built
  // message to any parse failure so callers can inspect what was read.
  private static final com.google.protobuf.Parser<UserSpecifiedAnnotation> PARSER =
      new com.google.protobuf.AbstractParser<UserSpecifiedAnnotation>() {
        @java.lang.Override
        public UserSpecifiedAnnotation parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<UserSpecifiedAnnotation> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<UserSpecifiedAnnotation> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.visionai.v1.UserSpecifiedAnnotation getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 37,938 | java-workflow-executions/proto-google-cloud-workflow-executions-v1/src/main/java/com/google/cloud/workflows/executions/v1/ListExecutionsResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/workflows/executions/v1/executions.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.workflows.executions.v1;
/**
*
*
* <pre>
* Response for the
* [ListExecutions][google.cloud.workflows.executions.v1.Executions.ListExecutions]
* method.
* </pre>
*
* Protobuf type {@code google.cloud.workflows.executions.v1.ListExecutionsResponse}
*/
public final class ListExecutionsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.workflows.executions.v1.ListExecutionsResponse)
ListExecutionsResponseOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use ListExecutionsResponse.newBuilder() to construct.
  private ListExecutionsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // No-arg constructor initializes all fields to proto3 defaults.
  private ListExecutionsResponse() {
    executions_ = java.util.Collections.emptyList();
    nextPageToken_ = "";
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListExecutionsResponse();
  }
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.workflows.executions.v1.ExecutionsProto
        .internal_static_google_cloud_workflows_executions_v1_ListExecutionsResponse_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.workflows.executions.v1.ExecutionsProto
        .internal_static_google_cloud_workflows_executions_v1_ListExecutionsResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.workflows.executions.v1.ListExecutionsResponse.class,
            com.google.cloud.workflows.executions.v1.ListExecutionsResponse.Builder.class);
  }
  public static final int EXECUTIONS_FIELD_NUMBER = 1;
  @SuppressWarnings("serial")
  // Immutable once the message is built; exposed directly by the list accessors.
  private java.util.List<com.google.cloud.workflows.executions.v1.Execution> executions_;
  /**
   *
   *
   * <pre>
   * The executions which match the request.
   * </pre>
   *
   * <code>repeated .google.cloud.workflows.executions.v1.Execution executions = 1;</code>
   */
  @java.lang.Override
  public java.util.List<com.google.cloud.workflows.executions.v1.Execution> getExecutionsList() {
    return executions_;
  }
  /**
   *
   *
   * <pre>
   * The executions which match the request.
   * </pre>
   *
   * <code>repeated .google.cloud.workflows.executions.v1.Execution executions = 1;</code>
   */
  @java.lang.Override
  public java.util.List<? extends com.google.cloud.workflows.executions.v1.ExecutionOrBuilder>
      getExecutionsOrBuilderList() {
    return executions_;
  }
  /**
   *
   *
   * <pre>
   * The executions which match the request.
   * </pre>
   *
   * <code>repeated .google.cloud.workflows.executions.v1.Execution executions = 1;</code>
   */
  @java.lang.Override
  public int getExecutionsCount() {
    return executions_.size();
  }
  /**
   *
   *
   * <pre>
   * The executions which match the request.
   * </pre>
   *
   * <code>repeated .google.cloud.workflows.executions.v1.Execution executions = 1;</code>
   */
  @java.lang.Override
  public com.google.cloud.workflows.executions.v1.Execution getExecutions(int index) {
    return executions_.get(index);
  }
  /**
   *
   *
   * <pre>
   * The executions which match the request.
   * </pre>
   *
   * <code>repeated .google.cloud.workflows.executions.v1.Execution executions = 1;</code>
   */
  @java.lang.Override
  public com.google.cloud.workflows.executions.v1.ExecutionOrBuilder getExecutionsOrBuilder(
      int index) {
    return executions_.get(index);
  }
  public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;

  // Stored as either a java.lang.String or a com.google.protobuf.ByteString.
  // Parsed bytes are decoded to a String lazily on first access and the decoded
  // form is cached back into this field (hence `volatile` for safe publication).
  @SuppressWarnings("serial")
  private volatile java.lang.Object nextPageToken_ = "";
  /**
   *
   *
   * <pre>
   * A token, which can be sent as `page_token` to retrieve the next page.
   * If this field is omitted, there are no subsequent pages.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The nextPageToken.
   */
  @java.lang.Override
  public java.lang.String getNextPageToken() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      // Cache the decoded form so subsequent calls return the String directly.
      nextPageToken_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * A token, which can be sent as `page_token` to retrieve the next page.
   * If this field is omitted, there are no subsequent pages.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The bytes for nextPageToken.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getNextPageTokenBytes() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      // Cache the encoded form so subsequent calls return the ByteString directly.
      nextPageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // -1 = not yet computed, 0 = known uninitialized, 1 = known initialized.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // This message has no required fields, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Field 1: repeated Execution messages, one length-delimited entry each.
    for (int i = 0; i < executions_.size(); i++) {
      output.writeMessage(1, executions_.get(i));
    }
    // Field 2: next_page_token, skipped when empty (proto3 default value).
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // memoizedSize of -1 means "not computed yet"; the message is immutable,
    // so the first computed size is cached and reused.
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    for (int i = 0; i < executions_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, executions_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.workflows.executions.v1.ListExecutionsResponse)) {
return super.equals(obj);
}
com.google.cloud.workflows.executions.v1.ListExecutionsResponse other =
(com.google.cloud.workflows.executions.v1.ListExecutionsResponse) obj;
if (!getExecutionsList().equals(other.getExecutionsList())) return false;
if (!getNextPageToken().equals(other.getNextPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
  @java.lang.Override
  public int hashCode() {
    // 0 means "not computed yet"; the message is immutable, so the first
    // computed hash is cached. (A legitimately-zero hash would simply be
    // recomputed on every call.)
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    // Repeated fields only contribute when non-empty, mirroring equals().
    if (getExecutionsCount() > 0) {
      hash = (37 * hash) + EXECUTIONS_FIELD_NUMBER;
      hash = (53 * hash) + getExecutionsList().hashCode();
    }
    hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getNextPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard protobuf parse entry points. All overloads delegate to PARSER;
  // the stream-based variants route through GeneratedMessageV3 helpers that
  // convert IOExceptions consistently.
  public static com.google.cloud.workflows.executions.v1.ListExecutionsResponse parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.workflows.executions.v1.ListExecutionsResponse parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.workflows.executions.v1.ListExecutionsResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.workflows.executions.v1.ListExecutionsResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.workflows.executions.v1.ListExecutionsResponse parseFrom(
      byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.workflows.executions.v1.ListExecutionsResponse parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.workflows.executions.v1.ListExecutionsResponse parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.workflows.executions.v1.ListExecutionsResponse parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Delimited variants read a varint length prefix before the message body.
  public static com.google.cloud.workflows.executions.v1.ListExecutionsResponse parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.workflows.executions.v1.ListExecutionsResponse parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.workflows.executions.v1.ListExecutionsResponse parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.workflows.executions.v1.ListExecutionsResponse parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  // Builders are always derived from the default instance so shared state stays
  // consistent with the generated runtime's expectations.
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(
      com.google.cloud.workflows.executions.v1.ListExecutionsResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // The default instance yields an empty builder; any other instance seeds the
    // builder with its current field values.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Response for the
   * [ListExecutions][google.cloud.workflows.executions.v1.Executions.ListExecutions]
   * method.
   * </pre>
   *
   * Protobuf type {@code google.cloud.workflows.executions.v1.ListExecutionsResponse}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.workflows.executions.v1.ListExecutionsResponse)
      com.google.cloud.workflows.executions.v1.ListExecutionsResponseOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.workflows.executions.v1.ExecutionsProto
          .internal_static_google_cloud_workflows_executions_v1_ListExecutionsResponse_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.workflows.executions.v1.ExecutionsProto
          .internal_static_google_cloud_workflows_executions_v1_ListExecutionsResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.workflows.executions.v1.ListExecutionsResponse.class,
              com.google.cloud.workflows.executions.v1.ListExecutionsResponse.Builder.class);
    }
    // Construct using com.google.cloud.workflows.executions.v1.ListExecutionsResponse.newBuilder()
    private Builder() {}
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (executionsBuilder_ == null) {
        executions_ = java.util.Collections.emptyList();
      } else {
        executions_ = null;
        executionsBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      nextPageToken_ = "";
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.workflows.executions.v1.ExecutionsProto
          .internal_static_google_cloud_workflows_executions_v1_ListExecutionsResponse_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.workflows.executions.v1.ListExecutionsResponse
        getDefaultInstanceForType() {
      return com.google.cloud.workflows.executions.v1.ListExecutionsResponse.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.workflows.executions.v1.ListExecutionsResponse build() {
      com.google.cloud.workflows.executions.v1.ListExecutionsResponse result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.workflows.executions.v1.ListExecutionsResponse buildPartial() {
      com.google.cloud.workflows.executions.v1.ListExecutionsResponse result =
          new com.google.cloud.workflows.executions.v1.ListExecutionsResponse(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Transfers the repeated `executions` field into the result, freezing the
    // builder-owned list as unmodifiable when no field builder is active.
    private void buildPartialRepeatedFields(
        com.google.cloud.workflows.executions.v1.ListExecutionsResponse result) {
      if (executionsBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)) {
          executions_ = java.util.Collections.unmodifiableList(executions_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.executions_ = executions_;
      } else {
        result.executions_ = executionsBuilder_.build();
      }
    }
    // Transfers singular fields that have their presence bit set.
    private void buildPartial0(
        com.google.cloud.workflows.executions.v1.ListExecutionsResponse result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.nextPageToken_ = nextPageToken_;
      }
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.workflows.executions.v1.ListExecutionsResponse) {
        return mergeFrom((com.google.cloud.workflows.executions.v1.ListExecutionsResponse) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    public Builder mergeFrom(
        com.google.cloud.workflows.executions.v1.ListExecutionsResponse other) {
      if (other
          == com.google.cloud.workflows.executions.v1.ListExecutionsResponse.getDefaultInstance())
        return this;
      if (executionsBuilder_ == null) {
        if (!other.executions_.isEmpty()) {
          if (executions_.isEmpty()) {
            // Adopt the other message's (immutable) list directly; a later
            // mutation will trigger ensureExecutionsIsMutable's copy-on-write.
            executions_ = other.executions_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureExecutionsIsMutable();
            executions_.addAll(other.executions_);
          }
          onChanged();
        }
      } else {
        if (!other.executions_.isEmpty()) {
          if (executionsBuilder_.isEmpty()) {
            executionsBuilder_.dispose();
            executionsBuilder_ = null;
            executions_ = other.executions_;
            bitField0_ = (bitField0_ & ~0x00000001);
            executionsBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getExecutionsFieldBuilder()
                    : null;
          } else {
            executionsBuilder_.addAllMessages(other.executions_);
          }
        }
      }
      if (!other.getNextPageToken().isEmpty()) {
        nextPageToken_ = other.nextPageToken_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        // Wire-format loop: dispatch on each field tag until EOF (tag 0) or an
        // end-group marker encountered by parseUnknownField.
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                com.google.cloud.workflows.executions.v1.Execution m =
                    input.readMessage(
                        com.google.cloud.workflows.executions.v1.Execution.parser(),
                        extensionRegistry);
                if (executionsBuilder_ == null) {
                  ensureExecutionsIsMutable();
                  executions_.add(m);
                } else {
                  executionsBuilder_.addMessage(m);
                }
                break;
              } // case 10
            case 18:
              {
                nextPageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Presence bits: 0x1 = executions list is builder-owned (mutable),
    // 0x2 = nextPageToken has been explicitly set.
    private int bitField0_;
    private java.util.List<com.google.cloud.workflows.executions.v1.Execution> executions_ =
        java.util.Collections.emptyList();
    // Copy-on-write: replaces a shared/immutable list with a private ArrayList
    // before the first mutation, then marks it builder-owned via bit 0x1.
    private void ensureExecutionsIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        executions_ =
            new java.util.ArrayList<com.google.cloud.workflows.executions.v1.Execution>(
                executions_);
        bitField0_ |= 0x00000001;
      }
    }
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.workflows.executions.v1.Execution,
            com.google.cloud.workflows.executions.v1.Execution.Builder,
            com.google.cloud.workflows.executions.v1.ExecutionOrBuilder>
        executionsBuilder_;
    /**
     *
     *
     * <pre>
     * The executions which match the request.
     * </pre>
     *
     * <code>repeated .google.cloud.workflows.executions.v1.Execution executions = 1;</code>
     */
    public java.util.List<com.google.cloud.workflows.executions.v1.Execution> getExecutionsList() {
      if (executionsBuilder_ == null) {
        return java.util.Collections.unmodifiableList(executions_);
      } else {
        return executionsBuilder_.getMessageList();
      }
    }
    /**
     *
     *
     * <pre>
     * The executions which match the request.
     * </pre>
     *
     * <code>repeated .google.cloud.workflows.executions.v1.Execution executions = 1;</code>
     */
    public int getExecutionsCount() {
      if (executionsBuilder_ == null) {
        return executions_.size();
      } else {
        return executionsBuilder_.getCount();
      }
    }
    /**
     *
     *
     * <pre>
     * The executions which match the request.
     * </pre>
     *
     * <code>repeated .google.cloud.workflows.executions.v1.Execution executions = 1;</code>
     */
    public com.google.cloud.workflows.executions.v1.Execution getExecutions(int index) {
      if (executionsBuilder_ == null) {
        return executions_.get(index);
      } else {
        return executionsBuilder_.getMessage(index);
      }
    }
    /**
     *
     *
     * <pre>
     * The executions which match the request.
     * </pre>
     *
     * <code>repeated .google.cloud.workflows.executions.v1.Execution executions = 1;</code>
     */
    public Builder setExecutions(
        int index, com.google.cloud.workflows.executions.v1.Execution value) {
      if (executionsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureExecutionsIsMutable();
        executions_.set(index, value);
        onChanged();
      } else {
        executionsBuilder_.setMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The executions which match the request.
     * </pre>
     *
     * <code>repeated .google.cloud.workflows.executions.v1.Execution executions = 1;</code>
     */
    public Builder setExecutions(
        int index, com.google.cloud.workflows.executions.v1.Execution.Builder builderForValue) {
      if (executionsBuilder_ == null) {
        ensureExecutionsIsMutable();
        executions_.set(index, builderForValue.build());
        onChanged();
      } else {
        executionsBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The executions which match the request.
     * </pre>
     *
     * <code>repeated .google.cloud.workflows.executions.v1.Execution executions = 1;</code>
     */
    public Builder addExecutions(com.google.cloud.workflows.executions.v1.Execution value) {
      if (executionsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureExecutionsIsMutable();
        executions_.add(value);
        onChanged();
      } else {
        executionsBuilder_.addMessage(value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The executions which match the request.
     * </pre>
     *
     * <code>repeated .google.cloud.workflows.executions.v1.Execution executions = 1;</code>
     */
    public Builder addExecutions(
        int index, com.google.cloud.workflows.executions.v1.Execution value) {
      if (executionsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureExecutionsIsMutable();
        executions_.add(index, value);
        onChanged();
      } else {
        executionsBuilder_.addMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The executions which match the request.
     * </pre>
     *
     * <code>repeated .google.cloud.workflows.executions.v1.Execution executions = 1;</code>
     */
    public Builder addExecutions(
        com.google.cloud.workflows.executions.v1.Execution.Builder builderForValue) {
      if (executionsBuilder_ == null) {
        ensureExecutionsIsMutable();
        executions_.add(builderForValue.build());
        onChanged();
      } else {
        executionsBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The executions which match the request.
     * </pre>
     *
     * <code>repeated .google.cloud.workflows.executions.v1.Execution executions = 1;</code>
     */
    public Builder addExecutions(
        int index, com.google.cloud.workflows.executions.v1.Execution.Builder builderForValue) {
      if (executionsBuilder_ == null) {
        ensureExecutionsIsMutable();
        executions_.add(index, builderForValue.build());
        onChanged();
      } else {
        executionsBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The executions which match the request.
     * </pre>
     *
     * <code>repeated .google.cloud.workflows.executions.v1.Execution executions = 1;</code>
     */
    public Builder addAllExecutions(
        java.lang.Iterable<? extends com.google.cloud.workflows.executions.v1.Execution> values) {
      if (executionsBuilder_ == null) {
        ensureExecutionsIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, executions_);
        onChanged();
      } else {
        executionsBuilder_.addAllMessages(values);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The executions which match the request.
     * </pre>
     *
     * <code>repeated .google.cloud.workflows.executions.v1.Execution executions = 1;</code>
     */
    public Builder clearExecutions() {
      if (executionsBuilder_ == null) {
        executions_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        executionsBuilder_.clear();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The executions which match the request.
     * </pre>
     *
     * <code>repeated .google.cloud.workflows.executions.v1.Execution executions = 1;</code>
     */
    public Builder removeExecutions(int index) {
      if (executionsBuilder_ == null) {
        ensureExecutionsIsMutable();
        executions_.remove(index);
        onChanged();
      } else {
        executionsBuilder_.remove(index);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The executions which match the request.
     * </pre>
     *
     * <code>repeated .google.cloud.workflows.executions.v1.Execution executions = 1;</code>
     */
    public com.google.cloud.workflows.executions.v1.Execution.Builder getExecutionsBuilder(
        int index) {
      return getExecutionsFieldBuilder().getBuilder(index);
    }
    /**
     *
     *
     * <pre>
     * The executions which match the request.
     * </pre>
     *
     * <code>repeated .google.cloud.workflows.executions.v1.Execution executions = 1;</code>
     */
    public com.google.cloud.workflows.executions.v1.ExecutionOrBuilder getExecutionsOrBuilder(
        int index) {
      if (executionsBuilder_ == null) {
        return executions_.get(index);
      } else {
        return executionsBuilder_.getMessageOrBuilder(index);
      }
    }
    /**
     *
     *
     * <pre>
     * The executions which match the request.
     * </pre>
     *
     * <code>repeated .google.cloud.workflows.executions.v1.Execution executions = 1;</code>
     */
    public java.util.List<? extends com.google.cloud.workflows.executions.v1.ExecutionOrBuilder>
        getExecutionsOrBuilderList() {
      if (executionsBuilder_ != null) {
        return executionsBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(executions_);
      }
    }
    /**
     *
     *
     * <pre>
     * The executions which match the request.
     * </pre>
     *
     * <code>repeated .google.cloud.workflows.executions.v1.Execution executions = 1;</code>
     */
    public com.google.cloud.workflows.executions.v1.Execution.Builder addExecutionsBuilder() {
      return getExecutionsFieldBuilder()
          .addBuilder(com.google.cloud.workflows.executions.v1.Execution.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * The executions which match the request.
     * </pre>
     *
     * <code>repeated .google.cloud.workflows.executions.v1.Execution executions = 1;</code>
     */
    public com.google.cloud.workflows.executions.v1.Execution.Builder addExecutionsBuilder(
        int index) {
      return getExecutionsFieldBuilder()
          .addBuilder(
              index, com.google.cloud.workflows.executions.v1.Execution.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * The executions which match the request.
     * </pre>
     *
     * <code>repeated .google.cloud.workflows.executions.v1.Execution executions = 1;</code>
     */
    public java.util.List<com.google.cloud.workflows.executions.v1.Execution.Builder>
        getExecutionsBuilderList() {
      return getExecutionsFieldBuilder().getBuilderList();
    }
    // Lazily creates the RepeatedFieldBuilderV3 on first builder-style access;
    // once created, it takes over ownership of executions_ (which is nulled out).
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.workflows.executions.v1.Execution,
            com.google.cloud.workflows.executions.v1.Execution.Builder,
            com.google.cloud.workflows.executions.v1.ExecutionOrBuilder>
        getExecutionsFieldBuilder() {
      if (executionsBuilder_ == null) {
        executionsBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloud.workflows.executions.v1.Execution,
                com.google.cloud.workflows.executions.v1.Execution.Builder,
                com.google.cloud.workflows.executions.v1.ExecutionOrBuilder>(
                executions_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
        executions_ = null;
      }
      return executionsBuilder_;
    }
    // Same String/ByteString lazy-decoding representation as the message field.
    private java.lang.Object nextPageToken_ = "";
    /**
     *
     *
     * <pre>
     * A token, which can be sent as `page_token` to retrieve the next page.
     * If this field is omitted, there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The nextPageToken.
     */
    public java.lang.String getNextPageToken() {
      java.lang.Object ref = nextPageToken_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        nextPageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * A token, which can be sent as `page_token` to retrieve the next page.
     * If this field is omitted, there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The bytes for nextPageToken.
     */
    public com.google.protobuf.ByteString getNextPageTokenBytes() {
      java.lang.Object ref = nextPageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        nextPageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * A token, which can be sent as `page_token` to retrieve the next page.
     * If this field is omitted, there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * A token, which can be sent as `page_token` to retrieve the next page.
     * If this field is omitted, there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearNextPageToken() {
      nextPageToken_ = getDefaultInstance().getNextPageToken();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * A token, which can be sent as `page_token` to retrieve the next page.
     * If this field is omitted, there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The bytes for nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.cloud.workflows.executions.v1.ListExecutionsResponse)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.workflows.executions.v1.ListExecutionsResponse)
  // Singleton empty instance; also the prototype from which all builders derive.
  private static final com.google.cloud.workflows.executions.v1.ListExecutionsResponse
      DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.workflows.executions.v1.ListExecutionsResponse();
  }
  public static com.google.cloud.workflows.executions.v1.ListExecutionsResponse
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Shared parser instance. parsePartialFrom reports partially-parsed state via
  // setUnfinishedMessage so callers can inspect what was read before a failure.
  private static final com.google.protobuf.Parser<ListExecutionsResponse> PARSER =
      new com.google.protobuf.AbstractParser<ListExecutionsResponse>() {
        @java.lang.Override
        public ListExecutionsResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<ListExecutionsResponse> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<ListExecutionsResponse> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.workflows.executions.v1.ListExecutionsResponse
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
// ***************************************************************************************************************************
// * Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file *
// * distributed with this work for additional information regarding copyright ownership. The ASF licenses this file *
// * to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance *
// * with the License. You may obtain a copy of the License at *
// * *
// * http://www.apache.org/licenses/LICENSE-2.0 *
// * *
// * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an *
// * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the *
// * specific language governing permissions and limitations under the License. *
// ***************************************************************************************************************************
package org.apache.juneau.html;
import static org.apache.juneau.collections.JsonMap.*;
import java.lang.annotation.*;
import java.nio.charset.*;
import java.util.*;
import org.apache.juneau.*;
import org.apache.juneau.annotation.*;
import org.apache.juneau.collections.*;
import org.apache.juneau.internal.*;
import org.apache.juneau.json.*;
import org.apache.juneau.jsonschema.*;
import org.apache.juneau.utils.*;
import org.apache.juneau.xml.*;
/**
* Serializes POJO metamodels to HTML.
*
* <h5 class='topic'>Media types</h5>
*
* Handles <c>Accept</c> types: <bc>text/html+schema</bc>
* <p>
* Produces <c>Content-Type</c> types: <bc>text/html</bc>
*
* <h5 class='topic'>Description</h5>
*
* Essentially the same as {@link HtmlSerializer}, except serializes the POJO metamodel instead of the model itself.
*
* <p>
* Produces output that describes the POJO metamodel similar to an XML schema document.
*
* <p>
* The easiest way to create instances of this class is through the {@link HtmlSerializer#getSchemaSerializer()},
* which will create a schema serializer with the same settings as the originating serializer.
*
* <h5 class='section'>Notes:</h5><ul>
* <li class='note'>This class is thread safe and reusable.
* </ul>
*
* <h5 class='section'>See Also:</h5><ul>
* <li class='link'><a class="doclink" href="https://juneau.apache.org/docs/topics/HtmlBasics">HTML Basics</a>
* </ul>
*/
public class HtmlSchemaSerializer extends HtmlSerializer {
	//-------------------------------------------------------------------------------------------------------------------
	// Static
	//-------------------------------------------------------------------------------------------------------------------

	/** Default serializer, all default settings.*/
	public static final HtmlSchemaSerializer DEFAULT = new HtmlSchemaSerializer(create());

	/** Default serializer, with whitespace. */
	public static final HtmlSchemaSerializer DEFAULT_READABLE = new Readable(create());

	/** Default serializer, single quotes, simple mode. */
	public static final HtmlSchemaSerializer DEFAULT_SIMPLE = new Simple(create());

	/** Default serializer, single quotes, simple mode, with whitespace. */
	public static final HtmlSchemaSerializer DEFAULT_SIMPLE_READABLE = new SimpleReadable(create());

	/**
	 * Creates a new builder for this object.
	 *
	 * @return A new builder.
	 */
	public static Builder create() {
		return new Builder();
	}
//-------------------------------------------------------------------------------------------------------------------
// Static subclasses
//-------------------------------------------------------------------------------------------------------------------
	/** Default serializer, with whitespace. */
	public static class Readable extends HtmlSchemaSerializer {

		/**
		 * Constructor.
		 *
		 * @param builder The builder for this object.
		 */
		public Readable(Builder builder) {
			// Same as the parent serializer but with whitespace enabled in the output.
			super(builder.useWhitespace());
		}
	}
	/** Default serializer, single quotes, simple mode. */
	public static class Simple extends HtmlSchemaSerializer {

		/**
		 * Constructor.
		 *
		 * @param builder The builder for this object.
		 */
		public Simple(Builder builder) {
			// Same as the parent serializer but quoting attribute values with single quotes.
			super(builder.quoteChar('\''));
		}
	}
/** Default serializer, single quotes, simple mode, with whitespace. */
public static class SimpleReadable extends HtmlSchemaSerializer {
/**
* Constructor.
*
* @param builder The builder for this object.
*/
public SimpleReadable(Builder builder) {
super(builder.quoteChar('\'').useWhitespace());
}
}
//-------------------------------------------------------------------------------------------------------------------
// Builder
//-------------------------------------------------------------------------------------------------------------------
/**
* Builder class.
*/
@FluentSetters
public static class Builder extends HtmlSerializer.Builder {
private static final Cache<HashKey,HtmlSchemaSerializer> CACHE = Cache.of(HashKey.class, HtmlSchemaSerializer.class).build();
JsonSchemaGenerator.Builder generatorBuilder;
		/**
		 * Constructor, default settings.
		 */
		protected Builder() {
			// Produces plain HTML but is selected via the schema-specific media type.
			produces("text/html");
			accept("text/html+schema");
			generatorBuilder = JsonSchemaGenerator.create().beanContext(beanContext());
		}
		/**
		 * Copy constructor.
		 *
		 * @param copyFrom The serializer to copy settings from.
		 */
		protected Builder(HtmlSchemaSerializer copyFrom) {
			super(copyFrom);
			// Copy the generator settings too, re-pointing them at this builder's bean context.
			generatorBuilder = copyFrom.generator.copy().beanContext(beanContext());
		}
		/**
		 * Copy constructor.
		 *
		 * @param copyFrom The builder to copy settings from.
		 */
		protected Builder(Builder copyFrom) {
			super(copyFrom);
			generatorBuilder = copyFrom.generatorBuilder.copy().beanContext(beanContext());
		}
		@Override /* Context.Builder */
		public Builder copy() {
			return new Builder(this);
		}
		@Override /* Context.Builder */
		public HtmlSchemaSerializer build() {
			// Consults the static CACHE so builders with identical settings share one serializer instance.
			return cache(CACHE).build(HtmlSchemaSerializer.class);
		}
		@Override /* Context.Builder */
		public HashKey hashKey() {
			// The generator settings participate in cache identity along with the superclass settings.
			return HashKey.of(
				super.hashKey(),
				generatorBuilder.hashKey()
			);
		}
//-----------------------------------------------------------------------------------------------------------------
// Properties
//-----------------------------------------------------------------------------------------------------------------
		/**
		 * <i><l>HtmlSchemaSerializer</l> configuration property: </i> Add descriptions.
		 *
		 * <p>
		 * Identifies which categories of types that descriptions should be automatically added to generated schemas.
		 * <p>
		 * The description is the result of calling {@link ClassMeta#getFullName()}.
		 *
		 * <h5 class='section'>See Also:</h5><ul>
		 * 	<li class='jm'>{@link org.apache.juneau.jsonschema.JsonSchemaGenerator.Builder#addDescriptionsTo(TypeCategory...)}
		 * </ul>
		 *
		 * @param values
		 * 	The category values to add to this setting.
		 * 	<br>The default is no categories (descriptions are not automatically added).
		 * @return This object.
		 */
		@FluentSetter
		public Builder addDescriptionsTo(TypeCategory...values) {
			generatorBuilder.addDescriptionsTo(values);
			return this;
		}
		/**
		 * <i><l>HtmlSchemaSerializer</l> configuration property: </i> Add examples.
		 *
		 * <p>
		 * Identifies which categories of types that examples should be automatically added to generated schemas.
		 * <p>
		 * The examples come from calling {@link ClassMeta#getExample(BeanSession,JsonParserSession)} which in turn gets examples
		 * from the following:
		 * <ul class='javatree'>
		 * 	<li class='ja'>{@link Example}
		 * 	<li class='ja'>{@link Marshalled#example() Marshalled(example)}
		 * </ul>
		 *
		 * <h5 class='section'>See Also:</h5><ul>
		 * 	<li class='jm'>{@link org.apache.juneau.jsonschema.JsonSchemaGenerator.Builder#addExamplesTo(TypeCategory...)}
		 * </ul>
		 *
		 * @param values
		 * 	The category values to add to this setting.
		 * 	<br>The default is no categories (examples are not automatically added).
		 * @return This object.
		 */
		@FluentSetter
		public Builder addExamplesTo(TypeCategory...values) {
			generatorBuilder.addExamplesTo(values);
			return this;
		}
		/**
		 * <i><l>HtmlSchemaSerializer</l> configuration property: </i> Allow nested descriptions.
		 *
		 * <p>
		 * Identifies whether nested descriptions are allowed in schema definitions.
		 * <p>
		 * This is an enable-only switch on this builder; it delegates to the underlying schema generator builder.
		 *
		 * <h5 class='section'>See Also:</h5><ul>
		 * 	<li class='jm'>{@link org.apache.juneau.jsonschema.JsonSchemaGenerator.Builder#allowNestedDescriptions()}
		 * </ul>
		 *
		 * @return This object.
		 */
		@FluentSetter
		public Builder allowNestedDescriptions() {
			generatorBuilder.allowNestedDescriptions();
			return this;
		}
		/**
		 * <i><l>HtmlSchemaSerializer</l> configuration property: </i> Allow nested examples.
		 *
		 * <p>
		 * Identifies whether nested examples are allowed in schema definitions.
		 * <p>
		 * This is an enable-only switch on this builder; it delegates to the underlying schema generator builder.
		 *
		 * <h5 class='section'>See Also:</h5><ul>
		 * 	<li class='jm'>{@link org.apache.juneau.jsonschema.JsonSchemaGenerator.Builder#allowNestedExamples()}
		 * </ul>
		 *
		 * @return This object.
		 */
		@FluentSetter
		public Builder allowNestedExamples() {
			generatorBuilder.allowNestedExamples();
			return this;
		}
		/**
		 * <i><l>HtmlSchemaSerializer</l> configuration property: </i> Schema definition mapper.
		 *
		 * <p>
		 * Interface to use for converting Bean classes to definition IDs and URIs.
		 * <p>
		 * Used primarily for defining common definition sections for beans in Swagger JSON.
		 * <p>
		 * This setting is ignored if {@link org.apache.juneau.jsonschema.JsonSchemaGenerator.Builder#useBeanDefs()} is not enabled.
		 *
		 * <h5 class='section'>See Also:</h5><ul>
		 * 	<li class='jm'>{@link org.apache.juneau.jsonschema.JsonSchemaGenerator.Builder#beanDefMapper(Class)}
		 * </ul>
		 *
		 * @param value
		 * 	The new value for this property.
		 * 	<br>The default is {@link org.apache.juneau.jsonschema.BasicBeanDefMapper}.
		 * @return This object.
		 */
		@FluentSetter
		public Builder beanDefMapper(Class<? extends BeanDefMapper> value) {
			generatorBuilder.beanDefMapper(value);
			return this;
		}
		/**
		 * <i><l>HtmlSchemaSerializer</l> configuration property: </i> Use bean definitions.
		 *
		 * <p>
		 * When enabled, schemas on beans will be serialized as the following:
		 * <p class='bjson'>
		 * 	{
		 * 		type: <js>'object'</js>,
		 * 		<js>'$ref'</js>: <js>'#/definitions/TypeId'</js>
		 * 	}
		 * </p>
		 *
		 * @return This object.
		 */
		@FluentSetter
		public Builder useBeanDefs() {
			generatorBuilder.useBeanDefs();
			return this;
		}
// <FluentSetters>
@Override /* GENERATED - org.apache.juneau.Context.Builder */
public Builder annotations(Annotation...values) {
super.annotations(values);
return this;
}
@Override /* GENERATED - org.apache.juneau.Context.Builder */
public Builder apply(AnnotationWorkList work) {
super.apply(work);
return this;
}
@Override /* GENERATED - org.apache.juneau.Context.Builder */
public Builder applyAnnotations(Object...from) {
super.applyAnnotations(from);
return this;
}
@Override /* GENERATED - org.apache.juneau.Context.Builder */
public Builder applyAnnotations(Class<?>...from) {
super.applyAnnotations(from);
return this;
}
@Override /* GENERATED - org.apache.juneau.Context.Builder */
public Builder cache(Cache<HashKey,? extends org.apache.juneau.Context> value) {
super.cache(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.Context.Builder */
public Builder debug() {
super.debug();
return this;
}
@Override /* GENERATED - org.apache.juneau.Context.Builder */
public Builder debug(boolean value) {
super.debug(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.Context.Builder */
public Builder impl(Context value) {
super.impl(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.Context.Builder */
public Builder type(Class<? extends org.apache.juneau.Context> value) {
super.type(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beanClassVisibility(Visibility value) {
super.beanClassVisibility(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beanConstructorVisibility(Visibility value) {
super.beanConstructorVisibility(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beanContext(BeanContext value) {
super.beanContext(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beanContext(BeanContext.Builder value) {
super.beanContext(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beanDictionary(java.lang.Class<?>...values) {
super.beanDictionary(values);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beanFieldVisibility(Visibility value) {
super.beanFieldVisibility(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beanInterceptor(Class<?> on, Class<? extends org.apache.juneau.swap.BeanInterceptor<?>> value) {
super.beanInterceptor(on, value);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beanMapPutReturnsOldValue() {
super.beanMapPutReturnsOldValue();
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beanMethodVisibility(Visibility value) {
super.beanMethodVisibility(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beanProperties(Map<String,Object> values) {
super.beanProperties(values);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beanProperties(Class<?> beanClass, String properties) {
super.beanProperties(beanClass, properties);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beanProperties(String beanClassName, String properties) {
super.beanProperties(beanClassName, properties);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beanPropertiesExcludes(Map<String,Object> values) {
super.beanPropertiesExcludes(values);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beanPropertiesExcludes(Class<?> beanClass, String properties) {
super.beanPropertiesExcludes(beanClass, properties);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beanPropertiesExcludes(String beanClassName, String properties) {
super.beanPropertiesExcludes(beanClassName, properties);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beanPropertiesReadOnly(Map<String,Object> values) {
super.beanPropertiesReadOnly(values);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beanPropertiesReadOnly(Class<?> beanClass, String properties) {
super.beanPropertiesReadOnly(beanClass, properties);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beanPropertiesReadOnly(String beanClassName, String properties) {
super.beanPropertiesReadOnly(beanClassName, properties);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beanPropertiesWriteOnly(Map<String,Object> values) {
super.beanPropertiesWriteOnly(values);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beanPropertiesWriteOnly(Class<?> beanClass, String properties) {
super.beanPropertiesWriteOnly(beanClass, properties);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beanPropertiesWriteOnly(String beanClassName, String properties) {
super.beanPropertiesWriteOnly(beanClassName, properties);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beansRequireDefaultConstructor() {
super.beansRequireDefaultConstructor();
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beansRequireSerializable() {
super.beansRequireSerializable();
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beansRequireSettersForGetters() {
super.beansRequireSettersForGetters();
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder dictionaryOn(Class<?> on, java.lang.Class<?>...values) {
super.dictionaryOn(on, values);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder disableBeansRequireSomeProperties() {
super.disableBeansRequireSomeProperties();
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder disableIgnoreMissingSetters() {
super.disableIgnoreMissingSetters();
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder disableIgnoreTransientFields() {
super.disableIgnoreTransientFields();
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder disableIgnoreUnknownNullBeanProperties() {
super.disableIgnoreUnknownNullBeanProperties();
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder disableInterfaceProxies() {
super.disableInterfaceProxies();
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public <T> Builder example(Class<T> pojoClass, T o) {
super.example(pojoClass, o);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public <T> Builder example(Class<T> pojoClass, String json) {
super.example(pojoClass, json);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder findFluentSetters() {
super.findFluentSetters();
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder findFluentSetters(Class<?> on) {
super.findFluentSetters(on);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder ignoreInvocationExceptionsOnGetters() {
super.ignoreInvocationExceptionsOnGetters();
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder ignoreInvocationExceptionsOnSetters() {
super.ignoreInvocationExceptionsOnSetters();
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder ignoreUnknownBeanProperties() {
super.ignoreUnknownBeanProperties();
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder ignoreUnknownEnumValues() {
super.ignoreUnknownEnumValues();
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder implClass(Class<?> interfaceClass, Class<?> implClass) {
super.implClass(interfaceClass, implClass);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder implClasses(Map<Class<?>,Class<?>> values) {
super.implClasses(values);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder interfaceClass(Class<?> on, Class<?> value) {
super.interfaceClass(on, value);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder interfaces(java.lang.Class<?>...value) {
super.interfaces(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder locale(Locale value) {
super.locale(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder mediaType(MediaType value) {
super.mediaType(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder notBeanClasses(java.lang.Class<?>...values) {
super.notBeanClasses(values);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder notBeanPackages(String...values) {
super.notBeanPackages(values);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder propertyNamer(Class<? extends org.apache.juneau.PropertyNamer> value) {
super.propertyNamer(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder propertyNamer(Class<?> on, Class<? extends org.apache.juneau.PropertyNamer> value) {
super.propertyNamer(on, value);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder sortProperties() {
super.sortProperties();
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder sortProperties(java.lang.Class<?>...on) {
super.sortProperties(on);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder stopClass(Class<?> on, Class<?> value) {
super.stopClass(on, value);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public <T, S> Builder swap(Class<T> normalClass, Class<S> swappedClass, ThrowingFunction<T,S> swapFunction) {
super.swap(normalClass, swappedClass, swapFunction);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public <T, S> Builder swap(Class<T> normalClass, Class<S> swappedClass, ThrowingFunction<T,S> swapFunction, ThrowingFunction<S,T> unswapFunction) {
super.swap(normalClass, swappedClass, swapFunction, unswapFunction);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder swaps(Object...values) {
super.swaps(values);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder swaps(Class<?>...values) {
super.swaps(values);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder timeZone(TimeZone value) {
super.timeZone(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder typeName(Class<?> on, String value) {
super.typeName(on, value);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder typePropertyName(String value) {
super.typePropertyName(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder typePropertyName(Class<?> on, String value) {
super.typePropertyName(on, value);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder useEnumNames() {
super.useEnumNames();
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder useJavaBeanIntrospector() {
super.useJavaBeanIntrospector();
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanTraverseContext.Builder */
public Builder detectRecursions() {
super.detectRecursions();
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanTraverseContext.Builder */
public Builder detectRecursions(boolean value) {
super.detectRecursions(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanTraverseContext.Builder */
public Builder ignoreRecursions() {
super.ignoreRecursions();
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanTraverseContext.Builder */
public Builder ignoreRecursions(boolean value) {
super.ignoreRecursions(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanTraverseContext.Builder */
public Builder initialDepth(int value) {
super.initialDepth(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanTraverseContext.Builder */
public Builder maxDepth(int value) {
super.maxDepth(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.Serializer.Builder */
public Builder accept(String value) {
super.accept(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.Serializer.Builder */
public Builder addBeanTypes() {
super.addBeanTypes();
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.Serializer.Builder */
public Builder addBeanTypes(boolean value) {
super.addBeanTypes(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.Serializer.Builder */
public Builder addRootType() {
super.addRootType();
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.Serializer.Builder */
public Builder addRootType(boolean value) {
super.addRootType(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.Serializer.Builder */
public Builder keepNullProperties() {
super.keepNullProperties();
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.Serializer.Builder */
public Builder keepNullProperties(boolean value) {
super.keepNullProperties(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.Serializer.Builder */
public Builder listener(Class<? extends org.apache.juneau.serializer.SerializerListener> value) {
super.listener(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.Serializer.Builder */
public Builder produces(String value) {
super.produces(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.Serializer.Builder */
public Builder sortCollections() {
super.sortCollections();
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.Serializer.Builder */
public Builder sortCollections(boolean value) {
super.sortCollections(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.Serializer.Builder */
public Builder sortMaps() {
super.sortMaps();
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.Serializer.Builder */
public Builder sortMaps(boolean value) {
super.sortMaps(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.Serializer.Builder */
public Builder trimEmptyCollections() {
super.trimEmptyCollections();
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.Serializer.Builder */
public Builder trimEmptyCollections(boolean value) {
super.trimEmptyCollections(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.Serializer.Builder */
public Builder trimEmptyMaps() {
super.trimEmptyMaps();
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.Serializer.Builder */
public Builder trimEmptyMaps(boolean value) {
super.trimEmptyMaps(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.Serializer.Builder */
public Builder trimStrings() {
super.trimStrings();
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.Serializer.Builder */
public Builder trimStrings(boolean value) {
super.trimStrings(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.Serializer.Builder */
public Builder uriContext(UriContext value) {
super.uriContext(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.Serializer.Builder */
public Builder uriRelativity(UriRelativity value) {
super.uriRelativity(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.Serializer.Builder */
public Builder uriResolution(UriResolution value) {
super.uriResolution(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.WriterSerializer.Builder */
public Builder fileCharset(Charset value) {
super.fileCharset(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.WriterSerializer.Builder */
public Builder maxIndent(int value) {
super.maxIndent(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.WriterSerializer.Builder */
public Builder quoteChar(char value) {
super.quoteChar(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.WriterSerializer.Builder */
public Builder quoteCharOverride(char value) {
super.quoteCharOverride(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.WriterSerializer.Builder */
public Builder sq() {
super.sq();
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.WriterSerializer.Builder */
public Builder streamCharset(Charset value) {
super.streamCharset(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.WriterSerializer.Builder */
public Builder useWhitespace() {
super.useWhitespace();
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.WriterSerializer.Builder */
public Builder useWhitespace(boolean value) {
super.useWhitespace(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.WriterSerializer.Builder */
public Builder ws() {
super.ws();
return this;
}
@Override /* GENERATED - org.apache.juneau.xml.XmlSerializer.Builder */
public Builder addBeanTypesXml() {
super.addBeanTypesXml();
return this;
}
@Override /* GENERATED - org.apache.juneau.xml.XmlSerializer.Builder */
public Builder addBeanTypesXml(boolean value) {
super.addBeanTypesXml(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.xml.XmlSerializer.Builder */
public Builder addNamespaceUrisToRoot() {
super.addNamespaceUrisToRoot();
return this;
}
@Override /* GENERATED - org.apache.juneau.xml.XmlSerializer.Builder */
public Builder addNamespaceUrisToRoot(boolean value) {
super.addNamespaceUrisToRoot(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.xml.XmlSerializer.Builder */
public Builder defaultNamespace(Namespace value) {
super.defaultNamespace(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.xml.XmlSerializer.Builder */
public Builder disableAutoDetectNamespaces() {
super.disableAutoDetectNamespaces();
return this;
}
@Override /* GENERATED - org.apache.juneau.xml.XmlSerializer.Builder */
public Builder disableAutoDetectNamespaces(boolean value) {
super.disableAutoDetectNamespaces(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.xml.XmlSerializer.Builder */
public Builder enableNamespaces() {
super.enableNamespaces();
return this;
}
@Override /* GENERATED - org.apache.juneau.xml.XmlSerializer.Builder */
public Builder enableNamespaces(boolean value) {
super.enableNamespaces(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.xml.XmlSerializer.Builder */
public Builder namespaces(Namespace...values) {
super.namespaces(values);
return this;
}
@Override /* GENERATED - org.apache.juneau.xml.XmlSerializer.Builder */
public Builder ns() {
super.ns();
return this;
}
@Override /* GENERATED - org.apache.juneau.html.HtmlSerializer.Builder */
public Builder addBeanTypesHtml() {
super.addBeanTypesHtml();
return this;
}
@Override /* GENERATED - org.apache.juneau.html.HtmlSerializer.Builder */
public Builder addBeanTypesHtml(boolean value) {
super.addBeanTypesHtml(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.html.HtmlSerializer.Builder */
public Builder addKeyValueTableHeaders() {
super.addKeyValueTableHeaders();
return this;
}
@Override /* GENERATED - org.apache.juneau.html.HtmlSerializer.Builder */
public Builder addKeyValueTableHeaders(boolean value) {
super.addKeyValueTableHeaders(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.html.HtmlSerializer.Builder */
public Builder disableDetectLabelParameters() {
super.disableDetectLabelParameters();
return this;
}
@Override /* GENERATED - org.apache.juneau.html.HtmlSerializer.Builder */
public Builder disableDetectLabelParameters(boolean value) {
super.disableDetectLabelParameters(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.html.HtmlSerializer.Builder */
public Builder disableDetectLinksInStrings() {
super.disableDetectLinksInStrings();
return this;
}
@Override /* GENERATED - org.apache.juneau.html.HtmlSerializer.Builder */
public Builder disableDetectLinksInStrings(boolean value) {
super.disableDetectLinksInStrings(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.html.HtmlSerializer.Builder */
public Builder labelParameter(String value) {
super.labelParameter(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.html.HtmlSerializer.Builder */
public Builder uriAnchorText(AnchorText value) {
super.uriAnchorText(value);
return this;
}
// </FluentSetters>
}
//-------------------------------------------------------------------------------------------------------------------
// Instance
//-------------------------------------------------------------------------------------------------------------------
	// Generator built once from the builder's settings; shared by all sessions of this serializer.
	final JsonSchemaGenerator generator;
	/**
	 * Constructor.
	 *
	 * @param builder The builder for this serializer.
	 */
	public HtmlSchemaSerializer(Builder builder) {
		// NOTE(review): recursion detection is forced on and recursions are ignored here,
		// presumably because schema trees can self-reference -- confirm against the session implementation.
		super(builder.detectRecursions().ignoreRecursions());
		generator = builder.generatorBuilder.build();
	}
	@Override /* Context */
	public Builder copy() {
		return new Builder(this);
	}
	@Override /* Context */
	public HtmlSchemaSerializerSession.Builder createSession() {
		return HtmlSchemaSerializerSession.create(this);
	}
	@Override /* Context */
	public HtmlSchemaSerializerSession getSession() {
		return createSession().build();
	}
	/**
	 * Returns the JSON-schema generator backing this serializer.
	 *
	 * @return The generator built from this serializer's builder settings.
	 */
	JsonSchemaGenerator getGenerator() {
		return generator;
	}
//-----------------------------------------------------------------------------------------------------------------
// Other methods
//-----------------------------------------------------------------------------------------------------------------
	@Override /* Context */
	protected JsonMap properties() {
		// Exposes the generator in the debug/properties view of this context.
		return filteredMap("generator", generator);
	}
}
|
googleapis/google-cloud-java | 37,811 | java-visionai/proto-google-cloud-visionai-v1/src/main/java/com/google/cloud/visionai/v1/ListClustersRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/visionai/v1/streams_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.visionai.v1;
/**
*
*
* <pre>
* Message for requesting list of Clusters.
* </pre>
*
* Protobuf type {@code google.cloud.visionai.v1.ListClustersRequest}
*/
public final class ListClustersRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.visionai.v1.ListClustersRequest)
ListClustersRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListClustersRequest.newBuilder() to construct.
private ListClustersRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListClustersRequest() {
parent_ = "";
pageToken_ = "";
filter_ = "";
orderBy_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListClustersRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.visionai.v1.StreamsServiceProto
.internal_static_google_cloud_visionai_v1_ListClustersRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.visionai.v1.StreamsServiceProto
.internal_static_google_cloud_visionai_v1_ListClustersRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.visionai.v1.ListClustersRequest.class,
com.google.cloud.visionai.v1.ListClustersRequest.Builder.class);
}
public static final int PARENT_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. Parent value for ListClustersRequest.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
  @java.lang.Override
  public java.lang.String getParent() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Field holds a ByteString until first access; decode once and cache the String.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      parent_ = s;
      return s;
    }
  }
/**
*
*
* <pre>
* Required. Parent value for ListClustersRequest.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int PAGE_SIZE_FIELD_NUMBER = 2;
private int pageSize_ = 0;
/**
*
*
* <pre>
* Requested page size. Server may return fewer items than requested.
* If unspecified, server will pick an appropriate default.
* </pre>
*
* <code>int32 page_size = 2;</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
public static final int PAGE_TOKEN_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return The pageToken.
*/
@java.lang.Override
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return The bytes for pageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int FILTER_FIELD_NUMBER = 4;
@SuppressWarnings("serial")
private volatile java.lang.Object filter_ = "";
/**
*
*
* <pre>
* Filtering results.
* </pre>
*
* <code>string filter = 4;</code>
*
* @return The filter.
*/
@java.lang.Override
public java.lang.String getFilter() {
java.lang.Object ref = filter_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
filter_ = s;
return s;
}
}
/**
*
*
* <pre>
* Filtering results.
* </pre>
*
* <code>string filter = 4;</code>
*
* @return The bytes for filter.
*/
@java.lang.Override
public com.google.protobuf.ByteString getFilterBytes() {
java.lang.Object ref = filter_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
filter_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int ORDER_BY_FIELD_NUMBER = 5;
@SuppressWarnings("serial")
private volatile java.lang.Object orderBy_ = "";
/**
*
*
* <pre>
* Hint for how to order the results.
* </pre>
*
* <code>string order_by = 5;</code>
*
* @return The orderBy.
*/
@java.lang.Override
public java.lang.String getOrderBy() {
java.lang.Object ref = orderBy_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
orderBy_ = s;
return s;
}
}
/**
*
*
* <pre>
* Hint for how to order the results.
* </pre>
*
* <code>string order_by = 5;</code>
*
* @return The bytes for orderBy.
*/
@java.lang.Override
public com.google.protobuf.ByteString getOrderByBytes() {
java.lang.Object ref = orderBy_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
orderBy_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
}
if (pageSize_ != 0) {
output.writeInt32(2, pageSize_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 4, filter_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(orderBy_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 5, orderBy_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
}
if (pageSize_ != 0) {
size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, filter_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(orderBy_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, orderBy_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.visionai.v1.ListClustersRequest)) {
return super.equals(obj);
}
com.google.cloud.visionai.v1.ListClustersRequest other =
(com.google.cloud.visionai.v1.ListClustersRequest) obj;
if (!getParent().equals(other.getParent())) return false;
if (getPageSize() != other.getPageSize()) return false;
if (!getPageToken().equals(other.getPageToken())) return false;
if (!getFilter().equals(other.getFilter())) return false;
if (!getOrderBy().equals(other.getOrderBy())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + PARENT_FIELD_NUMBER;
hash = (53 * hash) + getParent().hashCode();
hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
hash = (53 * hash) + getPageSize();
hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getPageToken().hashCode();
hash = (37 * hash) + FILTER_FIELD_NUMBER;
hash = (53 * hash) + getFilter().hashCode();
hash = (37 * hash) + ORDER_BY_FIELD_NUMBER;
hash = (53 * hash) + getOrderBy().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.visionai.v1.ListClustersRequest parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.visionai.v1.ListClustersRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.visionai.v1.ListClustersRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.visionai.v1.ListClustersRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.visionai.v1.ListClustersRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.visionai.v1.ListClustersRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.visionai.v1.ListClustersRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.visionai.v1.ListClustersRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.visionai.v1.ListClustersRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.visionai.v1.ListClustersRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.visionai.v1.ListClustersRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.visionai.v1.ListClustersRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.visionai.v1.ListClustersRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Message for requesting list of Clusters.
* </pre>
*
* Protobuf type {@code google.cloud.visionai.v1.ListClustersRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.visionai.v1.ListClustersRequest)
com.google.cloud.visionai.v1.ListClustersRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.visionai.v1.StreamsServiceProto
.internal_static_google_cloud_visionai_v1_ListClustersRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.visionai.v1.StreamsServiceProto
.internal_static_google_cloud_visionai_v1_ListClustersRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.visionai.v1.ListClustersRequest.class,
com.google.cloud.visionai.v1.ListClustersRequest.Builder.class);
}
// Construct using com.google.cloud.visionai.v1.ListClustersRequest.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
parent_ = "";
pageSize_ = 0;
pageToken_ = "";
filter_ = "";
orderBy_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.visionai.v1.StreamsServiceProto
.internal_static_google_cloud_visionai_v1_ListClustersRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.visionai.v1.ListClustersRequest getDefaultInstanceForType() {
return com.google.cloud.visionai.v1.ListClustersRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.visionai.v1.ListClustersRequest build() {
com.google.cloud.visionai.v1.ListClustersRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.visionai.v1.ListClustersRequest buildPartial() {
com.google.cloud.visionai.v1.ListClustersRequest result =
new com.google.cloud.visionai.v1.ListClustersRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.visionai.v1.ListClustersRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.parent_ = parent_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.pageSize_ = pageSize_;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.pageToken_ = pageToken_;
}
if (((from_bitField0_ & 0x00000008) != 0)) {
result.filter_ = filter_;
}
if (((from_bitField0_ & 0x00000010) != 0)) {
result.orderBy_ = orderBy_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.visionai.v1.ListClustersRequest) {
return mergeFrom((com.google.cloud.visionai.v1.ListClustersRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.visionai.v1.ListClustersRequest other) {
if (other == com.google.cloud.visionai.v1.ListClustersRequest.getDefaultInstance())
return this;
if (!other.getParent().isEmpty()) {
parent_ = other.parent_;
bitField0_ |= 0x00000001;
onChanged();
}
if (other.getPageSize() != 0) {
setPageSize(other.getPageSize());
}
if (!other.getPageToken().isEmpty()) {
pageToken_ = other.pageToken_;
bitField0_ |= 0x00000004;
onChanged();
}
if (!other.getFilter().isEmpty()) {
filter_ = other.filter_;
bitField0_ |= 0x00000008;
onChanged();
}
if (!other.getOrderBy().isEmpty()) {
orderBy_ = other.orderBy_;
bitField0_ |= 0x00000010;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
parent_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 16:
{
pageSize_ = input.readInt32();
bitField0_ |= 0x00000002;
break;
} // case 16
case 26:
{
pageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000004;
break;
} // case 26
case 34:
{
filter_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000008;
break;
} // case 34
case 42:
{
orderBy_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000010;
break;
} // case 42
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. Parent value for ListClustersRequest.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. Parent value for ListClustersRequest.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. Parent value for ListClustersRequest.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
public Builder setParent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Parent value for ListClustersRequest.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearParent() {
parent_ = getDefaultInstance().getParent();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Parent value for ListClustersRequest.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
*/
public Builder setParentBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private int pageSize_;
/**
*
*
* <pre>
* Requested page size. Server may return fewer items than requested.
* If unspecified, server will pick an appropriate default.
* </pre>
*
* <code>int32 page_size = 2;</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
/**
*
*
* <pre>
* Requested page size. Server may return fewer items than requested.
* If unspecified, server will pick an appropriate default.
* </pre>
*
* <code>int32 page_size = 2;</code>
*
* @param value The pageSize to set.
* @return This builder for chaining.
*/
public Builder setPageSize(int value) {
pageSize_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Requested page size. Server may return fewer items than requested.
* If unspecified, server will pick an appropriate default.
* </pre>
*
* <code>int32 page_size = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearPageSize() {
bitField0_ = (bitField0_ & ~0x00000002);
pageSize_ = 0;
onChanged();
return this;
}
private java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return The pageToken.
*/
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return The bytes for pageToken.
*/
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @param value The pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
pageToken_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return This builder for chaining.
*/
public Builder clearPageToken() {
pageToken_ = getDefaultInstance().getPageToken();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @param value The bytes for pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
pageToken_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
private java.lang.Object filter_ = "";
/**
*
*
* <pre>
* Filtering results.
* </pre>
*
* <code>string filter = 4;</code>
*
* @return The filter.
*/
public java.lang.String getFilter() {
java.lang.Object ref = filter_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
filter_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Filtering results.
* </pre>
*
* <code>string filter = 4;</code>
*
* @return The bytes for filter.
*/
public com.google.protobuf.ByteString getFilterBytes() {
java.lang.Object ref = filter_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
filter_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Filtering results.
* </pre>
*
* <code>string filter = 4;</code>
*
* @param value The filter to set.
* @return This builder for chaining.
*/
public Builder setFilter(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
filter_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
*
*
* <pre>
* Filtering results.
* </pre>
*
* <code>string filter = 4;</code>
*
* @return This builder for chaining.
*/
public Builder clearFilter() {
filter_ = getDefaultInstance().getFilter();
bitField0_ = (bitField0_ & ~0x00000008);
onChanged();
return this;
}
/**
*
*
* <pre>
* Filtering results.
* </pre>
*
* <code>string filter = 4;</code>
*
* @param value The bytes for filter to set.
* @return This builder for chaining.
*/
public Builder setFilterBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
filter_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
private java.lang.Object orderBy_ = "";
/**
*
*
* <pre>
* Hint for how to order the results.
* </pre>
*
* <code>string order_by = 5;</code>
*
* @return The orderBy.
*/
public java.lang.String getOrderBy() {
java.lang.Object ref = orderBy_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
orderBy_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Hint for how to order the results.
* </pre>
*
* <code>string order_by = 5;</code>
*
* @return The bytes for orderBy.
*/
public com.google.protobuf.ByteString getOrderByBytes() {
java.lang.Object ref = orderBy_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
orderBy_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Hint for how to order the results.
* </pre>
*
* <code>string order_by = 5;</code>
*
* @param value The orderBy to set.
* @return This builder for chaining.
*/
public Builder setOrderBy(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
orderBy_ = value;
bitField0_ |= 0x00000010;
onChanged();
return this;
}
/**
*
*
* <pre>
* Hint for how to order the results.
* </pre>
*
* <code>string order_by = 5;</code>
*
* @return This builder for chaining.
*/
public Builder clearOrderBy() {
orderBy_ = getDefaultInstance().getOrderBy();
bitField0_ = (bitField0_ & ~0x00000010);
onChanged();
return this;
}
/**
*
*
* <pre>
* Hint for how to order the results.
* </pre>
*
* <code>string order_by = 5;</code>
*
* @param value The bytes for orderBy to set.
* @return This builder for chaining.
*/
public Builder setOrderByBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
orderBy_ = value;
bitField0_ |= 0x00000010;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.visionai.v1.ListClustersRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.visionai.v1.ListClustersRequest)
private static final com.google.cloud.visionai.v1.ListClustersRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.visionai.v1.ListClustersRequest();
}
public static com.google.cloud.visionai.v1.ListClustersRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ListClustersRequest> PARSER =
new com.google.protobuf.AbstractParser<ListClustersRequest>() {
@java.lang.Override
public ListClustersRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ListClustersRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListClustersRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.visionai.v1.ListClustersRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 37,947 | java-contentwarehouse/proto-google-cloud-contentwarehouse-v1/src/main/java/com/google/cloud/contentwarehouse/v1/ListLinkedSourcesResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/contentwarehouse/v1/document_link_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.contentwarehouse.v1;
/**
*
*
* <pre>
* Response message for DocumentLinkService.ListLinkedSources.
* </pre>
*
* Protobuf type {@code google.cloud.contentwarehouse.v1.ListLinkedSourcesResponse}
*/
public final class ListLinkedSourcesResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.contentwarehouse.v1.ListLinkedSourcesResponse)
ListLinkedSourcesResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListLinkedSourcesResponse.newBuilder() to construct.
private ListLinkedSourcesResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListLinkedSourcesResponse() {
documentLinks_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListLinkedSourcesResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.contentwarehouse.v1.DocumentLinkServiceProto
.internal_static_google_cloud_contentwarehouse_v1_ListLinkedSourcesResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.contentwarehouse.v1.DocumentLinkServiceProto
.internal_static_google_cloud_contentwarehouse_v1_ListLinkedSourcesResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.contentwarehouse.v1.ListLinkedSourcesResponse.class,
com.google.cloud.contentwarehouse.v1.ListLinkedSourcesResponse.Builder.class);
}
public static final int DOCUMENT_LINKS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.contentwarehouse.v1.DocumentLink> documentLinks_;
/**
*
*
* <pre>
* Source document-links.
* </pre>
*
* <code>repeated .google.cloud.contentwarehouse.v1.DocumentLink document_links = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.contentwarehouse.v1.DocumentLink> getDocumentLinksList() {
return documentLinks_;
}
/**
*
*
* <pre>
* Source document-links.
* </pre>
*
* <code>repeated .google.cloud.contentwarehouse.v1.DocumentLink document_links = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.contentwarehouse.v1.DocumentLinkOrBuilder>
getDocumentLinksOrBuilderList() {
return documentLinks_;
}
/**
*
*
* <pre>
* Source document-links.
* </pre>
*
* <code>repeated .google.cloud.contentwarehouse.v1.DocumentLink document_links = 1;</code>
*/
@java.lang.Override
public int getDocumentLinksCount() {
return documentLinks_.size();
}
/**
*
*
* <pre>
* Source document-links.
* </pre>
*
* <code>repeated .google.cloud.contentwarehouse.v1.DocumentLink document_links = 1;</code>
*/
@java.lang.Override
public com.google.cloud.contentwarehouse.v1.DocumentLink getDocumentLinks(int index) {
return documentLinks_.get(index);
}
/**
*
*
* <pre>
* Source document-links.
* </pre>
*
* <code>repeated .google.cloud.contentwarehouse.v1.DocumentLink document_links = 1;</code>
*/
@java.lang.Override
public com.google.cloud.contentwarehouse.v1.DocumentLinkOrBuilder getDocumentLinksOrBuilder(
int index) {
return documentLinks_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < documentLinks_.size(); i++) {
output.writeMessage(1, documentLinks_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < documentLinks_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, documentLinks_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.contentwarehouse.v1.ListLinkedSourcesResponse)) {
return super.equals(obj);
}
com.google.cloud.contentwarehouse.v1.ListLinkedSourcesResponse other =
(com.google.cloud.contentwarehouse.v1.ListLinkedSourcesResponse) obj;
if (!getDocumentLinksList().equals(other.getDocumentLinksList())) return false;
if (!getNextPageToken().equals(other.getNextPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getDocumentLinksCount() > 0) {
hash = (37 * hash) + DOCUMENT_LINKS_FIELD_NUMBER;
hash = (53 * hash) + getDocumentLinksList().hashCode();
}
hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getNextPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.contentwarehouse.v1.ListLinkedSourcesResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.contentwarehouse.v1.ListLinkedSourcesResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.contentwarehouse.v1.ListLinkedSourcesResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.contentwarehouse.v1.ListLinkedSourcesResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.contentwarehouse.v1.ListLinkedSourcesResponse parseFrom(
byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.contentwarehouse.v1.ListLinkedSourcesResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.contentwarehouse.v1.ListLinkedSourcesResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.contentwarehouse.v1.ListLinkedSourcesResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.contentwarehouse.v1.ListLinkedSourcesResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.contentwarehouse.v1.ListLinkedSourcesResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.contentwarehouse.v1.ListLinkedSourcesResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.contentwarehouse.v1.ListLinkedSourcesResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.contentwarehouse.v1.ListLinkedSourcesResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Response message for DocumentLinkService.ListLinkedSources.
* </pre>
*
* Protobuf type {@code google.cloud.contentwarehouse.v1.ListLinkedSourcesResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.contentwarehouse.v1.ListLinkedSourcesResponse)
com.google.cloud.contentwarehouse.v1.ListLinkedSourcesResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.contentwarehouse.v1.DocumentLinkServiceProto
.internal_static_google_cloud_contentwarehouse_v1_ListLinkedSourcesResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.contentwarehouse.v1.DocumentLinkServiceProto
.internal_static_google_cloud_contentwarehouse_v1_ListLinkedSourcesResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.contentwarehouse.v1.ListLinkedSourcesResponse.class,
com.google.cloud.contentwarehouse.v1.ListLinkedSourcesResponse.Builder.class);
}
// Construct using com.google.cloud.contentwarehouse.v1.ListLinkedSourcesResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (documentLinksBuilder_ == null) {
documentLinks_ = java.util.Collections.emptyList();
} else {
documentLinks_ = null;
documentLinksBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.contentwarehouse.v1.DocumentLinkServiceProto
.internal_static_google_cloud_contentwarehouse_v1_ListLinkedSourcesResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.contentwarehouse.v1.ListLinkedSourcesResponse
getDefaultInstanceForType() {
return com.google.cloud.contentwarehouse.v1.ListLinkedSourcesResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.contentwarehouse.v1.ListLinkedSourcesResponse build() {
com.google.cloud.contentwarehouse.v1.ListLinkedSourcesResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.contentwarehouse.v1.ListLinkedSourcesResponse buildPartial() {
com.google.cloud.contentwarehouse.v1.ListLinkedSourcesResponse result =
new com.google.cloud.contentwarehouse.v1.ListLinkedSourcesResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.cloud.contentwarehouse.v1.ListLinkedSourcesResponse result) {
if (documentLinksBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
documentLinks_ = java.util.Collections.unmodifiableList(documentLinks_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.documentLinks_ = documentLinks_;
} else {
result.documentLinks_ = documentLinksBuilder_.build();
}
}
private void buildPartial0(
com.google.cloud.contentwarehouse.v1.ListLinkedSourcesResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.contentwarehouse.v1.ListLinkedSourcesResponse) {
return mergeFrom((com.google.cloud.contentwarehouse.v1.ListLinkedSourcesResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.contentwarehouse.v1.ListLinkedSourcesResponse other) {
if (other
== com.google.cloud.contentwarehouse.v1.ListLinkedSourcesResponse.getDefaultInstance())
return this;
if (documentLinksBuilder_ == null) {
if (!other.documentLinks_.isEmpty()) {
if (documentLinks_.isEmpty()) {
documentLinks_ = other.documentLinks_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureDocumentLinksIsMutable();
documentLinks_.addAll(other.documentLinks_);
}
onChanged();
}
} else {
if (!other.documentLinks_.isEmpty()) {
if (documentLinksBuilder_.isEmpty()) {
documentLinksBuilder_.dispose();
documentLinksBuilder_ = null;
documentLinks_ = other.documentLinks_;
bitField0_ = (bitField0_ & ~0x00000001);
documentLinksBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getDocumentLinksFieldBuilder()
: null;
} else {
documentLinksBuilder_.addAllMessages(other.documentLinks_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.cloud.contentwarehouse.v1.DocumentLink m =
input.readMessage(
com.google.cloud.contentwarehouse.v1.DocumentLink.parser(),
extensionRegistry);
if (documentLinksBuilder_ == null) {
ensureDocumentLinksIsMutable();
documentLinks_.add(m);
} else {
documentLinksBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.cloud.contentwarehouse.v1.DocumentLink> documentLinks_ =
java.util.Collections.emptyList();
private void ensureDocumentLinksIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
documentLinks_ =
new java.util.ArrayList<com.google.cloud.contentwarehouse.v1.DocumentLink>(
documentLinks_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.contentwarehouse.v1.DocumentLink,
com.google.cloud.contentwarehouse.v1.DocumentLink.Builder,
com.google.cloud.contentwarehouse.v1.DocumentLinkOrBuilder>
documentLinksBuilder_;
/**
*
*
* <pre>
* Source document-links.
* </pre>
*
* <code>repeated .google.cloud.contentwarehouse.v1.DocumentLink document_links = 1;</code>
*/
public java.util.List<com.google.cloud.contentwarehouse.v1.DocumentLink>
getDocumentLinksList() {
if (documentLinksBuilder_ == null) {
return java.util.Collections.unmodifiableList(documentLinks_);
} else {
return documentLinksBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* Source document-links.
* </pre>
*
* <code>repeated .google.cloud.contentwarehouse.v1.DocumentLink document_links = 1;</code>
*/
public int getDocumentLinksCount() {
if (documentLinksBuilder_ == null) {
return documentLinks_.size();
} else {
return documentLinksBuilder_.getCount();
}
}
/**
*
*
* <pre>
* Source document-links.
* </pre>
*
* <code>repeated .google.cloud.contentwarehouse.v1.DocumentLink document_links = 1;</code>
*/
public com.google.cloud.contentwarehouse.v1.DocumentLink getDocumentLinks(int index) {
if (documentLinksBuilder_ == null) {
return documentLinks_.get(index);
} else {
return documentLinksBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* Source document-links.
* </pre>
*
* <code>repeated .google.cloud.contentwarehouse.v1.DocumentLink document_links = 1;</code>
*/
public Builder setDocumentLinks(
int index, com.google.cloud.contentwarehouse.v1.DocumentLink value) {
if (documentLinksBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureDocumentLinksIsMutable();
documentLinks_.set(index, value);
onChanged();
} else {
documentLinksBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* Source document-links.
* </pre>
*
* <code>repeated .google.cloud.contentwarehouse.v1.DocumentLink document_links = 1;</code>
*/
public Builder setDocumentLinks(
int index, com.google.cloud.contentwarehouse.v1.DocumentLink.Builder builderForValue) {
if (documentLinksBuilder_ == null) {
ensureDocumentLinksIsMutable();
documentLinks_.set(index, builderForValue.build());
onChanged();
} else {
documentLinksBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Source document-links.
* </pre>
*
* <code>repeated .google.cloud.contentwarehouse.v1.DocumentLink document_links = 1;</code>
*/
public Builder addDocumentLinks(com.google.cloud.contentwarehouse.v1.DocumentLink value) {
if (documentLinksBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureDocumentLinksIsMutable();
documentLinks_.add(value);
onChanged();
} else {
documentLinksBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* Source document-links.
* </pre>
*
* <code>repeated .google.cloud.contentwarehouse.v1.DocumentLink document_links = 1;</code>
*/
public Builder addDocumentLinks(
int index, com.google.cloud.contentwarehouse.v1.DocumentLink value) {
if (documentLinksBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureDocumentLinksIsMutable();
documentLinks_.add(index, value);
onChanged();
} else {
documentLinksBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* Source document-links.
* </pre>
*
* <code>repeated .google.cloud.contentwarehouse.v1.DocumentLink document_links = 1;</code>
*/
public Builder addDocumentLinks(
com.google.cloud.contentwarehouse.v1.DocumentLink.Builder builderForValue) {
if (documentLinksBuilder_ == null) {
ensureDocumentLinksIsMutable();
documentLinks_.add(builderForValue.build());
onChanged();
} else {
documentLinksBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Source document-links.
* </pre>
*
* <code>repeated .google.cloud.contentwarehouse.v1.DocumentLink document_links = 1;</code>
*/
public Builder addDocumentLinks(
int index, com.google.cloud.contentwarehouse.v1.DocumentLink.Builder builderForValue) {
if (documentLinksBuilder_ == null) {
ensureDocumentLinksIsMutable();
documentLinks_.add(index, builderForValue.build());
onChanged();
} else {
documentLinksBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Source document-links.
* </pre>
*
* <code>repeated .google.cloud.contentwarehouse.v1.DocumentLink document_links = 1;</code>
*/
public Builder addAllDocumentLinks(
java.lang.Iterable<? extends com.google.cloud.contentwarehouse.v1.DocumentLink> values) {
if (documentLinksBuilder_ == null) {
ensureDocumentLinksIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, documentLinks_);
onChanged();
} else {
documentLinksBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* Source document-links.
* </pre>
*
* <code>repeated .google.cloud.contentwarehouse.v1.DocumentLink document_links = 1;</code>
*/
public Builder clearDocumentLinks() {
if (documentLinksBuilder_ == null) {
documentLinks_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
documentLinksBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* Source document-links.
* </pre>
*
* <code>repeated .google.cloud.contentwarehouse.v1.DocumentLink document_links = 1;</code>
*/
public Builder removeDocumentLinks(int index) {
if (documentLinksBuilder_ == null) {
ensureDocumentLinksIsMutable();
documentLinks_.remove(index);
onChanged();
} else {
documentLinksBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* Source document-links.
* </pre>
*
* <code>repeated .google.cloud.contentwarehouse.v1.DocumentLink document_links = 1;</code>
*/
public com.google.cloud.contentwarehouse.v1.DocumentLink.Builder getDocumentLinksBuilder(
int index) {
return getDocumentLinksFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* Source document-links.
* </pre>
*
* <code>repeated .google.cloud.contentwarehouse.v1.DocumentLink document_links = 1;</code>
*/
public com.google.cloud.contentwarehouse.v1.DocumentLinkOrBuilder getDocumentLinksOrBuilder(
int index) {
if (documentLinksBuilder_ == null) {
return documentLinks_.get(index);
} else {
return documentLinksBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* Source document-links.
* </pre>
*
* <code>repeated .google.cloud.contentwarehouse.v1.DocumentLink document_links = 1;</code>
*/
public java.util.List<? extends com.google.cloud.contentwarehouse.v1.DocumentLinkOrBuilder>
getDocumentLinksOrBuilderList() {
if (documentLinksBuilder_ != null) {
return documentLinksBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(documentLinks_);
}
}
/**
*
*
* <pre>
* Source document-links.
* </pre>
*
* <code>repeated .google.cloud.contentwarehouse.v1.DocumentLink document_links = 1;</code>
*/
public com.google.cloud.contentwarehouse.v1.DocumentLink.Builder addDocumentLinksBuilder() {
return getDocumentLinksFieldBuilder()
.addBuilder(com.google.cloud.contentwarehouse.v1.DocumentLink.getDefaultInstance());
}
/**
*
*
* <pre>
* Source document-links.
* </pre>
*
* <code>repeated .google.cloud.contentwarehouse.v1.DocumentLink document_links = 1;</code>
*/
public com.google.cloud.contentwarehouse.v1.DocumentLink.Builder addDocumentLinksBuilder(
int index) {
return getDocumentLinksFieldBuilder()
.addBuilder(
index, com.google.cloud.contentwarehouse.v1.DocumentLink.getDefaultInstance());
}
/**
*
*
* <pre>
* Source document-links.
* </pre>
*
* <code>repeated .google.cloud.contentwarehouse.v1.DocumentLink document_links = 1;</code>
*/
public java.util.List<com.google.cloud.contentwarehouse.v1.DocumentLink.Builder>
getDocumentLinksBuilderList() {
return getDocumentLinksFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.contentwarehouse.v1.DocumentLink,
com.google.cloud.contentwarehouse.v1.DocumentLink.Builder,
com.google.cloud.contentwarehouse.v1.DocumentLinkOrBuilder>
getDocumentLinksFieldBuilder() {
if (documentLinksBuilder_ == null) {
documentLinksBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.contentwarehouse.v1.DocumentLink,
com.google.cloud.contentwarehouse.v1.DocumentLink.Builder,
com.google.cloud.contentwarehouse.v1.DocumentLinkOrBuilder>(
documentLinks_,
((bitField0_ & 0x00000001) != 0),
getParentForChildren(),
isClean());
documentLinks_ = null;
}
return documentLinksBuilder_;
}
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.contentwarehouse.v1.ListLinkedSourcesResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.contentwarehouse.v1.ListLinkedSourcesResponse)
private static final com.google.cloud.contentwarehouse.v1.ListLinkedSourcesResponse
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.contentwarehouse.v1.ListLinkedSourcesResponse();
}
public static com.google.cloud.contentwarehouse.v1.ListLinkedSourcesResponse
getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ListLinkedSourcesResponse> PARSER =
new com.google.protobuf.AbstractParser<ListLinkedSourcesResponse>() {
@java.lang.Override
public ListLinkedSourcesResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ListLinkedSourcesResponse> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListLinkedSourcesResponse> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.contentwarehouse.v1.ListLinkedSourcesResponse
getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 37,947 | java-contentwarehouse/proto-google-cloud-contentwarehouse-v1/src/main/java/com/google/cloud/contentwarehouse/v1/ListLinkedTargetsResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/contentwarehouse/v1/document_link_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.contentwarehouse.v1;
/**
*
*
* <pre>
* Response message for DocumentLinkService.ListLinkedTargets.
* </pre>
*
* Protobuf type {@code google.cloud.contentwarehouse.v1.ListLinkedTargetsResponse}
*/
public final class ListLinkedTargetsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.contentwarehouse.v1.ListLinkedTargetsResponse)
ListLinkedTargetsResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListLinkedTargetsResponse.newBuilder() to construct.
private ListLinkedTargetsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListLinkedTargetsResponse() {
documentLinks_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListLinkedTargetsResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.contentwarehouse.v1.DocumentLinkServiceProto
.internal_static_google_cloud_contentwarehouse_v1_ListLinkedTargetsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.contentwarehouse.v1.DocumentLinkServiceProto
.internal_static_google_cloud_contentwarehouse_v1_ListLinkedTargetsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.contentwarehouse.v1.ListLinkedTargetsResponse.class,
com.google.cloud.contentwarehouse.v1.ListLinkedTargetsResponse.Builder.class);
}
public static final int DOCUMENT_LINKS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.contentwarehouse.v1.DocumentLink> documentLinks_;
/**
*
*
* <pre>
* Target document-links.
* </pre>
*
* <code>repeated .google.cloud.contentwarehouse.v1.DocumentLink document_links = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.contentwarehouse.v1.DocumentLink> getDocumentLinksList() {
return documentLinks_;
}
/**
*
*
* <pre>
* Target document-links.
* </pre>
*
* <code>repeated .google.cloud.contentwarehouse.v1.DocumentLink document_links = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.contentwarehouse.v1.DocumentLinkOrBuilder>
getDocumentLinksOrBuilderList() {
return documentLinks_;
}
/**
*
*
* <pre>
* Target document-links.
* </pre>
*
* <code>repeated .google.cloud.contentwarehouse.v1.DocumentLink document_links = 1;</code>
*/
@java.lang.Override
public int getDocumentLinksCount() {
return documentLinks_.size();
}
/**
*
*
* <pre>
* Target document-links.
* </pre>
*
* <code>repeated .google.cloud.contentwarehouse.v1.DocumentLink document_links = 1;</code>
*/
@java.lang.Override
public com.google.cloud.contentwarehouse.v1.DocumentLink getDocumentLinks(int index) {
return documentLinks_.get(index);
}
/**
*
*
* <pre>
* Target document-links.
* </pre>
*
* <code>repeated .google.cloud.contentwarehouse.v1.DocumentLink document_links = 1;</code>
*/
@java.lang.Override
public com.google.cloud.contentwarehouse.v1.DocumentLinkOrBuilder getDocumentLinksOrBuilder(
int index) {
return documentLinks_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < documentLinks_.size(); i++) {
output.writeMessage(1, documentLinks_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < documentLinks_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, documentLinks_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.contentwarehouse.v1.ListLinkedTargetsResponse)) {
return super.equals(obj);
}
com.google.cloud.contentwarehouse.v1.ListLinkedTargetsResponse other =
(com.google.cloud.contentwarehouse.v1.ListLinkedTargetsResponse) obj;
if (!getDocumentLinksList().equals(other.getDocumentLinksList())) return false;
if (!getNextPageToken().equals(other.getNextPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getDocumentLinksCount() > 0) {
hash = (37 * hash) + DOCUMENT_LINKS_FIELD_NUMBER;
hash = (53 * hash) + getDocumentLinksList().hashCode();
}
hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getNextPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.contentwarehouse.v1.ListLinkedTargetsResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.contentwarehouse.v1.ListLinkedTargetsResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.contentwarehouse.v1.ListLinkedTargetsResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.contentwarehouse.v1.ListLinkedTargetsResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.contentwarehouse.v1.ListLinkedTargetsResponse parseFrom(
byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.contentwarehouse.v1.ListLinkedTargetsResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.contentwarehouse.v1.ListLinkedTargetsResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.contentwarehouse.v1.ListLinkedTargetsResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.contentwarehouse.v1.ListLinkedTargetsResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.contentwarehouse.v1.ListLinkedTargetsResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.contentwarehouse.v1.ListLinkedTargetsResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.contentwarehouse.v1.ListLinkedTargetsResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.contentwarehouse.v1.ListLinkedTargetsResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Response message for DocumentLinkService.ListLinkedTargets.
* </pre>
*
* Protobuf type {@code google.cloud.contentwarehouse.v1.ListLinkedTargetsResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.contentwarehouse.v1.ListLinkedTargetsResponse)
com.google.cloud.contentwarehouse.v1.ListLinkedTargetsResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.contentwarehouse.v1.DocumentLinkServiceProto
.internal_static_google_cloud_contentwarehouse_v1_ListLinkedTargetsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.contentwarehouse.v1.DocumentLinkServiceProto
.internal_static_google_cloud_contentwarehouse_v1_ListLinkedTargetsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.contentwarehouse.v1.ListLinkedTargetsResponse.class,
com.google.cloud.contentwarehouse.v1.ListLinkedTargetsResponse.Builder.class);
}
// Construct using com.google.cloud.contentwarehouse.v1.ListLinkedTargetsResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (documentLinksBuilder_ == null) {
documentLinks_ = java.util.Collections.emptyList();
} else {
documentLinks_ = null;
documentLinksBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.contentwarehouse.v1.DocumentLinkServiceProto
.internal_static_google_cloud_contentwarehouse_v1_ListLinkedTargetsResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.contentwarehouse.v1.ListLinkedTargetsResponse
getDefaultInstanceForType() {
return com.google.cloud.contentwarehouse.v1.ListLinkedTargetsResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.contentwarehouse.v1.ListLinkedTargetsResponse build() {
com.google.cloud.contentwarehouse.v1.ListLinkedTargetsResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.contentwarehouse.v1.ListLinkedTargetsResponse buildPartial() {
com.google.cloud.contentwarehouse.v1.ListLinkedTargetsResponse result =
new com.google.cloud.contentwarehouse.v1.ListLinkedTargetsResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.cloud.contentwarehouse.v1.ListLinkedTargetsResponse result) {
if (documentLinksBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
documentLinks_ = java.util.Collections.unmodifiableList(documentLinks_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.documentLinks_ = documentLinks_;
} else {
result.documentLinks_ = documentLinksBuilder_.build();
}
}
private void buildPartial0(
com.google.cloud.contentwarehouse.v1.ListLinkedTargetsResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.contentwarehouse.v1.ListLinkedTargetsResponse) {
return mergeFrom((com.google.cloud.contentwarehouse.v1.ListLinkedTargetsResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.contentwarehouse.v1.ListLinkedTargetsResponse other) {
if (other
== com.google.cloud.contentwarehouse.v1.ListLinkedTargetsResponse.getDefaultInstance())
return this;
if (documentLinksBuilder_ == null) {
if (!other.documentLinks_.isEmpty()) {
if (documentLinks_.isEmpty()) {
documentLinks_ = other.documentLinks_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureDocumentLinksIsMutable();
documentLinks_.addAll(other.documentLinks_);
}
onChanged();
}
} else {
if (!other.documentLinks_.isEmpty()) {
if (documentLinksBuilder_.isEmpty()) {
documentLinksBuilder_.dispose();
documentLinksBuilder_ = null;
documentLinks_ = other.documentLinks_;
bitField0_ = (bitField0_ & ~0x00000001);
documentLinksBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getDocumentLinksFieldBuilder()
: null;
} else {
documentLinksBuilder_.addAllMessages(other.documentLinks_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.cloud.contentwarehouse.v1.DocumentLink m =
input.readMessage(
com.google.cloud.contentwarehouse.v1.DocumentLink.parser(),
extensionRegistry);
if (documentLinksBuilder_ == null) {
ensureDocumentLinksIsMutable();
documentLinks_.add(m);
} else {
documentLinksBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.cloud.contentwarehouse.v1.DocumentLink> documentLinks_ =
java.util.Collections.emptyList();
private void ensureDocumentLinksIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
documentLinks_ =
new java.util.ArrayList<com.google.cloud.contentwarehouse.v1.DocumentLink>(
documentLinks_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.contentwarehouse.v1.DocumentLink,
com.google.cloud.contentwarehouse.v1.DocumentLink.Builder,
com.google.cloud.contentwarehouse.v1.DocumentLinkOrBuilder>
documentLinksBuilder_;
/**
*
*
* <pre>
* Target document-links.
* </pre>
*
* <code>repeated .google.cloud.contentwarehouse.v1.DocumentLink document_links = 1;</code>
*/
public java.util.List<com.google.cloud.contentwarehouse.v1.DocumentLink>
getDocumentLinksList() {
if (documentLinksBuilder_ == null) {
return java.util.Collections.unmodifiableList(documentLinks_);
} else {
return documentLinksBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* Target document-links.
* </pre>
*
* <code>repeated .google.cloud.contentwarehouse.v1.DocumentLink document_links = 1;</code>
*/
public int getDocumentLinksCount() {
if (documentLinksBuilder_ == null) {
return documentLinks_.size();
} else {
return documentLinksBuilder_.getCount();
}
}
/**
*
*
* <pre>
* Target document-links.
* </pre>
*
* <code>repeated .google.cloud.contentwarehouse.v1.DocumentLink document_links = 1;</code>
*/
public com.google.cloud.contentwarehouse.v1.DocumentLink getDocumentLinks(int index) {
if (documentLinksBuilder_ == null) {
return documentLinks_.get(index);
} else {
return documentLinksBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* Target document-links.
* </pre>
*
* <code>repeated .google.cloud.contentwarehouse.v1.DocumentLink document_links = 1;</code>
*/
public Builder setDocumentLinks(
int index, com.google.cloud.contentwarehouse.v1.DocumentLink value) {
if (documentLinksBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureDocumentLinksIsMutable();
documentLinks_.set(index, value);
onChanged();
} else {
documentLinksBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* Target document-links.
* </pre>
*
* <code>repeated .google.cloud.contentwarehouse.v1.DocumentLink document_links = 1;</code>
*/
public Builder setDocumentLinks(
int index, com.google.cloud.contentwarehouse.v1.DocumentLink.Builder builderForValue) {
if (documentLinksBuilder_ == null) {
ensureDocumentLinksIsMutable();
documentLinks_.set(index, builderForValue.build());
onChanged();
} else {
documentLinksBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Target document-links.
* </pre>
*
* <code>repeated .google.cloud.contentwarehouse.v1.DocumentLink document_links = 1;</code>
*/
public Builder addDocumentLinks(com.google.cloud.contentwarehouse.v1.DocumentLink value) {
if (documentLinksBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureDocumentLinksIsMutable();
documentLinks_.add(value);
onChanged();
} else {
documentLinksBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* Target document-links.
* </pre>
*
* <code>repeated .google.cloud.contentwarehouse.v1.DocumentLink document_links = 1;</code>
*/
public Builder addDocumentLinks(
int index, com.google.cloud.contentwarehouse.v1.DocumentLink value) {
if (documentLinksBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureDocumentLinksIsMutable();
documentLinks_.add(index, value);
onChanged();
} else {
documentLinksBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* Target document-links.
* </pre>
*
* <code>repeated .google.cloud.contentwarehouse.v1.DocumentLink document_links = 1;</code>
*/
public Builder addDocumentLinks(
com.google.cloud.contentwarehouse.v1.DocumentLink.Builder builderForValue) {
if (documentLinksBuilder_ == null) {
ensureDocumentLinksIsMutable();
documentLinks_.add(builderForValue.build());
onChanged();
} else {
documentLinksBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Target document-links.
* </pre>
*
* <code>repeated .google.cloud.contentwarehouse.v1.DocumentLink document_links = 1;</code>
*/
public Builder addDocumentLinks(
int index, com.google.cloud.contentwarehouse.v1.DocumentLink.Builder builderForValue) {
if (documentLinksBuilder_ == null) {
ensureDocumentLinksIsMutable();
documentLinks_.add(index, builderForValue.build());
onChanged();
} else {
documentLinksBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Target document-links.
* </pre>
*
* <code>repeated .google.cloud.contentwarehouse.v1.DocumentLink document_links = 1;</code>
*/
public Builder addAllDocumentLinks(
java.lang.Iterable<? extends com.google.cloud.contentwarehouse.v1.DocumentLink> values) {
if (documentLinksBuilder_ == null) {
ensureDocumentLinksIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, documentLinks_);
onChanged();
} else {
documentLinksBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* Target document-links.
* </pre>
*
* <code>repeated .google.cloud.contentwarehouse.v1.DocumentLink document_links = 1;</code>
*/
public Builder clearDocumentLinks() {
if (documentLinksBuilder_ == null) {
documentLinks_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
documentLinksBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* Target document-links.
* </pre>
*
* <code>repeated .google.cloud.contentwarehouse.v1.DocumentLink document_links = 1;</code>
*/
public Builder removeDocumentLinks(int index) {
if (documentLinksBuilder_ == null) {
ensureDocumentLinksIsMutable();
documentLinks_.remove(index);
onChanged();
} else {
documentLinksBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* Target document-links.
* </pre>
*
* <code>repeated .google.cloud.contentwarehouse.v1.DocumentLink document_links = 1;</code>
*/
public com.google.cloud.contentwarehouse.v1.DocumentLink.Builder getDocumentLinksBuilder(
int index) {
return getDocumentLinksFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* Target document-links.
* </pre>
*
* <code>repeated .google.cloud.contentwarehouse.v1.DocumentLink document_links = 1;</code>
*/
public com.google.cloud.contentwarehouse.v1.DocumentLinkOrBuilder getDocumentLinksOrBuilder(
int index) {
if (documentLinksBuilder_ == null) {
return documentLinks_.get(index);
} else {
return documentLinksBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* Target document-links.
* </pre>
*
* <code>repeated .google.cloud.contentwarehouse.v1.DocumentLink document_links = 1;</code>
*/
public java.util.List<? extends com.google.cloud.contentwarehouse.v1.DocumentLinkOrBuilder>
getDocumentLinksOrBuilderList() {
if (documentLinksBuilder_ != null) {
return documentLinksBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(documentLinks_);
}
}
/**
*
*
* <pre>
* Target document-links.
* </pre>
*
* <code>repeated .google.cloud.contentwarehouse.v1.DocumentLink document_links = 1;</code>
*/
public com.google.cloud.contentwarehouse.v1.DocumentLink.Builder addDocumentLinksBuilder() {
return getDocumentLinksFieldBuilder()
.addBuilder(com.google.cloud.contentwarehouse.v1.DocumentLink.getDefaultInstance());
}
/**
*
*
* <pre>
* Target document-links.
* </pre>
*
* <code>repeated .google.cloud.contentwarehouse.v1.DocumentLink document_links = 1;</code>
*/
public com.google.cloud.contentwarehouse.v1.DocumentLink.Builder addDocumentLinksBuilder(
int index) {
return getDocumentLinksFieldBuilder()
.addBuilder(
index, com.google.cloud.contentwarehouse.v1.DocumentLink.getDefaultInstance());
}
/**
*
*
* <pre>
* Target document-links.
* </pre>
*
* <code>repeated .google.cloud.contentwarehouse.v1.DocumentLink document_links = 1;</code>
*/
public java.util.List<com.google.cloud.contentwarehouse.v1.DocumentLink.Builder>
getDocumentLinksBuilderList() {
return getDocumentLinksFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.contentwarehouse.v1.DocumentLink,
com.google.cloud.contentwarehouse.v1.DocumentLink.Builder,
com.google.cloud.contentwarehouse.v1.DocumentLinkOrBuilder>
getDocumentLinksFieldBuilder() {
if (documentLinksBuilder_ == null) {
documentLinksBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.contentwarehouse.v1.DocumentLink,
com.google.cloud.contentwarehouse.v1.DocumentLink.Builder,
com.google.cloud.contentwarehouse.v1.DocumentLinkOrBuilder>(
documentLinks_,
((bitField0_ & 0x00000001) != 0),
getParentForChildren(),
isClean());
documentLinks_ = null;
}
return documentLinksBuilder_;
}
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.contentwarehouse.v1.ListLinkedTargetsResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.contentwarehouse.v1.ListLinkedTargetsResponse)
private static final com.google.cloud.contentwarehouse.v1.ListLinkedTargetsResponse
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.contentwarehouse.v1.ListLinkedTargetsResponse();
}
public static com.google.cloud.contentwarehouse.v1.ListLinkedTargetsResponse
getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ListLinkedTargetsResponse> PARSER =
new com.google.protobuf.AbstractParser<ListLinkedTargetsResponse>() {
@java.lang.Override
public ListLinkedTargetsResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ListLinkedTargetsResponse> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListLinkedTargetsResponse> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.contentwarehouse.v1.ListLinkedTargetsResponse
getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
apache/cxf | 38,190 | tools/wsdlto/core/src/main/java/org/apache/cxf/tools/wsdlto/WSDLToJavaContainer.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.cxf.tools.wsdlto;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.Writer;
import java.net.URI;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.jar.JarEntry;
import java.util.jar.JarOutputStream;
import java.util.jar.Manifest;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.wsdl.Definition;
import javax.wsdl.factory.WSDLFactory;
import javax.wsdl.xml.WSDLWriter;
import javax.xml.namespace.QName;
import org.w3c.dom.Element;
import org.apache.cxf.Bus;
import org.apache.cxf.common.i18n.Message;
import org.apache.cxf.common.logging.LogUtils;
import org.apache.cxf.common.util.PropertiesLoaderUtils;
import org.apache.cxf.common.util.StringUtils;
import org.apache.cxf.common.util.URIParserUtil;
import org.apache.cxf.common.xmlschema.SchemaCollection;
import org.apache.cxf.helpers.CastUtils;
import org.apache.cxf.helpers.DOMUtils;
import org.apache.cxf.helpers.FileUtils;
import org.apache.cxf.helpers.IOUtils;
import org.apache.cxf.helpers.LoadingByteArrayOutputStream;
import org.apache.cxf.service.model.InterfaceInfo;
import org.apache.cxf.service.model.ServiceInfo;
import org.apache.cxf.staxutils.StaxUtils;
import org.apache.cxf.tools.common.AbstractCXFToolContainer;
import org.apache.cxf.tools.common.ClassNameProcessor;
import org.apache.cxf.tools.common.ClassUtils;
import org.apache.cxf.tools.common.FrontEndGenerator;
import org.apache.cxf.tools.common.Processor;
import org.apache.cxf.tools.common.ToolConstants;
import org.apache.cxf.tools.common.ToolContext;
import org.apache.cxf.tools.common.ToolException;
import org.apache.cxf.tools.common.toolspec.ToolSpec;
import org.apache.cxf.tools.common.toolspec.parser.BadUsageException;
import org.apache.cxf.tools.common.toolspec.parser.CommandDocument;
import org.apache.cxf.tools.common.toolspec.parser.ErrorVisitor;
import org.apache.cxf.tools.util.ClassCollector;
import org.apache.cxf.tools.util.FileWriterUtil;
import org.apache.cxf.tools.util.OutputStreamCreator;
import org.apache.cxf.tools.validator.ServiceValidator;
import org.apache.cxf.tools.wsdlto.core.AbstractWSDLBuilder;
import org.apache.cxf.tools.wsdlto.core.DataBindingProfile;
import org.apache.cxf.tools.wsdlto.core.FrontEndProfile;
import org.apache.cxf.wsdl.WSDLConstants;
import org.apache.cxf.wsdl.WSDLManager;
import org.apache.cxf.wsdl11.WSDLServiceBuilder;
import org.apache.ws.commons.schema.XmlSchema;
public class WSDLToJavaContainer extends AbstractCXFToolContainer {
protected static final Logger LOG = LogUtils.getL7dLogger(WSDLToJavaContainer.class);
private static final String DEFAULT_NS2PACKAGE = "http://www.w3.org/2005/08/addressing";
private static final String SERVICE_VALIDATOR = "META-INF/tools.service.validator.xml";
String toolName;
/**
 * Creates a WSDL-to-Java container bound to the given tool specification.
 *
 * @param name the tool name, also used for usage/error reporting
 * @param toolspec the parsed tool specification describing the CLI options
 * @throws Exception if the underlying container cannot be initialized
 */
public WSDLToJavaContainer(String name, ToolSpec toolspec) throws Exception {
    super(name, toolspec);
    this.toolName = name;
}
/**
 * Returns the option names that may be supplied multiple times on the
 * command line and are therefore collected into arrays.
 */
public Set<String> getArrayKeys() {
    Set<String> multiValuedOptions = new HashSet<>();
    String[] keys = {
        ToolConstants.CFG_PACKAGENAME,
        ToolConstants.CFG_NEXCLUDE,
        ToolConstants.CFG_XJC_ARGS,
    };
    for (String key : keys) {
        multiValuedOptions.add(key);
    }
    return multiValuedOptions;
}
/**
 * Resolves the WSDL version requested via {@code CFG_WSDL_VERSION}; the
 * value may be null (see {@code WSDLConstants.getVersion} for how an
 * absent or unrecognized value is handled).
 */
public WSDLConstants.WSDLVersion getWSDLVersion() {
    String version = (String)context.get(ToolConstants.CFG_WSDL_VERSION);
    return WSDLConstants.getVersion(version);
}
/**
 * Main entry point for code generation. When {@code CFG_WSDLLIST} is set,
 * the configured URL is treated as a plain-text list of WSDL URLs (one per
 * line) and each is processed with a fresh copy of the initial context;
 * otherwise the single configured WSDL is processed directly.
 *
 * @throws ToolException if processing fails or the error listener
 *         collected any errors
 */
public void execute() throws ToolException {
    // Info-only options (help/version style) produce no generation work.
    if (hasInfoOption()) {
        return;
    }
    buildToolContext();
    boolean isWsdlList = context.optionSet(ToolConstants.CFG_WSDLLIST);
    if (isWsdlList) {
        try {
            // Snapshot the context so every WSDL in the list starts from the
            // same pristine option state.
            ToolContext initialContextState = context.makeCopy();
            String wsdlURL = (String)context.get(ToolConstants.CFG_WSDLURL);
            wsdlURL = URIParserUtil.getAbsoluteURI(wsdlURL);
            URL url = new URL(wsdlURL);
            InputStream is = (InputStream)url.getContent();
            // NOTE(review): InputStreamReader uses the platform default
            // charset here — confirm list files are always platform-encoded.
            // Closing the reader also closes the underlying stream.
            try (BufferedReader reader = new BufferedReader(new InputStreamReader(is))) {
                String tempLine;
                while ((tempLine = reader.readLine()) != null) {
                    // One fresh context per listed WSDL URL.
                    ToolContext freshContext = initialContextState.makeCopy();
                    freshContext.put(ToolConstants.CFG_WSDLURL, tempLine);
                    setContext(freshContext);
                    buildToolContext();
                    processWsdl();
                }
                if (context.getErrorListener().getErrorCount() > 0) {
                    context.getErrorListener().throwToolException();
                }
            }
        } catch (IOException e) {
            throw new ToolException(e);
        }
    } else {
        processWsdl();
        if (context.getErrorListener().getErrorCount() > 0) {
            context.getErrorListener().throwToolException();
        }
    }
}
/**
 * Drives generation for a single WSDL: builds the service model, runs the
 * front-end processor over each service, generates types and artifacts,
 * optionally compiles, and optionally packages a client jar.
 */
private void processWsdl() {
    validate(context);
    FrontEndProfile frontend = context.get(FrontEndProfile.class);
    if (frontend == null) {
        throw new ToolException(new Message("FOUND_NO_FRONTEND", LOG));
    }
    WSDLConstants.WSDLVersion version = getWSDLVersion();
    String wsdlURL = (String)context.get(ToolConstants.CFG_WSDLURL);
    @SuppressWarnings("unchecked")
    List<ServiceInfo> serviceList = (List<ServiceInfo>)context.get(ToolConstants.SERVICE_LIST);
    if (serviceList == null) {
        serviceList = new ArrayList<>();
        // Build the ServiceModel from the WSDLModel
        if (version == WSDLConstants.WSDLVersion.WSDL11) {
            AbstractWSDLBuilder builder = frontend.getWSDLBuilder();
            builder.setContext(context);
            builder.setBus(getBus());
            context.put(Bus.class, getBus());
            wsdlURL = URIParserUtil.getAbsoluteURI(wsdlURL);
            builder.build(wsdlURL);
            builder.customize();
            Definition definition = builder.getWSDLModel();
            context.put(Definition.class, definition);
            builder.validate(definition);
            WSDLServiceBuilder serviceBuilder = new WSDLServiceBuilder(getBus());
            if (context.isVerbose()) {
                serviceBuilder.setUnwrapLogLevel(Level.INFO);
            }
            serviceBuilder.setIgnoreUnknownBindings(true);
            String allowRefs = (String)context.get(ToolConstants.CFG_ALLOW_ELEMENT_REFS);
            // NOTE(review): if the option is set but mapped to null,
            // allowRefs.length() below throws NPE — confirm the context
            // never stores a null value for a set option.
            if (!StringUtils.isEmpty(allowRefs)
                || context.optionSet(ToolConstants.CFG_ALLOW_ELEMENT_REFS)) {
                // Strip a leading '=' left over from "option=value" parsing.
                if (allowRefs.length() > 0 && allowRefs.charAt(0) == '=') {
                    allowRefs = allowRefs.substring(1);
                }
                if (StringUtils.isEmpty(allowRefs)) {
                    allowRefs = "true";
                }
                serviceBuilder.setAllowElementRefs(Boolean.valueOf(allowRefs));
            }
            String serviceName = (String)context.get(ToolConstants.CFG_SERVICENAME);
            if (serviceName != null) {
                // Only build the explicitly requested service.
                List<ServiceInfo> services = serviceBuilder
                    .buildServices(definition, getServiceQName(definition));
                serviceList.addAll(services);
            } else if (definition.getServices().size() > 0) {
                serviceList = serviceBuilder.buildServices(definition);
            } else {
                // WSDL without <service> elements: synthesize mock services.
                serviceList = serviceBuilder.buildMockServices(definition);
            }
            //remove definition from cache so that won't fail when encounter same wsdl file
            //name but different wsdl content(CXF-3340)
            getBus().getExtension(WSDLManager.class).removeDefinition(definition);
        } else {
            // TODO: wsdl2.0 support
        }
    }
    context.put(ToolConstants.SERVICE_LIST, serviceList);
    Map<String, InterfaceInfo> interfaces = new LinkedHashMap<>();
    // All services built from one WSDL share a schema collection; take it
    // from the first service.
    ServiceInfo service0 = serviceList.get(0);
    SchemaCollection schemaCollection = service0.getXmlSchemaCollection();
    context.put(ToolConstants.XML_SCHEMA_COLLECTION, schemaCollection);
    context.put(ToolConstants.PORTTYPE_MAP, interfaces);
    context.put(ClassCollector.class, createClassCollector());
    Processor processor = frontend.getProcessor();
    if (processor instanceof ClassNameProcessor) {
        // Pre-pass: resolve class names for every service before generation.
        processor.setEnvironment(context);
        for (ServiceInfo service : serviceList) {
            context.put(ServiceInfo.class, service);
            ((ClassNameProcessor)processor).processClassNames();
            context.put(ServiceInfo.class, null);
        }
    }
    if (context.optionSet(ToolConstants.CFG_NO_TYPES)) {
        // -noTypes implies no type generation, no "all", and no compile.
        context.remove(ToolConstants.CFG_TYPES);
        context.remove(ToolConstants.CFG_ALL);
        context.remove(ToolConstants.CFG_COMPILE);
    }
    generateTypes();
    if (context.getErrorListener().getErrorCount() > 0) {
        return;
    }
    for (ServiceInfo service : serviceList) {
        context.put(ServiceInfo.class, service);
        if (context.basicValidateWSDL()) {
            validate(service);
        }
        if (context.getErrorListener().getErrorCount() == 0) {
            // Build the JavaModel from the ServiceModel
            processor.setEnvironment(context);
            processor.process();
        }
    }
    if (context.getErrorListener().getErrorCount() > 0) {
        return;
    }
    if (context.optionSet(ToolConstants.CFG_CLIENT_JAR)) {
        // The packaged WSDL needs a stable local file name.
        enforceWSDLLocation(context);
    }
    if (!isSuppressCodeGen()) {
        // Generate artifacts
        for (FrontEndGenerator generator : frontend.getGenerators()) {
            generator.generate(context);
        }
    }
    context.remove(ToolConstants.SERVICE_LIST);
    // Build projects: compile classes and copy resources etc.
    if (context.optionSet(ToolConstants.CFG_COMPILE)) {
        new ClassUtils().compile(context);
    }
    if (context.isExcludeNamespaceEnabled()) {
        try {
            removeExcludeFiles();
        } catch (IOException e) {
            throw new ToolException(e);
        }
    }
    if (context.optionSet(ToolConstants.CFG_CLIENT_JAR)) {
        processClientJar(context);
    }
}
/**
 * Derives a local wsdlLocation file name for the client jar: the base name
 * of the WSDL URL when one can be extracted, otherwise the local part of
 * the first service's QName, in both cases with a ".wsdl" suffix.
 */
private void enforceWSDLLocation(ToolContext context) {
    String wsdlURL = (String)context.get(ToolConstants.CFG_WSDLURL);
    @SuppressWarnings("unchecked")
    List<ServiceInfo> services = (List<ServiceInfo>)context.get(ToolConstants.SERVICE_LIST);

    int lastSlash = wsdlURL.lastIndexOf('/');
    int firstDotAfterSlash = wsdlURL.indexOf('.', lastSlash);

    final String wsdlLocation;
    if (lastSlash > -1 && firstDotAfterSlash > -1) {
        wsdlLocation = wsdlURL.substring(lastSlash + 1, firstDotAfterSlash) + ".wsdl";
    } else {
        wsdlLocation = services.get(0).getName().getLocalPart() + ".wsdl";
    }
    context.put(ToolConstants.CFG_WSDLLOCATION, wsdlLocation);
}
/**
 * Compiles the generated sources into a temporary directory, writes a
 * self-contained local WSDL next to the classes, and packages everything
 * into the jar named by {@code CFG_CLIENT_JAR}. The class directory and
 * class collector are swapped out for the duration and restored at the end.
 */
private void processClientJar(ToolContext context) {
    ClassCollector oldCollector = context.get(ClassCollector.class);
    ClassCollector newCollector = new ClassCollector();
    String oldClassDir = (String)context.get(ToolConstants.CFG_CLASSDIR);
    File tmpDir = FileUtils.createTmpDir();
    context.put(ToolConstants.CFG_CLASSDIR, tmpDir.getAbsolutePath());
    // Carry over class names discovered during generation so the compile
    // step sees the same set.
    newCollector.setTypesClassNames(oldCollector.getTypesClassNames());
    newCollector.setSeiClassNames(oldCollector.getSeiClassNames());
    newCollector.setExceptionClassNames(oldCollector.getExceptionClassNames());
    newCollector.setServiceClassNames(oldCollector.getServiceClassNames());
    context.put(ClassCollector.class, newCollector);
    new ClassUtils().compile(context);
    generateLocalWSDL(context);
    File clientJarFile = new File((String)context.get(ToolConstants.CFG_OUTPUTDIR),
                                  (String)context.get(ToolConstants.CFG_CLIENT_JAR));
    try (JarOutputStream jarout = new JarOutputStream(
        Files.newOutputStream(clientJarFile.toPath()), new Manifest())) {
        createClientJar(tmpDir, jarout);
    } catch (Exception e) {
        LOG.log(Level.SEVERE, "FAILED_TO_CREAT_CLIENTJAR", e);
        Message msg = new Message("FAILED_TO_CREAT_CLIENTJAR", LOG);
        throw new ToolException(msg, e);
    }
    // Restore the caller's class dir and collector.
    context.put(ToolConstants.CFG_CLASSDIR, oldClassDir);
    context.put(ClassCollector.class, oldCollector);
}
/**
 * Recursively adds the contents of {@code tmpDirectory} to the jar, with
 * entry names relative to the configured class directory. Directory
 * entries are emitted with a trailing '/' per the jar convention.
 *
 * @param tmpDirectory directory whose contents are added
 * @param jarout open jar stream to write entries to (not closed here)
 * @throws ToolException wrapping any I/O failure
 */
private void createClientJar(File tmpDirectory, JarOutputStream jarout) {
    try {
        URI baseDir = new File((String)context.get(ToolConstants.CFG_CLASSDIR)).toURI();
        File[] files = tmpDirectory.listFiles();
        if (files != null) {
            for (File file : files) {
                String name = baseDir.relativize(file.toURI()).toString();
                if (file.isDirectory()) {
                    // Skip the empty name produced for the base dir itself.
                    if (!StringUtils.isEmpty(name)) {
                        if (!name.endsWith("/")) {
                            name += "/";
                        }
                        JarEntry entry = new JarEntry(name);
                        entry.setTime(file.lastModified());
                        jarout.putNextEntry(entry);
                        jarout.closeEntry();
                    }
                    createClientJar(file, jarout);
                    continue;
                }
                JarEntry entry = new JarEntry(name);
                entry.setTime(file.lastModified());
                jarout.putNextEntry(entry);
                // try-with-resources: the original leaked the stream when
                // IOUtils.copy threw before the explicit close().
                try (InputStream input = new BufferedInputStream(Files.newInputStream(file.toPath()))) {
                    IOUtils.copy(input, jarout);
                }
                jarout.closeEntry();
            }
        }
    } catch (Exception e) {
        Message msg = new Message("FAILED_ADD_JARENTRY", LOG);
        throw new ToolException(msg, e);
    }
}
/** Returns true when the suppress-code-generation option is set on the context. */
private boolean isSuppressCodeGen() {
    return context.optionSet(ToolConstants.CFG_SUPPRESS_GEN);
}
/**
 * Command-line entry point: parses arguments when present, runs
 * generation, prints usage on bad arguments, and always tears down.
 *
 * @param exitOnFinish passed through to the base container's argument
 *        processing
 * @throws ToolException propagated or wrapping any failure
 */
public void execute(boolean exitOnFinish) throws ToolException {
    try {
        if (getArgument() != null) {
            super.execute(exitOnFinish);
        }
        execute();
    } catch (ToolException ex) {
        // Bad usage gets a friendly usage dump before the exception propagates.
        if (ex.getCause() instanceof BadUsageException) {
            printUsageException(toolName, (BadUsageException)ex.getCause());
        }
        throw ex;
    } catch (Exception ex) {
        throw new ToolException(ex);
    } finally {
        tearDown();
    }
}
/**
 * Finds the QName of the service whose local part matches the configured
 * {@code CFG_SERVICENAME} (case-insensitive), searching the given
 * definition and all of its imports.
 *
 * @throws ToolException with SERVICE_NOT_FOUND when no match exists
 */
@SuppressWarnings("unchecked")
public QName getServiceQName(Definition def) {
    List<Definition> allDefs = new ArrayList<>();
    allDefs.add(def);
    // Flatten the imported definitions into the search list.
    for (Object importsPerNamespace : def.getImports().values()) {
        List<javax.wsdl.Import> importList = CastUtils.cast((List<?>)importsPerNamespace);
        for (javax.wsdl.Import wsdlImport : importList) {
            allDefs.add(wsdlImport.getDefinition());
        }
    }
    String serviceName = (String)context.get(ToolConstants.CFG_SERVICENAME);
    for (Definition definition : allDefs) {
        if (serviceName == null) {
            continue;
        }
        for (Object key : definition.getServices().keySet()) {
            QName candidate = (QName)key;
            if (candidate.getLocalPart().equalsIgnoreCase(serviceName)) {
                return candidate;
            }
        }
    }
    Message msg = new Message("SERVICE_NOT_FOUND", LOG, new Object[] {serviceName});
    throw new ToolException(msg);
}
/**
 * Seeds the context with the built-in namespace-to-package mapping and the
 * default exclusion list, unless the user disabled them via the
 * corresponding options.
 */
public void loadDefaultNSPackageMapping(ToolContext env) {
    if (!env.hasExcludeNamespace(DEFAULT_NS2PACKAGE)
        && env.getBooleanValue(ToolConstants.CFG_DEFAULT_NS, "true")
        && env.get(ToolConstants.CFG_NO_ADDRESS_BINDING) != null) {
        // currently namespace2package.cfg only contains the ws-addressing mapping
        // NOTE(review): the mapping loads only when CFG_NO_ADDRESS_BINDING is
        // set (non-null) — confirm this polarity is intended.
        env.loadDefaultNS2Pck(getResourceAsStream("namespace2package.cfg"));
    }
    if (env.getBooleanValue(ToolConstants.CFG_DEFAULT_EX, "true")) {
        env.loadDefaultExcludes(getResourceAsStream("wsdltojavaexclude.cfg"));
    }
}
/**
 * Applies "-nexclude" mappings of the form "namespace[=package]". Each
 * entry is registered as an excluded namespace; an explicit package is
 * also added to the regular namespace/package map. Setting the
 * WS-Addressing namespace additionally flags CFG_NO_ADDRESS_BINDING.
 */
public void setExcludePackageAndNamespaces(ToolContext env) {
    if (env.get(ToolConstants.CFG_NEXCLUDE) == null) {
        return;
    }
    String[] mappings;
    try {
        mappings = (String[])env.get(ToolConstants.CFG_NEXCLUDE);
    } catch (ClassCastException e) {
        // A single occurrence arrives as a plain String.
        mappings = new String[] {(String)env.get(ToolConstants.CFG_NEXCLUDE)};
    }
    for (String mapping : mappings) {
        int eq = mapping.indexOf('=');
        if (eq == -1) {
            env.addExcludeNamespacePackageMap(mapping, env.mapPackageName(mapping));
        } else {
            String ns = mapping.substring(0, eq);
            if (ns.equals(ToolConstants.WSA_NAMESPACE_URI)) {
                env.put(ToolConstants.CFG_NO_ADDRESS_BINDING, ToolConstants.CFG_NO_ADDRESS_BINDING);
            }
            String excludePackageName = mapping.substring(eq + 1);
            env.addExcludeNamespacePackageMap(ns, excludePackageName);
            env.addNamespacePackageMap(ns, excludePackageName);
        }
    }
}
/**
 * Applies "-p" mappings. "namespace=package" entries go into the
 * namespace/package map (WS-Addressing additionally flags
 * CFG_NO_ADDRESS_BINDING); a bare value sets the global package name.
 */
public void setPackageAndNamespaces(ToolContext env) {
    if (env.get(ToolConstants.CFG_PACKAGENAME) == null) {
        return;
    }
    String[] mappings;
    try {
        mappings = (String[])env.get(ToolConstants.CFG_PACKAGENAME);
    } catch (ClassCastException e) {
        // A single occurrence arrives as a plain String.
        mappings = new String[] {(String)env.get(ToolConstants.CFG_PACKAGENAME)};
    }
    for (String mapping : mappings) {
        int eq = mapping.indexOf('=');
        if (eq == -1) {
            env.setPackageName(mapping);
        } else {
            String ns = mapping.substring(0, eq);
            if (ns.equals(ToolConstants.WSA_NAMESPACE_URI)) {
                env.put(ToolConstants.CFG_NO_ADDRESS_BINDING, ToolConstants.CFG_NO_ADDRESS_BINDING);
            }
            env.addNamespacePackageMap(ns, mapping.substring(eq + 1));
        }
    }
}
/**
 * Validates and normalizes the tool context before generation: ensures the
 * output/class directories exist, requires a WSDL URL, normalizes
 * {@code CFG_WSDLURL} to an absolute URI, derives a default
 * {@code CFG_WSDLLOCATION} from the raw URL, and absolutizes any binding
 * file paths.
 *
 * @throws ToolException if a required directory cannot be created or no
 *         WSDL URL was supplied
 */
public void validate(ToolContext env) throws ToolException {
    String outdir = (String)env.get(ToolConstants.CFG_OUTPUTDIR);
    if (!isSuppressCodeGen()) {
        if (outdir != null) {
            File dir = new File(outdir);
            if (!dir.exists() && !dir.mkdirs()) {
                Message msg = new Message("DIRECTORY_COULD_NOT_BE_CREATED", LOG, outdir);
                throw new ToolException(msg);
            }
            if (!dir.isDirectory()) {
                Message msg = new Message("NOT_A_DIRECTORY", LOG, outdir);
                throw new ToolException(msg);
            }
        }
        if (env.optionSet(ToolConstants.CFG_COMPILE)) {
            String clsdir = (String)env.get(ToolConstants.CFG_CLASSDIR);
            if (clsdir != null) {
                File dir = new File(clsdir);
                if (!dir.exists() && !dir.mkdirs()) {
                    Message msg = new Message("DIRECTORY_COULD_NOT_BE_CREATED", LOG, clsdir);
                    throw new ToolException(msg);
                }
            }
        }
    }
    String wsdl = (String)env.get(ToolConstants.CFG_WSDLURL);
    if (StringUtils.isEmpty(wsdl)) {
        Message msg = new Message("NO_WSDL_URL", LOG);
        throw new ToolException(msg);
    }
    env.put(ToolConstants.CFG_WSDLURL, URIParserUtil.getAbsoluteURI(wsdl));
    if (!env.containsKey(ToolConstants.CFG_WSDLLOCATION)) {
        //make sure the "raw" form is used for the wsdlLocation
        //instead of the absolute URI that normalize may return
        boolean assumeFileURI = false;
        try {
            URI uri = new URI(wsdl);
            String uriScheme = uri.getScheme();
            if (uriScheme == null) {
                // No scheme: treat the value as a plain file path below.
                assumeFileURI = true;
            }
            wsdl = uri.toString();
        } catch (Exception e) {
            //not a URL, assume file
            assumeFileURI = true;
        }
        if (assumeFileURI) {
            // A ':' without a leading '/' suggests a Windows drive letter
            // (e.g. C:\...), which needs the extra '/' after "file:".
            if (wsdl.indexOf(':') != -1 && !wsdl.startsWith("/")) {
                wsdl = "file:/" + wsdl;
            } else {
                wsdl = "file:" + wsdl;
            }
            try {
                URI uri = new URI(wsdl);
                wsdl = uri.toString();
            } catch (Exception e1) {
                //ignore...
            }
        }
        wsdl = wsdl.replace("\\", "/");
        env.put(ToolConstants.CFG_WSDLLOCATION, wsdl);
    }
    String[] bindingFiles;
    try {
        bindingFiles = (String[])env.get(ToolConstants.CFG_BINDING);
        if (bindingFiles == null) {
            return;
        }
    } catch (ClassCastException e) {
        // A single -b argument arrives as a plain String.
        bindingFiles = new String[1];
        bindingFiles[0] = (String)env.get(ToolConstants.CFG_BINDING);
    }
    for (int i = 0; i < bindingFiles.length; i++) {
        bindingFiles[i] = URIParserUtil.getAbsoluteURI(bindingFiles[i]);
    }
    env.put(ToolConstants.CFG_BINDING, bindingFiles);
}
/**
 * Publishes the tool installation directory into the context: the
 * "install.dir" system property, or "." when it is unset. Used when the
 * -ant option is active (see buildToolContext).
 */
public void setAntProperties(ToolContext env) {
    // getProperty with a default collapses the original null check.
    env.put(ToolConstants.CFG_INSTALL_DIR, System.getProperty("install.dir", "."));
}
/**
 * Copies every entry of wsdltojavalib.properties into the context and
 * stores the whole Properties object under CFG_ANT_PROP. Note: the
 * Properties reference is stored even when loading returned null,
 * matching the original behavior.
 */
protected void setLibraryReferences(ToolContext env) {
    Properties libraryProps = loadProperties(getResourceAsStream("wsdltojavalib.properties"));
    if (libraryProps != null) {
        for (Map.Entry<Object, Object> libEntry : libraryProps.entrySet()) {
            env.put((String)libEntry.getKey(), libEntry.getValue());
        }
    }
    env.put(ToolConstants.CFG_ANT_PROP, libraryProps);
}
/**
 * Populates {@link #context} from the parsed command line: applies
 * defaults (output dir ".", WSDL 1.1), ant properties when -ant is set,
 * and the default plus user-supplied namespace-to-package mappings.
 */
public void buildToolContext() {
    context = getContext();
    context.addParameters(getParametersMap(getArrayKeys()));
    if (context.get(ToolConstants.CFG_OUTPUTDIR) == null) {
        context.put(ToolConstants.CFG_OUTPUTDIR, ".");
    }
    if (context.containsKey(ToolConstants.CFG_ANT)) {
        setAntProperties(context);
        setLibraryReferences(context);
    }
    if (!context.containsKey(ToolConstants.CFG_WSDL_VERSION)) {
        context.put(ToolConstants.CFG_WSDL_VERSION, WSDLConstants.WSDL11);
    }
    // Warning suppression is always enabled for this tool.
    context.put(ToolConstants.CFG_SUPPRESS_WARNINGS, true);
    loadDefaultNSPackageMapping(context);
    setPackageAndNamespaces(context);
    setExcludePackageAndNamespaces(context);
}
/**
 * Loads a classpath resource relative to this class; returns null when the
 * resource is missing (per the Class.getResourceAsStream contract).
 */
protected static InputStream getResourceAsStream(String file) {
    return WSDLToJavaContainer.class.getResourceAsStream(file);
}
/**
 * Verifies that the mandatory "wsdlurl" parameter was supplied, collecting
 * a user error otherwise, and converts any accumulated errors into a
 * ToolException wrapping a BadUsageException.
 */
public void checkParams(ErrorVisitor errors) throws ToolException {
    CommandDocument doc = super.getCommandDocument();
    if (!doc.hasParameter("wsdlurl")) {
        errors.add(new ErrorVisitor.UserError("WSDL/SCHEMA URL has to be specified"));
    }
    if (!errors.getErrors().isEmpty()) {
        Message msg = new Message("PARAMETER_MISSING", LOG);
        throw new ToolException(msg, new BadUsageException(getUsage(), errors));
    }
}
/**
 * Deletes generated artifacts belonging to excluded namespaces: the
 * generated .java file (and its .class counterpart when compilation ran),
 * then prunes any directories left empty, walking up until the output
 * directory is reached.
 *
 * @throws IOException if a canonical path cannot be resolved
 */
public void removeExcludeFiles() throws IOException {
    List<String> excludeGenFiles = context.getExcludeFileList();
    if (excludeGenFiles == null) {
        return;
    }
    String outPutDir = (String)context.get(ToolConstants.CFG_OUTPUTDIR);
    for (String excludeFile : excludeGenFiles) {
        File file = new File(outPutDir, excludeFile);
        file.delete();
        deleteEmptyParents(file.getParentFile(), outPutDir);
        if (context.get(ToolConstants.CFG_COMPILE) != null) {
            String classDir = context.get(ToolConstants.CFG_CLASSDIR) == null
                ? outPutDir : (String)context.get(ToolConstants.CFG_CLASSDIR);
            // Guard: a name without ".java" previously caused
            // StringIndexOutOfBoundsException via substring(0, -1).
            int javaExt = excludeFile.indexOf(".java");
            String classFileName = javaExt == -1
                ? excludeFile + ".class"
                : excludeFile.substring(0, javaExt) + ".class";
            File classFile = new File(classDir, classFileName);
            classFile.delete();
            // NOTE(review): the original also stops this walk at outPutDir,
            // even when classes live under CFG_CLASSDIR; preserved as-is —
            // confirm that is intended.
            deleteEmptyParents(classFile.getParentFile(), outPutDir);
        }
    }
}

/**
 * Walks up from {@code dir}, deleting each empty directory, stopping at
 * (and not deleting) the directory whose canonical path equals
 * {@code stopDir} (case-insensitive).
 */
private static void deleteEmptyParents(File dir, String stopDir) throws IOException {
    File current = dir;
    while (current != null && !current.getCanonicalPath().equalsIgnoreCase(stopDir)) {
        // Single list() snapshot; the original listed the directory twice.
        String[] children = current.list();
        if (current.isDirectory() && children != null && children.length == 0) {
            current.delete();
        }
        current = current.getParentFile();
    }
}
/**
 * Decides whether type generation can be skipped: never when types were
 * explicitly requested (-types/-all); otherwise when any other specific
 * artifact kind was requested, or when -noTypes is set.
 */
public boolean passthrough() {
    if (context.optionSet(ToolConstants.CFG_GEN_TYPES) || context.optionSet(ToolConstants.CFG_ALL)) {
        return false;
    }
    boolean otherArtifactRequested =
        context.optionSet(ToolConstants.CFG_GEN_ANT)
            || context.optionSet(ToolConstants.CFG_GEN_CLIENT)
            || context.optionSet(ToolConstants.CFG_GEN_IMPL)
            || context.optionSet(ToolConstants.CFG_GEN_SEI)
            || context.optionSet(ToolConstants.CFG_GEN_SERVER)
            || context.optionSet(ToolConstants.CFG_GEN_SERVICE)
            || context.optionSet(ToolConstants.CFG_GEN_FAULT);
    return otherArtifactRequested || context.optionSet(ToolConstants.CFG_NO_TYPES);
}
/**
 * Initializes the configured data binding and, unless type generation is
 * being passed through (see {@link #passthrough()}), generates the types.
 *
 * @throws ToolException when no data binding profile is configured
 */
public void generateTypes() throws ToolException {
    DataBindingProfile dataBinding = context.get(DataBindingProfile.class);
    if (dataBinding == null) {
        throw new ToolException(new Message("FOUND_NO_DATABINDING", LOG));
    }
    dataBinding.initialize(context);
    if (!passthrough()) {
        dataBinding.generate(context);
    }
}
/**
 * Runs every discovered {@link ServiceValidator} against the service
 * model.
 *
 * @throws ToolException carrying the first failing validator's message
 */
public void validate(final ServiceInfo service) throws ToolException {
    for (ServiceValidator validator : getServiceValidators()) {
        // Expose the tool context to validators via a service property.
        service.setProperty(ToolContext.class.getName(), context);
        validator.setService(service);
        if (!validator.isValid()) {
            throw new ToolException(validator.getErrorMessage());
        }
    }
}
/**
 * Discovers service validators declared in META-INF/tools.service.validator.xml
 * properties files on the context class loader's classpath and instantiates
 * each via its no-arg constructor. Instantiation failures are logged and
 * skipped rather than propagated.
 */
public List<ServiceValidator> getServiceValidators() {
    List<ServiceValidator> result = new ArrayList<>();
    final Properties validatorProps;
    try {
        validatorProps = PropertiesLoaderUtils.loadAllProperties(
            SERVICE_VALIDATOR, Thread.currentThread().getContextClassLoader());
    } catch (IOException ex) {
        throw new RuntimeException(ex);
    }
    for (Object value : validatorProps.values()) {
        String validatorClass = (String)value;
        try {
            if (LOG.isLoggable(Level.FINE)) {
                LOG.fine("Found service validator : " + validatorClass);
            }
            Class<?> clazz = Class.forName(validatorClass, true,
                                           Thread.currentThread().getContextClassLoader());
            result.add((ServiceValidator)clazz.getDeclaredConstructor().newInstance());
        } catch (Exception ex) {
            LOG.log(Level.WARNING, "EXTENSION_ADD_FAILED_MSG", ex);
        }
    }
    return result;
}
/**
 * Writes a self-contained copy of the WSDL (plus its imported schemas and
 * imported WSDLs) into the class directory so it can be packaged into the
 * client jar. External schema documents are renamed schemaN.xsd and all
 * schema import/include and WSDL import locations are rewritten to the
 * local file names.
 */
@SuppressWarnings("unchecked")
private void generateLocalWSDL(ToolContext context) {
    String outputdir = (String)context.get(ToolConstants.CFG_CLASSDIR);
    File wsdlFile = new File(outputdir, (String)context.get(ToolConstants.CFG_WSDLLOCATION));
    Definition def = context.get(Definition.class);
    try {
        //get imported schemas
        int xsdCount = 0;
        SchemaCollection schemas = (SchemaCollection) context.get(ToolConstants.XML_SCHEMA_COLLECTION);
        Map<String, String> sourceMap = new HashMap<>();
        for (XmlSchema imp : schemas.getXmlSchemas()) {
            // Schemas embedded in a WSDL (source URI contains ".wsdl#") stay
            // inline; only external schema documents get local copies.
            if (imp.getSourceURI() != null && !imp.getSourceURI().contains(".wsdl#")) {
                String schemaFileName = "schema" + (++xsdCount) + ".xsd";
                File sourceFile = new File(imp.getSourceURI());
                sourceMap.put(createSchemaFileKey(imp.getTargetNamespace(), sourceFile.getName()), schemaFileName);
            }
        }
        //get imported wsdls
        int wsdlImportCount = 0;
        List<Definition> defs = (List<Definition>)context.get(ToolConstants.IMPORTED_DEFINITION);
        Map<String, String> importWSDLMap = new HashMap<>();
        for (Definition importDef : defs) {
            File importedWsdlFile;
            if (!StringUtils.isEmpty(importDef.getDocumentBaseURI())) {
                importedWsdlFile = new File(importDef.getDocumentBaseURI());
            } else {
                importedWsdlFile = new File(importDef.getQName().getLocalPart() + ".wsdl");
            }
            // Fall back to a synthetic name when the base URI does not yield
            // a usable file name.
            if (!FileUtils.isValidFileName(importedWsdlFile.getName())) {
                importedWsdlFile = new File("import" + (++wsdlImportCount) + ".wsdl");
            }
            importWSDLMap.put(importDef.getTargetNamespace(), importedWsdlFile.getName());
        }
        final OutputStreamCreator outputStreamCreator;
        if (context.get(OutputStreamCreator.class) != null) {
            outputStreamCreator = context.get(OutputStreamCreator.class);
        } else {
            outputStreamCreator = new OutputStreamCreator();
            context.put(OutputStreamCreator.class, outputStreamCreator);
        }
        // Write each external schema to its local schemaN.xsd, rewriting
        // nested import/include locations first.
        for (XmlSchema imp : schemas.getXmlSchemas()) {
            if (imp.getSourceURI() != null && !imp.getSourceURI().contains(".wsdl#")) {
                File sourceFile = new File(imp.getSourceURI());
                String schemaKey = createSchemaFileKey(imp.getTargetNamespace(), sourceFile.getName());
                String schemaFileName = sourceMap.get(schemaKey);
                File impfile = new File(wsdlFile.getParentFile(), schemaFileName);
                Element el = imp.getSchemaDocument().getDocumentElement();
                updateImports(el, sourceMap);
                updateIncludes(el, sourceMap);
                try (Writer os = new FileWriterUtil(impfile.getParent(), outputStreamCreator)
                    .getWriter(impfile, StandardCharsets.UTF_8.name())) {
                    StaxUtils.writeTo(el, os, 2);
                }
            }
        }
        WSDLWriter wsdlWriter = WSDLFactory.newInstance().newWSDLWriter();
        //change the import location in wsdl file
        try (OutputStream wsdloutput = new BufferedOutputStream(Files.newOutputStream(wsdlFile.toPath()))) {
            // Serialize to memory first so the DOM can be patched before the
            // final file is written.
            LoadingByteArrayOutputStream bout = new LoadingByteArrayOutputStream();
            wsdlWriter.writeWSDL(def, bout);
            Element defEle = StaxUtils.read(bout.createInputStream()).getDocumentElement();
            List<Element> xsdElements = DOMUtils.findAllElementsByTagNameNS(defEle,
                                                                            WSDLConstants.NS_SCHEMA_XSD,
                                                                            "schema");
            for (Element xsdEle : xsdElements) {
                updateImports(xsdEle, sourceMap);
                updateIncludes(xsdEle, sourceMap);
            }
            updateWSDLImports(defEle, importWSDLMap);
            StaxUtils.writeTo(defEle, wsdloutput);
        }
        // Same treatment for every imported WSDL document.
        for (Definition importDef : defs) {
            File importWsdlFile = new File(outputdir, importWSDLMap.get(importDef.getTargetNamespace()));
            try (OutputStream wsdlOs = new BufferedOutputStream(Files.newOutputStream(importWsdlFile.toPath()))) {
                LoadingByteArrayOutputStream bout = new LoadingByteArrayOutputStream();
                wsdlWriter.writeWSDL(importDef, bout);
                Element importEle = StaxUtils.read(bout.createInputStream()).getDocumentElement();
                List<Element> xsdElements = DOMUtils.findAllElementsByTagNameNS(importEle,
                                                                               WSDLConstants.NS_SCHEMA_XSD, "schema");
                for (Element xsdEle : xsdElements) {
                    updateImports(xsdEle, sourceMap);
                    updateIncludes(xsdEle, sourceMap);
                }
                updateWSDLImports(importEle, importWSDLMap);
                StaxUtils.writeTo(importEle, wsdlOs);
            }
        }
    } catch (Exception ex) {
        LOG.log(Level.SEVERE, "FAILED_TO_GEN_LOCAL_WSDL", ex);
        Message msg = new Message("FAILED_TO_GEN_LOCAL_WSDL", LOG);
        throw new ToolException(msg, ex);
    }
}
/**
 * Builds the lookup key used by the local-WSDL schema map: the target
 * namespace and the schema document's file name joined by an underscore.
 */
private static String createSchemaFileKey(String targetNamespace, String fileName) {
    return targetNamespace + '_' + fileName;
}
/**
 * Rewrites the schemaLocation of every xsd:import child of {@code el} to
 * the locally generated schema file recorded in {@code sourceMap}.
 */
private void updateImports(Element el, Map<String, String> sourceMap) {
    for (Element importEl : DOMUtils.getChildrenWithName(el, WSDLConstants.NS_SCHEMA_XSD, "import")) {
        updateSchemaLocation(sourceMap, importEl.getAttribute("namespace"), importEl);
    }
}
/**
 * Rewrites the schemaLocation of every xsd:include child of {@code el}.
 * Includes share the enclosing schema's targetNamespace, which is used as
 * the map-key namespace.
 */
private void updateIncludes(Element el, Map<String, String> sourceMap) {
    String targetNamespace = el.getAttribute("targetNamespace");
    for (Element includeEl : DOMUtils.getChildrenWithName(el, WSDLConstants.NS_SCHEMA_XSD, "include")) {
        updateSchemaLocation(sourceMap, targetNamespace, includeEl);
    }
}
/**
 * Points an xsd:import/xsd:include at the locally generated schema copy.
 * The map key combines the namespace with the file name of the current
 * schemaLocation attribute.
 */
private static void updateSchemaLocation(Map<String, String> sourceMap, String namespace, Element e) {
    File sourceFile = new File(e.getAttribute("schemaLocation"));
    String schemaKey = createSchemaFileKey(namespace, sourceFile.getName());
    String newLocation = sourceMap.get(schemaKey);
    // Only rewrite when a local copy was generated; the original passed a
    // possibly-null value straight to setAttribute, which fails on DOM
    // implementations that reject null attribute values.
    if (newLocation != null) {
        e.setAttribute("schemaLocation", newLocation);
    }
}
/**
 * Rewrites the location attribute of every wsdl:import child of {@code el}
 * to the local file name recorded in {@code wsdlSourceMap}, keyed by the
 * import's namespace.
 */
private void updateWSDLImports(Element el, Map<String, String> wsdlSourceMap) {
    String wsdlImportNs = WSDLConstants.QNAME_IMPORT.getNamespaceURI();
    for (Element importEl : DOMUtils.getChildrenWithName(el, wsdlImportNs, "import")) {
        String ns = importEl.getAttribute("namespace");
        importEl.setAttribute("location", wsdlSourceMap.get(ns));
    }
}
}
|
googleapis/google-cloud-java | 37,956 | java-analytics-admin/proto-google-analytics-admin-v1alpha/src/main/java/com/google/analytics/admin/v1alpha/ListGoogleAdsLinksResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/analytics/admin/v1alpha/analytics_admin.proto
// Protobuf Java Version: 3.25.8
package com.google.analytics.admin.v1alpha;
/**
*
*
* <pre>
* Response message for ListGoogleAdsLinks RPC.
* </pre>
*
* Protobuf type {@code google.analytics.admin.v1alpha.ListGoogleAdsLinksResponse}
*/
public final class ListGoogleAdsLinksResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.analytics.admin.v1alpha.ListGoogleAdsLinksResponse)
ListGoogleAdsLinksResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListGoogleAdsLinksResponse.newBuilder() to construct.
private ListGoogleAdsLinksResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListGoogleAdsLinksResponse() {
googleAdsLinks_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListGoogleAdsLinksResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.analytics.admin.v1alpha.AnalyticsAdminProto
.internal_static_google_analytics_admin_v1alpha_ListGoogleAdsLinksResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.analytics.admin.v1alpha.AnalyticsAdminProto
.internal_static_google_analytics_admin_v1alpha_ListGoogleAdsLinksResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.analytics.admin.v1alpha.ListGoogleAdsLinksResponse.class,
com.google.analytics.admin.v1alpha.ListGoogleAdsLinksResponse.Builder.class);
}
public static final int GOOGLE_ADS_LINKS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.analytics.admin.v1alpha.GoogleAdsLink> googleAdsLinks_;
/**
*
*
* <pre>
* List of GoogleAdsLinks.
* </pre>
*
* <code>repeated .google.analytics.admin.v1alpha.GoogleAdsLink google_ads_links = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.analytics.admin.v1alpha.GoogleAdsLink> getGoogleAdsLinksList() {
return googleAdsLinks_;
}
/**
*
*
* <pre>
* List of GoogleAdsLinks.
* </pre>
*
* <code>repeated .google.analytics.admin.v1alpha.GoogleAdsLink google_ads_links = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.analytics.admin.v1alpha.GoogleAdsLinkOrBuilder>
getGoogleAdsLinksOrBuilderList() {
return googleAdsLinks_;
}
/**
*
*
* <pre>
* List of GoogleAdsLinks.
* </pre>
*
* <code>repeated .google.analytics.admin.v1alpha.GoogleAdsLink google_ads_links = 1;</code>
*/
@java.lang.Override
public int getGoogleAdsLinksCount() {
return googleAdsLinks_.size();
}
/**
*
*
* <pre>
* List of GoogleAdsLinks.
* </pre>
*
* <code>repeated .google.analytics.admin.v1alpha.GoogleAdsLink google_ads_links = 1;</code>
*/
@java.lang.Override
public com.google.analytics.admin.v1alpha.GoogleAdsLink getGoogleAdsLinks(int index) {
return googleAdsLinks_.get(index);
}
/**
*
*
* <pre>
* List of GoogleAdsLinks.
* </pre>
*
* <code>repeated .google.analytics.admin.v1alpha.GoogleAdsLink google_ads_links = 1;</code>
*/
@java.lang.Override
public com.google.analytics.admin.v1alpha.GoogleAdsLinkOrBuilder getGoogleAdsLinksOrBuilder(
int index) {
return googleAdsLinks_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
  // Memoized initialization check: -1 = not yet computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // This message has no required fields, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Fields are written in ascending field-number order (1: google_ads_links,
    // 2: next_page_token), followed by any unknown fields preserved from parsing.
    for (int i = 0; i < googleAdsLinks_.size(); i++) {
      output.writeMessage(1, googleAdsLinks_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
    }
    getUnknownFields().writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    // Size is memoized in memoizedSize (-1 = not computed); safe because the
    // message is immutable once built.
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    for (int i = 0; i < googleAdsLinks_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, googleAdsLinks_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    // Value equality over all fields, including preserved unknown fields.
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.analytics.admin.v1alpha.ListGoogleAdsLinksResponse)) {
      return super.equals(obj);
    }
    com.google.analytics.admin.v1alpha.ListGoogleAdsLinksResponse other =
        (com.google.analytics.admin.v1alpha.ListGoogleAdsLinksResponse) obj;
    if (!getGoogleAdsLinksList().equals(other.getGoogleAdsLinksList())) return false;
    if (!getNextPageToken().equals(other.getNextPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    // Memoized (0 = not yet computed); hash mixes the descriptor identity,
    // each set field keyed by its field number, and the unknown fields.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getGoogleAdsLinksCount() > 0) {
      hash = (37 * hash) + GOOGLE_ADS_LINKS_FIELD_NUMBER;
      hash = (53 * hash) + getGoogleAdsLinksList().hashCode();
    }
    hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getNextPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // ---------------------------------------------------------------------------
  // Standard generated parse entry points: one overload per input source
  // (ByteBuffer, ByteString, byte[], InputStream, CodedInputStream), each with
  // and without an ExtensionRegistryLite. All delegate to PARSER; the stream
  // variants route through GeneratedMessageV3 helpers that convert IOExceptions.
  // The *Delimited* variants read a varint length prefix before the message.
  // ---------------------------------------------------------------------------
  public static com.google.analytics.admin.v1alpha.ListGoogleAdsLinksResponse parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.analytics.admin.v1alpha.ListGoogleAdsLinksResponse parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.analytics.admin.v1alpha.ListGoogleAdsLinksResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.analytics.admin.v1alpha.ListGoogleAdsLinksResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.analytics.admin.v1alpha.ListGoogleAdsLinksResponse parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.analytics.admin.v1alpha.ListGoogleAdsLinksResponse parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.analytics.admin.v1alpha.ListGoogleAdsLinksResponse parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.analytics.admin.v1alpha.ListGoogleAdsLinksResponse parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.analytics.admin.v1alpha.ListGoogleAdsLinksResponse parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.analytics.admin.v1alpha.ListGoogleAdsLinksResponse parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.analytics.admin.v1alpha.ListGoogleAdsLinksResponse parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.analytics.admin.v1alpha.ListGoogleAdsLinksResponse parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // Builder factory methods. toBuilder() on the default instance returns a
  // fresh Builder; on any other instance the builder is pre-populated via
  // mergeFrom(this).
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.analytics.admin.v1alpha.ListGoogleAdsLinksResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Response message for ListGoogleAdsLinks RPC.
   * </pre>
   *
   * Protobuf type {@code google.analytics.admin.v1alpha.ListGoogleAdsLinksResponse}
   */
  // NOTE(review): protoc-generated builder — do not hand-edit logic. Field
  // presence is tracked in bitField0_: bit 0x1 = google_ads_links list is
  // mutable/owned by this builder, bit 0x2 = next_page_token was set.
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.analytics.admin.v1alpha.ListGoogleAdsLinksResponse)
      com.google.analytics.admin.v1alpha.ListGoogleAdsLinksResponseOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.analytics.admin.v1alpha.AnalyticsAdminProto
          .internal_static_google_analytics_admin_v1alpha_ListGoogleAdsLinksResponse_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.analytics.admin.v1alpha.AnalyticsAdminProto
          .internal_static_google_analytics_admin_v1alpha_ListGoogleAdsLinksResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.analytics.admin.v1alpha.ListGoogleAdsLinksResponse.class,
              com.google.analytics.admin.v1alpha.ListGoogleAdsLinksResponse.Builder.class);
    }

    // Construct using com.google.analytics.admin.v1alpha.ListGoogleAdsLinksResponse.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    @java.lang.Override
    public Builder clear() {
      // Resets both fields to their defaults and clears all presence bits.
      super.clear();
      bitField0_ = 0;
      if (googleAdsLinksBuilder_ == null) {
        googleAdsLinks_ = java.util.Collections.emptyList();
      } else {
        googleAdsLinks_ = null;
        googleAdsLinksBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      nextPageToken_ = "";
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.analytics.admin.v1alpha.AnalyticsAdminProto
          .internal_static_google_analytics_admin_v1alpha_ListGoogleAdsLinksResponse_descriptor;
    }

    @java.lang.Override
    public com.google.analytics.admin.v1alpha.ListGoogleAdsLinksResponse
        getDefaultInstanceForType() {
      return com.google.analytics.admin.v1alpha.ListGoogleAdsLinksResponse.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.analytics.admin.v1alpha.ListGoogleAdsLinksResponse build() {
      com.google.analytics.admin.v1alpha.ListGoogleAdsLinksResponse result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.analytics.admin.v1alpha.ListGoogleAdsLinksResponse buildPartial() {
      com.google.analytics.admin.v1alpha.ListGoogleAdsLinksResponse result =
          new com.google.analytics.admin.v1alpha.ListGoogleAdsLinksResponse(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    private void buildPartialRepeatedFields(
        com.google.analytics.admin.v1alpha.ListGoogleAdsLinksResponse result) {
      if (googleAdsLinksBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)) {
          // Freeze the list so the built message can share it immutably;
          // clearing the bit makes the builder copy-on-write afterwards.
          googleAdsLinks_ = java.util.Collections.unmodifiableList(googleAdsLinks_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.googleAdsLinks_ = googleAdsLinks_;
      } else {
        result.googleAdsLinks_ = googleAdsLinksBuilder_.build();
      }
    }

    private void buildPartial0(
        com.google.analytics.admin.v1alpha.ListGoogleAdsLinksResponse result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.nextPageToken_ = nextPageToken_;
      }
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.analytics.admin.v1alpha.ListGoogleAdsLinksResponse) {
        return mergeFrom((com.google.analytics.admin.v1alpha.ListGoogleAdsLinksResponse) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.analytics.admin.v1alpha.ListGoogleAdsLinksResponse other) {
      if (other
          == com.google.analytics.admin.v1alpha.ListGoogleAdsLinksResponse.getDefaultInstance())
        return this;
      if (googleAdsLinksBuilder_ == null) {
        if (!other.googleAdsLinks_.isEmpty()) {
          if (googleAdsLinks_.isEmpty()) {
            // Share the other message's immutable list until a mutation occurs.
            googleAdsLinks_ = other.googleAdsLinks_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureGoogleAdsLinksIsMutable();
            googleAdsLinks_.addAll(other.googleAdsLinks_);
          }
          onChanged();
        }
      } else {
        if (!other.googleAdsLinks_.isEmpty()) {
          if (googleAdsLinksBuilder_.isEmpty()) {
            googleAdsLinksBuilder_.dispose();
            googleAdsLinksBuilder_ = null;
            googleAdsLinks_ = other.googleAdsLinks_;
            bitField0_ = (bitField0_ & ~0x00000001);
            googleAdsLinksBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getGoogleAdsLinksFieldBuilder()
                    : null;
          } else {
            googleAdsLinksBuilder_.addAllMessages(other.googleAdsLinks_);
          }
        }
      }
      if (!other.getNextPageToken().isEmpty()) {
        nextPageToken_ = other.nextPageToken_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        // Tag-dispatch parse loop: tag 10 = field 1 (message), tag 18 = field 2
        // (string); unrecognized tags are preserved as unknown fields.
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                com.google.analytics.admin.v1alpha.GoogleAdsLink m =
                    input.readMessage(
                        com.google.analytics.admin.v1alpha.GoogleAdsLink.parser(),
                        extensionRegistry);
                if (googleAdsLinksBuilder_ == null) {
                  ensureGoogleAdsLinksIsMutable();
                  googleAdsLinks_.add(m);
                } else {
                  googleAdsLinksBuilder_.addMessage(m);
                }
                break;
              } // case 10
            case 18:
              {
                nextPageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    private int bitField0_;

    private java.util.List<com.google.analytics.admin.v1alpha.GoogleAdsLink> googleAdsLinks_ =
        java.util.Collections.emptyList();

    // Copy-on-write: replaces a shared/immutable list with an owned ArrayList
    // before the first mutation, marking ownership in bit 0x1.
    private void ensureGoogleAdsLinksIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        googleAdsLinks_ =
            new java.util.ArrayList<com.google.analytics.admin.v1alpha.GoogleAdsLink>(
                googleAdsLinks_);
        bitField0_ |= 0x00000001;
      }
    }

    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.analytics.admin.v1alpha.GoogleAdsLink,
            com.google.analytics.admin.v1alpha.GoogleAdsLink.Builder,
            com.google.analytics.admin.v1alpha.GoogleAdsLinkOrBuilder>
        googleAdsLinksBuilder_;

    /**
     *
     *
     * <pre>
     * List of GoogleAdsLinks.
     * </pre>
     *
     * <code>repeated .google.analytics.admin.v1alpha.GoogleAdsLink google_ads_links = 1;</code>
     */
    public java.util.List<com.google.analytics.admin.v1alpha.GoogleAdsLink>
        getGoogleAdsLinksList() {
      if (googleAdsLinksBuilder_ == null) {
        return java.util.Collections.unmodifiableList(googleAdsLinks_);
      } else {
        return googleAdsLinksBuilder_.getMessageList();
      }
    }

    /**
     *
     *
     * <pre>
     * List of GoogleAdsLinks.
     * </pre>
     *
     * <code>repeated .google.analytics.admin.v1alpha.GoogleAdsLink google_ads_links = 1;</code>
     */
    public int getGoogleAdsLinksCount() {
      if (googleAdsLinksBuilder_ == null) {
        return googleAdsLinks_.size();
      } else {
        return googleAdsLinksBuilder_.getCount();
      }
    }

    /**
     *
     *
     * <pre>
     * List of GoogleAdsLinks.
     * </pre>
     *
     * <code>repeated .google.analytics.admin.v1alpha.GoogleAdsLink google_ads_links = 1;</code>
     */
    public com.google.analytics.admin.v1alpha.GoogleAdsLink getGoogleAdsLinks(int index) {
      if (googleAdsLinksBuilder_ == null) {
        return googleAdsLinks_.get(index);
      } else {
        return googleAdsLinksBuilder_.getMessage(index);
      }
    }

    /**
     *
     *
     * <pre>
     * List of GoogleAdsLinks.
     * </pre>
     *
     * <code>repeated .google.analytics.admin.v1alpha.GoogleAdsLink google_ads_links = 1;</code>
     */
    public Builder setGoogleAdsLinks(
        int index, com.google.analytics.admin.v1alpha.GoogleAdsLink value) {
      if (googleAdsLinksBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureGoogleAdsLinksIsMutable();
        googleAdsLinks_.set(index, value);
        onChanged();
      } else {
        googleAdsLinksBuilder_.setMessage(index, value);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * List of GoogleAdsLinks.
     * </pre>
     *
     * <code>repeated .google.analytics.admin.v1alpha.GoogleAdsLink google_ads_links = 1;</code>
     */
    public Builder setGoogleAdsLinks(
        int index, com.google.analytics.admin.v1alpha.GoogleAdsLink.Builder builderForValue) {
      if (googleAdsLinksBuilder_ == null) {
        ensureGoogleAdsLinksIsMutable();
        googleAdsLinks_.set(index, builderForValue.build());
        onChanged();
      } else {
        googleAdsLinksBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * List of GoogleAdsLinks.
     * </pre>
     *
     * <code>repeated .google.analytics.admin.v1alpha.GoogleAdsLink google_ads_links = 1;</code>
     */
    public Builder addGoogleAdsLinks(com.google.analytics.admin.v1alpha.GoogleAdsLink value) {
      if (googleAdsLinksBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureGoogleAdsLinksIsMutable();
        googleAdsLinks_.add(value);
        onChanged();
      } else {
        googleAdsLinksBuilder_.addMessage(value);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * List of GoogleAdsLinks.
     * </pre>
     *
     * <code>repeated .google.analytics.admin.v1alpha.GoogleAdsLink google_ads_links = 1;</code>
     */
    public Builder addGoogleAdsLinks(
        int index, com.google.analytics.admin.v1alpha.GoogleAdsLink value) {
      if (googleAdsLinksBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureGoogleAdsLinksIsMutable();
        googleAdsLinks_.add(index, value);
        onChanged();
      } else {
        googleAdsLinksBuilder_.addMessage(index, value);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * List of GoogleAdsLinks.
     * </pre>
     *
     * <code>repeated .google.analytics.admin.v1alpha.GoogleAdsLink google_ads_links = 1;</code>
     */
    public Builder addGoogleAdsLinks(
        com.google.analytics.admin.v1alpha.GoogleAdsLink.Builder builderForValue) {
      if (googleAdsLinksBuilder_ == null) {
        ensureGoogleAdsLinksIsMutable();
        googleAdsLinks_.add(builderForValue.build());
        onChanged();
      } else {
        googleAdsLinksBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * List of GoogleAdsLinks.
     * </pre>
     *
     * <code>repeated .google.analytics.admin.v1alpha.GoogleAdsLink google_ads_links = 1;</code>
     */
    public Builder addGoogleAdsLinks(
        int index, com.google.analytics.admin.v1alpha.GoogleAdsLink.Builder builderForValue) {
      if (googleAdsLinksBuilder_ == null) {
        ensureGoogleAdsLinksIsMutable();
        googleAdsLinks_.add(index, builderForValue.build());
        onChanged();
      } else {
        googleAdsLinksBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * List of GoogleAdsLinks.
     * </pre>
     *
     * <code>repeated .google.analytics.admin.v1alpha.GoogleAdsLink google_ads_links = 1;</code>
     */
    public Builder addAllGoogleAdsLinks(
        java.lang.Iterable<? extends com.google.analytics.admin.v1alpha.GoogleAdsLink> values) {
      if (googleAdsLinksBuilder_ == null) {
        ensureGoogleAdsLinksIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, googleAdsLinks_);
        onChanged();
      } else {
        googleAdsLinksBuilder_.addAllMessages(values);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * List of GoogleAdsLinks.
     * </pre>
     *
     * <code>repeated .google.analytics.admin.v1alpha.GoogleAdsLink google_ads_links = 1;</code>
     */
    public Builder clearGoogleAdsLinks() {
      if (googleAdsLinksBuilder_ == null) {
        googleAdsLinks_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        googleAdsLinksBuilder_.clear();
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * List of GoogleAdsLinks.
     * </pre>
     *
     * <code>repeated .google.analytics.admin.v1alpha.GoogleAdsLink google_ads_links = 1;</code>
     */
    public Builder removeGoogleAdsLinks(int index) {
      if (googleAdsLinksBuilder_ == null) {
        ensureGoogleAdsLinksIsMutable();
        googleAdsLinks_.remove(index);
        onChanged();
      } else {
        googleAdsLinksBuilder_.remove(index);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * List of GoogleAdsLinks.
     * </pre>
     *
     * <code>repeated .google.analytics.admin.v1alpha.GoogleAdsLink google_ads_links = 1;</code>
     */
    public com.google.analytics.admin.v1alpha.GoogleAdsLink.Builder getGoogleAdsLinksBuilder(
        int index) {
      return getGoogleAdsLinksFieldBuilder().getBuilder(index);
    }

    /**
     *
     *
     * <pre>
     * List of GoogleAdsLinks.
     * </pre>
     *
     * <code>repeated .google.analytics.admin.v1alpha.GoogleAdsLink google_ads_links = 1;</code>
     */
    public com.google.analytics.admin.v1alpha.GoogleAdsLinkOrBuilder getGoogleAdsLinksOrBuilder(
        int index) {
      if (googleAdsLinksBuilder_ == null) {
        return googleAdsLinks_.get(index);
      } else {
        return googleAdsLinksBuilder_.getMessageOrBuilder(index);
      }
    }

    /**
     *
     *
     * <pre>
     * List of GoogleAdsLinks.
     * </pre>
     *
     * <code>repeated .google.analytics.admin.v1alpha.GoogleAdsLink google_ads_links = 1;</code>
     */
    public java.util.List<? extends com.google.analytics.admin.v1alpha.GoogleAdsLinkOrBuilder>
        getGoogleAdsLinksOrBuilderList() {
      if (googleAdsLinksBuilder_ != null) {
        return googleAdsLinksBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(googleAdsLinks_);
      }
    }

    /**
     *
     *
     * <pre>
     * List of GoogleAdsLinks.
     * </pre>
     *
     * <code>repeated .google.analytics.admin.v1alpha.GoogleAdsLink google_ads_links = 1;</code>
     */
    public com.google.analytics.admin.v1alpha.GoogleAdsLink.Builder addGoogleAdsLinksBuilder() {
      return getGoogleAdsLinksFieldBuilder()
          .addBuilder(com.google.analytics.admin.v1alpha.GoogleAdsLink.getDefaultInstance());
    }

    /**
     *
     *
     * <pre>
     * List of GoogleAdsLinks.
     * </pre>
     *
     * <code>repeated .google.analytics.admin.v1alpha.GoogleAdsLink google_ads_links = 1;</code>
     */
    public com.google.analytics.admin.v1alpha.GoogleAdsLink.Builder addGoogleAdsLinksBuilder(
        int index) {
      return getGoogleAdsLinksFieldBuilder()
          .addBuilder(index, com.google.analytics.admin.v1alpha.GoogleAdsLink.getDefaultInstance());
    }

    /**
     *
     *
     * <pre>
     * List of GoogleAdsLinks.
     * </pre>
     *
     * <code>repeated .google.analytics.admin.v1alpha.GoogleAdsLink google_ads_links = 1;</code>
     */
    public java.util.List<com.google.analytics.admin.v1alpha.GoogleAdsLink.Builder>
        getGoogleAdsLinksBuilderList() {
      return getGoogleAdsLinksFieldBuilder().getBuilderList();
    }

    // Lazily creates the nested-builder view; once created, the plain list is
    // nulled out and all access goes through the RepeatedFieldBuilderV3.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.analytics.admin.v1alpha.GoogleAdsLink,
            com.google.analytics.admin.v1alpha.GoogleAdsLink.Builder,
            com.google.analytics.admin.v1alpha.GoogleAdsLinkOrBuilder>
        getGoogleAdsLinksFieldBuilder() {
      if (googleAdsLinksBuilder_ == null) {
        googleAdsLinksBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.analytics.admin.v1alpha.GoogleAdsLink,
                com.google.analytics.admin.v1alpha.GoogleAdsLink.Builder,
                com.google.analytics.admin.v1alpha.GoogleAdsLinkOrBuilder>(
                googleAdsLinks_,
                ((bitField0_ & 0x00000001) != 0),
                getParentForChildren(),
                isClean());
        googleAdsLinks_ = null;
      }
      return googleAdsLinksBuilder_;
    }

    private java.lang.Object nextPageToken_ = "";

    /**
     *
     *
     * <pre>
     * A token, which can be sent as `page_token` to retrieve the next page.
     * If this field is omitted, there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The nextPageToken.
     */
    public java.lang.String getNextPageToken() {
      java.lang.Object ref = nextPageToken_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        nextPageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * A token, which can be sent as `page_token` to retrieve the next page.
     * If this field is omitted, there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The bytes for nextPageToken.
     */
    public com.google.protobuf.ByteString getNextPageTokenBytes() {
      java.lang.Object ref = nextPageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        nextPageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * A token, which can be sent as `page_token` to retrieve the next page.
     * If this field is omitted, there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * A token, which can be sent as `page_token` to retrieve the next page.
     * If this field is omitted, there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearNextPageToken() {
      nextPageToken_ = getDefaultInstance().getNextPageToken();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * A token, which can be sent as `page_token` to retrieve the next page.
     * If this field is omitted, there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The bytes for nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.analytics.admin.v1alpha.ListGoogleAdsLinksResponse)
  }

  // @@protoc_insertion_point(class_scope:google.analytics.admin.v1alpha.ListGoogleAdsLinksResponse)
  private static final com.google.analytics.admin.v1alpha.ListGoogleAdsLinksResponse
      DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.analytics.admin.v1alpha.ListGoogleAdsLinksResponse();
  }

  public static com.google.analytics.admin.v1alpha.ListGoogleAdsLinksResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Shared parser: delegates to the builder's tag-dispatch mergeFrom and, on
  // failure, attaches the partially-built message to the thrown exception.
  private static final com.google.protobuf.Parser<ListGoogleAdsLinksResponse> PARSER =
      new com.google.protobuf.AbstractParser<ListGoogleAdsLinksResponse>() {
        @java.lang.Override
        public ListGoogleAdsLinksResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<ListGoogleAdsLinksResponse> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<ListGoogleAdsLinksResponse> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.analytics.admin.v1alpha.ListGoogleAdsLinksResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/jclouds | 38,147 | apis/s3/src/main/java/org/jclouds/s3/S3Client.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jclouds.s3;
import static com.google.common.net.HttpHeaders.EXPECT;
import static org.jclouds.blobstore.attr.BlobScopes.CONTAINER;
import java.io.Closeable;
import java.util.Map;
import java.util.Set;
import jakarta.inject.Named;
import jakarta.ws.rs.DELETE;
import jakarta.ws.rs.GET;
import jakarta.ws.rs.HEAD;
import jakarta.ws.rs.POST;
import jakarta.ws.rs.PUT;
import jakarta.ws.rs.Path;
import jakarta.ws.rs.PathParam;
import jakarta.ws.rs.Produces;
import jakarta.ws.rs.QueryParam;
import jakarta.ws.rs.core.MediaType;
import org.jclouds.Fallbacks.VoidOnNotFoundOr404;
import org.jclouds.blobstore.BlobStoreFallbacks.FalseOnContainerNotFound;
import org.jclouds.blobstore.BlobStoreFallbacks.FalseOnKeyNotFound;
import org.jclouds.blobstore.BlobStoreFallbacks.NullOnKeyNotFound;
import org.jclouds.blobstore.BlobStoreFallbacks.ThrowContainerNotFoundOn404;
import org.jclouds.blobstore.BlobStoreFallbacks.ThrowKeyNotFoundOn404;
import org.jclouds.blobstore.attr.BlobScope;
import org.jclouds.http.functions.ParseETagHeader;
import org.jclouds.http.options.GetOptions;
import org.jclouds.io.Payload;
import org.jclouds.javax.annotation.Nullable;
import org.jclouds.rest.annotations.BinderParam;
import org.jclouds.rest.annotations.Endpoint;
import org.jclouds.rest.annotations.EndpointParam;
import org.jclouds.rest.annotations.Fallback;
import org.jclouds.rest.annotations.Headers;
import org.jclouds.rest.annotations.ParamParser;
import org.jclouds.rest.annotations.ParamValidators;
import org.jclouds.rest.annotations.QueryParams;
import org.jclouds.rest.annotations.RequestFilters;
import org.jclouds.rest.annotations.ResponseParser;
import org.jclouds.rest.annotations.VirtualHost;
import org.jclouds.rest.annotations.XMLResponseParser;
import org.jclouds.s3.S3Fallbacks.TrueOn404OrNotFoundFalseOnIllegalState;
import org.jclouds.s3.binders.BindACLToXMLPayload;
import org.jclouds.s3.binders.BindAsHostPrefixIfConfigured;
import org.jclouds.s3.binders.BindBucketLoggingToXmlPayload;
import org.jclouds.s3.binders.BindCannedAclToRequest;
import org.jclouds.s3.binders.BindIterableAsPayloadToDeleteRequest;
import org.jclouds.s3.binders.BindNoBucketLoggingToXmlPayload;
import org.jclouds.s3.binders.BindObjectMetadataToRequest;
import org.jclouds.s3.binders.BindOwnershipControlsToXMLPayload;
import org.jclouds.s3.binders.BindPartIdsAndETagsToRequest;
import org.jclouds.s3.binders.BindPayerToXmlPayload;
import org.jclouds.s3.binders.BindPublicAccessBlockConfigurationToXMLPayload;
import org.jclouds.s3.binders.BindS3ObjectMetadataToRequest;
import org.jclouds.s3.domain.AccessControlList;
import org.jclouds.s3.domain.BucketLogging;
import org.jclouds.s3.domain.BucketMetadata;
import org.jclouds.s3.domain.CannedAccessPolicy;
import org.jclouds.s3.domain.DeleteResult;
import org.jclouds.s3.domain.ListBucketResponse;
import org.jclouds.s3.domain.ListMultipartUploadResponse;
import org.jclouds.s3.domain.ListMultipartUploadsResponse;
import org.jclouds.s3.domain.ObjectMetadata;
import org.jclouds.s3.domain.Payer;
import org.jclouds.s3.domain.PublicAccessBlockConfiguration;
import org.jclouds.s3.domain.S3Object;
import org.jclouds.s3.fallbacks.FalseIfBucketAlreadyOwnedByYouOrOperationAbortedWhenBucketExists;
import org.jclouds.s3.filters.RequestAuthorizeSignature;
import org.jclouds.s3.functions.AssignCorrectHostnameForBucket;
import org.jclouds.s3.functions.BindRegionToXmlPayload;
import org.jclouds.s3.functions.DefaultEndpointThenInvalidateRegion;
import org.jclouds.s3.functions.ETagFromHttpResponseViaRegex;
import org.jclouds.s3.functions.ObjectKey;
import org.jclouds.s3.functions.ObjectMetadataKey;
import org.jclouds.s3.functions.ParseObjectFromHeadersAndHttpContent;
import org.jclouds.s3.functions.ParseObjectMetadataFromHeaders;
import org.jclouds.s3.functions.UploadIdFromHttpResponseViaRegex;
import org.jclouds.s3.options.CopyObjectOptions;
import org.jclouds.s3.options.ListBucketOptions;
import org.jclouds.s3.options.PutBucketOptions;
import org.jclouds.s3.options.PutObjectOptions;
import org.jclouds.s3.predicates.validators.BucketNameValidator;
import org.jclouds.s3.xml.AccessControlListHandler;
import org.jclouds.s3.xml.BucketLoggingHandler;
import org.jclouds.s3.xml.CopyObjectHandler;
import org.jclouds.s3.xml.DeleteResultHandler;
import org.jclouds.s3.xml.ListAllMyBucketsHandler;
import org.jclouds.s3.xml.ListBucketHandler;
import org.jclouds.s3.xml.ListMultipartUploadsHandler;
import org.jclouds.s3.xml.LocationConstraintHandler;
import org.jclouds.s3.xml.PartIdsFromHttpResponse;
import org.jclouds.s3.xml.PartIdsFromHttpResponseFull;
import org.jclouds.s3.xml.PayerHandler;
import com.google.common.annotations.Beta;
import com.google.inject.Provides;
/**
* Provides access to S3 via their REST API.
*/
@RequestFilters(RequestAuthorizeSignature.class)
@BlobScope(CONTAINER)
public interface S3Client extends Closeable {
   /**
    * Creates a default implementation of S3Object
    *
    * @return a new, unpopulated {@link S3Object} instance supplied by Guice injection
    */
   @Provides
   S3Object newS3Object();
/**
* Retrieves the S3Object associated with the Key or KeyNotFoundException if not available;
*
* <p/>
* To use GET, you must have READ access to the object. If READ access is granted to the
* anonymous user, you can request the object without an authorization header.
*
* <p />
* This command allows you to specify {@link GetOptions} to control delivery of content.
*
* <h2>Note</h2>
* If you specify any of the below options, you will receive partial content:
* <ul>
* <li>{@link GetOptions#range}</li>
* <li>{@link GetOptions#startAt}</li>
* <li>{@link GetOptions#tail}</li>
* </ul>
*
* @param bucketName
* namespace of the object you are retrieving
* @param key
* unique key in the s3Bucket identifying the object
* @return Future reference to a fully populated S3Object including data stored in S3
* or null if not present.
*
* @throws org.jclouds.http.HttpResponseException
* if the conditions requested set were not satisfied by the object on the server.
*/
@Named("GetObject")
@GET
@Path("/{key}")
@Fallback(NullOnKeyNotFound.class)
@ResponseParser(ParseObjectFromHeadersAndHttpContent.class)
S3Object getObject(@Bucket @EndpointParam(parser = AssignCorrectHostnameForBucket.class) @BinderParam(
BindAsHostPrefixIfConfigured.class) @ParamValidators(BucketNameValidator.class) String bucketName,
@PathParam("key") String key, GetOptions... options);
/**
* Retrieves the {@link org.jclouds.s3.domain.internal.BucketListObjectMetadata metadata} of
* the object associated with the key or null if not available.
*
* <p/>
* The HEAD operation is used to retrieve information about a specific object or object size,
* without actually fetching the object itself. This is useful if you're only interested in the
* object metadata, and don't want to waste bandwidth on the object data.
*
*
* @param bucketName namespace of the metadata you are retrieving
* @param key unique key in the s3Bucket identifying the object
* @return metadata associated with the key or null if not present.
*/
@Named("GetObject")
@HEAD
@Path("/{key}")
@Fallback(NullOnKeyNotFound.class)
@ResponseParser(ParseObjectMetadataFromHeaders.class)
ObjectMetadata headObject(@Bucket @EndpointParam(parser = AssignCorrectHostnameForBucket.class) @BinderParam(
BindAsHostPrefixIfConfigured.class) @ParamValidators(BucketNameValidator.class) String bucketName,
@PathParam("key") String key);
@Named("GetObject")
@HEAD
@Path("/{key}")
@Fallback(FalseOnKeyNotFound.class)
boolean objectExists(@Bucket @EndpointParam(parser = AssignCorrectHostnameForBucket.class) @BinderParam(
BindAsHostPrefixIfConfigured.class) @ParamValidators(BucketNameValidator.class) String bucketName,
@PathParam("key") String key);
/**
* Removes the object and metadata associated with the key.
* <p/>
* The DELETE request operation removes the specified object from Amazon S3. Once deleted, there
* is no method to restore or undelete an object.
*
*
* @param bucketName
* namespace of the object you are deleting
* @param key
* unique key in the s3Bucket identifying the object
* @throws org.jclouds.http.HttpResponseException
* if the bucket is not available
*/
@Named("DeleteObject")
@DELETE
@Path("/{key}")
@Fallback(VoidOnNotFoundOr404.class)
void deleteObject(@Bucket @EndpointParam(parser = AssignCorrectHostnameForBucket.class) @BinderParam(
BindAsHostPrefixIfConfigured.class) @ParamValidators(BucketNameValidator.class) String bucketName,
@PathParam("key") String key);
/**
* The Multi-Object Delete operation enables you to delete multiple objects from a bucket using a
* single HTTP request. If you know the object keys that you want to delete, then this operation
* provides a suitable alternative to sending individual delete requests (see DELETE Object),
* reducing per-request overhead.
*
* The Multi-Object Delete request contains a set of up to 1000 keys that you want to delete.
*
* If a key does not exist is considered to be deleted.
*
* The Multi-Object Delete operation supports two modes for the response; verbose and quiet.
* By default, the operation uses verbose mode in which the response includes the result of
* deletion of each key in your request.
*
* @param bucketName
* namespace of the objects you are deleting
* @param keys
* set of unique keys identifying objects
*/
@Named("DeleteObject")
@POST
@Path("/")
@QueryParams(keys = "delete")
@XMLResponseParser(DeleteResultHandler.class)
DeleteResult deleteObjects(@Bucket @EndpointParam(parser = AssignCorrectHostnameForBucket.class) @BinderParam(
BindAsHostPrefixIfConfigured.class) @ParamValidators(BucketNameValidator.class) String bucketName,
@BinderParam(BindIterableAsPayloadToDeleteRequest.class) Iterable<String> keys);
/**
* Store data by creating or overwriting an object.
* <p/>
* This method will store the object with the default <code>private</code acl.
*
* <p/>
* This returns a byte[] of the eTag hash of what Amazon S3 received
* <p />
*
* @param bucketName
* namespace of the object you are storing
* @param object
* contains the data and metadata to create or overwrite
* @param options
* options for creating the object
* @return ETag of the content uploaded
* @throws org.jclouds.http.HttpResponseException
* if the conditions requested set are not satisfied by the object on the server.
* @see org.jclouds.s3.domain.CannedAccessPolicy#PRIVATE
*/
@Named("PutObject")
@PUT
@Path("/{key}")
@Headers(keys = EXPECT, values = "100-continue")
@ResponseParser(ParseETagHeader.class)
String putObject(@Bucket @EndpointParam(parser = AssignCorrectHostnameForBucket.class) @BinderParam(
BindAsHostPrefixIfConfigured.class) @ParamValidators(BucketNameValidator.class) String bucketName,
@PathParam("key") @ParamParser(ObjectKey.class) @BinderParam(BindS3ObjectMetadataToRequest.class)
S3Object object, PutObjectOptions... options);
/**
* Create and name your own bucket in which to store your objects.
*
* <p/>
* you can use {@link PutBucketOptions} to create the bucket in EU.
* <p/>
* The PUT request operation with a bucket URI creates a new bucket. Depending on your latency
* and legal requirements, you can specify a location constraint that will affect where your data
* physically resides. You can currently specify a Europe (EU) location constraint via
* {@link PutBucketOptions}.
*
* @param options
* for creating your bucket
* @return true, if the bucket was created or false, if the container was already present
*
* @see PutBucketOptions
*/
@Named("CreateBucket")
@PUT
@Path("/")
@Endpoint(Bucket.class)
@Fallback(FalseIfBucketAlreadyOwnedByYouOrOperationAbortedWhenBucketExists.class)
boolean putBucketInRegion(@BinderParam(BindRegionToXmlPayload.class) @Nullable String region,
@Bucket @EndpointParam(parser = AssignCorrectHostnameForBucket.class)
@BinderParam(BindAsHostPrefixIfConfigured.class) @ParamValidators(BucketNameValidator.class)
String bucketName, PutBucketOptions... options);
/**
* Deletes the bucket, if it is empty.
* <p/>
* The DELETE request operation deletes the bucket named in the URI. All objects in the bucket
* must be deleted before the bucket itself can be deleted.
* <p />
* Only the owner of a bucket can delete it, regardless of the bucket's access control policy.
*
*
* @param bucketName what to delete
* @return false, if the bucket was not empty and therefore not deleted
*/
@Named("DeleteBucket")
@DELETE
@Path("/")
@Fallback(TrueOn404OrNotFoundFalseOnIllegalState.class)
boolean deleteBucketIfEmpty(@Bucket @EndpointParam(parser = DefaultEndpointThenInvalidateRegion.class) @BinderParam(
BindAsHostPrefixIfConfigured.class) @ParamValidators(BucketNameValidator.class) String bucketName);
/**
* Issues a HEAD command to determine if the bucket exists or not.
*/
@Named("BucketExists")
@HEAD
@Path("/")
@Fallback(FalseOnContainerNotFound.class)
boolean bucketExists(@Bucket @EndpointParam(parser = AssignCorrectHostnameForBucket.class) @BinderParam(
BindAsHostPrefixIfConfigured.class) @ParamValidators(BucketNameValidator.class) String bucketName);
/**
* Retrieve a {@code S3Bucket} listing. A GET request operation using a bucket URI lists
* information about the objects in the bucket. You can use {@link ListBucketOptions} to control
* the amount of S3Objects to return.
* <p />
* To list the keys of a bucket, you must have READ access to the bucket.
* <p/>
*
* @param bucketName namespace of the objects you wish to list
* @return potentially empty or partial list of the bucket.
* @see ListBucketOptions
*/
@Named("ListBucket")
@GET
@Path("/")
@XMLResponseParser(ListBucketHandler.class)
ListBucketResponse listBucket(@Bucket @EndpointParam(parser = AssignCorrectHostnameForBucket.class) @BinderParam(
BindAsHostPrefixIfConfigured.class) @ParamValidators(BucketNameValidator.class) String bucketName,
ListBucketOptions... options);
/**
* Returns a list of all of the buckets owned by the authenticated sender of the request.
*
* @return list of all of the buckets owned by the authenticated sender of the request.
*/
@Named("ListAllMyBuckets")
@GET
@XMLResponseParser(ListAllMyBucketsHandler.class)
@Path("/")
@VirtualHost
Set<BucketMetadata> listOwnedBuckets();
/**
* Copies one object to another bucket, retaining UserMetadata from the source. The destination
* will have a private acl. The copy operation creates a copy of an object that is already stored
* in Amazon S3.
* <p/>
* When copying an object, you can preserve all metadata (default) or
* {@link CopyObjectOptions#overrideMetadataWith(java.util.Map)} specify new
* metadata}. However, the ACL is not preserved and is set to private for the user making the
* request. To override the default ACL setting,
* {@link CopyObjectOptions#overrideAcl(org.jclouds.s3.domain.CannedAccessPolicy) specify a
* new ACL} when generating a copy request.
*
* @return metadata populated with lastModified and eTag of the new object
* @throws org.jclouds.http.HttpResponseException
* if the conditions requested set are not satisfied by the object on the server.
* @see CopyObjectOptions
* @see org.jclouds.s3.domain.CannedAccessPolicy
*/
@Named("PutObject")
@PUT
@Path("/{destinationObject}")
@Headers(keys = "x-amz-copy-source", values = "/{sourceBucket}/{sourceObject}", urlEncode = true)
@XMLResponseParser(CopyObjectHandler.class)
ObjectMetadata copyObject(@PathParam("sourceBucket") String sourceBucket,
@PathParam("sourceObject") String sourceObject,
@Bucket @EndpointParam(parser = AssignCorrectHostnameForBucket.class) @BinderParam(
BindAsHostPrefixIfConfigured.class) @ParamValidators(BucketNameValidator.class) String destinationBucket,
@PathParam("destinationObject") String destinationObject, CopyObjectOptions... options);
/**
*
* A GET request operation directed at an object or bucket URI with the "acl" parameter retrieves
* the Access Control List (ACL) settings for that S3 item.
* <p />
* To list a bucket's ACL, you must have READ_ACP access to the item.
*
* @return access permissions of the bucket
*/
@Named("GetBucketAcl")
@GET
@QueryParams(keys = "acl")
@XMLResponseParser(AccessControlListHandler.class)
@Fallback(ThrowContainerNotFoundOn404.class)
@Path("/")
AccessControlList getBucketACL(@Bucket @EndpointParam(parser = AssignCorrectHostnameForBucket.class) @BinderParam(
BindAsHostPrefixIfConfigured.class) @ParamValidators(BucketNameValidator.class) String bucketName);
/**
* Update a bucket's Access Control List settings.
* <p/>
* A PUT request operation directed at a bucket URI with the "acl" parameter sets the Access
* Control List (ACL) settings for that S3 item.
* <p />
* To set a bucket or object's ACL, you must have WRITE_ACP or FULL_CONTROL access to the item.
*
* @param bucketName
* the bucket whose Access Control List settings will be updated.
* @param acl
* the ACL to apply to the bucket. This acl object <strong>must</strong include a valid
* owner identifier string in {@link AccessControlList#getOwner()}.
* @return true if the bucket's Access Control List was updated successfully.
*/
@Named("PutBucketAcl")
@PUT
@Path("/")
@QueryParams(keys = "acl")
boolean putBucketACL(@Bucket @EndpointParam(parser = AssignCorrectHostnameForBucket.class) @BinderParam(
BindAsHostPrefixIfConfigured.class) @ParamValidators(BucketNameValidator.class) String bucketName,
@BinderParam(BindACLToXMLPayload.class) AccessControlList acl);
/**
* Update a bucket's Access Control List settings.
* <p/>
* A PUT request operation directed at a bucket URI with the "acl" parameter sets the Access
* Control List (ACL) settings for that S3 item.
* <p />
* To set a bucket or object's ACL, you must have WRITE_ACP or FULL_CONTROL access to the item.
*
* @param bucketName
* the bucket whose Access Control List settings will be updated.
* @param acl
* the ACL to apply to the bucket.
* @return true if the bucket's Access Control List was updated successfully.
*/
@Named("UpdateBucketCannedAcl")
@PUT
@Path("/")
@QueryParams(keys = "acl")
boolean updateBucketCannedACL(@Bucket @EndpointParam(parser = AssignCorrectHostnameForBucket.class) @BinderParam(
BindAsHostPrefixIfConfigured.class) @ParamValidators(BucketNameValidator.class) String bucketName,
@BinderParam(BindCannedAclToRequest.class) CannedAccessPolicy acl);
/**
* A GET request operation directed at an object or bucket URI with the "acl" parameter retrieves
* the Access Control List (ACL) settings for that S3 item.
* <p />
* To list a object's ACL, you must have READ_ACP access to the item.
*
* @return access permissions of the object
*/
@Named("GetObjectAcl")
@GET
@QueryParams(keys = "acl")
@Path("/{key}")
@XMLResponseParser(AccessControlListHandler.class)
@Fallback(ThrowKeyNotFoundOn404.class)
AccessControlList getObjectACL(@Bucket @EndpointParam(parser = AssignCorrectHostnameForBucket.class) @BinderParam(
BindAsHostPrefixIfConfigured.class) @ParamValidators(BucketNameValidator.class) String bucketName,
@PathParam("key") String key);
/**
* Update an object's Access Control List settings.
* <p/>
* A PUT request operation directed at an object URI with the "acl" parameter sets the Access
* Control List (ACL) settings for that S3 item.
* <p />
* To set a bucket or object's ACL, you must have WRITE_ACP or FULL_CONTROL access to the item.
*
* @param bucketName
* the bucket containing the object to be updated
* @param key
* the key of the object whose Access Control List settings will be updated.
* @param acl
* the ACL to apply to the object. This acl object <strong>must</strong include a valid
* owner identifier string in {@link AccessControlList#getOwner()}.
* @return true if the object's Access Control List was updated successfully.
*/
@Named("PutObjectAcl")
@PUT
@QueryParams(keys = "acl")
@Path("/{key}")
boolean putObjectACL(@Bucket @EndpointParam(parser = AssignCorrectHostnameForBucket.class) @BinderParam(
BindAsHostPrefixIfConfigured.class) @ParamValidators(BucketNameValidator.class) String bucketName,
@PathParam("key") String key, @BinderParam(BindACLToXMLPayload.class) AccessControlList acl);
/**
* Update an object's Access Control List settings.
* <p/>
* A PUT request operation directed at an object URI with the "acl" parameter sets the Access
* Control List (ACL) settings for that S3 item.
* <p />
* To set a bucket or object's ACL, you must have WRITE_ACP or FULL_CONTROL access to the item.
*
* @param bucketName
* the bucket containing the object to be updated
* @param key
* the key of the object whose Access Control List settings will be updated.
* @param acl
* the ACL to apply to the object.
* @return true if the object's Access Control List was updated successfully.
*/
@Named("UpdateObjectCannedAcl")
@PUT
@QueryParams(keys = "acl")
@Path("/{key}")
boolean updateObjectCannedACL(@Bucket @EndpointParam(parser = AssignCorrectHostnameForBucket.class) @BinderParam(
BindAsHostPrefixIfConfigured.class) @ParamValidators(BucketNameValidator.class) String bucketName,
@PathParam("key") String key, @BinderParam(BindCannedAclToRequest.class) CannedAccessPolicy acl);
/**
* A GET location request operation using a bucket URI lists the location constraint of the
* bucket.
* <p/>
* To view the location constraint of a bucket, you must be the bucket owner.
*
* @param bucketName
* the bucket you wish to know where exists
*
* @return location of the bucket
*/
@Named("GetBucketLocation")
@GET
@QueryParams(keys = "location")
@Path("/{bucket}")
@Endpoint(Bucket.class)
@XMLResponseParser(LocationConstraintHandler.class)
String getBucketLocation(@Bucket @PathParam("bucket") @ParamValidators(
BucketNameValidator.class) String bucketName);
/**
* A GET request operation on a requestPayment resource returns the request payment configuration
* of a bucket.
* <p/>
* Only the bucket owner has permissions to get this value.
*
* @param bucketName
* the bucket you wish to know the payer status
*
* @return {@link Payer#REQUESTER} for a Requester Pays bucket, and {@link Payer#BUCKET_OWNER},
* for a normal bucket.
*/
@Named("GetBucketRequestPayment")
@GET
@QueryParams(keys = "requestPayment")
@Path("/")
@XMLResponseParser(PayerHandler.class)
Payer getBucketPayer(@Bucket @EndpointParam(parser = AssignCorrectHostnameForBucket.class) @BinderParam(
BindAsHostPrefixIfConfigured.class) @ParamValidators(BucketNameValidator.class) String bucketName);
/**
* The PUT request operation with a requestPayment URI configures an existing bucket to be
* Requester Pays or not. To make a bucket a Requester Pays bucket, make the Payer value
* Requester. Otherwise, make the value BucketOwner.
* <p/>
* Only a bucket owner is allowed to configure a bucket. As a result any requests for this
* resource should be signed with the bucket owner's credentials. Anonymous requests are never
* allowed to create Requester Pays buckets.
*
* @param bucketName
* the bucket you wish to know the payer status
*
* @param payer
* {@link Payer#REQUESTER} for a Requester Pays bucket, and {@link Payer#BUCKET_OWNER},
* for a normal bucket.
*/
@Named("PutBucketRequestPayment")
@PUT
@QueryParams(keys = "requestPayment")
@Path("/")
void setBucketPayer(
@Bucket @EndpointParam(parser = AssignCorrectHostnameForBucket.class) @BinderParam(
BindAsHostPrefixIfConfigured.class) @ParamValidators(BucketNameValidator.class) String bucketName,
@BinderParam(BindPayerToXmlPayload.class) Payer payer);
/**
* Inspects the logging status for a bucket.
*
*
* @param bucketName
* the bucket you wish to know the logging status
* @return bucketLogging configuration or null, if not configured
*/
@Named("GetBucketLogging")
@GET
@QueryParams(keys = "logging")
@XMLResponseParser(BucketLoggingHandler.class)
@Fallback(ThrowContainerNotFoundOn404.class)
@Path("/")
BucketLogging getBucketLogging(@Bucket @EndpointParam(parser = AssignCorrectHostnameForBucket.class) @BinderParam(
BindAsHostPrefixIfConfigured.class) @ParamValidators(BucketNameValidator.class) String bucketName);
/**
* Enables logging for a bucket.
*
* @param bucketName
* the bucket you wish to enable logging for
* @param logging
* configuration including destination, prefix, and access rules
*/
@Named("PutBucketLogging")
@PUT
@Path("/")
@QueryParams(keys = "logging")
void enableBucketLogging(@Bucket @EndpointParam(parser = AssignCorrectHostnameForBucket.class) @BinderParam(
BindAsHostPrefixIfConfigured.class) @ParamValidators(BucketNameValidator.class) String bucketName,
@BinderParam(BindBucketLoggingToXmlPayload.class) BucketLogging logging);
/**
* Disables logging for a bucket.
*
* @param bucketName
* the bucket you wish to disable logging for
*/
@Named("PutBucketLogging")
@PUT
@Path("/")
@QueryParams(keys = "logging")
@Produces(MediaType.TEXT_XML)
void disableBucketLogging(@Bucket @EndpointParam(parser = AssignCorrectHostnameForBucket.class) @BinderParam(
BindNoBucketLoggingToXmlPayload.class) @ParamValidators(BucketNameValidator.class) String bucketName);
/**
* This operation initiates a multipart upload and returns an upload ID. This upload ID is used
* to associate all the parts in the specific multipart upload. You specify this upload ID in
* each of your subsequent upload part requests (see Upload Part). You also include this upload
* ID in the final request to either complete or abort the multipart upload request.
*
* <h4>Note</h4> If you create an object using the multipart upload APIs, currently you cannot
* copy the object between regions.
*
*
* @param bucketName
* namespace of the object you are to upload
* @param objectMetadata
* metadata around the object you wish to upload
* @param options
* controls optional parameters such as canned ACL
* @return ID for the initiated multipart upload.
*/
@Named("PutObject")
@POST
@QueryParams(keys = "uploads")
@Path("/{key}")
@ResponseParser(UploadIdFromHttpResponseViaRegex.class)
String initiateMultipartUpload(@Bucket @EndpointParam(parser = AssignCorrectHostnameForBucket.class) @BinderParam(
BindAsHostPrefixIfConfigured.class) @ParamValidators(BucketNameValidator.class) String bucketName,
@PathParam("key") @ParamParser(ObjectMetadataKey.class) @BinderParam(BindObjectMetadataToRequest.class)
ObjectMetadata objectMetadata, PutObjectOptions... options);
/**
* This operation aborts a multipart upload. After a multipart upload is aborted, no additional
* parts can be uploaded using that upload ID. The storage consumed by any previously uploaded
* parts will be freed. However, if any part uploads are currently in progress, those part
* uploads might or might not succeed. As a result, it might be necessary to abort a given
* multipart upload multiple times in order to completely free all storage consumed by all parts.
*
*
* @param bucketName
* namespace of the object you are deleting
* @param key
* unique key in the s3Bucket identifying the object
* @param uploadId
* id of the multipart upload in progress.
*/
@Named("AbortMultipartUpload")
@DELETE
@Path("/{key}")
@Fallback(VoidOnNotFoundOr404.class)
void abortMultipartUpload(@Bucket @EndpointParam(parser = AssignCorrectHostnameForBucket.class) @BinderParam(
BindAsHostPrefixIfConfigured.class) @ParamValidators(BucketNameValidator.class) String bucketName,
@PathParam("key") String key, @QueryParam("uploadId") String uploadId);
/**
* This operation uploads a part in a multipart upload. You must initiate a multipart upload (see
* Initiate Multipart Upload) before you can upload any part. In response to your initiate
* request. Amazon S3 returns an upload ID, a unique identifier, that you must include in your
* upload part request.
*
* <p/>
* Part numbers can be any number from 1 to 10,000, inclusive. A part number uniquely identifies
* a part and also defines its position within the object being created. If you upload a new part
* using the same part number that was used with a previous part, the previously uploaded part is
* overwritten. Each part must be at least 5 MB in size, except the last part. There is no size
* limit on the last part of your multipart upload.
*
* <p/>
* To ensure that data is not corrupted when traversing the network, specify the Content-MD5
* header in the upload part request. Amazon S3 checks the part data against the provided MD5
* value. If they do not match, Amazon S3 returns an error.
*
*
* @param bucketName
* namespace of the object you are storing
* @param key
* unique key in the s3Bucket identifying the object
* @param partNumber
* which part is this.
* @param uploadId
* id of the multipart upload in progress.
* @param part
* contains the data to create or overwrite
* @return ETag of the content uploaded
*/
@Named("PutObject")
@PUT
@Path("/{key}")
@ResponseParser(ParseETagHeader.class)
String uploadPart(@Bucket @EndpointParam(parser = AssignCorrectHostnameForBucket.class) @BinderParam(
BindAsHostPrefixIfConfigured.class) @ParamValidators(BucketNameValidator.class) String bucketName,
@PathParam("key") String key, @QueryParam("partNumber") int partNumber,
@QueryParam("uploadId") String uploadId, Payload part);
@Named("UploadPartCopy")
@PUT
@Path("/{key}")
@Headers(keys = {"x-amz-copy-source", "x-amz-copy-source-range"}, values = {"/{sourceBucket}/{sourceObject}", "bytes={startOffset}-{endOffset}"}, urlEncode = {true, false})
@ResponseParser(ETagFromHttpResponseViaRegex.class)
String uploadPartCopy(@Bucket @EndpointParam(parser = AssignCorrectHostnameForBucket.class) @BinderParam(
BindAsHostPrefixIfConfigured.class) @ParamValidators(BucketNameValidator.class) String bucketName,
@PathParam("key") String key, @QueryParam("partNumber") int partNumber,
@QueryParam("uploadId") String uploadId,
@PathParam("sourceBucket") String sourceBucket, @PathParam("sourceObject") String sourceObject,
@PathParam("startOffset") long startOffset, @PathParam("endOffset") long endOffset);
/**
*
This operation completes a multipart upload by assembling previously uploaded parts.
* <p/>
* You first initiate the multipart upload and then upload all parts using the Upload Parts
* operation (see Upload Part). After successfully uploading all relevant parts of an upload, you
* call this operation to complete the upload. Upon receiving this request, Amazon S3
* concatenates all the parts in ascending order by part number to create a new object. In the
* Complete Multipart Upload request, you must provide the parts list. For each part in the list,
* you must provide the part number and the ETag header value, returned after that part was
* uploaded.
* <p/>
* Processing of a Complete Multipart Upload request could take several minutes to complete.
* After Amazon S3 begins processing the request, it sends an HTTP response header that specifies
* a 200 OK response. While processing is in progress, Amazon S3 periodically sends whitespace
* characters to keep the connection from timing out. Because a request could fail after the
* initial 200 OK response has been sent, it is important that you check the response body to
* determine whether the request succeeded.
* <p/>
* Note that if Complete Multipart Upload fails, applications should be prepared to retry the
* failed requests.
*
* @param bucketName
* namespace of the object you are deleting
* @param key
* unique key in the s3Bucket identifying the object
* @param uploadId
* id of the multipart upload in progress.
* @param parts
* a map of part id to eTag from the {@link #uploadPart} command.
* @return ETag of the content uploaded
*/
@Named("PutObject")
@POST
@Path("/{key}")
@ResponseParser(ETagFromHttpResponseViaRegex.class)
String completeMultipartUpload(@Bucket @EndpointParam(parser = AssignCorrectHostnameForBucket.class) @BinderParam(
BindAsHostPrefixIfConfigured.class) @ParamValidators(BucketNameValidator.class) String bucketName,
@PathParam("key") String key, @QueryParam("uploadId") String uploadId,
@BinderParam(BindPartIdsAndETagsToRequest.class) Map<Integer, String> parts);
/** @deprecated see #listMultipartPartsFull */
@Deprecated
@Named("ListMultipartParts")
@GET
@Path("/{key}")
@XMLResponseParser(PartIdsFromHttpResponse.class)
Map<Integer, String> listMultipartParts(@Bucket @EndpointParam(parser = AssignCorrectHostnameForBucket.class)
@BinderParam(BindAsHostPrefixIfConfigured.class) @ParamValidators(BucketNameValidator.class) String bucketName,
@PathParam("key") String key, @QueryParam("uploadId") String uploadId);
@Beta
@Named("ListMultipartParts")
@GET
@Path("/{key}")
@XMLResponseParser(PartIdsFromHttpResponseFull.class)
Map<Integer, ListMultipartUploadResponse> listMultipartPartsFull(@Bucket @EndpointParam(parser = AssignCorrectHostnameForBucket.class)
@BinderParam(BindAsHostPrefixIfConfigured.class) @ParamValidators(BucketNameValidator.class) String bucketName,
@PathParam("key") String key, @QueryParam("uploadId") String uploadId);
@Named("ListMultipartUploads")
@GET
@Path("/")
@QueryParams(keys = "uploads")
@XMLResponseParser(ListMultipartUploadsHandler.class)
ListMultipartUploadsResponse listMultipartUploads(@Bucket @EndpointParam(parser = AssignCorrectHostnameForBucket.class)
@BinderParam(BindAsHostPrefixIfConfigured.class) @ParamValidators(BucketNameValidator.class) String bucketName,
@QueryParam("delimiter") @Nullable String delimiter, @QueryParam("max-uploads") @Nullable Integer maxUploads,
@QueryParam("key-marker") @Nullable String keyMarker, @QueryParam("prefix") @Nullable String prefix,
@QueryParam("upload-id-marker") @Nullable String uploadIdMarker);
@Named("PutBucketOwnershipControls")
@PUT
@Path("/")
@QueryParams(keys = "ownershipControls")
void putBucketOwnershipControls(@Bucket @EndpointParam(parser = AssignCorrectHostnameForBucket.class) @BinderParam(BindAsHostPrefixIfConfigured.class) @ParamValidators(BucketNameValidator.class) String bucketName,
// BucketOwnerPreferred | ObjectWriter | BucketOwnerEnforced
@BinderParam(BindOwnershipControlsToXMLPayload.class) String objectOwnership);
@Named("PutPublicAccessBlock")
@PUT
@Path("/")
@QueryParams(keys = "publicAccessBlock")
void putPublicAccessBlock(@Bucket @EndpointParam(parser = AssignCorrectHostnameForBucket.class) @BinderParam(BindAsHostPrefixIfConfigured.class) @ParamValidators(BucketNameValidator.class) String bucketName,
@BinderParam(BindPublicAccessBlockConfigurationToXMLPayload.class) PublicAccessBlockConfiguration configuration);
}
|
googleapis/google-cloud-java | 37,979 | java-dataform/proto-google-cloud-dataform-v1/src/main/java/com/google/cloud/dataform/v1/QueryDirectoryContentsResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/dataform/v1/dataform.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.dataform.v1;
/**
*
*
* <pre>
* `QueryDirectoryContents` response message.
* </pre>
*
* Protobuf type {@code google.cloud.dataform.v1.QueryDirectoryContentsResponse}
*/
public final class QueryDirectoryContentsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.dataform.v1.QueryDirectoryContentsResponse)
QueryDirectoryContentsResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use QueryDirectoryContentsResponse.newBuilder() to construct.
  // Constructor used by the generated Builder (via newBuilder()); field state is
  // carried by the Builder and handed to the GeneratedMessageV3 superclass.
  private QueryDirectoryContentsResponse(
      com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // No-arg constructor for the default instance: repeated and string fields are
  // initialized to their proto3 empty defaults.
  private QueryDirectoryContentsResponse() {
    directoryEntries_ = java.util.Collections.emptyList();
    nextPageToken_ = "";
  }
  // Invoked by the protobuf runtime to create fresh instances; the parameter exists
  // only to distinguish this overload and is intentionally unused.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new QueryDirectoryContentsResponse();
  }
  // Returns the message-type descriptor registered in DataformProto for this message.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.dataform.v1.DataformProto
        .internal_static_google_cloud_dataform_v1_QueryDirectoryContentsResponse_descriptor;
  }
  // Binds the generated field accessor table to this message class and its Builder,
  // enabling reflective field access by the protobuf runtime.
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.dataform.v1.DataformProto
        .internal_static_google_cloud_dataform_v1_QueryDirectoryContentsResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.dataform.v1.QueryDirectoryContentsResponse.class,
            com.google.cloud.dataform.v1.QueryDirectoryContentsResponse.Builder.class);
  }
  public static final int DIRECTORY_ENTRIES_FIELD_NUMBER = 1;

  // Backing list for the repeated directory_entries field; defaults to an empty list
  // (see the no-arg constructor).
  @SuppressWarnings("serial")
  private java.util.List<com.google.cloud.dataform.v1.DirectoryEntry> directoryEntries_;

  /**
   *
   *
   * <pre>
   * List of entries in the directory.
   * </pre>
   *
   * <code>repeated .google.cloud.dataform.v1.DirectoryEntry directory_entries = 1;</code>
   */
  @java.lang.Override
  public java.util.List<com.google.cloud.dataform.v1.DirectoryEntry> getDirectoryEntriesList() {
    // Returns the internal list as-is, without a defensive copy.
    return directoryEntries_;
  }
  /**
   *
   *
   * <pre>
   * List of entries in the directory.
   * </pre>
   *
   * <code>repeated .google.cloud.dataform.v1.DirectoryEntry directory_entries = 1;</code>
   */
  @java.lang.Override
  public java.util.List<? extends com.google.cloud.dataform.v1.DirectoryEntryOrBuilder>
      getDirectoryEntriesOrBuilderList() {
    // Same backing list as getDirectoryEntriesList(), widened to the OrBuilder view.
    return directoryEntries_;
  }
/**
*
*
* <pre>
* List of entries in the directory.
* </pre>
*
* <code>repeated .google.cloud.dataform.v1.DirectoryEntry directory_entries = 1;</code>
*/
@java.lang.Override
public int getDirectoryEntriesCount() {
return directoryEntries_.size();
}
/**
*
*
* <pre>
* List of entries in the directory.
* </pre>
*
* <code>repeated .google.cloud.dataform.v1.DirectoryEntry directory_entries = 1;</code>
*/
@java.lang.Override
public com.google.cloud.dataform.v1.DirectoryEntry getDirectoryEntries(int index) {
return directoryEntries_.get(index);
}
/**
*
*
* <pre>
* List of entries in the directory.
* </pre>
*
* <code>repeated .google.cloud.dataform.v1.DirectoryEntry directory_entries = 1;</code>
*/
@java.lang.Override
public com.google.cloud.dataform.v1.DirectoryEntryOrBuilder getDirectoryEntriesOrBuilder(
int index) {
return directoryEntries_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < directoryEntries_.size(); i++) {
output.writeMessage(1, directoryEntries_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < directoryEntries_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, directoryEntries_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.dataform.v1.QueryDirectoryContentsResponse)) {
return super.equals(obj);
}
com.google.cloud.dataform.v1.QueryDirectoryContentsResponse other =
(com.google.cloud.dataform.v1.QueryDirectoryContentsResponse) obj;
if (!getDirectoryEntriesList().equals(other.getDirectoryEntriesList())) return false;
if (!getNextPageToken().equals(other.getNextPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getDirectoryEntriesCount() > 0) {
hash = (37 * hash) + DIRECTORY_ENTRIES_FIELD_NUMBER;
hash = (53 * hash) + getDirectoryEntriesList().hashCode();
}
hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getNextPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.dataform.v1.QueryDirectoryContentsResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dataform.v1.QueryDirectoryContentsResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dataform.v1.QueryDirectoryContentsResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dataform.v1.QueryDirectoryContentsResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dataform.v1.QueryDirectoryContentsResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dataform.v1.QueryDirectoryContentsResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dataform.v1.QueryDirectoryContentsResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dataform.v1.QueryDirectoryContentsResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.dataform.v1.QueryDirectoryContentsResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.dataform.v1.QueryDirectoryContentsResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.dataform.v1.QueryDirectoryContentsResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dataform.v1.QueryDirectoryContentsResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.dataform.v1.QueryDirectoryContentsResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* `QueryDirectoryContents` response message.
* </pre>
*
* Protobuf type {@code google.cloud.dataform.v1.QueryDirectoryContentsResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.dataform.v1.QueryDirectoryContentsResponse)
com.google.cloud.dataform.v1.QueryDirectoryContentsResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.dataform.v1.DataformProto
.internal_static_google_cloud_dataform_v1_QueryDirectoryContentsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.dataform.v1.DataformProto
.internal_static_google_cloud_dataform_v1_QueryDirectoryContentsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.dataform.v1.QueryDirectoryContentsResponse.class,
com.google.cloud.dataform.v1.QueryDirectoryContentsResponse.Builder.class);
}
// Construct using com.google.cloud.dataform.v1.QueryDirectoryContentsResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (directoryEntriesBuilder_ == null) {
directoryEntries_ = java.util.Collections.emptyList();
} else {
directoryEntries_ = null;
directoryEntriesBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.dataform.v1.DataformProto
.internal_static_google_cloud_dataform_v1_QueryDirectoryContentsResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.dataform.v1.QueryDirectoryContentsResponse getDefaultInstanceForType() {
return com.google.cloud.dataform.v1.QueryDirectoryContentsResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.dataform.v1.QueryDirectoryContentsResponse build() {
com.google.cloud.dataform.v1.QueryDirectoryContentsResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.dataform.v1.QueryDirectoryContentsResponse buildPartial() {
com.google.cloud.dataform.v1.QueryDirectoryContentsResponse result =
new com.google.cloud.dataform.v1.QueryDirectoryContentsResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.cloud.dataform.v1.QueryDirectoryContentsResponse result) {
if (directoryEntriesBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
directoryEntries_ = java.util.Collections.unmodifiableList(directoryEntries_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.directoryEntries_ = directoryEntries_;
} else {
result.directoryEntries_ = directoryEntriesBuilder_.build();
}
}
private void buildPartial0(com.google.cloud.dataform.v1.QueryDirectoryContentsResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.dataform.v1.QueryDirectoryContentsResponse) {
return mergeFrom((com.google.cloud.dataform.v1.QueryDirectoryContentsResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.dataform.v1.QueryDirectoryContentsResponse other) {
if (other == com.google.cloud.dataform.v1.QueryDirectoryContentsResponse.getDefaultInstance())
return this;
if (directoryEntriesBuilder_ == null) {
if (!other.directoryEntries_.isEmpty()) {
if (directoryEntries_.isEmpty()) {
directoryEntries_ = other.directoryEntries_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureDirectoryEntriesIsMutable();
directoryEntries_.addAll(other.directoryEntries_);
}
onChanged();
}
} else {
if (!other.directoryEntries_.isEmpty()) {
if (directoryEntriesBuilder_.isEmpty()) {
directoryEntriesBuilder_.dispose();
directoryEntriesBuilder_ = null;
directoryEntries_ = other.directoryEntries_;
bitField0_ = (bitField0_ & ~0x00000001);
directoryEntriesBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getDirectoryEntriesFieldBuilder()
: null;
} else {
directoryEntriesBuilder_.addAllMessages(other.directoryEntries_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.cloud.dataform.v1.DirectoryEntry m =
input.readMessage(
com.google.cloud.dataform.v1.DirectoryEntry.parser(), extensionRegistry);
if (directoryEntriesBuilder_ == null) {
ensureDirectoryEntriesIsMutable();
directoryEntries_.add(m);
} else {
directoryEntriesBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.cloud.dataform.v1.DirectoryEntry> directoryEntries_ =
java.util.Collections.emptyList();
private void ensureDirectoryEntriesIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
directoryEntries_ =
new java.util.ArrayList<com.google.cloud.dataform.v1.DirectoryEntry>(directoryEntries_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.dataform.v1.DirectoryEntry,
com.google.cloud.dataform.v1.DirectoryEntry.Builder,
com.google.cloud.dataform.v1.DirectoryEntryOrBuilder>
directoryEntriesBuilder_;
/**
*
*
* <pre>
* List of entries in the directory.
* </pre>
*
* <code>repeated .google.cloud.dataform.v1.DirectoryEntry directory_entries = 1;</code>
*/
public java.util.List<com.google.cloud.dataform.v1.DirectoryEntry> getDirectoryEntriesList() {
if (directoryEntriesBuilder_ == null) {
return java.util.Collections.unmodifiableList(directoryEntries_);
} else {
return directoryEntriesBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* List of entries in the directory.
* </pre>
*
* <code>repeated .google.cloud.dataform.v1.DirectoryEntry directory_entries = 1;</code>
*/
public int getDirectoryEntriesCount() {
if (directoryEntriesBuilder_ == null) {
return directoryEntries_.size();
} else {
return directoryEntriesBuilder_.getCount();
}
}
/**
*
*
* <pre>
* List of entries in the directory.
* </pre>
*
* <code>repeated .google.cloud.dataform.v1.DirectoryEntry directory_entries = 1;</code>
*/
public com.google.cloud.dataform.v1.DirectoryEntry getDirectoryEntries(int index) {
if (directoryEntriesBuilder_ == null) {
return directoryEntries_.get(index);
} else {
return directoryEntriesBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* List of entries in the directory.
* </pre>
*
* <code>repeated .google.cloud.dataform.v1.DirectoryEntry directory_entries = 1;</code>
*/
public Builder setDirectoryEntries(
int index, com.google.cloud.dataform.v1.DirectoryEntry value) {
if (directoryEntriesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureDirectoryEntriesIsMutable();
directoryEntries_.set(index, value);
onChanged();
} else {
directoryEntriesBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* List of entries in the directory.
* </pre>
*
* <code>repeated .google.cloud.dataform.v1.DirectoryEntry directory_entries = 1;</code>
*/
public Builder setDirectoryEntries(
int index, com.google.cloud.dataform.v1.DirectoryEntry.Builder builderForValue) {
if (directoryEntriesBuilder_ == null) {
ensureDirectoryEntriesIsMutable();
directoryEntries_.set(index, builderForValue.build());
onChanged();
} else {
directoryEntriesBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* List of entries in the directory.
* </pre>
*
* <code>repeated .google.cloud.dataform.v1.DirectoryEntry directory_entries = 1;</code>
*/
public Builder addDirectoryEntries(com.google.cloud.dataform.v1.DirectoryEntry value) {
if (directoryEntriesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureDirectoryEntriesIsMutable();
directoryEntries_.add(value);
onChanged();
} else {
directoryEntriesBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* List of entries in the directory.
* </pre>
*
* <code>repeated .google.cloud.dataform.v1.DirectoryEntry directory_entries = 1;</code>
*/
public Builder addDirectoryEntries(
int index, com.google.cloud.dataform.v1.DirectoryEntry value) {
if (directoryEntriesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureDirectoryEntriesIsMutable();
directoryEntries_.add(index, value);
onChanged();
} else {
directoryEntriesBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* List of entries in the directory.
* </pre>
*
* <code>repeated .google.cloud.dataform.v1.DirectoryEntry directory_entries = 1;</code>
*/
public Builder addDirectoryEntries(
com.google.cloud.dataform.v1.DirectoryEntry.Builder builderForValue) {
if (directoryEntriesBuilder_ == null) {
ensureDirectoryEntriesIsMutable();
directoryEntries_.add(builderForValue.build());
onChanged();
} else {
directoryEntriesBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* List of entries in the directory.
* </pre>
*
* <code>repeated .google.cloud.dataform.v1.DirectoryEntry directory_entries = 1;</code>
*/
public Builder addDirectoryEntries(
int index, com.google.cloud.dataform.v1.DirectoryEntry.Builder builderForValue) {
if (directoryEntriesBuilder_ == null) {
ensureDirectoryEntriesIsMutable();
directoryEntries_.add(index, builderForValue.build());
onChanged();
} else {
directoryEntriesBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* List of entries in the directory.
* </pre>
*
* <code>repeated .google.cloud.dataform.v1.DirectoryEntry directory_entries = 1;</code>
*/
public Builder addAllDirectoryEntries(
java.lang.Iterable<? extends com.google.cloud.dataform.v1.DirectoryEntry> values) {
if (directoryEntriesBuilder_ == null) {
ensureDirectoryEntriesIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, directoryEntries_);
onChanged();
} else {
directoryEntriesBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* List of entries in the directory.
* </pre>
*
* <code>repeated .google.cloud.dataform.v1.DirectoryEntry directory_entries = 1;</code>
*/
public Builder clearDirectoryEntries() {
if (directoryEntriesBuilder_ == null) {
directoryEntries_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
directoryEntriesBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* List of entries in the directory.
* </pre>
*
* <code>repeated .google.cloud.dataform.v1.DirectoryEntry directory_entries = 1;</code>
*/
public Builder removeDirectoryEntries(int index) {
if (directoryEntriesBuilder_ == null) {
ensureDirectoryEntriesIsMutable();
directoryEntries_.remove(index);
onChanged();
} else {
directoryEntriesBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* List of entries in the directory.
* </pre>
*
* <code>repeated .google.cloud.dataform.v1.DirectoryEntry directory_entries = 1;</code>
*/
public com.google.cloud.dataform.v1.DirectoryEntry.Builder getDirectoryEntriesBuilder(
int index) {
return getDirectoryEntriesFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* List of entries in the directory.
* </pre>
*
* <code>repeated .google.cloud.dataform.v1.DirectoryEntry directory_entries = 1;</code>
*/
public com.google.cloud.dataform.v1.DirectoryEntryOrBuilder getDirectoryEntriesOrBuilder(
int index) {
if (directoryEntriesBuilder_ == null) {
return directoryEntries_.get(index);
} else {
return directoryEntriesBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* List of entries in the directory.
* </pre>
*
* <code>repeated .google.cloud.dataform.v1.DirectoryEntry directory_entries = 1;</code>
*/
public java.util.List<? extends com.google.cloud.dataform.v1.DirectoryEntryOrBuilder>
getDirectoryEntriesOrBuilderList() {
if (directoryEntriesBuilder_ != null) {
return directoryEntriesBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(directoryEntries_);
}
}
/**
*
*
* <pre>
* List of entries in the directory.
* </pre>
*
* <code>repeated .google.cloud.dataform.v1.DirectoryEntry directory_entries = 1;</code>
*/
public com.google.cloud.dataform.v1.DirectoryEntry.Builder addDirectoryEntriesBuilder() {
return getDirectoryEntriesFieldBuilder()
.addBuilder(com.google.cloud.dataform.v1.DirectoryEntry.getDefaultInstance());
}
/**
*
*
* <pre>
* List of entries in the directory.
* </pre>
*
* <code>repeated .google.cloud.dataform.v1.DirectoryEntry directory_entries = 1;</code>
*/
public com.google.cloud.dataform.v1.DirectoryEntry.Builder addDirectoryEntriesBuilder(
int index) {
return getDirectoryEntriesFieldBuilder()
.addBuilder(index, com.google.cloud.dataform.v1.DirectoryEntry.getDefaultInstance());
}
/**
*
*
* <pre>
* List of entries in the directory.
* </pre>
*
* <code>repeated .google.cloud.dataform.v1.DirectoryEntry directory_entries = 1;</code>
*/
public java.util.List<com.google.cloud.dataform.v1.DirectoryEntry.Builder>
getDirectoryEntriesBuilderList() {
return getDirectoryEntriesFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.dataform.v1.DirectoryEntry,
com.google.cloud.dataform.v1.DirectoryEntry.Builder,
com.google.cloud.dataform.v1.DirectoryEntryOrBuilder>
getDirectoryEntriesFieldBuilder() {
if (directoryEntriesBuilder_ == null) {
directoryEntriesBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.dataform.v1.DirectoryEntry,
com.google.cloud.dataform.v1.DirectoryEntry.Builder,
com.google.cloud.dataform.v1.DirectoryEntryOrBuilder>(
directoryEntries_,
((bitField0_ & 0x00000001) != 0),
getParentForChildren(),
isClean());
directoryEntries_ = null;
}
return directoryEntriesBuilder_;
}
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.dataform.v1.QueryDirectoryContentsResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.dataform.v1.QueryDirectoryContentsResponse)
private static final com.google.cloud.dataform.v1.QueryDirectoryContentsResponse DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.dataform.v1.QueryDirectoryContentsResponse();
}
public static com.google.cloud.dataform.v1.QueryDirectoryContentsResponse getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<QueryDirectoryContentsResponse> PARSER =
new com.google.protobuf.AbstractParser<QueryDirectoryContentsResponse>() {
@java.lang.Override
public QueryDirectoryContentsResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<QueryDirectoryContentsResponse> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<QueryDirectoryContentsResponse> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.dataform.v1.QueryDirectoryContentsResponse getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 37,946 | java-shopping-merchant-accounts/proto-google-shopping-merchant-accounts-v1/src/main/java/com/google/shopping/merchant/accounts/v1/ListSubAccountsResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/shopping/merchant/accounts/v1/accounts.proto
// Protobuf Java Version: 3.25.8
package com.google.shopping.merchant.accounts.v1;
/**
*
*
* <pre>
* Response message for the `ListSubAccounts` method.
* </pre>
*
* Protobuf type {@code google.shopping.merchant.accounts.v1.ListSubAccountsResponse}
*/
public final class ListSubAccountsResponse extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.shopping.merchant.accounts.v1.ListSubAccountsResponse)
    ListSubAccountsResponseOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use ListSubAccountsResponse.newBuilder() to construct.
  private ListSubAccountsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Default-instance constructor: initializes all fields to their proto3 defaults
  // (empty repeated list, empty string).
  private ListSubAccountsResponse() {
    accounts_ = java.util.Collections.emptyList();
    nextPageToken_ = "";
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListSubAccountsResponse();
  }
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.shopping.merchant.accounts.v1.AccountsProto
        .internal_static_google_shopping_merchant_accounts_v1_ListSubAccountsResponse_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.shopping.merchant.accounts.v1.AccountsProto
        .internal_static_google_shopping_merchant_accounts_v1_ListSubAccountsResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.shopping.merchant.accounts.v1.ListSubAccountsResponse.class,
            com.google.shopping.merchant.accounts.v1.ListSubAccountsResponse.Builder.class);
  }
  public static final int ACCOUNTS_FIELD_NUMBER = 1;
  // Backing storage for the repeated `accounts` field. Once the message is built
  // this list is unmodifiable (see Builder.buildPartialRepeatedFields).
  @SuppressWarnings("serial")
  private java.util.List<com.google.shopping.merchant.accounts.v1.Account> accounts_;
  /**
   *
   *
   * <pre>
   * The accounts for which the given parent account is an aggregator.
   * </pre>
   *
   * <code>repeated .google.shopping.merchant.accounts.v1.Account accounts = 1;</code>
   */
  @java.lang.Override
  public java.util.List<com.google.shopping.merchant.accounts.v1.Account> getAccountsList() {
    return accounts_;
  }
  /**
   *
   *
   * <pre>
   * The accounts for which the given parent account is an aggregator.
   * </pre>
   *
   * <code>repeated .google.shopping.merchant.accounts.v1.Account accounts = 1;</code>
   */
  @java.lang.Override
  public java.util.List<? extends com.google.shopping.merchant.accounts.v1.AccountOrBuilder>
      getAccountsOrBuilderList() {
    return accounts_;
  }
  /**
   *
   *
   * <pre>
   * The accounts for which the given parent account is an aggregator.
   * </pre>
   *
   * <code>repeated .google.shopping.merchant.accounts.v1.Account accounts = 1;</code>
   */
  @java.lang.Override
  public int getAccountsCount() {
    return accounts_.size();
  }
  /**
   *
   *
   * <pre>
   * The accounts for which the given parent account is an aggregator.
   * </pre>
   *
   * <code>repeated .google.shopping.merchant.accounts.v1.Account accounts = 1;</code>
   */
  @java.lang.Override
  public com.google.shopping.merchant.accounts.v1.Account getAccounts(int index) {
    return accounts_.get(index);
  }
  /**
   *
   *
   * <pre>
   * The accounts for which the given parent account is an aggregator.
   * </pre>
   *
   * <code>repeated .google.shopping.merchant.accounts.v1.Account accounts = 1;</code>
   */
  @java.lang.Override
  public com.google.shopping.merchant.accounts.v1.AccountOrBuilder getAccountsOrBuilder(int index) {
    return accounts_.get(index);
  }
  public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
  // Holds either a java.lang.String or a com.google.protobuf.ByteString; the
  // getters below lazily convert and cache the other representation. volatile so
  // the cached conversion is safely published across threads.
  @SuppressWarnings("serial")
  private volatile java.lang.Object nextPageToken_ = "";
  /**
   *
   *
   * <pre>
   * A token, which can be sent as `page_token` to retrieve the next page.
   * If this field is omitted, there are no subsequent pages.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The nextPageToken.
   */
  @java.lang.Override
  public java.lang.String getNextPageToken() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      // Cache the decoded String so subsequent calls skip the UTF-8 conversion.
      nextPageToken_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * A token, which can be sent as `page_token` to retrieve the next page.
   * If this field is omitted, there are no subsequent pages.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The bytes for nextPageToken.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getNextPageTokenBytes() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      // Cache the encoded ByteString for subsequent calls.
      nextPageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // -1 = not computed yet, 0 = not initialized, 1 = initialized. This message has
  // no required fields, so the check trivially succeeds.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes fields in field-number order; empty string fields are skipped per
  // proto3 semantics.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    for (int i = 0; i < accounts_.size(); i++) {
      output.writeMessage(1, accounts_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // memoizedSize caches the computed size; -1 means "not yet computed".
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    for (int i = 0; i < accounts_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, accounts_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.shopping.merchant.accounts.v1.ListSubAccountsResponse)) {
      return super.equals(obj);
    }
    com.google.shopping.merchant.accounts.v1.ListSubAccountsResponse other =
        (com.google.shopping.merchant.accounts.v1.ListSubAccountsResponse) obj;
    if (!getAccountsList().equals(other.getAccountsList())) return false;
    if (!getNextPageToken().equals(other.getNextPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getAccountsCount() > 0) {
      hash = (37 * hash) + ACCOUNTS_FIELD_NUMBER;
      hash = (53 * hash) + getAccountsList().hashCode();
    }
    hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getNextPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  public static com.google.shopping.merchant.accounts.v1.ListSubAccountsResponse parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.shopping.merchant.accounts.v1.ListSubAccountsResponse parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.shopping.merchant.accounts.v1.ListSubAccountsResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.shopping.merchant.accounts.v1.ListSubAccountsResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.shopping.merchant.accounts.v1.ListSubAccountsResponse parseFrom(
      byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.shopping.merchant.accounts.v1.ListSubAccountsResponse parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.shopping.merchant.accounts.v1.ListSubAccountsResponse parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.shopping.merchant.accounts.v1.ListSubAccountsResponse parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.shopping.merchant.accounts.v1.ListSubAccountsResponse parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.shopping.merchant.accounts.v1.ListSubAccountsResponse parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.shopping.merchant.accounts.v1.ListSubAccountsResponse parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.shopping.merchant.accounts.v1.ListSubAccountsResponse parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(
      com.google.shopping.merchant.accounts.v1.ListSubAccountsResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Response message for the `ListSubAccounts` method.
   * </pre>
   *
   * Protobuf type {@code google.shopping.merchant.accounts.v1.ListSubAccountsResponse}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.shopping.merchant.accounts.v1.ListSubAccountsResponse)
      com.google.shopping.merchant.accounts.v1.ListSubAccountsResponseOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.shopping.merchant.accounts.v1.AccountsProto
          .internal_static_google_shopping_merchant_accounts_v1_ListSubAccountsResponse_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.shopping.merchant.accounts.v1.AccountsProto
          .internal_static_google_shopping_merchant_accounts_v1_ListSubAccountsResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.shopping.merchant.accounts.v1.ListSubAccountsResponse.class,
              com.google.shopping.merchant.accounts.v1.ListSubAccountsResponse.Builder.class);
    }
    // Construct using com.google.shopping.merchant.accounts.v1.ListSubAccountsResponse.newBuilder()
    private Builder() {}
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (accountsBuilder_ == null) {
        accounts_ = java.util.Collections.emptyList();
      } else {
        accounts_ = null;
        accountsBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      nextPageToken_ = "";
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.shopping.merchant.accounts.v1.AccountsProto
          .internal_static_google_shopping_merchant_accounts_v1_ListSubAccountsResponse_descriptor;
    }
    @java.lang.Override
    public com.google.shopping.merchant.accounts.v1.ListSubAccountsResponse
        getDefaultInstanceForType() {
      return com.google.shopping.merchant.accounts.v1.ListSubAccountsResponse.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.shopping.merchant.accounts.v1.ListSubAccountsResponse build() {
      com.google.shopping.merchant.accounts.v1.ListSubAccountsResponse result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.shopping.merchant.accounts.v1.ListSubAccountsResponse buildPartial() {
      com.google.shopping.merchant.accounts.v1.ListSubAccountsResponse result =
          new com.google.shopping.merchant.accounts.v1.ListSubAccountsResponse(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Transfers the repeated accounts field into the result. When the builder owns
    // the plain list, it is frozen (made unmodifiable) and the ownership bit is
    // cleared so further builder mutations copy-on-write.
    private void buildPartialRepeatedFields(
        com.google.shopping.merchant.accounts.v1.ListSubAccountsResponse result) {
      if (accountsBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)) {
          accounts_ = java.util.Collections.unmodifiableList(accounts_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.accounts_ = accounts_;
      } else {
        result.accounts_ = accountsBuilder_.build();
      }
    }
    private void buildPartial0(
        com.google.shopping.merchant.accounts.v1.ListSubAccountsResponse result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.nextPageToken_ = nextPageToken_;
      }
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.shopping.merchant.accounts.v1.ListSubAccountsResponse) {
        return mergeFrom((com.google.shopping.merchant.accounts.v1.ListSubAccountsResponse) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    public Builder mergeFrom(
        com.google.shopping.merchant.accounts.v1.ListSubAccountsResponse other) {
      if (other
          == com.google.shopping.merchant.accounts.v1.ListSubAccountsResponse.getDefaultInstance())
        return this;
      if (accountsBuilder_ == null) {
        if (!other.accounts_.isEmpty()) {
          if (accounts_.isEmpty()) {
            // Share the other message's (immutable) list directly; the cleared
            // ownership bit forces a copy before any local mutation.
            accounts_ = other.accounts_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureAccountsIsMutable();
            accounts_.addAll(other.accounts_);
          }
          onChanged();
        }
      } else {
        if (!other.accounts_.isEmpty()) {
          if (accountsBuilder_.isEmpty()) {
            accountsBuilder_.dispose();
            accountsBuilder_ = null;
            accounts_ = other.accounts_;
            bitField0_ = (bitField0_ & ~0x00000001);
            accountsBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getAccountsFieldBuilder()
                    : null;
          } else {
            accountsBuilder_.addAllMessages(other.accounts_);
          }
        }
      }
      if (!other.getNextPageToken().isEmpty()) {
        nextPageToken_ = other.nextPageToken_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Parses from the wire format; tags 10 and 18 are field 1 (accounts, message)
    // and field 2 (next_page_token, string). Unknown fields are preserved.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                com.google.shopping.merchant.accounts.v1.Account m =
                    input.readMessage(
                        com.google.shopping.merchant.accounts.v1.Account.parser(),
                        extensionRegistry);
                if (accountsBuilder_ == null) {
                  ensureAccountsIsMutable();
                  accounts_.add(m);
                } else {
                  accountsBuilder_.addMessage(m);
                }
                break;
              } // case 10
            case 18:
              {
                nextPageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Bit 0x00000001: this builder owns a mutable accounts_ list.
    // Bit 0x00000002: nextPageToken_ has been explicitly set.
    private int bitField0_;
    private java.util.List<com.google.shopping.merchant.accounts.v1.Account> accounts_ =
        java.util.Collections.emptyList();
    // Copy-on-write: replaces a shared/immutable list with a private ArrayList
    // before the first local mutation.
    private void ensureAccountsIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        accounts_ =
            new java.util.ArrayList<com.google.shopping.merchant.accounts.v1.Account>(accounts_);
        bitField0_ |= 0x00000001;
      }
    }
    // Lazily-created helper for nested-builder access to the repeated field; while
    // non-null it is the authoritative storage and accounts_ is ignored.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.shopping.merchant.accounts.v1.Account,
            com.google.shopping.merchant.accounts.v1.Account.Builder,
            com.google.shopping.merchant.accounts.v1.AccountOrBuilder>
        accountsBuilder_;
    /**
     *
     *
     * <pre>
     * The accounts for which the given parent account is an aggregator.
     * </pre>
     *
     * <code>repeated .google.shopping.merchant.accounts.v1.Account accounts = 1;</code>
     */
    public java.util.List<com.google.shopping.merchant.accounts.v1.Account> getAccountsList() {
      if (accountsBuilder_ == null) {
        return java.util.Collections.unmodifiableList(accounts_);
      } else {
        return accountsBuilder_.getMessageList();
      }
    }
    /**
     *
     *
     * <pre>
     * The accounts for which the given parent account is an aggregator.
     * </pre>
     *
     * <code>repeated .google.shopping.merchant.accounts.v1.Account accounts = 1;</code>
     */
    public int getAccountsCount() {
      if (accountsBuilder_ == null) {
        return accounts_.size();
      } else {
        return accountsBuilder_.getCount();
      }
    }
    /**
     *
     *
     * <pre>
     * The accounts for which the given parent account is an aggregator.
     * </pre>
     *
     * <code>repeated .google.shopping.merchant.accounts.v1.Account accounts = 1;</code>
     */
    public com.google.shopping.merchant.accounts.v1.Account getAccounts(int index) {
      if (accountsBuilder_ == null) {
        return accounts_.get(index);
      } else {
        return accountsBuilder_.getMessage(index);
      }
    }
    /**
     *
     *
     * <pre>
     * The accounts for which the given parent account is an aggregator.
     * </pre>
     *
     * <code>repeated .google.shopping.merchant.accounts.v1.Account accounts = 1;</code>
     */
    public Builder setAccounts(int index, com.google.shopping.merchant.accounts.v1.Account value) {
      if (accountsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureAccountsIsMutable();
        accounts_.set(index, value);
        onChanged();
      } else {
        accountsBuilder_.setMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The accounts for which the given parent account is an aggregator.
     * </pre>
     *
     * <code>repeated .google.shopping.merchant.accounts.v1.Account accounts = 1;</code>
     */
    public Builder setAccounts(
        int index, com.google.shopping.merchant.accounts.v1.Account.Builder builderForValue) {
      if (accountsBuilder_ == null) {
        ensureAccountsIsMutable();
        accounts_.set(index, builderForValue.build());
        onChanged();
      } else {
        accountsBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The accounts for which the given parent account is an aggregator.
     * </pre>
     *
     * <code>repeated .google.shopping.merchant.accounts.v1.Account accounts = 1;</code>
     */
    public Builder addAccounts(com.google.shopping.merchant.accounts.v1.Account value) {
      if (accountsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureAccountsIsMutable();
        accounts_.add(value);
        onChanged();
      } else {
        accountsBuilder_.addMessage(value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The accounts for which the given parent account is an aggregator.
     * </pre>
     *
     * <code>repeated .google.shopping.merchant.accounts.v1.Account accounts = 1;</code>
     */
    public Builder addAccounts(int index, com.google.shopping.merchant.accounts.v1.Account value) {
      if (accountsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureAccountsIsMutable();
        accounts_.add(index, value);
        onChanged();
      } else {
        accountsBuilder_.addMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The accounts for which the given parent account is an aggregator.
     * </pre>
     *
     * <code>repeated .google.shopping.merchant.accounts.v1.Account accounts = 1;</code>
     */
    public Builder addAccounts(
        com.google.shopping.merchant.accounts.v1.Account.Builder builderForValue) {
      if (accountsBuilder_ == null) {
        ensureAccountsIsMutable();
        accounts_.add(builderForValue.build());
        onChanged();
      } else {
        accountsBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The accounts for which the given parent account is an aggregator.
     * </pre>
     *
     * <code>repeated .google.shopping.merchant.accounts.v1.Account accounts = 1;</code>
     */
    public Builder addAccounts(
        int index, com.google.shopping.merchant.accounts.v1.Account.Builder builderForValue) {
      if (accountsBuilder_ == null) {
        ensureAccountsIsMutable();
        accounts_.add(index, builderForValue.build());
        onChanged();
      } else {
        accountsBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The accounts for which the given parent account is an aggregator.
     * </pre>
     *
     * <code>repeated .google.shopping.merchant.accounts.v1.Account accounts = 1;</code>
     */
    public Builder addAllAccounts(
        java.lang.Iterable<? extends com.google.shopping.merchant.accounts.v1.Account> values) {
      if (accountsBuilder_ == null) {
        ensureAccountsIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, accounts_);
        onChanged();
      } else {
        accountsBuilder_.addAllMessages(values);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The accounts for which the given parent account is an aggregator.
     * </pre>
     *
     * <code>repeated .google.shopping.merchant.accounts.v1.Account accounts = 1;</code>
     */
    public Builder clearAccounts() {
      if (accountsBuilder_ == null) {
        accounts_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        accountsBuilder_.clear();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The accounts for which the given parent account is an aggregator.
     * </pre>
     *
     * <code>repeated .google.shopping.merchant.accounts.v1.Account accounts = 1;</code>
     */
    public Builder removeAccounts(int index) {
      if (accountsBuilder_ == null) {
        ensureAccountsIsMutable();
        accounts_.remove(index);
        onChanged();
      } else {
        accountsBuilder_.remove(index);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The accounts for which the given parent account is an aggregator.
     * </pre>
     *
     * <code>repeated .google.shopping.merchant.accounts.v1.Account accounts = 1;</code>
     */
    public com.google.shopping.merchant.accounts.v1.Account.Builder getAccountsBuilder(int index) {
      return getAccountsFieldBuilder().getBuilder(index);
    }
    /**
     *
     *
     * <pre>
     * The accounts for which the given parent account is an aggregator.
     * </pre>
     *
     * <code>repeated .google.shopping.merchant.accounts.v1.Account accounts = 1;</code>
     */
    public com.google.shopping.merchant.accounts.v1.AccountOrBuilder getAccountsOrBuilder(
        int index) {
      if (accountsBuilder_ == null) {
        return accounts_.get(index);
      } else {
        return accountsBuilder_.getMessageOrBuilder(index);
      }
    }
    /**
     *
     *
     * <pre>
     * The accounts for which the given parent account is an aggregator.
     * </pre>
     *
     * <code>repeated .google.shopping.merchant.accounts.v1.Account accounts = 1;</code>
     */
    public java.util.List<? extends com.google.shopping.merchant.accounts.v1.AccountOrBuilder>
        getAccountsOrBuilderList() {
      if (accountsBuilder_ != null) {
        return accountsBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(accounts_);
      }
    }
    /**
     *
     *
     * <pre>
     * The accounts for which the given parent account is an aggregator.
     * </pre>
     *
     * <code>repeated .google.shopping.merchant.accounts.v1.Account accounts = 1;</code>
     */
    public com.google.shopping.merchant.accounts.v1.Account.Builder addAccountsBuilder() {
      return getAccountsFieldBuilder()
          .addBuilder(com.google.shopping.merchant.accounts.v1.Account.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * The accounts for which the given parent account is an aggregator.
     * </pre>
     *
     * <code>repeated .google.shopping.merchant.accounts.v1.Account accounts = 1;</code>
     */
    public com.google.shopping.merchant.accounts.v1.Account.Builder addAccountsBuilder(int index) {
      return getAccountsFieldBuilder()
          .addBuilder(index, com.google.shopping.merchant.accounts.v1.Account.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * The accounts for which the given parent account is an aggregator.
     * </pre>
     *
     * <code>repeated .google.shopping.merchant.accounts.v1.Account accounts = 1;</code>
     */
    public java.util.List<com.google.shopping.merchant.accounts.v1.Account.Builder>
        getAccountsBuilderList() {
      return getAccountsFieldBuilder().getBuilderList();
    }
    // Lazily switches this builder from plain-list mode to field-builder mode;
    // after this call accountsBuilder_ owns the data and accounts_ is nulled.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.shopping.merchant.accounts.v1.Account,
            com.google.shopping.merchant.accounts.v1.Account.Builder,
            com.google.shopping.merchant.accounts.v1.AccountOrBuilder>
        getAccountsFieldBuilder() {
      if (accountsBuilder_ == null) {
        accountsBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.shopping.merchant.accounts.v1.Account,
                com.google.shopping.merchant.accounts.v1.Account.Builder,
                com.google.shopping.merchant.accounts.v1.AccountOrBuilder>(
                accounts_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
        accounts_ = null;
      }
      return accountsBuilder_;
    }
    // Same lazy String/ByteString dual representation as on the message class.
    private java.lang.Object nextPageToken_ = "";
    /**
     *
     *
     * <pre>
     * A token, which can be sent as `page_token` to retrieve the next page.
     * If this field is omitted, there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The nextPageToken.
     */
    public java.lang.String getNextPageToken() {
      java.lang.Object ref = nextPageToken_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        nextPageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * A token, which can be sent as `page_token` to retrieve the next page.
     * If this field is omitted, there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The bytes for nextPageToken.
     */
    public com.google.protobuf.ByteString getNextPageTokenBytes() {
      java.lang.Object ref = nextPageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        nextPageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * A token, which can be sent as `page_token` to retrieve the next page.
     * If this field is omitted, there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * A token, which can be sent as `page_token` to retrieve the next page.
     * If this field is omitted, there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearNextPageToken() {
      nextPageToken_ = getDefaultInstance().getNextPageToken();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * A token, which can be sent as `page_token` to retrieve the next page.
     * If this field is omitted, there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The bytes for nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.shopping.merchant.accounts.v1.ListSubAccountsResponse)
  }
  // @@protoc_insertion_point(class_scope:google.shopping.merchant.accounts.v1.ListSubAccountsResponse)
  private static final com.google.shopping.merchant.accounts.v1.ListSubAccountsResponse
      DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.shopping.merchant.accounts.v1.ListSubAccountsResponse();
  }
  public static com.google.shopping.merchant.accounts.v1.ListSubAccountsResponse
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser that delegates to Builder.mergeFrom and returns a partially-built
  // message on failure (attached to the thrown exception).
  private static final com.google.protobuf.Parser<ListSubAccountsResponse> PARSER =
      new com.google.protobuf.AbstractParser<ListSubAccountsResponse>() {
        @java.lang.Override
        public ListSubAccountsResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<ListSubAccountsResponse> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<ListSubAccountsResponse> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.shopping.merchant.accounts.v1.ListSubAccountsResponse
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 38,002 | java-dlp/proto-google-cloud-dlp-v2/src/main/java/com/google/privacy/dlp/v2/ListStoredInfoTypesResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/privacy/dlp/v2/dlp.proto
// Protobuf Java Version: 3.25.8
package com.google.privacy.dlp.v2;
/**
*
*
* <pre>
* Response message for ListStoredInfoTypes.
* </pre>
*
* Protobuf type {@code google.privacy.dlp.v2.ListStoredInfoTypesResponse}
*/
public final class ListStoredInfoTypesResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.privacy.dlp.v2.ListStoredInfoTypesResponse)
ListStoredInfoTypesResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListStoredInfoTypesResponse.newBuilder() to construct.
private ListStoredInfoTypesResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListStoredInfoTypesResponse() {
storedInfoTypes_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListStoredInfoTypesResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.privacy.dlp.v2.DlpProto
.internal_static_google_privacy_dlp_v2_ListStoredInfoTypesResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.privacy.dlp.v2.DlpProto
.internal_static_google_privacy_dlp_v2_ListStoredInfoTypesResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.privacy.dlp.v2.ListStoredInfoTypesResponse.class,
com.google.privacy.dlp.v2.ListStoredInfoTypesResponse.Builder.class);
}
public static final int STORED_INFO_TYPES_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.privacy.dlp.v2.StoredInfoType> storedInfoTypes_;
/**
*
*
* <pre>
* List of storedInfoTypes, up to page_size in ListStoredInfoTypesRequest.
* </pre>
*
* <code>repeated .google.privacy.dlp.v2.StoredInfoType stored_info_types = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.privacy.dlp.v2.StoredInfoType> getStoredInfoTypesList() {
return storedInfoTypes_;
}
/**
*
*
* <pre>
* List of storedInfoTypes, up to page_size in ListStoredInfoTypesRequest.
* </pre>
*
* <code>repeated .google.privacy.dlp.v2.StoredInfoType stored_info_types = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.privacy.dlp.v2.StoredInfoTypeOrBuilder>
getStoredInfoTypesOrBuilderList() {
return storedInfoTypes_;
}
/**
*
*
* <pre>
* List of storedInfoTypes, up to page_size in ListStoredInfoTypesRequest.
* </pre>
*
* <code>repeated .google.privacy.dlp.v2.StoredInfoType stored_info_types = 1;</code>
*/
@java.lang.Override
public int getStoredInfoTypesCount() {
return storedInfoTypes_.size();
}
/**
*
*
* <pre>
* List of storedInfoTypes, up to page_size in ListStoredInfoTypesRequest.
* </pre>
*
* <code>repeated .google.privacy.dlp.v2.StoredInfoType stored_info_types = 1;</code>
*/
@java.lang.Override
public com.google.privacy.dlp.v2.StoredInfoType getStoredInfoTypes(int index) {
return storedInfoTypes_.get(index);
}
/**
*
*
* <pre>
* List of storedInfoTypes, up to page_size in ListStoredInfoTypesRequest.
* </pre>
*
* <code>repeated .google.privacy.dlp.v2.StoredInfoType stored_info_types = 1;</code>
*/
@java.lang.Override
public com.google.privacy.dlp.v2.StoredInfoTypeOrBuilder getStoredInfoTypesOrBuilder(int index) {
return storedInfoTypes_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* If the next page is available then the next page token to be used
* in the following ListStoredInfoTypes request.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
  @java.lang.Override
  public java.lang.String getNextPageToken() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Field still holds the lazily-decoded ByteString form; decode it once
      // and cache the String back into the field so later calls are cheap.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      nextPageToken_ = s;
      return s;
    }
  }
/**
*
*
* <pre>
* If the next page is available then the next page token to be used
* in the following ListStoredInfoTypes request.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
  // Memoized isInitialized() result: -1 = not yet computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // This message declares no required fields, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Field 1: repeated stored_info_types messages, written in list order.
    for (int i = 0; i < storedInfoTypes_.size(); i++) {
      output.writeMessage(1, storedInfoTypes_.get(i));
    }
    // Field 2: next_page_token, serialized only when non-empty (proto3 elides defaults).
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
    }
    // Preserve any fields this binary did not know about when parsing.
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // memoizedSize (inherited) caches the computed size; -1 means "not yet computed".
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    for (int i = 0; i < storedInfoTypes_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, storedInfoTypes_.get(i));
    }
    // next_page_token contributes only when non-empty, mirroring writeTo().
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.privacy.dlp.v2.ListStoredInfoTypesResponse)) {
      return super.equals(obj);
    }
    com.google.privacy.dlp.v2.ListStoredInfoTypesResponse other =
        (com.google.privacy.dlp.v2.ListStoredInfoTypesResponse) obj;
    // Value equality: both declared fields plus any unknown fields must match.
    if (!getStoredInfoTypesList().equals(other.getStoredInfoTypesList())) return false;
    if (!getNextPageToken().equals(other.getNextPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // memoizedHashCode (inherited) caches the result; 0 means "not yet computed".
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    // The repeated field contributes only when non-empty, consistent with equals().
    if (getStoredInfoTypesCount() > 0) {
      hash = (37 * hash) + STORED_INFO_TYPES_FIELD_NUMBER;
      hash = (53 * hash) + getStoredInfoTypesList().hashCode();
    }
    hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getNextPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
public static com.google.privacy.dlp.v2.ListStoredInfoTypesResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.privacy.dlp.v2.ListStoredInfoTypesResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.privacy.dlp.v2.ListStoredInfoTypesResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.privacy.dlp.v2.ListStoredInfoTypesResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.privacy.dlp.v2.ListStoredInfoTypesResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.privacy.dlp.v2.ListStoredInfoTypesResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.privacy.dlp.v2.ListStoredInfoTypesResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.privacy.dlp.v2.ListStoredInfoTypesResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.privacy.dlp.v2.ListStoredInfoTypesResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.privacy.dlp.v2.ListStoredInfoTypesResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.privacy.dlp.v2.ListStoredInfoTypesResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.privacy.dlp.v2.ListStoredInfoTypesResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.privacy.dlp.v2.ListStoredInfoTypesResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Response message for ListStoredInfoTypes.
* </pre>
*
* Protobuf type {@code google.privacy.dlp.v2.ListStoredInfoTypesResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.privacy.dlp.v2.ListStoredInfoTypesResponse)
com.google.privacy.dlp.v2.ListStoredInfoTypesResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.privacy.dlp.v2.DlpProto
.internal_static_google_privacy_dlp_v2_ListStoredInfoTypesResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.privacy.dlp.v2.DlpProto
.internal_static_google_privacy_dlp_v2_ListStoredInfoTypesResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.privacy.dlp.v2.ListStoredInfoTypesResponse.class,
com.google.privacy.dlp.v2.ListStoredInfoTypesResponse.Builder.class);
}
// Construct using com.google.privacy.dlp.v2.ListStoredInfoTypesResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (storedInfoTypesBuilder_ == null) {
storedInfoTypes_ = java.util.Collections.emptyList();
} else {
storedInfoTypes_ = null;
storedInfoTypesBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.privacy.dlp.v2.DlpProto
.internal_static_google_privacy_dlp_v2_ListStoredInfoTypesResponse_descriptor;
}
@java.lang.Override
public com.google.privacy.dlp.v2.ListStoredInfoTypesResponse getDefaultInstanceForType() {
return com.google.privacy.dlp.v2.ListStoredInfoTypesResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.privacy.dlp.v2.ListStoredInfoTypesResponse build() {
com.google.privacy.dlp.v2.ListStoredInfoTypesResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.privacy.dlp.v2.ListStoredInfoTypesResponse buildPartial() {
com.google.privacy.dlp.v2.ListStoredInfoTypesResponse result =
new com.google.privacy.dlp.v2.ListStoredInfoTypesResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.privacy.dlp.v2.ListStoredInfoTypesResponse result) {
if (storedInfoTypesBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
storedInfoTypes_ = java.util.Collections.unmodifiableList(storedInfoTypes_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.storedInfoTypes_ = storedInfoTypes_;
} else {
result.storedInfoTypes_ = storedInfoTypesBuilder_.build();
}
}
private void buildPartial0(com.google.privacy.dlp.v2.ListStoredInfoTypesResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.privacy.dlp.v2.ListStoredInfoTypesResponse) {
return mergeFrom((com.google.privacy.dlp.v2.ListStoredInfoTypesResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.privacy.dlp.v2.ListStoredInfoTypesResponse other) {
if (other == com.google.privacy.dlp.v2.ListStoredInfoTypesResponse.getDefaultInstance())
return this;
if (storedInfoTypesBuilder_ == null) {
if (!other.storedInfoTypes_.isEmpty()) {
if (storedInfoTypes_.isEmpty()) {
storedInfoTypes_ = other.storedInfoTypes_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureStoredInfoTypesIsMutable();
storedInfoTypes_.addAll(other.storedInfoTypes_);
}
onChanged();
}
} else {
if (!other.storedInfoTypes_.isEmpty()) {
if (storedInfoTypesBuilder_.isEmpty()) {
storedInfoTypesBuilder_.dispose();
storedInfoTypesBuilder_ = null;
storedInfoTypes_ = other.storedInfoTypes_;
bitField0_ = (bitField0_ & ~0x00000001);
storedInfoTypesBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getStoredInfoTypesFieldBuilder()
: null;
} else {
storedInfoTypesBuilder_.addAllMessages(other.storedInfoTypes_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        // Tag-dispatch loop: read each (field number, wire type) tag and merge
        // the corresponding field; tag 0 marks end of input.
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            // Tag 10 = field 1 (stored_info_types), wire type 2 (length-delimited).
            case 10:
              {
                com.google.privacy.dlp.v2.StoredInfoType m =
                    input.readMessage(
                        com.google.privacy.dlp.v2.StoredInfoType.parser(), extensionRegistry);
                if (storedInfoTypesBuilder_ == null) {
                  ensureStoredInfoTypesIsMutable();
                  storedInfoTypes_.add(m);
                } else {
                  storedInfoTypesBuilder_.addMessage(m);
                }
                break;
              } // case 10
            // Tag 18 = field 2 (next_page_token), wire type 2 (length-delimited).
            case 18:
              {
                nextPageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                // Unrecognized tags are preserved as unknown fields.
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
private int bitField0_;
private java.util.List<com.google.privacy.dlp.v2.StoredInfoType> storedInfoTypes_ =
java.util.Collections.emptyList();
private void ensureStoredInfoTypesIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
storedInfoTypes_ =
new java.util.ArrayList<com.google.privacy.dlp.v2.StoredInfoType>(storedInfoTypes_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.privacy.dlp.v2.StoredInfoType,
com.google.privacy.dlp.v2.StoredInfoType.Builder,
com.google.privacy.dlp.v2.StoredInfoTypeOrBuilder>
storedInfoTypesBuilder_;
/**
*
*
* <pre>
* List of storedInfoTypes, up to page_size in ListStoredInfoTypesRequest.
* </pre>
*
* <code>repeated .google.privacy.dlp.v2.StoredInfoType stored_info_types = 1;</code>
*/
public java.util.List<com.google.privacy.dlp.v2.StoredInfoType> getStoredInfoTypesList() {
if (storedInfoTypesBuilder_ == null) {
return java.util.Collections.unmodifiableList(storedInfoTypes_);
} else {
return storedInfoTypesBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* List of storedInfoTypes, up to page_size in ListStoredInfoTypesRequest.
* </pre>
*
* <code>repeated .google.privacy.dlp.v2.StoredInfoType stored_info_types = 1;</code>
*/
public int getStoredInfoTypesCount() {
if (storedInfoTypesBuilder_ == null) {
return storedInfoTypes_.size();
} else {
return storedInfoTypesBuilder_.getCount();
}
}
/**
*
*
* <pre>
* List of storedInfoTypes, up to page_size in ListStoredInfoTypesRequest.
* </pre>
*
* <code>repeated .google.privacy.dlp.v2.StoredInfoType stored_info_types = 1;</code>
*/
public com.google.privacy.dlp.v2.StoredInfoType getStoredInfoTypes(int index) {
if (storedInfoTypesBuilder_ == null) {
return storedInfoTypes_.get(index);
} else {
return storedInfoTypesBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* List of storedInfoTypes, up to page_size in ListStoredInfoTypesRequest.
* </pre>
*
* <code>repeated .google.privacy.dlp.v2.StoredInfoType stored_info_types = 1;</code>
*/
public Builder setStoredInfoTypes(int index, com.google.privacy.dlp.v2.StoredInfoType value) {
if (storedInfoTypesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureStoredInfoTypesIsMutable();
storedInfoTypes_.set(index, value);
onChanged();
} else {
storedInfoTypesBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* List of storedInfoTypes, up to page_size in ListStoredInfoTypesRequest.
* </pre>
*
* <code>repeated .google.privacy.dlp.v2.StoredInfoType stored_info_types = 1;</code>
*/
public Builder setStoredInfoTypes(
int index, com.google.privacy.dlp.v2.StoredInfoType.Builder builderForValue) {
if (storedInfoTypesBuilder_ == null) {
ensureStoredInfoTypesIsMutable();
storedInfoTypes_.set(index, builderForValue.build());
onChanged();
} else {
storedInfoTypesBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* List of storedInfoTypes, up to page_size in ListStoredInfoTypesRequest.
* </pre>
*
* <code>repeated .google.privacy.dlp.v2.StoredInfoType stored_info_types = 1;</code>
*/
public Builder addStoredInfoTypes(com.google.privacy.dlp.v2.StoredInfoType value) {
if (storedInfoTypesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureStoredInfoTypesIsMutable();
storedInfoTypes_.add(value);
onChanged();
} else {
storedInfoTypesBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* List of storedInfoTypes, up to page_size in ListStoredInfoTypesRequest.
* </pre>
*
* <code>repeated .google.privacy.dlp.v2.StoredInfoType stored_info_types = 1;</code>
*/
public Builder addStoredInfoTypes(int index, com.google.privacy.dlp.v2.StoredInfoType value) {
if (storedInfoTypesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureStoredInfoTypesIsMutable();
storedInfoTypes_.add(index, value);
onChanged();
} else {
storedInfoTypesBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* List of storedInfoTypes, up to page_size in ListStoredInfoTypesRequest.
* </pre>
*
* <code>repeated .google.privacy.dlp.v2.StoredInfoType stored_info_types = 1;</code>
*/
public Builder addStoredInfoTypes(
com.google.privacy.dlp.v2.StoredInfoType.Builder builderForValue) {
if (storedInfoTypesBuilder_ == null) {
ensureStoredInfoTypesIsMutable();
storedInfoTypes_.add(builderForValue.build());
onChanged();
} else {
storedInfoTypesBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* List of storedInfoTypes, up to page_size in ListStoredInfoTypesRequest.
* </pre>
*
* <code>repeated .google.privacy.dlp.v2.StoredInfoType stored_info_types = 1;</code>
*/
public Builder addStoredInfoTypes(
int index, com.google.privacy.dlp.v2.StoredInfoType.Builder builderForValue) {
if (storedInfoTypesBuilder_ == null) {
ensureStoredInfoTypesIsMutable();
storedInfoTypes_.add(index, builderForValue.build());
onChanged();
} else {
storedInfoTypesBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* List of storedInfoTypes, up to page_size in ListStoredInfoTypesRequest.
* </pre>
*
* <code>repeated .google.privacy.dlp.v2.StoredInfoType stored_info_types = 1;</code>
*/
public Builder addAllStoredInfoTypes(
java.lang.Iterable<? extends com.google.privacy.dlp.v2.StoredInfoType> values) {
if (storedInfoTypesBuilder_ == null) {
ensureStoredInfoTypesIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, storedInfoTypes_);
onChanged();
} else {
storedInfoTypesBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* List of storedInfoTypes, up to page_size in ListStoredInfoTypesRequest.
* </pre>
*
* <code>repeated .google.privacy.dlp.v2.StoredInfoType stored_info_types = 1;</code>
*/
public Builder clearStoredInfoTypes() {
if (storedInfoTypesBuilder_ == null) {
storedInfoTypes_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
storedInfoTypesBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* List of storedInfoTypes, up to page_size in ListStoredInfoTypesRequest.
* </pre>
*
* <code>repeated .google.privacy.dlp.v2.StoredInfoType stored_info_types = 1;</code>
*/
public Builder removeStoredInfoTypes(int index) {
if (storedInfoTypesBuilder_ == null) {
ensureStoredInfoTypesIsMutable();
storedInfoTypes_.remove(index);
onChanged();
} else {
storedInfoTypesBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* List of storedInfoTypes, up to page_size in ListStoredInfoTypesRequest.
* </pre>
*
* <code>repeated .google.privacy.dlp.v2.StoredInfoType stored_info_types = 1;</code>
*/
public com.google.privacy.dlp.v2.StoredInfoType.Builder getStoredInfoTypesBuilder(int index) {
return getStoredInfoTypesFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* List of storedInfoTypes, up to page_size in ListStoredInfoTypesRequest.
* </pre>
*
* <code>repeated .google.privacy.dlp.v2.StoredInfoType stored_info_types = 1;</code>
*/
public com.google.privacy.dlp.v2.StoredInfoTypeOrBuilder getStoredInfoTypesOrBuilder(
int index) {
if (storedInfoTypesBuilder_ == null) {
return storedInfoTypes_.get(index);
} else {
return storedInfoTypesBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* List of storedInfoTypes, up to page_size in ListStoredInfoTypesRequest.
* </pre>
*
* <code>repeated .google.privacy.dlp.v2.StoredInfoType stored_info_types = 1;</code>
*/
public java.util.List<? extends com.google.privacy.dlp.v2.StoredInfoTypeOrBuilder>
getStoredInfoTypesOrBuilderList() {
if (storedInfoTypesBuilder_ != null) {
return storedInfoTypesBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(storedInfoTypes_);
}
}
/**
*
*
* <pre>
* List of storedInfoTypes, up to page_size in ListStoredInfoTypesRequest.
* </pre>
*
* <code>repeated .google.privacy.dlp.v2.StoredInfoType stored_info_types = 1;</code>
*/
public com.google.privacy.dlp.v2.StoredInfoType.Builder addStoredInfoTypesBuilder() {
return getStoredInfoTypesFieldBuilder()
.addBuilder(com.google.privacy.dlp.v2.StoredInfoType.getDefaultInstance());
}
/**
*
*
* <pre>
* List of storedInfoTypes, up to page_size in ListStoredInfoTypesRequest.
* </pre>
*
* <code>repeated .google.privacy.dlp.v2.StoredInfoType stored_info_types = 1;</code>
*/
public com.google.privacy.dlp.v2.StoredInfoType.Builder addStoredInfoTypesBuilder(int index) {
return getStoredInfoTypesFieldBuilder()
.addBuilder(index, com.google.privacy.dlp.v2.StoredInfoType.getDefaultInstance());
}
/**
*
*
* <pre>
* List of storedInfoTypes, up to page_size in ListStoredInfoTypesRequest.
* </pre>
*
* <code>repeated .google.privacy.dlp.v2.StoredInfoType stored_info_types = 1;</code>
*/
public java.util.List<com.google.privacy.dlp.v2.StoredInfoType.Builder>
getStoredInfoTypesBuilderList() {
return getStoredInfoTypesFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.privacy.dlp.v2.StoredInfoType,
com.google.privacy.dlp.v2.StoredInfoType.Builder,
com.google.privacy.dlp.v2.StoredInfoTypeOrBuilder>
getStoredInfoTypesFieldBuilder() {
if (storedInfoTypesBuilder_ == null) {
storedInfoTypesBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.privacy.dlp.v2.StoredInfoType,
com.google.privacy.dlp.v2.StoredInfoType.Builder,
com.google.privacy.dlp.v2.StoredInfoTypeOrBuilder>(
storedInfoTypes_,
((bitField0_ & 0x00000001) != 0),
getParentForChildren(),
isClean());
storedInfoTypes_ = null;
}
return storedInfoTypesBuilder_;
}
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* If the next page is available then the next page token to be used
* in the following ListStoredInfoTypes request.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* If the next page is available then the next page token to be used
* in the following ListStoredInfoTypes request.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* If the next page is available then the next page token to be used
* in the following ListStoredInfoTypes request.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* If the next page is available then the next page token to be used
* in the following ListStoredInfoTypes request.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* If the next page is available then the next page token to be used
* in the following ListStoredInfoTypes request.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.privacy.dlp.v2.ListStoredInfoTypesResponse)
}
// @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.ListStoredInfoTypesResponse)
private static final com.google.privacy.dlp.v2.ListStoredInfoTypesResponse DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.privacy.dlp.v2.ListStoredInfoTypesResponse();
}
public static com.google.privacy.dlp.v2.ListStoredInfoTypesResponse getDefaultInstance() {
return DEFAULT_INSTANCE;
}
  // Stateless parser singleton; delegates to Builder.mergeFrom and returns
  // buildPartial so a partially-read message can still be attached to errors.
  private static final com.google.protobuf.Parser<ListStoredInfoTypesResponse> PARSER =
      new com.google.protobuf.AbstractParser<ListStoredInfoTypesResponse>() {
        @java.lang.Override
        public ListStoredInfoTypesResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O failures so callers see one exception type, still
            // carrying whatever had been parsed before the failure.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
public static com.google.protobuf.Parser<ListStoredInfoTypesResponse> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListStoredInfoTypesResponse> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.privacy.dlp.v2.ListStoredInfoTypesResponse getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
google/j2objc | 37,860 | xalan/third_party/android/platform/external/apache-xml/src/main/java/org/apache/xpath/compiler/Compiler.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* $Id: Compiler.java 468655 2006-10-28 07:12:06Z minchau $
*/
package org.apache.xpath.compiler;
import javax.xml.transform.ErrorListener;
import javax.xml.transform.SourceLocator;
import javax.xml.transform.TransformerException;
import org.apache.xalan.res.XSLMessages;
import org.apache.xml.dtm.Axis;
import org.apache.xml.dtm.DTMFilter;
import org.apache.xml.dtm.DTMIterator;
import org.apache.xml.utils.PrefixResolver;
import org.apache.xml.utils.QName;
import org.apache.xml.utils.SAXSourceLocator;
import org.apache.xpath.Expression;
import org.apache.xpath.axes.UnionPathIterator;
import org.apache.xpath.axes.WalkerFactory;
import org.apache.xpath.functions.FuncExtFunction;
import org.apache.xpath.functions.FuncExtFunctionAvailable;
import org.apache.xpath.functions.Function;
import org.apache.xpath.functions.WrongNumberArgsException;
import org.apache.xpath.objects.XNumber;
import org.apache.xpath.objects.XString;
import org.apache.xpath.operations.And;
import org.apache.xpath.operations.Div;
import org.apache.xpath.operations.Equals;
import org.apache.xpath.operations.Gt;
import org.apache.xpath.operations.Gte;
import org.apache.xpath.operations.Lt;
import org.apache.xpath.operations.Lte;
import org.apache.xpath.operations.Minus;
import org.apache.xpath.operations.Mod;
import org.apache.xpath.operations.Mult;
import org.apache.xpath.operations.Neg;
import org.apache.xpath.operations.NotEquals;
import org.apache.xpath.operations.Operation;
import org.apache.xpath.operations.Or;
import org.apache.xpath.operations.Plus;
import org.apache.xpath.operations.UnaryOperation;
import org.apache.xpath.operations.Variable;
import org.apache.xpath.patterns.FunctionPattern;
import org.apache.xpath.patterns.NodeTest;
import org.apache.xpath.patterns.StepPattern;
import org.apache.xpath.patterns.UnionPattern;
import org.apache.xpath.res.XPATHErrorResources;
/**
* An instance of this class compiles an XPath string expression into
* a Expression object. This class compiles the string into a sequence
* of operation codes (op map) and then builds from that into an Expression
* tree.
* @xsl.usage advanced
*/
public class Compiler extends OpMap
{
/**
 * Construct a Compiler object with a specific ErrorListener and
 * SourceLocator where the expression is located.
 *
 * @param errorHandler Error listener where messages will be sent, or null
 *                     if messages should be sent to System.err.
 * @param locator The location object where the expression lives, which
 *                may be null, but which, if not null, must be valid over
 *                the long haul, in other words, it will not be cloned.
 * @param fTable The FunctionTable object where the xpath built-in
 *               functions are stored.
 */
public Compiler(ErrorListener errorHandler, SourceLocator locator,
        FunctionTable fTable)
{
  m_errorHandler = errorHandler;
  m_locator = locator;
  m_functionTable = fTable;
}

/**
 * Construct a Compiler instance that has a null error listener and a
 * null source locator.
 *
 * NOTE(review): this constructor leaves m_functionTable null, so a
 * Compiler built this way cannot compile function calls (compileFunction
 * would NPE) — presumably only used for paths that never reach
 * compileFunction; confirm against callers.
 */
public Compiler()
{
  m_errorHandler = null;
  m_locator = null;
}
/**
 * Compile the XPath expression starting at a given opcode position,
 * dispatching on the op code at that position to the appropriate
 * specialized compile method and returning the resulting Expression tree.
 *
 * @param opPos The current position in the xpath.m_opMap array.
 * @return The compiled Expression for that op; null for OP_PREDICATE
 *         (handled via predicate()/getCompiledPredicates, never here)
 *         and after a non-throwing error report for unknown op codes.
 *
 * @throws TransformerException if there is a syntax or other error.
 * @xsl.usage advanced
 */
public Expression compile(int opPos) throws TransformerException
{
  int op = getOp(opPos);
  Expression expr = null;
  // System.out.println(getPatternString()+"op: "+op);
  switch (op)
  {
  case OpCodes.OP_XPATH :
    // opPos + 2 skips the op code and length slots to the child op.
    expr = compile(opPos + 2); break;
  case OpCodes.OP_OR :
    expr = or(opPos); break;
  case OpCodes.OP_AND :
    expr = and(opPos); break;
  case OpCodes.OP_NOTEQUALS :
    expr = notequals(opPos); break;
  case OpCodes.OP_EQUALS :
    expr = equals(opPos); break;
  case OpCodes.OP_LTE :
    expr = lte(opPos); break;
  case OpCodes.OP_LT :
    expr = lt(opPos); break;
  case OpCodes.OP_GTE :
    expr = gte(opPos); break;
  case OpCodes.OP_GT :
    expr = gt(opPos); break;
  case OpCodes.OP_PLUS :
    expr = plus(opPos); break;
  case OpCodes.OP_MINUS :
    expr = minus(opPos); break;
  case OpCodes.OP_MULT :
    expr = mult(opPos); break;
  case OpCodes.OP_DIV :
    expr = div(opPos); break;
  case OpCodes.OP_MOD :
    expr = mod(opPos); break;
  // case OpCodes.OP_QUO :
  //   expr = quo(opPos); break;
  case OpCodes.OP_NEG :
    expr = neg(opPos); break;
  case OpCodes.OP_STRING :
    expr = string(opPos); break;
  case OpCodes.OP_BOOL :
    expr = bool(opPos); break;
  case OpCodes.OP_NUMBER :
    expr = number(opPos); break;
  case OpCodes.OP_UNION :
    expr = union(opPos); break;
  case OpCodes.OP_LITERAL :
    expr = literal(opPos); break;
  case OpCodes.OP_VARIABLE :
    expr = variable(opPos); break;
  case OpCodes.OP_GROUP :
    expr = group(opPos); break;
  case OpCodes.OP_NUMBERLIT :
    expr = numberlit(opPos); break;
  case OpCodes.OP_ARGUMENT :
    expr = arg(opPos); break;
  case OpCodes.OP_EXTFUNCTION :
    expr = compileExtension(opPos); break;
  case OpCodes.OP_FUNCTION :
    expr = compileFunction(opPos); break;
  case OpCodes.OP_LOCATIONPATH :
    expr = locationPath(opPos); break;
  case OpCodes.OP_PREDICATE :
    expr = null; break; // should never hit this here.
  case OpCodes.OP_MATCHPATTERN :
    expr = matchPattern(opPos + 2); break;
  case OpCodes.OP_LOCATIONPATHPATTERN :
    expr = locationPathPattern(opPos); break;
  case OpCodes.OP_QUO:
    // XPath 1.0 dropped the 'quo' operator; report it as unknown.
    error(XPATHErrorResources.ER_UNKNOWN_OPCODE,
          new Object[]{ "quo" }); //"ERROR! Unknown op code: "+m_opMap[opPos]);
    break;
  default :
    error(XPATHErrorResources.ER_UNKNOWN_OPCODE,
          new Object[]{ Integer.toString(getOp(opPos)) }); //"ERROR! Unknown op code: "+m_opMap[opPos]);
  }
  // if(null != expr)
  //  expr.setSourceLocator(m_locator);
  return expr;
}
/**
 * Bottle-neck compilation of a binary operation: compiles the left and
 * right operand expressions and installs them on the parent operation.
 *
 * @param operation non-null reference to the parent operation.
 * @param opPos The op map position of the parent operation.
 *
 * @return the same {@link org.apache.xpath.operations.Operation}
 *         instance, now carrying its compiled operands.
 *
 * @throws TransformerException if there is a syntax or other error.
 */
private Expression compileOperation(Operation operation, int opPos)
        throws TransformerException
{
  final int left = getFirstChildPos(opPos);
  final int right = getNextOpPos(left);

  operation.setLeftRight(compile(left), compile(right));
  return operation;
}

/**
 * Bottle-neck compilation of a unary operation: compiles the single
 * operand expression and installs it on the parent operation.
 *
 * @param unary The parent unary operation.
 * @param opPos The position in the op map of the parent operation.
 *
 * @return the same unary operation, now carrying its compiled operand.
 *
 * @throws TransformerException if a syntax or other error occurs.
 */
private Expression compileUnary(UnaryOperation unary, int opPos)
        throws TransformerException
{
  unary.setRight(compile(getFirstChildPos(opPos)));
  return unary;
}
/**
 * Compile an 'or' operation.
 *
 * @param opPos The current position in the m_opMap array.
 *
 * @return reference to {@link org.apache.xpath.operations.Or} instance.
 *
 * @throws TransformerException if an error occurs creating the Expression.
 */
protected Expression or(int opPos) throws TransformerException
{
  return compileOperation(new Or(), opPos);
}

/**
 * Compile an 'and' operation.
 *
 * @param opPos The current position in the m_opMap array.
 *
 * @return reference to {@link org.apache.xpath.operations.And} instance.
 *
 * @throws TransformerException if an error occurs creating the Expression.
 */
protected Expression and(int opPos) throws TransformerException
{
  return compileOperation(new And(), opPos);
}

/**
 * Compile a '!=' operation.
 *
 * @param opPos The current position in the m_opMap array.
 *
 * @return reference to {@link org.apache.xpath.operations.NotEquals} instance.
 *
 * @throws TransformerException if an error occurs creating the Expression.
 */
protected Expression notequals(int opPos) throws TransformerException
{
  return compileOperation(new NotEquals(), opPos);
}

/**
 * Compile a '=' operation.
 *
 * @param opPos The current position in the m_opMap array.
 *
 * @return reference to {@link org.apache.xpath.operations.Equals} instance.
 *
 * @throws TransformerException if an error occurs creating the Expression.
 */
protected Expression equals(int opPos) throws TransformerException
{
  return compileOperation(new Equals(), opPos);
}

/**
 * Compile a '&lt;=' operation.
 *
 * @param opPos The current position in the m_opMap array.
 *
 * @return reference to {@link org.apache.xpath.operations.Lte} instance.
 *
 * @throws TransformerException if an error occurs creating the Expression.
 */
protected Expression lte(int opPos) throws TransformerException
{
  return compileOperation(new Lte(), opPos);
}

/**
 * Compile a '&lt;' operation.
 *
 * @param opPos The current position in the m_opMap array.
 *
 * @return reference to {@link org.apache.xpath.operations.Lt} instance.
 *
 * @throws TransformerException if an error occurs creating the Expression.
 */
protected Expression lt(int opPos) throws TransformerException
{
  return compileOperation(new Lt(), opPos);
}

/**
 * Compile a '&gt;=' operation.
 *
 * @param opPos The current position in the m_opMap array.
 *
 * @return reference to {@link org.apache.xpath.operations.Gte} instance.
 *
 * @throws TransformerException if an error occurs creating the Expression.
 */
protected Expression gte(int opPos) throws TransformerException
{
  return compileOperation(new Gte(), opPos);
}

/**
 * Compile a '&gt;' operation.
 *
 * @param opPos The current position in the m_opMap array.
 *
 * @return reference to {@link org.apache.xpath.operations.Gt} instance.
 *
 * @throws TransformerException if an error occurs creating the Expression.
 */
protected Expression gt(int opPos) throws TransformerException
{
  return compileOperation(new Gt(), opPos);
}

/**
 * Compile a '+' operation.
 *
 * @param opPos The current position in the m_opMap array.
 *
 * @return reference to {@link org.apache.xpath.operations.Plus} instance.
 *
 * @throws TransformerException if an error occurs creating the Expression.
 */
protected Expression plus(int opPos) throws TransformerException
{
  return compileOperation(new Plus(), opPos);
}

/**
 * Compile a '-' operation.
 *
 * @param opPos The current position in the m_opMap array.
 *
 * @return reference to {@link org.apache.xpath.operations.Minus} instance.
 *
 * @throws TransformerException if an error occurs creating the Expression.
 */
protected Expression minus(int opPos) throws TransformerException
{
  return compileOperation(new Minus(), opPos);
}

/**
 * Compile a '*' operation.
 *
 * @param opPos The current position in the m_opMap array.
 *
 * @return reference to {@link org.apache.xpath.operations.Mult} instance.
 *
 * @throws TransformerException if an error occurs creating the Expression.
 */
protected Expression mult(int opPos) throws TransformerException
{
  return compileOperation(new Mult(), opPos);
}

/**
 * Compile a 'div' operation.
 *
 * @param opPos The current position in the m_opMap array.
 *
 * @return reference to {@link org.apache.xpath.operations.Div} instance.
 *
 * @throws TransformerException if an error occurs creating the Expression.
 */
protected Expression div(int opPos) throws TransformerException
{
  return compileOperation(new Div(), opPos);
}

/**
 * Compile a 'mod' operation.
 *
 * @param opPos The current position in the m_opMap array.
 *
 * @return reference to {@link org.apache.xpath.operations.Mod} instance.
 *
 * @throws TransformerException if an error occurs creating the Expression.
 */
protected Expression mod(int opPos) throws TransformerException
{
  return compileOperation(new Mod(), opPos);
}

/*
 * Compile a 'quo' operation. Dead code: 'quo' was dropped from XPath 1.0;
 * OP_QUO is reported as an unknown op code in compile().
 *
 * @param opPos The current position in the m_opMap array.
 *
 * @return reference to {@link org.apache.xpath.operations.Quo} instance.
 *
 * @throws TransformerException if an error occurs creating the Expression.
 */
// protected Expression quo(int opPos) throws TransformerException
// {
//   return compileOperation(new Quo(), opPos);
// }
/**
 * Compile a unary '-' operation.
 *
 * @param opPos The current position in the m_opMap array.
 *
 * @return reference to {@link org.apache.xpath.operations.Neg} instance.
 *
 * @throws TransformerException if an error occurs creating the Expression.
 */
protected Expression neg(int opPos) throws TransformerException
{
  return compileUnary(new Neg(), opPos);
}

/**
 * Compile a 'string(...)' operation.
 *
 * @param opPos The current position in the m_opMap array.
 *
 * @return reference to {@link org.apache.xpath.operations.String} instance.
 *
 * @throws TransformerException if an error occurs creating the Expression.
 */
protected Expression string(int opPos) throws TransformerException
{
  // Fully qualified to avoid shadowing java.lang.String.
  return compileUnary(new org.apache.xpath.operations.String(), opPos);
}

/**
 * Compile a 'boolean(...)' operation.
 *
 * @param opPos The current position in the m_opMap array.
 *
 * @return reference to {@link org.apache.xpath.operations.Bool} instance.
 *
 * @throws TransformerException if an error occurs creating the Expression.
 */
protected Expression bool(int opPos) throws TransformerException
{
  return compileUnary(new org.apache.xpath.operations.Bool(), opPos);
}

/**
 * Compile a 'number(...)' operation.
 *
 * @param opPos The current position in the m_opMap array.
 *
 * @return reference to {@link org.apache.xpath.operations.Number} instance.
 *
 * @throws TransformerException if an error occurs creating the Expression.
 */
protected Expression number(int opPos) throws TransformerException
{
  return compileUnary(new org.apache.xpath.operations.Number(), opPos);
}
/**
 * Compile a literal string value. The XString was interned into the
 * token queue at parse time; this simply fetches it back out.
 *
 * @param opPos The current position in the m_opMap array.
 *
 * @return reference to {@link org.apache.xpath.objects.XString} instance.
 */
protected Expression literal(int opPos)
{
  final int tokenIndex = getOp(getFirstChildPos(opPos));
  return (XString) getTokenQueue().elementAt(tokenIndex);
}

/**
 * Compile a literal number value. The XNumber was interned into the
 * token queue at parse time; this simply fetches it back out.
 *
 * @param opPos The current position in the m_opMap array.
 *
 * @return reference to {@link org.apache.xpath.objects.XNumber} instance.
 */
protected Expression numberlit(int opPos)
{
  final int tokenIndex = getOp(getFirstChildPos(opPos));
  return (XNumber) getTokenQueue().elementAt(tokenIndex);
}
/**
 * Compile a variable reference. Reads the (optional) namespace token and
 * the local-name token from the token queue and builds a Variable
 * expression carrying the resulting QName.
 *
 * @param opPos The current position in the m_opMap array.
 *
 * @return reference to {@link org.apache.xpath.operations.Variable} instance.
 *
 * @throws TransformerException if an error occurs creating the Expression.
 */
protected Expression variable(int opPos) throws TransformerException
{
  opPos = getFirstChildPos(opPos);

  final int nsTokenIndex = getOp(opPos);
  final java.lang.String namespace = (OpCodes.EMPTY == nsTokenIndex)
      ? null
      : (java.lang.String) getTokenQueue().elementAt(nsTokenIndex);
  final java.lang.String localname =
      (java.lang.String) getTokenQueue().elementAt(getOp(opPos + 1));

  final Variable var = new Variable();
  var.setQName(new QName(namespace, localname));
  return var;
}
/**
 * Compile an expression group. A parenthesized group adds no semantics
 * of its own, so this just compiles the contained expression.
 *
 * @param opPos The current position in the m_opMap array.
 *
 * @return reference to the contained expression.
 *
 * @throws TransformerException if an error occurs creating the Expression.
 */
protected Expression group(int opPos) throws TransformerException
{
  // no-op: opPos + 2 skips the op code and length slots.
  return compile(opPos + 2);
}

/**
 * Compile a function argument. An argument wrapper adds no semantics of
 * its own, so this just compiles the contained expression.
 *
 * @param opPos The current position in the m_opMap array.
 *
 * @return reference to the argument expression.
 *
 * @throws TransformerException if an error occurs creating the Expression.
 */
protected Expression arg(int opPos) throws TransformerException
{
  // no-op: opPos + 2 skips the op code and length slots.
  return compile(opPos + 2);
}
/**
 * Compile a location path union. The UnionPathIterator itself may create
 * {@link org.apache.xpath.axes.LocPathIterator} children.
 *
 * @param opPos The current position in the m_opMap array.
 *
 * @return reference to {@link org.apache.xpath.axes.LocPathIterator} instance.
 *
 * @throws TransformerException if an error occurs creating the Expression.
 */
protected Expression union(int opPos) throws TransformerException
{
  // Depth is tracked even across exceptions, hence the try/finally.
  locPathDepth++;
  try
  {
    return UnionPathIterator.createUnionIterator(this, opPos);
  }
  finally
  {
    locPathDepth--;
  }
}

// Current nesting depth of the location path / union / match pattern
// being compiled. -1 means none in progress; the pre-increment in
// union()/locationPath()/matchPattern() makes 0 the top level.
private int locPathDepth = -1;

/**
 * Get the level of the location path or union being constructed.
 * @return 0 if it is a top-level path.
 */
public int getLocationPathDepth()
{
  return locPathDepth;
}

/**
 * Get the function table.
 *
 * NOTE(review): may be null if the no-arg constructor was used — confirm
 * callers only reach this via the three-argument constructor.
 */
FunctionTable getFunctionTable()
{
  return m_functionTable;
}
/**
 * Compile a location path. The LocPathIterator itself may create
 * {@link org.apache.xpath.axes.AxesWalker} children.
 *
 * @param opPos The current position in the m_opMap array.
 *
 * @return reference to {@link org.apache.xpath.axes.LocPathIterator} instance.
 *
 * @throws TransformerException if an error occurs creating the Expression.
 */
public Expression locationPath(int opPos) throws TransformerException
{
  // Depth is tracked even across exceptions, hence the try/finally.
  locPathDepth++;
  try
  {
    // The third argument tells the factory whether this is a top-level path.
    DTMIterator iter = WalkerFactory.newDTMIterator(this, opPos, (locPathDepth == 0));
    return (Expression)iter; // cast OK, I guess.
  }
  finally
  {
    locPathDepth--;
  }
}

/**
 * Compile a location step predicate expression. A predicate wrapper adds
 * no semantics of its own, so this just compiles the contained expression.
 *
 * @param opPos The current position in the m_opMap array.
 *
 * @return the contained predicate expression.
 *
 * @throws TransformerException if an error occurs creating the Expression.
 */
public Expression predicate(int opPos) throws TransformerException
{
  // opPos + 2 skips the op code and length slots.
  return compile(opPos + 2);
}
/**
 * Compile an entire match pattern expression. A match pattern is one or
 * more location path patterns separated by '|'; a single alternative is
 * compiled directly, multiple alternatives are wrapped in a UnionPattern.
 *
 * @param opPos The current position in the m_opMap array.
 *
 * @return reference to {@link org.apache.xpath.patterns.UnionPattern} instance,
 *         or the single compiled pattern when there is only one alternative.
 *
 * @throws TransformerException if an error occurs creating the Expression.
 */
protected Expression matchPattern(int opPos) throws TransformerException
{
  // Depth is tracked even across exceptions, hence the try/finally.
  locPathDepth++;
  try
  {
    // First pass: count the '|'-separated alternatives...
    int nextOpPos = opPos;
    int i;
    for (i = 0; getOp(nextOpPos) == OpCodes.OP_LOCATIONPATHPATTERN; i++)
    {
      nextOpPos = getNextOpPos(nextOpPos);
    }
    // Single alternative: no union wrapper needed.
    if (i == 1)
      return compile(opPos);
    // Second pass: compile each alternative into the union. nextOpPos is
    // captured before compiling because compile() does not advance opPos.
    UnionPattern up = new UnionPattern();
    StepPattern[] patterns = new StepPattern[i];
    for (i = 0; getOp(opPos) == OpCodes.OP_LOCATIONPATHPATTERN; i++)
    {
      nextOpPos = getNextOpPos(opPos);
      patterns[i] = (StepPattern) compile(opPos);
      opPos = nextOpPos;
    }
    up.setPatterns(patterns);
    return up;
  }
  finally
  {
    locPathDepth--;
  }
}
/**
 * Compile a single location match pattern unit expression (one '|'
 * alternative of a match pattern) by walking its step chain.
 *
 * @param opPos The current position in the m_opMap array.
 *
 * @return reference to {@link org.apache.xpath.patterns.StepPattern} instance.
 *
 * @throws TransformerException if an error occurs creating the Expression.
 */
public Expression locationPathPattern(int opPos)
        throws TransformerException
{
  return stepPattern(getFirstChildPos(opPos), 0, null);
}
/**
 * Get a {@link org.w3c.dom.traversal.NodeFilter} bit set that tells what
 * to show for a given node test.
 *
 * @param opPos the op map position for the location step.
 *
 * @return {@link org.w3c.dom.traversal.NodeFilter} bit set that tells what
 * to show for a given node test.
 */
public int getWhatToShow(int opPos)
{
  int axesType = getOp(opPos);
  // The node-test op code lives 3 slots past the step op code.
  int testType = getOp(opPos + 3);
  // System.out.println("testType: "+testType);
  switch (testType)
  {
  case OpCodes.NODETYPE_COMMENT :
    return DTMFilter.SHOW_COMMENT;
  case OpCodes.NODETYPE_TEXT :
    // return DTMFilter.SHOW_TEXT | DTMFilter.SHOW_COMMENT;
    // CDATA sections count as text for the text() node test.
    return DTMFilter.SHOW_TEXT | DTMFilter.SHOW_CDATA_SECTION ;
  case OpCodes.NODETYPE_PI :
    return DTMFilter.SHOW_PROCESSING_INSTRUCTION;
  case OpCodes.NODETYPE_NODE :
    // return DTMFilter.SHOW_ALL;
    // node() matches different sets depending on the axis it rides on.
    switch (axesType)
    {
    case OpCodes.FROM_NAMESPACE:
      return DTMFilter.SHOW_NAMESPACE;
    case OpCodes.FROM_ATTRIBUTES :
    case OpCodes.MATCH_ATTRIBUTE :
      return DTMFilter.SHOW_ATTRIBUTE;
    case OpCodes.FROM_SELF:
    case OpCodes.FROM_ANCESTORS_OR_SELF:
    case OpCodes.FROM_DESCENDANTS_OR_SELF:
      return DTMFilter.SHOW_ALL;
    default:
      // Inside a match pattern, node() also excludes document roots.
      if (getOp(0) == OpCodes.OP_MATCHPATTERN)
        return ~DTMFilter.SHOW_ATTRIBUTE
                & ~DTMFilter.SHOW_DOCUMENT
                & ~DTMFilter.SHOW_DOCUMENT_FRAGMENT;
      else
        return ~DTMFilter.SHOW_ATTRIBUTE;
    }
  case OpCodes.NODETYPE_ROOT :
    return DTMFilter.SHOW_DOCUMENT | DTMFilter.SHOW_DOCUMENT_FRAGMENT;
  case OpCodes.NODETYPE_FUNCTEST :
    return NodeTest.SHOW_BYFUNCTION;
  case OpCodes.NODENAME :
    // A name test: what it can match depends on the axis.
    switch (axesType)
    {
    case OpCodes.FROM_NAMESPACE :
      return DTMFilter.SHOW_NAMESPACE;
    case OpCodes.FROM_ATTRIBUTES :
    case OpCodes.MATCH_ATTRIBUTE :
      return DTMFilter.SHOW_ATTRIBUTE;
    // break;
    case OpCodes.MATCH_ANY_ANCESTOR :
    case OpCodes.MATCH_IMMEDIATE_ANCESTOR :
      return DTMFilter.SHOW_ELEMENT;
    // break;
    default :
      return DTMFilter.SHOW_ELEMENT;
    }
  default :
    // System.err.println("We should never reach here.");
    return DTMFilter.SHOW_ALL;
  }
}
// Enables trace output while building step patterns.
private static final boolean DEBUG = false;

/**
 * Compile a step pattern unit expression, used for both location paths
 * and match patterns. Recursively compiles the following step (if any)
 * and chains the patterns together via setRelativePathPattern.
 *
 * @param opPos The current position in the m_opMap array.
 * @param stepCount The number of steps processed so far (0 for the first).
 * @param ancestorPattern The owning StepPattern, which may be null.
 *
 * @return reference to {@link org.apache.xpath.patterns.StepPattern} instance,
 *         or null when the ENDOP marker is reached.
 *
 * @throws TransformerException if an error occurs creating the Expression.
 */
protected StepPattern stepPattern(
        int opPos, int stepCount, StepPattern ancestorPattern)
        throws TransformerException
{
  int startOpPos = opPos;
  int stepType = getOp(opPos);
  // ENDOP terminates the step chain.
  if (OpCodes.ENDOP == stepType)
  {
    return null;
  }
  boolean addMagicSelf = true;
  int endStep = getNextOpPos(opPos);
  // int nextStepType = getOpMap()[endStep];
  StepPattern pattern;
  // boolean isSimple = ((OpCodes.ENDOP == nextStepType) && (stepCount == 0));
  int argLen;
  switch (stepType)
  {
  case OpCodes.OP_FUNCTION :
    // id()/key() pattern step.
    if(DEBUG)
      System.out.println("MATCH_FUNCTION: "+m_currentPattern);
    addMagicSelf = false;
    argLen = getOp(opPos + OpMap.MAPINDEX_LENGTH);
    pattern = new FunctionPattern(compileFunction(opPos), Axis.PARENT, Axis.CHILD);
    break;
  case OpCodes.FROM_ROOT :
    if(DEBUG)
      System.out.println("FROM_ROOT, "+m_currentPattern);
    addMagicSelf = false;
    argLen = getArgLengthOfStep(opPos);
    opPos = getFirstChildPosOfStep(opPos);
    pattern = new StepPattern(DTMFilter.SHOW_DOCUMENT |
                              DTMFilter.SHOW_DOCUMENT_FRAGMENT,
                              Axis.PARENT, Axis.CHILD);
    break;
  case OpCodes.MATCH_ATTRIBUTE :
    if(DEBUG)
      System.out.println("MATCH_ATTRIBUTE: "+getStepLocalName(startOpPos)+", "+m_currentPattern);
    argLen = getArgLengthOfStep(opPos);
    opPos = getFirstChildPosOfStep(opPos);
    pattern = new StepPattern(DTMFilter.SHOW_ATTRIBUTE,
                              getStepNS(startOpPos),
                              getStepLocalName(startOpPos),
                              Axis.PARENT, Axis.ATTRIBUTE);
    break;
  case OpCodes.MATCH_ANY_ANCESTOR :
    if(DEBUG)
      System.out.println("MATCH_ANY_ANCESTOR: "+getStepLocalName(startOpPos)+", "+m_currentPattern);
    argLen = getArgLengthOfStep(opPos);
    opPos = getFirstChildPosOfStep(opPos);
    int what = getWhatToShow(startOpPos);
    // bit-o-hackery, but this code is due for the morgue anyway...
    // 0x00000500 = SHOW_TEXT | SHOW_CDATA_SECTION; skip the magic self
    // step for pure text patterns.
    if(0x00000500 == what)
      addMagicSelf = false;
    pattern = new StepPattern(getWhatToShow(startOpPos),
                              getStepNS(startOpPos),
                              getStepLocalName(startOpPos),
                              Axis.ANCESTOR, Axis.CHILD);
    break;
  case OpCodes.MATCH_IMMEDIATE_ANCESTOR :
    if(DEBUG)
      System.out.println("MATCH_IMMEDIATE_ANCESTOR: "+getStepLocalName(startOpPos)+", "+m_currentPattern);
    argLen = getArgLengthOfStep(opPos);
    opPos = getFirstChildPosOfStep(opPos);
    pattern = new StepPattern(getWhatToShow(startOpPos),
                              getStepNS(startOpPos),
                              getStepLocalName(startOpPos),
                              Axis.PARENT, Axis.CHILD);
    break;
  default :
    error(XPATHErrorResources.ER_UNKNOWN_MATCH_OPERATION, null); //"unknown match operation!");
    return null;
  }
  pattern.setPredicates(getCompiledPredicates(opPos + argLen));
  if(null == ancestorPattern)
  {
    // This is the magic and invisible "." at the head of every
    // match pattern, and corresponds to the current node in the context
    // list, from where predicates are counted.
    // So, in order to calculate "foo[3]", it has to count from the
    // current node in the context list, so, from that current node,
    // the full pattern is really "self::node()/child::foo[3]". If you
    // translate this to a select pattern from the node being tested,
    // which is really how we're treating match patterns, it works out to
    // self::foo/parent::node[child::foo[3]]", or close enough.
    /* if(addMagicSelf && pattern.getPredicateCount() > 0)
    {
      StepPattern selfPattern = new StepPattern(DTMFilter.SHOW_ALL,
                                                Axis.PARENT, Axis.CHILD);
      // We need to keep the new nodetest from affecting the score...
      XNumber score = pattern.getStaticScore();
      pattern.setRelativePathPattern(selfPattern);
      pattern.setStaticScore(score);
      selfPattern.setStaticScore(score);
    }*/
  }
  else
  {
    // System.out.println("Setting "+ancestorPattern+" as relative to "+pattern);
    pattern.setRelativePathPattern(ancestorPattern);
  }
  // Recurse to the next step; the last pattern in the chain is returned.
  StepPattern relativePathPattern = stepPattern(endStep, stepCount + 1,
                                                pattern);
  return (null != relativePathPattern) ? relativePathPattern : pattern;
}
/**
 * Compile zero or more predicates for a given match pattern or step.
 *
 * @param opPos The position of the first predicate in the m_opMap array.
 *
 * @return array of compiled {@link org.apache.xpath.Expression} instances,
 *         or null when there are no predicates.
 *
 * @throws TransformerException if an error occurs creating the Expression.
 */
public Expression[] getCompiledPredicates(int opPos)
        throws TransformerException
{
  final int n = countPredicates(opPos);
  if (n <= 0)
    return null;

  final Expression[] preds = new Expression[n];
  compilePredicates(opPos, preds);
  return preds;
}

/**
 * Count the number of predicates in the step.
 *
 * @param opPos The position of the first predicate in the m_opMap array.
 *
 * @return The number of predicates for this step.
 *
 * @throws TransformerException if an error occurs creating the Expression.
 */
public int countPredicates(int opPos) throws TransformerException
{
  int n = 0;
  for (int pos = opPos;
       OpCodes.OP_PREDICATE == getOp(pos);
       pos = getNextOpPos(pos))
  {
    n++;
  }
  return n;
}

/**
 * Compile the predicates in the step into a pre-sized array.
 *
 * @param opPos The position of the first predicate in the m_opMap array.
 * @param predicates An empty pre-determined array of
 *                   {@link org.apache.xpath.Expression}s that will be filled in.
 *
 * @throws TransformerException
 */
private void compilePredicates(int opPos, Expression[] predicates)
        throws TransformerException
{
  int i = 0;
  while (OpCodes.OP_PREDICATE == getOp(opPos))
  {
    predicates[i++] = predicate(opPos);
    opPos = getNextOpPos(opPos);
  }
}
/**
 * Compile a built-in XPath function call: look the function up in the
 * function table, compile each argument expression, and verify the
 * argument count.
 *
 * @param opPos The current position in the m_opMap array.
 *
 * @return reference to {@link org.apache.xpath.functions.Function} instance,
 *         or null after a non-throwing error report when the function
 *         token is missing.
 *
 * @throws TransformerException if an error occurs creating the Expression.
 */
Expression compileFunction(int opPos) throws TransformerException
{
  // End of the function's op-map region: opPos + its recorded length.
  int endFunc = opPos + getOp(opPos + 1) - 1;
  opPos = getFirstChildPos(opPos);
  int funcID = getOp(opPos);
  opPos++;
  if (-1 != funcID)
  {
    Function func = m_functionTable.getFunction(funcID);
    /**
     * It is a trick for function-available. Since the function table is an
     * instance field, insert this table at compilation time for later usage
     */
    if (func instanceof FuncExtFunctionAvailable)
      ((FuncExtFunctionAvailable) func).setFunctionTable(m_functionTable);
    func.postCompileStep(this);
    try
    {
      int i = 0;
      for (int p = opPos; p < endFunc; p = getNextOpPos(p), i++)
      {
        // System.out.println("argPos: "+ p);
        // System.out.println("argCode: "+ m_opMap[p]);
        func.setArg(compile(p), i);
      }
      func.checkNumberArgs(i);
    }
    catch (WrongNumberArgsException wnae)
    {
      // Route through error() rather than calling m_errorHandler directly:
      // m_errorHandler may legitimately be null (see the constructors), and
      // error() handles that case instead of throwing NullPointerException.
      java.lang.String name = m_functionTable.getFunctionName(funcID);
      error(XPATHErrorResources.ER_ONLY_ALLOWS,
            new Object[]{ name, wnae.getMessage() });
      //"name + " only allows " + wnae.getMessage() + " arguments"
    }
    return func;
  }
  else
  {
    error(XPATHErrorResources.ER_FUNCTION_TOKEN_NOT_FOUND, null); //"function token not found.");
    return null;
  }
}
// The current id for extension functions. Static: shared by ALL Compiler
// instances in the JVM.
private static long s_nextMethodId = 0;

/**
 * Get the next available method id.
 *
 * Fix: the counter is a static field, so synchronizing on the instance
 * (the old `synchronized` method modifier) did not protect it against
 * concurrent access from different Compiler instances. Lock on the class
 * instead so all instances serialize on the same monitor.
 */
private long getNextMethodId()
{
  synchronized (Compiler.class)
  {
    // Wrap around rather than overflowing into negative ids.
    if (s_nextMethodId == Long.MAX_VALUE)
      s_nextMethodId = 0;
    return s_nextMethodId++;
  }
}
/**
 * Compile an extension function call: read the namespace and local name
 * from the token queue, then compile each argument expression.
 *
 * @param opPos The current position in the m_opMap array.
 *
 * @return reference to {@link org.apache.xpath.functions.FuncExtFunction} instance.
 *
 * @throws TransformerException if an error occurs creating the Expression.
 */
private Expression compileExtension(int opPos)
        throws TransformerException
{
  // End of the function's op-map region: opPos + its recorded length.
  int endExtFunc = opPos + getOp(opPos + 1) - 1;
  opPos = getFirstChildPos(opPos);
  java.lang.String ns = (java.lang.String) getTokenQueue().elementAt(getOp(opPos));
  opPos++;
  java.lang.String funcName =
    (java.lang.String) getTokenQueue().elementAt(getOp(opPos));
  opPos++;
  // We create a method key to uniquely identify this function so that we
  // can cache the object needed to invoke it. This way, we only pay the
  // reflection overhead on the first call.
  Function extension = new FuncExtFunction(ns, funcName, String.valueOf(getNextMethodId()));
  try
  {
    int i = 0;
    while (opPos < endExtFunc)
    {
      int nextOpPos = getNextOpPos(opPos);
      extension.setArg(this.compile(opPos), i);
      opPos = nextOpPos;
      i++;
    }
  }
  catch (WrongNumberArgsException wnae)
  {
    // FuncExtFunction accepts any number of arguments, so setArg cannot
    // reject one; the catch exists only to satisfy the checked exception.
    ; // should never happen
  }
  return extension;
}
/**
 * Warn the user of a problem.
 *
 * @param msg An error msgkey that corresponds to one of the constants found
 *            in {@link org.apache.xpath.res.XPATHErrorResources}, which is
 *            a key for a format string.
 * @param args An array of arguments represented in the format string, which
 *             may be null.
 *
 * @throws TransformerException if the current ErrorListener determines to
 *                              throw an exception.
 */
public void warn(String msg, Object[] args) throws TransformerException
{
  java.lang.String fmsg = XSLMessages.createXPATHWarning(msg, args);
  if (null != m_errorHandler)
  {
    m_errorHandler.warning(new TransformerException(fmsg, m_locator));
  }
  else
  {
    // Per the documented contract (see m_errorHandler), a null listener
    // means messages go to System.err. Also guard m_locator, which the
    // constructors explicitly allow to be null — the old code would have
    // thrown NullPointerException here.
    if (null != m_locator)
    {
      System.err.println(fmsg
                         +"; file "+m_locator.getSystemId()
                         +"; line "+m_locator.getLineNumber()
                         +"; column "+m_locator.getColumnNumber());
    }
    else
    {
      System.err.println(fmsg);
    }
  }
}
/**
 * Tell the user of an assertion error, and probably throw an exception.
 *
 * @param b   If false, a runtime exception will be thrown.
 * @param msg The assertion message, which should be informative.
 *
 * @throws RuntimeException if the b argument is false.
 */
public void assertion(boolean b, java.lang.String msg)
{
  if (b)
    return;

  final java.lang.String fMsg = XSLMessages.createXPATHMessage(
      XPATHErrorResources.ER_INCORRECT_PROGRAMMER_ASSERTION,
      new Object[]{ msg });
  throw new RuntimeException(fMsg);
}
/**
 * Tell the user of an error, and probably throw an exception.
 *
 * @param msg An error msgkey that corresponds to one of the constants found
 *            in {@link org.apache.xpath.res.XPATHErrorResources}, which is
 *            a key for a format string.
 * @param args An array of arguments represented in the format string, which
 *             may be null.
 *
 * @throws TransformerException if the current ErrorListener determines to
 *                              throw an exception.
 */
public void error(String msg, Object[] args) throws TransformerException
{
  java.lang.String fmsg = XSLMessages.createXPATHMessage(msg, args);
  if (null != m_errorHandler)
  {
    m_errorHandler.fatalError(new TransformerException(fmsg, m_locator));
  }
  else
  {
    // System.out.println(te.getMessage()
    //            +"; file "+te.getSystemId()
    //            +"; line "+te.getLineNumber()
    //            +"; column "+te.getColumnNumber());
    // Fix: TransformerException(String, SourceLocator) accepts any
    // SourceLocator, so the old downcast to SAXSourceLocator was
    // unnecessary and would throw ClassCastException for any other
    // SourceLocator implementation.
    throw new TransformerException(fmsg, m_locator);
  }
}
/**
 * The current prefixResolver for the execution context.
 */
private PrefixResolver m_currentPrefixResolver = null;

/**
 * Get the current namespace context for the xpath.
 *
 * @return The current prefix resolver, *may* be null, though hopefully not.
 */
public PrefixResolver getNamespaceContext()
{
  return m_currentPrefixResolver;
}

/**
 * Set the current namespace context for the xpath.
 *
 * @param pr The resolver for prefixes in the XPath expression.
 */
public void setNamespaceContext(PrefixResolver pr)
{
  m_currentPrefixResolver = pr;
}

/** The error listener where errors will be sent. If this is null, errors
 *  and warnings will be sent to System.err. May be null. */
ErrorListener m_errorHandler;

/** The source locator for the expression being compiled. May be null. */
SourceLocator m_locator;

/**
 * The FunctionTable for all xpath built-in functions. Null when the
 * no-arg constructor was used (see constructor note).
 */
private FunctionTable m_functionTable;
}
|
googleapis/google-cloud-java | 37,958 | java-dataplex/proto-google-cloud-dataplex-v1/src/main/java/com/google/cloud/dataplex/v1/CreateEncryptionConfigRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/dataplex/v1/cmek.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.dataplex.v1;
/**
*
*
* <pre>
* Create EncryptionConfig Request
* </pre>
*
* Protobuf type {@code google.cloud.dataplex.v1.CreateEncryptionConfigRequest}
*/
// Generated protobuf request message for creating an EncryptionConfig.
// Wire fields: parent = 1 (string), encryption_config_id = 2 (string),
// encryption_config = 3 (sub-message; explicit presence tracked in bitField0_).
public final class CreateEncryptionConfigRequest extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.dataplex.v1.CreateEncryptionConfigRequest)
    CreateEncryptionConfigRequestOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use CreateEncryptionConfigRequest.newBuilder() to construct.
  private CreateEncryptionConfigRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // Default instance constructor: string fields start at the proto3 default "".
  private CreateEncryptionConfigRequest() {
    parent_ = "";
    encryptionConfigId_ = "";
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new CreateEncryptionConfigRequest();
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.dataplex.v1.CmekProto
        .internal_static_google_cloud_dataplex_v1_CreateEncryptionConfigRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.dataplex.v1.CmekProto
        .internal_static_google_cloud_dataplex_v1_CreateEncryptionConfigRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.dataplex.v1.CreateEncryptionConfigRequest.class,
            com.google.cloud.dataplex.v1.CreateEncryptionConfigRequest.Builder.class);
  }

  // Presence bits for optional sub-message fields: 0x00000001 == encryption_config is set.
  private int bitField0_;
  public static final int PARENT_FIELD_NUMBER = 1;

  @SuppressWarnings("serial")
  private volatile java.lang.Object parent_ = "";

  /**
   *
   *
   * <pre>
   * Required. The location at which the EncryptionConfig is to be created.
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The parent.
   */
  @java.lang.Override
  public java.lang.String getParent() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Field was parsed as bytes; decode once and cache the String form.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      parent_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * Required. The location at which the EncryptionConfig is to be created.
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The bytes for parent.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getParentBytes() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      // Cache the UTF-8 encoded form so repeated byte accesses avoid re-encoding.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      parent_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int ENCRYPTION_CONFIG_ID_FIELD_NUMBER = 2;

  @SuppressWarnings("serial")
  private volatile java.lang.Object encryptionConfigId_ = "";

  /**
   *
   *
   * <pre>
   * Required. The ID of the
   * [EncryptionConfig][google.cloud.dataplex.v1.EncryptionConfig] to create.
   * Currently, only a value of "default" is supported.
   * </pre>
   *
   * <code>string encryption_config_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The encryptionConfigId.
   */
  @java.lang.Override
  public java.lang.String getEncryptionConfigId() {
    java.lang.Object ref = encryptionConfigId_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Field was parsed as bytes; decode once and cache the String form.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      encryptionConfigId_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * Required. The ID of the
   * [EncryptionConfig][google.cloud.dataplex.v1.EncryptionConfig] to create.
   * Currently, only a value of "default" is supported.
   * </pre>
   *
   * <code>string encryption_config_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The bytes for encryptionConfigId.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getEncryptionConfigIdBytes() {
    java.lang.Object ref = encryptionConfigId_;
    if (ref instanceof java.lang.String) {
      // Cache the UTF-8 encoded form so repeated byte accesses avoid re-encoding.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      encryptionConfigId_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int ENCRYPTION_CONFIG_FIELD_NUMBER = 3;
  private com.google.cloud.dataplex.v1.EncryptionConfig encryptionConfig_;

  /**
   *
   *
   * <pre>
   * Required. The EncryptionConfig to create.
   * </pre>
   *
   * <code>
   * .google.cloud.dataplex.v1.EncryptionConfig encryption_config = 3 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return Whether the encryptionConfig field is set.
   */
  @java.lang.Override
  public boolean hasEncryptionConfig() {
    // Presence of this sub-message is tracked by bit 0x00000001.
    return ((bitField0_ & 0x00000001) != 0);
  }

  /**
   *
   *
   * <pre>
   * Required. The EncryptionConfig to create.
   * </pre>
   *
   * <code>
   * .google.cloud.dataplex.v1.EncryptionConfig encryption_config = 3 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The encryptionConfig.
   */
  @java.lang.Override
  public com.google.cloud.dataplex.v1.EncryptionConfig getEncryptionConfig() {
    // Never returns null: falls back to the default instance when unset.
    return encryptionConfig_ == null
        ? com.google.cloud.dataplex.v1.EncryptionConfig.getDefaultInstance()
        : encryptionConfig_;
  }

  /**
   *
   *
   * <pre>
   * Required. The EncryptionConfig to create.
   * </pre>
   *
   * <code>
   * .google.cloud.dataplex.v1.EncryptionConfig encryption_config = 3 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.dataplex.v1.EncryptionConfigOrBuilder getEncryptionConfigOrBuilder() {
    return encryptionConfig_ == null
        ? com.google.cloud.dataplex.v1.EncryptionConfig.getDefaultInstance()
        : encryptionConfig_;
  }

  // Memoized initialization state: -1 = not yet computed, 1 = initialized, 0 = not initialized.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    // No required proto2-style fields, so the message is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Fields are written in ascending field-number order; default-valued
    // (empty) strings and unset sub-messages are skipped per proto3 rules.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(encryptionConfigId_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, encryptionConfigId_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(3, getEncryptionConfig());
    }
    getUnknownFields().writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    // Size computation mirrors writeTo and is memoized in memoizedSize (-1 = unknown).
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(encryptionConfigId_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, encryptionConfigId_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getEncryptionConfig());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.dataplex.v1.CreateEncryptionConfigRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.dataplex.v1.CreateEncryptionConfigRequest other =
        (com.google.cloud.dataplex.v1.CreateEncryptionConfigRequest) obj;

    // Field-by-field comparison; the sub-message compares presence first.
    if (!getParent().equals(other.getParent())) return false;
    if (!getEncryptionConfigId().equals(other.getEncryptionConfigId())) return false;
    if (hasEncryptionConfig() != other.hasEncryptionConfig()) return false;
    if (hasEncryptionConfig()) {
      if (!getEncryptionConfig().equals(other.getEncryptionConfig())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    // Standard generated hash: seeded with the descriptor, then each set field
    // mixed in as (37 * hash) + fieldNumber followed by (53 * hash) + valueHash.
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    hash = (37 * hash) + ENCRYPTION_CONFIG_ID_FIELD_NUMBER;
    hash = (53 * hash) + getEncryptionConfigId().hashCode();
    if (hasEncryptionConfig()) {
      hash = (37 * hash) + ENCRYPTION_CONFIG_FIELD_NUMBER;
      hash = (53 * hash) + getEncryptionConfig().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  // Static parseFrom overloads: all delegate to the shared PARSER instance.
  public static com.google.cloud.dataplex.v1.CreateEncryptionConfigRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.dataplex.v1.CreateEncryptionConfigRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.dataplex.v1.CreateEncryptionConfigRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.dataplex.v1.CreateEncryptionConfigRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.dataplex.v1.CreateEncryptionConfigRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.dataplex.v1.CreateEncryptionConfigRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.dataplex.v1.CreateEncryptionConfigRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.dataplex.v1.CreateEncryptionConfigRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.dataplex.v1.CreateEncryptionConfigRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.dataplex.v1.CreateEncryptionConfigRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.dataplex.v1.CreateEncryptionConfigRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.dataplex.v1.CreateEncryptionConfigRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.cloud.dataplex.v1.CreateEncryptionConfigRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    // Avoid an unnecessary mergeFrom when converting the default instance.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   *
   *
   * <pre>
   * Create EncryptionConfig Request
   * </pre>
   *
   * Protobuf type {@code google.cloud.dataplex.v1.CreateEncryptionConfigRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.dataplex.v1.CreateEncryptionConfigRequest)
      com.google.cloud.dataplex.v1.CreateEncryptionConfigRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.dataplex.v1.CmekProto
          .internal_static_google_cloud_dataplex_v1_CreateEncryptionConfigRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.dataplex.v1.CmekProto
          .internal_static_google_cloud_dataplex_v1_CreateEncryptionConfigRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.dataplex.v1.CreateEncryptionConfigRequest.class,
              com.google.cloud.dataplex.v1.CreateEncryptionConfigRequest.Builder.class);
    }

    // Construct using com.google.cloud.dataplex.v1.CreateEncryptionConfigRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    private void maybeForceBuilderInitialization() {
      // Eagerly create nested field builders only when the runtime requires it
      // (e.g. for nested-builder change propagation).
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getEncryptionConfigFieldBuilder();
      }
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      parent_ = "";
      encryptionConfigId_ = "";
      encryptionConfig_ = null;
      if (encryptionConfigBuilder_ != null) {
        encryptionConfigBuilder_.dispose();
        encryptionConfigBuilder_ = null;
      }
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.dataplex.v1.CmekProto
          .internal_static_google_cloud_dataplex_v1_CreateEncryptionConfigRequest_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.dataplex.v1.CreateEncryptionConfigRequest getDefaultInstanceForType() {
      return com.google.cloud.dataplex.v1.CreateEncryptionConfigRequest.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.dataplex.v1.CreateEncryptionConfigRequest build() {
      com.google.cloud.dataplex.v1.CreateEncryptionConfigRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.dataplex.v1.CreateEncryptionConfigRequest buildPartial() {
      com.google.cloud.dataplex.v1.CreateEncryptionConfigRequest result =
          new com.google.cloud.dataplex.v1.CreateEncryptionConfigRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    private void buildPartial0(com.google.cloud.dataplex.v1.CreateEncryptionConfigRequest result) {
      // Builder bits: 0x1 = parent, 0x2 = encryption_config_id, 0x4 = encryption_config.
      // Only the sub-message bit maps to a presence bit (0x1) on the built message.
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.parent_ = parent_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.encryptionConfigId_ = encryptionConfigId_;
      }
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.encryptionConfig_ =
            encryptionConfigBuilder_ == null ? encryptionConfig_ : encryptionConfigBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      result.bitField0_ |= to_bitField0_;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.dataplex.v1.CreateEncryptionConfigRequest) {
        return mergeFrom((com.google.cloud.dataplex.v1.CreateEncryptionConfigRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.cloud.dataplex.v1.CreateEncryptionConfigRequest other) {
      if (other == com.google.cloud.dataplex.v1.CreateEncryptionConfigRequest.getDefaultInstance())
        return this;
      // Proto3 merge semantics: non-empty scalars overwrite, set sub-messages recurse.
      if (!other.getParent().isEmpty()) {
        parent_ = other.parent_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (!other.getEncryptionConfigId().isEmpty()) {
        encryptionConfigId_ = other.encryptionConfigId_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (other.hasEncryptionConfig()) {
        mergeEncryptionConfig(other.getEncryptionConfig());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          // tag = (field_number << 3) | wire_type; 10/18 are length-delimited
          // strings (fields 1 and 2), 26 is the nested message (field 3).
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                parent_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                encryptionConfigId_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 26:
              {
                input.readMessage(
                    getEncryptionConfigFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    private int bitField0_;

    private java.lang.Object parent_ = "";

    /**
     *
     *
     * <pre>
     * Required. The location at which the EncryptionConfig is to be created.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The parent.
     */
    public java.lang.String getParent() {
      java.lang.Object ref = parent_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        parent_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Required. The location at which the EncryptionConfig is to be created.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The bytes for parent.
     */
    public com.google.protobuf.ByteString getParentBytes() {
      java.lang.Object ref = parent_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        parent_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Required. The location at which the EncryptionConfig is to be created.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The parent to set.
     * @return This builder for chaining.
     */
    public Builder setParent(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. The location at which the EncryptionConfig is to be created.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearParent() {
      parent_ = getDefaultInstance().getParent();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. The location at which the EncryptionConfig is to be created.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The bytes for parent to set.
     * @return This builder for chaining.
     */
    public Builder setParentBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    private java.lang.Object encryptionConfigId_ = "";

    /**
     *
     *
     * <pre>
     * Required. The ID of the
     * [EncryptionConfig][google.cloud.dataplex.v1.EncryptionConfig] to create.
     * Currently, only a value of "default" is supported.
     * </pre>
     *
     * <code>string encryption_config_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return The encryptionConfigId.
     */
    public java.lang.String getEncryptionConfigId() {
      java.lang.Object ref = encryptionConfigId_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        encryptionConfigId_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Required. The ID of the
     * [EncryptionConfig][google.cloud.dataplex.v1.EncryptionConfig] to create.
     * Currently, only a value of "default" is supported.
     * </pre>
     *
     * <code>string encryption_config_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return The bytes for encryptionConfigId.
     */
    public com.google.protobuf.ByteString getEncryptionConfigIdBytes() {
      java.lang.Object ref = encryptionConfigId_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        encryptionConfigId_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Required. The ID of the
     * [EncryptionConfig][google.cloud.dataplex.v1.EncryptionConfig] to create.
     * Currently, only a value of "default" is supported.
     * </pre>
     *
     * <code>string encryption_config_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @param value The encryptionConfigId to set.
     * @return This builder for chaining.
     */
    public Builder setEncryptionConfigId(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      encryptionConfigId_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. The ID of the
     * [EncryptionConfig][google.cloud.dataplex.v1.EncryptionConfig] to create.
     * Currently, only a value of "default" is supported.
     * </pre>
     *
     * <code>string encryption_config_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearEncryptionConfigId() {
      encryptionConfigId_ = getDefaultInstance().getEncryptionConfigId();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. The ID of the
     * [EncryptionConfig][google.cloud.dataplex.v1.EncryptionConfig] to create.
     * Currently, only a value of "default" is supported.
     * </pre>
     *
     * <code>string encryption_config_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @param value The bytes for encryptionConfigId to set.
     * @return This builder for chaining.
     */
    public Builder setEncryptionConfigIdBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      encryptionConfigId_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    private com.google.cloud.dataplex.v1.EncryptionConfig encryptionConfig_;
    // Lazily-created nested builder; while non-null it owns the field's value
    // and encryptionConfig_ above is ignored.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.dataplex.v1.EncryptionConfig,
            com.google.cloud.dataplex.v1.EncryptionConfig.Builder,
            com.google.cloud.dataplex.v1.EncryptionConfigOrBuilder>
        encryptionConfigBuilder_;

    /**
     *
     *
     * <pre>
     * Required. The EncryptionConfig to create.
     * </pre>
     *
     * <code>
     * .google.cloud.dataplex.v1.EncryptionConfig encryption_config = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return Whether the encryptionConfig field is set.
     */
    public boolean hasEncryptionConfig() {
      return ((bitField0_ & 0x00000004) != 0);
    }

    /**
     *
     *
     * <pre>
     * Required. The EncryptionConfig to create.
     * </pre>
     *
     * <code>
     * .google.cloud.dataplex.v1.EncryptionConfig encryption_config = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return The encryptionConfig.
     */
    public com.google.cloud.dataplex.v1.EncryptionConfig getEncryptionConfig() {
      if (encryptionConfigBuilder_ == null) {
        return encryptionConfig_ == null
            ? com.google.cloud.dataplex.v1.EncryptionConfig.getDefaultInstance()
            : encryptionConfig_;
      } else {
        return encryptionConfigBuilder_.getMessage();
      }
    }

    /**
     *
     *
     * <pre>
     * Required. The EncryptionConfig to create.
     * </pre>
     *
     * <code>
     * .google.cloud.dataplex.v1.EncryptionConfig encryption_config = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setEncryptionConfig(com.google.cloud.dataplex.v1.EncryptionConfig value) {
      if (encryptionConfigBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        encryptionConfig_ = value;
      } else {
        encryptionConfigBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. The EncryptionConfig to create.
     * </pre>
     *
     * <code>
     * .google.cloud.dataplex.v1.EncryptionConfig encryption_config = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setEncryptionConfig(
        com.google.cloud.dataplex.v1.EncryptionConfig.Builder builderForValue) {
      if (encryptionConfigBuilder_ == null) {
        encryptionConfig_ = builderForValue.build();
      } else {
        encryptionConfigBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. The EncryptionConfig to create.
     * </pre>
     *
     * <code>
     * .google.cloud.dataplex.v1.EncryptionConfig encryption_config = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder mergeEncryptionConfig(com.google.cloud.dataplex.v1.EncryptionConfig value) {
      if (encryptionConfigBuilder_ == null) {
        // Merge into an existing non-default value; otherwise just adopt the new value.
        if (((bitField0_ & 0x00000004) != 0)
            && encryptionConfig_ != null
            && encryptionConfig_
                != com.google.cloud.dataplex.v1.EncryptionConfig.getDefaultInstance()) {
          getEncryptionConfigBuilder().mergeFrom(value);
        } else {
          encryptionConfig_ = value;
        }
      } else {
        encryptionConfigBuilder_.mergeFrom(value);
      }
      if (encryptionConfig_ != null) {
        bitField0_ |= 0x00000004;
        onChanged();
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. The EncryptionConfig to create.
     * </pre>
     *
     * <code>
     * .google.cloud.dataplex.v1.EncryptionConfig encryption_config = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder clearEncryptionConfig() {
      bitField0_ = (bitField0_ & ~0x00000004);
      encryptionConfig_ = null;
      if (encryptionConfigBuilder_ != null) {
        encryptionConfigBuilder_.dispose();
        encryptionConfigBuilder_ = null;
      }
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. The EncryptionConfig to create.
     * </pre>
     *
     * <code>
     * .google.cloud.dataplex.v1.EncryptionConfig encryption_config = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.dataplex.v1.EncryptionConfig.Builder getEncryptionConfigBuilder() {
      bitField0_ |= 0x00000004;
      onChanged();
      return getEncryptionConfigFieldBuilder().getBuilder();
    }

    /**
     *
     *
     * <pre>
     * Required. The EncryptionConfig to create.
     * </pre>
     *
     * <code>
     * .google.cloud.dataplex.v1.EncryptionConfig encryption_config = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.dataplex.v1.EncryptionConfigOrBuilder getEncryptionConfigOrBuilder() {
      if (encryptionConfigBuilder_ != null) {
        return encryptionConfigBuilder_.getMessageOrBuilder();
      } else {
        return encryptionConfig_ == null
            ? com.google.cloud.dataplex.v1.EncryptionConfig.getDefaultInstance()
            : encryptionConfig_;
      }
    }

    /**
     *
     *
     * <pre>
     * Required. The EncryptionConfig to create.
     * </pre>
     *
     * <code>
     * .google.cloud.dataplex.v1.EncryptionConfig encryption_config = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.dataplex.v1.EncryptionConfig,
            com.google.cloud.dataplex.v1.EncryptionConfig.Builder,
            com.google.cloud.dataplex.v1.EncryptionConfigOrBuilder>
        getEncryptionConfigFieldBuilder() {
      if (encryptionConfigBuilder_ == null) {
        // Transfer ownership of the current value into the lazily-created builder.
        encryptionConfigBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.dataplex.v1.EncryptionConfig,
                com.google.cloud.dataplex.v1.EncryptionConfig.Builder,
                com.google.cloud.dataplex.v1.EncryptionConfigOrBuilder>(
                getEncryptionConfig(), getParentForChildren(), isClean());
        encryptionConfig_ = null;
      }
      return encryptionConfigBuilder_;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.dataplex.v1.CreateEncryptionConfigRequest)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.dataplex.v1.CreateEncryptionConfigRequest)
  private static final com.google.cloud.dataplex.v1.CreateEncryptionConfigRequest DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.dataplex.v1.CreateEncryptionConfigRequest();
  }

  public static com.google.cloud.dataplex.v1.CreateEncryptionConfigRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Shared stateless parser; parsing errors carry the partially-built message.
  private static final com.google.protobuf.Parser<CreateEncryptionConfigRequest> PARSER =
      new com.google.protobuf.AbstractParser<CreateEncryptionConfigRequest>() {
        @java.lang.Override
        public CreateEncryptionConfigRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<CreateEncryptionConfigRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<CreateEncryptionConfigRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.dataplex.v1.CreateEncryptionConfigRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/sdk-platform-java | 37,902 | java-common-protos/proto-google-common-protos/src/main/java/com/google/rpc/Status.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/rpc/status.proto
// Protobuf Java Version: 3.25.8
package com.google.rpc;
/**
*
*
* <pre>
* The `Status` type defines a logical error model that is suitable for
* different programming environments, including REST APIs and RPC APIs. It is
* used by [gRPC](https://github.com/grpc). Each `Status` message contains
* three pieces of data: error code, error message, and error details.
*
* You can find out more about this error model and how to work with it in the
* [API Design Guide](https://cloud.google.com/apis/design/errors).
* </pre>
*
* Protobuf type {@code google.rpc.Status}
*/
public final class Status extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.rpc.Status)
StatusOrBuilder {
private static final long serialVersionUID = 0L;
// Use Status.newBuilder() to construct.
private Status(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private Status() {
message_ = "";
details_ = java.util.Collections.emptyList();
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new Status();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.rpc.StatusProto.internal_static_google_rpc_Status_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.rpc.StatusProto.internal_static_google_rpc_Status_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.rpc.Status.class, com.google.rpc.Status.Builder.class);
}
public static final int CODE_FIELD_NUMBER = 1;
private int code_ = 0;
/**
*
*
* <pre>
* The status code, which should be an enum value of
* [google.rpc.Code][google.rpc.Code].
* </pre>
*
* <code>int32 code = 1;</code>
*
* @return The code.
*/
@java.lang.Override
public int getCode() {
return code_;
}
public static final int MESSAGE_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object message_ = "";
/**
*
*
* <pre>
* A developer-facing error message, which should be in English. Any
* user-facing error message should be localized and sent in the
* [google.rpc.Status.details][google.rpc.Status.details] field, or localized
* by the client.
* </pre>
*
* <code>string message = 2;</code>
*
* @return The message.
*/
@java.lang.Override
public java.lang.String getMessage() {
java.lang.Object ref = message_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
message_ = s;
return s;
}
}
/**
*
*
* <pre>
* A developer-facing error message, which should be in English. Any
* user-facing error message should be localized and sent in the
* [google.rpc.Status.details][google.rpc.Status.details] field, or localized
* by the client.
* </pre>
*
* <code>string message = 2;</code>
*
* @return The bytes for message.
*/
@java.lang.Override
public com.google.protobuf.ByteString getMessageBytes() {
java.lang.Object ref = message_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
message_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int DETAILS_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private java.util.List<com.google.protobuf.Any> details_;
/**
*
*
* <pre>
* A list of messages that carry the error details. There is a common set of
* message types for APIs to use.
* </pre>
*
* <code>repeated .google.protobuf.Any details = 3;</code>
*/
@java.lang.Override
public java.util.List<com.google.protobuf.Any> getDetailsList() {
return details_;
}
/**
*
*
* <pre>
* A list of messages that carry the error details. There is a common set of
* message types for APIs to use.
* </pre>
*
* <code>repeated .google.protobuf.Any details = 3;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.protobuf.AnyOrBuilder> getDetailsOrBuilderList() {
return details_;
}
/**
*
*
* <pre>
* A list of messages that carry the error details. There is a common set of
* message types for APIs to use.
* </pre>
*
* <code>repeated .google.protobuf.Any details = 3;</code>
*/
@java.lang.Override
public int getDetailsCount() {
return details_.size();
}
/**
*
*
* <pre>
* A list of messages that carry the error details. There is a common set of
* message types for APIs to use.
* </pre>
*
* <code>repeated .google.protobuf.Any details = 3;</code>
*/
@java.lang.Override
public com.google.protobuf.Any getDetails(int index) {
return details_.get(index);
}
/**
*
*
* <pre>
* A list of messages that carry the error details. There is a common set of
* message types for APIs to use.
* </pre>
*
* <code>repeated .google.protobuf.Any details = 3;</code>
*/
@java.lang.Override
public com.google.protobuf.AnyOrBuilder getDetailsOrBuilder(int index) {
return details_.get(index);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (code_ != 0) {
output.writeInt32(1, code_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(message_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, message_);
}
for (int i = 0; i < details_.size(); i++) {
output.writeMessage(3, details_.get(i));
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (code_ != 0) {
size += com.google.protobuf.CodedOutputStream.computeInt32Size(1, code_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(message_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, message_);
}
for (int i = 0; i < details_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, details_.get(i));
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.rpc.Status)) {
return super.equals(obj);
}
com.google.rpc.Status other = (com.google.rpc.Status) obj;
if (getCode() != other.getCode()) return false;
if (!getMessage().equals(other.getMessage())) return false;
if (!getDetailsList().equals(other.getDetailsList())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + CODE_FIELD_NUMBER;
hash = (53 * hash) + getCode();
hash = (37 * hash) + MESSAGE_FIELD_NUMBER;
hash = (53 * hash) + getMessage().hashCode();
if (getDetailsCount() > 0) {
hash = (37 * hash) + DETAILS_FIELD_NUMBER;
hash = (53 * hash) + getDetailsList().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.rpc.Status parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.rpc.Status parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.rpc.Status parseFrom(com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.rpc.Status parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.rpc.Status parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.rpc.Status parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.rpc.Status parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.rpc.Status parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.rpc.Status parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.rpc.Status parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.rpc.Status parseFrom(com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.rpc.Status parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.rpc.Status prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* The `Status` type defines a logical error model that is suitable for
* different programming environments, including REST APIs and RPC APIs. It is
* used by [gRPC](https://github.com/grpc). Each `Status` message contains
* three pieces of data: error code, error message, and error details.
*
* You can find out more about this error model and how to work with it in the
* [API Design Guide](https://cloud.google.com/apis/design/errors).
* </pre>
*
* Protobuf type {@code google.rpc.Status}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.rpc.Status)
com.google.rpc.StatusOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.rpc.StatusProto.internal_static_google_rpc_Status_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.rpc.StatusProto.internal_static_google_rpc_Status_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.rpc.Status.class, com.google.rpc.Status.Builder.class);
}
// Construct using com.google.rpc.Status.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
code_ = 0;
message_ = "";
if (detailsBuilder_ == null) {
details_ = java.util.Collections.emptyList();
} else {
details_ = null;
detailsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000004);
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.rpc.StatusProto.internal_static_google_rpc_Status_descriptor;
}
@java.lang.Override
public com.google.rpc.Status getDefaultInstanceForType() {
return com.google.rpc.Status.getDefaultInstance();
}
@java.lang.Override
public com.google.rpc.Status build() {
com.google.rpc.Status result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.rpc.Status buildPartial() {
com.google.rpc.Status result = new com.google.rpc.Status(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(com.google.rpc.Status result) {
if (detailsBuilder_ == null) {
if (((bitField0_ & 0x00000004) != 0)) {
details_ = java.util.Collections.unmodifiableList(details_);
bitField0_ = (bitField0_ & ~0x00000004);
}
result.details_ = details_;
} else {
result.details_ = detailsBuilder_.build();
}
}
private void buildPartial0(com.google.rpc.Status result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.code_ = code_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.message_ = message_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.rpc.Status) {
return mergeFrom((com.google.rpc.Status) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.rpc.Status other) {
if (other == com.google.rpc.Status.getDefaultInstance()) return this;
if (other.getCode() != 0) {
setCode(other.getCode());
}
if (!other.getMessage().isEmpty()) {
message_ = other.message_;
bitField0_ |= 0x00000002;
onChanged();
}
if (detailsBuilder_ == null) {
if (!other.details_.isEmpty()) {
if (details_.isEmpty()) {
details_ = other.details_;
bitField0_ = (bitField0_ & ~0x00000004);
} else {
ensureDetailsIsMutable();
details_.addAll(other.details_);
}
onChanged();
}
} else {
if (!other.details_.isEmpty()) {
if (detailsBuilder_.isEmpty()) {
detailsBuilder_.dispose();
detailsBuilder_ = null;
details_ = other.details_;
bitField0_ = (bitField0_ & ~0x00000004);
detailsBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getDetailsFieldBuilder()
: null;
} else {
detailsBuilder_.addAllMessages(other.details_);
}
}
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 8:
{
code_ = input.readInt32();
bitField0_ |= 0x00000001;
break;
} // case 8
case 18:
{
message_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
case 26:
{
com.google.protobuf.Any m =
input.readMessage(com.google.protobuf.Any.parser(), extensionRegistry);
if (detailsBuilder_ == null) {
ensureDetailsIsMutable();
details_.add(m);
} else {
detailsBuilder_.addMessage(m);
}
break;
} // case 26
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private int code_;
/**
*
*
* <pre>
* The status code, which should be an enum value of
* [google.rpc.Code][google.rpc.Code].
* </pre>
*
* <code>int32 code = 1;</code>
*
* @return The code.
*/
@java.lang.Override
public int getCode() {
return code_;
}
/**
*
*
* <pre>
* The status code, which should be an enum value of
* [google.rpc.Code][google.rpc.Code].
* </pre>
*
* <code>int32 code = 1;</code>
*
* @param value The code to set.
* @return This builder for chaining.
*/
public Builder setCode(int value) {
code_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* The status code, which should be an enum value of
* [google.rpc.Code][google.rpc.Code].
* </pre>
*
* <code>int32 code = 1;</code>
*
* @return This builder for chaining.
*/
public Builder clearCode() {
bitField0_ = (bitField0_ & ~0x00000001);
code_ = 0;
onChanged();
return this;
}
private java.lang.Object message_ = "";
/**
*
*
* <pre>
* A developer-facing error message, which should be in English. Any
* user-facing error message should be localized and sent in the
* [google.rpc.Status.details][google.rpc.Status.details] field, or localized
* by the client.
* </pre>
*
* <code>string message = 2;</code>
*
* @return The message.
*/
public java.lang.String getMessage() {
java.lang.Object ref = message_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
message_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* A developer-facing error message, which should be in English. Any
* user-facing error message should be localized and sent in the
* [google.rpc.Status.details][google.rpc.Status.details] field, or localized
* by the client.
* </pre>
*
* <code>string message = 2;</code>
*
* @return The bytes for message.
*/
public com.google.protobuf.ByteString getMessageBytes() {
java.lang.Object ref = message_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
message_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* A developer-facing error message, which should be in English. Any
* user-facing error message should be localized and sent in the
* [google.rpc.Status.details][google.rpc.Status.details] field, or localized
* by the client.
* </pre>
*
* <code>string message = 2;</code>
*
* @param value The message to set.
* @return This builder for chaining.
*/
public Builder setMessage(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
message_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* A developer-facing error message, which should be in English. Any
* user-facing error message should be localized and sent in the
* [google.rpc.Status.details][google.rpc.Status.details] field, or localized
* by the client.
* </pre>
*
* <code>string message = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearMessage() {
message_ = getDefaultInstance().getMessage();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* A developer-facing error message, which should be in English. Any
* user-facing error message should be localized and sent in the
* [google.rpc.Status.details][google.rpc.Status.details] field, or localized
* by the client.
* </pre>
*
* <code>string message = 2;</code>
*
* @param value The bytes for message to set.
* @return This builder for chaining.
*/
public Builder setMessageBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
message_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private java.util.List<com.google.protobuf.Any> details_ = java.util.Collections.emptyList();
private void ensureDetailsIsMutable() {
if (!((bitField0_ & 0x00000004) != 0)) {
details_ = new java.util.ArrayList<com.google.protobuf.Any>(details_);
bitField0_ |= 0x00000004;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.protobuf.Any,
com.google.protobuf.Any.Builder,
com.google.protobuf.AnyOrBuilder>
detailsBuilder_;
/**
*
*
* <pre>
* A list of messages that carry the error details. There is a common set of
* message types for APIs to use.
* </pre>
*
* <code>repeated .google.protobuf.Any details = 3;</code>
*/
public java.util.List<com.google.protobuf.Any> getDetailsList() {
if (detailsBuilder_ == null) {
return java.util.Collections.unmodifiableList(details_);
} else {
return detailsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* A list of messages that carry the error details. There is a common set of
* message types for APIs to use.
* </pre>
*
* <code>repeated .google.protobuf.Any details = 3;</code>
*/
public int getDetailsCount() {
if (detailsBuilder_ == null) {
return details_.size();
} else {
return detailsBuilder_.getCount();
}
}
/**
*
*
* <pre>
* A list of messages that carry the error details. There is a common set of
* message types for APIs to use.
* </pre>
*
* <code>repeated .google.protobuf.Any details = 3;</code>
*/
public com.google.protobuf.Any getDetails(int index) {
if (detailsBuilder_ == null) {
return details_.get(index);
} else {
return detailsBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* A list of messages that carry the error details. There is a common set of
* message types for APIs to use.
* </pre>
*
* <code>repeated .google.protobuf.Any details = 3;</code>
*/
public Builder setDetails(int index, com.google.protobuf.Any value) {
if (detailsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureDetailsIsMutable();
details_.set(index, value);
onChanged();
} else {
detailsBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* A list of messages that carry the error details. There is a common set of
* message types for APIs to use.
* </pre>
*
* <code>repeated .google.protobuf.Any details = 3;</code>
*/
public Builder setDetails(int index, com.google.protobuf.Any.Builder builderForValue) {
if (detailsBuilder_ == null) {
ensureDetailsIsMutable();
details_.set(index, builderForValue.build());
onChanged();
} else {
detailsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* A list of messages that carry the error details. There is a common set of
* message types for APIs to use.
* </pre>
*
* <code>repeated .google.protobuf.Any details = 3;</code>
*/
public Builder addDetails(com.google.protobuf.Any value) {
if (detailsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureDetailsIsMutable();
details_.add(value);
onChanged();
} else {
detailsBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* A list of messages that carry the error details. There is a common set of
* message types for APIs to use.
* </pre>
*
* <code>repeated .google.protobuf.Any details = 3;</code>
*/
public Builder addDetails(int index, com.google.protobuf.Any value) {
if (detailsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureDetailsIsMutable();
details_.add(index, value);
onChanged();
} else {
detailsBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* A list of messages that carry the error details. There is a common set of
* message types for APIs to use.
* </pre>
*
* <code>repeated .google.protobuf.Any details = 3;</code>
*/
public Builder addDetails(com.google.protobuf.Any.Builder builderForValue) {
if (detailsBuilder_ == null) {
ensureDetailsIsMutable();
details_.add(builderForValue.build());
onChanged();
} else {
detailsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* A list of messages that carry the error details. There is a common set of
* message types for APIs to use.
* </pre>
*
* <code>repeated .google.protobuf.Any details = 3;</code>
*/
public Builder addDetails(int index, com.google.protobuf.Any.Builder builderForValue) {
if (detailsBuilder_ == null) {
ensureDetailsIsMutable();
details_.add(index, builderForValue.build());
onChanged();
} else {
detailsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* A list of messages that carry the error details. There is a common set of
* message types for APIs to use.
* </pre>
*
* <code>repeated .google.protobuf.Any details = 3;</code>
*/
public Builder addAllDetails(java.lang.Iterable<? extends com.google.protobuf.Any> values) {
if (detailsBuilder_ == null) {
ensureDetailsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, details_);
onChanged();
} else {
detailsBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* A list of messages that carry the error details. There is a common set of
* message types for APIs to use.
* </pre>
*
* <code>repeated .google.protobuf.Any details = 3;</code>
*/
public Builder clearDetails() {
if (detailsBuilder_ == null) {
details_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
} else {
detailsBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* A list of messages that carry the error details. There is a common set of
* message types for APIs to use.
* </pre>
*
* <code>repeated .google.protobuf.Any details = 3;</code>
*/
public Builder removeDetails(int index) {
if (detailsBuilder_ == null) {
ensureDetailsIsMutable();
details_.remove(index);
onChanged();
} else {
detailsBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* A list of messages that carry the error details. There is a common set of
* message types for APIs to use.
* </pre>
*
* <code>repeated .google.protobuf.Any details = 3;</code>
*/
public com.google.protobuf.Any.Builder getDetailsBuilder(int index) {
return getDetailsFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* A list of messages that carry the error details. There is a common set of
* message types for APIs to use.
* </pre>
*
* <code>repeated .google.protobuf.Any details = 3;</code>
*/
public com.google.protobuf.AnyOrBuilder getDetailsOrBuilder(int index) {
if (detailsBuilder_ == null) {
return details_.get(index);
} else {
return detailsBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* A list of messages that carry the error details. There is a common set of
* message types for APIs to use.
* </pre>
*
* <code>repeated .google.protobuf.Any details = 3;</code>
*/
public java.util.List<? extends com.google.protobuf.AnyOrBuilder> getDetailsOrBuilderList() {
if (detailsBuilder_ != null) {
return detailsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(details_);
}
}
/**
*
*
* <pre>
* A list of messages that carry the error details. There is a common set of
* message types for APIs to use.
* </pre>
*
* <code>repeated .google.protobuf.Any details = 3;</code>
*/
public com.google.protobuf.Any.Builder addDetailsBuilder() {
return getDetailsFieldBuilder().addBuilder(com.google.protobuf.Any.getDefaultInstance());
}
/**
*
*
* <pre>
* A list of messages that carry the error details. There is a common set of
* message types for APIs to use.
* </pre>
*
* <code>repeated .google.protobuf.Any details = 3;</code>
*/
public com.google.protobuf.Any.Builder addDetailsBuilder(int index) {
return getDetailsFieldBuilder()
.addBuilder(index, com.google.protobuf.Any.getDefaultInstance());
}
/**
*
*
* <pre>
* A list of messages that carry the error details. There is a common set of
* message types for APIs to use.
* </pre>
*
* <code>repeated .google.protobuf.Any details = 3;</code>
*/
public java.util.List<com.google.protobuf.Any.Builder> getDetailsBuilderList() {
return getDetailsFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.protobuf.Any,
com.google.protobuf.Any.Builder,
com.google.protobuf.AnyOrBuilder>
getDetailsFieldBuilder() {
if (detailsBuilder_ == null) {
detailsBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.protobuf.Any,
com.google.protobuf.Any.Builder,
com.google.protobuf.AnyOrBuilder>(
details_, ((bitField0_ & 0x00000004) != 0), getParentForChildren(), isClean());
details_ = null;
}
return detailsBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.rpc.Status)
}
// @@protoc_insertion_point(class_scope:google.rpc.Status)
private static final com.google.rpc.Status DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.rpc.Status();
}
public static com.google.rpc.Status getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<Status> PARSER =
new com.google.protobuf.AbstractParser<Status>() {
@java.lang.Override
public Status parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<Status> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<Status> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.rpc.Status getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
openjdk/jdk8 | 37,886 | jaxp/src/com/sun/org/apache/xalan/internal/xsltc/compiler/util/ErrorMessages_ca.java | /*
* reserved comment block
* DO NOT REMOVE OR ALTER!
*/
/*
* Copyright 2001-2004 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* $Id: ErrorMessages_ca.java,v 1.1.6.1 2005/09/05 11:52:59 pvedula Exp $
*/
package com.sun.org.apache.xalan.internal.xsltc.compiler.util;
import java.util.ListResourceBundle;
/**
 * Catalan ("ca") locale resource bundle for XSLTC.  Maps the message keys
 * declared in {@link ErrorMsg} to their Catalan translations; the standard
 * {@code ResourceBundle} lookup selects this class when the active locale
 * is Catalan.
 *
 * @author Morten Jorgensen
 */
public class ErrorMessages_ca extends ListResourceBundle {
    /*
     * XSLTC compile-time error messages.
     *
     * General notes to translators and definitions:
     *
     * 1) XSLTC is the name of the product.  It is an acronym for "XSLT Compiler".
     *    XSLT is an acronym for "XML Stylesheet Language: Transformations".
     *
     * 2) A stylesheet is a description of how to transform an input XML document
     *    into a resultant XML document (or HTML document or text).  The
     *    stylesheet itself is described in the form of an XML document.
     *
     * 3) A template is a component of a stylesheet that is used to match a
     *    particular portion of an input document and specifies the form of the
     *    corresponding portion of the output document.
     *
     * 4) An axis is a particular "dimension" in a tree representation of an XML
     *    document; the nodes in the tree are divided along different axes.
     *    Traversing the "child" axis, for instance, means that the program
     *    would visit each child of a particular node; traversing the "descendant"
     *    axis means that the program would visit the child nodes of a particular
     *    node, their children, and so on until the leaf nodes of the tree are
     *    reached.
     *
     * 5) An iterator is an object that traverses nodes in a tree along a
     *    particular axis, one at a time.
     *
     * 6) An element is a mark-up tag in an XML document; an attribute is a
     *    modifier on the tag.  For example, in <elem attr='val' attr2='val2'>
     *    "elem" is an element name, "attr" and "attr2" are attribute names with
     *    the values "val" and "val2", respectively.
     *
     * 7) A namespace declaration is a special attribute that is used to associate
     *    a prefix with a URI (the namespace).  The meanings of element names and
     *    attribute names that use that prefix are defined with respect to that
     *    namespace.
     *
     * 8) DOM is an acronym for Document Object Model.  It is a tree
     *    representation of an XML document.
     *
     *    SAX is an acronym for the Simple API for XML processing.  It is an API
     *    used inform an XML processor (in this case XSLTC) of the structure and
     *    content of an XML document.
     *
     *    Input to the stylesheet processor can come from an XML parser in the
     *    form of a DOM tree or through the SAX API.
     *
     * 9) DTD is a document type declaration.  It is a way of specifying the
     *    grammar for an XML file, the names and types of elements, attributes,
     *    etc.
     *
     * 10) XPath is a specification that describes a notation for identifying
     *     nodes in a tree-structured representation of an XML document.  An
     *     instance of that notation is referred to as an XPath expression.
     *
     * 11) Translet is an invented term that refers to the class file that contains
     *     the compiled form of a stylesheet.
     */
    // These messages should be read from a locale-specific resource bundle
    /** Get the lookup table for error messages.
     *
     * @return The message lookup table.
     */
    @Override
    public Object[][] getContents()
    {
      return new Object[][] {
        {ErrorMsg.MULTIPLE_STYLESHEET_ERR,
        "S'ha definit m\u00e9s d'un full d'estils en el mateix fitxer."},
        /*
         * Note to translators:  The substitution text is the name of a
         * template.  The same name was used on two different templates in the
         * same stylesheet.
         */
        {ErrorMsg.TEMPLATE_REDEF_ERR,
        "La plantilla ''{0}'' ja est\u00e0 definida en aquest full d''estils."},
        /*
         * Note to translators:  The substitution text is the name of a
         * template.  A reference to the template name was encountered, but the
         * template is undefined.
         */
        {ErrorMsg.TEMPLATE_UNDEF_ERR,
        "La plantilla ''{0}'' no est\u00e0 definida en aquest full d''estils."},
        /*
         * Note to translators:  The substitution text is the name of a variable
         * that was defined more than once.
         */
        {ErrorMsg.VARIABLE_REDEF_ERR,
        "La variable ''{0}'' s''ha definit m\u00e9s d''una vegada en el mateix \u00e0mbit."},
        /*
         * Note to translators:  The substitution text is the name of a variable
         * or parameter.  A reference to the variable or parameter was found,
         * but it was never defined.
         */
        {ErrorMsg.VARIABLE_UNDEF_ERR,
        "La variable o el par\u00e0metre ''{0}'' no s''ha definit."},
        /*
         * Note to translators:  The word "class" here refers to a Java class.
         * Processing the stylesheet required a class to be loaded, but it could
         * not be found.  The substitution text is the name of the class.
         */
        {ErrorMsg.CLASS_NOT_FOUND_ERR,
        "No s''ha trobat la classe ''{0}''."},
        /*
         * Note to translators:  The word "method" here refers to a Java method.
         * Processing the stylesheet required a reference to the method named by
         * the substitution text, but it could not be found.  "public" is the
         * Java keyword.
         */
        {ErrorMsg.METHOD_NOT_FOUND_ERR,
        "No s''ha trobat el m\u00e8tode extern ''{0}'' (ha de ser public)."},
        /*
         * Note to translators:  The word "method" here refers to a Java method.
         * Processing the stylesheet required a reference to the method named by
         * the substitution text, but no method with the required types of
         * arguments or return type could be found.
         */
        {ErrorMsg.ARGUMENT_CONVERSION_ERR,
        "No s''ha pogut convertir l''argument o tipus de retorn a la crida del m\u00e8tode ''{0}''"},
        /*
         * Note to translators:  The file or URI named in the substitution text
         * is missing.
         */
        {ErrorMsg.FILE_NOT_FOUND_ERR,
        "No s''ha trobat el fitxer o URI ''{0}''."},
        /*
         * Note to translators:  This message is displayed when the URI
         * mentioned in the substitution text is not well-formed syntactically.
         */
        {ErrorMsg.INVALID_URI_ERR,
        "L''URI ''{0}'' no \u00e9s v\u00e0lid."},
        /*
         * Note to translators:  The file or URI named in the substitution text
         * exists but could not be opened.
         */
        {ErrorMsg.FILE_ACCESS_ERR,
        "No es pot obrir el fitxer o l''URI ''{0}''."},
        /*
         * Note to translators: <xsl:stylesheet> and <xsl:transform> are
         * keywords that should not be translated.
         */
        {ErrorMsg.MISSING_ROOT_ERR,
        "S''esperava l''element <xsl:stylesheet> o <xsl:transform>."},
        /*
         * Note to translators:  The stylesheet contained a reference to a
         * namespace prefix that was undefined.  The value of the substitution
         * text is the name of the prefix.
         */
        {ErrorMsg.NAMESPACE_UNDEF_ERR,
        "El prefix d''espai de noms ''{0}'' no s''ha declarat."},
        /*
         * Note to translators:  The Java function named in the stylesheet could
         * not be found.
         */
        {ErrorMsg.FUNCTION_RESOLVE_ERR,
        "No s''ha pogut resoldre la crida de la funci\u00f3 ''{0}''."},
        /*
         * Note to translators:  The substitution text is the name of a
         * function.  A literal string here means a constant string value.
         */
        {ErrorMsg.NEED_LITERAL_ERR,
        "L''argument de ''{0}'' ha de ser una cadena de literals."},
        /*
         * Note to translators:  This message indicates there was a syntactic
         * error in the form of an XPath expression.  The substitution text is
         * the expression.
         */
        {ErrorMsg.XPATH_PARSER_ERR,
        "S''ha produ\u00eft un error en analitzar l''expressi\u00f3 XPath ''{0}''."},
        /*
         * Note to translators:  An element in the stylesheet requires a
         * particular attribute named by the substitution text, but that
         * attribute was not specified in the stylesheet.
         */
        {ErrorMsg.REQUIRED_ATTR_ERR,
        "No s''ha especificat l''atribut obligatori ''{0}''."},
        /*
         * Note to translators:  This message indicates that a character not
         * permitted in an XPath expression was encountered.  The substitution
         * text is the offending character.
         */
        {ErrorMsg.ILLEGAL_CHAR_ERR,
        "L''expressi\u00f3 XPath cont\u00e9 el car\u00e0cter no perm\u00e8s ''{0}''."},
        /*
         * Note to translators:  A processing instruction is a mark-up item in
         * an XML document that request some behaviour of an XML processor.  The
         * form of the name of was invalid in this case, and the substitution
         * text is the name.
         */
        {ErrorMsg.ILLEGAL_PI_ERR,
        "La instrucci\u00f3 de processament t\u00e9 el nom no perm\u00e8s ''{0}''."},
        /*
         * Note to translators:  This message is reported if the stylesheet
         * being processed attempted to construct an XML document with an
         * attribute in a place other than on an element.  The substitution text
         * specifies the name of the attribute.
         */
        {ErrorMsg.STRAY_ATTRIBUTE_ERR,
        "L''atribut ''{0}'' es troba fora de l''element."},
        /*
         * Note to translators:  An attribute that wasn't recognized was
         * specified on an element in the stylesheet.  The attribute is named
         * by the substitution
         * text.
         */
        {ErrorMsg.ILLEGAL_ATTRIBUTE_ERR,
        "No es permet l''atribut ''{0}''."},
        /*
         * Note to translators:  "import" and "include" are keywords that should
         * not be translated.  This messages indicates that the stylesheet
         * named in the substitution text imported or included itself either
         * directly or indirectly.
         */
        {ErrorMsg.CIRCULAR_INCLUDE_ERR,
        "Import/include circular. El full d''estils ''{0}'' ja s''ha carregat."},
        /*
         * Note to translators:  A result-tree fragment is a portion of a
         * resulting XML document represented as a tree.  "<xsl:sort>" is a
         * keyword and should not be translated.
         */
        {ErrorMsg.RESULT_TREE_SORT_ERR,
        "Els fragments de l'arbre de resultats no es poden classificar (es passen per alt els elements <xsl:sort>). Heu de classificar els nodes quan creeu l'arbre de resultats. "},
        /*
         * Note to translators:  A name can be given to a particular style to be
         * used to format decimal values.  The substitution text gives the name
         * of such a style for which more than one declaration was encountered.
         */
        {ErrorMsg.SYMBOLS_REDEF_ERR,
        "El formatatge decimal ''{0}'' ja est\u00e0 definit."},
        /*
         * Note to translators:  The stylesheet version named in the
         * substitution text is not supported.
         */
        {ErrorMsg.XSL_VERSION_ERR,
        "XSLTC no d\u00f3na suport a la versi\u00f3 XSL ''{0}''."},
        /*
         * Note to translators:  The definitions of one or more variables or
         * parameters depend on one another.
         */
        {ErrorMsg.CIRCULAR_VARIABLE_ERR,
        "Hi ha una refer\u00e8ncia de variable/par\u00e0metre circular a ''{0}''."},
        /*
         * Note to translators:  The operator in an expresion with two operands was
         * not recognized.
         */
        {ErrorMsg.ILLEGAL_BINARY_OP_ERR,
        "L'operador de l'expressi\u00f3 bin\u00e0ria \u00e9s desconegut."},
        /*
         * Note to translators:  This message is produced if a reference to a
         * function has too many or too few arguments.
         */
        {ErrorMsg.ILLEGAL_ARG_ERR,
        "La crida de funci\u00f3 t\u00e9 arguments no permesos."},
        /*
         * Note to translators:  "document()" is the name of function and must
         * not be translated.  A node-set is a set of the nodes in the tree
         * representation of an XML document.
         */
        {ErrorMsg.DOCUMENT_ARG_ERR,
        "El segon argument de la funci\u00f3 document() ha de ser un conjunt de nodes."},
        /*
         * Note to translators:  "<xsl:when>" and "<xsl:choose>" are keywords
         * and should not be translated.  This message describes a syntax error
         * in the stylesheet.
         */
        {ErrorMsg.MISSING_WHEN_ERR,
        "Es necessita com a m\u00ednim un element <xsl:when> a <xsl:choose>."},
        /*
         * Note to translators:  "<xsl:otherwise>" and "<xsl:choose>" are
         * keywords and should not be translated.  This message describes a
         * syntax error in the stylesheet.
         */
        {ErrorMsg.MULTIPLE_OTHERWISE_ERR,
        "Nom\u00e9s es permet un element <xsl:otherwise> a <xsl:choose>."},
        /*
         * Note to translators:  "<xsl:otherwise>" and "<xsl:choose>" are
         * keywords and should not be translated.  This message describes a
         * syntax error in the stylesheet.
         */
        {ErrorMsg.STRAY_OTHERWISE_ERR,
        "<xsl:otherwise> nom\u00e9s es pot utilitzar dins de <xsl:choose>."},
        /*
         * Note to translators:  "<xsl:when>" and "<xsl:choose>" are keywords
         * and should not be translated.  This message describes a syntax error
         * in the stylesheet.
         */
        {ErrorMsg.STRAY_WHEN_ERR,
        "<xsl:when> nom\u00e9s es pot utilitzar dins de <xsl:choose>."},
        /*
         * Note to translators:  "<xsl:when>", "<xsl:otherwise>" and
         * "<xsl:choose>" are keywords and should not be translated.  This
         * message describes a syntax error in the stylesheet.
         */
        {ErrorMsg.WHEN_ELEMENT_ERR,
        "A <xsl:choose> nom\u00e9s es permeten els elements <xsl:when> i <xsl:otherwise>."},
        /*
         * Note to translators:  "<xsl:attribute-set>" and "name" are keywords
         * that should not be translated.
         */
        {ErrorMsg.UNNAMED_ATTRIBSET_ERR,
        "L'atribut 'name' falta a <xsl:attribute-set>."},
        /*
         * Note to translators:  An element in the stylesheet contained an
         * element of a type that it was not permitted to contain.
         */
        {ErrorMsg.ILLEGAL_CHILD_ERR,
        "L'element subordinat no \u00e9s perm\u00e8s."},
        /*
         * Note to translators:  The stylesheet tried to create an element with
         * a name that was not a valid XML name.  The substitution text contains
         * the name.
         */
        {ErrorMsg.ILLEGAL_ELEM_NAME_ERR,
        "No podeu cridar un element ''{0}''"},
        /*
         * Note to translators:  The stylesheet tried to create an attribute
         * with a name that was not a valid XML name.  The substitution text
         * contains the name.
         */
        {ErrorMsg.ILLEGAL_ATTR_NAME_ERR,
        "No podeu cridar un atribut ''{0}''"},
        /*
         * Note to translators:  The children of the outermost element of a
         * stylesheet are referred to as top-level elements.  No text should
         * occur within that outermost element unless it is within a top-level
         * element.  This message indicates that that constraint was violated.
         * "<xsl:stylesheet>" is a keyword that should not be translated.
         */
        {ErrorMsg.ILLEGAL_TEXT_NODE_ERR,
        "Hi ha dades fora de l'element de nivell superior <xsl:stylesheet>."},
        /*
         * Note to translators:  JAXP is an acronym for the Java API for XML
         * Processing.  This message indicates that the XML parser provided to
         * XSLTC to process the XML input document had a configuration problem.
         */
        {ErrorMsg.SAX_PARSER_CONFIG_ERR,
        "L'analitzador JAXP no s'ha configurat correctament"},
        /*
         * Note to translators:  The substitution text names the internal error
         * encountered.
         */
        {ErrorMsg.INTERNAL_ERR,
        "S''ha produ\u00eft un error intern d''XSLTC irrecuperable: ''{0}''"},
        /*
         * Note to translators:  The stylesheet contained an element that was
         * not recognized as part of the XSL syntax.  The substitution text
         * gives the element name.
         */
        {ErrorMsg.UNSUPPORTED_XSL_ERR,
        "L''element d''XSL ''{0}'' no t\u00e9 suport."},
        /*
         * Note to translators:  The stylesheet referred to an extension to the
         * XSL syntax and indicated that it was defined by XSLTC, but XSTLC does
         * not recognized the particular extension named.  The substitution text
         * gives the extension name.
         */
        {ErrorMsg.UNSUPPORTED_EXT_ERR,
        "No es reconeix l''extensi\u00f3 d''XSLTC ''{0}''."},
        /*
         * Note to translators:  The XML document given to XSLTC as a stylesheet
         * was not, in fact, a stylesheet.  XSLTC is able to detect that in this
         * case because the outermost element in the stylesheet has to be
         * declared with respect to the XSL namespace URI, but no declaration
         * for that namespace was seen.
         */
        {ErrorMsg.MISSING_XSLT_URI_ERR,
        "El document d'entrada no \u00e9s un full d'estils (l'espai de noms XSL no s'ha declarat en l'element arrel)."},
        /*
         * Note to translators:  XSLTC could not find the stylesheet document
         * with the name specified by the substitution text.
         */
        {ErrorMsg.MISSING_XSLT_TARGET_ERR,
        "No s''ha trobat la destinaci\u00f3 ''{0}'' del full d''estils."},
        /*
         * Note to translators:  access to the stylesheet target is denied
         */
        // NOTE(review): this message is still in English and has not yet been
        // localized to Catalan like the rest of this bundle.
        {ErrorMsg.ACCESSING_XSLT_TARGET_ERR,
        "Could not read stylesheet target ''{0}'', because ''{1}'' access is not allowed."},
        /*
         * Note to translators:  This message represents an internal error in
         * condition in XSLTC.  The substitution text is the class name in XSLTC
         * that is missing some functionality.
         */
        {ErrorMsg.NOT_IMPLEMENTED_ERR,
        "No s''ha implementat ''{0}''."},
        /*
         * Note to translators:  The XML document given to XSLTC as a stylesheet
         * was not, in fact, a stylesheet.
         */
        {ErrorMsg.NOT_STYLESHEET_ERR,
        "El document d'entrada no cont\u00e9 cap full d'estils XSL."},
        /*
         * Note to translators:  The element named in the substitution text was
         * encountered in the stylesheet but is not recognized.
         */
        {ErrorMsg.ELEMENT_PARSE_ERR,
        "No s''ha pogut analitzar l''element ''{0}''"},
        /*
         * Note to translators:  "use", "<key>", "node", "node-set", "string"
         * and "number" are keywords in this context and should not be
         * translated.  This message indicates that the value of the "use"
         * attribute was not one of the permitted values.
         */
        {ErrorMsg.KEY_USE_ATTR_ERR,
        "L'atribut use de <key> ha de ser node, node-set, string o number."},
        /*
         * Note to translators:  An XML document can specify the version of the
         * XML specification to which it adheres.  This message indicates that
         * the version specified for the output document was not valid.
         */
        {ErrorMsg.OUTPUT_VERSION_ERR,
        "La versi\u00f3 del document XML de sortida ha de ser 1.0"},
        /*
         * Note to translators:  The operator in a comparison operation was
         * not recognized.
         */
        {ErrorMsg.ILLEGAL_RELAT_OP_ERR,
        "L'operador de l'expressi\u00f3 relacional \u00e9s desconegut."},
        /*
         * Note to translators:  An attribute set defines as a set of XML
         * attributes that can be added to an element in the output XML document
         * as a group.  This message is reported if the name specified was not
         * used to declare an attribute set.  The substitution text is the name
         * that is in error.
         */
        {ErrorMsg.ATTRIBSET_UNDEF_ERR,
        "S''ha intentat utilitzar el conjunt d''atributs ''{0}'' que no existeix."},
        /*
         * Note to translators:  The term "attribute value template" is a term
         * defined by XSLT which describes the value of an attribute that is
         * determined by an XPath expression.  The message indicates that the
         * expression was syntactically incorrect; the substitution text
         * contains the expression that was in error.
         */
        {ErrorMsg.ATTR_VAL_TEMPLATE_ERR,
        "No es pot analitzar la plantilla de valors d''atributs ''{0}''."},
        /*
         * Note to translators:  ???
         */
        {ErrorMsg.UNKNOWN_SIG_TYPE_ERR,
        "El tipus de dades de la signatura de la classe ''{0}'' \u00e9s desconegut."},
        /*
         * Note to translators:  The substitution text refers to data types.
         * The message is displayed if a value in a particular context needs to
         * be converted to type {1}, but that's not possible for a value of
         * type {0}.
         */
        {ErrorMsg.DATA_CONVERSION_ERR,
        "No es pot convertir el tipus de dades ''{0}'' en ''{1}''."},
        /*
         * Note to translators:  "Templates" is a Java class name that should
         * not be translated.
         */
        {ErrorMsg.NO_TRANSLET_CLASS_ERR,
        "Templates no cont\u00e9 cap definici\u00f3 de classe translet."},
        /*
         * Note to translators:  "Templates" is a Java class name that should
         * not be translated.
         */
        {ErrorMsg.NO_MAIN_TRANSLET_ERR,
        "Templates no cont\u00e9 cap classe amb el nom ''{0}''."},
        /*
         * Note to translators:  The substitution text is the name of a class.
         */
        {ErrorMsg.TRANSLET_CLASS_ERR,
        "No s''ha pogut carregar la classe translet ''{0}''."},
        {ErrorMsg.TRANSLET_OBJECT_ERR,
        "La classe translet s''ha carregat, per\u00f2 no es pot crear la inst\u00e0ncia translet."},
        /*
         * Note to translators:  "ErrorListener" is a Java interface name that
         * should not be translated.  The message indicates that the user tried
         * to set an ErrorListener object on object of the class named in the
         * substitution text with "null" Java value.
         */
        {ErrorMsg.ERROR_LISTENER_NULL_ERR,
        "S''ha intentat establir ErrorListener de ''{0}'' en un valor nul."},
        /*
         * Note to translators:  StreamSource, SAXSource and DOMSource are Java
         * interface names that should not be translated.
         */
        {ErrorMsg.JAXP_UNKNOWN_SOURCE_ERR,
        "XSLTC nom\u00e9s d\u00f3na suport a StreamSource, SAXSource i DOMSource."},
        /*
         * Note to translators:  "Source" is a Java class name that should not
         * be translated.  The substitution text is the name of Java method.
         */
        {ErrorMsg.JAXP_NO_SOURCE_ERR,
        "L''objecte source donat a ''{0}'' no t\u00e9 contingut."},
        /*
         * Note to translators:  The message indicates that XSLTC failed to
         * compile the stylesheet into a translet (class file).
         */
        {ErrorMsg.JAXP_COMPILE_ERR,
        "No s'ha pogut compilar el full d'estils."},
        /*
         * Note to translators:  "TransformerFactory" is a class name.  In this
         * context, an attribute is a property or setting of the
         * TransformerFactory object.  The substitution text is the name of the
         * unrecognised attribute.  The method used to retrieve the attribute is
         * "getAttribute", so it's not clear whether it would be best to
         * translate the term "attribute".
         */
        {ErrorMsg.JAXP_INVALID_ATTR_ERR,
        "TransformerFactory no reconeix l''atribut ''{0}''."},
        /*
         * Note to translators:  "setResult()" and "startDocument()" are Java
         * method names that should not be translated.
         */
        {ErrorMsg.JAXP_SET_RESULT_ERR,
        "setResult() s'ha de cridar abans de startDocument()."},
        /*
         * Note to translators:  "Transformer" is a Java interface name that
         * should not be translated.  A Transformer object should contained a
         * reference to a translet object in order to be used for
         * transformations; this message is produced if that requirement is not
         * met.
         */
        {ErrorMsg.JAXP_NO_TRANSLET_ERR,
        "Transformer no cont\u00e9 cap objecte translet."},
        /*
         * Note to translators:  The XML document that results from a
         * transformation needs to be sent to an output handler object; this
         * message is produced if that requirement is not met.
         */
        {ErrorMsg.JAXP_NO_HANDLER_ERR,
        "No s'ha definit cap manejador de sortida per al resultat de transformaci\u00f3."},
        /*
         * Note to translators:  "Result" is a Java interface name in this
         * context.  The substitution text is a method name.
         */
        {ErrorMsg.JAXP_NO_RESULT_ERR,
        "L''objecte result donat a ''{0}'' no \u00e9s v\u00e0lid."},
        /*
         * Note to translators:  "Transformer" is a Java interface name.  The
         * user's program attempted to access an unrecognized property with the
         * name specified in the substitution text.  The method used to retrieve
         * the property is "getOutputProperty", so it's not clear whether it
         * would be best to translate the term "property".
         */
        {ErrorMsg.JAXP_UNKNOWN_PROP_ERR,
        "S''ha intentat accedir a una propietat Transformer ''{0}'' no v\u00e0lida."},
        /*
         * Note to translators:  SAX2DOM is the name of a Java class that should
         * not be translated.  This is an adapter in the sense that it takes a
         * DOM object and converts it to something that uses the SAX API.
         */
        {ErrorMsg.SAX2DOM_ADAPTER_ERR,
        "No s''ha pogut crear l''adaptador SAX2DOM ''{0}''."},
        /*
         * Note to translators:  "XSLTCSource.build()" is a Java method name.
         * "systemId" is an XML term that is short for "system identification".
         */
        {ErrorMsg.XSLTC_SOURCE_ERR,
        "S'ha cridat XSLTCSource.build() sense que s'hagu\u00e9s establert la identificaci\u00f3 del sistema."},
        {ErrorMsg.COMPILE_STDIN_ERR,
        "L'opci\u00f3 -i s'ha d'utilitzar amb l'opci\u00f3 -o."},
        /*
         * Note to translators:  This message contains usage information for a
         * means of invoking XSLTC from the command-line.  The message is
         * formatted for presentation in English.  The strings <output>,
         * <directory>, etc. indicate user-specified argument values, and can
         * be translated - the argument <package> refers to a Java package, so
         * it should be handled in the same way the term is handled for JDK
         * documentation.
         */
        {ErrorMsg.COMPILE_USAGE_STR,
        "RESUM\n java com.sun.org.apache.xalan.internal.xsltc.cmdline.Compile [-o <sortida>]\n [-d <directori>] [-j <fitxer_jar>] [-p <paquet>]\n [-n] [-x] [-s] [-u] [-v] [-h] { <full_estils> | -i }\n\nOPCIONS\n -o <sortida> assigna el nom <sortida> al translet\n generat. Per defecte, el nom de translet\n s'obt\u00e9 del nom de <full_estils>. Aquesta opci\u00f3\n no es t\u00e9 en compte si es compilen diversos fulls d'estils.\n -d <directori> especifica un directori de destinaci\u00f3 per al translet\n -j <fitxer_jar> empaqueta les classes translet en un fitxer jar del nom\n especificat com a <fitxer_jar>\n -p <paquet> especifica un prefix de nom de paquet per a totes les classes\n translet generades.\n -n habilita l'inlining (com a mitjana, el funcionament per defecte\n \u00e9s millor).\n -x habilita la sortida de missatges de depuraci\u00f3 addicionals\n -s inhabilita la crida de System.exit\n -u interpreta els arguments <full_estils> com URL\n -i obliga el compilador a llegir el full d'estils des de l'entrada est\u00e0ndard\n -v imprimeix la versi\u00f3 del compilador\n -h imprimeix aquesta sent\u00e8ncia d'\u00fas.\n"},
        /*
         * Note to translators:  This message contains usage information for a
         * means of invoking XSLTC from the command-line.  The message is
         * formatted for presentation in English.  The strings <jarfile>,
         * <document>, etc. indicate user-specified argument values, and can
         * be translated - the argument <class> refers to a Java class, so it
         * should be handled in the same way the term is handled for JDK
         * documentation.
         */
        {ErrorMsg.TRANSFORM_USAGE_STR,
        "RESUM \n java com.sun.org.apache.xalan.internal.xsltc.cmdline.Transform [-j <fitxer_jar>]\n [-x] [-s] [-n <iteracions>] {-u <url_document> | <document>}\n <classe> [<par\u00e0m1>=<valor1> ...]\n\n utilitza la <classe> translet per transformar un document XML\n especificat com a <document>. La <classe> translet es troba\n o b\u00e9 a la CLASSPATH de l'usuari o b\u00e9 al <fitxer_jar> que es pot especificar opcionalment.\nOPCIONS\n -j <fitxer_jar> especifica un fitxer jar des del qual es pot carregar el translet\n -x habilita la sortida de missatges de depuraci\u00f3 addicionals\n -s inhabilita la crida de System.exit\n -n <iteracions> executa la transformaci\u00f3 el nombre de vegades <iteracions> i\n mostra informaci\u00f3 de perfil\n -u <url_document> especifica el document d'entrada XML com una URL\n"},
        /*
         * Note to translators:  "<xsl:sort>", "<xsl:for-each>" and
         * "<xsl:apply-templates>" are keywords that should not be translated.
         * The message indicates that an xsl:sort element must be a child of
         * one of the other kinds of elements mentioned.
         */
        {ErrorMsg.STRAY_SORT_ERR,
        "<xsl:sort> nom\u00e9s es pot utilitzar amb <xsl:for-each> o <xsl:apply-templates>."},
        /*
         * Note to translators:  The message indicates that the encoding
         * requested for the output document was on that requires support that
         * is not available from the Java Virtual Machine being used to execute
         * the program.
         */
        {ErrorMsg.UNSUPPORTED_ENCODING,
        "Aquesta JVM no d\u00f3na suport a la codificaci\u00f3 de sortida ''{0}''."},
        /*
         * Note to translators:  The message indicates that the XPath expression
         * named in the substitution text was not well formed syntactically.
         */
        {ErrorMsg.SYNTAX_ERR,
        "S''ha produ\u00eft un error de sintaxi a ''{0}''."},
        /*
         * Note to translators:  The substitution text is the name of a Java
         * class.  The term "constructor" here is the Java term.  The message is
         * displayed if XSLTC could not find a constructor for the specified
         * class.
         */
        {ErrorMsg.CONSTRUCTOR_NOT_FOUND,
        "No s''ha trobat el constructor extern ''{0}''."},
        /*
         * Note to translators:  "static" is the Java keyword.  The substitution
         * text is the name of a function.  The first argument of that function
         * is not of the required type.
         */
        {ErrorMsg.NO_JAVA_FUNCT_THIS_REF,
        "El primer argument de la funci\u00f3 Java no static ''{0}'' no \u00e9s una refer\u00e8ncia d''objecte v\u00e0lida."},
        /*
         * Note to translators:  An XPath expression was not of the type
         * required in a particular context.  The substitution text is the
         * expression that was in error.
         */
        {ErrorMsg.TYPE_CHECK_ERR,
        "S''ha produ\u00eft un error en comprovar el tipus de l''expressi\u00f3 ''{0}''."},
        /*
         * Note to translators:  An XPath expression was not of the type
         * required in a particular context.  However, the location of the
         * problematic expression is unknown.
         */
        {ErrorMsg.TYPE_CHECK_UNK_LOC_ERR,
        "S'ha produ\u00eft un error en comprovar el tipus d'expressi\u00f3 en una ubicaci\u00f3 desconeguda."},
        /*
         * Note to translators:  The substitution text is the name of a command-
         * line option that was not recognized.
         */
        {ErrorMsg.ILLEGAL_CMDLINE_OPTION_ERR,
        "L''opci\u00f3 de l\u00ednia d''ordres ''{0}'' no \u00e9s v\u00e0lida."},
        /*
         * Note to translators:  The substitution text is the name of a command-
         * line option.
         */
        {ErrorMsg.CMDLINE_OPT_MISSING_ARG_ERR,
        "A l''opci\u00f3 de l\u00ednia d''ordres ''{0}'' li falta un argument obligatori."},
        /*
         * Note to translators:  This message is used to indicate the severity
         * of another message.  The substitution text contains two error
         * messages.  The spacing before the second substitution text indents
         * it the same amount as the first in English.
         */
        {ErrorMsg.WARNING_PLUS_WRAPPED_MSG,
        "AV\u00cdS: ''{0}''\n :{1}"},
        /*
         * Note to translators:  This message is used to indicate the severity
         * of another message.  The substitution text is an error message.
         */
        {ErrorMsg.WARNING_MSG,
        "AV\u00cdS: ''{0}''"},
        /*
         * Note to translators:  This message is used to indicate the severity
         * of another message.  The substitution text contains two error
         * messages.  The spacing before the second substitution text indents
         * it the same amount as the first in English.
         */
        {ErrorMsg.FATAL_ERR_PLUS_WRAPPED_MSG,
        "ERROR MOLT GREU: ''{0}''\n :{1}"},
        /*
         * Note to translators:  This message is used to indicate the severity
         * of another message.  The substitution text is an error message.
         */
        {ErrorMsg.FATAL_ERR_MSG,
        "ERROR MOLT GREU: ''{0}''"},
        /*
         * Note to translators:  This message is used to indicate the severity
         * of another message.  The substitution text contains two error
         * messages.  The spacing before the second substitution text indents
         * it the same amount as the first in English.
         */
        {ErrorMsg.ERROR_PLUS_WRAPPED_MSG,
        "ERROR: ''{0}''\n :{1}"},
        /*
         * Note to translators:  This message is used to indicate the severity
         * of another message.  The substitution text is an error message.
         */
        {ErrorMsg.ERROR_MSG,
        "ERROR: ''{0}''"},
        /*
         * Note to translators:  The substitution text is the name of a class.
         */
        {ErrorMsg.TRANSFORM_WITH_TRANSLET_STR,
        "Transformaci\u00f3 mitjan\u00e7ant translet ''{0}'' "},
        /*
         * Note to translators:  The first substitution is the name of a class,
         * while the second substitution is the name of a jar file.
         */
        {ErrorMsg.TRANSFORM_WITH_JAR_STR,
        "Transformaci\u00f3 mitjan\u00e7ant translet ''{0}'' des del fitxer jar ''{1}''"},
        /*
         * Note to translators:  "TransformerFactory" is the name of a Java
         * interface and must not be translated.  The substitution text is
         * the name of the class that could not be instantiated.
         */
        {ErrorMsg.COULD_NOT_CREATE_TRANS_FACT,
        "No s''ha pogut crear una inst\u00e0ncia de la classe TransformerFactory ''{0}''."},
        /*
         * Note to translators:  The following message is used as a header.
         * All the error messages are collected together and displayed beneath
         * this message.
         */
        {ErrorMsg.COMPILER_ERROR_KEY,
        "Errors del compilador:"},
        /*
         * Note to translators:  The following message is used as a header.
         * All the warning messages are collected together and displayed
         * beneath this message.
         */
        {ErrorMsg.COMPILER_WARNING_KEY,
        "Avisos del compilador:"},
        /*
         * Note to translators:  The following message is used as a header.
         * All the error messages that are produced when the stylesheet is
         * applied to an input document are collected together and displayed
         * beneath this message.  A 'translet' is the compiled form of a
         * stylesheet (see above).
         */
        {ErrorMsg.RUNTIME_ERROR_KEY,
        "Errors de translet:"},
        // NOTE(review): this message is still in English and has not yet been
        // localized to Catalan like the rest of this bundle.
        {ErrorMsg.JAXP_SECUREPROCESSING_FEATURE,
        "FEATURE_SECURE_PROCESSING: Cannot set the feature to false when security manager is present."}
      };
    }
}
|
apache/ofbiz | 37,753 | applications/accounting/src/main/java/org/apache/ofbiz/accounting/thirdparty/cybersource/IcsPaymentServices.java | /*******************************************************************************
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*******************************************************************************/
package org.apache.ofbiz.accounting.thirdparty.cybersource;
import java.math.BigDecimal;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Properties;
import org.apache.ofbiz.accounting.payment.PaymentGatewayServices;
import org.apache.ofbiz.base.util.Debug;
import org.apache.ofbiz.base.util.SSLUtil;
import org.apache.ofbiz.base.util.StringUtil;
import org.apache.ofbiz.base.util.UtilGenerics;
import org.apache.ofbiz.base.util.UtilMisc;
import org.apache.ofbiz.base.util.UtilNumber;
import org.apache.ofbiz.base.util.UtilProperties;
import org.apache.ofbiz.base.util.UtilValidate;
import org.apache.ofbiz.base.util.string.FlexibleStringExpander;
import org.apache.ofbiz.entity.Delegator;
import org.apache.ofbiz.entity.GenericEntityException;
import org.apache.ofbiz.entity.GenericValue;
import org.apache.ofbiz.entity.util.EntityQuery;
import org.apache.ofbiz.entity.util.EntityUtilProperties;
import org.apache.ofbiz.service.DispatchContext;
import org.apache.ofbiz.service.ServiceUtil;
import com.cybersource.ws.client.Client;
import com.cybersource.ws.client.ClientException;
import com.cybersource.ws.client.FaultException;
/**
* CyberSource WS Integration Services
*/
/**
 * CyberSource (ICS) WS payment-processing services.
 *
 * <p>Implements the OFBiz payment-gateway service interface for CyberSource:
 * authorization, re-authorization, capture, release (auth reversal), refund
 * and credit. Each service builds a CyberSource Simple Order API request map,
 * transmits it through {@link com.cybersource.ws.client.Client#runTransaction}
 * and maps the reply back onto the standard OFBiz result fields.</p>
 */
public class IcsPaymentServices {

    public static final String module = IcsPaymentServices.class.getName();

    // scale and rounding mode used when formatting monetary amounts for CyberSource
    private static int decimals = UtilNumber.getBigDecimalScale("invoice.decimals");
    private static int rounding = UtilNumber.getBigDecimalRoundingMode("invoice.rounding");
    public final static String resource = "AccountingUiLabels";

    // load the JSSE properties so the CyberSource client can open TLS connections
    static {
        SSLUtil.loadJsseProperties();
    }

    /**
     * Authorizes a credit-card payment through CyberSource. When the gateway's
     * "autoBill" setting is true the capture service is run in the same request
     * (i.e. a sale).
     *
     * @param dctx    service dispatch context (used for the delegator)
     * @param context service context; must contain the standard CC-auth interface
     *                fields (orderId, currency, billing/shipping info, amounts, ...)
     * @return service result map with authResult/authCode/authRefNum etc., or an
     *         error result if the gateway configuration is missing or the
     *         communication with CyberSource fails
     */
    public static Map<String, Object> ccAuth(DispatchContext dctx, Map<String, ? extends Object> context) {
        Delegator delegator = dctx.getDelegator();
        Locale locale = (Locale) context.get("locale");
        // generate the request/properties
        Properties props = buildCsProperties(context, delegator);
        if (props == null) {
            return ServiceUtil.returnError(UtilProperties.getMessage(resource,
                    "AccountingCyberSourceErrorGettingPaymentGatewayConfig", locale));
        }
        Map<String, Object> request = buildAuthRequest(context, delegator);
        request.put("merchantID", props.get("merchantID"));
        // transmit the request
        Map<String, Object> reply;
        try {
            reply = UtilGenerics.cast(Client.runTransaction(request, props));
        } catch (FaultException e) {
            Debug.logError(e, "ERROR: Fault from CyberSource", module);
            Debug.logError(e, "Fault : " + e.getFaultString(), module);
            return ServiceUtil.returnError(UtilProperties.getMessage(resource,
                    "AccountingCyberSourceErrorCommunicateWithCyberSource", locale));
        } catch (ClientException e) {
            Debug.logError(e, "ERROR: CyberSource Client exception : " + e.getMessage(), module);
            return ServiceUtil.returnError(UtilProperties.getMessage(resource,
                    "AccountingCyberSourceErrorCommunicateWithCyberSource", locale));
        }
        // process the reply
        Map<String, Object> result = ServiceUtil.returnSuccess();
        processAuthResult(reply, result, delegator);
        return result;
    }

    /**
     * Re-authorization is not supported for CyberSource; always returns success
     * so the payment framework can continue.
     */
    public static Map<String, Object> ccReAuth(DispatchContext dctx, Map<String, ? extends Object> context) {
        return ServiceUtil.returnSuccess();
    }

    /**
     * Captures a previously authorized payment.
     *
     * @param dctx    service dispatch context
     * @param context must contain "orderPaymentPreference" and "captureAmount";
     *                may contain an explicit "authTrans" to capture against
     * @return service result with captureResult/captureAmount/captureRefNum etc.
     */
    public static Map<String, Object> ccCapture(DispatchContext dctx, Map<String, ? extends Object> context) {
        Delegator delegator = dctx.getDelegator();
        GenericValue orderPaymentPreference = (GenericValue) context.get("orderPaymentPreference");
        // lets see if there is a auth transaction already in context
        GenericValue authTransaction = (GenericValue) context.get("authTrans");
        Locale locale = (Locale) context.get("locale");
        if (authTransaction == null) {
            authTransaction = PaymentGatewayServices.getAuthTransaction(orderPaymentPreference);
        }
        if (authTransaction == null) {
            return ServiceUtil.returnError(UtilProperties.getMessage(resource,
                    "AccountingPaymentTransactionAuthorizationNotFoundCannotCapture", locale));
        }
        // generate the request/properties
        Properties props = buildCsProperties(context, delegator);
        if (props == null) {
            return ServiceUtil.returnError(UtilProperties.getMessage(resource,
                    "AccountingCyberSourceErrorGettingPaymentGatewayConfig", locale));
        }
        Map<String, Object> request = buildCaptureRequest(context, authTransaction, delegator);
        request.put("merchantID", props.get("merchantID"));
        // transmit the request
        Map<String, Object> reply;
        try {
            reply = UtilGenerics.cast(Client.runTransaction(request, props));
        } catch (FaultException e) {
            Debug.logError(e, "ERROR: Fault from CyberSource", module);
            return ServiceUtil.returnError(UtilProperties.getMessage(resource,
                    "AccountingCyberSourceErrorCommunicateWithCyberSource", locale));
        } catch (ClientException e) {
            Debug.logError(e, "ERROR: CyberSource Client exception : " + e.getMessage(), module);
            return ServiceUtil.returnError(UtilProperties.getMessage(resource,
                    "AccountingCyberSourceErrorCommunicateWithCyberSource", locale));
        }
        // process the reply
        Map<String, Object> result = ServiceUtil.returnSuccess();
        processCaptureResult(reply, result);
        return result;
    }

    /**
     * Releases (reverses) an authorization that will not be captured.
     *
     * @param dctx    service dispatch context
     * @param context must contain "orderPaymentPreference" and "releaseAmount"
     * @return service result with releaseResult/releaseAmount/releaseRefNum etc.
     */
    public static Map<String, Object> ccRelease(DispatchContext dctx, Map<String, ? extends Object> context) {
        Delegator delegator = dctx.getDelegator();
        GenericValue orderPaymentPreference = (GenericValue) context.get("orderPaymentPreference");
        Locale locale = (Locale) context.get("locale");
        GenericValue authTransaction = PaymentGatewayServices.getAuthTransaction(orderPaymentPreference);
        if (authTransaction == null) {
            return ServiceUtil.returnError(UtilProperties.getMessage(resource,
                    "AccountingPaymentTransactionAuthorizationNotFoundCannotRelease", locale));
        }
        // generate the request/properties
        Properties props = buildCsProperties(context, delegator);
        if (props == null) {
            return ServiceUtil.returnError(UtilProperties.getMessage(resource,
                    "AccountingCyberSourceErrorGettingPaymentGatewayConfig", locale));
        }
        Map<String, Object> request = buildReleaseRequest(context, authTransaction);
        request.put("merchantID", props.get("merchantID"));
        // transmit the request
        Map<String, Object> reply;
        try {
            reply = UtilGenerics.cast(Client.runTransaction(request, props));
        } catch (FaultException e) {
            Debug.logError(e, "ERROR: Fault from CyberSource", module);
            return ServiceUtil.returnError(UtilProperties.getMessage(resource,
                    "AccountingCyberSourceErrorCommunicateWithCyberSource", locale));
        } catch (ClientException e) {
            Debug.logError(e, "ERROR: CyberSource Client exception : " + e.getMessage(), module);
            return ServiceUtil.returnError(UtilProperties.getMessage(resource,
                    "AccountingCyberSourceErrorCommunicateWithCyberSource", locale));
        }
        // process the reply
        Map<String, Object> result = ServiceUtil.returnSuccess();
        processReleaseResult(reply, result);
        return result;
    }

    /**
     * Refunds (credits back) a previously captured payment.
     *
     * @param dctx    service dispatch context
     * @param context must contain "orderPaymentPreference" and "refundAmount"
     * @return service result with refundResult/refundAmount/refundRefNum etc.
     */
    public static Map<String, Object> ccRefund(DispatchContext dctx, Map<String, ? extends Object> context) {
        Delegator delegator = dctx.getDelegator();
        GenericValue orderPaymentPreference = (GenericValue) context.get("orderPaymentPreference");
        Locale locale = (Locale) context.get("locale");
        GenericValue authTransaction = PaymentGatewayServices.getAuthTransaction(orderPaymentPreference);
        if (authTransaction == null) {
            return ServiceUtil.returnError(UtilProperties.getMessage(resource,
                    "AccountingPaymentTransactionAuthorizationNotFoundCannotRefund", locale));
        }
        // generate the request/properties
        Properties props = buildCsProperties(context, delegator);
        if (props == null) {
            return ServiceUtil.returnError(UtilProperties.getMessage(resource,
                    "AccountingCyberSourceErrorGettingPaymentGatewayConfig", locale));
        }
        Map<String, Object> request = buildRefundRequest(context, authTransaction, delegator);
        request.put("merchantID", props.get("merchantID"));
        // transmit the request
        Map<String, Object> reply;
        try {
            reply = UtilGenerics.cast(Client.runTransaction(request, props));
        } catch (FaultException e) {
            Debug.logError(e, "ERROR: Fault from CyberSource", module);
            return ServiceUtil.returnError(UtilProperties.getMessage(resource,
                    "AccountingCyberSourceErrorCommunicateWithCyberSource", locale));
        } catch (ClientException e) {
            Debug.logError(e, "ERROR: CyberSource Client exception : " + e.getMessage(), module);
            return ServiceUtil.returnError(UtilProperties.getMessage(resource,
                    "AccountingCyberSourceErrorCommunicateWithCyberSource", locale));
        }
        // process the reply
        Map<String, Object> result = ServiceUtil.returnSuccess();
        processRefundResult(reply, result);
        return result;
    }

    /**
     * Issues a stand-alone credit (not tied to a prior capture in this system).
     *
     * @param dctx    service dispatch context
     * @param context must contain "referenceCode", "creditAmount" and billing info
     * @return service result with creditResult/creditAmount/creditRefNum etc.
     */
    public static Map<String, Object> ccCredit(DispatchContext dctx, Map<String, ? extends Object> context) {
        Delegator delegator = dctx.getDelegator();
        Locale locale = (Locale) context.get("locale");
        // generate the request/properties
        Properties props = buildCsProperties(context, delegator);
        if (props == null) {
            return ServiceUtil.returnError(UtilProperties.getMessage(resource,
                    "AccountingCyberSourceErrorGettingPaymentGatewayConfig", locale));
        }
        Map<String, Object> request = buildCreditRequest(context);
        request.put("merchantID", props.get("merchantID"));
        // transmit the request
        Map<String, Object> reply;
        try {
            reply = UtilGenerics.cast(Client.runTransaction(request, props));
        } catch (FaultException e) {
            Debug.logError(e, "ERROR: Fault from CyberSource", module);
            return ServiceUtil.returnError(UtilProperties.getMessage(resource,
                    "AccountingCyberSourceErrorCommunicateWithCyberSource", locale));
        } catch (ClientException e) {
            Debug.logError(e, "ERROR: CyberSource Client exception : " + e.getMessage(), module);
            return ServiceUtil.returnError(UtilProperties.getMessage(resource,
                    "AccountingCyberSourceErrorCommunicateWithCyberSource", locale));
        }
        // process the reply
        Map<String, Object> result = ServiceUtil.returnSuccess();
        processCreditResult(reply, result);
        return result;
    }

    /**
     * Builds the {@link Properties} handed to the CyberSource client from either
     * the PaymentGatewayCyberSource entity (when a paymentGatewayConfigId is in
     * context) or the configured properties file.
     *
     * @return the client properties, or {@code null} when the required
     *         merchantId/keysDir settings are missing
     */
    private static Properties buildCsProperties(Map<String, ? extends Object> context, Delegator delegator) {
        String paymentGatewayConfigId = (String) context.get("paymentGatewayConfigId");
        String configString = (String) context.get("paymentConfig");
        if (configString == null) {
            configString = "payment.properties";
        }
        String merchantId = getPaymentGatewayConfigValue(delegator, paymentGatewayConfigId, "merchantId", configString, "payment.cybersource.merchantID");
        String targetApi = getPaymentGatewayConfigValue(delegator, paymentGatewayConfigId, "apiVersion", configString, "payment.cybersource.api.version");
        String production = getPaymentGatewayConfigValue(delegator, paymentGatewayConfigId, "production", configString, "payment.cybersource.production");
        String enableLog = getPaymentGatewayConfigValue(delegator, paymentGatewayConfigId, "logEnabled", configString, "payment.cybersource.log");
        String logSize = getPaymentGatewayConfigValue(delegator, paymentGatewayConfigId, "logSize", configString, "payment.cybersource.log.size");
        String logFile = getPaymentGatewayConfigValue(delegator, paymentGatewayConfigId, "logFile", configString, "payment.cybersource.log.file");
        // the log/keys directory settings may contain ${...} expressions, so expand them against the context
        String logDir = FlexibleStringExpander.expandString(getPaymentGatewayConfigValue(delegator, paymentGatewayConfigId, "logDir", configString, "payment.cybersource.log.dir"), context);
        String keysDir = FlexibleStringExpander.expandString(getPaymentGatewayConfigValue(delegator, paymentGatewayConfigId, "keysDir", configString, "payment.cybersource.keysDir"), context);
        String keysFile = getPaymentGatewayConfigValue(delegator, paymentGatewayConfigId, "keysFile", configString, "payment.cybersource.keysFile");
        // some property checking
        if (UtilValidate.isEmpty(merchantId)) {
            Debug.logWarning("The merchantId property is not configured", module);
            return null;
        }
        if (UtilValidate.isEmpty(keysDir)) {
            Debug.logWarning("The keysDir property is not configured", module);
            return null;
        }
        // create some properties for CS Client
        Properties props = new Properties();
        props.put("merchantID", merchantId);
        props.put("keysDirectory", keysDir);
        props.put("targetAPIVersion", targetApi);
        props.put("sendToProduction", production);
        props.put("enableLog", enableLog);
        props.put("logDirectory", logDir);
        props.put("logFilename", logFile);
        props.put("logMaximumSize", logSize);
        if (UtilValidate.isNotEmpty(keysFile)) {
            props.put("alternateKeyFilename", keysFile);
        }
        Debug.logInfo("Created CyberSource Properties : " + props, module);
        return props;
    }

    /** Builds the ccAuthService request map (optionally with auto-capture). */
    private static Map<String, Object> buildAuthRequest(Map<String, ? extends Object> context, Delegator delegator) {
        String paymentGatewayConfigId = (String) context.get("paymentGatewayConfigId");
        String configString = (String) context.get("paymentConfig");
        String currency = (String) context.get("currency");
        if (configString == null) {
            configString = "payment.properties";
        }
        // make the request map
        String capture = getPaymentGatewayConfigValue(delegator, paymentGatewayConfigId, "autoBill", configString, "payment.cybersource.autoBill", "false");
        String orderId = (String) context.get("orderId");
        Map<String, Object> request = new HashMap<>();
        request.put("ccAuthService_run", "true");              // run auth service
        request.put("ccCaptureService_run", capture);          // run capture service (i.e. sale)
        request.put("merchantReferenceCode", orderId);         // set the order ref number
        request.put("purchaseTotals_currency", currency);      // set the order currency
        appendFullBillingInfo(request, context);               // add in all address info
        appendItemLineInfo(request, context, "processAmount"); // add in the item info
        appendAvsRules(request, context, delegator);           // add in the AVS flags and decline codes
        return request;
    }

    /** Builds the ccCaptureService request map against a prior authorization. */
    private static Map<String, Object> buildCaptureRequest(Map<String, ? extends Object> context, GenericValue authTransaction, Delegator delegator) {
        GenericValue orderPaymentPreference = (GenericValue) context.get("orderPaymentPreference");
        String paymentGatewayConfigId = (String) context.get("paymentGatewayConfigId");
        String configString = (String) context.get("paymentConfig");
        String currency = (String) context.get("currency");
        if (configString == null) {
            configString = "payment.properties";
        }
        String merchantDesc = getPaymentGatewayConfigValue(delegator, paymentGatewayConfigId, "merchantDescr", configString, "payment.cybersource.merchantDescr", null);
        String merchantCont = getPaymentGatewayConfigValue(delegator, paymentGatewayConfigId, "merchantContact", configString, "payment.cybersource.merchantContact", null);
        Map<String, Object> request = new HashMap<>();
        request.put("ccCaptureService_run", "true");
        request.put("ccCaptureService_authRequestID", authTransaction.getString("referenceNum"));
        request.put("item_0_unitPrice", getAmountString(context, "captureAmount"));
        request.put("merchantReferenceCode", orderPaymentPreference.getString("orderId"));
        request.put("purchaseTotals_currency", currency);
        // TODO: add support for verbal authorizations
        if (merchantDesc != null) {
            request.put("invoiceHeader_merchantDescriptor", merchantDesc); // merchant description
        }
        if (merchantCont != null) {
            request.put("invoiceHeader_merchantDescriptorContact", merchantCont); // merchant contact info
        }
        return request;
    }

    /** Builds the ccAuthReversalService request map to release an authorization. */
    private static Map<String, Object> buildReleaseRequest(Map<String, ? extends Object> context, GenericValue authTransaction) {
        Map<String, Object> request = new HashMap<>();
        GenericValue orderPaymentPreference = (GenericValue) context.get("orderPaymentPreference");
        String currency = (String) context.get("currency");
        request.put("ccAuthReversalService_run", "true");
        request.put("ccAuthReversalService_authRequestID", authTransaction.getString("referenceNum"));
        request.put("item_0_unitPrice", getAmountString(context, "releaseAmount"));
        request.put("merchantReferenceCode", orderPaymentPreference.getString("orderId"));
        request.put("purchaseTotals_currency", currency);
        return request;
    }

    /** Builds the ccCreditService request map tied to a prior capture (refund). */
    private static Map<String, Object> buildRefundRequest(Map<String, ? extends Object> context, GenericValue authTransaction, Delegator delegator) {
        GenericValue orderPaymentPreference = (GenericValue) context.get("orderPaymentPreference");
        String paymentGatewayConfigId = (String) context.get("paymentGatewayConfigId");
        String configString = (String) context.get("paymentConfig");
        if (configString == null) {
            configString = "payment.properties";
        }
        String currency = (String) context.get("currency");
        String merchantDesc = getPaymentGatewayConfigValue(delegator, paymentGatewayConfigId, "merchantDescr", configString, "payment.cybersource.merchantDescr", null);
        String merchantCont = getPaymentGatewayConfigValue(delegator, paymentGatewayConfigId, "merchantContact", configString, "payment.cybersource.merchantContact", null);
        Map<String, Object> request = new HashMap<>();
        request.put("ccCreditService_run", "true");
        request.put("ccCreditService_captureRequestID", authTransaction.getString("referenceNum"));
        request.put("item_0_unitPrice", getAmountString(context, "refundAmount"));
        request.put("merchantReferenceCode", orderPaymentPreference.getString("orderId"));
        request.put("purchaseTotals_currency", currency);
        if (merchantDesc != null) {
            request.put("invoiceHeader_merchantDescriptor", merchantDesc); // merchant description
        }
        if (merchantCont != null) {
            request.put("invoiceHeader_merchantDescriptorContact", merchantCont); // merchant contact info
        }
        return request;
    }

    /** Builds a stand-alone ccCreditService request map (no capture reference). */
    private static Map<String, Object> buildCreditRequest(Map<String, ? extends Object> context) {
        String refCode = (String) context.get("referenceCode");
        Map<String, Object> request = new HashMap<>();
        request.put("ccCreditService_run", "true");          // run credit service
        request.put("merchantReferenceCode", refCode);       // set the ref number could be order id
        appendFullBillingInfo(request, context);             // add in all address info
        appendItemLineInfo(request, context, "creditAmount"); // add in the item info
        return request;
    }

    /**
     * Appends AVS business rules: decline flags (from config, optionally
     * overridden per-party via PartyIcsAvsOverride) and the ignore-AVS flag.
     */
    private static void appendAvsRules(Map<String, Object> request, Map<String, ? extends Object> context, Delegator delegator) {
        String paymentGatewayConfigId = (String) context.get("paymentGatewayConfigId");
        String configString = (String) context.get("paymentConfig");
        if (configString == null) {
            configString = "payment.properties";
        }
        String avsCodes = getPaymentGatewayConfigValue(delegator, paymentGatewayConfigId, "avsDeclineCodes", configString, "payment.cybersource.avsDeclineCodes", null);
        GenericValue party = (GenericValue) context.get("billToParty");
        if (party != null) {
            GenericValue avsOverride = null;
            try {
                avsOverride = party.getDelegator().findOne("PartyIcsAvsOverride",
                        UtilMisc.toMap("partyId", party.getString("partyId")), false);
            } catch (GenericEntityException e) {
                Debug.logError(e, module);
            }
            // a non-empty per-party decline string takes precedence over the configured codes
            if (avsOverride != null && avsOverride.get("avsDeclineString") != null) {
                String overrideString = avsOverride.getString("avsDeclineString");
                if (UtilValidate.isNotEmpty(overrideString)) {
                    avsCodes = overrideString;
                }
            }
        }
        if (UtilValidate.isNotEmpty(avsCodes)) {
            request.put("businessRules_declineAVSFlags", avsCodes);
        }
        String avsIgnore = getPaymentGatewayConfigValue(delegator, paymentGatewayConfigId, "ignoreAvs", configString, "payment.cybersource.ignoreAvs", "false");
        request.put("businessRules_ignoreAVS", avsIgnore);
    }

    /**
     * Appends the full billing information to the request: contact email, card
     * holder/number/expiration, CVV, billing address and (when present)
     * shipping address.
     */
    private static void appendFullBillingInfo(Map<String, Object> request, Map<String, ? extends Object> context) {
        // contact info
        GenericValue email = (GenericValue) context.get("billToEmail");
        if (email != null) {
            request.put("billTo_email", email.getString("infoString"));
        } else {
            Debug.logWarning("Email not defined; Cybersource will fail.", module);
        }
        // phone number seems to not be used; possibly only for reporting.
        // CC payment info
        GenericValue creditCard = (GenericValue) context.get("creditCard");
        if (creditCard != null) {
            // expireDate is stored as "MM/YYYY"
            List<String> expDateList = StringUtil.split(creditCard.getString("expireDate"), "/");
            request.put("billTo_firstName", creditCard.getString("firstNameOnCard"));
            request.put("billTo_lastName", creditCard.getString("lastNameOnCard"));
            request.put("card_accountNumber", creditCard.getString("cardNumber"));
            request.put("card_expirationMonth", expDateList.get(0));
            request.put("card_expirationYear", expDateList.get(1));
        } else {
            Debug.logWarning("CreditCard not defined; Cybersource will fail.", module);
        }
        // CCV info
        String cvNum = (String) context.get("cardSecurityCode");
        String cvSet = UtilValidate.isEmpty(cvNum) ? "1" : "0";
        // NOTE(review): the indicator mapping (empty CVV -> "1") looks inverted versus
        // the CyberSource card_cvIndicator documentation -- confirm before changing;
        // left as-is to preserve existing gateway behavior.
        request.put("card_cvIndicator", cvSet);
        if (UtilValidate.isNotEmpty(cvNum)) {
            // FIX: previously this compared the CVV value itself to the literal "1"
            // ("1".equals(cvNum)), so the security code was almost never transmitted
            request.put("card_cvNumber", cvNum);
        }
        // payment contact info
        GenericValue billingAddress = (GenericValue) context.get("billingAddress");
        if (billingAddress != null) {
            request.put("billTo_street1", billingAddress.getString("address1"));
            if (billingAddress.get("address2") != null) {
                request.put("billTo_street2", billingAddress.getString("address2"));
            }
            request.put("billTo_city", billingAddress.getString("city"));
            String bCountry = billingAddress.get("countryGeoId") != null ? billingAddress.getString("countryGeoId") : "USA";
            request.put("billTo_country", bCountry);
            request.put("billTo_postalCode", billingAddress.getString("postalCode"));
            if (billingAddress.get("stateProvinceGeoId") != null) {
                request.put("billTo_state", billingAddress.getString("stateProvinceGeoId"));
            }
        } else {
            Debug.logWarning("BillingAddress not defined; Cybersource will fail.", module);
        }
        // order shipping information
        GenericValue shippingAddress = (GenericValue) context.get("shippingAddress");
        if (shippingAddress != null) {
            if (creditCard != null) {
                // TODO: this is just a kludge since we don't have a firstName and lastName on the PostalAddress entity, that needs to be done
                request.put("shipTo_firstName", creditCard.getString("firstNameOnCard"));
                request.put("shipTo_lastName", creditCard.getString("lastNameOnCard"));
            }
            request.put("shipTo_street1", shippingAddress.getString("address1"));
            if (shippingAddress.get("address2") != null) {
                request.put("shipTo_street2", shippingAddress.getString("address2"));
            }
            request.put("shipTo_city", shippingAddress.getString("city"));
            String sCountry = shippingAddress.get("countryGeoId") != null ? shippingAddress.getString("countryGeoId") : "USA";
            request.put("shipTo_country", sCountry);
            request.put("shipTo_postalCode", shippingAddress.getString("postalCode"));
            if (shippingAddress.get("stateProvinceGeoId") != null) {
                request.put("shipTo_state", shippingAddress.getString("stateProvinceGeoId"));
            }
        }
    }

    /**
     * Appends the line-item information. Line 0 carries the authoritative total
     * (so CyberSource's own arithmetic is never trusted); each order item is
     * then sent as an additional line with a 0.0000 unit price.
     *
     * @param amountField the context key holding the total amount for line 0
     */
    private static void appendItemLineInfo(Map<String, Object> request, Map<String, ? extends Object> context, String amountField) {
        // send over a line item total offer w/ the total for billing; don't trust CyberSource for calc
        String currency = (String) context.get("currency");
        int lineNumber = 0;
        request.put("item_" + lineNumber + "_unitPrice", getAmountString(context, amountField));
        // the currency
        request.put("purchaseTotals_currency", currency);
        // create the offers (one for each line item)
        List<GenericValue> orderItems = UtilGenerics.cast(context.get("orderItems"));
        if (orderItems != null) {
            for (Object orderItem : orderItems) {
                lineNumber++;
                GenericValue item = (GenericValue) orderItem;
                GenericValue product = null;
                try {
                    product = item.getRelatedOne("Product", false);
                } catch (GenericEntityException e) {
                    Debug.logError(e, "ERROR: Unable to get Product from OrderItem, not passing info to CyberSource", module);
                }
                if (product != null) {
                    request.put("item_" + lineNumber + "_productName", product.getString("productName"));
                    request.put("item_" + lineNumber + "_productSKU", product.getString("productId"));
                } else {
                    // no product; just send the item description -- non product items
                    request.put("item_" + lineNumber + "_productName", item.getString("description"));
                }
                // get the quantity; CyberSource expects an integer, so fractional
                // (or missing) quantities are sent as 1 and the line-0 total governs
                BigDecimal quantity = item.getBigDecimal("quantity");
                if (quantity == null || quantity.scale() > 0) {
                    request.put("item_" + lineNumber + "_quantity", "1");
                } else {
                    // FIX: the request key was previously the empty string (""), so
                    // the integer quantity was never actually sent to CyberSource
                    request.put("item_" + lineNumber + "_quantity", Integer.toString(quantity.intValue()));
                }
                // set the amount to 0.0000 -- we will send a total too.
                request.put("item_" + lineNumber + "_unitPrice", "0.0000");
            }
        }
    }

    /**
     * Formats the BigDecimal amount found under {@code amountField} in the
     * context using the configured invoice scale/rounding.
     */
    private static String getAmountString(Map<String, ? extends Object> context, String amountField) {
        BigDecimal processAmount = (BigDecimal) context.get(amountField);
        return processAmount.setScale(decimals, rounding).toPlainString();
    }

    /** Maps a CyberSource auth reply onto the OFBiz CC-auth result fields. */
    private static void processAuthResult(Map<String, Object> reply, Map<String, Object> result, Delegator delegator) {
        String decision = getDecision(reply);
        String checkModeStatus = EntityUtilProperties.getPropertyValue("payment", "payment.cybersource.ignoreStatus", delegator);
        if ("ACCEPT".equalsIgnoreCase(decision)) {
            result.put("authCode", reply.get("ccAuthReply_authorizationCode"));
            result.put("authResult", Boolean.TRUE);
        } else {
            result.put("authCode", decision);
            // unless ignoreStatus is "N", declines are still reported as successful
            if ("N".equals(checkModeStatus)) {
                result.put("authResult", Boolean.FALSE);
            } else {
                result.put("authResult", Boolean.TRUE);
            }
            // TODO: based on reasonCode populate the following flags as applicable: resultDeclined, resultNsf, resultBadExpire, resultBadCardNumber
        }
        if (reply.get("ccAuthReply_amount") != null) {
            result.put("processAmount", new BigDecimal((String) reply.get("ccAuthReply_amount")));
        } else {
            result.put("processAmount", BigDecimal.ZERO);
        }
        result.put("authRefNum", reply.get("requestID"));
        result.put("authFlag", reply.get("ccAuthReply_reasonCode"));
        result.put("authMessage", reply.get("ccAuthReply_processorResponse"));
        result.put("cvCode", reply.get("ccAuthReply_cvCode"));
        result.put("avsCode", reply.get("ccAuthReply_avsCode"));
        result.put("scoreCode", reply.get("ccAuthReply_authFactorCode"));
        result.put("captureRefNum", reply.get("requestID"));
        // when auto-capture ran in the same request, also map the capture fields
        if (UtilValidate.isNotEmpty(reply.get("ccCaptureReply_reconciliationID"))) {
            if ("ACCEPT".equalsIgnoreCase(decision)) {
                result.put("captureResult", Boolean.TRUE);
            } else {
                result.put("captureResult", Boolean.FALSE);
            }
            result.put("captureCode", reply.get("ccCaptureReply_reconciliationID"));
            result.put("captureFlag", reply.get("ccCaptureReply_reasonCode"));
            result.put("captureMessage", reply.get("decision"));
        }
        if (Debug.infoOn()) {
            Debug.logInfo("CC [Cybersource] authorization result : " + result, module);
        }
    }

    /** Maps a CyberSource capture reply onto the OFBiz capture result fields. */
    private static void processCaptureResult(Map<String, Object> reply, Map<String, Object> result) {
        String decision = getDecision(reply);
        if ("ACCEPT".equalsIgnoreCase(decision)) {
            result.put("captureResult", Boolean.TRUE);
        } else {
            result.put("captureResult", Boolean.FALSE);
        }
        if (reply.get("ccCaptureReply_amount") != null) {
            result.put("captureAmount", new BigDecimal((String) reply.get("ccCaptureReply_amount")));
        } else {
            result.put("captureAmount", BigDecimal.ZERO);
        }
        result.put("captureRefNum", reply.get("requestID"));
        result.put("captureCode", reply.get("ccCaptureReply_reconciliationID"));
        result.put("captureFlag", reply.get("ccCaptureReply_reasonCode"));
        result.put("captureMessage", reply.get("decision"));
        if (Debug.infoOn()) {
            Debug.logInfo("CC [Cybersource] capture result : " + result, module);
        }
    }

    /** Maps a CyberSource auth-reversal reply onto the OFBiz release result fields. */
    private static void processReleaseResult(Map<String, Object> reply, Map<String, Object> result) {
        String decision = getDecision(reply);
        if ("ACCEPT".equalsIgnoreCase(decision)) {
            result.put("releaseResult", Boolean.TRUE);
        } else {
            result.put("releaseResult", Boolean.FALSE);
        }
        if (reply.get("ccAuthReversalReply_amount") != null) {
            result.put("releaseAmount", new BigDecimal((String) reply.get("ccAuthReversalReply_amount")));
        } else {
            result.put("releaseAmount", BigDecimal.ZERO);
        }
        result.put("releaseRefNum", reply.get("requestID"));
        result.put("releaseCode", reply.get("ccAuthReversalReply_reasonCode"));
        result.put("releaseFlag", reply.get("reasonCode"));
        result.put("releaseMessage", reply.get("decision"));
        if (Debug.infoOn()) {
            Debug.logInfo("CC [Cybersource] release result : " + result, module);
        }
    }

    /** Maps a CyberSource credit reply onto the OFBiz refund result fields. */
    private static void processRefundResult(Map<String, Object> reply, Map<String, Object> result) {
        String decision = getDecision(reply);
        if ("ACCEPT".equalsIgnoreCase(decision)) {
            result.put("refundResult", Boolean.TRUE);
        } else {
            result.put("refundResult", Boolean.FALSE);
        }
        if (reply.get("ccCreditReply_amount") != null) {
            result.put("refundAmount", new BigDecimal((String) reply.get("ccCreditReply_amount")));
        } else {
            result.put("refundAmount", BigDecimal.ZERO);
        }
        result.put("refundRefNum", reply.get("requestID"));
        result.put("refundCode", reply.get("ccCreditReply_reconciliationID"));
        result.put("refundFlag", reply.get("ccCreditReply_reasonCode"));
        result.put("refundMessage", reply.get("decision"));
        if (Debug.infoOn()) {
            Debug.logInfo("CC [Cybersource] refund result : " + result, module);
        }
    }

    /** Maps a CyberSource credit reply onto the OFBiz credit result fields. */
    private static void processCreditResult(Map<String, Object> reply, Map<String, Object> result) {
        String decision = (String) reply.get("decision");
        if ("ACCEPT".equalsIgnoreCase(decision)) {
            result.put("creditResult", Boolean.TRUE);
        } else {
            result.put("creditResult", Boolean.FALSE);
        }
        if (reply.get("ccCreditReply_amount") != null) {
            result.put("creditAmount", new BigDecimal((String) reply.get("ccCreditReply_amount")));
        } else {
            result.put("creditAmount", BigDecimal.ZERO);
        }
        result.put("creditRefNum", reply.get("requestID"));
        result.put("creditCode", reply.get("ccCreditReply_reconciliationID"));
        result.put("creditFlag", reply.get("ccCreditReply_reasonCode"));
        result.put("creditMessage", reply.get("decision"));
        if (Debug.infoOn()) {
            Debug.logInfo("CC [Cybersource] credit result : " + result, module);
        }
    }

    /**
     * Extracts the gateway decision from the reply, logging the reason code and
     * the full reply dump whenever the decision is not ACCEPT.
     */
    private static String getDecision(Map<String, Object> reply) {
        String decision = (String) reply.get("decision");
        String reasonCode = (String) reply.get("reasonCode");
        if (!"ACCEPT".equalsIgnoreCase(decision)) {
            Debug.logInfo("CyberSource : " + decision + " (" + reasonCode + ")", module);
            Debug.logInfo("Reply Dump : " + reply, module);
        }
        return decision;
    }

    /**
     * Reads a gateway setting, preferring the PaymentGatewayCyberSource entity
     * row (when paymentGatewayConfigId is set) and falling back to the
     * properties file otherwise.
     *
     * @return the trimmed value, or an empty string when not configured
     */
    private static String getPaymentGatewayConfigValue(Delegator delegator, String paymentGatewayConfigId, String paymentGatewayConfigParameterName,
                                                      String resource, String parameterName) {
        String returnValue = "";
        if (UtilValidate.isNotEmpty(paymentGatewayConfigId)) {
            try {
                GenericValue cyberSource = EntityQuery.use(delegator).from("PaymentGatewayCyberSource").where("paymentGatewayConfigId", paymentGatewayConfigId).queryOne();
                if (cyberSource != null) {
                    Object cyberSourceField = cyberSource.get(paymentGatewayConfigParameterName);
                    if (cyberSourceField != null) {
                        returnValue = cyberSourceField.toString().trim();
                    }
                }
            } catch (GenericEntityException e) {
                Debug.logError(e, module);
            }
        } else {
            String value = EntityUtilProperties.getPropertyValue(resource, parameterName, delegator);
            if (value != null) {
                returnValue = value.trim();
            }
        }
        return returnValue;
    }

    /**
     * Same as {@link #getPaymentGatewayConfigValue(Delegator, String, String, String, String)}
     * but substitutes {@code defaultValue} when the setting is empty.
     */
    private static String getPaymentGatewayConfigValue(Delegator delegator, String paymentGatewayConfigId, String paymentGatewayConfigParameterName,
                                                      String resource, String parameterName, String defaultValue) {
        String returnValue = getPaymentGatewayConfigValue(delegator, paymentGatewayConfigId, paymentGatewayConfigParameterName, resource, parameterName);
        if (UtilValidate.isEmpty(returnValue)) {
            returnValue = defaultValue;
        }
        return returnValue;
    }
}
|
googleapis/google-cloud-java | 37,938 | java-channel/proto-google-cloud-channel-v1/src/main/java/com/google/cloud/channel/v1/ListReportsRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/channel/v1/reports_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.channel.v1;
/**
*
*
* <pre>
* Request message for
* [CloudChannelReportsService.ListReports][google.cloud.channel.v1.CloudChannelReportsService.ListReports].
* </pre>
*
* Protobuf type {@code google.cloud.channel.v1.ListReportsRequest}
*/
@java.lang.Deprecated
public final class ListReportsRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.channel.v1.ListReportsRequest)
ListReportsRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListReportsRequest.newBuilder() to construct.
  // Builder-based constructor; instances are created only via newBuilder().
  private ListReportsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
private ListReportsRequest() {
parent_ = "";
pageToken_ = "";
languageCode_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListReportsRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.channel.v1.ReportsServiceProto
.internal_static_google_cloud_channel_v1_ListReportsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.channel.v1.ReportsServiceProto
.internal_static_google_cloud_channel_v1_ListReportsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.channel.v1.ListReportsRequest.class,
com.google.cloud.channel.v1.ListReportsRequest.Builder.class);
}
public static final int PARENT_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The resource name of the partner account to list available
* reports for. Parent uses the format: accounts/{account_id}
* </pre>
*
* <code>string parent = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The parent.
*/
@java.lang.Override
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. The resource name of the partner account to list available
* reports for. Parent uses the format: accounts/{account_id}
* </pre>
*
* <code>string parent = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for parent.
*/
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int PAGE_SIZE_FIELD_NUMBER = 2;
private int pageSize_ = 0;
/**
*
*
* <pre>
* Optional. Requested page size of the report. The server might return fewer
* results than requested. If unspecified, returns 20 reports. The maximum
* value is 100.
* </pre>
*
* <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
public static final int PAGE_TOKEN_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* Optional. A token that specifies a page of results beyond the first page.
* Obtained through
* [ListReportsResponse.next_page_token][google.cloud.channel.v1.ListReportsResponse.next_page_token]
* of the previous
* [CloudChannelReportsService.ListReports][google.cloud.channel.v1.CloudChannelReportsService.ListReports]
* call.
* </pre>
*
* <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The pageToken.
*/
@java.lang.Override
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* Optional. A token that specifies a page of results beyond the first page.
* Obtained through
* [ListReportsResponse.next_page_token][google.cloud.channel.v1.ListReportsResponse.next_page_token]
* of the previous
* [CloudChannelReportsService.ListReports][google.cloud.channel.v1.CloudChannelReportsService.ListReports]
* call.
* </pre>
*
* <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for pageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int LANGUAGE_CODE_FIELD_NUMBER = 4;
@SuppressWarnings("serial")
private volatile java.lang.Object languageCode_ = "";
/**
*
*
* <pre>
* Optional. The BCP-47 language code, such as "en-US". If specified, the
* response is localized to the corresponding language code if the
* original data sources support it.
* Default is "en-US".
* </pre>
*
* <code>string language_code = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The languageCode.
*/
@java.lang.Override
public java.lang.String getLanguageCode() {
java.lang.Object ref = languageCode_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
languageCode_ = s;
return s;
}
}
/**
*
*
* <pre>
* Optional. The BCP-47 language code, such as "en-US". If specified, the
* response is localized to the corresponding language code if the
* original data sources support it.
* Default is "en-US".
* </pre>
*
* <code>string language_code = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for languageCode.
*/
@java.lang.Override
public com.google.protobuf.ByteString getLanguageCodeBytes() {
java.lang.Object ref = languageCode_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
languageCode_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
}
if (pageSize_ != 0) {
output.writeInt32(2, pageSize_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(languageCode_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 4, languageCode_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
}
if (pageSize_ != 0) {
size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(languageCode_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, languageCode_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.channel.v1.ListReportsRequest)) {
return super.equals(obj);
}
com.google.cloud.channel.v1.ListReportsRequest other =
(com.google.cloud.channel.v1.ListReportsRequest) obj;
if (!getParent().equals(other.getParent())) return false;
if (getPageSize() != other.getPageSize()) return false;
if (!getPageToken().equals(other.getPageToken())) return false;
if (!getLanguageCode().equals(other.getLanguageCode())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + PARENT_FIELD_NUMBER;
hash = (53 * hash) + getParent().hashCode();
hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
hash = (53 * hash) + getPageSize();
hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getPageToken().hashCode();
hash = (37 * hash) + LANGUAGE_CODE_FIELD_NUMBER;
hash = (53 * hash) + getLanguageCode().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.channel.v1.ListReportsRequest parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.channel.v1.ListReportsRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.channel.v1.ListReportsRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.channel.v1.ListReportsRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.channel.v1.ListReportsRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.channel.v1.ListReportsRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.channel.v1.ListReportsRequest parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.channel.v1.ListReportsRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.channel.v1.ListReportsRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.channel.v1.ListReportsRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.channel.v1.ListReportsRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.channel.v1.ListReportsRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.channel.v1.ListReportsRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request message for
* [CloudChannelReportsService.ListReports][google.cloud.channel.v1.CloudChannelReportsService.ListReports].
* </pre>
*
* Protobuf type {@code google.cloud.channel.v1.ListReportsRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.channel.v1.ListReportsRequest)
com.google.cloud.channel.v1.ListReportsRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.channel.v1.ReportsServiceProto
.internal_static_google_cloud_channel_v1_ListReportsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.channel.v1.ReportsServiceProto
.internal_static_google_cloud_channel_v1_ListReportsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.channel.v1.ListReportsRequest.class,
com.google.cloud.channel.v1.ListReportsRequest.Builder.class);
}
// Construct using com.google.cloud.channel.v1.ListReportsRequest.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
parent_ = "";
pageSize_ = 0;
pageToken_ = "";
languageCode_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.channel.v1.ReportsServiceProto
.internal_static_google_cloud_channel_v1_ListReportsRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.channel.v1.ListReportsRequest getDefaultInstanceForType() {
return com.google.cloud.channel.v1.ListReportsRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.channel.v1.ListReportsRequest build() {
com.google.cloud.channel.v1.ListReportsRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.channel.v1.ListReportsRequest buildPartial() {
com.google.cloud.channel.v1.ListReportsRequest result =
new com.google.cloud.channel.v1.ListReportsRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.channel.v1.ListReportsRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.parent_ = parent_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.pageSize_ = pageSize_;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.pageToken_ = pageToken_;
}
if (((from_bitField0_ & 0x00000008) != 0)) {
result.languageCode_ = languageCode_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.channel.v1.ListReportsRequest) {
return mergeFrom((com.google.cloud.channel.v1.ListReportsRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.channel.v1.ListReportsRequest other) {
if (other == com.google.cloud.channel.v1.ListReportsRequest.getDefaultInstance()) return this;
if (!other.getParent().isEmpty()) {
parent_ = other.parent_;
bitField0_ |= 0x00000001;
onChanged();
}
if (other.getPageSize() != 0) {
setPageSize(other.getPageSize());
}
if (!other.getPageToken().isEmpty()) {
pageToken_ = other.pageToken_;
bitField0_ |= 0x00000004;
onChanged();
}
if (!other.getLanguageCode().isEmpty()) {
languageCode_ = other.languageCode_;
bitField0_ |= 0x00000008;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
parent_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 16:
{
pageSize_ = input.readInt32();
bitField0_ |= 0x00000002;
break;
} // case 16
case 26:
{
pageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000004;
break;
} // case 26
case 34:
{
languageCode_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000008;
break;
} // case 34
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The resource name of the partner account to list available
* reports for. Parent uses the format: accounts/{account_id}
* </pre>
*
* <code>string parent = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The parent.
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The resource name of the partner account to list available
* reports for. Parent uses the format: accounts/{account_id}
* </pre>
*
* <code>string parent = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for parent.
*/
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The resource name of the partner account to list available
* reports for. Parent uses the format: accounts/{account_id}
* </pre>
*
* <code>string parent = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
public Builder setParent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The resource name of the partner account to list available
* reports for. Parent uses the format: accounts/{account_id}
* </pre>
*
* <code>string parent = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return This builder for chaining.
*/
public Builder clearParent() {
parent_ = getDefaultInstance().getParent();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The resource name of the partner account to list available
* reports for. Parent uses the format: accounts/{account_id}
* </pre>
*
* <code>string parent = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
*/
public Builder setParentBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private int pageSize_;
/**
*
*
* <pre>
* Optional. Requested page size of the report. The server might return fewer
* results than requested. If unspecified, returns 20 reports. The maximum
* value is 100.
* </pre>
*
* <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
/**
*
*
* <pre>
* Optional. Requested page size of the report. The server might return fewer
* results than requested. If unspecified, returns 20 reports. The maximum
* value is 100.
* </pre>
*
* <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The pageSize to set.
* @return This builder for chaining.
*/
public Builder setPageSize(int value) {
pageSize_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. Requested page size of the report. The server might return fewer
* results than requested. If unspecified, returns 20 reports. The maximum
* value is 100.
* </pre>
*
* <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearPageSize() {
bitField0_ = (bitField0_ & ~0x00000002);
pageSize_ = 0;
onChanged();
return this;
}
private java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* Optional. A token that specifies a page of results beyond the first page.
* Obtained through
* [ListReportsResponse.next_page_token][google.cloud.channel.v1.ListReportsResponse.next_page_token]
* of the previous
* [CloudChannelReportsService.ListReports][google.cloud.channel.v1.CloudChannelReportsService.ListReports]
* call.
* </pre>
*
* <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The pageToken.
*/
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Optional. A token that specifies a page of results beyond the first page.
* Obtained through
* [ListReportsResponse.next_page_token][google.cloud.channel.v1.ListReportsResponse.next_page_token]
* of the previous
* [CloudChannelReportsService.ListReports][google.cloud.channel.v1.CloudChannelReportsService.ListReports]
* call.
* </pre>
*
* <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for pageToken.
*/
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Optional. A token that specifies a page of results beyond the first page.
* Obtained through
* [ListReportsResponse.next_page_token][google.cloud.channel.v1.ListReportsResponse.next_page_token]
* of the previous
* [CloudChannelReportsService.ListReports][google.cloud.channel.v1.CloudChannelReportsService.ListReports]
* call.
* </pre>
*
* <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
pageToken_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. A token that specifies a page of results beyond the first page.
* Obtained through
* [ListReportsResponse.next_page_token][google.cloud.channel.v1.ListReportsResponse.next_page_token]
* of the previous
* [CloudChannelReportsService.ListReports][google.cloud.channel.v1.CloudChannelReportsService.ListReports]
* call.
* </pre>
*
* <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearPageToken() {
pageToken_ = getDefaultInstance().getPageToken();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. A token that specifies a page of results beyond the first page.
* Obtained through
* [ListReportsResponse.next_page_token][google.cloud.channel.v1.ListReportsResponse.next_page_token]
* of the previous
* [CloudChannelReportsService.ListReports][google.cloud.channel.v1.CloudChannelReportsService.ListReports]
* call.
* </pre>
*
* <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The bytes for pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
pageToken_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
private java.lang.Object languageCode_ = "";
/**
*
*
* <pre>
* Optional. The BCP-47 language code, such as "en-US". If specified, the
* response is localized to the corresponding language code if the
* original data sources support it.
* Default is "en-US".
* </pre>
*
* <code>string language_code = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The languageCode.
*/
public java.lang.String getLanguageCode() {
java.lang.Object ref = languageCode_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
languageCode_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Optional. The BCP-47 language code, such as "en-US". If specified, the
* response is localized to the corresponding language code if the
* original data sources support it.
* Default is "en-US".
* </pre>
*
* <code>string language_code = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for languageCode.
*/
public com.google.protobuf.ByteString getLanguageCodeBytes() {
java.lang.Object ref = languageCode_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
languageCode_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Optional. The BCP-47 language code, such as "en-US". If specified, the
* response is localized to the corresponding language code if the
* original data sources support it.
* Default is "en-US".
* </pre>
*
* <code>string language_code = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The languageCode to set.
* @return This builder for chaining.
*/
public Builder setLanguageCode(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
languageCode_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. The BCP-47 language code, such as "en-US". If specified, the
* response is localized to the corresponding language code if the
* original data sources support it.
* Default is "en-US".
* </pre>
*
* <code>string language_code = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearLanguageCode() {
languageCode_ = getDefaultInstance().getLanguageCode();
bitField0_ = (bitField0_ & ~0x00000008);
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. The BCP-47 language code, such as "en-US". If specified, the
* response is localized to the corresponding language code if the
* original data sources support it.
* Default is "en-US".
* </pre>
*
* <code>string language_code = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The bytes for languageCode to set.
* @return This builder for chaining.
*/
public Builder setLanguageCodeBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
languageCode_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.channel.v1.ListReportsRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.channel.v1.ListReportsRequest)
private static final com.google.cloud.channel.v1.ListReportsRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.channel.v1.ListReportsRequest();
}
public static com.google.cloud.channel.v1.ListReportsRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ListReportsRequest> PARSER =
new com.google.protobuf.AbstractParser<ListReportsRequest>() {
@java.lang.Override
public ListReportsRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ListReportsRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListReportsRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.channel.v1.ListReportsRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 37,923 | java-dataform/proto-google-cloud-dataform-v1beta1/src/main/java/com/google/cloud/dataform/v1beta1/QueryCompilationResultActionsRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/dataform/v1beta1/dataform.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.dataform.v1beta1;
/**
 *
 *
 * <pre>
 * `QueryCompilationResultActions` request message.
 * </pre>
 *
 * Protobuf type {@code google.cloud.dataform.v1beta1.QueryCompilationResultActionsRequest}
 */
public final class QueryCompilationResultActionsRequest
    extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.dataform.v1beta1.QueryCompilationResultActionsRequest)
    QueryCompilationResultActionsRequestOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use QueryCompilationResultActionsRequest.newBuilder() to construct.
  private QueryCompilationResultActionsRequest(
      com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private QueryCompilationResultActionsRequest() {
    name_ = "";
    pageToken_ = "";
    filter_ = "";
  }
  // Reflective factory used by the protobuf runtime; the parameter is intentionally unused.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new QueryCompilationResultActionsRequest();
  }
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.dataform.v1beta1.DataformProto
        .internal_static_google_cloud_dataform_v1beta1_QueryCompilationResultActionsRequest_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.dataform.v1beta1.DataformProto
        .internal_static_google_cloud_dataform_v1beta1_QueryCompilationResultActionsRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.dataform.v1beta1.QueryCompilationResultActionsRequest.class,
            com.google.cloud.dataform.v1beta1.QueryCompilationResultActionsRequest.Builder.class);
  }
  public static final int NAME_FIELD_NUMBER = 1;
  // Lazily holds either a String or a ByteString; decoded to String on first access and cached.
  @SuppressWarnings("serial")
  private volatile java.lang.Object name_ = "";
  /**
   *
   *
   * <pre>
   * Required. The compilation result's name.
   * </pre>
   *
   * <code>
   * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The name.
   */
  @java.lang.Override
  public java.lang.String getName() {
    java.lang.Object ref = name_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      name_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Required. The compilation result's name.
   * </pre>
   *
   * <code>
   * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The bytes for name.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getNameBytes() {
    java.lang.Object ref = name_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      name_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int PAGE_SIZE_FIELD_NUMBER = 2;
  private int pageSize_ = 0;
  /**
   *
   *
   * <pre>
   * Optional. Maximum number of compilation results to return. The server may
   * return fewer items than requested. If unspecified, the server will pick an
   * appropriate default.
   * </pre>
   *
   * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The pageSize.
   */
  @java.lang.Override
  public int getPageSize() {
    return pageSize_;
  }
  public static final int PAGE_TOKEN_FIELD_NUMBER = 3;
  @SuppressWarnings("serial")
  private volatile java.lang.Object pageToken_ = "";
  /**
   *
   *
   * <pre>
   * Optional. Page token received from a previous
   * `QueryCompilationResultActions` call. Provide this to retrieve the
   * subsequent page.
   *
   * When paginating, all other parameters provided to
   * `QueryCompilationResultActions`, with the exception of `page_size`, must
   * match the call that provided the page token.
   * </pre>
   *
   * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The pageToken.
   */
  @java.lang.Override
  public java.lang.String getPageToken() {
    java.lang.Object ref = pageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      pageToken_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Optional. Page token received from a previous
   * `QueryCompilationResultActions` call. Provide this to retrieve the
   * subsequent page.
   *
   * When paginating, all other parameters provided to
   * `QueryCompilationResultActions`, with the exception of `page_size`, must
   * match the call that provided the page token.
   * </pre>
   *
   * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The bytes for pageToken.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getPageTokenBytes() {
    java.lang.Object ref = pageToken_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      pageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int FILTER_FIELD_NUMBER = 4;
  @SuppressWarnings("serial")
  private volatile java.lang.Object filter_ = "";
  /**
   *
   *
   * <pre>
   * Optional. Optional filter for the returned list. Filtering is only
   * currently supported on the `file_path` field.
   * </pre>
   *
   * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The filter.
   */
  @java.lang.Override
  public java.lang.String getFilter() {
    java.lang.Object ref = filter_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      filter_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Optional. Optional filter for the returned list. Filtering is only
   * currently supported on the `file_path` field.
   * </pre>
   *
   * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The bytes for filter.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getFilterBytes() {
    java.lang.Object ref = filter_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      filter_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Memoized isInitialized() result: -1 = not yet computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
    }
    if (pageSize_ != 0) {
      output.writeInt32(2, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 4, filter_);
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
    }
    if (pageSize_ != 0) {
      size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, filter_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.dataform.v1beta1.QueryCompilationResultActionsRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.dataform.v1beta1.QueryCompilationResultActionsRequest other =
        (com.google.cloud.dataform.v1beta1.QueryCompilationResultActionsRequest) obj;
    if (!getName().equals(other.getName())) return false;
    if (getPageSize() != other.getPageSize()) return false;
    if (!getPageToken().equals(other.getPageToken())) return false;
    if (!getFilter().equals(other.getFilter())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + NAME_FIELD_NUMBER;
    hash = (53 * hash) + getName().hashCode();
    hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
    hash = (53 * hash) + getPageSize();
    hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getPageToken().hashCode();
    hash = (37 * hash) + FILTER_FIELD_NUMBER;
    hash = (53 * hash) + getFilter().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  public static com.google.cloud.dataform.v1beta1.QueryCompilationResultActionsRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.dataform.v1beta1.QueryCompilationResultActionsRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.dataform.v1beta1.QueryCompilationResultActionsRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.dataform.v1beta1.QueryCompilationResultActionsRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.dataform.v1beta1.QueryCompilationResultActionsRequest parseFrom(
      byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.dataform.v1beta1.QueryCompilationResultActionsRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.dataform.v1beta1.QueryCompilationResultActionsRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.dataform.v1beta1.QueryCompilationResultActionsRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.dataform.v1beta1.QueryCompilationResultActionsRequest
      parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.dataform.v1beta1.QueryCompilationResultActionsRequest
      parseDelimitedFrom(
          java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.dataform.v1beta1.QueryCompilationResultActionsRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.dataform.v1beta1.QueryCompilationResultActionsRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(
      com.google.cloud.dataform.v1beta1.QueryCompilationResultActionsRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * `QueryCompilationResultActions` request message.
   * </pre>
   *
   * Protobuf type {@code google.cloud.dataform.v1beta1.QueryCompilationResultActionsRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.dataform.v1beta1.QueryCompilationResultActionsRequest)
      com.google.cloud.dataform.v1beta1.QueryCompilationResultActionsRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.dataform.v1beta1.DataformProto
          .internal_static_google_cloud_dataform_v1beta1_QueryCompilationResultActionsRequest_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.dataform.v1beta1.DataformProto
          .internal_static_google_cloud_dataform_v1beta1_QueryCompilationResultActionsRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.dataform.v1beta1.QueryCompilationResultActionsRequest.class,
              com.google.cloud.dataform.v1beta1.QueryCompilationResultActionsRequest.Builder.class);
    }
    // Construct using
    // com.google.cloud.dataform.v1beta1.QueryCompilationResultActionsRequest.newBuilder()
    private Builder() {}
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      name_ = "";
      pageSize_ = 0;
      pageToken_ = "";
      filter_ = "";
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.dataform.v1beta1.DataformProto
          .internal_static_google_cloud_dataform_v1beta1_QueryCompilationResultActionsRequest_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.dataform.v1beta1.QueryCompilationResultActionsRequest
        getDefaultInstanceForType() {
      return com.google.cloud.dataform.v1beta1.QueryCompilationResultActionsRequest
          .getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.dataform.v1beta1.QueryCompilationResultActionsRequest build() {
      com.google.cloud.dataform.v1beta1.QueryCompilationResultActionsRequest result =
          buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.dataform.v1beta1.QueryCompilationResultActionsRequest buildPartial() {
      com.google.cloud.dataform.v1beta1.QueryCompilationResultActionsRequest result =
          new com.google.cloud.dataform.v1beta1.QueryCompilationResultActionsRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    private void buildPartial0(
        com.google.cloud.dataform.v1beta1.QueryCompilationResultActionsRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.name_ = name_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.pageSize_ = pageSize_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.pageToken_ = pageToken_;
      }
      if (((from_bitField0_ & 0x00000008) != 0)) {
        result.filter_ = filter_;
      }
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.dataform.v1beta1.QueryCompilationResultActionsRequest) {
        return mergeFrom(
            (com.google.cloud.dataform.v1beta1.QueryCompilationResultActionsRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    public Builder mergeFrom(
        com.google.cloud.dataform.v1beta1.QueryCompilationResultActionsRequest other) {
      if (other
          == com.google.cloud.dataform.v1beta1.QueryCompilationResultActionsRequest
              .getDefaultInstance()) return this;
      if (!other.getName().isEmpty()) {
        name_ = other.name_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (other.getPageSize() != 0) {
        setPageSize(other.getPageSize());
      }
      if (!other.getPageToken().isEmpty()) {
        pageToken_ = other.pageToken_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      if (!other.getFilter().isEmpty()) {
        filter_ = other.filter_;
        bitField0_ |= 0x00000008;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                name_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 16:
              {
                pageSize_ = input.readInt32();
                bitField0_ |= 0x00000002;
                break;
              } // case 16
            case 26:
              {
                pageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            case 34:
              {
                filter_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000008;
                break;
              } // case 34
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Tracks which fields were explicitly set: bit 0 = name, bit 1 = pageSize,
    // bit 2 = pageToken, bit 3 = filter.
    private int bitField0_;
    private java.lang.Object name_ = "";
    /**
     *
     *
     * <pre>
     * Required. The compilation result's name.
     * </pre>
     *
     * <code>
     * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The name.
     */
    public java.lang.String getName() {
      java.lang.Object ref = name_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        name_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The compilation result's name.
     * </pre>
     *
     * <code>
     * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The bytes for name.
     */
    public com.google.protobuf.ByteString getNameBytes() {
      java.lang.Object ref = name_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        name_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The compilation result's name.
     * </pre>
     *
     * <code>
     * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The name to set.
     * @return This builder for chaining.
     */
    public Builder setName(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      name_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The compilation result's name.
     * </pre>
     *
     * <code>
     * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearName() {
      name_ = getDefaultInstance().getName();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The compilation result's name.
     * </pre>
     *
     * <code>
     * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The bytes for name to set.
     * @return This builder for chaining.
     */
    public Builder setNameBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      name_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    private int pageSize_;
    /**
     *
     *
     * <pre>
     * Optional. Maximum number of compilation results to return. The server may
     * return fewer items than requested. If unspecified, the server will pick an
     * appropriate default.
     * </pre>
     *
     * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The pageSize.
     */
    @java.lang.Override
    public int getPageSize() {
      return pageSize_;
    }
    /**
     *
     *
     * <pre>
     * Optional. Maximum number of compilation results to return. The server may
     * return fewer items than requested. If unspecified, the server will pick an
     * appropriate default.
     * </pre>
     *
     * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The pageSize to set.
     * @return This builder for chaining.
     */
    public Builder setPageSize(int value) {
      pageSize_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Maximum number of compilation results to return. The server may
     * return fewer items than requested. If unspecified, the server will pick an
     * appropriate default.
     * </pre>
     *
     * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearPageSize() {
      bitField0_ = (bitField0_ & ~0x00000002);
      pageSize_ = 0;
      onChanged();
      return this;
    }
    private java.lang.Object pageToken_ = "";
    /**
     *
     *
     * <pre>
     * Optional. Page token received from a previous
     * `QueryCompilationResultActions` call. Provide this to retrieve the
     * subsequent page.
     *
     * When paginating, all other parameters provided to
     * `QueryCompilationResultActions`, with the exception of `page_size`, must
     * match the call that provided the page token.
     * </pre>
     *
     * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The pageToken.
     */
    public java.lang.String getPageToken() {
      java.lang.Object ref = pageToken_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        pageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. Page token received from a previous
     * `QueryCompilationResultActions` call. Provide this to retrieve the
     * subsequent page.
     *
     * When paginating, all other parameters provided to
     * `QueryCompilationResultActions`, with the exception of `page_size`, must
     * match the call that provided the page token.
     * </pre>
     *
     * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The bytes for pageToken.
     */
    public com.google.protobuf.ByteString getPageTokenBytes() {
      java.lang.Object ref = pageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        pageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. Page token received from a previous
     * `QueryCompilationResultActions` call. Provide this to retrieve the
     * subsequent page.
     *
     * When paginating, all other parameters provided to
     * `QueryCompilationResultActions`, with the exception of `page_size`, must
     * match the call that provided the page token.
     * </pre>
     *
     * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The pageToken to set.
     * @return This builder for chaining.
     */
    public Builder setPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      pageToken_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Page token received from a previous
     * `QueryCompilationResultActions` call. Provide this to retrieve the
     * subsequent page.
     *
     * When paginating, all other parameters provided to
     * `QueryCompilationResultActions`, with the exception of `page_size`, must
     * match the call that provided the page token.
     * </pre>
     *
     * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearPageToken() {
      pageToken_ = getDefaultInstance().getPageToken();
      bitField0_ = (bitField0_ & ~0x00000004);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Page token received from a previous
     * `QueryCompilationResultActions` call. Provide this to retrieve the
     * subsequent page.
     *
     * When paginating, all other parameters provided to
     * `QueryCompilationResultActions`, with the exception of `page_size`, must
     * match the call that provided the page token.
     * </pre>
     *
     * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The bytes for pageToken to set.
     * @return This builder for chaining.
     */
    public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      pageToken_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    private java.lang.Object filter_ = "";
    /**
     *
     *
     * <pre>
     * Optional. Optional filter for the returned list. Filtering is only
     * currently supported on the `file_path` field.
     * </pre>
     *
     * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The filter.
     */
    public java.lang.String getFilter() {
      java.lang.Object ref = filter_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        filter_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. Optional filter for the returned list. Filtering is only
     * currently supported on the `file_path` field.
     * </pre>
     *
     * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The bytes for filter.
     */
    public com.google.protobuf.ByteString getFilterBytes() {
      java.lang.Object ref = filter_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        filter_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. Optional filter for the returned list. Filtering is only
     * currently supported on the `file_path` field.
     * </pre>
     *
     * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The filter to set.
     * @return This builder for chaining.
     */
    public Builder setFilter(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      filter_ = value;
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Optional filter for the returned list. Filtering is only
     * currently supported on the `file_path` field.
     * </pre>
     *
     * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearFilter() {
      filter_ = getDefaultInstance().getFilter();
      bitField0_ = (bitField0_ & ~0x00000008);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Optional filter for the returned list. Filtering is only
     * currently supported on the `file_path` field.
     * </pre>
     *
     * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The bytes for filter to set.
     * @return This builder for chaining.
     */
    public Builder setFilterBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      filter_ = value;
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.cloud.dataform.v1beta1.QueryCompilationResultActionsRequest)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.dataform.v1beta1.QueryCompilationResultActionsRequest)
  private static final com.google.cloud.dataform.v1beta1.QueryCompilationResultActionsRequest
      DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.dataform.v1beta1.QueryCompilationResultActionsRequest();
  }
  public static com.google.cloud.dataform.v1beta1.QueryCompilationResultActionsRequest
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser that attaches the partially-built message to any parse exception via
  // setUnfinishedMessage, so callers can inspect what was read before the failure.
  private static final com.google.protobuf.Parser<QueryCompilationResultActionsRequest> PARSER =
      new com.google.protobuf.AbstractParser<QueryCompilationResultActionsRequest>() {
        @java.lang.Override
        public QueryCompilationResultActionsRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<QueryCompilationResultActionsRequest> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<QueryCompilationResultActionsRequest> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.dataform.v1beta1.QueryCompilationResultActionsRequest
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 37,933 | java-recommendations-ai/proto-google-cloud-recommendations-ai-v1beta1/src/main/java/com/google/cloud/recommendationengine/v1beta1/ListUserEventsResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/recommendationengine/v1beta1/user_event_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.recommendationengine.v1beta1;
/**
*
*
* <pre>
* Response message for ListUserEvents method.
* </pre>
*
* Protobuf type {@code google.cloud.recommendationengine.v1beta1.ListUserEventsResponse}
*/
public final class ListUserEventsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.recommendationengine.v1beta1.ListUserEventsResponse)
ListUserEventsResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListUserEventsResponse.newBuilder() to construct.
private ListUserEventsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListUserEventsResponse() {
userEvents_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListUserEventsResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.recommendationengine.v1beta1.UserEventServiceOuterClass
.internal_static_google_cloud_recommendationengine_v1beta1_ListUserEventsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.recommendationengine.v1beta1.UserEventServiceOuterClass
.internal_static_google_cloud_recommendationengine_v1beta1_ListUserEventsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.recommendationengine.v1beta1.ListUserEventsResponse.class,
com.google.cloud.recommendationengine.v1beta1.ListUserEventsResponse.Builder.class);
}
public static final int USER_EVENTS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.recommendationengine.v1beta1.UserEvent> userEvents_;
/**
*
*
* <pre>
* The user events.
* </pre>
*
* <code>repeated .google.cloud.recommendationengine.v1beta1.UserEvent user_events = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.recommendationengine.v1beta1.UserEvent>
getUserEventsList() {
return userEvents_;
}
/**
*
*
* <pre>
* The user events.
* </pre>
*
* <code>repeated .google.cloud.recommendationengine.v1beta1.UserEvent user_events = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.recommendationengine.v1beta1.UserEventOrBuilder>
getUserEventsOrBuilderList() {
return userEvents_;
}
/**
*
*
* <pre>
* The user events.
* </pre>
*
* <code>repeated .google.cloud.recommendationengine.v1beta1.UserEvent user_events = 1;</code>
*/
@java.lang.Override
public int getUserEventsCount() {
return userEvents_.size();
}
/**
*
*
* <pre>
* The user events.
* </pre>
*
* <code>repeated .google.cloud.recommendationengine.v1beta1.UserEvent user_events = 1;</code>
*/
@java.lang.Override
public com.google.cloud.recommendationengine.v1beta1.UserEvent getUserEvents(int index) {
return userEvents_.get(index);
}
/**
*
*
* <pre>
* The user events.
* </pre>
*
* <code>repeated .google.cloud.recommendationengine.v1beta1.UserEvent user_events = 1;</code>
*/
@java.lang.Override
public com.google.cloud.recommendationengine.v1beta1.UserEventOrBuilder getUserEventsOrBuilder(
int index) {
return userEvents_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* If empty, the list is complete. If nonempty, the token to pass to the next
* request's ListUserEvents.page_token.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* If empty, the list is complete. If nonempty, the token to pass to the next
* request's ListUserEvents.page_token.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < userEvents_.size(); i++) {
output.writeMessage(1, userEvents_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < userEvents_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, userEvents_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.recommendationengine.v1beta1.ListUserEventsResponse)) {
return super.equals(obj);
}
com.google.cloud.recommendationengine.v1beta1.ListUserEventsResponse other =
(com.google.cloud.recommendationengine.v1beta1.ListUserEventsResponse) obj;
if (!getUserEventsList().equals(other.getUserEventsList())) return false;
if (!getNextPageToken().equals(other.getNextPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getUserEventsCount() > 0) {
hash = (37 * hash) + USER_EVENTS_FIELD_NUMBER;
hash = (53 * hash) + getUserEventsList().hashCode();
}
hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getNextPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.recommendationengine.v1beta1.ListUserEventsResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.recommendationengine.v1beta1.ListUserEventsResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.recommendationengine.v1beta1.ListUserEventsResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.recommendationengine.v1beta1.ListUserEventsResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.recommendationengine.v1beta1.ListUserEventsResponse parseFrom(
byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.recommendationengine.v1beta1.ListUserEventsResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.recommendationengine.v1beta1.ListUserEventsResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.recommendationengine.v1beta1.ListUserEventsResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.recommendationengine.v1beta1.ListUserEventsResponse
parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.recommendationengine.v1beta1.ListUserEventsResponse
parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.recommendationengine.v1beta1.ListUserEventsResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.recommendationengine.v1beta1.ListUserEventsResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.recommendationengine.v1beta1.ListUserEventsResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Response message for ListUserEvents method.
* </pre>
*
* Protobuf type {@code google.cloud.recommendationengine.v1beta1.ListUserEventsResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.recommendationengine.v1beta1.ListUserEventsResponse)
com.google.cloud.recommendationengine.v1beta1.ListUserEventsResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.recommendationengine.v1beta1.UserEventServiceOuterClass
.internal_static_google_cloud_recommendationengine_v1beta1_ListUserEventsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.recommendationengine.v1beta1.UserEventServiceOuterClass
.internal_static_google_cloud_recommendationengine_v1beta1_ListUserEventsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.recommendationengine.v1beta1.ListUserEventsResponse.class,
com.google.cloud.recommendationengine.v1beta1.ListUserEventsResponse.Builder.class);
}
// Construct using
// com.google.cloud.recommendationengine.v1beta1.ListUserEventsResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (userEventsBuilder_ == null) {
userEvents_ = java.util.Collections.emptyList();
} else {
userEvents_ = null;
userEventsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.recommendationengine.v1beta1.UserEventServiceOuterClass
.internal_static_google_cloud_recommendationengine_v1beta1_ListUserEventsResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.recommendationengine.v1beta1.ListUserEventsResponse
getDefaultInstanceForType() {
return com.google.cloud.recommendationengine.v1beta1.ListUserEventsResponse
.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.recommendationengine.v1beta1.ListUserEventsResponse build() {
com.google.cloud.recommendationengine.v1beta1.ListUserEventsResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.recommendationengine.v1beta1.ListUserEventsResponse buildPartial() {
com.google.cloud.recommendationengine.v1beta1.ListUserEventsResponse result =
new com.google.cloud.recommendationengine.v1beta1.ListUserEventsResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.cloud.recommendationengine.v1beta1.ListUserEventsResponse result) {
if (userEventsBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
userEvents_ = java.util.Collections.unmodifiableList(userEvents_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.userEvents_ = userEvents_;
} else {
result.userEvents_ = userEventsBuilder_.build();
}
}
private void buildPartial0(
com.google.cloud.recommendationengine.v1beta1.ListUserEventsResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.recommendationengine.v1beta1.ListUserEventsResponse) {
return mergeFrom(
(com.google.cloud.recommendationengine.v1beta1.ListUserEventsResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(
com.google.cloud.recommendationengine.v1beta1.ListUserEventsResponse other) {
if (other
== com.google.cloud.recommendationengine.v1beta1.ListUserEventsResponse
.getDefaultInstance()) return this;
if (userEventsBuilder_ == null) {
if (!other.userEvents_.isEmpty()) {
if (userEvents_.isEmpty()) {
userEvents_ = other.userEvents_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureUserEventsIsMutable();
userEvents_.addAll(other.userEvents_);
}
onChanged();
}
} else {
if (!other.userEvents_.isEmpty()) {
if (userEventsBuilder_.isEmpty()) {
userEventsBuilder_.dispose();
userEventsBuilder_ = null;
userEvents_ = other.userEvents_;
bitField0_ = (bitField0_ & ~0x00000001);
userEventsBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getUserEventsFieldBuilder()
: null;
} else {
userEventsBuilder_.addAllMessages(other.userEvents_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.cloud.recommendationengine.v1beta1.UserEvent m =
input.readMessage(
com.google.cloud.recommendationengine.v1beta1.UserEvent.parser(),
extensionRegistry);
if (userEventsBuilder_ == null) {
ensureUserEventsIsMutable();
userEvents_.add(m);
} else {
userEventsBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.cloud.recommendationengine.v1beta1.UserEvent> userEvents_ =
java.util.Collections.emptyList();
private void ensureUserEventsIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
userEvents_ =
new java.util.ArrayList<com.google.cloud.recommendationengine.v1beta1.UserEvent>(
userEvents_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.recommendationengine.v1beta1.UserEvent,
com.google.cloud.recommendationengine.v1beta1.UserEvent.Builder,
com.google.cloud.recommendationengine.v1beta1.UserEventOrBuilder>
userEventsBuilder_;
/**
*
*
* <pre>
* The user events.
* </pre>
*
* <code>repeated .google.cloud.recommendationengine.v1beta1.UserEvent user_events = 1;</code>
*/
public java.util.List<com.google.cloud.recommendationengine.v1beta1.UserEvent>
getUserEventsList() {
if (userEventsBuilder_ == null) {
return java.util.Collections.unmodifiableList(userEvents_);
} else {
return userEventsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* The user events.
* </pre>
*
* <code>repeated .google.cloud.recommendationengine.v1beta1.UserEvent user_events = 1;</code>
*/
public int getUserEventsCount() {
if (userEventsBuilder_ == null) {
return userEvents_.size();
} else {
return userEventsBuilder_.getCount();
}
}
/**
*
*
* <pre>
* The user events.
* </pre>
*
* <code>repeated .google.cloud.recommendationengine.v1beta1.UserEvent user_events = 1;</code>
*/
public com.google.cloud.recommendationengine.v1beta1.UserEvent getUserEvents(int index) {
if (userEventsBuilder_ == null) {
return userEvents_.get(index);
} else {
return userEventsBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* The user events.
* </pre>
*
* <code>repeated .google.cloud.recommendationengine.v1beta1.UserEvent user_events = 1;</code>
*/
public Builder setUserEvents(
int index, com.google.cloud.recommendationengine.v1beta1.UserEvent value) {
if (userEventsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureUserEventsIsMutable();
userEvents_.set(index, value);
onChanged();
} else {
userEventsBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The user events.
* </pre>
*
* <code>repeated .google.cloud.recommendationengine.v1beta1.UserEvent user_events = 1;</code>
*/
public Builder setUserEvents(
int index,
com.google.cloud.recommendationengine.v1beta1.UserEvent.Builder builderForValue) {
if (userEventsBuilder_ == null) {
ensureUserEventsIsMutable();
userEvents_.set(index, builderForValue.build());
onChanged();
} else {
userEventsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The user events.
* </pre>
*
* <code>repeated .google.cloud.recommendationengine.v1beta1.UserEvent user_events = 1;</code>
*/
public Builder addUserEvents(com.google.cloud.recommendationengine.v1beta1.UserEvent value) {
if (userEventsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureUserEventsIsMutable();
userEvents_.add(value);
onChanged();
} else {
userEventsBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* The user events.
* </pre>
*
* <code>repeated .google.cloud.recommendationengine.v1beta1.UserEvent user_events = 1;</code>
*/
public Builder addUserEvents(
int index, com.google.cloud.recommendationengine.v1beta1.UserEvent value) {
if (userEventsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureUserEventsIsMutable();
userEvents_.add(index, value);
onChanged();
} else {
userEventsBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The user events.
* </pre>
*
* <code>repeated .google.cloud.recommendationengine.v1beta1.UserEvent user_events = 1;</code>
*/
public Builder addUserEvents(
com.google.cloud.recommendationengine.v1beta1.UserEvent.Builder builderForValue) {
if (userEventsBuilder_ == null) {
ensureUserEventsIsMutable();
userEvents_.add(builderForValue.build());
onChanged();
} else {
userEventsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The user events.
* </pre>
*
* <code>repeated .google.cloud.recommendationengine.v1beta1.UserEvent user_events = 1;</code>
*/
public Builder addUserEvents(
int index,
com.google.cloud.recommendationengine.v1beta1.UserEvent.Builder builderForValue) {
if (userEventsBuilder_ == null) {
ensureUserEventsIsMutable();
userEvents_.add(index, builderForValue.build());
onChanged();
} else {
userEventsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The user events.
* </pre>
*
* <code>repeated .google.cloud.recommendationengine.v1beta1.UserEvent user_events = 1;</code>
*/
public Builder addAllUserEvents(
java.lang.Iterable<? extends com.google.cloud.recommendationengine.v1beta1.UserEvent>
values) {
if (userEventsBuilder_ == null) {
ensureUserEventsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, userEvents_);
onChanged();
} else {
userEventsBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* The user events.
* </pre>
*
* <code>repeated .google.cloud.recommendationengine.v1beta1.UserEvent user_events = 1;</code>
*/
public Builder clearUserEvents() {
if (userEventsBuilder_ == null) {
userEvents_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
userEventsBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* The user events.
* </pre>
*
* <code>repeated .google.cloud.recommendationengine.v1beta1.UserEvent user_events = 1;</code>
*/
public Builder removeUserEvents(int index) {
if (userEventsBuilder_ == null) {
ensureUserEventsIsMutable();
userEvents_.remove(index);
onChanged();
} else {
userEventsBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* The user events.
* </pre>
*
* <code>repeated .google.cloud.recommendationengine.v1beta1.UserEvent user_events = 1;</code>
*/
public com.google.cloud.recommendationengine.v1beta1.UserEvent.Builder getUserEventsBuilder(
int index) {
return getUserEventsFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* The user events.
* </pre>
*
* <code>repeated .google.cloud.recommendationengine.v1beta1.UserEvent user_events = 1;</code>
*/
public com.google.cloud.recommendationengine.v1beta1.UserEventOrBuilder getUserEventsOrBuilder(
int index) {
if (userEventsBuilder_ == null) {
return userEvents_.get(index);
} else {
return userEventsBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* The user events.
* </pre>
*
* <code>repeated .google.cloud.recommendationengine.v1beta1.UserEvent user_events = 1;</code>
*/
public java.util.List<
? extends com.google.cloud.recommendationengine.v1beta1.UserEventOrBuilder>
getUserEventsOrBuilderList() {
if (userEventsBuilder_ != null) {
return userEventsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(userEvents_);
}
}
/**
*
*
* <pre>
* The user events.
* </pre>
*
* <code>repeated .google.cloud.recommendationengine.v1beta1.UserEvent user_events = 1;</code>
*/
public com.google.cloud.recommendationengine.v1beta1.UserEvent.Builder addUserEventsBuilder() {
return getUserEventsFieldBuilder()
.addBuilder(com.google.cloud.recommendationengine.v1beta1.UserEvent.getDefaultInstance());
}
/**
*
*
* <pre>
* The user events.
* </pre>
*
* <code>repeated .google.cloud.recommendationengine.v1beta1.UserEvent user_events = 1;</code>
*/
public com.google.cloud.recommendationengine.v1beta1.UserEvent.Builder addUserEventsBuilder(
int index) {
return getUserEventsFieldBuilder()
.addBuilder(
index, com.google.cloud.recommendationengine.v1beta1.UserEvent.getDefaultInstance());
}
/**
*
*
* <pre>
* The user events.
* </pre>
*
* <code>repeated .google.cloud.recommendationengine.v1beta1.UserEvent user_events = 1;</code>
*/
public java.util.List<com.google.cloud.recommendationengine.v1beta1.UserEvent.Builder>
getUserEventsBuilderList() {
return getUserEventsFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.recommendationengine.v1beta1.UserEvent,
com.google.cloud.recommendationengine.v1beta1.UserEvent.Builder,
com.google.cloud.recommendationengine.v1beta1.UserEventOrBuilder>
getUserEventsFieldBuilder() {
if (userEventsBuilder_ == null) {
userEventsBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.recommendationengine.v1beta1.UserEvent,
com.google.cloud.recommendationengine.v1beta1.UserEvent.Builder,
com.google.cloud.recommendationengine.v1beta1.UserEventOrBuilder>(
userEvents_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
userEvents_ = null;
}
return userEventsBuilder_;
}
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* If empty, the list is complete. If nonempty, the token to pass to the next
* request's ListUserEvents.page_token.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* If empty, the list is complete. If nonempty, the token to pass to the next
* request's ListUserEvents.page_token.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* If empty, the list is complete. If nonempty, the token to pass to the next
* request's ListUserEvents.page_token.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* If empty, the list is complete. If nonempty, the token to pass to the next
* request's ListUserEvents.page_token.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* If empty, the list is complete. If nonempty, the token to pass to the next
* request's ListUserEvents.page_token.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.recommendationengine.v1beta1.ListUserEventsResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.recommendationengine.v1beta1.ListUserEventsResponse)
private static final com.google.cloud.recommendationengine.v1beta1.ListUserEventsResponse
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.recommendationengine.v1beta1.ListUserEventsResponse();
}
public static com.google.cloud.recommendationengine.v1beta1.ListUserEventsResponse
getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ListUserEventsResponse> PARSER =
new com.google.protobuf.AbstractParser<ListUserEventsResponse>() {
@java.lang.Override
public ListUserEventsResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ListUserEventsResponse> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListUserEventsResponse> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.recommendationengine.v1beta1.ListUserEventsResponse
getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 38,161 | java-compute/proto-google-cloud-compute-v1/src/main/java/com/google/cloud/compute/v1/AutoscalingPolicyCpuUtilization.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/compute/v1/compute.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.compute.v1;
/**
*
*
* <pre>
* CPU utilization policy.
* </pre>
*
* Protobuf type {@code google.cloud.compute.v1.AutoscalingPolicyCpuUtilization}
*/
public final class AutoscalingPolicyCpuUtilization extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.compute.v1.AutoscalingPolicyCpuUtilization)
AutoscalingPolicyCpuUtilizationOrBuilder {
private static final long serialVersionUID = 0L;
// Use AutoscalingPolicyCpuUtilization.newBuilder() to construct.
private AutoscalingPolicyCpuUtilization(
com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private AutoscalingPolicyCpuUtilization() {
predictiveMethod_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new AutoscalingPolicyCpuUtilization();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_AutoscalingPolicyCpuUtilization_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_AutoscalingPolicyCpuUtilization_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.compute.v1.AutoscalingPolicyCpuUtilization.class,
com.google.cloud.compute.v1.AutoscalingPolicyCpuUtilization.Builder.class);
}
/**
*
*
* <pre>
* Indicates whether predictive autoscaling based on CPU metric is enabled. Valid values are: * NONE (default). No predictive method is used. The autoscaler scales the group to meet current demand based on real-time metrics. * OPTIMIZE_AVAILABILITY. Predictive autoscaling improves availability by monitoring daily and weekly load patterns and scaling out ahead of anticipated demand.
* </pre>
*
* Protobuf enum {@code google.cloud.compute.v1.AutoscalingPolicyCpuUtilization.PredictiveMethod}
*/
public enum PredictiveMethod implements com.google.protobuf.ProtocolMessageEnum {
/**
*
*
* <pre>
* A value indicating that the enum field is not set.
* </pre>
*
* <code>UNDEFINED_PREDICTIVE_METHOD = 0;</code>
*/
UNDEFINED_PREDICTIVE_METHOD(0),
/**
*
*
* <pre>
* No predictive method is used. The autoscaler scales the group to meet current demand based on real-time metrics
* </pre>
*
* <code>NONE = 2402104;</code>
*/
NONE(2402104),
/**
*
*
* <pre>
* Predictive autoscaling improves availability by monitoring daily and weekly load patterns and scaling out ahead of anticipated demand.
* </pre>
*
* <code>OPTIMIZE_AVAILABILITY = 11629437;</code>
*/
OPTIMIZE_AVAILABILITY(11629437),
UNRECOGNIZED(-1),
;
/**
*
*
* <pre>
* A value indicating that the enum field is not set.
* </pre>
*
* <code>UNDEFINED_PREDICTIVE_METHOD = 0;</code>
*/
public static final int UNDEFINED_PREDICTIVE_METHOD_VALUE = 0;
/**
*
*
* <pre>
* No predictive method is used. The autoscaler scales the group to meet current demand based on real-time metrics
* </pre>
*
* <code>NONE = 2402104;</code>
*/
public static final int NONE_VALUE = 2402104;
/**
*
*
* <pre>
* Predictive autoscaling improves availability by monitoring daily and weekly load patterns and scaling out ahead of anticipated demand.
* </pre>
*
* <code>OPTIMIZE_AVAILABILITY = 11629437;</code>
*/
public static final int OPTIMIZE_AVAILABILITY_VALUE = 11629437;
public final int getNumber() {
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalArgumentException(
"Can't get the number of an unknown enum value.");
}
return value;
}
/**
* @param value The numeric wire value of the corresponding enum entry.
* @return The enum associated with the given numeric wire value.
* @deprecated Use {@link #forNumber(int)} instead.
*/
@java.lang.Deprecated
public static PredictiveMethod valueOf(int value) {
return forNumber(value);
}
/**
* @param value The numeric wire value of the corresponding enum entry.
* @return The enum associated with the given numeric wire value.
*/
public static PredictiveMethod forNumber(int value) {
switch (value) {
case 0:
return UNDEFINED_PREDICTIVE_METHOD;
case 2402104:
return NONE;
case 11629437:
return OPTIMIZE_AVAILABILITY;
default:
return null;
}
}
public static com.google.protobuf.Internal.EnumLiteMap<PredictiveMethod> internalGetValueMap() {
return internalValueMap;
}
private static final com.google.protobuf.Internal.EnumLiteMap<PredictiveMethod>
internalValueMap =
new com.google.protobuf.Internal.EnumLiteMap<PredictiveMethod>() {
public PredictiveMethod findValueByNumber(int number) {
return PredictiveMethod.forNumber(number);
}
};
public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() {
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalStateException(
"Can't get the descriptor of an unrecognized enum value.");
}
return getDescriptor().getValues().get(ordinal());
}
public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() {
return getDescriptor();
}
public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() {
return com.google.cloud.compute.v1.AutoscalingPolicyCpuUtilization.getDescriptor()
.getEnumTypes()
.get(0);
}
private static final PredictiveMethod[] VALUES = values();
public static PredictiveMethod valueOf(
com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
}
if (desc.getIndex() == -1) {
return UNRECOGNIZED;
}
return VALUES[desc.getIndex()];
}
private final int value;
private PredictiveMethod(int value) {
this.value = value;
}
// @@protoc_insertion_point(enum_scope:google.cloud.compute.v1.AutoscalingPolicyCpuUtilization.PredictiveMethod)
}
private int bitField0_;
public static final int PREDICTIVE_METHOD_FIELD_NUMBER = 390220737;
@SuppressWarnings("serial")
private volatile java.lang.Object predictiveMethod_ = "";
/**
*
*
* <pre>
* Indicates whether predictive autoscaling based on CPU metric is enabled. Valid values are: * NONE (default). No predictive method is used. The autoscaler scales the group to meet current demand based on real-time metrics. * OPTIMIZE_AVAILABILITY. Predictive autoscaling improves availability by monitoring daily and weekly load patterns and scaling out ahead of anticipated demand.
* Check the PredictiveMethod enum for the list of possible values.
* </pre>
*
* <code>optional string predictive_method = 390220737;</code>
*
* @return Whether the predictiveMethod field is set.
*/
@java.lang.Override
public boolean hasPredictiveMethod() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Indicates whether predictive autoscaling based on CPU metric is enabled. Valid values are: * NONE (default). No predictive method is used. The autoscaler scales the group to meet current demand based on real-time metrics. * OPTIMIZE_AVAILABILITY. Predictive autoscaling improves availability by monitoring daily and weekly load patterns and scaling out ahead of anticipated demand.
* Check the PredictiveMethod enum for the list of possible values.
* </pre>
*
* <code>optional string predictive_method = 390220737;</code>
*
* @return The predictiveMethod.
*/
@java.lang.Override
public java.lang.String getPredictiveMethod() {
java.lang.Object ref = predictiveMethod_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
predictiveMethod_ = s;
return s;
}
}
/**
*
*
* <pre>
* Indicates whether predictive autoscaling based on CPU metric is enabled. Valid values are: * NONE (default). No predictive method is used. The autoscaler scales the group to meet current demand based on real-time metrics. * OPTIMIZE_AVAILABILITY. Predictive autoscaling improves availability by monitoring daily and weekly load patterns and scaling out ahead of anticipated demand.
* Check the PredictiveMethod enum for the list of possible values.
* </pre>
*
* <code>optional string predictive_method = 390220737;</code>
*
* @return The bytes for predictiveMethod.
*/
@java.lang.Override
public com.google.protobuf.ByteString getPredictiveMethodBytes() {
java.lang.Object ref = predictiveMethod_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
predictiveMethod_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int UTILIZATION_TARGET_FIELD_NUMBER = 215905870;
private double utilizationTarget_ = 0D;
/**
*
*
* <pre>
* The target CPU utilization that the autoscaler maintains. Must be a float value in the range (0, 1]. If not specified, the default is 0.6. If the CPU level is below the target utilization, the autoscaler scales in the number of instances until it reaches the minimum number of instances you specified or until the average CPU of your instances reaches the target utilization. If the average CPU is above the target utilization, the autoscaler scales out until it reaches the maximum number of instances you specified or until the average utilization reaches the target utilization.
* </pre>
*
* <code>optional double utilization_target = 215905870;</code>
*
* @return Whether the utilizationTarget field is set.
*/
@java.lang.Override
public boolean hasUtilizationTarget() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* The target CPU utilization that the autoscaler maintains. Must be a float value in the range (0, 1]. If not specified, the default is 0.6. If the CPU level is below the target utilization, the autoscaler scales in the number of instances until it reaches the minimum number of instances you specified or until the average CPU of your instances reaches the target utilization. If the average CPU is above the target utilization, the autoscaler scales out until it reaches the maximum number of instances you specified or until the average utilization reaches the target utilization.
* </pre>
*
* <code>optional double utilization_target = 215905870;</code>
*
* @return The utilizationTarget.
*/
@java.lang.Override
public double getUtilizationTarget() {
return utilizationTarget_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (((bitField0_ & 0x00000002) != 0)) {
output.writeDouble(215905870, utilizationTarget_);
}
if (((bitField0_ & 0x00000001) != 0)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 390220737, predictiveMethod_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000002) != 0)) {
size +=
com.google.protobuf.CodedOutputStream.computeDoubleSize(215905870, utilizationTarget_);
}
if (((bitField0_ & 0x00000001) != 0)) {
size +=
com.google.protobuf.GeneratedMessageV3.computeStringSize(390220737, predictiveMethod_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.compute.v1.AutoscalingPolicyCpuUtilization)) {
return super.equals(obj);
}
com.google.cloud.compute.v1.AutoscalingPolicyCpuUtilization other =
(com.google.cloud.compute.v1.AutoscalingPolicyCpuUtilization) obj;
if (hasPredictiveMethod() != other.hasPredictiveMethod()) return false;
if (hasPredictiveMethod()) {
if (!getPredictiveMethod().equals(other.getPredictiveMethod())) return false;
}
if (hasUtilizationTarget() != other.hasUtilizationTarget()) return false;
if (hasUtilizationTarget()) {
if (java.lang.Double.doubleToLongBits(getUtilizationTarget())
!= java.lang.Double.doubleToLongBits(other.getUtilizationTarget())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasPredictiveMethod()) {
hash = (37 * hash) + PREDICTIVE_METHOD_FIELD_NUMBER;
hash = (53 * hash) + getPredictiveMethod().hashCode();
}
if (hasUtilizationTarget()) {
hash = (37 * hash) + UTILIZATION_TARGET_FIELD_NUMBER;
hash =
(53 * hash)
+ com.google.protobuf.Internal.hashLong(
java.lang.Double.doubleToLongBits(getUtilizationTarget()));
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.compute.v1.AutoscalingPolicyCpuUtilization parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.compute.v1.AutoscalingPolicyCpuUtilization parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.compute.v1.AutoscalingPolicyCpuUtilization parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.compute.v1.AutoscalingPolicyCpuUtilization parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.compute.v1.AutoscalingPolicyCpuUtilization parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.compute.v1.AutoscalingPolicyCpuUtilization parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.compute.v1.AutoscalingPolicyCpuUtilization parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.compute.v1.AutoscalingPolicyCpuUtilization parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.compute.v1.AutoscalingPolicyCpuUtilization parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.compute.v1.AutoscalingPolicyCpuUtilization parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.compute.v1.AutoscalingPolicyCpuUtilization parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.compute.v1.AutoscalingPolicyCpuUtilization parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.compute.v1.AutoscalingPolicyCpuUtilization prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* CPU utilization policy.
* </pre>
*
* Protobuf type {@code google.cloud.compute.v1.AutoscalingPolicyCpuUtilization}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.compute.v1.AutoscalingPolicyCpuUtilization)
com.google.cloud.compute.v1.AutoscalingPolicyCpuUtilizationOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_AutoscalingPolicyCpuUtilization_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_AutoscalingPolicyCpuUtilization_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.compute.v1.AutoscalingPolicyCpuUtilization.class,
com.google.cloud.compute.v1.AutoscalingPolicyCpuUtilization.Builder.class);
}
// Construct using com.google.cloud.compute.v1.AutoscalingPolicyCpuUtilization.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
predictiveMethod_ = "";
utilizationTarget_ = 0D;
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_AutoscalingPolicyCpuUtilization_descriptor;
}
@java.lang.Override
public com.google.cloud.compute.v1.AutoscalingPolicyCpuUtilization getDefaultInstanceForType() {
return com.google.cloud.compute.v1.AutoscalingPolicyCpuUtilization.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.compute.v1.AutoscalingPolicyCpuUtilization build() {
com.google.cloud.compute.v1.AutoscalingPolicyCpuUtilization result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.compute.v1.AutoscalingPolicyCpuUtilization buildPartial() {
com.google.cloud.compute.v1.AutoscalingPolicyCpuUtilization result =
new com.google.cloud.compute.v1.AutoscalingPolicyCpuUtilization(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.compute.v1.AutoscalingPolicyCpuUtilization result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.predictiveMethod_ = predictiveMethod_;
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.utilizationTarget_ = utilizationTarget_;
to_bitField0_ |= 0x00000002;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.compute.v1.AutoscalingPolicyCpuUtilization) {
return mergeFrom((com.google.cloud.compute.v1.AutoscalingPolicyCpuUtilization) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.compute.v1.AutoscalingPolicyCpuUtilization other) {
if (other == com.google.cloud.compute.v1.AutoscalingPolicyCpuUtilization.getDefaultInstance())
return this;
if (other.hasPredictiveMethod()) {
predictiveMethod_ = other.predictiveMethod_;
bitField0_ |= 0x00000001;
onChanged();
}
if (other.hasUtilizationTarget()) {
setUtilizationTarget(other.getUtilizationTarget());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 1727246961:
{
utilizationTarget_ = input.readDouble();
bitField0_ |= 0x00000002;
break;
} // case 1727246961
case -1173201398:
{
predictiveMethod_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case -1173201398
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object predictiveMethod_ = "";
/**
*
*
* <pre>
* Indicates whether predictive autoscaling based on CPU metric is enabled. Valid values are: * NONE (default). No predictive method is used. The autoscaler scales the group to meet current demand based on real-time metrics. * OPTIMIZE_AVAILABILITY. Predictive autoscaling improves availability by monitoring daily and weekly load patterns and scaling out ahead of anticipated demand.
* Check the PredictiveMethod enum for the list of possible values.
* </pre>
*
* <code>optional string predictive_method = 390220737;</code>
*
* @return Whether the predictiveMethod field is set.
*/
public boolean hasPredictiveMethod() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Indicates whether predictive autoscaling based on CPU metric is enabled. Valid values are: * NONE (default). No predictive method is used. The autoscaler scales the group to meet current demand based on real-time metrics. * OPTIMIZE_AVAILABILITY. Predictive autoscaling improves availability by monitoring daily and weekly load patterns and scaling out ahead of anticipated demand.
* Check the PredictiveMethod enum for the list of possible values.
* </pre>
*
* <code>optional string predictive_method = 390220737;</code>
*
* @return The predictiveMethod.
*/
public java.lang.String getPredictiveMethod() {
java.lang.Object ref = predictiveMethod_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
predictiveMethod_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Indicates whether predictive autoscaling based on CPU metric is enabled. Valid values are: * NONE (default). No predictive method is used. The autoscaler scales the group to meet current demand based on real-time metrics. * OPTIMIZE_AVAILABILITY. Predictive autoscaling improves availability by monitoring daily and weekly load patterns and scaling out ahead of anticipated demand.
* Check the PredictiveMethod enum for the list of possible values.
* </pre>
*
* <code>optional string predictive_method = 390220737;</code>
*
* @return The bytes for predictiveMethod.
*/
public com.google.protobuf.ByteString getPredictiveMethodBytes() {
java.lang.Object ref = predictiveMethod_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
predictiveMethod_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Indicates whether predictive autoscaling based on CPU metric is enabled. Valid values are: * NONE (default). No predictive method is used. The autoscaler scales the group to meet current demand based on real-time metrics. * OPTIMIZE_AVAILABILITY. Predictive autoscaling improves availability by monitoring daily and weekly load patterns and scaling out ahead of anticipated demand.
* Check the PredictiveMethod enum for the list of possible values.
* </pre>
*
* <code>optional string predictive_method = 390220737;</code>
*
* @param value The predictiveMethod to set.
* @return This builder for chaining.
*/
public Builder setPredictiveMethod(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
predictiveMethod_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Indicates whether predictive autoscaling based on CPU metric is enabled. Valid values are: * NONE (default). No predictive method is used. The autoscaler scales the group to meet current demand based on real-time metrics. * OPTIMIZE_AVAILABILITY. Predictive autoscaling improves availability by monitoring daily and weekly load patterns and scaling out ahead of anticipated demand.
* Check the PredictiveMethod enum for the list of possible values.
* </pre>
*
* <code>optional string predictive_method = 390220737;</code>
*
* @return This builder for chaining.
*/
public Builder clearPredictiveMethod() {
predictiveMethod_ = getDefaultInstance().getPredictiveMethod();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Indicates whether predictive autoscaling based on CPU metric is enabled. Valid values are: * NONE (default). No predictive method is used. The autoscaler scales the group to meet current demand based on real-time metrics. * OPTIMIZE_AVAILABILITY. Predictive autoscaling improves availability by monitoring daily and weekly load patterns and scaling out ahead of anticipated demand.
* Check the PredictiveMethod enum for the list of possible values.
* </pre>
*
* <code>optional string predictive_method = 390220737;</code>
*
* @param value The bytes for predictiveMethod to set.
* @return This builder for chaining.
*/
public Builder setPredictiveMethodBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
predictiveMethod_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private double utilizationTarget_;
/**
*
*
* <pre>
* The target CPU utilization that the autoscaler maintains. Must be a float value in the range (0, 1]. If not specified, the default is 0.6. If the CPU level is below the target utilization, the autoscaler scales in the number of instances until it reaches the minimum number of instances you specified or until the average CPU of your instances reaches the target utilization. If the average CPU is above the target utilization, the autoscaler scales out until it reaches the maximum number of instances you specified or until the average utilization reaches the target utilization.
* </pre>
*
* <code>optional double utilization_target = 215905870;</code>
*
* @return Whether the utilizationTarget field is set.
*/
@java.lang.Override
public boolean hasUtilizationTarget() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* The target CPU utilization that the autoscaler maintains. Must be a float value in the range (0, 1]. If not specified, the default is 0.6. If the CPU level is below the target utilization, the autoscaler scales in the number of instances until it reaches the minimum number of instances you specified or until the average CPU of your instances reaches the target utilization. If the average CPU is above the target utilization, the autoscaler scales out until it reaches the maximum number of instances you specified or until the average utilization reaches the target utilization.
* </pre>
*
* <code>optional double utilization_target = 215905870;</code>
*
* @return The utilizationTarget.
*/
@java.lang.Override
public double getUtilizationTarget() {
return utilizationTarget_;
}
/**
*
*
* <pre>
* The target CPU utilization that the autoscaler maintains. Must be a float value in the range (0, 1]. If not specified, the default is 0.6. If the CPU level is below the target utilization, the autoscaler scales in the number of instances until it reaches the minimum number of instances you specified or until the average CPU of your instances reaches the target utilization. If the average CPU is above the target utilization, the autoscaler scales out until it reaches the maximum number of instances you specified or until the average utilization reaches the target utilization.
* </pre>
*
* <code>optional double utilization_target = 215905870;</code>
*
* @param value The utilizationTarget to set.
* @return This builder for chaining.
*/
public Builder setUtilizationTarget(double value) {
utilizationTarget_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* The target CPU utilization that the autoscaler maintains. Must be a float value in the range (0, 1]. If not specified, the default is 0.6. If the CPU level is below the target utilization, the autoscaler scales in the number of instances until it reaches the minimum number of instances you specified or until the average CPU of your instances reaches the target utilization. If the average CPU is above the target utilization, the autoscaler scales out until it reaches the maximum number of instances you specified or until the average utilization reaches the target utilization.
* </pre>
*
* <code>optional double utilization_target = 215905870;</code>
*
* @return This builder for chaining.
*/
public Builder clearUtilizationTarget() {
bitField0_ = (bitField0_ & ~0x00000002);
utilizationTarget_ = 0D;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.compute.v1.AutoscalingPolicyCpuUtilization)
}
// @@protoc_insertion_point(class_scope:google.cloud.compute.v1.AutoscalingPolicyCpuUtilization)
private static final com.google.cloud.compute.v1.AutoscalingPolicyCpuUtilization DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.compute.v1.AutoscalingPolicyCpuUtilization();
}
public static com.google.cloud.compute.v1.AutoscalingPolicyCpuUtilization getDefaultInstance() {
return DEFAULT_INSTANCE;
}
  // Generated parser: delegates wire parsing to the builder's mergeFrom and, on
  // any failure, attaches the partially-built message to the thrown
  // InvalidProtocolBufferException so callers can inspect what was decoded.
  private static final com.google.protobuf.Parser<AutoscalingPolicyCpuUtilization> PARSER =
      new com.google.protobuf.AbstractParser<AutoscalingPolicyCpuUtilization>() {
        @java.lang.Override
        public AutoscalingPolicyCpuUtilization parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<AutoscalingPolicyCpuUtilization> parser() {
    return PARSER;
  }
  // Generated accessors: expose the shared PARSER and DEFAULT_INSTANCE
  // singletons through the Message interface.
  @java.lang.Override
  public com.google.protobuf.Parser<AutoscalingPolicyCpuUtilization> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.compute.v1.AutoscalingPolicyCpuUtilization getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/hadoop | 38,075 | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/federation/policies/amrmproxy/LocalityMulticastAMRMProxyPolicy.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.federation.policies.amrmproxy;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Random;
import java.util.Set;
import java.util.TreeMap;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.commons.collections4.MapUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse;
import org.apache.hadoop.yarn.api.records.EnhancedHeadroom;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.api.records.ResourceRequest;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
import org.apache.hadoop.yarn.server.federation.policies.FederationPolicyInitializationContext;
import org.apache.hadoop.yarn.server.federation.policies.FederationPolicyUtils;
import org.apache.hadoop.yarn.server.federation.policies.dao.WeightedPolicyInfo;
import org.apache.hadoop.yarn.server.federation.policies.exceptions.FederationPolicyException;
import org.apache.hadoop.yarn.server.federation.policies.exceptions.FederationPolicyInitializationException;
import org.apache.hadoop.yarn.server.federation.policies.exceptions.NoActiveSubclustersException;
import org.apache.hadoop.yarn.server.federation.resolver.SubClusterResolver;
import org.apache.hadoop.yarn.server.federation.store.records.SubClusterId;
import org.apache.hadoop.yarn.server.federation.store.records.SubClusterIdInfo;
import org.apache.hadoop.yarn.server.federation.store.records.SubClusterInfo;
import org.apache.hadoop.yarn.server.federation.utils.FederationStateStoreFacade;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.classification.VisibleForTesting;
import org.apache.hadoop.util.Preconditions;
import static org.apache.hadoop.yarn.conf.YarnConfiguration.LOAD_BASED_SC_SELECTOR_ENABLED;
import static org.apache.hadoop.yarn.conf.YarnConfiguration.DEFAULT_LOAD_BASED_SC_SELECTOR_ENABLED;
import static org.apache.hadoop.yarn.conf.YarnConfiguration.LOAD_BASED_SC_SELECTOR_THRESHOLD;
import static org.apache.hadoop.yarn.conf.YarnConfiguration.DEFAULT_LOAD_BASED_SC_SELECTOR_THRESHOLD;
import static org.apache.hadoop.yarn.conf.YarnConfiguration.LOAD_BASED_SC_SELECTOR_USE_ACTIVE_CORE;
import static org.apache.hadoop.yarn.conf.YarnConfiguration.DEFAULT_LOAD_BASED_SC_SELECTOR_USE_ACTIVE_CORE;
import static org.apache.hadoop.yarn.conf.YarnConfiguration.LOAD_BASED_SC_SELECTOR_MULTIPLIER;
import static org.apache.hadoop.yarn.conf.YarnConfiguration.DEFAULT_LOAD_BASED_SC_SELECTOR_MULTIPLIER;
import static org.apache.hadoop.yarn.conf.YarnConfiguration.LOAD_BASED_SC_SELECTOR_FAIL_ON_ERROR;
import static org.apache.hadoop.yarn.conf.YarnConfiguration.DEFAULT_LOAD_BASED_SC_SELECTOR_FAIL_ON_ERROR;
import static org.apache.hadoop.yarn.conf.YarnConfiguration.FEDERATION_BLACKLIST_SUBCLUSTERS;
import static org.apache.hadoop.yarn.conf.YarnConfiguration.DEFAULT_FEDERATION_BLACKLIST_SUBCLUSTERS;
/**
* An implementation of the {@link FederationAMRMProxyPolicy} interface that
* carefully multicasts the requests with the following behavior:
*
* <p>
* Host localized {@link ResourceRequest}s are always forwarded to the RM that
* owns the corresponding node, based on the feedback of a
* {@link SubClusterResolver}. If the {@link SubClusterResolver} cannot resolve
* this node we default to forwarding the {@link ResourceRequest} to the home
* sub-cluster.
* </p>
*
 * <p>
 * Rack localized {@link ResourceRequest}s are forwarded to the RMs that own
 * the corresponding rack. Note that in some deployments each rack could be
* striped across multiple RMs. This policy respects that. If the
* {@link SubClusterResolver} cannot resolve this rack we default to forwarding
* the {@link ResourceRequest} to the home sub-cluster.
* </p>
*
* <p>
* ANY requests corresponding to node/rack local requests are forwarded only to
* the set of RMs that owns the corresponding localized requests. The number of
* containers listed in each ANY is proportional to the number of localized
* container requests (associated to this ANY via the same allocateRequestId).
* </p>
*
* <p>
* ANY that are not associated to node/rack local requests are split among RMs
* based on the "weights" in the {@link WeightedPolicyInfo} configuration *and*
* headroom information. The {@code headroomAlpha} parameter of the policy
* configuration indicates how much headroom contributes to the splitting
* choice. Value of 1.0f indicates the weights are interpreted only as 0/1
* boolean but all splitting is based on the advertised headroom (fallback to
* 1/N for RMs that we don't have headroom info from). An {@code headroomAlpha}
* value of 0.0f means headroom is ignored and all splitting decisions are
* proportional to the "weights" in the configuration of the policy.
* </p>
*
* <p>
* ANY of zero size are forwarded to all known subclusters (i.e., subclusters
* where we scheduled containers before), as they may represent a user attempt
* to cancel a previous request (and we are mostly stateless now, so should
* forward to all known RMs).
* </p>
*
* <p>
* Invariants:
* </p>
*
* <p>
* The policy always excludes non-active RMs.
* </p>
*
 * <p>
 * The policy always excludes RMs that do not appear in the policy configuration
 * weights, or have a weight of 0 (even if localized resources explicitly refer
 * to them).
 * </p>
*
* <p>
* (Bar rounding to closest ceiling of fractional containers) The sum of
* requests made to multiple RMs at the ANY level "adds-up" to the user request.
* The maximum possible excess in a given request is a number of containers less
* or equal to number of sub-clusters in the federation.
* </p>
*/
public class LocalityMulticastAMRMProxyPolicy extends AbstractAMRMProxyPolicy {
public static final Logger LOG =
LoggerFactory.getLogger(LocalityMulticastAMRMProxyPolicy.class);
private static Random rand = new Random();
private Map<SubClusterId, Float> weights;
private SubClusterResolver resolver;
private Configuration conf;
private Map<SubClusterId, Resource> headroom;
private Map<SubClusterId, EnhancedHeadroom> enhancedHeadroom;
private float hrAlpha;
private FederationStateStoreFacade federationFacade;
private SubClusterId homeSubcluster;
private int printRRMax;
public static final String PRINT_RR_MAX =
"yarn.nodemanager.amrmproxy.address.splitmerge.printmaxrrcount";
public static final int DEFAULT_PRINT_RR_MAX = 1000;
private boolean failOnError = DEFAULT_LOAD_BASED_SC_SELECTOR_FAIL_ON_ERROR;
/**
* Print a list of Resource Requests into a one line string.
*
* @param response list of ResourceRequest
* @param max number of ResourceRequest to print
* @return the printed one line string
*/
public static String prettyPrintRequests(List<ResourceRequest> response, int max) {
StringBuilder builder = new StringBuilder();
for (ResourceRequest rr : response) {
builder.append("[id:").append(rr.getAllocationRequestId())
.append(" loc:")
.append(rr.getResourceName())
.append(" num:")
.append(rr.getNumContainers())
.append(" pri:")
.append(((rr.getPriority() != null) ? rr.getPriority().getPriority() : -1))
.append("], ");
if (max != -1) {
if (max-- <= 0) {
break;
}
}
}
return builder.toString();
}
@Override
public void reinitialize(
FederationPolicyInitializationContext policyContext)
throws FederationPolicyInitializationException {
// save reference to old weights
WeightedPolicyInfo tempPolicy = getPolicyInfo();
super.reinitialize(policyContext);
if (!getIsDirty()) {
return;
}
Map<SubClusterId, Float> newWeightsConverted = new HashMap<>();
boolean allInactive = true;
WeightedPolicyInfo policy = getPolicyInfo();
if (policy.getAMRMPolicyWeights() != null
&& policy.getAMRMPolicyWeights().size() > 0) {
for (Map.Entry<SubClusterIdInfo, Float> e : policy.getAMRMPolicyWeights()
.entrySet()) {
if (e.getValue() > 0) {
allInactive = false;
}
newWeightsConverted.put(e.getKey().toId(), e.getValue());
}
}
if (allInactive) {
// reset the policyInfo and throw
setPolicyInfo(tempPolicy);
throw new FederationPolicyInitializationException(
"The weights used to configure "
+ "this policy are all set to zero! (no ResourceRequest could be "
+ "forwarded with this setting.)");
}
if (policyContext.getHomeSubcluster() == null) {
setPolicyInfo(tempPolicy);
throw new FederationPolicyInitializationException("The homeSubcluster "
+ "filed in the context must be initialized to use this policy");
}
weights = newWeightsConverted;
resolver = policyContext.getFederationSubclusterResolver();
// Data structures that only need to initialize once
if (headroom == null) {
headroom = new ConcurrentHashMap<>();
enhancedHeadroom = new ConcurrentHashMap<>();
}
hrAlpha = policy.getHeadroomAlpha();
this.federationFacade =
policyContext.getFederationStateStoreFacade();
this.homeSubcluster = policyContext.getHomeSubcluster();
this.conf = this.federationFacade.getConf();
this.printRRMax = this.conf.getInt(PRINT_RR_MAX, DEFAULT_PRINT_RR_MAX);
this.failOnError = this.conf.getBoolean(LOAD_BASED_SC_SELECTOR_FAIL_ON_ERROR,
DEFAULT_LOAD_BASED_SC_SELECTOR_FAIL_ON_ERROR);
}
@Override
public void notifyOfResponse(SubClusterId subClusterId,
AllocateResponse response) throws YarnException {
if (response.getAvailableResources() != null) {
headroom.put(subClusterId, response.getAvailableResources());
}
if (response.getEnhancedHeadroom() != null) {
this.enhancedHeadroom.put(subClusterId, response.getEnhancedHeadroom());
}
LOG.info(
"Subcluster {} updated with AvailableResource {}, EnhancedHeadRoom {}",
subClusterId, response.getAvailableResources(),
response.getEnhancedHeadroom());
}
  /**
   * Splits the incoming {@link ResourceRequest}s among sub-clusters:
   * node-local RRs go to the sub-cluster that owns the node (possibly
   * re-routed by load), rack-local RRs go to every active/enabled sub-cluster
   * owning part of the rack, unresolvable RRs go to a randomly chosen
   * active/enabled sub-cluster, and ANY RRs are accumulated and split last.
   *
   * @param resourceRequests the requests to split
   * @param timedOutSubClusters sub-clusters whose heartbeat has expired
   * @return the per-sub-cluster partition of the requests
   * @throws YarnException if no active and enabled sub-cluster is available
   */
  @Override
  public Map<SubClusterId, List<ResourceRequest>> splitResourceRequests(
      List<ResourceRequest> resourceRequests,
      Set<SubClusterId> timedOutSubClusters) throws YarnException {
    // object used to accumulate statistics about the answer, initialize with
    // active subclusters. Create a new instance per call because this method
    // can be called concurrently.
    AllocationBookkeeper bookkeeper = new AllocationBookkeeper();
    bookkeeper.reinitialize(getActiveSubclusters(), timedOutSubClusters, conf);

    List<ResourceRequest> nonLocalizedRequests = new ArrayList<>();

    SubClusterId targetId = null;
    Set<SubClusterId> targetIds = null;

    // if the RR is resolved to a local subcluster add it directly (node and
    // resolvable racks)
    for (ResourceRequest rr : resourceRequests) {
      targetId = null;
      targetIds = null;

      // Handle: ANY (accumulated for later)
      if (ResourceRequest.isAnyLocation(rr.getResourceName())) {
        nonLocalizedRequests.add(rr);
        continue;
      }

      // Handle "node" requests
      try {
        targetId = resolver.getSubClusterForNode(rr.getResourceName());

        // If needed, re-reroute node requests base on SC load
        boolean loadBasedSCSelectorEnabled =
            conf.getBoolean(LOAD_BASED_SC_SELECTOR_ENABLED, DEFAULT_LOAD_BASED_SC_SELECTOR_ENABLED);
        if (loadBasedSCSelectorEnabled) {
          int maxPendingThreshold = conf.getInt(LOAD_BASED_SC_SELECTOR_THRESHOLD,
              DEFAULT_LOAD_BASED_SC_SELECTOR_THRESHOLD);
          targetId = routeNodeRequestIfNeeded(targetId, maxPendingThreshold,
              bookkeeper.getActiveAndEnabledSC());
        }
        LOG.debug("Node request {}", rr.getResourceName());
      } catch (YarnException e) {
        // this might happen as we can't differentiate node from rack names
        // we log altogether later
      }
      // NOTE: targetId may still be null here if resolution failed; the
      // bookkeeper is expected to treat null as "not active and enabled".
      if (bookkeeper.isActiveAndEnabled(targetId)) {
        bookkeeper.addLocalizedNodeRR(targetId, rr);
        continue;
      }

      // Handle "rack" requests
      try {
        targetIds = resolver.getSubClustersForRack(rr.getResourceName());
      } catch (YarnException e) {
        // this might happen as we can't differentiate node from rack names
        // we log altogether later
      }
      if (targetIds != null && targetIds.size() > 0) {
        boolean hasActive = false;
        for (SubClusterId tid : targetIds) {
          if (bookkeeper.isActiveAndEnabled(tid)) {
            bookkeeper.addRackRR(tid, rr);
            hasActive = true;
          }
        }
        if (hasActive) {
          continue;
        }
      }

      // Handle node/rack requests that the SubClusterResolver cannot map to
      // any cluster. Pick a random sub-cluster from active and enabled ones.
      targetId = getSubClusterForUnResolvedRequest(bookkeeper,
          rr.getAllocationRequestId());
      LOG.debug("ERROR resolving sub-cluster for resourceName: {}, picked a "
          + "random subcluster to forward:{}", rr.getResourceName(), targetId);
      // Record as rack RR if the name resolved to racks (just not active
      // ones), otherwise as a node RR.
      if (targetIds != null && targetIds.size() > 0) {
        bookkeeper.addRackRR(targetId, rr);
      } else {
        bookkeeper.addLocalizedNodeRR(targetId, rr);
      }
    }

    // handle all non-localized requests (ANY)
    splitAnyRequests(nonLocalizedRequests, bookkeeper);

    // Take the split result, feed into the askBalancer
    Map<SubClusterId, List<ResourceRequest>> answer = bookkeeper.getAnswer();
    LOG.info("Before split {} RRs: {}", resourceRequests.size(),
        prettyPrintRequests(resourceRequests, this.printRRMax));

    for (Map.Entry<SubClusterId, List<ResourceRequest>> entry : bookkeeper.getAnswer().entrySet()) {
      LOG.info("After split {} has {} RRs: {}", entry.getKey(), entry.getValue().size(),
          prettyPrintRequests(entry.getValue(), this.printRRMax));
    }
    return answer;
  }
  /**
   * Picks a sub-cluster for a request whose location could not be resolved,
   * delegating to the bookkeeper (which memoizes the choice per allocation
   * id). Protected so unit tests can override it with a deterministic pick.
   *
   * @param bookKeeper bookKeeper
   * @param allocationId allocationId.
   * @return SubClusterId.
   */
  protected SubClusterId getSubClusterForUnResolvedRequest(
      AllocationBookkeeper bookKeeper, long allocationId) {
    return bookKeeper.getSubClusterForUnResolvedRequest(allocationId);
  }
/**
* It splits a list of non-localized resource requests among sub-clusters.
*/
private void splitAnyRequests(List<ResourceRequest> originalResourceRequests,
AllocationBookkeeper allocationBookkeeper) throws YarnException {
for (ResourceRequest resourceRequest : originalResourceRequests) {
// FIRST: pick the target set of subclusters (based on whether this RR
// is associated with other localized requests via an allocationId)
Long allocationId = resourceRequest.getAllocationRequestId();
Set<SubClusterId> targetSubclusters;
if (allocationBookkeeper.getSubClustersForId(allocationId) != null) {
targetSubclusters =
allocationBookkeeper.getSubClustersForId(allocationId);
} else {
targetSubclusters = allocationBookkeeper.getActiveAndEnabledSC();
}
// SECOND: pick how much to ask each RM for each request
splitIndividualAny(resourceRequest, targetSubclusters,
allocationBookkeeper);
}
}
  /**
   * Return a projection of this ANY {@link ResourceRequest} that belongs to
   * this sub-cluster. This is done based on the "count" of the containers that
   * require locality in each sub-cluster (if any) or based on the "weights"
   * and headroom.
   */
  private void splitIndividualAny(ResourceRequest originalResourceRequest,
      Set<SubClusterId> targetSubclusters,
      AllocationBookkeeper allocationBookkeeper) throws YarnException {

    long allocationId = originalResourceRequest.getAllocationRequestId();
    int numContainer = originalResourceRequest.getNumContainers();

    // If the ANY request has 0 containers to begin with we must forward it to
    // any RM we have previously contacted (this might be the user way
    // to cancel a previous request).
    if (numContainer == 0) {
      for (SubClusterId targetId : headroom.keySet()) {
        allocationBookkeeper.addAnyRR(targetId, originalResourceRequest);
      }
      return;
    }

    // List preserves iteration order
    List<SubClusterId> targetSCs = new ArrayList<>(targetSubclusters);

    // Compute the distribution weights
    ArrayList<Float> weightsList = new ArrayList<>();
    for (SubClusterId targetId : targetSCs) {
      // If ANY is associated with localized asks, split based on their ratio
      if (allocationBookkeeper.getSubClustersForId(allocationId) != null) {
        weightsList.add(getLocalityBasedWeighting(allocationId, targetId,
            allocationBookkeeper));
      } else {
        // split ANY based on load and policy configuration
        float headroomWeighting =
            getHeadroomWeighting(targetId, allocationBookkeeper);
        float policyWeighting =
            getPolicyConfigWeighting(targetId, allocationBookkeeper);
        // hrAlpha controls how much headroom influencing decision
        weightsList
            .add(hrAlpha * headroomWeighting + (1 - hrAlpha) * policyWeighting);
      }
    }

    // Compute the integer container counts for each sub-cluster
    // (weightsList and targetSCs are index-aligned).
    ArrayList<Integer> containerNums =
        computeIntegerAssignment(numContainer, weightsList);

    int i = 0;
    for (SubClusterId targetId : targetSCs) {
      // if the calculated request is non-empty add it to the answer
      if (containerNums.get(i) > 0) {
        ResourceRequest out = ResourceRequest.clone(originalResourceRequest);
        out.setNumContainers(containerNums.get(i));
        if (ResourceRequest.isAnyLocation(out.getResourceName())) {
          allocationBookkeeper.addAnyRR(targetId, out);
        } else {
          allocationBookkeeper.addRackRR(targetId, out);
        }
      }
      i++;
    }
  }
/**
* Split the integer into bins according to the weights.
*
* @param totalNum total number of containers to split
* @param weightsList the weights for each subcluster
* @return the container allocation after split
* @throws YarnException if fails
*/
@VisibleForTesting
protected ArrayList<Integer> computeIntegerAssignment(int totalNum,
ArrayList<Float> weightsList) throws YarnException {
int i, residue;
ArrayList<Integer> ret = new ArrayList<>();
float totalWeight = 0, totalNumFloat = totalNum;
if (weightsList.size() == 0) {
return ret;
}
for (i = 0; i < weightsList.size(); i++) {
ret.add(0);
if (weightsList.get(i) > 0) {
totalWeight += weightsList.get(i);
}
}
if (totalWeight == 0) {
StringBuilder sb = new StringBuilder();
for (Float weight : weightsList) {
sb.append(weight + ", ");
}
throw new FederationPolicyException(
"No positive value found in weight array " + sb.toString());
}
// First pass, do flooring for all bins
residue = totalNum;
for (i = 0; i < weightsList.size(); i++) {
if (weightsList.get(i) > 0) {
int base = (int) (totalNumFloat * weightsList.get(i) / totalWeight);
ret.set(i, ret.get(i) + base);
residue -= base;
}
}
// By now residue < weights.length, assign one a time
for (i = 0; i < residue; i++) {
int index = FederationPolicyUtils.getWeightedRandom(weightsList);
ret.set(index, ret.get(index) + 1);
}
return ret;
}
/**
* Compute the weight to assign to a subcluster based on how many local
* requests a subcluster is target of.
*/
private float getLocalityBasedWeighting(long reqId, SubClusterId targetId,
AllocationBookkeeper allocationBookkeeper) {
float totWeight = allocationBookkeeper.getTotNumLocalizedContainers(reqId);
float localWeight =
allocationBookkeeper.getNumLocalizedContainers(reqId, targetId);
return totWeight > 0 ? localWeight / totWeight : 0;
}
/**
* Compute the "weighting" to give to a sublcuster based on the configured
* policy weights (for the active subclusters).
*/
private float getPolicyConfigWeighting(SubClusterId targetId,
AllocationBookkeeper allocationBookkeeper) {
float totWeight = allocationBookkeeper.totPolicyWeight;
Float localWeight = allocationBookkeeper.policyWeights.get(targetId);
return (localWeight != null && totWeight > 0) ? localWeight / totWeight : 0;
}
/**
* Compute the weighting based on available headroom. This is proportional to
* the available headroom memory announced by RM, or to 1/N for RMs we have
* not seen yet. If all RMs report zero headroom, we fallback to 1/N again.
*/
private float getHeadroomWeighting(SubClusterId targetId,
AllocationBookkeeper allocationBookkeeper) {
// baseline weight for all RMs
float headroomWeighting =
1 / (float) allocationBookkeeper.getActiveAndEnabledSC().size();
// if we have headroom information for this sub-cluster (and we are safe
// from /0 issues)
if (headroom.containsKey(targetId)
&& allocationBookkeeper.totHeadroomMemory > 0) {
// compute which portion of the RMs that are active/enabled have reported
// their headroom (needed as adjustment factor)
// (note: getActiveAndEnabledSC should never be null/zero)
float ratioHeadroomKnown = allocationBookkeeper.totHeadRoomEnabledRMs
/ (float) allocationBookkeeper.getActiveAndEnabledSC().size();
// headroomWeighting is the ratio of headroom memory in the targetId
// cluster / total memory. The ratioHeadroomKnown factor is applied to
// adjust for missing information and ensure sum of allocated containers
// closely approximate what the user asked (small excess).
headroomWeighting = (headroom.get(targetId).getMemorySize()
/ allocationBookkeeper.totHeadroomMemory) * (ratioHeadroomKnown);
}
return headroomWeighting;
}
/**
* When certain subcluster is too loaded, reroute Node requests going there.
*
* @param targetId current subClusterId where request is sent
* @param maxThreshold threshold for Pending count
* @param activeAndEnabledSCs list of active sc
* @return subClusterId target sc id
*/
protected SubClusterId routeNodeRequestIfNeeded(SubClusterId targetId,
int maxThreshold, Set<SubClusterId> activeAndEnabledSCs) {
// If targetId is not in the active and enabled SC list, reroute the traffic
if (activeAndEnabledSCs.contains(targetId)) {
int targetPendingCount = getSubClusterLoad(targetId);
if (targetPendingCount == -1 || targetPendingCount < maxThreshold) {
return targetId;
}
}
SubClusterId scId = chooseSubClusterIdForMaxLoadSC(targetId, maxThreshold, activeAndEnabledSCs);
return scId;
}
/**
* Check if the current target subcluster is over max load, and if it is
* reroute it.
*
* @param targetId the original target subcluster id
* @param maxThreshold the max load threshold to reroute
* @param activeAndEnabledSCs the list of active and enabled subclusters
* @return targetId if it is within maxThreshold, otherwise a new id
*/
private SubClusterId chooseSubClusterIdForMaxLoadSC(SubClusterId targetId,
int maxThreshold, Set<SubClusterId> activeAndEnabledSCs) {
ArrayList<Float> weight = new ArrayList<>();
ArrayList<SubClusterId> scIds = new ArrayList<>();
int targetLoad = getSubClusterLoad(targetId);
if (targetLoad == -1 || !activeAndEnabledSCs.contains(targetId)) {
// Probably a SC that's not active and enabled. Forcing a reroute
targetLoad = Integer.MAX_VALUE;
}
/*
* Prepare the weight for a random draw among all known SCs.
*
* For SC with pending bigger than maxThreshold / 2, use maxThreshold /
* pending as weight. We multiplied by maxThreshold so that the weight
* won't be too small in value.
*
* For SC with pending less than maxThreshold / 2, we cap the weight at 2
* = (maxThreshold / (maxThreshold / 2)) so that SC with small pending
* will not get a huge weight and thus get swamped.
*/
for (SubClusterId sc : activeAndEnabledSCs) {
int scLoad = getSubClusterLoad(sc);
if (scLoad > targetLoad) {
// Never mind if it is not the most loaded SC
return targetId;
}
if (scLoad <= maxThreshold / 2) {
weight.add(2f);
} else {
weight.add((float) maxThreshold / scLoad);
}
scIds.add(sc);
}
if (weights.size() == 0) {
return targetId;
}
return scIds.get(FederationPolicyUtils.getWeightedRandom(weight));
}
/**
* get the Load data of the subCluster.
*
* @param subClusterId subClusterId.
* @return The number of pending containers for the subCluster.
*/
private int getSubClusterLoad(SubClusterId subClusterId) {
EnhancedHeadroom headroomData = this.enhancedHeadroom.get(subClusterId);
if (headroomData == null) {
return -1;
}
// Use new data from enhanced headroom
boolean useActiveCoreEnabled = conf.getBoolean(LOAD_BASED_SC_SELECTOR_USE_ACTIVE_CORE,
DEFAULT_LOAD_BASED_SC_SELECTOR_USE_ACTIVE_CORE);
// If we consider the number of vCores in the subCluster
if (useActiveCoreEnabled) {
// If the vcore of the subCluster is less than or equal to 0,
// it means that containers cannot be scheduled to this subCluster,
// and we will return a very large number, indicating that the subCluster is unavailable.
if (headroomData.getTotalActiveCores() <= 0) {
return Integer.MAX_VALUE;
}
// Multiply by a constant factor, to ensure the numerator > denominator.
// We will normalize the PendingCount, using PendingCount * multiplier / TotalActiveCores.
long multiplier = conf.getLong(LOAD_BASED_SC_SELECTOR_MULTIPLIER,
DEFAULT_LOAD_BASED_SC_SELECTOR_MULTIPLIER);
double value =
headroomData.getNormalizedPendingCount(multiplier) / headroomData.getTotalActiveCores();
if (value > Integer.MAX_VALUE) {
return Integer.MAX_VALUE;
} else {
return (int) value;
}
} else {
// If the number of vcores in the subCluster is not considered,
// we directly return the number of pending containers in the subCluster.
return headroomData.getTotalPendingCount();
}
}
/**
* This helper class is used to book-keep the requests made to each
* subcluster, and maintain useful statistics to split ANY requests.
*/
protected final class AllocationBookkeeper {
// the answer being accumulated
private Map<SubClusterId, List<ResourceRequest>> answer = new TreeMap<>();
private Map<SubClusterId, Set<Long>> maskForRackDeletion = new HashMap<>();
// stores how many containers we have allocated in each RM for localized
// asks, used to correctly "spread" the corresponding ANY
private Map<Long, Map<SubClusterId, AtomicLong>> countContainersPerRM =
new HashMap<>();
private Map<Long, AtomicLong> totNumLocalizedContainers = new HashMap<>();
// Store the randomly selected subClusterId for unresolved resource requests
// keyed by requestId
private Map<Long, SubClusterId> unResolvedRequestLocation = new HashMap<>();
private Set<SubClusterId> activeAndEnabledSC = new HashSet<>();
private float totHeadroomMemory = 0;
private int totHeadRoomEnabledRMs = 0;
private Map<SubClusterId, Float> policyWeights;
private float totPolicyWeight = 0;
    /**
     * Resets the bookkeeper for a new split, computing the set of
     * active-and-enabled sub-clusters (active, positive policy weight, not
     * blacklisted, not heartbeat-expired) and the headroom totals used by the
     * ANY split.
     *
     * @param activeSubclusters currently active sub-clusters
     * @param timedOutSubClusters sub-clusters with expired heartbeats
     * @param pConf configuration (blacklist lookup)
     * @throws YarnException if no usable sub-cluster remains and
     *           {@code failOnError} is set
     */
    private void reinitialize(
        Map<SubClusterId, SubClusterInfo> activeSubclusters,
        Set<SubClusterId> timedOutSubClusters, Configuration pConf) throws YarnException {
      if (MapUtils.isEmpty(activeSubclusters)) {
        throw new YarnRuntimeException("null activeSubclusters received");
      }

      // reset data structures
      answer.clear();
      maskForRackDeletion.clear();
      countContainersPerRM.clear();
      totNumLocalizedContainers.clear();
      activeAndEnabledSC.clear();
      totHeadroomMemory = 0;
      totHeadRoomEnabledRMs = 0;
      // save the reference locally in case the weights get reinitialized
      // concurrently
      policyWeights = weights;
      totPolicyWeight = 0;

      for (Map.Entry<SubClusterId, Float> entry : policyWeights.entrySet()) {
        if (entry.getValue() > 0
            && activeSubclusters.containsKey(entry.getKey())) {
          activeAndEnabledSC.add(entry.getKey());
        }
      }

      // subCluster blacklisting from configuration
      String blacklistedSubClusters = pConf.get(FEDERATION_BLACKLIST_SUBCLUSTERS,
          DEFAULT_FEDERATION_BLACKLIST_SUBCLUSTERS);
      if (blacklistedSubClusters != null) {
        Collection<String> tempList = StringUtils.getStringCollection(blacklistedSubClusters);
        for (String item : tempList) {
          activeAndEnabledSC.remove(SubClusterId.newInstance(item.trim()));
        }
      }

      if (activeAndEnabledSC.size() < 1) {
        String errorMsg = "None of the subClusters enabled in this Policy (weight > 0) are "
            + "currently active we cannot forward the ResourceRequest(s)";
        if (failOnError) {
          throw new NoActiveSubclustersException(errorMsg);
        } else {
          // NOTE(review): this fallback writes 1.0f into policyWeights, which
          // aliases the policy's shared "weights" map — the mutation persists
          // beyond this request. TODO confirm this is intended.
          LOG.error(errorMsg + ", continuing by enabling all active subClusters.");
          activeAndEnabledSC.addAll(activeSubclusters.keySet());
          for (SubClusterId sc : activeSubclusters.keySet()) {
            policyWeights.put(sc, 1.0f);
          }
        }
      }

      // Prefer sub-clusters with fresh heartbeats; only ignore the expiry
      // check if it would leave us with nothing.
      Set<SubClusterId> tmpSCSet = new HashSet<>(activeAndEnabledSC);
      tmpSCSet.removeAll(timedOutSubClusters);

      if (tmpSCSet.size() < 1) {
        LOG.warn("All active and enabled subclusters have expired last "
            + "heartbeat time. Ignore the expiry check for this request.");
      } else {
        activeAndEnabledSC = tmpSCSet;
      }

      LOG.info("{} subcluster active, {} subclusters active and enabled",
          activeSubclusters.size(), activeAndEnabledSC.size());

      // pre-compute the set of subclusters that are both active and enabled by
      // the policy weights, and accumulate their total weight
      for (SubClusterId sc : activeAndEnabledSC) {
        totPolicyWeight += policyWeights.get(sc);
      }

      // pre-compute headroom-based weights for active/enabled subclusters
      for (Map.Entry<SubClusterId, Resource> r : headroom.entrySet()) {
        if (activeAndEnabledSC.contains(r.getKey())) {
          totHeadroomMemory += r.getValue().getMemorySize();
          totHeadRoomEnabledRMs++;
        }
      }
    }
/**
* Add to the answer a localized node request, and keeps track of statistics
* on a per-allocation-id and per-subcluster bases.
*/
private void addLocalizedNodeRR(SubClusterId targetId, ResourceRequest rr) {
Preconditions
.checkArgument(!ResourceRequest.isAnyLocation(rr.getResourceName()));
if (rr.getNumContainers() > 0) {
if (!countContainersPerRM.containsKey(rr.getAllocationRequestId())) {
countContainersPerRM.put(rr.getAllocationRequestId(),
new HashMap<>());
}
if (!countContainersPerRM.get(rr.getAllocationRequestId())
.containsKey(targetId)) {
countContainersPerRM.get(rr.getAllocationRequestId()).put(targetId,
new AtomicLong(0));
}
countContainersPerRM.get(rr.getAllocationRequestId()).get(targetId)
.addAndGet(rr.getNumContainers());
if (!totNumLocalizedContainers
.containsKey(rr.getAllocationRequestId())) {
totNumLocalizedContainers.put(rr.getAllocationRequestId(),
new AtomicLong(0));
}
totNumLocalizedContainers.get(rr.getAllocationRequestId())
.addAndGet(rr.getNumContainers());
}
internalAddToAnswer(targetId, rr, false);
}
/**
* Add a rack-local request to the final answer.
*/
private void addRackRR(SubClusterId targetId, ResourceRequest rr) {
Preconditions
.checkArgument(!ResourceRequest.isAnyLocation(rr.getResourceName()));
internalAddToAnswer(targetId, rr, true);
}
/**
* Add an ANY request to the final answer.
*/
private void addAnyRR(SubClusterId targetId, ResourceRequest rr) {
Preconditions
.checkArgument(ResourceRequest.isAnyLocation(rr.getResourceName()));
internalAddToAnswer(targetId, rr, false);
}
private void internalAddToAnswer(SubClusterId targetId,
ResourceRequest partialRR, boolean isRack) {
if (!isRack) {
if (!maskForRackDeletion.containsKey(targetId)) {
maskForRackDeletion.put(targetId, new HashSet<Long>());
}
maskForRackDeletion.get(targetId)
.add(partialRR.getAllocationRequestId());
}
if (!answer.containsKey(targetId)) {
answer.put(targetId, new ArrayList<ResourceRequest>());
}
answer.get(targetId).add(partialRR);
}
/**
 * For requests whose location cannot be resolved, pick a random active and
 * enabled sub-cluster to forward this allocation id to. The choice is sticky:
 * once made for an allocation id it is remembered and reused.
 */
private SubClusterId getSubClusterForUnResolvedRequest(long allocationId) {
  if (unResolvedRequestLocation.containsKey(allocationId)) {
    return unResolvedRequestLocation.get(allocationId);
  }
  // Walk the set and return its skip-th element.
  int skip = rand.nextInt(activeAndEnabledSC.size());
  for (SubClusterId candidate : activeAndEnabledSC) {
    if (skip == 0) {
      unResolvedRequestLocation.put(allocationId, candidate);
      return candidate;
    }
    skip--;
  }
  throw new RuntimeException(
      "Should not be here. activeAndEnabledSC size = "
          + activeAndEnabledSC.size() + " id = " + skip);
}
/**
 * Return all known subclusters associated with an allocation id.
 *
 * @param allocationId the allocation id considered
 *
 * @return the set of {@link SubClusterId}s that have localized containers
 *         recorded for this allocation id, or null if the id is unknown
 */
private Set<SubClusterId> getSubClustersForId(long allocationId) {
  // Single map lookup instead of get()-then-get() on the same key.
  Map<SubClusterId, AtomicLong> perSubCluster =
      countContainersPerRM.get(allocationId);
  return perSubCluster == null ? null : perSubCluster.keySet();
}
/**
 * Return the answer accumulated so far, after pruning redundant rack-local
 * requests.
 *
 * @return the per-subcluster map of {@link ResourceRequest}s to forward
 */
private Map<SubClusterId, List<ResourceRequest>> getAnswer() {
  Iterator<Entry<SubClusterId, List<ResourceRequest>>> answerIter =
      answer.entrySet().iterator();
  // Remove redundant rack RR before returning the answer
  while (answerIter.hasNext()) {
    Entry<SubClusterId, List<ResourceRequest>> entry = answerIter.next();
    SubClusterId scId = entry.getKey();
    // Allocation ids that have node-local/ANY requests for this subcluster
    // (populated by internalAddToAnswer); rack requests for any other
    // allocation id are dropped.
    Set<Long> mask = maskForRackDeletion.get(scId);
    if (mask != null) {
      Iterator<ResourceRequest> rrIter = entry.getValue().iterator();
      while (rrIter.hasNext()) {
        ResourceRequest rr = rrIter.next();
        if (!mask.contains(rr.getAllocationRequestId())) {
          rrIter.remove();
        }
      }
    }
    // Drop subclusters that only ever saw rack requests (no mask) or whose
    // request list became empty after pruning.
    if (mask == null || entry.getValue().size() == 0) {
      answerIter.remove();
      LOG.info("removing {} from output because it has only rack RR",
          scId);
    }
  }
  return answer;
}
/**
 * Return the set of sub-clusters that are both active and allowed by our
 * policy (weight > 0).
 *
 * <p>Note: this returns the live field, not a defensive copy.
 *
 * @return a set of active and enabled {@link SubClusterId}s
 */
private Set<SubClusterId> getActiveAndEnabledSC() {
  return activeAndEnabledSC;
}
/**
 * Return the total number of containers coming from localized requests
 * matching an allocation Id, or zero if none were recorded.
 */
private long getTotNumLocalizedContainers(long allocationId) {
  AtomicLong total = totNumLocalizedContainers.get(allocationId);
  if (total == null) {
    return 0;
  }
  return total.get();
}
/**
 * Returns the number of containers matching an allocation Id that are
 * localized in the targetId subcluster, or zero if nothing was recorded for
 * this allocation id or subcluster.
 */
private long getNumLocalizedContainers(long allocationId,
    SubClusterId targetId) {
  // Guard against unknown allocation ids: the previous code dereferenced
  // countContainersPerRM.get(allocationId) unconditionally and would NPE,
  // while the sibling accessors treat a missing id as zero/null.
  Map<SubClusterId, AtomicLong> perSubCluster =
      countContainersPerRM.get(allocationId);
  if (perSubCluster == null) {
    return 0;
  }
  AtomicLong c = perSubCluster.get(targetId);
  return c == null ? 0 : c.get();
}
/**
 * Returns true if the given subcluster is non-null and currently both active
 * and enabled.
 */
private boolean isActiveAndEnabled(SubClusterId targetId) {
  return targetId != null && getActiveAndEnabledSC().contains(targetId);
}
}
} |
google/ExoPlayer | 38,328 | library/core/src/main/java/com/google/android/exoplayer2/analytics/PlaybackStatsListener.java | /*
* Copyright (C) 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.analytics;
import static com.google.android.exoplayer2.util.Assertions.checkNotNull;
import static java.lang.Math.max;
import android.os.SystemClock;
import android.util.Pair;
import androidx.annotation.Nullable;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.PlaybackException;
import com.google.android.exoplayer2.Player;
import com.google.android.exoplayer2.Timeline;
import com.google.android.exoplayer2.Timeline.Period;
import com.google.android.exoplayer2.Tracks;
import com.google.android.exoplayer2.analytics.PlaybackStats.EventTimeAndException;
import com.google.android.exoplayer2.analytics.PlaybackStats.EventTimeAndFormat;
import com.google.android.exoplayer2.analytics.PlaybackStats.EventTimeAndPlaybackState;
import com.google.android.exoplayer2.analytics.PlaybackStats.PlaybackState;
import com.google.android.exoplayer2.source.LoadEventInfo;
import com.google.android.exoplayer2.source.MediaLoadData;
import com.google.android.exoplayer2.source.MediaSource.MediaPeriodId;
import com.google.android.exoplayer2.util.Assertions;
import com.google.android.exoplayer2.util.Util;
import com.google.android.exoplayer2.video.VideoSize;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* {@link AnalyticsListener} to gather {@link PlaybackStats} from the player.
*
* <p>For accurate measurements, the listener should be added to the player before loading media,
* i.e., {@link Player#getPlaybackState()} should be {@link Player#STATE_IDLE}.
*
* <p>Playback stats are gathered separately for each playback session, i.e. each window in the
* {@link Timeline} and each single ad.
*
* @deprecated com.google.android.exoplayer2 is deprecated. Please migrate to androidx.media3 (which
* contains the same ExoPlayer code). See <a
* href="https://developer.android.com/guide/topics/media/media3/getting-started/migration-guide">the
* migration guide</a> for more details, including a script to help with the migration.
*/
@Deprecated
public final class PlaybackStatsListener
implements AnalyticsListener, PlaybackSessionManager.Listener {
/** A listener for {@link PlaybackStats} updates. */
public interface Callback {

  /**
   * Called when a playback session ends and its {@link PlaybackStats} are ready.
   *
   * @param eventTime The {@link EventTime} at which the playback session started. This is the
   *     same {@link EventTime} that was passed to {@code onSessionCreated} and can be used to
   *     identify the playback session.
   * @param playbackStats The {@link PlaybackStats} for the ended playback session.
   */
  void onPlaybackStatsReady(EventTime eventTime, PlaybackStats playbackStats);
}
private final PlaybackSessionManager sessionManager;
// Per-session trackers, keyed by session id, plus each session's start EventTime.
private final Map<String, PlaybackStatsTracker> playbackStatsTrackers;
private final Map<String, EventTime> sessionStartEventTimes;
@Nullable private final Callback callback;
private final boolean keepHistory;
private final Period period;

// Stats merged from all sessions that have already finished.
private PlaybackStats finishedPlaybackStats;

// Most recent values reported by individual AnalyticsListener callbacks; they are
// attributed to sessions in onEvents() (some are reset there afterwards).
@Nullable private String discontinuityFromSession;
private long discontinuityFromPositionMs;
private @Player.DiscontinuityReason int discontinuityReason;
private int droppedFrames;
@Nullable private Exception nonFatalException;
private long bandwidthTimeMs;
private long bandwidthBytes;
@Nullable private Format videoFormat;
@Nullable private Format audioFormat;
private VideoSize videoSize;
/**
 * Creates a listener that gathers playback stats.
 *
 * @param keepHistory Whether the reported {@link PlaybackStats} should keep the full history of
 *     events.
 * @param callback An optional callback for finished {@link PlaybackStats}.
 */
public PlaybackStatsListener(boolean keepHistory, @Nullable Callback callback) {
  this.keepHistory = keepHistory;
  this.callback = callback;
  period = new Period();
  videoSize = VideoSize.UNKNOWN;
  finishedPlaybackStats = PlaybackStats.EMPTY;
  sessionStartEventTimes = new HashMap<>();
  playbackStatsTrackers = new HashMap<>();
  sessionManager = new DefaultPlaybackSessionManager();
  // Register last, once all state above is initialized.
  sessionManager.setListener(this);
}
/**
 * Returns the combined {@link PlaybackStats} for all playback sessions this listener was and is
 * listening to.
 *
 * <p>Note that these {@link PlaybackStats} will not contain the full history of events.
 *
 * @return The combined {@link PlaybackStats} for all playback sessions.
 */
public PlaybackStats getCombinedPlaybackStats() {
  // One slot per active tracker plus one for the already-finished stats.
  PlaybackStats[] statsToMerge = new PlaybackStats[playbackStatsTrackers.size() + 1];
  int i = 0;
  statsToMerge[i++] = finishedPlaybackStats;
  for (PlaybackStatsTracker tracker : playbackStatsTrackers.values()) {
    statsToMerge[i++] = tracker.build(/* isFinal= */ false);
  }
  return PlaybackStats.merge(statsToMerge);
}
/**
 * Returns the {@link PlaybackStats} for the current playback session, or null if no session is
 * active.
 *
 * @return {@link PlaybackStats} for the current playback session.
 */
@Nullable
public PlaybackStats getPlaybackStats() {
  @Nullable String activeSessionId = sessionManager.getActiveSessionId();
  if (activeSessionId == null) {
    return null;
  }
  @Nullable PlaybackStatsTracker tracker = playbackStatsTrackers.get(activeSessionId);
  if (tracker == null) {
    return null;
  }
  return tracker.build(/* isFinal= */ false);
}
// PlaybackSessionManager.Listener implementation.

@Override
public void onSessionCreated(EventTime eventTime, String sessionId) {
  // Start tracking stats for the new session and remember when it started.
  playbackStatsTrackers.put(sessionId, new PlaybackStatsTracker(keepHistory, eventTime));
  sessionStartEventTimes.put(sessionId, eventTime);
}
@Override
public void onSessionActive(EventTime eventTime, String sessionId) {
  // The session became the active foreground playback.
  checkNotNull(playbackStatsTrackers.get(sessionId)).onForeground();
}
@Override
public void onAdPlaybackStarted(
    EventTime eventTime, String contentSessionId, String adSessionId) {
  // Mark the content session as interrupted by the ad playback.
  checkNotNull(playbackStatsTrackers.get(contentSessionId)).onInterruptedByAd();
}
@Override
public void onSessionFinished(
    EventTime eventTime, String sessionId, boolean automaticTransitionToNextPlayback) {
  // Detach the session's tracker and stored start time.
  PlaybackStatsTracker finishedTracker = checkNotNull(playbackStatsTrackers.remove(sessionId));
  EventTime sessionStartTime = checkNotNull(sessionStartEventTimes.remove(sessionId));
  long discontinuityPositionMs = C.TIME_UNSET;
  if (sessionId.equals(discontinuityFromSession)) {
    discontinuityPositionMs = this.discontinuityFromPositionMs;
  }
  finishedTracker.onFinished(
      eventTime, automaticTransitionToNextPlayback, discontinuityPositionMs);
  PlaybackStats finalStats = finishedTracker.build(/* isFinal= */ true);
  finishedPlaybackStats = PlaybackStats.merge(finishedPlaybackStats, finalStats);
  if (callback != null) {
    callback.onPlaybackStatsReady(sessionStartTime, finalStats);
  }
}
// AnalyticsListener implementation.

@Override
public void onPositionDiscontinuity(
    EventTime eventTime,
    Player.PositionInfo oldPosition,
    Player.PositionInfo newPosition,
    @Player.DiscontinuityReason int reason) {
  // Only remember the first discontinuity's session/position; the pair is
  // consumed and cleared again in onEvents().
  if (discontinuityFromSession == null) {
    discontinuityFromSession = sessionManager.getActiveSessionId();
    discontinuityFromPositionMs = oldPosition.positionMs;
  }
  discontinuityReason = reason;
}
@Override
public void onDroppedVideoFrames(EventTime eventTime, int droppedFrames, long elapsedMs) {
  // Stored until attributed to a session in onEvents().
  this.droppedFrames = droppedFrames;
}
@Override
public void onLoadError(
    EventTime eventTime,
    LoadEventInfo loadEventInfo,
    MediaLoadData mediaLoadData,
    IOException error,
    boolean wasCanceled) {
  // Non-fatal; stored until attributed to a session in onEvents().
  nonFatalException = error;
}
@Override
public void onDrmSessionManagerError(EventTime eventTime, Exception error) {
  // Non-fatal; stored until attributed to a session in onEvents().
  nonFatalException = error;
}
@Override
public void onBandwidthEstimate(
    EventTime eventTime, int totalLoadTimeMs, long totalBytesLoaded, long bitrateEstimate) {
  // Stored until attributed to a session in onEvents().
  bandwidthTimeMs = totalLoadTimeMs;
  bandwidthBytes = totalBytesLoaded;
}
@Override
public void onDownstreamFormatChanged(EventTime eventTime, MediaLoadData mediaLoadData) {
  // Remember the latest downstream format per track kind; consumed in onEvents().
  int trackType = mediaLoadData.trackType;
  if (trackType == C.TRACK_TYPE_AUDIO) {
    audioFormat = mediaLoadData.trackFormat;
  } else if (trackType == C.TRACK_TYPE_VIDEO || trackType == C.TRACK_TYPE_DEFAULT) {
    videoFormat = mediaLoadData.trackFormat;
  }
}
@Override
public void onVideoSizeChanged(EventTime eventTime, VideoSize videoSize) {
  // Stored until attributed to a session in onEvents().
  this.videoSize = videoSize;
}
@Override
public void onEvents(Player player, Events events) {
  if (events.size() == 0) {
    return;
  }
  maybeAddSessions(events);
  // Iterate over entries rather than keySet()+get() — avoids one map lookup
  // per session and the implicitly-nullable get() result.
  for (Map.Entry<String, PlaybackStatsTracker> trackerEntry :
      playbackStatsTrackers.entrySet()) {
    String session = trackerEntry.getKey();
    PlaybackStatsTracker tracker = trackerEntry.getValue();
    // Pick the EventTime that best represents this session in this batch.
    Pair<EventTime, Boolean> eventTimeAndBelongsToPlayback = findBestEventTime(events, session);
    // Determine which of the pending single-shot values apply to this session.
    boolean hasDiscontinuityToPlayback = hasEvent(events, session, EVENT_POSITION_DISCONTINUITY);
    boolean hasDroppedFrames = hasEvent(events, session, EVENT_DROPPED_VIDEO_FRAMES);
    boolean hasAudioUnderrun = hasEvent(events, session, EVENT_AUDIO_UNDERRUN);
    boolean startedLoading = hasEvent(events, session, EVENT_LOAD_STARTED);
    boolean hasFatalError = hasEvent(events, session, EVENT_PLAYER_ERROR);
    boolean hasNonFatalException =
        hasEvent(events, session, EVENT_LOAD_ERROR)
            || hasEvent(events, session, EVENT_DRM_SESSION_MANAGER_ERROR);
    boolean hasBandwidthData = hasEvent(events, session, EVENT_BANDWIDTH_ESTIMATE);
    boolean hasFormatData = hasEvent(events, session, EVENT_DOWNSTREAM_FORMAT_CHANGED);
    boolean hasVideoSize = hasEvent(events, session, EVENT_VIDEO_SIZE_CHANGED);
    tracker.onEvents(
        player,
        /* eventTime= */ eventTimeAndBelongsToPlayback.first,
        /* belongsToPlayback= */ eventTimeAndBelongsToPlayback.second,
        session.equals(discontinuityFromSession) ? discontinuityFromPositionMs : C.TIME_UNSET,
        hasDiscontinuityToPlayback,
        hasDroppedFrames ? droppedFrames : 0,
        hasAudioUnderrun,
        startedLoading,
        hasFatalError ? player.getPlayerError() : null,
        hasNonFatalException ? nonFatalException : null,
        hasBandwidthData ? bandwidthTimeMs : 0,
        hasBandwidthData ? bandwidthBytes : 0,
        hasFormatData ? videoFormat : null,
        hasFormatData ? audioFormat : null,
        hasVideoSize ? videoSize : null);
  }
  // Single-use state has been attributed above; reset it for the next batch.
  videoFormat = null;
  audioFormat = null;
  discontinuityFromSession = null;
  if (events.contains(AnalyticsListener.EVENT_PLAYER_RELEASED)) {
    sessionManager.finishAllSessions(events.getEventTime(EVENT_PLAYER_RELEASED));
  }
}
/** Forwards every event time in the batch to the session manager to create/update sessions. */
private void maybeAddSessions(Events events) {
  for (int i = 0; i < events.size(); i++) {
    @EventFlags int event = events.get(i);
    EventTime time = events.getEventTime(event);
    if (event == EVENT_POSITION_DISCONTINUITY) {
      sessionManager.updateSessionsWithDiscontinuity(time, discontinuityReason);
    } else if (event == EVENT_TIMELINE_CHANGED) {
      sessionManager.updateSessionsWithTimelineChange(time);
    } else {
      sessionManager.updateSessions(time);
    }
  }
}
/**
 * Finds the most suitable {@link EventTime} in {@code events} for {@code session}: event times
 * belonging to the session are preferred, and among equally-preferred candidates the latest
 * realtime timestamp wins.
 *
 * @return A pair of the chosen {@link EventTime} and whether it belongs to the session.
 */
private Pair<EventTime, Boolean> findBestEventTime(Events events, String session) {
  @Nullable EventTime eventTime = null;
  boolean belongsToPlayback = false;
  for (int i = 0; i < events.size(); i++) {
    @EventFlags int event = events.get(i);
    EventTime newEventTime = events.getEventTime(event);
    boolean newBelongsToPlayback = sessionManager.belongsToSession(newEventTime, session);
    if (eventTime == null
        || (newBelongsToPlayback && !belongsToPlayback)
        || (newBelongsToPlayback == belongsToPlayback
            && newEventTime.realtimeMs > eventTime.realtimeMs)) {
      // Prefer event times for the current playback and prefer later timestamps.
      eventTime = newEventTime;
      belongsToPlayback = newBelongsToPlayback;
    }
  }
  checkNotNull(eventTime);
  if (!belongsToPlayback && eventTime.mediaPeriodId != null && eventTime.mediaPeriodId.isAd()) {
    // Replace ad event time with content event time unless it's for the ad playback itself.
    long contentPeriodPositionUs =
        eventTime
            .timeline
            .getPeriodByUid(eventTime.mediaPeriodId.periodUid, period)
            .getAdGroupTimeUs(eventTime.mediaPeriodId.adGroupIndex);
    if (contentPeriodPositionUs == C.TIME_END_OF_SOURCE) {
      // The ad group is a post-roll; use the end of the content period.
      contentPeriodPositionUs = period.durationUs;
    }
    long contentWindowPositionUs = contentPeriodPositionUs + period.getPositionInWindowUs();
    // Rebuild the EventTime with a content (non-ad) MediaPeriodId and position.
    eventTime =
        new EventTime(
            eventTime.realtimeMs,
            eventTime.timeline,
            eventTime.windowIndex,
            new MediaPeriodId(
                eventTime.mediaPeriodId.periodUid,
                eventTime.mediaPeriodId.windowSequenceNumber,
                eventTime.mediaPeriodId.adGroupIndex),
            /* eventPlaybackPositionMs= */ Util.usToMs(contentWindowPositionUs),
            eventTime.timeline,
            eventTime.currentWindowIndex,
            eventTime.currentMediaPeriodId,
            eventTime.currentPlaybackPositionMs,
            eventTime.totalBufferedDurationMs);
    belongsToPlayback = sessionManager.belongsToSession(eventTime, session);
  }
  return Pair.create(eventTime, belongsToPlayback);
}
/** Returns whether the batch contains {@code event} and it is attributed to {@code session}. */
private boolean hasEvent(Events events, String session, @EventFlags int event) {
  if (!events.contains(event)) {
    return false;
  }
  return sessionManager.belongsToSession(events.getEventTime(event), session);
}
/** Tracker for playback stats of a single playback. */
private static final class PlaybackStatsTracker {
// Final stats.
private final boolean keepHistory;
private final long[] playbackStateDurationsMs;
private final List<EventTimeAndPlaybackState> playbackStateHistory;
private final List<long[]> mediaTimeHistory;
private final List<EventTimeAndFormat> videoFormatHistory;
private final List<EventTimeAndFormat> audioFormatHistory;
private final List<EventTimeAndException> fatalErrorHistory;
private final List<EventTimeAndException> nonFatalErrorHistory;
private final boolean isAd;
private long firstReportedTimeMs;
private boolean hasBeenReady;
private boolean hasEnded;
private boolean isJoinTimeInvalid;
private int pauseCount;
private int pauseBufferCount;
private int seekCount;
private int rebufferCount;
private long maxRebufferTimeMs;
private int initialVideoFormatHeight;
private long initialVideoFormatBitrate;
private long initialAudioFormatBitrate;
// Time-weighted accumulators: each *TimeMs field is the accumulated media
// duration and the matching *TimeProduct is the duration-weighted sum of the
// metric (height or bitrate), updated in maybeRecord*FormatTime().
private long videoFormatHeightTimeMs;
private long videoFormatHeightTimeProduct;
private long videoFormatBitrateTimeMs;
private long videoFormatBitrateTimeProduct;
private long audioFormatTimeMs;
private long audioFormatBitrateTimeProduct;
private long bandwidthTimeMs;
private long bandwidthBytes;
private long droppedFrames;
private long audioUnderruns;
private int fatalErrorCount;
private int nonFatalErrorCount;
// Current player state tracking.
private @PlaybackState int currentPlaybackState;
private long currentPlaybackStateStartTimeMs;
private boolean isSeeking;
private boolean isForeground;
private boolean isInterruptedByAd;
private boolean hasFatalError;
private boolean startedLoading;
private long lastRebufferStartTimeMs;
@Nullable private Format currentVideoFormat;
@Nullable private Format currentAudioFormat;
private long lastVideoFormatStartTimeMs;
private long lastAudioFormatStartTimeMs;
private float currentPlaybackSpeed;
/**
 * Creates a tracker for the playback stats of a single playback.
 *
 * @param keepHistory Whether to keep a full history of events.
 * @param startTime The {@link EventTime} at which the playback stats start.
 */
public PlaybackStatsTracker(boolean keepHistory, EventTime startTime) {
  this.keepHistory = keepHistory;
  isAd = startTime.mediaPeriodId != null && startTime.mediaPeriodId.isAd();
  playbackStateDurationsMs = new long[PlaybackStats.PLAYBACK_STATE_COUNT];
  // History lists are only allocated when history is actually kept.
  if (keepHistory) {
    playbackStateHistory = new ArrayList<>();
    mediaTimeHistory = new ArrayList<>();
    videoFormatHistory = new ArrayList<>();
    audioFormatHistory = new ArrayList<>();
    fatalErrorHistory = new ArrayList<>();
    nonFatalErrorHistory = new ArrayList<>();
  } else {
    playbackStateHistory = Collections.emptyList();
    mediaTimeHistory = Collections.emptyList();
    videoFormatHistory = Collections.emptyList();
    audioFormatHistory = Collections.emptyList();
    fatalErrorHistory = Collections.emptyList();
    nonFatalErrorHistory = Collections.emptyList();
  }
  currentPlaybackState = PlaybackStats.PLAYBACK_STATE_NOT_STARTED;
  currentPlaybackStateStartTimeMs = startTime.realtimeMs;
  firstReportedTimeMs = C.TIME_UNSET;
  maxRebufferTimeMs = C.TIME_UNSET;
  initialVideoFormatHeight = C.LENGTH_UNSET;
  initialVideoFormatBitrate = C.LENGTH_UNSET;
  initialAudioFormatBitrate = C.LENGTH_UNSET;
  currentPlaybackSpeed = 1f;
}
/** Notifies the tracker that the current playback became the active foreground playback. */
public void onForeground() {
  // Read in resolveNewPlaybackState() to leave the background/not-started states.
  isForeground = true;
}
/** Notifies the tracker that the current playback is interrupted by an ad. */
public void onInterruptedByAd() {
  isInterruptedByAd = true;
  // An ad interruption implicitly ends any seek in progress.
  isSeeking = false;
}
/**
 * Notifies the tracker that the current playback has finished.
 *
 * @param eventTime The {@link EventTime}. Does not belong to this playback.
 * @param automaticTransition Whether the playback finished because of an automatic transition
 *     to the next playback item.
 * @param discontinuityFromPositionMs The position before the discontinuity from this playback,
 *     {@link C#TIME_UNSET} if no discontinuity started from this playback.
 */
public void onFinished(
    EventTime eventTime, boolean automaticTransition, long discontinuityFromPositionMs) {
  // Simulate state change to ENDED to record natural ending of playback.
  @PlaybackState
  int finalPlaybackState =
      currentPlaybackState == PlaybackStats.PLAYBACK_STATE_ENDED || automaticTransition
          ? PlaybackStats.PLAYBACK_STATE_ENDED
          : PlaybackStats.PLAYBACK_STATE_ABANDONED;
  // Flush pending time-based accounting before recording the final state.
  maybeUpdateMediaTimeHistory(eventTime.realtimeMs, discontinuityFromPositionMs);
  maybeRecordVideoFormatTime(eventTime.realtimeMs);
  maybeRecordAudioFormatTime(eventTime.realtimeMs);
  updatePlaybackState(finalPlaybackState, eventTime);
}
/**
 * Notifies the tracker of new events.
 *
 * @param player The {@link Player}.
 * @param eventTime The {@link EventTime} of the events.
 * @param belongsToPlayback Whether the {@code eventTime} belongs to this playback.
 * @param discontinuityFromPositionMs The position before the discontinuity from this playback,
 *     or {@link C#TIME_UNSET} if no discontinuity started from this playback.
 * @param hasDiscontinuity Whether a discontinuity to this playback occurred.
 * @param droppedFrameCount The number of newly dropped frames for this playback.
 * @param hasAudioUnderun Whether a new audio underrun occurred for this playback.
 * @param startedLoading Whether this playback started loading.
 * @param fatalError A fatal error for this playback, or null.
 * @param nonFatalException A non-fatal exception for this playback, or null.
 * @param bandwidthTimeMs The time in milliseconds spent loading for this playback.
 * @param bandwidthBytes The number of bytes loaded for this playback.
 * @param videoFormat A reported downstream video format for this playback, or null.
 * @param audioFormat A reported downstream audio format for this playback, or null.
 * @param videoSize The reported video size for this playback, or null.
 */
public void onEvents(
    Player player,
    EventTime eventTime,
    boolean belongsToPlayback,
    long discontinuityFromPositionMs,
    boolean hasDiscontinuity,
    int droppedFrameCount,
    boolean hasAudioUnderun,
    boolean startedLoading,
    @Nullable PlaybackException fatalError,
    @Nullable Exception nonFatalException,
    long bandwidthTimeMs,
    long bandwidthBytes,
    @Nullable Format videoFormat,
    @Nullable Format audioFormat,
    @Nullable VideoSize videoSize) {
  if (discontinuityFromPositionMs != C.TIME_UNSET) {
    // A discontinuity started from this playback: record the pre-discontinuity position and
    // treat the playback as seeking until the player leaves STATE_BUFFERING.
    maybeUpdateMediaTimeHistory(eventTime.realtimeMs, discontinuityFromPositionMs);
    isSeeking = true;
  }
  if (player.getPlaybackState() != Player.STATE_BUFFERING) {
    isSeeking = false;
  }
  int playerPlaybackState = player.getPlaybackState();
  if (playerPlaybackState == Player.STATE_IDLE
      || playerPlaybackState == Player.STATE_ENDED
      || hasDiscontinuity) {
    isInterruptedByAd = false;
  }
  // Fatal-error bookkeeping: the error flag sticks until the player reports no error.
  if (fatalError != null) {
    hasFatalError = true;
    fatalErrorCount++;
    if (keepHistory) {
      fatalErrorHistory.add(new EventTimeAndException(eventTime, fatalError));
    }
  } else if (player.getPlayerError() == null) {
    hasFatalError = false;
  }
  // Clear tracked formats for track types that are no longer selected.
  if (isForeground && !isInterruptedByAd) {
    Tracks currentTracks = player.getCurrentTracks();
    if (!currentTracks.isTypeSelected(C.TRACK_TYPE_VIDEO)) {
      maybeUpdateVideoFormat(eventTime, /* newFormat= */ null);
    }
    if (!currentTracks.isTypeSelected(C.TRACK_TYPE_AUDIO)) {
      maybeUpdateAudioFormat(eventTime, /* newFormat= */ null);
    }
  }
  if (videoFormat != null) {
    maybeUpdateVideoFormat(eventTime, videoFormat);
  }
  if (audioFormat != null) {
    maybeUpdateAudioFormat(eventTime, audioFormat);
  }
  // Backfill missing video dimensions from the reported video size.
  if (currentVideoFormat != null
      && currentVideoFormat.height == Format.NO_VALUE
      && videoSize != null) {
    Format formatWithHeightAndWidth =
        currentVideoFormat
            .buildUpon()
            .setWidth(videoSize.width)
            .setHeight(videoSize.height)
            .build();
    maybeUpdateVideoFormat(eventTime, formatWithHeightAndWidth);
  }
  if (startedLoading) {
    this.startedLoading = true;
  }
  if (hasAudioUnderun) {
    audioUnderruns++;
  }
  this.droppedFrames += droppedFrameCount;
  this.bandwidthTimeMs += bandwidthTimeMs;
  this.bandwidthBytes += bandwidthBytes;
  if (nonFatalException != null) {
    nonFatalErrorCount++;
    if (keepHistory) {
      nonFatalErrorHistory.add(new EventTimeAndException(eventTime, nonFatalException));
    }
  }
  // Close time-based accounting on any state or speed change, then apply the new state.
  @PlaybackState int newPlaybackState = resolveNewPlaybackState(player);
  float newPlaybackSpeed = player.getPlaybackParameters().speed;
  if (currentPlaybackState != newPlaybackState || currentPlaybackSpeed != newPlaybackSpeed) {
    maybeUpdateMediaTimeHistory(
        eventTime.realtimeMs,
        belongsToPlayback ? eventTime.eventPlaybackPositionMs : C.TIME_UNSET);
    maybeRecordVideoFormatTime(eventTime.realtimeMs);
    maybeRecordAudioFormatTime(eventTime.realtimeMs);
  }
  currentPlaybackSpeed = newPlaybackSpeed;
  if (currentPlaybackState != newPlaybackState) {
    updatePlaybackState(newPlaybackState, eventTime);
  }
}
/**
 * Builds the playback stats.
 *
 * @param isFinal Whether this is the final build and no further events are expected.
 */
public PlaybackStats build(boolean isFinal) {
  long[] playbackStateDurationsMs = this.playbackStateDurationsMs;
  List<long[]> mediaTimeHistory = this.mediaTimeHistory;
  if (!isFinal) {
    // Intermediate build: extrapolate durations up to now on a copy of the
    // durations array so the tracker's own array is not modified.
    long buildTimeMs = SystemClock.elapsedRealtime();
    playbackStateDurationsMs =
        Arrays.copyOf(this.playbackStateDurationsMs, PlaybackStats.PLAYBACK_STATE_COUNT);
    long lastStateDurationMs = max(0, buildTimeMs - currentPlaybackStateStartTimeMs);
    playbackStateDurationsMs[currentPlaybackState] += lastStateDurationMs;
    maybeUpdateMaxRebufferTimeMs(buildTimeMs);
    maybeRecordVideoFormatTime(buildTimeMs);
    maybeRecordAudioFormatTime(buildTimeMs);
    mediaTimeHistory = new ArrayList<>(this.mediaTimeHistory);
    if (keepHistory && currentPlaybackState == PlaybackStats.PLAYBACK_STATE_PLAYING) {
      mediaTimeHistory.add(guessMediaTimeBasedOnElapsedRealtime(buildTimeMs));
    }
  }
  // Join time only counts if playback reached a ready state without an invalid transition.
  boolean isJoinTimeInvalid = this.isJoinTimeInvalid || !hasBeenReady;
  long validJoinTimeMs =
      isJoinTimeInvalid
          ? C.TIME_UNSET
          : playbackStateDurationsMs[PlaybackStats.PLAYBACK_STATE_JOINING_FOREGROUND];
  boolean hasBackgroundJoin =
      playbackStateDurationsMs[PlaybackStats.PLAYBACK_STATE_JOINING_BACKGROUND] > 0;
  // Copy format histories for intermediate builds so later events don't mutate the result.
  List<EventTimeAndFormat> videoHistory =
      isFinal ? videoFormatHistory : new ArrayList<>(videoFormatHistory);
  List<EventTimeAndFormat> audioHistory =
      isFinal ? audioFormatHistory : new ArrayList<>(audioFormatHistory);
  return new PlaybackStats(
      /* playbackCount= */ 1,
      playbackStateDurationsMs,
      isFinal ? playbackStateHistory : new ArrayList<>(playbackStateHistory),
      mediaTimeHistory,
      firstReportedTimeMs,
      /* foregroundPlaybackCount= */ isForeground ? 1 : 0,
      /* abandonedBeforeReadyCount= */ hasBeenReady ? 0 : 1,
      /* endedCount= */ hasEnded ? 1 : 0,
      /* backgroundJoiningCount= */ hasBackgroundJoin ? 1 : 0,
      validJoinTimeMs,
      /* validJoinTimeCount= */ isJoinTimeInvalid ? 0 : 1,
      pauseCount,
      pauseBufferCount,
      seekCount,
      rebufferCount,
      maxRebufferTimeMs,
      /* adPlaybackCount= */ isAd ? 1 : 0,
      videoHistory,
      audioHistory,
      videoFormatHeightTimeMs,
      videoFormatHeightTimeProduct,
      videoFormatBitrateTimeMs,
      videoFormatBitrateTimeProduct,
      audioFormatTimeMs,
      audioFormatBitrateTimeProduct,
      /* initialVideoFormatHeightCount= */ initialVideoFormatHeight == C.LENGTH_UNSET ? 0 : 1,
      /* initialVideoFormatBitrateCount= */ initialVideoFormatBitrate == C.LENGTH_UNSET ? 0 : 1,
      initialVideoFormatHeight,
      initialVideoFormatBitrate,
      /* initialAudioFormatBitrateCount= */ initialAudioFormatBitrate == C.LENGTH_UNSET ? 0 : 1,
      initialAudioFormatBitrate,
      bandwidthTimeMs,
      bandwidthBytes,
      droppedFrames,
      audioUnderruns,
      /* fatalErrorPlaybackCount= */ fatalErrorCount > 0 ? 1 : 0,
      fatalErrorCount,
      nonFatalErrorCount,
      fatalErrorHistory,
      nonFatalErrorHistory);
}
/**
 * Moves the tracker to {@code newPlaybackState} at {@code eventTime}, attributing the elapsed
 * time to the state being left and updating transition-driven counters.
 */
private void updatePlaybackState(@PlaybackState int newPlaybackState, EventTime eventTime) {
  Assertions.checkArgument(eventTime.realtimeMs >= currentPlaybackStateStartTimeMs);
  // Attribute the elapsed wall-clock time to the state we are leaving.
  long stateDurationMs = eventTime.realtimeMs - currentPlaybackStateStartTimeMs;
  playbackStateDurationsMs[currentPlaybackState] += stateDurationMs;
  if (firstReportedTimeMs == C.TIME_UNSET) {
    firstReportedTimeMs = eventTime.realtimeMs;
  }
  isJoinTimeInvalid |= isInvalidJoinTransition(currentPlaybackState, newPlaybackState);
  hasBeenReady |= isReadyState(newPlaybackState);
  hasEnded |= newPlaybackState == PlaybackStats.PLAYBACK_STATE_ENDED;
  // Transition-driven counters.
  if (!isPausedState(currentPlaybackState) && isPausedState(newPlaybackState)) {
    pauseCount++;
  }
  if (newPlaybackState == PlaybackStats.PLAYBACK_STATE_SEEKING) {
    seekCount++;
  }
  if (!isRebufferingState(currentPlaybackState) && isRebufferingState(newPlaybackState)) {
    rebufferCount++;
    lastRebufferStartTimeMs = eventTime.realtimeMs;
  }
  if (isRebufferingState(currentPlaybackState)
      && currentPlaybackState != PlaybackStats.PLAYBACK_STATE_PAUSED_BUFFERING
      && newPlaybackState == PlaybackStats.PLAYBACK_STATE_PAUSED_BUFFERING) {
    pauseBufferCount++;
  }
  maybeUpdateMaxRebufferTimeMs(eventTime.realtimeMs);
  currentPlaybackState = newPlaybackState;
  currentPlaybackStateStartTimeMs = eventTime.realtimeMs;
  if (keepHistory) {
    playbackStateHistory.add(new EventTimeAndPlaybackState(eventTime, currentPlaybackState));
  }
}
/**
 * Derives the tracker's {@link PlaybackState} from the player's current state combined with
 * the tracker flags (seeking, fatal error, foreground, ad interruption).
 */
private @PlaybackState int resolveNewPlaybackState(Player player) {
  @Player.State int playerPlaybackState = player.getPlaybackState();
  if (isSeeking && isForeground) {
    // Seeking takes precedence over errors such that we report a seek while in error state.
    return PlaybackStats.PLAYBACK_STATE_SEEKING;
  } else if (hasFatalError) {
    return PlaybackStats.PLAYBACK_STATE_FAILED;
  } else if (!isForeground) {
    // Before the playback becomes foreground, only report background joining and not started.
    return startedLoading
        ? PlaybackStats.PLAYBACK_STATE_JOINING_BACKGROUND
        : PlaybackStats.PLAYBACK_STATE_NOT_STARTED;
  } else if (isInterruptedByAd) {
    return PlaybackStats.PLAYBACK_STATE_INTERRUPTED_BY_AD;
  } else if (playerPlaybackState == Player.STATE_ENDED) {
    return PlaybackStats.PLAYBACK_STATE_ENDED;
  } else if (playerPlaybackState == Player.STATE_BUFFERING) {
    // Buffering before ever reaching a steady state counts as (foreground) joining.
    if (currentPlaybackState == PlaybackStats.PLAYBACK_STATE_NOT_STARTED
        || currentPlaybackState == PlaybackStats.PLAYBACK_STATE_JOINING_BACKGROUND
        || currentPlaybackState == PlaybackStats.PLAYBACK_STATE_JOINING_FOREGROUND
        || currentPlaybackState == PlaybackStats.PLAYBACK_STATE_INTERRUPTED_BY_AD) {
      return PlaybackStats.PLAYBACK_STATE_JOINING_FOREGROUND;
    }
    if (!player.getPlayWhenReady()) {
      return PlaybackStats.PLAYBACK_STATE_PAUSED_BUFFERING;
    }
    return player.getPlaybackSuppressionReason() != Player.PLAYBACK_SUPPRESSION_REASON_NONE
        ? PlaybackStats.PLAYBACK_STATE_SUPPRESSED_BUFFERING
        : PlaybackStats.PLAYBACK_STATE_BUFFERING;
  } else if (playerPlaybackState == Player.STATE_READY) {
    if (!player.getPlayWhenReady()) {
      return PlaybackStats.PLAYBACK_STATE_PAUSED;
    }
    return player.getPlaybackSuppressionReason() != Player.PLAYBACK_SUPPRESSION_REASON_NONE
        ? PlaybackStats.PLAYBACK_STATE_SUPPRESSED
        : PlaybackStats.PLAYBACK_STATE_PLAYING;
  } else if (playerPlaybackState == Player.STATE_IDLE
      && currentPlaybackState != PlaybackStats.PLAYBACK_STATE_NOT_STARTED) {
    // This case only applies for calls to player.stop(). All other IDLE cases are handled by
    // !isForeground, hasFatalError or isSuspended. NOT_STARTED is deliberately ignored.
    return PlaybackStats.PLAYBACK_STATE_STOPPED;
  }
  return currentPlaybackState;
}
/** Updates the maximum rebuffer duration if a rebuffer is ongoing at {@code nowMs}. */
private void maybeUpdateMaxRebufferTimeMs(long nowMs) {
  if (!isRebufferingState(currentPlaybackState)) {
    return;
  }
  long rebufferDurationMs = nowMs - lastRebufferStartTimeMs;
  if (maxRebufferTimeMs == C.TIME_UNSET || rebufferDurationMs > maxRebufferTimeMs) {
    maxRebufferTimeMs = rebufferDurationMs;
  }
}
/**
 * Appends an entry to the media time history, if history is kept. {@code mediaTimeMs} may be
 * {@link C#TIME_UNSET} when the current media time is unknown.
 */
private void maybeUpdateMediaTimeHistory(long realtimeMs, long mediaTimeMs) {
  if (!keepHistory) {
    return;
  }
  if (currentPlaybackState != PlaybackStats.PLAYBACK_STATE_PLAYING) {
    if (mediaTimeMs == C.TIME_UNSET) {
      return;
    }
    if (!mediaTimeHistory.isEmpty()) {
      long previousMediaTimeMs = mediaTimeHistory.get(mediaTimeHistory.size() - 1)[1];
      if (previousMediaTimeMs != mediaTimeMs) {
        // Media time jumped while not playing: close the previous segment at
        // the old media time before recording the new position.
        mediaTimeHistory.add(new long[] {realtimeMs, previousMediaTimeMs});
      }
    }
  }
  if (mediaTimeMs != C.TIME_UNSET) {
    mediaTimeHistory.add(new long[] {realtimeMs, mediaTimeMs});
  } else if (!mediaTimeHistory.isEmpty()) {
    // Media time unknown: extrapolate from the last known entry.
    mediaTimeHistory.add(guessMediaTimeBasedOnElapsedRealtime(realtimeMs));
  }
}
private long[] guessMediaTimeBasedOnElapsedRealtime(long realtimeMs) {
long[] previousKnownMediaTimeHistory = mediaTimeHistory.get(mediaTimeHistory.size() - 1);
long previousRealtimeMs = previousKnownMediaTimeHistory[0];
long previousMediaTimeMs = previousKnownMediaTimeHistory[1];
long elapsedMediaTimeEstimateMs =
(long) ((realtimeMs - previousRealtimeMs) * currentPlaybackSpeed);
long mediaTimeEstimateMs = previousMediaTimeMs + elapsedMediaTimeEstimateMs;
return new long[] {realtimeMs, mediaTimeEstimateMs};
}
  /**
   * Handles a change of the active video format, updating initial-format stats and the format
   * history.
   *
   * @param eventTime Time of the change.
   * @param newFormat The new video format, or null if video playback stopped.
   */
  private void maybeUpdateVideoFormat(EventTime eventTime, @Nullable Format newFormat) {
    if (Util.areEqual(currentVideoFormat, newFormat)) {
      return;
    }
    // Attribute time spent in the outgoing format before switching.
    maybeRecordVideoFormatTime(eventTime.realtimeMs);
    if (newFormat != null) {
      // Capture the first known height/bitrate only once per session.
      if (initialVideoFormatHeight == C.LENGTH_UNSET && newFormat.height != Format.NO_VALUE) {
        initialVideoFormatHeight = newFormat.height;
      }
      if (initialVideoFormatBitrate == C.LENGTH_UNSET && newFormat.bitrate != Format.NO_VALUE) {
        initialVideoFormatBitrate = newFormat.bitrate;
      }
    }
    currentVideoFormat = newFormat;
    if (keepHistory) {
      videoFormatHistory.add(new EventTimeAndFormat(eventTime, currentVideoFormat));
    }
  }
private void maybeUpdateAudioFormat(EventTime eventTime, @Nullable Format newFormat) {
if (Util.areEqual(currentAudioFormat, newFormat)) {
return;
}
maybeRecordAudioFormatTime(eventTime.realtimeMs);
if (newFormat != null
&& initialAudioFormatBitrate == C.LENGTH_UNSET
&& newFormat.bitrate != Format.NO_VALUE) {
initialAudioFormatBitrate = newFormat.bitrate;
}
currentAudioFormat = newFormat;
if (keepHistory) {
audioFormatHistory.add(new EventTimeAndFormat(eventTime, currentAudioFormat));
}
}
  /**
   * Accumulates height- and bitrate-weighted playing time for the current video format up to
   * {@code nowMs}, then restarts the measurement interval at {@code nowMs}.
   */
  private void maybeRecordVideoFormatTime(long nowMs) {
    if (currentPlaybackState == PlaybackStats.PLAYBACK_STATE_PLAYING
        && currentVideoFormat != null) {
      // Scale wall-clock time by playback speed to approximate media time.
      long mediaDurationMs = (long) ((nowMs - lastVideoFormatStartTimeMs) * currentPlaybackSpeed);
      if (currentVideoFormat.height != Format.NO_VALUE) {
        videoFormatHeightTimeMs += mediaDurationMs;
        videoFormatHeightTimeProduct += mediaDurationMs * currentVideoFormat.height;
      }
      if (currentVideoFormat.bitrate != Format.NO_VALUE) {
        videoFormatBitrateTimeMs += mediaDurationMs;
        videoFormatBitrateTimeProduct += mediaDurationMs * currentVideoFormat.bitrate;
      }
    }
    lastVideoFormatStartTimeMs = nowMs;
  }
  /**
   * Accumulates bitrate-weighted playing time for the current audio format up to {@code nowMs},
   * then restarts the measurement interval at {@code nowMs}.
   */
  private void maybeRecordAudioFormatTime(long nowMs) {
    if (currentPlaybackState == PlaybackStats.PLAYBACK_STATE_PLAYING
        && currentAudioFormat != null
        && currentAudioFormat.bitrate != Format.NO_VALUE) {
      // Scale wall-clock time by playback speed to approximate media time.
      long mediaDurationMs = (long) ((nowMs - lastAudioFormatStartTimeMs) * currentPlaybackSpeed);
      audioFormatTimeMs += mediaDurationMs;
      audioFormatBitrateTimeProduct += mediaDurationMs * currentAudioFormat.bitrate;
    }
    lastAudioFormatStartTimeMs = nowMs;
  }
private static boolean isReadyState(@PlaybackState int state) {
return state == PlaybackStats.PLAYBACK_STATE_PLAYING
|| state == PlaybackStats.PLAYBACK_STATE_PAUSED
|| state == PlaybackStats.PLAYBACK_STATE_SUPPRESSED;
}
private static boolean isPausedState(@PlaybackState int state) {
return state == PlaybackStats.PLAYBACK_STATE_PAUSED
|| state == PlaybackStats.PLAYBACK_STATE_PAUSED_BUFFERING;
}
private static boolean isRebufferingState(@PlaybackState int state) {
return state == PlaybackStats.PLAYBACK_STATE_BUFFERING
|| state == PlaybackStats.PLAYBACK_STATE_PAUSED_BUFFERING
|| state == PlaybackStats.PLAYBACK_STATE_SUPPRESSED_BUFFERING;
}
private static boolean isInvalidJoinTransition(
@PlaybackState int oldState, @PlaybackState int newState) {
if (oldState != PlaybackStats.PLAYBACK_STATE_JOINING_BACKGROUND
&& oldState != PlaybackStats.PLAYBACK_STATE_JOINING_FOREGROUND
&& oldState != PlaybackStats.PLAYBACK_STATE_INTERRUPTED_BY_AD) {
return false;
}
return newState != PlaybackStats.PLAYBACK_STATE_JOINING_BACKGROUND
&& newState != PlaybackStats.PLAYBACK_STATE_JOINING_FOREGROUND
&& newState != PlaybackStats.PLAYBACK_STATE_INTERRUPTED_BY_AD
&& newState != PlaybackStats.PLAYBACK_STATE_PLAYING
&& newState != PlaybackStats.PLAYBACK_STATE_PAUSED
&& newState != PlaybackStats.PLAYBACK_STATE_SUPPRESSED
&& newState != PlaybackStats.PLAYBACK_STATE_ENDED;
}
}
}
|
openjdk/jdk8 | 38,069 | jdk/src/windows/classes/sun/awt/windows/WComponentPeer.java | /*
* Copyright (c) 1996, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package sun.awt.windows;
import java.awt.*;
import java.awt.peer.*;
import java.awt.image.VolatileImage;
import sun.awt.RepaintArea;
import sun.awt.CausedFocusEvent;
import sun.awt.image.SunVolatileImage;
import sun.awt.image.ToolkitImage;
import java.awt.image.BufferedImage;
import java.awt.image.ImageProducer;
import java.awt.image.ImageObserver;
import java.awt.image.ColorModel;
import java.awt.event.PaintEvent;
import java.awt.event.InvocationEvent;
import java.awt.event.KeyEvent;
import java.awt.event.FocusEvent;
import java.awt.event.MouseEvent;
import java.awt.event.MouseWheelEvent;
import java.awt.event.InputEvent;
import sun.awt.Win32GraphicsConfig;
import sun.awt.Win32GraphicsEnvironment;
import sun.java2d.InvalidPipeException;
import sun.java2d.SurfaceData;
import sun.java2d.ScreenUpdateManager;
import sun.java2d.d3d.D3DSurfaceData;
import sun.java2d.opengl.OGLSurfaceData;
import sun.java2d.pipe.Region;
import sun.awt.PaintEventDispatcher;
import sun.awt.SunToolkit;
import sun.awt.event.IgnorePaintEvent;
import java.awt.dnd.DropTarget;
import java.awt.dnd.peer.DropTargetPeer;
import sun.awt.AWTAccessor;
import sun.util.logging.PlatformLogger;
public abstract class WComponentPeer extends WObjectPeer
implements ComponentPeer, DropTargetPeer
{
    /**
     * Handle to native window
     */
    protected volatile long hwnd;
    private static final PlatformLogger log = PlatformLogger.getLogger("sun.awt.windows.WComponentPeer");
    private static final PlatformLogger shapeLog = PlatformLogger.getLogger("sun.awt.windows.shape.WComponentPeer");
    private static final PlatformLogger focusLog = PlatformLogger.getLogger("sun.awt.windows.focus.WComponentPeer");
    // ComponentPeer implementation
    SurfaceData surfaceData;
    private RepaintArea paintArea;
    protected Win32GraphicsConfig winGraphicsConfig;
    boolean isLayouting = false;
    boolean paintPending = false;
    // Last size pushed to the native window; -1 until the first show()/setBounds().
    int oldWidth = -1;
    int oldHeight = -1;
    private int numBackBuffers = 0;
    private VolatileImage backBuffer = null;
    private BufferCapabilities backBufferCaps = null;
    // foreground, background and color are cached to avoid calling back
    // into the Component.
    private Color foreground;
    private Color background;
    private Font font;
    public native boolean isObscured();
    public boolean canDetermineObscurity() { return true; }
    // DropTarget support
    int nDropTargets;
    long nativeDropTargetContext; // native pointer
    public synchronized native void pShow();
    public synchronized native void hide();
    public synchronized native void enable();
    public synchronized native void disable();
    /** Returns the native window handle (HWND) backing this peer. */
    public long getHWnd() {
        return hwnd;
    }
    /* New 1.1 API */
    public native Point getLocationOnScreen();
    /* New 1.1 API */
    public void setVisible(boolean b) {
        if (b) {
            show();
        } else {
            hide();
        }
    }
    /** Shows the native window, caching the target's current size first. */
    public void show() {
        Dimension s = ((Component)target).getSize();
        oldHeight = s.height;
        oldWidth = s.width;
        pShow();
    }
    /* New 1.1 API */
    public void setEnabled(boolean b) {
        if (b) {
            enable();
        } else {
            disable();
        }
    }
    public int serialNum = 0;
    private native void reshapeNoCheck(int x, int y, int width, int height);
    /* New 1.1 API */
    public void setBounds(int x, int y, int width, int height, int op) {
        // Should set paintPending before reshape to prevent
        // thread race between paint events
        // Native components do redraw after resize
        paintPending = (width != oldWidth) || (height != oldHeight);
        if ( (op & NO_EMBEDDED_CHECK) != 0 ) {
            reshapeNoCheck(x, y, width, height);
        } else {
            reshape(x, y, width, height);
        }
        if ((width != oldWidth) || (height != oldHeight)) {
            // Only recreate surfaceData if this setBounds is called
            // for a resize; a simple move should not trigger a recreation
            try {
                replaceSurfaceData();
            } catch (InvalidPipeException e) {
                // REMIND : what do we do if our surface creation failed?
            }
            oldWidth = width;
            oldHeight = height;
        }
        serialNum++;
    }
    /*
     * Called from native code (on Toolkit thread) in order to
     * dynamically layout the Container during resizing
     */
    void dynamicallyLayoutContainer() {
        // If we got the WM_SIZING, this must be a Container, right?
        // In fact, it must be the top-level Container.
        if (log.isLoggable(PlatformLogger.Level.FINE)) {
            Container parent = WToolkit.getNativeContainer((Component)target);
            if (parent != null) {
                log.fine("Assertion (parent == null) failed");
            }
        }
        final Container cont = (Container)target;
        // Layout must happen on the EDT, not on the Toolkit thread we were called on.
        WToolkit.executeOnEventHandlerThread(cont, new Runnable() {
            public void run() {
                // Discarding old paint events doesn't seem to be necessary.
                cont.invalidate();
                cont.validate();
                if (surfaceData instanceof D3DSurfaceData.D3DWindowSurfaceData ||
                    surfaceData instanceof OGLSurfaceData)
                {
                    // When OGL or D3D is enabled, it is necessary to
                    // replace the SurfaceData for each dynamic layout
                    // request so that the viewport stays in sync
                    // with the window bounds.
                    try {
                        replaceSurfaceData();
                    } catch (InvalidPipeException e) {
                        // REMIND: this is unlikely to occur for OGL, but
                        // what do we do if surface creation fails?
                    }
                }
                // Forcing a paint here doesn't seem to be necessary.
                // paintDamagedAreaImmediately();
            }
        });
    }
    /*
     * Paints any portion of the component that needs updating
     * before the call returns (similar to the Win32 API UpdateWindow)
     */
    void paintDamagedAreaImmediately() {
        // force Windows to send any pending WM_PAINT events so
        // the damage area is updated on the Java side
        updateWindow();
        // make sure paint events are transferred to main event queue
        // for coalescing
        SunToolkit.flushPendingEvents();
        // paint the damaged area
        paintArea.paint(target, shouldClearRectBeforePaint());
    }
    native synchronized void updateWindow();
    /** Delegates painting to the target component. */
    public void paint(Graphics g) {
        ((Component)target).paint(g);
    }
    // Intentionally a no-op: repaints are posted from the native update path instead.
    public void repaint(long tm, int x, int y, int width, int height) {
    }
    // Number of horizontal bands used when printing, to bound peak memory use.
    private static final double BANDING_DIVISOR = 4.0;
    private native int[] createPrintedPixels(int srcX, int srcY,
                                             int srcW, int srcH,
                                             int alpha);
    /** Prints the native component by grabbing its pixels in horizontal bands. */
    public void print(Graphics g) {
        Component comp = (Component)target;
        // To conserve memory usage, we will band the image.
        int totalW = comp.getWidth();
        int totalH = comp.getHeight();
        int hInc = (int)(totalH / BANDING_DIVISOR);
        if (hInc == 0) {
            hInc = totalH;
        }
        for (int startY = 0; startY < totalH; startY += hInc) {
            int endY = startY + hInc - 1;
            if (endY >= totalH) {
                endY = totalH - 1;
            }
            int h = endY - startY + 1;
            Color bgColor = comp.getBackground();
            // A null background is treated as fully opaque (alpha 255).
            int[] pix = createPrintedPixels(0, startY, totalW, h,
                                            bgColor == null ? 255 : bgColor.getAlpha());
            if (pix != null) {
                BufferedImage bim = new BufferedImage(totalW, h,
                                                      BufferedImage.TYPE_INT_ARGB);
                bim.setRGB(0, 0, totalW, h, pix, 0, totalW);
                g.drawImage(bim, 0, startY, null);
                bim.flush();
            }
        }
        comp.print(g);
    }
    /** Accumulates the event's dirty rectangle into the peer's repaint area. */
    public void coalescePaintEvent(PaintEvent e) {
        Rectangle r = e.getUpdateRect();
        if (!(e instanceof IgnorePaintEvent)) {
            paintArea.add(r, e.getID());
        }
        if (log.isLoggable(PlatformLogger.Level.FINEST)) {
            switch(e.getID()) {
            case PaintEvent.UPDATE:
                log.finest("coalescePaintEvent: UPDATE: add: x = " +
                    r.x + ", y = " + r.y + ", width = " + r.width + ", height = " + r.height);
                return;
            case PaintEvent.PAINT:
                log.finest("coalescePaintEvent: PAINT: add: x = " +
                    r.x + ", y = " + r.y + ", width = " + r.width + ", height = " + r.height);
                return;
            }
        }
    }
    public synchronized native void reshape(int x, int y, int width, int height);
    // returns true if the event has been handled and shouldn't be propagated
    // though handleEvent method chain - e.g. WTextFieldPeer returns true
    // on handling '\n' to prevent it from being passed to native code
    public boolean handleJavaKeyEvent(KeyEvent e) { return false; }
    /** Requests focus on mouse press when the target should be focused by click. */
    public void handleJavaMouseEvent(MouseEvent e) {
        switch (e.getID()) {
          case MouseEvent.MOUSE_PRESSED:
              // Note that Swing requests focus in its own mouse event handler.
              if (target == e.getSource() &&
                  !((Component)target).isFocusOwner() &&
                  WKeyboardFocusManagerPeer.shouldFocusOnClick((Component)target))
              {
                  WKeyboardFocusManagerPeer.requestFocusFor((Component)target,
                                                            CausedFocusEvent.Cause.MOUSE_EVENT);
              }
              break;
        }
    }
    native void nativeHandleEvent(AWTEvent e);
    @SuppressWarnings("fallthrough")
    public void handleEvent(AWTEvent e) {
        int id = e.getID();
        if ((e instanceof InputEvent) && !((InputEvent)e).isConsumed() &&
            ((Component)target).isEnabled())
        {
            if (e instanceof MouseEvent && !(e instanceof MouseWheelEvent)) {
                handleJavaMouseEvent((MouseEvent) e);
            } else if (e instanceof KeyEvent) {
                // A true return means the subclass consumed the key; don't pass to native.
                if (handleJavaKeyEvent((KeyEvent)e)) {
                    return;
                }
            }
        }
        switch(id) {
          case PaintEvent.PAINT:
              // Got native painting
              paintPending = false;
              // Fallthrough to next statement
          case PaintEvent.UPDATE:
              // Skip all painting while layouting and all UPDATEs
              // while waiting for native paint
              if (!isLayouting && ! paintPending) {
                  paintArea.paint(target,shouldClearRectBeforePaint());
              }
              return;
          case FocusEvent.FOCUS_LOST:
          case FocusEvent.FOCUS_GAINED:
              handleJavaFocusEvent((FocusEvent)e);
          default:
              break;
        }
        // Call the native code
        nativeHandleEvent(e);
    }
    /** Forwards Java-level focus changes to the native window. */
    void handleJavaFocusEvent(FocusEvent fe) {
        if (focusLog.isLoggable(PlatformLogger.Level.FINER)) {
            focusLog.finer(fe.toString());
        }
        setFocus(fe.getID() == FocusEvent.FOCUS_GAINED);
    }
    native void setFocus(boolean doSetFocus);
    /** Native components report their current size as the minimum size. */
    public Dimension getMinimumSize() {
        return ((Component)target).getSize();
    }
    public Dimension getPreferredSize() {
        return getMinimumSize();
    }
    // Do nothing for heavyweight implementation
    public void layout() {}
    public Rectangle getBounds() {
        return ((Component)target).getBounds();
    }
    public boolean isFocusable() {
        return false;
    }
    /*
     * Return the GraphicsConfiguration associated with this peer, either
     * the locally stored winGraphicsConfig, or that of the target Component.
     */
    public GraphicsConfiguration getGraphicsConfiguration() {
        if (winGraphicsConfig != null) {
            return winGraphicsConfig;
        }
        else {
            // we don't need a treelock here, since
            // Component.getGraphicsConfiguration() gets it itself.
            return ((Component)target).getGraphicsConfiguration();
        }
    }
    public SurfaceData getSurfaceData() {
        return surfaceData;
    }
    /**
     * Creates new surfaceData object and invalidates the previous
     * surfaceData object.
     * Replacing the surface data should never lock on any resources which are
     * required by other threads which may have them and may require
     * the tree-lock.
     * This is a degenerate version of replaceSurfaceData(numBackBuffers), so
     * just call that version with our current numBackBuffers.
     */
    public void replaceSurfaceData() {
        replaceSurfaceData(this.numBackBuffers, this.backBufferCaps);
    }
    /** Creates (or re-creates on resize) the on-screen surface for this peer. */
    public void createScreenSurface(boolean isResize)
    {
        Win32GraphicsConfig gc = (Win32GraphicsConfig)getGraphicsConfiguration();
        ScreenUpdateManager mgr = ScreenUpdateManager.getInstance();
        surfaceData = mgr.createScreenSurface(gc, this, numBackBuffers, isResize);
    }
/**
* Multi-buffer version of replaceSurfaceData. This version is called
* by createBuffers(), which needs to acquire the same locks in the same
* order, but also needs to perform additional functions inside the
* locks.
*/
public void replaceSurfaceData(int newNumBackBuffers,
BufferCapabilities caps)
{
SurfaceData oldData = null;
VolatileImage oldBB = null;
synchronized(((Component)target).getTreeLock()) {
synchronized(this) {
if (pData == 0) {
return;
}
numBackBuffers = newNumBackBuffers;
ScreenUpdateManager mgr = ScreenUpdateManager.getInstance();
oldData = surfaceData;
mgr.dropScreenSurface(oldData);
createScreenSurface(true);
if (oldData != null) {
oldData.invalidate();
}
oldBB = backBuffer;
if (numBackBuffers > 0) {
// set the caps first, they're used when creating the bb
backBufferCaps = caps;
Win32GraphicsConfig gc =
(Win32GraphicsConfig)getGraphicsConfiguration();
backBuffer = gc.createBackBuffer(this);
} else if (backBuffer != null) {
backBufferCaps = null;
backBuffer = null;
}
}
}
// it would be better to do this before we create new ones,
// but then we'd run into deadlock issues
if (oldData != null) {
oldData.flush();
// null out the old data to make it collected faster
oldData = null;
}
if (oldBB != null) {
oldBB.flush();
// null out the old data to make it collected faster
oldData = null;
}
}
    /** Schedules a surface-data replacement on the EDT (or the paint dispatcher queue). */
    public void replaceSurfaceDataLater() {
        Runnable r = new Runnable() {
            public void run() {
                // Shouldn't do anything if object is disposed in meanwhile
                // No need for sync as disposeAction in Window is performed
                // on EDT
                if (!isDisposed()) {
                    try {
                        replaceSurfaceData();
                    } catch (InvalidPipeException e) {
                        // REMIND : what do we do if our surface creation failed?
                    }
                }
            }
        };
        Component c = (Component)target;
        // Fix 6255371.
        if (!PaintEventDispatcher.getPaintEventDispatcher().queueSurfaceDataReplacing(c, r)) {
            postEvent(new InvocationEvent(c, r));
        }
    }
    /** Adopts a new GraphicsConfiguration and rebuilds the surface data for it. */
    public boolean updateGraphicsData(GraphicsConfiguration gc) {
        winGraphicsConfig = (Win32GraphicsConfig)gc;
        try {
            replaceSurfaceData();
        } catch (InvalidPipeException e) {
            // REMIND : what do we do if our surface creation failed?
        }
        return false;
    }
    //This will return null for Components not yet added to a Container
    public ColorModel getColorModel() {
        GraphicsConfiguration gc = getGraphicsConfiguration();
        if (gc != null) {
            return gc.getColorModel();
        }
        else {
            return null;
        }
    }
    //This will return null for Components not yet added to a Container
    public ColorModel getDeviceColorModel() {
        Win32GraphicsConfig gc =
            (Win32GraphicsConfig)getGraphicsConfiguration();
        if (gc != null) {
            return gc.getDeviceColorModel();
        }
        else {
            return null;
        }
    }
    //Returns null for Components not yet added to a Container
    public ColorModel getColorModel(int transparency) {
        // return WToolkit.config.getColorModel(transparency);
        GraphicsConfiguration gc = getGraphicsConfiguration();
        if (gc != null) {
            return gc.getColorModel(transparency);
        }
        else {
            return null;
        }
    }
    // fallback default font object
    final static Font defaultFont = new Font(Font.DIALOG, Font.PLAIN, 12);
    @SuppressWarnings("deprecation")
    public Graphics getGraphics() {
        if (isDisposed()) {
            return null;
        }
        Component target = (Component)getTarget();
        Window window = SunToolkit.getContainingWindow(target);
        if (window != null) {
            Graphics g =
                ((WWindowPeer)window.getPeer()).getTranslucentGraphics();
            // getTranslucentGraphics() returns non-null value for non-opaque windows only
            if (g != null) {
                // Non-opaque windows do not support heavyweight children.
                // Redirect all painting to the Window's Graphics instead.
                // The caller is responsible for calling the
                // WindowPeer.updateWindow() after painting has finished.
                int x = 0, y = 0;
                for (Component c = target; c != window; c = c.getParent()) {
                    x += c.getX();
                    y += c.getY();
                }
                g.translate(x, y);
                g.clipRect(0, 0, target.getWidth(), target.getHeight());
                return g;
            }
        }
        SurfaceData surfaceData = this.surfaceData;
        if (surfaceData != null) {
            /* Fix for bug 4746122. Color and Font shouldn't be null */
            Color bgColor = background;
            if (bgColor == null) {
                bgColor = SystemColor.window;
            }
            Color fgColor = foreground;
            if (fgColor == null) {
                fgColor = SystemColor.windowText;
            }
            Font font = this.font;
            if (font == null) {
                font = defaultFont;
            }
            ScreenUpdateManager mgr =
                ScreenUpdateManager.getInstance();
            return mgr.createGraphics(surfaceData, this, fgColor,
                                      bgColor, font);
        }
        return null;
    }
    public FontMetrics getFontMetrics(Font font) {
        return WFontMetrics.getFontMetrics(font);
    }
    private synchronized native void _dispose();
    /** Tears down the surface data and native window for this peer. */
    protected void disposeImpl() {
        SurfaceData oldData = surfaceData;
        surfaceData = null;
        ScreenUpdateManager.getInstance().dropScreenSurface(oldData);
        // NOTE(review): no null check on oldData here — presumably surfaceData is
        // always non-null when disposeImpl runs (createScreenSurface sets it in the
        // constructor); confirm before relying on re-entrant disposal.
        oldData.invalidate();
        // remove from updater before calling targetDisposedPeer
        WToolkit.targetDisposedPeer(target, this);
        _dispose();
    }
    /** Posts a deferred dispose() onto the event queue. */
    public void disposeLater() {
        postEvent(new InvocationEvent(target, new Runnable() {
            public void run() {
                dispose();
            }
        }));
    }
    public synchronized void setForeground(Color c) {
        foreground = c;
        _setForeground(c.getRGB());
    }
    public synchronized void setBackground(Color c) {
        background = c;
        _setBackground(c.getRGB());
    }
    /**
     * This method is intentionally not synchronized as it is called while
     * holding other locks.
     *
     * @see sun.java2d.d3d.D3DScreenUpdateManager#validate(D3DWindowSurfaceData)
     */
    public Color getBackgroundNoSync() {
        return background;
    }
    public native void _setForeground(int rgb);
    public native void _setBackground(int rgb);
    public synchronized void setFont(Font f) {
        font = f;
        _setFont(f);
    }
    public synchronized native void _setFont(Font f);
    public void updateCursorImmediately() {
        WGlobalCursorManager.getCursorManager().updateCursorImmediately();
    }
    // TODO: consider moving it to KeyboardFocusManagerPeerImpl
    @SuppressWarnings("deprecation")
    public boolean requestFocus(Component lightweightChild, boolean temporary,
                                boolean focusedWindowChangeAllowed, long time,
                                CausedFocusEvent.Cause cause)
    {
        // Lightweight-only transfers can be resolved synchronously without native focus.
        if (WKeyboardFocusManagerPeer.
            processSynchronousLightweightTransfer((Component)target, lightweightChild, temporary,
                                                  focusedWindowChangeAllowed, time))
        {
            return true;
        }
        int result = WKeyboardFocusManagerPeer
            .shouldNativelyFocusHeavyweight((Component)target, lightweightChild,
                                            temporary, focusedWindowChangeAllowed,
                                            time, cause);
        switch (result) {
          case WKeyboardFocusManagerPeer.SNFH_FAILURE:
              return false;
          case WKeyboardFocusManagerPeer.SNFH_SUCCESS_PROCEED:
              if (focusLog.isLoggable(PlatformLogger.Level.FINER)) {
                  focusLog.finer("Proceeding with request to " + lightweightChild + " in " + target);
              }
              Window parentWindow = SunToolkit.getContainingWindow((Component)target);
              if (parentWindow == null) {
                  return rejectFocusRequestHelper("WARNING: Parent window is null");
              }
              WWindowPeer wpeer = (WWindowPeer)parentWindow.getPeer();
              if (wpeer == null) {
                  return rejectFocusRequestHelper("WARNING: Parent window's peer is null");
              }
              boolean res = wpeer.requestWindowFocus(cause);
              if (focusLog.isLoggable(PlatformLogger.Level.FINER)) {
                  focusLog.finer("Requested window focus: " + res);
              }
              // If parent window can be made focused and has been made focused(synchronously)
              // then we can proceed with children, otherwise we retreat.
              if (!(res && parentWindow.isFocused())) {
                  return rejectFocusRequestHelper("Waiting for asynchronous processing of the request");
              }
              return WKeyboardFocusManagerPeer.deliverFocus(lightweightChild,
                                                            (Component)target,
                                                            temporary,
                                                            focusedWindowChangeAllowed,
                                                            time, cause);
          case WKeyboardFocusManagerPeer.SNFH_SUCCESS_HANDLED:
              // Either lightweight or excessive request - all events are generated.
              return true;
        }
        return false;
    }
    /** Logs the rejection reason, clears the pending focus request, and returns false. */
    private boolean rejectFocusRequestHelper(String logMsg) {
        if (focusLog.isLoggable(PlatformLogger.Level.FINER)) {
            focusLog.finer(logMsg);
        }
        WKeyboardFocusManagerPeer.removeLastFocusRequest((Component)target);
        return false;
    }
    public Image createImage(ImageProducer producer) {
        return new ToolkitImage(producer);
    }
    public Image createImage(int width, int height) {
        Win32GraphicsConfig gc =
            (Win32GraphicsConfig)getGraphicsConfiguration();
        return gc.createAcceleratedImage((Component)target, width, height);
    }
    public VolatileImage createVolatileImage(int width, int height) {
        return new SunVolatileImage((Component)target, width, height);
    }
    public boolean prepareImage(Image img, int w, int h, ImageObserver o) {
        return Toolkit.getDefaultToolkit().prepareImage(img, w, h, o);
    }
    public int checkImage(Image img, int w, int h, ImageObserver o) {
        return Toolkit.getDefaultToolkit().checkImage(img, w, h, o);
    }
    // Object overrides
    public String toString() {
        return getClass().getName() + "[" + target + "]";
    }
    // Toolkit & peer internals
    private int updateX1, updateY1, updateX2, updateY2;
    /** Creates the native peer, verifies creation, and sets up the screen surface. */
    WComponentPeer(Component target) {
        this.target = target;
        this.paintArea = new RepaintArea();
        create(getNativeParent());
        // fix for 5088782: check if window object is created successfully
        checkCreation();
        createScreenSurface(false);
        initialize();
        start();  // Initialize enable/disable state, turn on callbacks
    }
    abstract void create(WComponentPeer parent);
    /**
     * Gets the native parent of this peer. We use the term "parent" explicitly,
     * because we override the method in top-level window peer implementations.
     *
     * @return the parent container/owner of this peer.
     */
    WComponentPeer getNativeParent() {
        Container parent = SunToolkit.getNativeContainer((Component) target);
        return (WComponentPeer) WToolkit.targetToPeer(parent);
    }
    /** Throws if the native window was not created (hwnd/pData unset). */
    protected void checkCreation()
    {
        if ((hwnd == 0) || (pData == 0))
        {
            if (createError != null)
            {
                throw createError;
            }
            else
            {
                throw new InternalError("couldn't create component peer");
            }
        }
    }
    synchronized native void start();
    /** Mirrors the target's initial visible/fg/font/enabled/bounds state onto the native peer. */
    void initialize() {
        if (((Component)target).isVisible()) {
            show();  // the wnd starts hidden
        }
        Color fg = ((Component)target).getForeground();
        if (fg != null) {
            setForeground(fg);
        }
        // Set background color in C++, to avoid inheriting a parent's color.
        Font f = ((Component)target).getFont();
        if (f != null) {
            setFont(f);
        }
        if (! ((Component)target).isEnabled()) {
            disable();
        }
        Rectangle r = ((Component)target).getBounds();
        setBounds(r.x, r.y, r.width, r.height, SET_BOUNDS);
    }
    // Callbacks for window-system events to the frame
    // Invoke a update() method call on the target
    void handleRepaint(int x, int y, int w, int h) {
        // Repaints are posted from updateClient now...
    }
    // Invoke a paint() method call on the target, after clearing the
    // damaged area.
    void handleExpose(int x, int y, int w, int h) {
        // Bug ID 4081126 & 4129709 - can't do the clearRect() here,
        // since it interferes with the java thread working in the
        // same window on multi-processor NT machines.
        postPaintIfNecessary(x, y, w, h);
    }
    /* Invoke a paint() method call on the target, without clearing the
     * damaged area. This is normally called by a native control after
     * it has painted itself.
     *
     * NOTE: This is called on the privileged toolkit thread. Do not
     * call directly into user code using this thread!
     */
    public void handlePaint(int x, int y, int w, int h) {
        postPaintIfNecessary(x, y, w, h);
    }
    /** Posts a PaintEvent for the damaged rect unless the target ignores repaints. */
    private void postPaintIfNecessary(int x, int y, int w, int h) {
        if ( !AWTAccessor.getComponentAccessor().getIgnoreRepaint( (Component) target) ) {
            PaintEvent event = PaintEventDispatcher.getPaintEventDispatcher().
                createPaintEvent((Component)target, x, y, w, h);
            if (event != null) {
                postEvent(event);
            }
        }
    }
    /*
     * Post an event. Queue it for execution by the callback thread.
     */
    void postEvent(AWTEvent event) {
        preprocessPostEvent(event);
        WToolkit.postEvent(WToolkit.targetToAppContext(target), event);
    }
    // Hook for subclasses to inspect/adjust events before posting; no-op by default.
    void preprocessPostEvent(AWTEvent event) {}
    // Routines to support deferred window positioning.
    public void beginLayout() {
        // Skip all painting till endLayout
        isLayouting = true;
    }
    public void endLayout() {
        if(!paintArea.isEmpty() && !paintPending &&
            !((Component)target).getIgnoreRepaint()) {
            // if not waiting for native painting repaint damaged area
            postEvent(new PaintEvent((Component)target, PaintEvent.PAINT,
                          new Rectangle()));
        }
        isLayouting = false;
    }
    public native void beginValidate();
    public native void endValidate();
    /**
     * DEPRECATED
     */
    public Dimension preferredSize() {
        return getPreferredSize();
    }
    /**
     * register a DropTarget with this native peer
     */
    public synchronized void addDropTarget(DropTarget dt) {
        // Lazily create the native COM drop target on the first registration.
        if (nDropTargets == 0) {
            nativeDropTargetContext = addNativeDropTarget();
        }
        nDropTargets++;
    }
    /**
     * unregister a DropTarget with this native peer
     */
    public synchronized void removeDropTarget(DropTarget dt) {
        // NOTE(review): assumes add/remove calls are balanced; an unmatched remove
        // would drive nDropTargets negative — confirm callers guarantee pairing.
        nDropTargets--;
        if (nDropTargets == 0) {
            removeNativeDropTarget();
            nativeDropTargetContext = 0;
        }
    }
    /**
     * add the native peer's AwtDropTarget COM object
     * @return reference to AwtDropTarget object
     */
    native long addNativeDropTarget();
    /**
     * remove the native peer's AwtDropTarget COM object
     */
    native void removeNativeDropTarget();
    native boolean nativeHandlesWheelScrolling();
    public boolean handlesWheelScrolling() {
        // should this be cached?
        return nativeHandlesWheelScrolling();
    }
    // Returns true if we are inside begin/endLayout and
    // are waiting for native painting
    public boolean isPaintPending() {
        return paintPending && isLayouting;
    }
    /**
     * The following multibuffering-related methods delegate to our
     * associated GraphicsConfig (Win or WGL) to handle the appropriate
     * native windowing system specific actions.
     */
    @Override
    public void createBuffers(int numBuffers, BufferCapabilities caps)
        throws AWTException
    {
        Win32GraphicsConfig gc =
            (Win32GraphicsConfig)getGraphicsConfiguration();
        gc.assertOperationSupported((Component)target, numBuffers, caps);
        // Re-create the primary surface with the new number of back buffers
        try {
            replaceSurfaceData(numBuffers - 1, caps);
        } catch (InvalidPipeException e) {
            throw new AWTException(e.getMessage());
        }
    }
    @Override
    public void destroyBuffers() {
        replaceSurfaceData(0, null);
    }
    @Override
    public void flip(int x1, int y1, int x2, int y2,
                     BufferCapabilities.FlipContents flipAction)
    {
        // Snapshot the field to guard against concurrent destroyBuffers().
        VolatileImage backBuffer = this.backBuffer;
        if (backBuffer == null) {
            throw new IllegalStateException("Buffers have not been created");
        }
        Win32GraphicsConfig gc =
            (Win32GraphicsConfig)getGraphicsConfiguration();
        gc.flip(this, (Component)target, backBuffer, x1, y1, x2, y2, flipAction);
    }
    @Override
    public synchronized Image getBackBuffer() {
        Image backBuffer = this.backBuffer;
        if (backBuffer == null) {
            throw new IllegalStateException("Buffers have not been created");
        }
        return backBuffer;
    }
    public BufferCapabilities getBackBufferCaps() {
        return backBufferCaps;
    }
    public int getBackBuffersNum() {
        return numBackBuffers;
    }
    /* override and return false on components that DO NOT require
       a clearRect() before painting (i.e. native components) */
    public boolean shouldClearRectBeforePaint() {
        return true;
    }
    native void pSetParent(ComponentPeer newNativeParent);
    /**
     * @see java.awt.peer.ComponentPeer#reparent
     */
    public void reparent(ContainerPeer newNativeParent) {
        pSetParent(newNativeParent);
    }
    /**
     * @see java.awt.peer.ComponentPeer#isReparentSupported
     */
    public boolean isReparentSupported() {
        return true;
    }
    // No-op: Windows peers do not defer bounds operations.
    public void setBoundsOperation(int operation) {
    }
private volatile boolean isAccelCapable = true;
/**
* Returns whether this component is capable of being hw accelerated.
* More specifically, whether rendering to this component or a
* BufferStrategy's back-buffer for this component can be hw accelerated.
*
* Conditions which could prevent hw acceleration include the toplevel
* window containing this component being
* {@link GraphicsDevice.WindowTranslucency#PERPIXEL_TRANSLUCENT
* PERPIXEL_TRANSLUCENT}.
*
* Another condition is if Xor paint mode was detected when rendering
* to an on-screen accelerated surface associated with this peer.
* in this case both on- and off-screen acceleration for this peer is
* disabled.
*
* @return {@code true} if this component is capable of being hw
* accelerated, {@code false} otherwise
* @see GraphicsDevice.WindowTranslucency#PERPIXEL_TRANSLUCENT
*/
public boolean isAccelCapable() {
if (!isAccelCapable ||
!isContainingTopLevelAccelCapable((Component)target))
{
return false;
}
boolean isTranslucent =
SunToolkit.isContainingTopLevelTranslucent((Component)target);
// D3D/OGL and translucent windows interacted poorly in Windows XP;
// these problems are no longer present in Vista
return !isTranslucent || Win32GraphicsEnvironment.isVistaOS();
}
    /**
     * Disables acceleration for this peer. Once called,
     * {@code isAccelCapable()} returns {@code false} for good (the flag is
     * never reset in this class).
     */
    public void disableAcceleration() {
        isAccelCapable = false;
    }
    // Native backend of applyShape(): (lox, loy)-(hix, hiy) is the shape's
    // bounding box; region is null when the shape is purely rectangular
    // (or when the shape is being reset — see applyShape).
    native void setRectangularShape(int lox, int loy, int hix, int hiy,
                                    Region region);
// REMIND: Temp workaround for issues with using HW acceleration
// in the browser on Vista when DWM is enabled.
// @return true if the toplevel container is not an EmbeddedFrame or
// if this EmbeddedFrame is acceleration capable, false otherwise
@SuppressWarnings("deprecation")
private static final boolean isContainingTopLevelAccelCapable(Component c) {
while (c != null && !(c instanceof WEmbeddedFrame)) {
c = c.getParent();
}
if (c == null) {
return true;
}
return ((WEmbeddedFramePeer)c.getPeer()).isAccelCapable();
}
/**
* Applies the shape to the native component window.
* @since 1.7
*/
@SuppressWarnings("deprecation")
public void applyShape(Region shape) {
if (shapeLog.isLoggable(PlatformLogger.Level.FINER)) {
shapeLog.finer("*** INFO: Setting shape: PEER: " + this
+ "; TARGET: " + target
+ "; SHAPE: " + shape);
}
if (shape != null) {
setRectangularShape(shape.getLoX(), shape.getLoY(), shape.getHiX(), shape.getHiY(),
(shape.isRectangular() ? null : shape));
} else {
setRectangularShape(0, 0, 0, 0, null);
}
}
/**
* Lowers this component at the bottom of the above component. If the above parameter
* is null then the method places this component at the top of the Z-order.
*/
public void setZOrder(ComponentPeer above) {
long aboveHWND = (above != null) ? ((WComponentPeer)above).getHWnd() : 0;
setZOrder(aboveHWND);
}
    // Native counterpart of setZOrder(ComponentPeer); 0 means "place on top".
    private native void setZOrder(long above);
    // This peer represents a heavyweight native window, never a lightweight
    // frame peer.
    public boolean isLightweightFramePeer() {
        return false;
    }
}
|
oracle/graal | 38,486 | substratevm/src/com.oracle.svm.hosted/src/com/oracle/svm/hosted/dynamicaccessinference/dataflow/AbstractInterpreter.java | /*
* Copyright (c) 2025, 2025, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package com.oracle.svm.hosted.dynamicaccessinference.dataflow;
import java.util.List;
import java.util.stream.IntStream;
import jdk.graal.compiler.bytecode.Bytecode;
import jdk.graal.compiler.bytecode.BytecodeStream;
import jdk.vm.ci.meta.Constant;
import jdk.vm.ci.meta.ConstantPool;
import jdk.vm.ci.meta.JavaConstant;
import jdk.vm.ci.meta.JavaField;
import jdk.vm.ci.meta.JavaKind;
import jdk.vm.ci.meta.JavaMethod;
import jdk.vm.ci.meta.JavaType;
import jdk.vm.ci.meta.ResolvedJavaMethod;
import jdk.vm.ci.meta.Signature;
import static jdk.graal.compiler.bytecode.Bytecodes.AALOAD;
import static jdk.graal.compiler.bytecode.Bytecodes.AASTORE;
import static jdk.graal.compiler.bytecode.Bytecodes.ACONST_NULL;
import static jdk.graal.compiler.bytecode.Bytecodes.ALOAD;
import static jdk.graal.compiler.bytecode.Bytecodes.ALOAD_0;
import static jdk.graal.compiler.bytecode.Bytecodes.ALOAD_1;
import static jdk.graal.compiler.bytecode.Bytecodes.ALOAD_2;
import static jdk.graal.compiler.bytecode.Bytecodes.ALOAD_3;
import static jdk.graal.compiler.bytecode.Bytecodes.ANEWARRAY;
import static jdk.graal.compiler.bytecode.Bytecodes.ARETURN;
import static jdk.graal.compiler.bytecode.Bytecodes.ARRAYLENGTH;
import static jdk.graal.compiler.bytecode.Bytecodes.ASTORE;
import static jdk.graal.compiler.bytecode.Bytecodes.ASTORE_0;
import static jdk.graal.compiler.bytecode.Bytecodes.ASTORE_1;
import static jdk.graal.compiler.bytecode.Bytecodes.ASTORE_2;
import static jdk.graal.compiler.bytecode.Bytecodes.ASTORE_3;
import static jdk.graal.compiler.bytecode.Bytecodes.ATHROW;
import static jdk.graal.compiler.bytecode.Bytecodes.BALOAD;
import static jdk.graal.compiler.bytecode.Bytecodes.BASTORE;
import static jdk.graal.compiler.bytecode.Bytecodes.BIPUSH;
import static jdk.graal.compiler.bytecode.Bytecodes.BREAKPOINT;
import static jdk.graal.compiler.bytecode.Bytecodes.CALOAD;
import static jdk.graal.compiler.bytecode.Bytecodes.CASTORE;
import static jdk.graal.compiler.bytecode.Bytecodes.CHECKCAST;
import static jdk.graal.compiler.bytecode.Bytecodes.D2F;
import static jdk.graal.compiler.bytecode.Bytecodes.D2I;
import static jdk.graal.compiler.bytecode.Bytecodes.D2L;
import static jdk.graal.compiler.bytecode.Bytecodes.DADD;
import static jdk.graal.compiler.bytecode.Bytecodes.DALOAD;
import static jdk.graal.compiler.bytecode.Bytecodes.DASTORE;
import static jdk.graal.compiler.bytecode.Bytecodes.DCMPG;
import static jdk.graal.compiler.bytecode.Bytecodes.DCMPL;
import static jdk.graal.compiler.bytecode.Bytecodes.DCONST_0;
import static jdk.graal.compiler.bytecode.Bytecodes.DCONST_1;
import static jdk.graal.compiler.bytecode.Bytecodes.DDIV;
import static jdk.graal.compiler.bytecode.Bytecodes.DLOAD;
import static jdk.graal.compiler.bytecode.Bytecodes.DLOAD_0;
import static jdk.graal.compiler.bytecode.Bytecodes.DLOAD_1;
import static jdk.graal.compiler.bytecode.Bytecodes.DLOAD_2;
import static jdk.graal.compiler.bytecode.Bytecodes.DLOAD_3;
import static jdk.graal.compiler.bytecode.Bytecodes.DMUL;
import static jdk.graal.compiler.bytecode.Bytecodes.DNEG;
import static jdk.graal.compiler.bytecode.Bytecodes.DREM;
import static jdk.graal.compiler.bytecode.Bytecodes.DRETURN;
import static jdk.graal.compiler.bytecode.Bytecodes.DSTORE;
import static jdk.graal.compiler.bytecode.Bytecodes.DSTORE_0;
import static jdk.graal.compiler.bytecode.Bytecodes.DSTORE_1;
import static jdk.graal.compiler.bytecode.Bytecodes.DSTORE_2;
import static jdk.graal.compiler.bytecode.Bytecodes.DSTORE_3;
import static jdk.graal.compiler.bytecode.Bytecodes.DSUB;
import static jdk.graal.compiler.bytecode.Bytecodes.DUP;
import static jdk.graal.compiler.bytecode.Bytecodes.DUP2;
import static jdk.graal.compiler.bytecode.Bytecodes.DUP2_X1;
import static jdk.graal.compiler.bytecode.Bytecodes.DUP2_X2;
import static jdk.graal.compiler.bytecode.Bytecodes.DUP_X1;
import static jdk.graal.compiler.bytecode.Bytecodes.DUP_X2;
import static jdk.graal.compiler.bytecode.Bytecodes.F2D;
import static jdk.graal.compiler.bytecode.Bytecodes.F2I;
import static jdk.graal.compiler.bytecode.Bytecodes.F2L;
import static jdk.graal.compiler.bytecode.Bytecodes.FADD;
import static jdk.graal.compiler.bytecode.Bytecodes.FALOAD;
import static jdk.graal.compiler.bytecode.Bytecodes.FASTORE;
import static jdk.graal.compiler.bytecode.Bytecodes.FCMPG;
import static jdk.graal.compiler.bytecode.Bytecodes.FCMPL;
import static jdk.graal.compiler.bytecode.Bytecodes.FCONST_0;
import static jdk.graal.compiler.bytecode.Bytecodes.FCONST_1;
import static jdk.graal.compiler.bytecode.Bytecodes.FCONST_2;
import static jdk.graal.compiler.bytecode.Bytecodes.FDIV;
import static jdk.graal.compiler.bytecode.Bytecodes.FLOAD;
import static jdk.graal.compiler.bytecode.Bytecodes.FLOAD_0;
import static jdk.graal.compiler.bytecode.Bytecodes.FLOAD_1;
import static jdk.graal.compiler.bytecode.Bytecodes.FLOAD_2;
import static jdk.graal.compiler.bytecode.Bytecodes.FLOAD_3;
import static jdk.graal.compiler.bytecode.Bytecodes.FMUL;
import static jdk.graal.compiler.bytecode.Bytecodes.FNEG;
import static jdk.graal.compiler.bytecode.Bytecodes.FREM;
import static jdk.graal.compiler.bytecode.Bytecodes.FRETURN;
import static jdk.graal.compiler.bytecode.Bytecodes.FSTORE;
import static jdk.graal.compiler.bytecode.Bytecodes.FSTORE_0;
import static jdk.graal.compiler.bytecode.Bytecodes.FSTORE_1;
import static jdk.graal.compiler.bytecode.Bytecodes.FSTORE_2;
import static jdk.graal.compiler.bytecode.Bytecodes.FSTORE_3;
import static jdk.graal.compiler.bytecode.Bytecodes.FSUB;
import static jdk.graal.compiler.bytecode.Bytecodes.GETFIELD;
import static jdk.graal.compiler.bytecode.Bytecodes.GETSTATIC;
import static jdk.graal.compiler.bytecode.Bytecodes.GOTO;
import static jdk.graal.compiler.bytecode.Bytecodes.GOTO_W;
import static jdk.graal.compiler.bytecode.Bytecodes.I2B;
import static jdk.graal.compiler.bytecode.Bytecodes.I2C;
import static jdk.graal.compiler.bytecode.Bytecodes.I2D;
import static jdk.graal.compiler.bytecode.Bytecodes.I2F;
import static jdk.graal.compiler.bytecode.Bytecodes.I2L;
import static jdk.graal.compiler.bytecode.Bytecodes.I2S;
import static jdk.graal.compiler.bytecode.Bytecodes.IADD;
import static jdk.graal.compiler.bytecode.Bytecodes.IALOAD;
import static jdk.graal.compiler.bytecode.Bytecodes.IAND;
import static jdk.graal.compiler.bytecode.Bytecodes.IASTORE;
import static jdk.graal.compiler.bytecode.Bytecodes.ICONST_0;
import static jdk.graal.compiler.bytecode.Bytecodes.ICONST_1;
import static jdk.graal.compiler.bytecode.Bytecodes.ICONST_2;
import static jdk.graal.compiler.bytecode.Bytecodes.ICONST_3;
import static jdk.graal.compiler.bytecode.Bytecodes.ICONST_4;
import static jdk.graal.compiler.bytecode.Bytecodes.ICONST_5;
import static jdk.graal.compiler.bytecode.Bytecodes.ICONST_M1;
import static jdk.graal.compiler.bytecode.Bytecodes.IDIV;
import static jdk.graal.compiler.bytecode.Bytecodes.IFEQ;
import static jdk.graal.compiler.bytecode.Bytecodes.IFGE;
import static jdk.graal.compiler.bytecode.Bytecodes.IFGT;
import static jdk.graal.compiler.bytecode.Bytecodes.IFLE;
import static jdk.graal.compiler.bytecode.Bytecodes.IFLT;
import static jdk.graal.compiler.bytecode.Bytecodes.IFNE;
import static jdk.graal.compiler.bytecode.Bytecodes.IFNONNULL;
import static jdk.graal.compiler.bytecode.Bytecodes.IFNULL;
import static jdk.graal.compiler.bytecode.Bytecodes.IF_ACMPEQ;
import static jdk.graal.compiler.bytecode.Bytecodes.IF_ACMPNE;
import static jdk.graal.compiler.bytecode.Bytecodes.IF_ICMPEQ;
import static jdk.graal.compiler.bytecode.Bytecodes.IF_ICMPGE;
import static jdk.graal.compiler.bytecode.Bytecodes.IF_ICMPGT;
import static jdk.graal.compiler.bytecode.Bytecodes.IF_ICMPLE;
import static jdk.graal.compiler.bytecode.Bytecodes.IF_ICMPLT;
import static jdk.graal.compiler.bytecode.Bytecodes.IF_ICMPNE;
import static jdk.graal.compiler.bytecode.Bytecodes.IINC;
import static jdk.graal.compiler.bytecode.Bytecodes.ILOAD;
import static jdk.graal.compiler.bytecode.Bytecodes.ILOAD_0;
import static jdk.graal.compiler.bytecode.Bytecodes.ILOAD_1;
import static jdk.graal.compiler.bytecode.Bytecodes.ILOAD_2;
import static jdk.graal.compiler.bytecode.Bytecodes.ILOAD_3;
import static jdk.graal.compiler.bytecode.Bytecodes.IMUL;
import static jdk.graal.compiler.bytecode.Bytecodes.INEG;
import static jdk.graal.compiler.bytecode.Bytecodes.INSTANCEOF;
import static jdk.graal.compiler.bytecode.Bytecodes.INVOKEDYNAMIC;
import static jdk.graal.compiler.bytecode.Bytecodes.INVOKEINTERFACE;
import static jdk.graal.compiler.bytecode.Bytecodes.INVOKESPECIAL;
import static jdk.graal.compiler.bytecode.Bytecodes.INVOKESTATIC;
import static jdk.graal.compiler.bytecode.Bytecodes.INVOKEVIRTUAL;
import static jdk.graal.compiler.bytecode.Bytecodes.IOR;
import static jdk.graal.compiler.bytecode.Bytecodes.IREM;
import static jdk.graal.compiler.bytecode.Bytecodes.IRETURN;
import static jdk.graal.compiler.bytecode.Bytecodes.ISHL;
import static jdk.graal.compiler.bytecode.Bytecodes.ISHR;
import static jdk.graal.compiler.bytecode.Bytecodes.ISTORE;
import static jdk.graal.compiler.bytecode.Bytecodes.ISTORE_0;
import static jdk.graal.compiler.bytecode.Bytecodes.ISTORE_1;
import static jdk.graal.compiler.bytecode.Bytecodes.ISTORE_2;
import static jdk.graal.compiler.bytecode.Bytecodes.ISTORE_3;
import static jdk.graal.compiler.bytecode.Bytecodes.ISUB;
import static jdk.graal.compiler.bytecode.Bytecodes.IUSHR;
import static jdk.graal.compiler.bytecode.Bytecodes.IXOR;
import static jdk.graal.compiler.bytecode.Bytecodes.JSR;
import static jdk.graal.compiler.bytecode.Bytecodes.JSR_W;
import static jdk.graal.compiler.bytecode.Bytecodes.L2D;
import static jdk.graal.compiler.bytecode.Bytecodes.L2F;
import static jdk.graal.compiler.bytecode.Bytecodes.L2I;
import static jdk.graal.compiler.bytecode.Bytecodes.LADD;
import static jdk.graal.compiler.bytecode.Bytecodes.LALOAD;
import static jdk.graal.compiler.bytecode.Bytecodes.LAND;
import static jdk.graal.compiler.bytecode.Bytecodes.LASTORE;
import static jdk.graal.compiler.bytecode.Bytecodes.LCMP;
import static jdk.graal.compiler.bytecode.Bytecodes.LCONST_0;
import static jdk.graal.compiler.bytecode.Bytecodes.LCONST_1;
import static jdk.graal.compiler.bytecode.Bytecodes.LDC;
import static jdk.graal.compiler.bytecode.Bytecodes.LDC2_W;
import static jdk.graal.compiler.bytecode.Bytecodes.LDC_W;
import static jdk.graal.compiler.bytecode.Bytecodes.LDIV;
import static jdk.graal.compiler.bytecode.Bytecodes.LLOAD;
import static jdk.graal.compiler.bytecode.Bytecodes.LLOAD_0;
import static jdk.graal.compiler.bytecode.Bytecodes.LLOAD_1;
import static jdk.graal.compiler.bytecode.Bytecodes.LLOAD_2;
import static jdk.graal.compiler.bytecode.Bytecodes.LLOAD_3;
import static jdk.graal.compiler.bytecode.Bytecodes.LMUL;
import static jdk.graal.compiler.bytecode.Bytecodes.LNEG;
import static jdk.graal.compiler.bytecode.Bytecodes.LOOKUPSWITCH;
import static jdk.graal.compiler.bytecode.Bytecodes.LOR;
import static jdk.graal.compiler.bytecode.Bytecodes.LREM;
import static jdk.graal.compiler.bytecode.Bytecodes.LRETURN;
import static jdk.graal.compiler.bytecode.Bytecodes.LSHL;
import static jdk.graal.compiler.bytecode.Bytecodes.LSHR;
import static jdk.graal.compiler.bytecode.Bytecodes.LSTORE;
import static jdk.graal.compiler.bytecode.Bytecodes.LSTORE_0;
import static jdk.graal.compiler.bytecode.Bytecodes.LSTORE_1;
import static jdk.graal.compiler.bytecode.Bytecodes.LSTORE_2;
import static jdk.graal.compiler.bytecode.Bytecodes.LSTORE_3;
import static jdk.graal.compiler.bytecode.Bytecodes.LSUB;
import static jdk.graal.compiler.bytecode.Bytecodes.LUSHR;
import static jdk.graal.compiler.bytecode.Bytecodes.LXOR;
import static jdk.graal.compiler.bytecode.Bytecodes.MONITORENTER;
import static jdk.graal.compiler.bytecode.Bytecodes.MONITOREXIT;
import static jdk.graal.compiler.bytecode.Bytecodes.MULTIANEWARRAY;
import static jdk.graal.compiler.bytecode.Bytecodes.NEW;
import static jdk.graal.compiler.bytecode.Bytecodes.NEWARRAY;
import static jdk.graal.compiler.bytecode.Bytecodes.NOP;
import static jdk.graal.compiler.bytecode.Bytecodes.POP;
import static jdk.graal.compiler.bytecode.Bytecodes.POP2;
import static jdk.graal.compiler.bytecode.Bytecodes.PUTFIELD;
import static jdk.graal.compiler.bytecode.Bytecodes.PUTSTATIC;
import static jdk.graal.compiler.bytecode.Bytecodes.RET;
import static jdk.graal.compiler.bytecode.Bytecodes.RETURN;
import static jdk.graal.compiler.bytecode.Bytecodes.SALOAD;
import static jdk.graal.compiler.bytecode.Bytecodes.SASTORE;
import static jdk.graal.compiler.bytecode.Bytecodes.SIPUSH;
import static jdk.graal.compiler.bytecode.Bytecodes.SWAP;
import static jdk.graal.compiler.bytecode.Bytecodes.TABLESWITCH;
/**
* A {@link ForwardDataFlowAnalyzer} where the data-flow state is represented by an abstract
* bytecode execution frame. This analyzer assumes that the provided bytecode is valid and verified
* by a bytecode verifier.
* <p>
* The interpreter records {@link AbstractFrame abstract frames} for each instruction in the
* bytecode sequence of a method. Each abstract frame represents the abstract state before the
* would-be execution of the corresponding bytecode instruction.
* <p>
* JSR and RET opcodes are currently unsupported, and a {@link DataFlowAnalysisException} will be
* thrown in case the analyzed method contains them.
*
* @param <T> The abstract representation of values pushed and popped from the operand stack and
* stored in the local variable table.
*/
public abstract class AbstractInterpreter<T> extends ForwardDataFlowAnalyzer<AbstractFrame<T>> {
@Override
protected AbstractFrame<T> createInitialState(ResolvedJavaMethod method) {
/*
* The initial state has an empty operand stack and local variable table slots containing
* values corresponding to the method arguments and receiver (if non-static).
*/
AbstractFrame<T> state = new AbstractFrame<>(method);
int variableIndex = 0;
if (method.hasReceiver()) {
state.localVariableTable.put(defaultValue(), variableIndex, false);
variableIndex++;
}
Signature signature = method.getSignature();
int numOfParameters = signature.getParameterCount(false);
for (int i = 0; i < numOfParameters; i++) {
boolean parameterNeedsTwoSlots = signature.getParameterKind(i).needsTwoSlots();
state.localVariableTable.put(defaultValue(), variableIndex, parameterNeedsTwoSlots);
variableIndex += parameterNeedsTwoSlots ? 2 : 1;
}
return state;
}
    /**
     * Builds the frame on entry to an exception handler: locals are carried
     * over from {@code inState}, the operand stack is emptied, and a single
     * slot holding the (over-saturated) caught exception is pushed.
     */
    @Override
    protected AbstractFrame<T> createExceptionState(AbstractFrame<T> inState, List<JavaType> exceptionTypes) {
        /*
         * The initial frame state in exception handlers is created by clearing the operand stack
         * and placing the caught exception object on it.
         */
        AbstractFrame<T> exceptionState = new AbstractFrame<>(inState);
        exceptionState.operandStack.clear();
        exceptionState.operandStack.push(defaultValue(), false);
        return exceptionState;
    }
    /** Deep-copies the frame via {@link AbstractFrame}'s copy constructor. */
    @Override
    protected AbstractFrame<T> copyState(AbstractFrame<T> state) {
        return new AbstractFrame<>(state);
    }
    /**
     * Merges two frames from divergent control-flow paths, combining matching
     * stack/local entries with the subclass-provided {@link #merge} hook.
     */
    @Override
    protected AbstractFrame<T> mergeStates(AbstractFrame<T> left, AbstractFrame<T> right) {
        return left.merge(right, this::merge);
    }
    /**
     * Abstractly executes one bytecode instruction on a copy of
     * {@code inState} and returns the resulting frame state.
     * <p>
     * Plain stack-shape effects (pops/pushes) are modeled inline with
     * {@link #defaultValue()} placeholders; instructions whose values the
     * analysis tracks more precisely (constants, local-variable accesses,
     * invokes, field loads, casts, array stores, ...) are delegated to the
     * {@code handle*} helpers, which call this class's abstract hooks.
     * JSR/RET and unrecognized opcodes raise a
     * {@link DataFlowAnalysisException}.
     */
    @Override
    @SuppressWarnings("DuplicateBranchesInSwitch")
    protected AbstractFrame<T> processInstruction(AbstractFrame<T> inState, BytecodeStream stream, Bytecode code) {
        AbstractFrame<T> outState = copyState(inState);
        var stack = outState.operandStack;
        var variables = outState.localVariableTable;
        int bci = stream.currentBCI();
        int opcode = stream.currentBC();
        InstructionContext<T> context = new InstructionContext<>(code.getMethod(), bci, opcode, outState);
        ConstantPool cp = code.getConstantPool();
        // @formatter:off
        // Checkstyle: stop
        switch (opcode) {
            case NOP            : break;
            case ACONST_NULL    : handleConstant(context, JavaConstant.NULL_POINTER, false); break;
            case ICONST_M1      : handleConstant(context, JavaConstant.forInt(-1), false); break;
            case ICONST_0       : // fall through
            case ICONST_1       : // fall through
            case ICONST_2       : // fall through
            case ICONST_3       : // fall through
            case ICONST_4       : // fall through
            case ICONST_5       : handleConstant(context, JavaConstant.forInt(opcode - ICONST_0), false); break;
            case LCONST_0       : // fall through
            case LCONST_1       : handleConstant(context, JavaConstant.forLong(opcode - LCONST_0), true); break;
            case FCONST_0       : // fall through
            case FCONST_1       : // fall through
            case FCONST_2       : handleConstant(context, JavaConstant.forFloat(opcode - FCONST_0), false); break;
            case DCONST_0       : // fall through
            case DCONST_1       : handleConstant(context, JavaConstant.forDouble(opcode - DCONST_0), true); break;
            case BIPUSH         : handleConstant(context, JavaConstant.forByte(stream.readByte()), false); break;
            case SIPUSH         : handleConstant(context, JavaConstant.forShort(stream.readShort()), false); break;
            case LDC            : // fall through
            case LDC_W          : handleConstant(context, lookupConstant(cp, stream.readCPI(), opcode), false); break;
            case LDC2_W         : handleConstant(context, lookupConstant(cp, stream.readCPI(), opcode), true); break;
            case ILOAD          : handleVariableLoad(context, stream.readLocalIndex(), false); break;
            case LLOAD          : handleVariableLoad(context, stream.readLocalIndex(), true); break;
            case FLOAD          : handleVariableLoad(context, stream.readLocalIndex(), false); break;
            case DLOAD          : handleVariableLoad(context, stream.readLocalIndex(), true); break;
            case ALOAD          : handleVariableLoad(context, stream.readLocalIndex(), false); break;
            case ILOAD_0        : // fall through
            case ILOAD_1        : // fall through
            case ILOAD_2        : // fall through
            case ILOAD_3        : handleVariableLoad(context, opcode - ILOAD_0, false); break;
            case LLOAD_0        : // fall through
            case LLOAD_1        : // fall through
            case LLOAD_2        : // fall through
            case LLOAD_3        : handleVariableLoad(context, opcode - LLOAD_0, true); break;
            case FLOAD_0        : // fall through
            case FLOAD_1        : // fall through
            case FLOAD_2        : // fall through
            case FLOAD_3        : handleVariableLoad(context, opcode - FLOAD_0, false); break;
            case DLOAD_0        : // fall through
            case DLOAD_1        : // fall through
            case DLOAD_2        : // fall through
            case DLOAD_3        : handleVariableLoad(context, opcode - DLOAD_0, true); break;
            case ALOAD_0        : // fall through
            case ALOAD_1        : // fall through
            case ALOAD_2        : // fall through
            case ALOAD_3        : handleVariableLoad(context, opcode - ALOAD_0, false); break;
            case IALOAD         : stack.pop(); stack.pop(); stack.push(defaultValue(), false); break;
            case LALOAD         : stack.pop(); stack.pop(); stack.push(defaultValue(), true); break;
            case FALOAD         : stack.pop(); stack.pop(); stack.push(defaultValue(), false); break;
            case DALOAD         : stack.pop(); stack.pop(); stack.push(defaultValue(), true); break;
            case AALOAD         : // fall through
            case BALOAD         : // fall through
            case CALOAD         : // fall through
            case SALOAD         : stack.pop(); stack.pop(); stack.push(defaultValue(), false); break;
            case ISTORE         : handleVariableStore(context, stream.readLocalIndex(), false); break;
            case LSTORE         : handleVariableStore(context, stream.readLocalIndex(), true); break;
            case FSTORE         : handleVariableStore(context, stream.readLocalIndex(), false); break;
            case DSTORE         : handleVariableStore(context, stream.readLocalIndex(), true); break;
            case ASTORE         : handleVariableStore(context, stream.readLocalIndex(), false); break;
            case ISTORE_0       : // fall through
            case ISTORE_1       : // fall through
            case ISTORE_2       : // fall through
            case ISTORE_3       : handleVariableStore(context, opcode - ISTORE_0, false); break;
            case LSTORE_0       : // fall through
            case LSTORE_1       : // fall through
            case LSTORE_2       : // fall through
            case LSTORE_3       : handleVariableStore(context, opcode - LSTORE_0, true); break;
            case FSTORE_0       : // fall through
            case FSTORE_1       : // fall through
            case FSTORE_2       : // fall through
            case FSTORE_3       : handleVariableStore(context, opcode - FSTORE_0, false); break;
            case DSTORE_0       : // fall through
            case DSTORE_1       : // fall through
            case DSTORE_2       : // fall through
            case DSTORE_3       : handleVariableStore(context, opcode - DSTORE_0, true); break;
            case ASTORE_0       : // fall through
            case ASTORE_1       : // fall through
            case ASTORE_2       : // fall through
            case ASTORE_3       : handleVariableStore(context, opcode - ASTORE_0, false); break;
            case IASTORE        : // fall through
            case LASTORE        : // fall through
            case FASTORE        : // fall through
            case DASTORE        : // fall through
            case AASTORE        : // fall through
            case BASTORE        : // fall through
            case CASTORE        : // fall through
            case SASTORE        : handleArrayElementStore(context); break;
            case POP            : stack.applyPop(); break;
            case POP2           : stack.applyPop2(); break;
            case DUP            : stack.applyDup(); break;
            case DUP_X1         : stack.applyDupX1(); break;
            case DUP_X2         : stack.applyDupX2(); break;
            case DUP2           : stack.applyDup2(); break;
            case DUP2_X1        : stack.applyDup2X1(); break;
            case DUP2_X2        : stack.applyDup2X2(); break;
            case SWAP           : stack.applySwap(); break;
            case IADD           : // fall through
            case ISUB           : // fall through
            case IMUL           : // fall through
            case IDIV           : // fall through
            case IREM           : stack.pop(); stack.pop(); stack.push(defaultValue(), false); break;
            case LADD           : // fall through
            case LSUB           : // fall through
            case LMUL           : // fall through
            case LDIV           : // fall through
            case LREM           : stack.pop(); stack.pop(); stack.push(defaultValue(), true); break;
            case FADD           : // fall through
            case FSUB           : // fall through
            case FMUL           : // fall through
            case FDIV           : // fall through
            case FREM           : stack.pop(); stack.pop(); stack.push(defaultValue(), false); break;
            case DADD           : // fall through
            case DSUB           : // fall through
            case DMUL           : // fall through
            case DDIV           : // fall through
            case DREM           : stack.pop(); stack.pop(); stack.push(defaultValue(), true); break;
            case INEG           : stack.pop(); stack.push(defaultValue(), false); break;
            case LNEG           : stack.pop(); stack.push(defaultValue(), true); break;
            case FNEG           : stack.pop(); stack.push(defaultValue(), false); break;
            case DNEG           : stack.pop(); stack.push(defaultValue(), true); break;
            case ISHL           : // fall through
            case ISHR           : // fall through
            case IUSHR          : // fall through
            case IAND           : // fall through
            case IOR            : // fall through
            case IXOR           : stack.pop(); stack.pop(); stack.push(defaultValue(), false); break;
            case LSHL           : // fall through
            case LSHR           : // fall through
            case LUSHR          : // fall through
            case LAND           : // fall through
            case LOR            : // fall through
            case LXOR           : stack.pop(); stack.pop(); stack.push(defaultValue(), true); break;
            case IINC           : variables.put(defaultValue(), stream.readLocalIndex(), false); break;
            case I2F            : stack.pop(); stack.push(defaultValue(), false); break;
            case I2D            : stack.pop(); stack.push(defaultValue(), true); break;
            case L2F            : stack.pop(); stack.push(defaultValue(), false); break;
            case L2D            : stack.pop(); stack.push(defaultValue(), true); break;
            case F2I            : stack.pop(); stack.push(defaultValue(), false); break;
            case F2L            : // fall through
            case F2D            : stack.pop(); stack.push(defaultValue(), true); break;
            case D2I            : stack.pop(); stack.push(defaultValue(), false); break;
            case D2L            : stack.pop(); stack.push(defaultValue(), true); break;
            case D2F            : // fall through
            case L2I            : stack.pop(); stack.push(defaultValue(), false); break;
            case I2L            : stack.pop(); stack.push(defaultValue(), true); break;
            case I2B            : // fall through
            case I2S            : // fall through
            case I2C            : stack.pop(); stack.push(defaultValue(), false); break;
            case LCMP           : // fall through
            case FCMPL          : // fall through
            case FCMPG          : // fall through
            case DCMPL          : // fall through
            case DCMPG          : stack.pop(); stack.pop(); stack.push(defaultValue(), false); break;
            case IFEQ           : // fall through
            case IFNE           : // fall through
            case IFLT           : // fall through
            case IFGE           : // fall through
            case IFGT           : // fall through
            case IFLE           : stack.pop(); break;
            case IF_ICMPEQ      : // fall through
            case IF_ICMPNE      : // fall through
            case IF_ICMPLT      : // fall through
            case IF_ICMPGE      : // fall through
            case IF_ICMPGT      : // fall through
            case IF_ICMPLE      : // fall through
            case IF_ACMPEQ      : // fall through
            case IF_ACMPNE      : stack.pop(); stack.pop(); break;
            case GOTO           : break;
            case JSR            : // fall through
            case RET            : throw new DataFlowAnalysisException("Unsupported opcode " + opcode);
            case TABLESWITCH    : // fall through
            case LOOKUPSWITCH   : stack.pop(); break;
            case IRETURN        : // fall through
            case LRETURN        : // fall through
            case FRETURN        : // fall through
            case DRETURN        : // fall through
            case ARETURN        : stack.pop(); break;
            case RETURN         : break;
            case GETSTATIC      : handleStaticFieldLoad(context, lookupField(cp, stream.readCPI(), opcode, code.getMethod())); break;
            case PUTSTATIC      : onValueEscape(context, stack.pop()); break;
            case GETFIELD       : handleFieldLoad(context, lookupField(cp, stream.readCPI(), opcode, code.getMethod())); break;
            case PUTFIELD       : onValueEscape(context, stack.pop()); stack.pop(); break;
            case INVOKEVIRTUAL  : handleInvoke(context, lookupMethod(cp, stream.readCPI(), opcode, code.getMethod()), lookupAppendix(cp, stream.readCPI(), opcode)); break;
            case INVOKESPECIAL  : // fall through
            case INVOKESTATIC   : // fall through
            case INVOKEINTERFACE: handleInvoke(context, lookupMethod(cp, stream.readCPI(), opcode, code.getMethod()), null); break;
            case INVOKEDYNAMIC  : handleInvoke(context, lookupMethod(cp, stream.readCPI4(), opcode, code.getMethod()), lookupAppendix(cp, stream.readCPI4(), opcode)); break;
            case NEW            : stack.push(defaultValue(), false); break;
            case NEWARRAY       : stack.pop(); stack.push(defaultValue(), false); break;
            case ANEWARRAY      : handleNewObjectArray(context, lookupType(cp, stream.readCPI(), opcode)); break;
            case ARRAYLENGTH    : stack.pop(); stack.push(defaultValue(), false); break;
            case ATHROW         : stack.pop(); break;
            case CHECKCAST      : handleCastCheck(context, lookupType(cp, stream.readCPI(), opcode)); break;
            case INSTANCEOF     : stack.pop(); stack.push(defaultValue(), false); break;
            case MONITORENTER   : // fall through
            case MONITOREXIT    : stack.pop(); break;
            case MULTIANEWARRAY : popOperands(stack, stream.readUByte(bci + 3)); stack.push(defaultValue(), false); break;
            case IFNULL         : // fall through
            case IFNONNULL      : stack.pop(); break;
            case GOTO_W         : break;
            case JSR_W          : // fall through
            case BREAKPOINT     : // fall through
            default             : throw new DataFlowAnalysisException("Unsupported opcode " + opcode);
        }
        // @formatter:on
        // Checkstyle: resume
        return outState;
    }
    /**
     * Execution context of a bytecode instruction.
     *
     * @param <T> The abstract value representation used by the enclosing
     *            interpreter.
     * @param method The method to which this instruction belongs.
     * @param bci The bytecode index of this instruction.
     * @param opcode The opcode of this instruction.
     * @param state The abstract state of the bytecode frame right before the execution of this
     *            instruction (its input state). Any modifications of the {@code state} will be
     *            reflected on the input state of successor instructions.
     */
    protected record InstructionContext<T>(ResolvedJavaMethod method, int bci, int opcode, AbstractFrame<T> state) {
    }
    /**
     * @return The default abstract value. This value usually represents an over-saturated value
     *         from which no useful information can be inferred.
     */
    protected abstract T defaultValue();

    /**
     * Merge two matching operand stack or local variable table values from divergent control-flow
     * paths.
     *
     * @return The merged value.
     */
    protected abstract T merge(T left, T right);

    /**
     * Models loading a constant (LDC family and the immediate-constant
     * opcodes); {@code constant} is {@code null} for unresolved dynamic
     * constants (see {@code handleConstant}).
     */
    protected abstract T loadConstant(InstructionContext<T> context, Constant constant);

    /** Models loading a type reference pushed by an LDC of a class constant. */
    protected abstract T loadType(InstructionContext<T> context, JavaType type);

    /**
     * Filters the value read from the local variable table before it is
     * pushed onto the operand stack (xLOAD opcodes).
     */
    protected abstract T loadVariable(InstructionContext<T> context, T value);

    /** Models GETSTATIC; the result is pushed onto the operand stack. */
    protected abstract T loadStaticField(InstructionContext<T> context, JavaField field);

    /**
     * Filters the popped value before it is written into the local variable
     * table (xSTORE opcodes).
     */
    protected abstract T storeVariable(InstructionContext<T> context, T value);

    /**
     * Models xASTORE; presumably invoked by {@code handleArrayElementStore},
     * which is defined outside this excerpt — confirm against the full file.
     */
    protected abstract void storeArrayElement(InstructionContext<T> context, T array, T index, T value);

    /**
     * Models an invocation with a non-void return; the returned abstract
     * value is pushed onto the operand stack. {@code receiver} is
     * {@code null} for receiverless calls; {@code operands} are in argument
     * order.
     */
    protected abstract T invokeNonVoidMethod(InstructionContext<T> context, JavaMethod method, T receiver, List<T> operands);

    /** Models ANEWARRAY (handler body defined outside this excerpt). */
    protected abstract T newObjectArray(InstructionContext<T> context, JavaType type, T size);

    /** Models CHECKCAST (handler body defined outside this excerpt). */
    protected abstract T checkCast(InstructionContext<T> context, JavaType type, T object);

    /**
     * This method is invoked whenever a {@code value} escapes {@link AbstractFrame}, be it by
     * storing it in an array, a field, or using it as a method argument.
     */
    protected abstract void onValueEscape(InstructionContext<T> context, T value);

    // Constant-pool lookups, delegated to the subclass so it can control
    // resolution behavior.
    protected abstract Object lookupConstant(ConstantPool constantPool, int cpi, int opcode);

    protected abstract JavaType lookupType(ConstantPool constantPool, int cpi, int opcode);

    protected abstract JavaMethod lookupMethod(ConstantPool constantPool, int cpi, int opcode, ResolvedJavaMethod caller);

    protected abstract JavaConstant lookupAppendix(ConstantPool constantPool, int cpi, int opcode);

    protected abstract JavaField lookupField(ConstantPool constantPool, int cpi, int opcode, ResolvedJavaMethod caller);
    /**
     * Pops {@code n} values from the operand stack and returns them in push
     * order (bottom-most first), i.e. in argument order for a call.
     */
    private List<T> popOperands(AbstractFrame.OperandStack<T> stack, int n) {
        return IntStream.range(0, n).mapToObj(_ -> stack.pop()).toList().reversed();
    }
private void handleConstant(InstructionContext<T> context, Object value, boolean needsTwoSlots) {
var stack = context.state.operandStack;
if (value == null) {
/*
* The constant is an unresolved JVM_CONSTANT_Dynamic, JVM_CONSTANT_MethodHandle or
* JVM_CONSTANT_MethodType.
*/
stack.push(loadConstant(context, null), needsTwoSlots);
} else {
if (value instanceof Constant constant) {
stack.push(loadConstant(context, constant), needsTwoSlots);
} else if (value instanceof JavaType type) {
assert !needsTwoSlots : "Type references occupy a single stack slot";
stack.push(loadType(context, type), false);
}
}
}
/** Models {@code *load}: reads local slot {@code index} and pushes the observed value. */
private void handleVariableLoad(InstructionContext<T> context, int index, boolean needsTwoSlots) {
    var state = context.state;
    T slotValue = state.localVariableTable.get(index);
    state.operandStack.push(loadVariable(context, slotValue), needsTwoSlots);
}
/** Models {@code *store}: pops the top of stack and writes it into local slot {@code index}. */
private void handleVariableStore(InstructionContext<T> context, int index, boolean needsTwoSlots) {
    var state = context.state;
    T popped = state.operandStack.pop();
    state.localVariableTable.put(storeVariable(context, popped), index, needsTwoSlots);
}
/**
 * Models an invoke* instruction: pops the receiver (if any) and the arguments, reports them as
 * escaping, and pushes the abstract return value for non-void methods.
 *
 * @param appendix the invokedynamic/invokehandle appendix constant, or {@code null}; when
 *     present an extra (unknown) value is pushed first so that the argument pop below accounts
 *     for the appendix slot — assumption based on HotSpot's invokehandle rewriting, TODO confirm
 */
private void handleInvoke(InstructionContext<T> context, JavaMethod method, JavaConstant appendix) {
    var stack = context.state.operandStack;
    if (appendix != null) {
      stack.push(defaultValue(), false);
    }
    /*
     * HotSpot can rewrite some (method handle related) invocations, which can potentially lead
     * to an INVOKEVIRTUAL instruction actually invoking a static method. This means that we
     * cannot rely on the opcode to determine if the call has a receiver.
     *
     * https://wiki.openjdk.org/display/HotSpot/Method+handles+and+invokedynamic
     */
    boolean hasReceiver;
    if (method instanceof ResolvedJavaMethod resolved) {
      hasReceiver = resolved.hasReceiver();
    } else {
      // Unresolved target: fall back to the opcode heuristic.
      hasReceiver = context.opcode != INVOKESTATIC && context.opcode != INVOKEDYNAMIC;
    }
    Signature signature = method.getSignature();
    T receiver = null;
    // NOTE(review): the receiver is popped BEFORE the arguments, i.e. it takes the topmost
    // stack value. On a standard JVM operand stack the receiver sits BELOW the arguments —
    // confirm OperandStack's layout, or whether order is irrelevant for this abstraction.
    if (hasReceiver) {
      receiver = stack.pop();
      onValueEscape(context, receiver);
    }
    // getParameterCount(false) excludes the receiver; every argument escapes into the callee.
    List<T> operands = popOperands(stack, signature.getParameterCount(false));
    operands.forEach(op -> onValueEscape(context, op));
    JavaKind returnKind = signature.getReturnKind();
    if (!returnKind.equals(JavaKind.Void)) {
      T returnValue = invokeNonVoidMethod(context, method, receiver, operands);
      stack.push(returnValue, returnKind.needsTwoSlots());
    }
}
/** Models {@code getstatic}: pushes the abstract value observed for the static {@code field}. */
private void handleStaticFieldLoad(InstructionContext<T> context, JavaField field) {
    var stack = context.state.operandStack;
    stack.push(loadStaticField(context, field), field.getJavaKind().needsTwoSlots());
}
/**
 * Models {@code getfield}: discards the receiver and pushes an unknown (default) value of the
 * field's kind — instance field contents are not tracked.
 */
private void handleFieldLoad(InstructionContext<T> context, JavaField field) {
    var stack = context.state.operandStack;
    stack.pop();
    stack.push(defaultValue(), field.getJavaKind().needsTwoSlots());
}
/** Models {@code anewarray}: pops the length and pushes the new array value (one slot). */
private void handleNewObjectArray(InstructionContext<T> context, JavaType type) {
    var stack = context.state.operandStack;
    T length = stack.pop();
    stack.push(newObjectArray(context, type, length), false);
}
/**
 * Models {@code *astore}. Stack layout, top to bottom: value, index, array. Only the stored
 * value is reported as escaping.
 */
private void handleArrayElementStore(InstructionContext<T> context) {
    var stack = context.state.operandStack;
    T storedValue = stack.pop();
    T elementIndex = stack.pop();
    T targetArray = stack.pop();
    onValueEscape(context, storedValue);
    storeArrayElement(context, targetArray, elementIndex, storedValue);
}
/** Models {@code checkcast}: replaces the top of stack with the (possibly refined) cast value. */
private void handleCastCheck(InstructionContext<T> context, JavaType type) {
    var stack = context.state.operandStack;
    stack.push(checkCast(context, type, stack.pop()), false);
}
}
|
apache/hop | 38,140 | plugins/tech/cassandra/src/main/java/org/apache/hop/pipeline/transforms/cassandraoutput/CassandraOutputDialog.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hop.pipeline.transforms.cassandraoutput;
import java.util.List;
import org.apache.commons.lang.StringUtils;
import org.apache.hop.core.Const;
import org.apache.hop.core.Props;
import org.apache.hop.core.exception.HopException;
import org.apache.hop.core.row.IRowMeta;
import org.apache.hop.core.row.IValueMeta;
import org.apache.hop.core.util.Utils;
import org.apache.hop.core.variables.IVariables;
import org.apache.hop.databases.cassandra.datastax.DriverConnection;
import org.apache.hop.databases.cassandra.metadata.CassandraConnection;
import org.apache.hop.databases.cassandra.spi.ITableMetaData;
import org.apache.hop.databases.cassandra.spi.Keyspace;
import org.apache.hop.databases.cassandra.util.CassandraUtils;
import org.apache.hop.i18n.BaseMessages;
import org.apache.hop.pipeline.PipelineMeta;
import org.apache.hop.pipeline.transform.TransformMeta;
import org.apache.hop.ui.core.PropsUi;
import org.apache.hop.ui.core.dialog.BaseDialog;
import org.apache.hop.ui.core.dialog.EnterSelectionDialog;
import org.apache.hop.ui.core.dialog.ErrorDialog;
import org.apache.hop.ui.core.dialog.ShowMessageDialog;
import org.apache.hop.ui.core.gui.GuiResource;
import org.apache.hop.ui.core.widget.MetaSelectionLine;
import org.apache.hop.ui.core.widget.TextVar;
import org.apache.hop.ui.pipeline.transform.BaseTransformDialog;
import org.eclipse.swt.SWT;
import org.eclipse.swt.custom.CCombo;
import org.eclipse.swt.custom.CTabFolder;
import org.eclipse.swt.custom.CTabItem;
import org.eclipse.swt.layout.FormAttachment;
import org.eclipse.swt.layout.FormData;
import org.eclipse.swt.layout.FormLayout;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.swt.widgets.Text;
/** Dialog class for the CassandraOutput transform. */
public class CassandraOutputDialog extends BaseTransformDialog {
// i18n bundle anchor class for BaseMessages lookups.
private static final Class<?> PKG = CassandraOutputMeta.class;

// Message keys reused by several error/info dialogs below.
public static final String CONST_CASSANDRA_OUTPUT_DATA_MESSAGE_NO_INCOMING_FIELDS =
    "CassandraOutputData.Message.NoIncomingFields";
public static final String
    CONST_CASSANDRA_OUTPUT_DIALOG_ERROR_PROBLEM_GETTING_SCHEMA_INFO_MESSAGE =
        "CassandraOutputDialog.Error.ProblemGettingSchemaInfo.Message";
// Separator placed between a localized message and the exception text.
public static final String CONST_NEWLINES = ":\n\n";
public static final String CONST_CASSANDRA_OUTPUT_DIALOG_ERROR_PROBLEM_GETTING_SCHEMA_INFO_TITLE =
    "CassandraOutputDialog.Error.ProblemGettingSchemaInfo.Title";
public static final String CONST_CASSANDRA_OUTPUT_DATA_MESSAGE_NO_INCOMING_FIELDS_TITLE =
    "CassandraOutputData.Message.NoIncomingFields.Title";

// Transform metadata edited by this dialog; written back in ok().
private final CassandraOutputMeta input;

/** various UI bits and pieces for the dialog */
private CTabFolder wTabFolder;

// Connection tab.
private MetaSelectionLine<CassandraConnection> wConnection;

// Write tab widgets.
private CCombo wTable;
private TextVar wConsistency;
private TextVar wBatchSize;
private TextVar wBatchInsertTimeout;
private TextVar wSubBatchSize;
private Button wUnloggedBatch;
private TextVar wKeyField;

// Schema tab widgets.
private Button wbCreateTable;
private TextVar wWithClause;
private Button wTruncateTable;
private Button wUpdateTableMetaData;
private Button wInsertFieldsNotInTableMeta;

// TTL controls (write tab); wTtlValue is enabled only when a unit other than "none" is chosen.
private CCombo wTtlUnits;
private TextVar wTtlValue;
/**
 * Creates the dialog for editing a Cassandra Output transform.
 *
 * @param parent parent SWT shell
 * @param variables variable space used to resolve expressions entered in the fields
 * @param transformMeta transform metadata edited (and written back) by this dialog
 * @param pipelineMeta pipeline the transform belongs to
 */
public CassandraOutputDialog(
    Shell parent,
    IVariables variables,
    CassandraOutputMeta transformMeta,
    PipelineMeta pipelineMeta) {
  super(parent, variables, transformMeta, pipelineMeta);
  input = transformMeta;
}
/**
 * Builds and displays the dialog (Connection / Write / Schema tabs), then runs the SWT event
 * loop until OK or Cancel.
 *
 * @return the transform name when the user confirmed, or {@code null} on cancel
 */
@Override
public String open() {
  Shell parent = getParent();

  shell = new Shell(parent, SWT.DIALOG_TRIM | SWT.RESIZE | SWT.MIN | SWT.MAX);
  PropsUi.setLook(shell);
  setShellImage(shell, input);

  FormLayout formLayout = new FormLayout();
  formLayout.marginWidth = PropsUi.getFormMargin();
  formLayout.marginHeight = PropsUi.getFormMargin();

  shell.setLayout(formLayout);
  shell.setText(BaseMessages.getString(PKG, "CassandraOutputDialog.Shell.Title"));

  int middle = props.getMiddlePct();
  int margin = PropsUi.getMargin();

  // Buttons at the bottom of the dialog
  //
  wOk = new Button(shell, SWT.PUSH | SWT.CENTER);
  wOk.setText(BaseMessages.getString(PKG, "System.Button.OK"));
  wOk.addListener(SWT.Selection, e -> ok());
  wCancel = new Button(shell, SWT.PUSH | SWT.CENTER);
  wCancel.setText(BaseMessages.getString(PKG, "System.Button.Cancel"));
  wCancel.addListener(SWT.Selection, e -> cancel());
  // NOTE(review): wTabFolder is still null here (created further down); the buttons are thus
  // anchored relative to a null control — confirm setButtonPositions handles null as "shell
  // bottom" as in other Hop dialogs.
  setButtonPositions(new Button[] {wOk, wCancel}, margin, wTabFolder);

  // transformName line
  wlTransformName = new Label(shell, SWT.RIGHT);
  wlTransformName.setText(
      BaseMessages.getString(PKG, "CassandraOutputDialog.transformName.Label"));
  PropsUi.setLook(wlTransformName);

  FormData fdlTransformName = new FormData();
  fdlTransformName.left = new FormAttachment(0, 0);
  fdlTransformName.right = new FormAttachment(middle, -margin);
  fdlTransformName.top = new FormAttachment(0, margin);
  wlTransformName.setLayoutData(fdlTransformName);
  wTransformName = new Text(shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER);
  wTransformName.setText(transformName);
  PropsUi.setLook(wTransformName);

  FormData fdTransformName = new FormData();
  fdTransformName.left = new FormAttachment(middle, 0);
  fdTransformName.right = new FormAttachment(100, 0);
  fdTransformName.top = new FormAttachment(0, margin);
  wTransformName.setLayoutData(fdTransformName);

  wTabFolder = new CTabFolder(shell, SWT.BORDER);
  PropsUi.setLook(wTabFolder, Props.WIDGET_STYLE_TAB);

  // start of the connection tab
  CTabItem wConnectionTab = new CTabItem(wTabFolder, SWT.BORDER);
  wConnectionTab.setFont(GuiResource.getInstance().getFontDefault());
  wConnectionTab.setText(BaseMessages.getString(PKG, "CassandraOutputDialog.Tab.Connection"));

  Composite wConnectionComp = new Composite(wTabFolder, SWT.NONE);
  PropsUi.setLook(wConnectionComp);

  FormLayout connectionLayout = new FormLayout();
  connectionLayout.marginWidth = 3;
  connectionLayout.marginHeight = 3;
  wConnectionComp.setLayout(connectionLayout);

  // Connection line
  wConnection =
      new MetaSelectionLine<>(
          variables,
          metadataProvider,
          CassandraConnection.class,
          wConnectionComp,
          SWT.NONE,
          BaseMessages.getString(PKG, "CassandraOutputDialog.Connection.Label"),
          BaseMessages.getString(PKG, "CassandraOutputDialog.Connection.Tooltip"));
  PropsUi.setLook(wConnection);
  FormData fdConnection = new FormData();
  fdConnection.left = new FormAttachment(0, 0);
  fdConnection.right = new FormAttachment(100, 0);
  fdConnection.top = new FormAttachment(0, margin);
  wConnection.setLayoutData(fdConnection);
  try {
    wConnection.fillItems();
  } catch (Exception e) {
    new ErrorDialog(shell, "Error", "Error listing Cassandra connection metadata objects", e);
  }

  FormData fd = new FormData();
  fd.left = new FormAttachment(0, 0);
  fd.top = new FormAttachment(0, 0);
  fd.right = new FormAttachment(100, 0);
  fd.bottom = new FormAttachment(100, 0);
  wConnectionComp.setLayoutData(fd);

  wConnectionComp.layout();
  wConnectionTab.setControl(wConnectionComp);

  // --- start of the write tab ---
  CTabItem wWriteTab = new CTabItem(wTabFolder, SWT.NONE);
  wWriteTab.setFont(GuiResource.getInstance().getFontDefault());
  wWriteTab.setText(BaseMessages.getString(PKG, "CassandraOutputDialog.Tab.Write"));
  Composite wWriteComp = new Composite(wTabFolder, SWT.NONE);
  PropsUi.setLook(wWriteComp);

  FormLayout writeLayout = new FormLayout();
  writeLayout.marginWidth = 3;
  writeLayout.marginHeight = 3;
  wWriteComp.setLayout(writeLayout);

  // table line
  Label wlTable = new Label(wWriteComp, SWT.RIGHT);
  PropsUi.setLook(wlTable);
  wlTable.setText(BaseMessages.getString(PKG, "CassandraOutputDialog.Table.Label"));
  FormData fdlTable = new FormData();
  fdlTable.left = new FormAttachment(0, 0);
  fdlTable.top = new FormAttachment(0, 0);
  fdlTable.right = new FormAttachment(middle, -margin);
  wlTable.setLayoutData(fdlTable);

  // Button that queries the keyspace and fills the table combo.
  Button wbGetTables = new Button(wWriteComp, SWT.PUSH | SWT.CENTER);
  PropsUi.setLook(wbGetTables);
  wbGetTables.setText(BaseMessages.getString(PKG, "CassandraOutputDialog.GetTable.Button"));
  FormData fdbTable = new FormData();
  fdbTable.right = new FormAttachment(100, 0);
  fdbTable.top = new FormAttachment(wlTable, 0, SWT.CENTER);
  wbGetTables.setLayoutData(fdbTable);
  wbGetTables.addListener(SWT.Selection, e -> setupTablesCombo());

  wTable = new CCombo(wWriteComp, SWT.BORDER);
  PropsUi.setLook(wTable);
  wTable.addModifyListener(e -> wTable.setToolTipText(variables.resolve(wTable.getText())));
  FormData fdTable = new FormData();
  fdTable.right = new FormAttachment(wbGetTables, -margin);
  fdTable.top = new FormAttachment(wlTable, 0, SWT.CENTER);
  fdTable.left = new FormAttachment(middle, 0);
  wTable.setLayoutData(fdTable);

  // consistency line
  Label wlConsistency = new Label(wWriteComp, SWT.RIGHT);
  PropsUi.setLook(wlConsistency);
  wlConsistency.setText(BaseMessages.getString(PKG, "CassandraOutputDialog.Consistency.Label"));
  wlConsistency.setToolTipText(
      BaseMessages.getString(PKG, "CassandraOutputDialog.Consistency.Label.TipText"));
  FormData fdlConsistency = new FormData();
  fdlConsistency.left = new FormAttachment(0, 0);
  fdlConsistency.top = new FormAttachment(wTable, margin);
  fdlConsistency.right = new FormAttachment(middle, -margin);
  wlConsistency.setLayoutData(fdlConsistency);

  wConsistency = new TextVar(variables, wWriteComp, SWT.SINGLE | SWT.LEFT | SWT.BORDER);
  PropsUi.setLook(wConsistency);
  wConsistency.addModifyListener(
      e -> wConsistency.setToolTipText(variables.resolve(wConsistency.getText())));
  FormData fdConsistency = new FormData();
  fdConsistency.right = new FormAttachment(100, 0);
  fdConsistency.top = new FormAttachment(wTable, margin);
  fdConsistency.left = new FormAttachment(middle, 0);
  wConsistency.setLayoutData(fdConsistency);

  // batch size line
  Label wlBatchSize = new Label(wWriteComp, SWT.RIGHT);
  PropsUi.setLook(wlBatchSize);
  wlBatchSize.setText(BaseMessages.getString(PKG, "CassandraOutputDialog.BatchSize.Label"));
  FormData fdlBatchSize = new FormData();
  fdlBatchSize.left = new FormAttachment(0, 0);
  fdlBatchSize.top = new FormAttachment(wConsistency, margin);
  fdlBatchSize.right = new FormAttachment(middle, -margin);
  wlBatchSize.setLayoutData(fdlBatchSize);
  wlBatchSize.setToolTipText(
      BaseMessages.getString(PKG, "CassandraOutputDialog.BatchSize.TipText"));

  wBatchSize = new TextVar(variables, wWriteComp, SWT.SINGLE | SWT.LEFT | SWT.BORDER);
  PropsUi.setLook(wBatchSize);
  wBatchSize.addModifyListener(
      e -> wBatchSize.setToolTipText(variables.resolve(wBatchSize.getText())));
  FormData fdBatchSize = new FormData();
  fdBatchSize.right = new FormAttachment(100, 0);
  fdBatchSize.top = new FormAttachment(wConsistency, margin);
  fdBatchSize.left = new FormAttachment(middle, 0);
  wBatchSize.setLayoutData(fdBatchSize);

  // batch insert timeout
  Label wlBatchInsertTimeout = new Label(wWriteComp, SWT.RIGHT);
  PropsUi.setLook(wlBatchInsertTimeout);
  wlBatchInsertTimeout.setText(
      BaseMessages.getString(PKG, "CassandraOutputDialog.BatchInsertTimeout.Label"));
  FormData fdlBatchInsertTimeout = new FormData();
  fdlBatchInsertTimeout.left = new FormAttachment(0, 0);
  fdlBatchInsertTimeout.top = new FormAttachment(wBatchSize, margin);
  fdlBatchInsertTimeout.right = new FormAttachment(middle, -margin);
  wlBatchInsertTimeout.setLayoutData(fdlBatchInsertTimeout);
  wlBatchInsertTimeout.setToolTipText(
      BaseMessages.getString(PKG, "CassandraOutputDialog.BatchInsertTimeout.TipText"));

  wBatchInsertTimeout = new TextVar(variables, wWriteComp, SWT.SINGLE | SWT.LEFT | SWT.BORDER);
  PropsUi.setLook(wBatchInsertTimeout);
  wBatchInsertTimeout.addModifyListener(
      e -> wBatchInsertTimeout.setToolTipText(variables.resolve(wBatchInsertTimeout.getText())));
  FormData fdBatchInsertTimeout = new FormData();
  fdBatchInsertTimeout.right = new FormAttachment(100, 0);
  fdBatchInsertTimeout.top = new FormAttachment(wBatchSize, margin);
  fdBatchInsertTimeout.left = new FormAttachment(middle, 0);
  wBatchInsertTimeout.setLayoutData(fdBatchInsertTimeout);

  // sub-batch size
  Label wlSubBatchSize = new Label(wWriteComp, SWT.RIGHT);
  PropsUi.setLook(wlSubBatchSize);
  wlSubBatchSize.setText(BaseMessages.getString(PKG, "CassandraOutputDialog.SubBatchSize.Label"));
  wlSubBatchSize.setToolTipText(
      BaseMessages.getString(PKG, "CassandraOutputDialog.SubBatchSize.TipText"));
  FormData fdlSubBatchSize = new FormData();
  fdlSubBatchSize.left = new FormAttachment(0, 0);
  fdlSubBatchSize.top = new FormAttachment(wBatchInsertTimeout, margin);
  fdlSubBatchSize.right = new FormAttachment(middle, -margin);
  wlSubBatchSize.setLayoutData(fdlSubBatchSize);

  wSubBatchSize = new TextVar(variables, wWriteComp, SWT.SINGLE | SWT.LEFT | SWT.BORDER);
  PropsUi.setLook(wSubBatchSize);
  wSubBatchSize.addModifyListener(
      e -> wSubBatchSize.setToolTipText(variables.resolve(wSubBatchSize.getText())));
  FormData fdSubBatchSize = new FormData();
  fdSubBatchSize.right = new FormAttachment(100, 0);
  fdSubBatchSize.top = new FormAttachment(wBatchInsertTimeout, margin);
  fdSubBatchSize.left = new FormAttachment(middle, 0);
  wSubBatchSize.setLayoutData(fdSubBatchSize);

  // unlogged batch line
  Label wlUnloggedBatch = new Label(wWriteComp, SWT.RIGHT);
  wlUnloggedBatch.setText(
      BaseMessages.getString(PKG, "CassandraOutputDialog.UnloggedBatch.Label"));
  wlUnloggedBatch.setToolTipText(
      BaseMessages.getString(PKG, "CassandraOutputDialog.UnloggedBatch.TipText"));
  PropsUi.setLook(wlUnloggedBatch);
  FormData fdlUnloggedBatch = new FormData();
  fdlUnloggedBatch.left = new FormAttachment(0, 0);
  fdlUnloggedBatch.top = new FormAttachment(wSubBatchSize, margin);
  fdlUnloggedBatch.right = new FormAttachment(middle, -margin);
  wlUnloggedBatch.setLayoutData(fdlUnloggedBatch);

  wUnloggedBatch = new Button(wWriteComp, SWT.CHECK);
  PropsUi.setLook(wUnloggedBatch);
  FormData fdUnloggedBatch = new FormData();
  fdUnloggedBatch.right = new FormAttachment(100, 0);
  fdUnloggedBatch.top = new FormAttachment(wlUnloggedBatch, 0, SWT.CENTER);
  fdUnloggedBatch.left = new FormAttachment(middle, 0);
  wUnloggedBatch.setLayoutData(fdUnloggedBatch);

  // TTL line
  Label wlTtl = new Label(wWriteComp, SWT.RIGHT);
  PropsUi.setLook(wlTtl);
  wlTtl.setText(BaseMessages.getString(PKG, "CassandraOutputDialog.TTL.Label"));
  FormData fdlTtl = new FormData();
  fdlTtl.left = new FormAttachment(0, 0);
  fdlTtl.top = new FormAttachment(wlUnloggedBatch, 2 * margin);
  fdlTtl.right = new FormAttachment(middle, -margin);
  wlTtl.setLayoutData(fdlTtl);

  wTtlUnits = new CCombo(wWriteComp, SWT.BORDER);
  wTtlUnits.setEditable(false);
  PropsUi.setLook(wTtlUnits);
  FormData fdTtl = new FormData();
  fdTtl.right = new FormAttachment(100, 0);
  fdTtl.top = new FormAttachment(wlTtl, 0, SWT.CENTER);
  wTtlUnits.setLayoutData(fdTtl);
  wTtlUnits.setItems(CassandraOutputMeta.TtlUnits.getDescriptions());
  wTtlUnits.select(0);

  // Index 0 is the "no TTL" unit: disable and clear the value field in that case.
  wTtlUnits.addListener(
      SWT.Selection,
      e -> {
        if (wTtlUnits.getSelectionIndex() == 0) {
          wTtlValue.setEnabled(false);
          wTtlValue.setText("");
        } else {
          wTtlValue.setEnabled(true);
        }
      });

  wTtlValue = new TextVar(variables, wWriteComp, SWT.SINGLE | SWT.LEFT | SWT.BORDER);
  PropsUi.setLook(wTtlValue);
  FormData fdTtlValue = new FormData();
  fdTtlValue.right = new FormAttachment(wTtlUnits, -2 * margin);
  fdTtlValue.top = new FormAttachment(wlTtl, 0, SWT.CENTER);
  fdTtlValue.left = new FormAttachment(middle, 0);
  wTtlValue.setLayoutData(fdTtlValue);
  wTtlValue.setEnabled(false);
  wTtlValue.addModifyListener(
      e -> wTtlValue.setToolTipText(variables.resolve(wTtlValue.getText())));

  // key field line
  Label wlKeyField = new Label(wWriteComp, SWT.RIGHT);
  PropsUi.setLook(wlKeyField);
  wlKeyField.setText(BaseMessages.getString(PKG, "CassandraOutputDialog.KeyField.Label"));
  FormData fdlKeyField = new FormData();
  fdlKeyField.left = new FormAttachment(0, 0);
  fdlKeyField.top = new FormAttachment(wTtlValue, 2 * margin);
  fdlKeyField.right = new FormAttachment(middle, -margin);
  wlKeyField.setLayoutData(fdlKeyField);

  // Button that opens a multi-select dialog over the incoming field names.
  Button wbGetFields = new Button(wWriteComp, SWT.PUSH | SWT.CENTER);
  PropsUi.setLook(wbGetFields);
  wbGetFields.setText(BaseMessages.getString(PKG, "CassandraOutputDialog.GetFields.Button"));
  FormData fdbGetFields = new FormData();
  fdbGetFields.right = new FormAttachment(100, 0);
  fdbGetFields.top = new FormAttachment(wlKeyField, 0, SWT.CENTER);
  wbGetFields.setLayoutData(fdbGetFields);
  wbGetFields.addListener(SWT.Selection, e -> showEnterSelectionDialog());

  wKeyField = new TextVar(variables, wWriteComp, SWT.SINGLE | SWT.LEFT | SWT.BORDER);
  wKeyField.addModifyListener(
      e -> wKeyField.setToolTipText(variables.resolve(wKeyField.getText())));
  FormData fdKeyField = new FormData();
  fdKeyField.right = new FormAttachment(wbGetFields, -margin);
  fdKeyField.top = new FormAttachment(wlKeyField, 0, SWT.CENTER);
  fdKeyField.left = new FormAttachment(middle, 0);
  wKeyField.setLayoutData(fdKeyField);

  FormData fdWriteComp = new FormData();
  fdWriteComp.left = new FormAttachment(0, 0);
  fdWriteComp.top = new FormAttachment(0, 0);
  fdWriteComp.right = new FormAttachment(100, 0);
  fdWriteComp.bottom = new FormAttachment(100, 0);
  wWriteComp.setLayoutData(fdWriteComp);
  wWriteTab.setControl(wWriteComp);

  // show schema button
  Button wbShowSchema = new Button(wWriteComp, SWT.PUSH | SWT.CENTER);
  wbShowSchema.setText(BaseMessages.getString(PKG, "CassandraOutputDialog.Schema.Button"));
  PropsUi.setLook(wbShowSchema);
  FormData fdbShowSchema = new FormData();
  fdbShowSchema.right = new FormAttachment(100, 0);
  fdbShowSchema.bottom = new FormAttachment(100, -margin * 2);
  wbShowSchema.setLayoutData(fdbShowSchema);
  wbShowSchema.addListener(SWT.Selection, e -> popupSchemaInfo());

  // ---- start of the schema options tab ----
  CTabItem wSchemaTab = new CTabItem(wTabFolder, SWT.NONE);
  wSchemaTab.setFont(GuiResource.getInstance().getFontDefault());
  wSchemaTab.setText(BaseMessages.getString(PKG, "CassandraOutputData.Tab.Schema"));

  Composite wSchemaComp = new Composite(wTabFolder, SWT.NONE);
  PropsUi.setLook(wSchemaComp);

  FormLayout schemaLayout = new FormLayout();
  schemaLayout.marginWidth = 3;
  schemaLayout.marginHeight = 3;
  wSchemaComp.setLayout(schemaLayout);

  // create table line
  Label wlCreateTable = new Label(wSchemaComp, SWT.RIGHT);
  PropsUi.setLook(wlCreateTable);
  wlCreateTable.setText(BaseMessages.getString(PKG, "CassandraOutputDialog.CreateTable.Label"));
  wlCreateTable.setToolTipText(
      BaseMessages.getString(PKG, "CassandraOutputDialog.CreateTable.TipText"));
  FormData fdlCreateTable = new FormData();
  fdlCreateTable.left = new FormAttachment(0, 0);
  fdlCreateTable.top = new FormAttachment(0, margin);
  fdlCreateTable.right = new FormAttachment(middle, -margin);
  wlCreateTable.setLayoutData(fdlCreateTable);

  wbCreateTable = new Button(wSchemaComp, SWT.CHECK);
  wbCreateTable.setToolTipText(
      BaseMessages.getString(PKG, "CassandraOutputDialog.CreateTable.TipText"));
  PropsUi.setLook(wbCreateTable);
  FormData fdCreateTable = new FormData();
  fdCreateTable.right = new FormAttachment(100, 0);
  fdCreateTable.top = new FormAttachment(wlCreateTable, 0, SWT.CENTER);
  fdCreateTable.left = new FormAttachment(middle, 0);
  wbCreateTable.setLayoutData(fdCreateTable);

  // table creation with clause line
  Label wlWithClause = new Label(wSchemaComp, SWT.RIGHT);
  wlWithClause.setText(
      BaseMessages.getString(PKG, "CassandraOutputDialog.CreateTableWithClause.Label"));
  wlWithClause.setToolTipText(
      BaseMessages.getString(PKG, "CassandraOutputDialog.CreateTableWithClause.TipText"));
  PropsUi.setLook(wlWithClause);
  FormData fdlWithClause = new FormData();
  fdlWithClause.left = new FormAttachment(0, 0);
  fdlWithClause.top = new FormAttachment(wlCreateTable, 2 * margin);
  fdlWithClause.right = new FormAttachment(middle, -margin);
  wlWithClause.setLayoutData(fdlWithClause);

  wWithClause = new TextVar(variables, wSchemaComp, SWT.SINGLE | SWT.LEFT | SWT.BORDER);
  PropsUi.setLook(wWithClause);
  wWithClause.addModifyListener(
      e -> wWithClause.setToolTipText(variables.resolve(wWithClause.getText())));
  FormData fdWithClause = new FormData();
  fdWithClause.right = new FormAttachment(100, 0);
  fdWithClause.top = new FormAttachment(wlWithClause, 0, SWT.CENTER);
  fdWithClause.left = new FormAttachment(middle, 0);
  wWithClause.setLayoutData(fdWithClause);

  // truncate table line
  Label wlTruncateTable = new Label(wSchemaComp, SWT.RIGHT);
  PropsUi.setLook(wlTruncateTable);
  wlTruncateTable.setText(
      BaseMessages.getString(PKG, "CassandraOutputDialog.TruncateTable.Label"));
  wlTruncateTable.setToolTipText(
      BaseMessages.getString(PKG, "CassandraOutputDialog.TruncateTable.TipText"));
  FormData fdlTruncateTable = new FormData();
  fdlTruncateTable.left = new FormAttachment(0, 0);
  fdlTruncateTable.top = new FormAttachment(wWithClause, margin);
  fdlTruncateTable.right = new FormAttachment(middle, -margin);
  wlTruncateTable.setLayoutData(fdlTruncateTable);

  wTruncateTable = new Button(wSchemaComp, SWT.CHECK);
  wTruncateTable.setToolTipText(
      BaseMessages.getString(PKG, "CassandraOutputDialog.TruncateTable.TipText"));
  PropsUi.setLook(wTruncateTable);
  FormData fdTruncateTable = new FormData();
  fdTruncateTable.right = new FormAttachment(100, 0);
  fdTruncateTable.top = new FormAttachment(wlTruncateTable, 0, SWT.CENTER);
  fdTruncateTable.left = new FormAttachment(middle, 0);
  wTruncateTable.setLayoutData(fdTruncateTable);

  // update table meta data line
  Label wlUpdateTableMetaData = new Label(wSchemaComp, SWT.RIGHT);
  PropsUi.setLook(wlUpdateTableMetaData);
  wlUpdateTableMetaData.setText(
      BaseMessages.getString(PKG, "CassandraOutputDialog.UpdateTableMetaData.Label"));
  wlUpdateTableMetaData.setToolTipText(
      BaseMessages.getString(PKG, "CassandraOutputDialog.UpdateTableMetaData.TipText"));
  FormData fdlUpdateTableMetaData = new FormData();
  fdlUpdateTableMetaData.left = new FormAttachment(0, 0);
  fdlUpdateTableMetaData.top = new FormAttachment(wlTruncateTable, 2 * margin);
  fdlUpdateTableMetaData.right = new FormAttachment(middle, -margin);
  wlUpdateTableMetaData.setLayoutData(fdlUpdateTableMetaData);

  wUpdateTableMetaData = new Button(wSchemaComp, SWT.CHECK);
  wUpdateTableMetaData.setToolTipText(
      BaseMessages.getString(PKG, "CassandraOutputDialog.UpdateTableMetaData.TipText"));
  PropsUi.setLook(wUpdateTableMetaData);
  FormData fdUpdateTableMetaData = new FormData();
  fdUpdateTableMetaData.right = new FormAttachment(100, 0);
  fdUpdateTableMetaData.top = new FormAttachment(wlUpdateTableMetaData, 0, SWT.CENTER);
  fdUpdateTableMetaData.left = new FormAttachment(middle, 0);
  wUpdateTableMetaData.setLayoutData(fdUpdateTableMetaData);

  // insert fields not in meta line
  Label wlInsertFieldsNotInTableMeta = new Label(wSchemaComp, SWT.RIGHT);
  PropsUi.setLook(wlInsertFieldsNotInTableMeta);
  wlInsertFieldsNotInTableMeta.setText(
      BaseMessages.getString(PKG, "CassandraOutputDialog.InsertFieldsNotInTableMetaData.Label"));
  wlInsertFieldsNotInTableMeta.setToolTipText(
      BaseMessages.getString(
          PKG, "CassandraOutputDialog.InsertFieldsNotInTableMetaData.TipText"));
  FormData fdlInsertFieldsNotInTableMeta = new FormData();
  fdlInsertFieldsNotInTableMeta.left = new FormAttachment(0, 0);
  fdlInsertFieldsNotInTableMeta.top = new FormAttachment(wlUpdateTableMetaData, 2 * margin);
  fdlInsertFieldsNotInTableMeta.right = new FormAttachment(middle, -margin);
  wlInsertFieldsNotInTableMeta.setLayoutData(fdlInsertFieldsNotInTableMeta);

  wInsertFieldsNotInTableMeta = new Button(wSchemaComp, SWT.CHECK);
  wInsertFieldsNotInTableMeta.setToolTipText(
      BaseMessages.getString(
          PKG, "CassandraOutputDialog.InsertFieldsNotInTableMetaData.TipText"));
  PropsUi.setLook(wInsertFieldsNotInTableMeta);
  FormData fdInsertFieldsNotInTableMeta = new FormData();
  fdInsertFieldsNotInTableMeta.right = new FormAttachment(100, 0);
  fdInsertFieldsNotInTableMeta.top =
      new FormAttachment(wlInsertFieldsNotInTableMeta, 0, SWT.CENTER);
  fdInsertFieldsNotInTableMeta.left = new FormAttachment(middle, 0);
  wInsertFieldsNotInTableMeta.setLayoutData(fdInsertFieldsNotInTableMeta);

  FormData fdSchemaComp = new FormData();
  fdSchemaComp.left = new FormAttachment(0, 0);
  fdSchemaComp.top = new FormAttachment(0, 0);
  fdSchemaComp.right = new FormAttachment(100, 0);
  fdSchemaComp.bottom = new FormAttachment(100, 0);
  wSchemaComp.setLayoutData(fdSchemaComp);

  wSchemaComp.layout();
  wSchemaTab.setControl(wSchemaComp);

  FormData fdTabFolder = new FormData();
  fdTabFolder.left = new FormAttachment(0, 0);
  fdTabFolder.top = new FormAttachment(wlTransformName, margin);
  fdTabFolder.right = new FormAttachment(100, 0);
  fdTabFolder.bottom = new FormAttachment(wOk, -2 * margin);
  wTabFolder.setLayoutData(fdTabFolder);

  wTabFolder.setSelection(0);

  // Populate the widgets from the transform metadata, then run the modal event loop.
  getData();

  BaseDialog.defaultShellHandling(shell, c -> ok(), c -> cancel());

  return transformName;
}
/**
 * Connects to the configured Cassandra keyspace and repopulates the table combo with the CQL3
 * table names found there. Silently returns when no connection is selected; connection/schema
 * failures are logged and reported via the standard error dialog.
 */
protected void setupTablesCombo() {
  DriverConnection conn = null;
  try {
    String connectionName = variables.resolve(wConnection.getText());
    if (StringUtils.isEmpty(connectionName)) {
      // Nothing selected yet — nothing to look up.
      return;
    }
    CassandraConnection cassandraConnection =
        metadataProvider.getSerializer(CassandraConnection.class).load(connectionName);

    Keyspace kSpace;
    try {
      conn = cassandraConnection.createConnection(variables, false);
      kSpace = cassandraConnection.lookupKeyspace(conn, variables);
    } catch (Exception e) {
      showSchemaInfoError(e);
      return;
    }

    List<String> tables = kSpace.getTableNamesCQL3();
    wTable.removeAll();
    for (String tableName : tables) {
      wTable.add(tableName);
    }
  } catch (Exception ex) {
    showSchemaInfoError(ex);
  } finally {
    if (conn != null) {
      try {
        conn.close();
      } catch (Exception e) {
        // Fix: the original swallowed this with e.printStackTrace(); log it instead, but do not
        // let a close failure mask the outcome of the lookup above.
        logError("Error closing Cassandra connection", e);
      }
    }
  }
}

/**
 * Logs {@code e} and shows the standard "problem getting schema info" error dialog.
 * Extracted because the same log+dialog sequence was duplicated in both catch blocks.
 */
private void showSchemaInfoError(Exception e) {
  String message =
      BaseMessages.getString(
              PKG, CONST_CASSANDRA_OUTPUT_DIALOG_ERROR_PROBLEM_GETTING_SCHEMA_INFO_MESSAGE)
          + CONST_NEWLINES
          + e.getLocalizedMessage();
  logError(message, e);
  new ErrorDialog(
      shell,
      BaseMessages.getString(
          PKG, CONST_CASSANDRA_OUTPUT_DIALOG_ERROR_PROBLEM_GETTING_SCHEMA_INFO_TITLE),
      message,
      e);
}
/**
 * Opens a multi-select dialog listing the incoming field names and writes the chosen names,
 * comma-separated, into the key-field text box. Pre-selects the fields currently listed in the
 * box. Shows an error when the transform has no incoming fields.
 */
protected void showEnterSelectionDialog() {
  TransformMeta transformMeta = pipelineMeta.findTransform(transformName);

  String[] choices;
  if (transformMeta != null) {
    try {
      IRowMeta row = pipelineMeta.getPrevTransformFields(variables, transformMeta);

      if (row.isEmpty()) {
        BaseDialog.openMessageBox(
            shell,
            BaseMessages.getString(
                PKG, CONST_CASSANDRA_OUTPUT_DATA_MESSAGE_NO_INCOMING_FIELDS_TITLE),
            BaseMessages.getString(PKG, CONST_CASSANDRA_OUTPUT_DATA_MESSAGE_NO_INCOMING_FIELDS),
            SWT.ICON_ERROR | SWT.OK);
        return;
      }

      choices = new String[row.size()];
      for (int i = 0; i < row.size(); i++) {
        IValueMeta vm = row.getValueMeta(i);
        choices[i] = vm.getName();
      }

      EnterSelectionDialog dialog =
          new EnterSelectionDialog(
              shell,
              choices,
              BaseMessages.getString(PKG, "CassandraOutputDialog.SelectKeyFieldsDialog.Title"),
              BaseMessages.getString(PKG, "CassandraOutputDialog.SelectKeyFieldsDialog.Message"));
      dialog.setMulti(true);

      if (!Utils.isEmpty(wKeyField.getText())) {
        String current = wKeyField.getText();
        String[] parts = current.split(",");
        int[] matched = new int[parts.length];
        int count = 0;
        for (String s : parts) {
          int index = row.indexOfValue(s.trim());
          if (index >= 0) {
            matched[count++] = index;
          }
        }
        // Fix: the original passed the full-length array even when some names did not resolve,
        // so trailing zero slots spuriously pre-selected the first field. Pass only the
        // indices that were actually found.
        int[] currentSelection = new int[count];
        System.arraycopy(matched, 0, currentSelection, 0, count);
        dialog.setSelectedNrs(currentSelection);
      }

      dialog.open();

      int[] selected = dialog.getSelectionIndeces(); // SIC
      if (selected != null && selected.length > 0) {
        StringBuilder newSelection = new StringBuilder();
        for (int i : selected) {
          if (newSelection.length() > 0) {
            newSelection.append(",");
          }
          newSelection.append(choices[i]);
        }
        wKeyField.setText(newSelection.toString());
      }
    } catch (HopException ex) {
      new ErrorDialog(
          shell,
          BaseMessages.getString(
              PKG, CONST_CASSANDRA_OUTPUT_DATA_MESSAGE_NO_INCOMING_FIELDS_TITLE),
          BaseMessages.getString(PKG, CONST_CASSANDRA_OUTPUT_DATA_MESSAGE_NO_INCOMING_FIELDS),
          ex);
    }
  }
}
/**
 * Checks whether any fields flow into this transform from a previous one and shows the
 * standard "no incoming fields" message when there are none. Lookup failures are reported via
 * an error dialog.
 */
protected void setupFieldsCombo() {
  // try and set up from incoming fields from previous transform
  TransformMeta transformMeta = pipelineMeta.findTransform(transformName);
  if (transformMeta == null) {
    return;
  }
  try {
    IRowMeta incomingFields = pipelineMeta.getPrevTransformFields(variables, transformMeta);
    if (!incomingFields.isEmpty()) {
      return;
    }
    BaseDialog.openMessageBox(
        shell,
        BaseMessages.getString(
            PKG, CONST_CASSANDRA_OUTPUT_DATA_MESSAGE_NO_INCOMING_FIELDS_TITLE),
        BaseMessages.getString(PKG, CONST_CASSANDRA_OUTPUT_DATA_MESSAGE_NO_INCOMING_FIELDS),
        SWT.ICON_ERROR | SWT.OK);
  } catch (HopException ex) {
    new ErrorDialog(
        shell,
        BaseMessages.getString(
            PKG, CONST_CASSANDRA_OUTPUT_DATA_MESSAGE_NO_INCOMING_FIELDS_TITLE),
        BaseMessages.getString(PKG, CONST_CASSANDRA_OUTPUT_DATA_MESSAGE_NO_INCOMING_FIELDS),
        ex);
  }
}
protected void ok() {
if (Utils.isEmpty(wlTransformName.getText())) {
return;
}
transformName = wTransformName.getText();
input.setConnectionName(wConnection.getText());
input.setTableName(wTable.getText());
input.setConsistency(wConsistency.getText());
input.setBatchSize(wBatchSize.getText());
input.setCqlBatchInsertTimeout(wBatchInsertTimeout.getText());
input.setCqlSubBatchSize(wSubBatchSize.getText());
input.setKeyField(wKeyField.getText());
input.setCreateTable(wbCreateTable.getSelection());
input.setTruncateTable(wTruncateTable.getSelection());
input.setUpdateCassandraMeta(wUpdateTableMetaData.getSelection());
input.setInsertFieldsNotInMeta(wInsertFieldsNotInTableMeta.getSelection());
input.setCreateTableWithClause(wWithClause.getText());
input.setUseUnloggedBatch(wUnloggedBatch.getSelection());
input.setTtl(wTtlValue.getText());
input.setTtlUnit(CassandraOutputMeta.TtlUnits.findWithDescription(wTtlUnits.getText()));
input.setChanged();
dispose();
}
  /** Called when the user clicks Cancel: discards the transform name and closes the dialog. */
  protected void cancel() {
    transformName = null;
    dispose();
  }
protected void popupSchemaInfo() {
DriverConnection conn = null;
Keyspace kSpace = null;
try {
String connectionName = variables.resolve(wConnection.getText());
if (StringUtils.isEmpty(connectionName)) {
return;
}
CassandraConnection cassandraConnection =
metadataProvider.getSerializer(CassandraConnection.class).load(connectionName);
try {
conn = cassandraConnection.createConnection(variables, false);
kSpace = cassandraConnection.lookupKeyspace(conn, variables);
} catch (Exception e) {
logError(
BaseMessages.getString(
PKG, CONST_CASSANDRA_OUTPUT_DIALOG_ERROR_PROBLEM_GETTING_SCHEMA_INFO_MESSAGE)
+ CONST_NEWLINES
+ e.getLocalizedMessage(),
e);
new ErrorDialog(
shell,
BaseMessages.getString(
PKG, CONST_CASSANDRA_OUTPUT_DIALOG_ERROR_PROBLEM_GETTING_SCHEMA_INFO_TITLE),
BaseMessages.getString(
PKG, CONST_CASSANDRA_OUTPUT_DIALOG_ERROR_PROBLEM_GETTING_SCHEMA_INFO_MESSAGE)
+ CONST_NEWLINES
+ e.getLocalizedMessage(),
e);
return;
}
String table = variables.resolve(wTable.getText());
if (Utils.isEmpty(table)) {
throw new Exception("No table name specified!");
}
table = CassandraUtils.cql3MixedCaseQuote(table);
if (!kSpace.tableExists(table)) {
throw new Exception(
"The table '"
+ table
+ "' does not "
+ "seem to exist in the keyspace '"
+ cassandraConnection.getKeyspace());
}
ITableMetaData cassMeta = kSpace.getTableMetaData(table);
String schemaDescription = cassMeta.describe();
ShowMessageDialog smd =
new ShowMessageDialog(
shell, SWT.ICON_INFORMATION | SWT.OK, "Schema info", schemaDescription, true);
smd.open();
} catch (Exception e1) {
logError(
BaseMessages.getString(
PKG, CONST_CASSANDRA_OUTPUT_DIALOG_ERROR_PROBLEM_GETTING_SCHEMA_INFO_MESSAGE)
+ CONST_NEWLINES
+ e1.getMessage(),
e1);
new ErrorDialog(
shell,
BaseMessages.getString(
PKG, CONST_CASSANDRA_OUTPUT_DIALOG_ERROR_PROBLEM_GETTING_SCHEMA_INFO_TITLE),
BaseMessages.getString(
PKG, CONST_CASSANDRA_OUTPUT_DIALOG_ERROR_PROBLEM_GETTING_SCHEMA_INFO_MESSAGE)
+ CONST_NEWLINES
+ e1.getMessage(),
e1);
} finally {
if (conn != null) {
try {
conn.close();
} catch (Exception e) {
e.printStackTrace();
}
}
}
}
protected void getData() {
wTransformName.setText(Const.NVL(transformName, ""));
wConnection.setText(Const.NVL(input.getConnectionName(), ""));
wTable.setText(Const.NVL(input.getTableName(), ""));
wConsistency.setText(Const.NVL(input.getConsistency(), ""));
wBatchSize.setText(Const.NVL(input.getBatchSize(), ""));
wBatchInsertTimeout.setText(Const.NVL(input.getCqlBatchInsertTimeout(), ""));
wSubBatchSize.setText(Const.NVL(input.getBatchSize(), ""));
wKeyField.setText(Const.NVL(input.getKeyField(), ""));
wWithClause.setText(Const.NVL(input.getCreateTableWithClause(), ""));
wbCreateTable.setSelection(input.isCreateTable());
wTruncateTable.setSelection(input.isTruncateTable());
wUpdateTableMetaData.setSelection(input.isUpdateCassandraMeta());
wInsertFieldsNotInTableMeta.setSelection(input.isInsertFieldsNotInMeta());
wUnloggedBatch.setSelection(input.isUseUnloggedBatch());
wTtlValue.setText(Const.NVL(input.getTtl(), ""));
wTtlUnits.setText(input.getTtlUnit() == null ? "" : input.getTtlUnit().getDescription());
wTtlValue.setEnabled(wTtlUnits.getSelectionIndex() > 0);
}
}
|
googleads/google-ads-java | 38,327 | google-ads-stubs-v19/src/main/java/com/google/ads/googleads/v19/services/KeywordPlanIdeaServiceGrpc.java | package com.google.ads.googleads.v19.services;
import static io.grpc.MethodDescriptor.generateFullMethodName;
/**
* <pre>
* Service to generate keyword ideas.
* </pre>
*/
@javax.annotation.Generated(
value = "by gRPC proto compiler",
comments = "Source: google/ads/googleads/v19/services/keyword_plan_idea_service.proto")
@io.grpc.stub.annotations.GrpcGenerated
public final class KeywordPlanIdeaServiceGrpc {
  // Static holder class for stubs and method descriptors; never instantiated.
  private KeywordPlanIdeaServiceGrpc() {}

  // Fully-qualified proto service name, used to build the full method names below.
  public static final java.lang.String SERVICE_NAME = "google.ads.googleads.v19.services.KeywordPlanIdeaService";
  // Static method descriptors that strictly reflect the proto.
  private static volatile io.grpc.MethodDescriptor<com.google.ads.googleads.v19.services.GenerateKeywordIdeasRequest,
      com.google.ads.googleads.v19.services.GenerateKeywordIdeaResponse> getGenerateKeywordIdeasMethod;

  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "GenerateKeywordIdeas",
      requestType = com.google.ads.googleads.v19.services.GenerateKeywordIdeasRequest.class,
      responseType = com.google.ads.googleads.v19.services.GenerateKeywordIdeaResponse.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<com.google.ads.googleads.v19.services.GenerateKeywordIdeasRequest,
      com.google.ads.googleads.v19.services.GenerateKeywordIdeaResponse> getGenerateKeywordIdeasMethod() {
    // Lazily builds the descriptor via double-checked locking; the backing field is
    // volatile so a fully-constructed descriptor published by one thread is visible to all.
    io.grpc.MethodDescriptor<com.google.ads.googleads.v19.services.GenerateKeywordIdeasRequest, com.google.ads.googleads.v19.services.GenerateKeywordIdeaResponse> getGenerateKeywordIdeasMethod;
    if ((getGenerateKeywordIdeasMethod = KeywordPlanIdeaServiceGrpc.getGenerateKeywordIdeasMethod) == null) {
      synchronized (KeywordPlanIdeaServiceGrpc.class) {
        if ((getGenerateKeywordIdeasMethod = KeywordPlanIdeaServiceGrpc.getGenerateKeywordIdeasMethod) == null) {
          KeywordPlanIdeaServiceGrpc.getGenerateKeywordIdeasMethod = getGenerateKeywordIdeasMethod =
              io.grpc.MethodDescriptor.<com.google.ads.googleads.v19.services.GenerateKeywordIdeasRequest, com.google.ads.googleads.v19.services.GenerateKeywordIdeaResponse>newBuilder()
              .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
              .setFullMethodName(generateFullMethodName(SERVICE_NAME, "GenerateKeywordIdeas"))
              .setSampledToLocalTracing(true)
              .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(
                  com.google.ads.googleads.v19.services.GenerateKeywordIdeasRequest.getDefaultInstance()))
              .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(
                  com.google.ads.googleads.v19.services.GenerateKeywordIdeaResponse.getDefaultInstance()))
              .setSchemaDescriptor(new KeywordPlanIdeaServiceMethodDescriptorSupplier("GenerateKeywordIdeas"))
              .build();
        }
      }
    }
    return getGenerateKeywordIdeasMethod;
  }
  private static volatile io.grpc.MethodDescriptor<com.google.ads.googleads.v19.services.GenerateKeywordHistoricalMetricsRequest,
      com.google.ads.googleads.v19.services.GenerateKeywordHistoricalMetricsResponse> getGenerateKeywordHistoricalMetricsMethod;

  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "GenerateKeywordHistoricalMetrics",
      requestType = com.google.ads.googleads.v19.services.GenerateKeywordHistoricalMetricsRequest.class,
      responseType = com.google.ads.googleads.v19.services.GenerateKeywordHistoricalMetricsResponse.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<com.google.ads.googleads.v19.services.GenerateKeywordHistoricalMetricsRequest,
      com.google.ads.googleads.v19.services.GenerateKeywordHistoricalMetricsResponse> getGenerateKeywordHistoricalMetricsMethod() {
    // Same lazy double-checked-locking pattern as getGenerateKeywordIdeasMethod().
    io.grpc.MethodDescriptor<com.google.ads.googleads.v19.services.GenerateKeywordHistoricalMetricsRequest, com.google.ads.googleads.v19.services.GenerateKeywordHistoricalMetricsResponse> getGenerateKeywordHistoricalMetricsMethod;
    if ((getGenerateKeywordHistoricalMetricsMethod = KeywordPlanIdeaServiceGrpc.getGenerateKeywordHistoricalMetricsMethod) == null) {
      synchronized (KeywordPlanIdeaServiceGrpc.class) {
        if ((getGenerateKeywordHistoricalMetricsMethod = KeywordPlanIdeaServiceGrpc.getGenerateKeywordHistoricalMetricsMethod) == null) {
          KeywordPlanIdeaServiceGrpc.getGenerateKeywordHistoricalMetricsMethod = getGenerateKeywordHistoricalMetricsMethod =
              io.grpc.MethodDescriptor.<com.google.ads.googleads.v19.services.GenerateKeywordHistoricalMetricsRequest, com.google.ads.googleads.v19.services.GenerateKeywordHistoricalMetricsResponse>newBuilder()
              .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
              .setFullMethodName(generateFullMethodName(SERVICE_NAME, "GenerateKeywordHistoricalMetrics"))
              .setSampledToLocalTracing(true)
              .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(
                  com.google.ads.googleads.v19.services.GenerateKeywordHistoricalMetricsRequest.getDefaultInstance()))
              .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(
                  com.google.ads.googleads.v19.services.GenerateKeywordHistoricalMetricsResponse.getDefaultInstance()))
              .setSchemaDescriptor(new KeywordPlanIdeaServiceMethodDescriptorSupplier("GenerateKeywordHistoricalMetrics"))
              .build();
        }
      }
    }
    return getGenerateKeywordHistoricalMetricsMethod;
  }
  private static volatile io.grpc.MethodDescriptor<com.google.ads.googleads.v19.services.GenerateAdGroupThemesRequest,
      com.google.ads.googleads.v19.services.GenerateAdGroupThemesResponse> getGenerateAdGroupThemesMethod;

  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "GenerateAdGroupThemes",
      requestType = com.google.ads.googleads.v19.services.GenerateAdGroupThemesRequest.class,
      responseType = com.google.ads.googleads.v19.services.GenerateAdGroupThemesResponse.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<com.google.ads.googleads.v19.services.GenerateAdGroupThemesRequest,
      com.google.ads.googleads.v19.services.GenerateAdGroupThemesResponse> getGenerateAdGroupThemesMethod() {
    // Same lazy double-checked-locking pattern as getGenerateKeywordIdeasMethod().
    io.grpc.MethodDescriptor<com.google.ads.googleads.v19.services.GenerateAdGroupThemesRequest, com.google.ads.googleads.v19.services.GenerateAdGroupThemesResponse> getGenerateAdGroupThemesMethod;
    if ((getGenerateAdGroupThemesMethod = KeywordPlanIdeaServiceGrpc.getGenerateAdGroupThemesMethod) == null) {
      synchronized (KeywordPlanIdeaServiceGrpc.class) {
        if ((getGenerateAdGroupThemesMethod = KeywordPlanIdeaServiceGrpc.getGenerateAdGroupThemesMethod) == null) {
          KeywordPlanIdeaServiceGrpc.getGenerateAdGroupThemesMethod = getGenerateAdGroupThemesMethod =
              io.grpc.MethodDescriptor.<com.google.ads.googleads.v19.services.GenerateAdGroupThemesRequest, com.google.ads.googleads.v19.services.GenerateAdGroupThemesResponse>newBuilder()
              .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
              .setFullMethodName(generateFullMethodName(SERVICE_NAME, "GenerateAdGroupThemes"))
              .setSampledToLocalTracing(true)
              .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(
                  com.google.ads.googleads.v19.services.GenerateAdGroupThemesRequest.getDefaultInstance()))
              .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(
                  com.google.ads.googleads.v19.services.GenerateAdGroupThemesResponse.getDefaultInstance()))
              .setSchemaDescriptor(new KeywordPlanIdeaServiceMethodDescriptorSupplier("GenerateAdGroupThemes"))
              .build();
        }
      }
    }
    return getGenerateAdGroupThemesMethod;
  }
  private static volatile io.grpc.MethodDescriptor<com.google.ads.googleads.v19.services.GenerateKeywordForecastMetricsRequest,
      com.google.ads.googleads.v19.services.GenerateKeywordForecastMetricsResponse> getGenerateKeywordForecastMetricsMethod;

  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "GenerateKeywordForecastMetrics",
      requestType = com.google.ads.googleads.v19.services.GenerateKeywordForecastMetricsRequest.class,
      responseType = com.google.ads.googleads.v19.services.GenerateKeywordForecastMetricsResponse.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<com.google.ads.googleads.v19.services.GenerateKeywordForecastMetricsRequest,
      com.google.ads.googleads.v19.services.GenerateKeywordForecastMetricsResponse> getGenerateKeywordForecastMetricsMethod() {
    // Same lazy double-checked-locking pattern as getGenerateKeywordIdeasMethod().
    io.grpc.MethodDescriptor<com.google.ads.googleads.v19.services.GenerateKeywordForecastMetricsRequest, com.google.ads.googleads.v19.services.GenerateKeywordForecastMetricsResponse> getGenerateKeywordForecastMetricsMethod;
    if ((getGenerateKeywordForecastMetricsMethod = KeywordPlanIdeaServiceGrpc.getGenerateKeywordForecastMetricsMethod) == null) {
      synchronized (KeywordPlanIdeaServiceGrpc.class) {
        if ((getGenerateKeywordForecastMetricsMethod = KeywordPlanIdeaServiceGrpc.getGenerateKeywordForecastMetricsMethod) == null) {
          KeywordPlanIdeaServiceGrpc.getGenerateKeywordForecastMetricsMethod = getGenerateKeywordForecastMetricsMethod =
              io.grpc.MethodDescriptor.<com.google.ads.googleads.v19.services.GenerateKeywordForecastMetricsRequest, com.google.ads.googleads.v19.services.GenerateKeywordForecastMetricsResponse>newBuilder()
              .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
              .setFullMethodName(generateFullMethodName(SERVICE_NAME, "GenerateKeywordForecastMetrics"))
              .setSampledToLocalTracing(true)
              .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(
                  com.google.ads.googleads.v19.services.GenerateKeywordForecastMetricsRequest.getDefaultInstance()))
              .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(
                  com.google.ads.googleads.v19.services.GenerateKeywordForecastMetricsResponse.getDefaultInstance()))
              .setSchemaDescriptor(new KeywordPlanIdeaServiceMethodDescriptorSupplier("GenerateKeywordForecastMetrics"))
              .build();
        }
      }
    }
    return getGenerateKeywordForecastMetricsMethod;
  }
  /**
   * Creates a new async stub that supports all call types for the service
   */
  public static KeywordPlanIdeaServiceStub newStub(io.grpc.Channel channel) {
    // Delegates construction to the stub's factory so call options are threaded through.
    io.grpc.stub.AbstractStub.StubFactory<KeywordPlanIdeaServiceStub> factory =
      new io.grpc.stub.AbstractStub.StubFactory<KeywordPlanIdeaServiceStub>() {
        @java.lang.Override
        public KeywordPlanIdeaServiceStub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
          return new KeywordPlanIdeaServiceStub(channel, callOptions);
        }
      };
    return KeywordPlanIdeaServiceStub.newStub(factory, channel);
  }
  /**
   * Creates a new blocking-style stub that supports all types of calls on the service
   */
  public static KeywordPlanIdeaServiceBlockingV2Stub newBlockingV2Stub(
      io.grpc.Channel channel) {
    // Delegates construction to the stub's factory so call options are threaded through.
    io.grpc.stub.AbstractStub.StubFactory<KeywordPlanIdeaServiceBlockingV2Stub> factory =
      new io.grpc.stub.AbstractStub.StubFactory<KeywordPlanIdeaServiceBlockingV2Stub>() {
        @java.lang.Override
        public KeywordPlanIdeaServiceBlockingV2Stub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
          return new KeywordPlanIdeaServiceBlockingV2Stub(channel, callOptions);
        }
      };
    return KeywordPlanIdeaServiceBlockingV2Stub.newStub(factory, channel);
  }
  /**
   * Creates a new blocking-style stub that supports unary and streaming output calls on the service
   */
  public static KeywordPlanIdeaServiceBlockingStub newBlockingStub(
      io.grpc.Channel channel) {
    // Delegates construction to the stub's factory so call options are threaded through.
    io.grpc.stub.AbstractStub.StubFactory<KeywordPlanIdeaServiceBlockingStub> factory =
      new io.grpc.stub.AbstractStub.StubFactory<KeywordPlanIdeaServiceBlockingStub>() {
        @java.lang.Override
        public KeywordPlanIdeaServiceBlockingStub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
          return new KeywordPlanIdeaServiceBlockingStub(channel, callOptions);
        }
      };
    return KeywordPlanIdeaServiceBlockingStub.newStub(factory, channel);
  }
  /**
   * Creates a new ListenableFuture-style stub that supports unary calls on the service
   */
  public static KeywordPlanIdeaServiceFutureStub newFutureStub(
      io.grpc.Channel channel) {
    // Delegates construction to the stub's factory so call options are threaded through.
    io.grpc.stub.AbstractStub.StubFactory<KeywordPlanIdeaServiceFutureStub> factory =
      new io.grpc.stub.AbstractStub.StubFactory<KeywordPlanIdeaServiceFutureStub>() {
        @java.lang.Override
        public KeywordPlanIdeaServiceFutureStub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
          return new KeywordPlanIdeaServiceFutureStub(channel, callOptions);
        }
      };
    return KeywordPlanIdeaServiceFutureStub.newStub(factory, channel);
  }
  /**
   * <pre>
   * Service to generate keyword ideas.
   * </pre>
   */
  public interface AsyncService {

    // Note: every default method below responds with UNIMPLEMENTED until a server
    // implementation overrides it.

    /**
     * <pre>
     * Returns a list of keyword ideas.
     * List of thrown errors:
     *   [AuthenticationError]()
     *   [AuthorizationError]()
     *   [CollectionSizeError]()
     *   [HeaderError]()
     *   [InternalError]()
     *   [KeywordPlanIdeaError]()
     *   [QuotaError]()
     *   [RequestError]()
     * </pre>
     */
    default void generateKeywordIdeas(com.google.ads.googleads.v19.services.GenerateKeywordIdeasRequest request,
        io.grpc.stub.StreamObserver<com.google.ads.googleads.v19.services.GenerateKeywordIdeaResponse> responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getGenerateKeywordIdeasMethod(), responseObserver);
    }

    /**
     * <pre>
     * Returns a list of keyword historical metrics.
     * List of thrown errors:
     *   [AuthenticationError]()
     *   [AuthorizationError]()
     *   [CollectionSizeError]()
     *   [HeaderError]()
     *   [InternalError]()
     *   [QuotaError]()
     *   [RequestError]()
     * </pre>
     */
    default void generateKeywordHistoricalMetrics(com.google.ads.googleads.v19.services.GenerateKeywordHistoricalMetricsRequest request,
        io.grpc.stub.StreamObserver<com.google.ads.googleads.v19.services.GenerateKeywordHistoricalMetricsResponse> responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getGenerateKeywordHistoricalMetricsMethod(), responseObserver);
    }

    /**
     * <pre>
     * Returns a list of suggested AdGroups and suggested modifications
     * (text, match type) for the given keywords.
     * List of thrown errors:
     *   [AuthenticationError]()
     *   [AuthorizationError]()
     *   [CollectionSizeError]()
     *   [HeaderError]()
     *   [InternalError]()
     *   [QuotaError]()
     *   [RequestError]()
     * </pre>
     */
    default void generateAdGroupThemes(com.google.ads.googleads.v19.services.GenerateAdGroupThemesRequest request,
        io.grpc.stub.StreamObserver<com.google.ads.googleads.v19.services.GenerateAdGroupThemesResponse> responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getGenerateAdGroupThemesMethod(), responseObserver);
    }

    /**
     * <pre>
     * Returns metrics (such as impressions, clicks, total cost) of a keyword
     * forecast for the given campaign.
     * List of thrown errors:
     *   [AuthenticationError]()
     *   [AuthorizationError]()
     *   [CollectionSizeError]()
     *   [HeaderError]()
     *   [InternalError]()
     *   [QuotaError]()
     *   [RequestError]()
     * </pre>
     */
    default void generateKeywordForecastMetrics(com.google.ads.googleads.v19.services.GenerateKeywordForecastMetricsRequest request,
        io.grpc.stub.StreamObserver<com.google.ads.googleads.v19.services.GenerateKeywordForecastMetricsResponse> responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getGenerateKeywordForecastMetricsMethod(), responseObserver);
    }
  }
  /**
   * Base class for the server implementation of the service KeywordPlanIdeaService.
   * <pre>
   * Service to generate keyword ideas.
   * </pre>
   */
  public static abstract class KeywordPlanIdeaServiceImplBase
      implements io.grpc.BindableService, AsyncService {

    // Binds all RPC methods of AsyncService into a server service definition.
    @java.lang.Override public final io.grpc.ServerServiceDefinition bindService() {
      return KeywordPlanIdeaServiceGrpc.bindService(this);
    }
  }
  /**
   * A stub to allow clients to do asynchronous rpc calls to service KeywordPlanIdeaService.
   * <pre>
   * Service to generate keyword ideas.
   * </pre>
   */
  public static final class KeywordPlanIdeaServiceStub
      extends io.grpc.stub.AbstractAsyncStub<KeywordPlanIdeaServiceStub> {
    private KeywordPlanIdeaServiceStub(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }

    @java.lang.Override
    protected KeywordPlanIdeaServiceStub build(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new KeywordPlanIdeaServiceStub(channel, callOptions);
    }

    /**
     * <pre>
     * Returns a list of keyword ideas.
     * List of thrown errors:
     *   [AuthenticationError]()
     *   [AuthorizationError]()
     *   [CollectionSizeError]()
     *   [HeaderError]()
     *   [InternalError]()
     *   [KeywordPlanIdeaError]()
     *   [QuotaError]()
     *   [RequestError]()
     * </pre>
     */
    public void generateKeywordIdeas(com.google.ads.googleads.v19.services.GenerateKeywordIdeasRequest request,
        io.grpc.stub.StreamObserver<com.google.ads.googleads.v19.services.GenerateKeywordIdeaResponse> responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getGenerateKeywordIdeasMethod(), getCallOptions()), request, responseObserver);
    }

    /**
     * <pre>
     * Returns a list of keyword historical metrics.
     * List of thrown errors:
     *   [AuthenticationError]()
     *   [AuthorizationError]()
     *   [CollectionSizeError]()
     *   [HeaderError]()
     *   [InternalError]()
     *   [QuotaError]()
     *   [RequestError]()
     * </pre>
     */
    public void generateKeywordHistoricalMetrics(com.google.ads.googleads.v19.services.GenerateKeywordHistoricalMetricsRequest request,
        io.grpc.stub.StreamObserver<com.google.ads.googleads.v19.services.GenerateKeywordHistoricalMetricsResponse> responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getGenerateKeywordHistoricalMetricsMethod(), getCallOptions()), request, responseObserver);
    }

    /**
     * <pre>
     * Returns a list of suggested AdGroups and suggested modifications
     * (text, match type) for the given keywords.
     * List of thrown errors:
     *   [AuthenticationError]()
     *   [AuthorizationError]()
     *   [CollectionSizeError]()
     *   [HeaderError]()
     *   [InternalError]()
     *   [QuotaError]()
     *   [RequestError]()
     * </pre>
     */
    public void generateAdGroupThemes(com.google.ads.googleads.v19.services.GenerateAdGroupThemesRequest request,
        io.grpc.stub.StreamObserver<com.google.ads.googleads.v19.services.GenerateAdGroupThemesResponse> responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getGenerateAdGroupThemesMethod(), getCallOptions()), request, responseObserver);
    }

    /**
     * <pre>
     * Returns metrics (such as impressions, clicks, total cost) of a keyword
     * forecast for the given campaign.
     * List of thrown errors:
     *   [AuthenticationError]()
     *   [AuthorizationError]()
     *   [CollectionSizeError]()
     *   [HeaderError]()
     *   [InternalError]()
     *   [QuotaError]()
     *   [RequestError]()
     * </pre>
     */
    public void generateKeywordForecastMetrics(com.google.ads.googleads.v19.services.GenerateKeywordForecastMetricsRequest request,
        io.grpc.stub.StreamObserver<com.google.ads.googleads.v19.services.GenerateKeywordForecastMetricsResponse> responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getGenerateKeywordForecastMetricsMethod(), getCallOptions()), request, responseObserver);
    }
  }
  /**
   * A stub to allow clients to do synchronous rpc calls to service KeywordPlanIdeaService.
   * <pre>
   * Service to generate keyword ideas.
   * </pre>
   */
  public static final class KeywordPlanIdeaServiceBlockingV2Stub
      extends io.grpc.stub.AbstractBlockingStub<KeywordPlanIdeaServiceBlockingV2Stub> {
    private KeywordPlanIdeaServiceBlockingV2Stub(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }

    @java.lang.Override
    protected KeywordPlanIdeaServiceBlockingV2Stub build(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new KeywordPlanIdeaServiceBlockingV2Stub(channel, callOptions);
    }

    /**
     * <pre>
     * Returns a list of keyword ideas.
     * List of thrown errors:
     *   [AuthenticationError]()
     *   [AuthorizationError]()
     *   [CollectionSizeError]()
     *   [HeaderError]()
     *   [InternalError]()
     *   [KeywordPlanIdeaError]()
     *   [QuotaError]()
     *   [RequestError]()
     * </pre>
     */
    public com.google.ads.googleads.v19.services.GenerateKeywordIdeaResponse generateKeywordIdeas(com.google.ads.googleads.v19.services.GenerateKeywordIdeasRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getGenerateKeywordIdeasMethod(), getCallOptions(), request);
    }

    /**
     * <pre>
     * Returns a list of keyword historical metrics.
     * List of thrown errors:
     *   [AuthenticationError]()
     *   [AuthorizationError]()
     *   [CollectionSizeError]()
     *   [HeaderError]()
     *   [InternalError]()
     *   [QuotaError]()
     *   [RequestError]()
     * </pre>
     */
    public com.google.ads.googleads.v19.services.GenerateKeywordHistoricalMetricsResponse generateKeywordHistoricalMetrics(com.google.ads.googleads.v19.services.GenerateKeywordHistoricalMetricsRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getGenerateKeywordHistoricalMetricsMethod(), getCallOptions(), request);
    }

    /**
     * <pre>
     * Returns a list of suggested AdGroups and suggested modifications
     * (text, match type) for the given keywords.
     * List of thrown errors:
     *   [AuthenticationError]()
     *   [AuthorizationError]()
     *   [CollectionSizeError]()
     *   [HeaderError]()
     *   [InternalError]()
     *   [QuotaError]()
     *   [RequestError]()
     * </pre>
     */
    public com.google.ads.googleads.v19.services.GenerateAdGroupThemesResponse generateAdGroupThemes(com.google.ads.googleads.v19.services.GenerateAdGroupThemesRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getGenerateAdGroupThemesMethod(), getCallOptions(), request);
    }

    /**
     * <pre>
     * Returns metrics (such as impressions, clicks, total cost) of a keyword
     * forecast for the given campaign.
     * List of thrown errors:
     *   [AuthenticationError]()
     *   [AuthorizationError]()
     *   [CollectionSizeError]()
     *   [HeaderError]()
     *   [InternalError]()
     *   [QuotaError]()
     *   [RequestError]()
     * </pre>
     */
    public com.google.ads.googleads.v19.services.GenerateKeywordForecastMetricsResponse generateKeywordForecastMetrics(com.google.ads.googleads.v19.services.GenerateKeywordForecastMetricsRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getGenerateKeywordForecastMetricsMethod(), getCallOptions(), request);
    }
  }
  /**
   * A stub to allow clients to do limited synchronous rpc calls to service KeywordPlanIdeaService.
   * <pre>
   * Service to generate keyword ideas.
   * </pre>
   */
  public static final class KeywordPlanIdeaServiceBlockingStub
      extends io.grpc.stub.AbstractBlockingStub<KeywordPlanIdeaServiceBlockingStub> {
    private KeywordPlanIdeaServiceBlockingStub(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }

    @java.lang.Override
    protected KeywordPlanIdeaServiceBlockingStub build(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new KeywordPlanIdeaServiceBlockingStub(channel, callOptions);
    }

    /**
     * <pre>
     * Returns a list of keyword ideas.
     * List of thrown errors:
     *   [AuthenticationError]()
     *   [AuthorizationError]()
     *   [CollectionSizeError]()
     *   [HeaderError]()
     *   [InternalError]()
     *   [KeywordPlanIdeaError]()
     *   [QuotaError]()
     *   [RequestError]()
     * </pre>
     */
    public com.google.ads.googleads.v19.services.GenerateKeywordIdeaResponse generateKeywordIdeas(com.google.ads.googleads.v19.services.GenerateKeywordIdeasRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getGenerateKeywordIdeasMethod(), getCallOptions(), request);
    }

    /**
     * <pre>
     * Returns a list of keyword historical metrics.
     * List of thrown errors:
     *   [AuthenticationError]()
     *   [AuthorizationError]()
     *   [CollectionSizeError]()
     *   [HeaderError]()
     *   [InternalError]()
     *   [QuotaError]()
     *   [RequestError]()
     * </pre>
     */
    public com.google.ads.googleads.v19.services.GenerateKeywordHistoricalMetricsResponse generateKeywordHistoricalMetrics(com.google.ads.googleads.v19.services.GenerateKeywordHistoricalMetricsRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getGenerateKeywordHistoricalMetricsMethod(), getCallOptions(), request);
    }

    /**
     * <pre>
     * Returns a list of suggested AdGroups and suggested modifications
     * (text, match type) for the given keywords.
     * List of thrown errors:
     *   [AuthenticationError]()
     *   [AuthorizationError]()
     *   [CollectionSizeError]()
     *   [HeaderError]()
     *   [InternalError]()
     *   [QuotaError]()
     *   [RequestError]()
     * </pre>
     */
    public com.google.ads.googleads.v19.services.GenerateAdGroupThemesResponse generateAdGroupThemes(com.google.ads.googleads.v19.services.GenerateAdGroupThemesRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getGenerateAdGroupThemesMethod(), getCallOptions(), request);
    }

    /**
     * <pre>
     * Returns metrics (such as impressions, clicks, total cost) of a keyword
     * forecast for the given campaign.
     * List of thrown errors:
     *   [AuthenticationError]()
     *   [AuthorizationError]()
     *   [CollectionSizeError]()
     *   [HeaderError]()
     *   [InternalError]()
     *   [QuotaError]()
     *   [RequestError]()
     * </pre>
     */
    public com.google.ads.googleads.v19.services.GenerateKeywordForecastMetricsResponse generateKeywordForecastMetrics(com.google.ads.googleads.v19.services.GenerateKeywordForecastMetricsRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getGenerateKeywordForecastMetricsMethod(), getCallOptions(), request);
    }
  }
  /**
   * A stub to allow clients to do ListenableFuture-style rpc calls to service KeywordPlanIdeaService.
   * <pre>
   * Service to generate keyword ideas.
   * </pre>
   */
  public static final class KeywordPlanIdeaServiceFutureStub
      extends io.grpc.stub.AbstractFutureStub<KeywordPlanIdeaServiceFutureStub> {
    private KeywordPlanIdeaServiceFutureStub(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }

    @java.lang.Override
    protected KeywordPlanIdeaServiceFutureStub build(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new KeywordPlanIdeaServiceFutureStub(channel, callOptions);
    }

    /**
     * <pre>
     * Returns a list of keyword ideas.
     * List of thrown errors:
     *   [AuthenticationError]()
     *   [AuthorizationError]()
     *   [CollectionSizeError]()
     *   [HeaderError]()
     *   [InternalError]()
     *   [KeywordPlanIdeaError]()
     *   [QuotaError]()
     *   [RequestError]()
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<com.google.ads.googleads.v19.services.GenerateKeywordIdeaResponse> generateKeywordIdeas(
        com.google.ads.googleads.v19.services.GenerateKeywordIdeasRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getGenerateKeywordIdeasMethod(), getCallOptions()), request);
    }

    /**
     * <pre>
     * Returns a list of keyword historical metrics.
     * List of thrown errors:
     *   [AuthenticationError]()
     *   [AuthorizationError]()
     *   [CollectionSizeError]()
     *   [HeaderError]()
     *   [InternalError]()
     *   [QuotaError]()
     *   [RequestError]()
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<com.google.ads.googleads.v19.services.GenerateKeywordHistoricalMetricsResponse> generateKeywordHistoricalMetrics(
        com.google.ads.googleads.v19.services.GenerateKeywordHistoricalMetricsRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getGenerateKeywordHistoricalMetricsMethod(), getCallOptions()), request);
    }

    /**
     * <pre>
     * Returns a list of suggested AdGroups and suggested modifications
     * (text, match type) for the given keywords.
     * List of thrown errors:
     *   [AuthenticationError]()
     *   [AuthorizationError]()
     *   [CollectionSizeError]()
     *   [HeaderError]()
     *   [InternalError]()
     *   [QuotaError]()
     *   [RequestError]()
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<com.google.ads.googleads.v19.services.GenerateAdGroupThemesResponse> generateAdGroupThemes(
        com.google.ads.googleads.v19.services.GenerateAdGroupThemesRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getGenerateAdGroupThemesMethod(), getCallOptions()), request);
    }

    /**
     * <pre>
     * Returns metrics (such as impressions, clicks, total cost) of a keyword
     * forecast for the given campaign.
     * List of thrown errors:
     *   [AuthenticationError]()
     *   [AuthorizationError]()
     *   [CollectionSizeError]()
     *   [HeaderError]()
     *   [InternalError]()
     *   [QuotaError]()
     *   [RequestError]()
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<com.google.ads.googleads.v19.services.GenerateKeywordForecastMetricsResponse> generateKeywordForecastMetrics(
        com.google.ads.googleads.v19.services.GenerateKeywordForecastMetricsRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getGenerateKeywordForecastMetricsMethod(), getCallOptions()), request);
    }
  }
  // Dispatch ids: each RPC of the service maps to one case in MethodHandlers.invoke.
  private static final int METHODID_GENERATE_KEYWORD_IDEAS = 0;
  private static final int METHODID_GENERATE_KEYWORD_HISTORICAL_METRICS = 1;
  private static final int METHODID_GENERATE_AD_GROUP_THEMES = 2;
  private static final int METHODID_GENERATE_KEYWORD_FORECAST_METRICS = 3;
  // Adapter from an AsyncService implementation to the grpc ServerCalls handler
  // interfaces. bindService creates one instance per method, each carrying the
  // methodId it should dispatch to.
  private static final class MethodHandlers<Req, Resp> implements
      io.grpc.stub.ServerCalls.UnaryMethod<Req, Resp>,
      io.grpc.stub.ServerCalls.ServerStreamingMethod<Req, Resp>,
      io.grpc.stub.ServerCalls.ClientStreamingMethod<Req, Resp>,
      io.grpc.stub.ServerCalls.BidiStreamingMethod<Req, Resp> {
    private final AsyncService serviceImpl;
    private final int methodId;
    MethodHandlers(AsyncService serviceImpl, int methodId) {
      this.serviceImpl = serviceImpl;
      this.methodId = methodId;
    }
    @java.lang.Override
    @java.lang.SuppressWarnings("unchecked")
    public void invoke(Req request, io.grpc.stub.StreamObserver<Resp> responseObserver) {
      // Unary dispatch: casts are safe because bindService pairs each methodId
      // with the matching request/response type parameters.
      switch (methodId) {
        case METHODID_GENERATE_KEYWORD_IDEAS:
          serviceImpl.generateKeywordIdeas((com.google.ads.googleads.v19.services.GenerateKeywordIdeasRequest) request,
              (io.grpc.stub.StreamObserver<com.google.ads.googleads.v19.services.GenerateKeywordIdeaResponse>) responseObserver);
          break;
        case METHODID_GENERATE_KEYWORD_HISTORICAL_METRICS:
          serviceImpl.generateKeywordHistoricalMetrics((com.google.ads.googleads.v19.services.GenerateKeywordHistoricalMetricsRequest) request,
              (io.grpc.stub.StreamObserver<com.google.ads.googleads.v19.services.GenerateKeywordHistoricalMetricsResponse>) responseObserver);
          break;
        case METHODID_GENERATE_AD_GROUP_THEMES:
          serviceImpl.generateAdGroupThemes((com.google.ads.googleads.v19.services.GenerateAdGroupThemesRequest) request,
              (io.grpc.stub.StreamObserver<com.google.ads.googleads.v19.services.GenerateAdGroupThemesResponse>) responseObserver);
          break;
        case METHODID_GENERATE_KEYWORD_FORECAST_METRICS:
          serviceImpl.generateKeywordForecastMetrics((com.google.ads.googleads.v19.services.GenerateKeywordForecastMetricsRequest) request,
              (io.grpc.stub.StreamObserver<com.google.ads.googleads.v19.services.GenerateKeywordForecastMetricsResponse>) responseObserver);
          break;
        default:
          // Unreachable: every METHODID_* constant is handled above.
          throw new AssertionError();
      }
    }
    @java.lang.Override
    @java.lang.SuppressWarnings("unchecked")
    public io.grpc.stub.StreamObserver<Req> invoke(
        io.grpc.stub.StreamObserver<Resp> responseObserver) {
      // Streaming entry point: this service has only unary methods, so any
      // invocation here indicates a framework-level wiring error.
      switch (methodId) {
        default:
          throw new AssertionError();
      }
    }
  }
  // Builds the server-side ServerServiceDefinition, wiring each proto method to an
  // async unary-call handler that dispatches into the given AsyncService implementation.
  public static final io.grpc.ServerServiceDefinition bindService(AsyncService service) {
    return io.grpc.ServerServiceDefinition.builder(getServiceDescriptor())
        .addMethod(
          getGenerateKeywordIdeasMethod(),
          io.grpc.stub.ServerCalls.asyncUnaryCall(
            new MethodHandlers<
              com.google.ads.googleads.v19.services.GenerateKeywordIdeasRequest,
              com.google.ads.googleads.v19.services.GenerateKeywordIdeaResponse>(
                service, METHODID_GENERATE_KEYWORD_IDEAS)))
        .addMethod(
          getGenerateKeywordHistoricalMetricsMethod(),
          io.grpc.stub.ServerCalls.asyncUnaryCall(
            new MethodHandlers<
              com.google.ads.googleads.v19.services.GenerateKeywordHistoricalMetricsRequest,
              com.google.ads.googleads.v19.services.GenerateKeywordHistoricalMetricsResponse>(
                service, METHODID_GENERATE_KEYWORD_HISTORICAL_METRICS)))
        .addMethod(
          getGenerateAdGroupThemesMethod(),
          io.grpc.stub.ServerCalls.asyncUnaryCall(
            new MethodHandlers<
              com.google.ads.googleads.v19.services.GenerateAdGroupThemesRequest,
              com.google.ads.googleads.v19.services.GenerateAdGroupThemesResponse>(
                service, METHODID_GENERATE_AD_GROUP_THEMES)))
        .addMethod(
          getGenerateKeywordForecastMetricsMethod(),
          io.grpc.stub.ServerCalls.asyncUnaryCall(
            new MethodHandlers<
              com.google.ads.googleads.v19.services.GenerateKeywordForecastMetricsRequest,
              com.google.ads.googleads.v19.services.GenerateKeywordForecastMetricsResponse>(
                service, METHODID_GENERATE_KEYWORD_FORECAST_METRICS)))
        .build();
  }
  // Base supplier exposing the proto file and service descriptors (used by grpc
  // reflection/tooling); the two subclasses below add file- and method-level variants.
  private static abstract class KeywordPlanIdeaServiceBaseDescriptorSupplier
      implements io.grpc.protobuf.ProtoFileDescriptorSupplier, io.grpc.protobuf.ProtoServiceDescriptorSupplier {
    KeywordPlanIdeaServiceBaseDescriptorSupplier() {}
    @java.lang.Override
    public com.google.protobuf.Descriptors.FileDescriptor getFileDescriptor() {
      return com.google.ads.googleads.v19.services.KeywordPlanIdeaServiceProto.getDescriptor();
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.ServiceDescriptor getServiceDescriptor() {
      return getFileDescriptor().findServiceByName("KeywordPlanIdeaService");
    }
  }
  // Schema descriptor attached to the ServiceDescriptor built in getServiceDescriptor().
  private static final class KeywordPlanIdeaServiceFileDescriptorSupplier
      extends KeywordPlanIdeaServiceBaseDescriptorSupplier {
    KeywordPlanIdeaServiceFileDescriptorSupplier() {}
  }
  // Schema descriptor attached to each MethodDescriptor; resolves the proto
  // method descriptor lazily by name.
  private static final class KeywordPlanIdeaServiceMethodDescriptorSupplier
      extends KeywordPlanIdeaServiceBaseDescriptorSupplier
      implements io.grpc.protobuf.ProtoMethodDescriptorSupplier {
    private final java.lang.String methodName;
    KeywordPlanIdeaServiceMethodDescriptorSupplier(java.lang.String methodName) {
      this.methodName = methodName;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.MethodDescriptor getMethodDescriptor() {
      return getServiceDescriptor().findMethodByName(methodName);
    }
  }
  private static volatile io.grpc.ServiceDescriptor serviceDescriptor;
  // Lazily builds and caches the ServiceDescriptor using double-checked locking
  // on the volatile field above (safe under the Java memory model).
  public static io.grpc.ServiceDescriptor getServiceDescriptor() {
    io.grpc.ServiceDescriptor result = serviceDescriptor;
    if (result == null) {
      synchronized (KeywordPlanIdeaServiceGrpc.class) {
        result = serviceDescriptor;
        if (result == null) {
          serviceDescriptor = result = io.grpc.ServiceDescriptor.newBuilder(SERVICE_NAME)
              .setSchemaDescriptor(new KeywordPlanIdeaServiceFileDescriptorSupplier())
              .addMethod(getGenerateKeywordIdeasMethod())
              .addMethod(getGenerateKeywordHistoricalMetricsMethod())
              .addMethod(getGenerateAdGroupThemesMethod())
              .addMethod(getGenerateKeywordForecastMetricsMethod())
              .build();
        }
      }
    }
    return result;
  }
}
|
googleads/google-ads-java | 38,327 | google-ads-stubs-v20/src/main/java/com/google/ads/googleads/v20/services/KeywordPlanIdeaServiceGrpc.java | package com.google.ads.googleads.v20.services;
import static io.grpc.MethodDescriptor.generateFullMethodName;
/**
* <pre>
* Service to generate keyword ideas.
* </pre>
*/
@javax.annotation.Generated(
value = "by gRPC proto compiler",
comments = "Source: google/ads/googleads/v20/services/keyword_plan_idea_service.proto")
@io.grpc.stub.annotations.GrpcGenerated
public final class KeywordPlanIdeaServiceGrpc {
  // Static holder class: not instantiable.
  private KeywordPlanIdeaServiceGrpc() {}
  // Fully-qualified service name as declared in the proto file.
  public static final java.lang.String SERVICE_NAME = "google.ads.googleads.v20.services.KeywordPlanIdeaService";
  // Static method descriptors that strictly reflect the proto.
  private static volatile io.grpc.MethodDescriptor<com.google.ads.googleads.v20.services.GenerateKeywordIdeasRequest,
      com.google.ads.googleads.v20.services.GenerateKeywordIdeaResponse> getGenerateKeywordIdeasMethod;
  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "GenerateKeywordIdeas",
      requestType = com.google.ads.googleads.v20.services.GenerateKeywordIdeasRequest.class,
      responseType = com.google.ads.googleads.v20.services.GenerateKeywordIdeaResponse.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  // Lazily creates and caches the unary MethodDescriptor for GenerateKeywordIdeas
  // (double-checked locking on the volatile field above).
  public static io.grpc.MethodDescriptor<com.google.ads.googleads.v20.services.GenerateKeywordIdeasRequest,
      com.google.ads.googleads.v20.services.GenerateKeywordIdeaResponse> getGenerateKeywordIdeasMethod() {
    io.grpc.MethodDescriptor<com.google.ads.googleads.v20.services.GenerateKeywordIdeasRequest, com.google.ads.googleads.v20.services.GenerateKeywordIdeaResponse> getGenerateKeywordIdeasMethod;
    if ((getGenerateKeywordIdeasMethod = KeywordPlanIdeaServiceGrpc.getGenerateKeywordIdeasMethod) == null) {
      synchronized (KeywordPlanIdeaServiceGrpc.class) {
        if ((getGenerateKeywordIdeasMethod = KeywordPlanIdeaServiceGrpc.getGenerateKeywordIdeasMethod) == null) {
          KeywordPlanIdeaServiceGrpc.getGenerateKeywordIdeasMethod = getGenerateKeywordIdeasMethod =
              io.grpc.MethodDescriptor.<com.google.ads.googleads.v20.services.GenerateKeywordIdeasRequest, com.google.ads.googleads.v20.services.GenerateKeywordIdeaResponse>newBuilder()
              .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
              .setFullMethodName(generateFullMethodName(SERVICE_NAME, "GenerateKeywordIdeas"))
              .setSampledToLocalTracing(true)
              .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(
                  com.google.ads.googleads.v20.services.GenerateKeywordIdeasRequest.getDefaultInstance()))
              .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(
                  com.google.ads.googleads.v20.services.GenerateKeywordIdeaResponse.getDefaultInstance()))
              .setSchemaDescriptor(new KeywordPlanIdeaServiceMethodDescriptorSupplier("GenerateKeywordIdeas"))
              .build();
        }
      }
    }
    return getGenerateKeywordIdeasMethod;
  }
  private static volatile io.grpc.MethodDescriptor<com.google.ads.googleads.v20.services.GenerateKeywordHistoricalMetricsRequest,
      com.google.ads.googleads.v20.services.GenerateKeywordHistoricalMetricsResponse> getGenerateKeywordHistoricalMetricsMethod;
  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "GenerateKeywordHistoricalMetrics",
      requestType = com.google.ads.googleads.v20.services.GenerateKeywordHistoricalMetricsRequest.class,
      responseType = com.google.ads.googleads.v20.services.GenerateKeywordHistoricalMetricsResponse.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  // Lazily creates and caches the unary MethodDescriptor for
  // GenerateKeywordHistoricalMetrics (double-checked locking).
  public static io.grpc.MethodDescriptor<com.google.ads.googleads.v20.services.GenerateKeywordHistoricalMetricsRequest,
      com.google.ads.googleads.v20.services.GenerateKeywordHistoricalMetricsResponse> getGenerateKeywordHistoricalMetricsMethod() {
    io.grpc.MethodDescriptor<com.google.ads.googleads.v20.services.GenerateKeywordHistoricalMetricsRequest, com.google.ads.googleads.v20.services.GenerateKeywordHistoricalMetricsResponse> getGenerateKeywordHistoricalMetricsMethod;
    if ((getGenerateKeywordHistoricalMetricsMethod = KeywordPlanIdeaServiceGrpc.getGenerateKeywordHistoricalMetricsMethod) == null) {
      synchronized (KeywordPlanIdeaServiceGrpc.class) {
        if ((getGenerateKeywordHistoricalMetricsMethod = KeywordPlanIdeaServiceGrpc.getGenerateKeywordHistoricalMetricsMethod) == null) {
          KeywordPlanIdeaServiceGrpc.getGenerateKeywordHistoricalMetricsMethod = getGenerateKeywordHistoricalMetricsMethod =
              io.grpc.MethodDescriptor.<com.google.ads.googleads.v20.services.GenerateKeywordHistoricalMetricsRequest, com.google.ads.googleads.v20.services.GenerateKeywordHistoricalMetricsResponse>newBuilder()
              .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
              .setFullMethodName(generateFullMethodName(SERVICE_NAME, "GenerateKeywordHistoricalMetrics"))
              .setSampledToLocalTracing(true)
              .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(
                  com.google.ads.googleads.v20.services.GenerateKeywordHistoricalMetricsRequest.getDefaultInstance()))
              .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(
                  com.google.ads.googleads.v20.services.GenerateKeywordHistoricalMetricsResponse.getDefaultInstance()))
              .setSchemaDescriptor(new KeywordPlanIdeaServiceMethodDescriptorSupplier("GenerateKeywordHistoricalMetrics"))
              .build();
        }
      }
    }
    return getGenerateKeywordHistoricalMetricsMethod;
  }
  private static volatile io.grpc.MethodDescriptor<com.google.ads.googleads.v20.services.GenerateAdGroupThemesRequest,
      com.google.ads.googleads.v20.services.GenerateAdGroupThemesResponse> getGenerateAdGroupThemesMethod;
  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "GenerateAdGroupThemes",
      requestType = com.google.ads.googleads.v20.services.GenerateAdGroupThemesRequest.class,
      responseType = com.google.ads.googleads.v20.services.GenerateAdGroupThemesResponse.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  // Lazily creates and caches the unary MethodDescriptor for GenerateAdGroupThemes
  // (double-checked locking).
  public static io.grpc.MethodDescriptor<com.google.ads.googleads.v20.services.GenerateAdGroupThemesRequest,
      com.google.ads.googleads.v20.services.GenerateAdGroupThemesResponse> getGenerateAdGroupThemesMethod() {
    io.grpc.MethodDescriptor<com.google.ads.googleads.v20.services.GenerateAdGroupThemesRequest, com.google.ads.googleads.v20.services.GenerateAdGroupThemesResponse> getGenerateAdGroupThemesMethod;
    if ((getGenerateAdGroupThemesMethod = KeywordPlanIdeaServiceGrpc.getGenerateAdGroupThemesMethod) == null) {
      synchronized (KeywordPlanIdeaServiceGrpc.class) {
        if ((getGenerateAdGroupThemesMethod = KeywordPlanIdeaServiceGrpc.getGenerateAdGroupThemesMethod) == null) {
          KeywordPlanIdeaServiceGrpc.getGenerateAdGroupThemesMethod = getGenerateAdGroupThemesMethod =
              io.grpc.MethodDescriptor.<com.google.ads.googleads.v20.services.GenerateAdGroupThemesRequest, com.google.ads.googleads.v20.services.GenerateAdGroupThemesResponse>newBuilder()
              .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
              .setFullMethodName(generateFullMethodName(SERVICE_NAME, "GenerateAdGroupThemes"))
              .setSampledToLocalTracing(true)
              .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(
                  com.google.ads.googleads.v20.services.GenerateAdGroupThemesRequest.getDefaultInstance()))
              .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(
                  com.google.ads.googleads.v20.services.GenerateAdGroupThemesResponse.getDefaultInstance()))
              .setSchemaDescriptor(new KeywordPlanIdeaServiceMethodDescriptorSupplier("GenerateAdGroupThemes"))
              .build();
        }
      }
    }
    return getGenerateAdGroupThemesMethod;
  }
  private static volatile io.grpc.MethodDescriptor<com.google.ads.googleads.v20.services.GenerateKeywordForecastMetricsRequest,
      com.google.ads.googleads.v20.services.GenerateKeywordForecastMetricsResponse> getGenerateKeywordForecastMetricsMethod;
  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "GenerateKeywordForecastMetrics",
      requestType = com.google.ads.googleads.v20.services.GenerateKeywordForecastMetricsRequest.class,
      responseType = com.google.ads.googleads.v20.services.GenerateKeywordForecastMetricsResponse.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  // Lazily creates and caches the unary MethodDescriptor for
  // GenerateKeywordForecastMetrics (double-checked locking).
  public static io.grpc.MethodDescriptor<com.google.ads.googleads.v20.services.GenerateKeywordForecastMetricsRequest,
      com.google.ads.googleads.v20.services.GenerateKeywordForecastMetricsResponse> getGenerateKeywordForecastMetricsMethod() {
    io.grpc.MethodDescriptor<com.google.ads.googleads.v20.services.GenerateKeywordForecastMetricsRequest, com.google.ads.googleads.v20.services.GenerateKeywordForecastMetricsResponse> getGenerateKeywordForecastMetricsMethod;
    if ((getGenerateKeywordForecastMetricsMethod = KeywordPlanIdeaServiceGrpc.getGenerateKeywordForecastMetricsMethod) == null) {
      synchronized (KeywordPlanIdeaServiceGrpc.class) {
        if ((getGenerateKeywordForecastMetricsMethod = KeywordPlanIdeaServiceGrpc.getGenerateKeywordForecastMetricsMethod) == null) {
          KeywordPlanIdeaServiceGrpc.getGenerateKeywordForecastMetricsMethod = getGenerateKeywordForecastMetricsMethod =
              io.grpc.MethodDescriptor.<com.google.ads.googleads.v20.services.GenerateKeywordForecastMetricsRequest, com.google.ads.googleads.v20.services.GenerateKeywordForecastMetricsResponse>newBuilder()
              .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
              .setFullMethodName(generateFullMethodName(SERVICE_NAME, "GenerateKeywordForecastMetrics"))
              .setSampledToLocalTracing(true)
              .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(
                  com.google.ads.googleads.v20.services.GenerateKeywordForecastMetricsRequest.getDefaultInstance()))
              .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(
                  com.google.ads.googleads.v20.services.GenerateKeywordForecastMetricsResponse.getDefaultInstance()))
              .setSchemaDescriptor(new KeywordPlanIdeaServiceMethodDescriptorSupplier("GenerateKeywordForecastMetrics"))
              .build();
        }
      }
    }
    return getGenerateKeywordForecastMetricsMethod;
  }
/**
* Creates a new async stub that supports all call types for the service
*/
public static KeywordPlanIdeaServiceStub newStub(io.grpc.Channel channel) {
io.grpc.stub.AbstractStub.StubFactory<KeywordPlanIdeaServiceStub> factory =
new io.grpc.stub.AbstractStub.StubFactory<KeywordPlanIdeaServiceStub>() {
@java.lang.Override
public KeywordPlanIdeaServiceStub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
return new KeywordPlanIdeaServiceStub(channel, callOptions);
}
};
return KeywordPlanIdeaServiceStub.newStub(factory, channel);
}
/**
* Creates a new blocking-style stub that supports all types of calls on the service
*/
public static KeywordPlanIdeaServiceBlockingV2Stub newBlockingV2Stub(
io.grpc.Channel channel) {
io.grpc.stub.AbstractStub.StubFactory<KeywordPlanIdeaServiceBlockingV2Stub> factory =
new io.grpc.stub.AbstractStub.StubFactory<KeywordPlanIdeaServiceBlockingV2Stub>() {
@java.lang.Override
public KeywordPlanIdeaServiceBlockingV2Stub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
return new KeywordPlanIdeaServiceBlockingV2Stub(channel, callOptions);
}
};
return KeywordPlanIdeaServiceBlockingV2Stub.newStub(factory, channel);
}
/**
* Creates a new blocking-style stub that supports unary and streaming output calls on the service
*/
public static KeywordPlanIdeaServiceBlockingStub newBlockingStub(
io.grpc.Channel channel) {
io.grpc.stub.AbstractStub.StubFactory<KeywordPlanIdeaServiceBlockingStub> factory =
new io.grpc.stub.AbstractStub.StubFactory<KeywordPlanIdeaServiceBlockingStub>() {
@java.lang.Override
public KeywordPlanIdeaServiceBlockingStub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
return new KeywordPlanIdeaServiceBlockingStub(channel, callOptions);
}
};
return KeywordPlanIdeaServiceBlockingStub.newStub(factory, channel);
}
/**
* Creates a new ListenableFuture-style stub that supports unary calls on the service
*/
public static KeywordPlanIdeaServiceFutureStub newFutureStub(
io.grpc.Channel channel) {
io.grpc.stub.AbstractStub.StubFactory<KeywordPlanIdeaServiceFutureStub> factory =
new io.grpc.stub.AbstractStub.StubFactory<KeywordPlanIdeaServiceFutureStub>() {
@java.lang.Override
public KeywordPlanIdeaServiceFutureStub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
return new KeywordPlanIdeaServiceFutureStub(channel, callOptions);
}
};
return KeywordPlanIdeaServiceFutureStub.newStub(factory, channel);
}
  /**
   * <pre>
   * Service to generate keyword ideas.
   * </pre>
   */
  // Server-side contract: implement these methods (directly or via
  // KeywordPlanIdeaServiceImplBase). Each default responds UNIMPLEMENTED.
  public interface AsyncService {
    /**
     * <pre>
     * Returns a list of keyword ideas.
     * List of thrown errors:
     *   [AuthenticationError]()
     *   [AuthorizationError]()
     *   [CollectionSizeError]()
     *   [HeaderError]()
     *   [InternalError]()
     *   [KeywordPlanIdeaError]()
     *   [QuotaError]()
     *   [RequestError]()
     * </pre>
     */
    default void generateKeywordIdeas(com.google.ads.googleads.v20.services.GenerateKeywordIdeasRequest request,
        io.grpc.stub.StreamObserver<com.google.ads.googleads.v20.services.GenerateKeywordIdeaResponse> responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getGenerateKeywordIdeasMethod(), responseObserver);
    }
    /**
     * <pre>
     * Returns a list of keyword historical metrics.
     * List of thrown errors:
     *   [AuthenticationError]()
     *   [AuthorizationError]()
     *   [CollectionSizeError]()
     *   [HeaderError]()
     *   [InternalError]()
     *   [QuotaError]()
     *   [RequestError]()
     * </pre>
     */
    default void generateKeywordHistoricalMetrics(com.google.ads.googleads.v20.services.GenerateKeywordHistoricalMetricsRequest request,
        io.grpc.stub.StreamObserver<com.google.ads.googleads.v20.services.GenerateKeywordHistoricalMetricsResponse> responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getGenerateKeywordHistoricalMetricsMethod(), responseObserver);
    }
    /**
     * <pre>
     * Returns a list of suggested AdGroups and suggested modifications
     * (text, match type) for the given keywords.
     * List of thrown errors:
     *   [AuthenticationError]()
     *   [AuthorizationError]()
     *   [CollectionSizeError]()
     *   [HeaderError]()
     *   [InternalError]()
     *   [QuotaError]()
     *   [RequestError]()
     * </pre>
     */
    default void generateAdGroupThemes(com.google.ads.googleads.v20.services.GenerateAdGroupThemesRequest request,
        io.grpc.stub.StreamObserver<com.google.ads.googleads.v20.services.GenerateAdGroupThemesResponse> responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getGenerateAdGroupThemesMethod(), responseObserver);
    }
    /**
     * <pre>
     * Returns metrics (such as impressions, clicks, total cost) of a keyword
     * forecast for the given campaign.
     * List of thrown errors:
     *   [AuthenticationError]()
     *   [AuthorizationError]()
     *   [CollectionSizeError]()
     *   [HeaderError]()
     *   [InternalError]()
     *   [QuotaError]()
     *   [RequestError]()
     * </pre>
     */
    default void generateKeywordForecastMetrics(com.google.ads.googleads.v20.services.GenerateKeywordForecastMetricsRequest request,
        io.grpc.stub.StreamObserver<com.google.ads.googleads.v20.services.GenerateKeywordForecastMetricsResponse> responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getGenerateKeywordForecastMetricsMethod(), responseObserver);
    }
  }
  /**
   * Base class for the server implementation of the service KeywordPlanIdeaService.
   * <pre>
   * Service to generate keyword ideas.
   * </pre>
   */
  public static abstract class KeywordPlanIdeaServiceImplBase
      implements io.grpc.BindableService, AsyncService {
    // Delegates to the static bindService so subclasses only override RPC methods.
    @java.lang.Override public final io.grpc.ServerServiceDefinition bindService() {
      return KeywordPlanIdeaServiceGrpc.bindService(this);
    }
  }
  /**
   * A stub to allow clients to do asynchronous rpc calls to service KeywordPlanIdeaService.
   * <pre>
   * Service to generate keyword ideas.
   * </pre>
   */
  public static final class KeywordPlanIdeaServiceStub
      extends io.grpc.stub.AbstractAsyncStub<KeywordPlanIdeaServiceStub> {
    // Private: instances are created through newStub(channel).
    private KeywordPlanIdeaServiceStub(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }
    @java.lang.Override
    protected KeywordPlanIdeaServiceStub build(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new KeywordPlanIdeaServiceStub(channel, callOptions);
    }
    /**
     * <pre>
     * Returns a list of keyword ideas.
     * List of thrown errors:
     *   [AuthenticationError]()
     *   [AuthorizationError]()
     *   [CollectionSizeError]()
     *   [HeaderError]()
     *   [InternalError]()
     *   [KeywordPlanIdeaError]()
     *   [QuotaError]()
     *   [RequestError]()
     * </pre>
     */
    public void generateKeywordIdeas(com.google.ads.googleads.v20.services.GenerateKeywordIdeasRequest request,
        io.grpc.stub.StreamObserver<com.google.ads.googleads.v20.services.GenerateKeywordIdeaResponse> responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getGenerateKeywordIdeasMethod(), getCallOptions()), request, responseObserver);
    }
    /**
     * <pre>
     * Returns a list of keyword historical metrics.
     * List of thrown errors:
     *   [AuthenticationError]()
     *   [AuthorizationError]()
     *   [CollectionSizeError]()
     *   [HeaderError]()
     *   [InternalError]()
     *   [QuotaError]()
     *   [RequestError]()
     * </pre>
     */
    public void generateKeywordHistoricalMetrics(com.google.ads.googleads.v20.services.GenerateKeywordHistoricalMetricsRequest request,
        io.grpc.stub.StreamObserver<com.google.ads.googleads.v20.services.GenerateKeywordHistoricalMetricsResponse> responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getGenerateKeywordHistoricalMetricsMethod(), getCallOptions()), request, responseObserver);
    }
    /**
     * <pre>
     * Returns a list of suggested AdGroups and suggested modifications
     * (text, match type) for the given keywords.
     * List of thrown errors:
     *   [AuthenticationError]()
     *   [AuthorizationError]()
     *   [CollectionSizeError]()
     *   [HeaderError]()
     *   [InternalError]()
     *   [QuotaError]()
     *   [RequestError]()
     * </pre>
     */
    public void generateAdGroupThemes(com.google.ads.googleads.v20.services.GenerateAdGroupThemesRequest request,
        io.grpc.stub.StreamObserver<com.google.ads.googleads.v20.services.GenerateAdGroupThemesResponse> responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getGenerateAdGroupThemesMethod(), getCallOptions()), request, responseObserver);
    }
    /**
     * <pre>
     * Returns metrics (such as impressions, clicks, total cost) of a keyword
     * forecast for the given campaign.
     * List of thrown errors:
     *   [AuthenticationError]()
     *   [AuthorizationError]()
     *   [CollectionSizeError]()
     *   [HeaderError]()
     *   [InternalError]()
     *   [QuotaError]()
     *   [RequestError]()
     * </pre>
     */
    public void generateKeywordForecastMetrics(com.google.ads.googleads.v20.services.GenerateKeywordForecastMetricsRequest request,
        io.grpc.stub.StreamObserver<com.google.ads.googleads.v20.services.GenerateKeywordForecastMetricsResponse> responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getGenerateKeywordForecastMetricsMethod(), getCallOptions()), request, responseObserver);
    }
  }
  /**
   * A stub to allow clients to do synchronous rpc calls to service KeywordPlanIdeaService.
   * <pre>
   * Service to generate keyword ideas.
   * </pre>
   */
  public static final class KeywordPlanIdeaServiceBlockingV2Stub
      extends io.grpc.stub.AbstractBlockingStub<KeywordPlanIdeaServiceBlockingV2Stub> {
    // Private: instances are created through newBlockingV2Stub(channel).
    private KeywordPlanIdeaServiceBlockingV2Stub(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }
    @java.lang.Override
    protected KeywordPlanIdeaServiceBlockingV2Stub build(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new KeywordPlanIdeaServiceBlockingV2Stub(channel, callOptions);
    }
    /**
     * <pre>
     * Returns a list of keyword ideas.
     * List of thrown errors:
     *   [AuthenticationError]()
     *   [AuthorizationError]()
     *   [CollectionSizeError]()
     *   [HeaderError]()
     *   [InternalError]()
     *   [KeywordPlanIdeaError]()
     *   [QuotaError]()
     *   [RequestError]()
     * </pre>
     */
    public com.google.ads.googleads.v20.services.GenerateKeywordIdeaResponse generateKeywordIdeas(com.google.ads.googleads.v20.services.GenerateKeywordIdeasRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getGenerateKeywordIdeasMethod(), getCallOptions(), request);
    }
    /**
     * <pre>
     * Returns a list of keyword historical metrics.
     * List of thrown errors:
     *   [AuthenticationError]()
     *   [AuthorizationError]()
     *   [CollectionSizeError]()
     *   [HeaderError]()
     *   [InternalError]()
     *   [QuotaError]()
     *   [RequestError]()
     * </pre>
     */
    public com.google.ads.googleads.v20.services.GenerateKeywordHistoricalMetricsResponse generateKeywordHistoricalMetrics(com.google.ads.googleads.v20.services.GenerateKeywordHistoricalMetricsRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getGenerateKeywordHistoricalMetricsMethod(), getCallOptions(), request);
    }
    /**
     * <pre>
     * Returns a list of suggested AdGroups and suggested modifications
     * (text, match type) for the given keywords.
     * List of thrown errors:
     *   [AuthenticationError]()
     *   [AuthorizationError]()
     *   [CollectionSizeError]()
     *   [HeaderError]()
     *   [InternalError]()
     *   [QuotaError]()
     *   [RequestError]()
     * </pre>
     */
    public com.google.ads.googleads.v20.services.GenerateAdGroupThemesResponse generateAdGroupThemes(com.google.ads.googleads.v20.services.GenerateAdGroupThemesRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getGenerateAdGroupThemesMethod(), getCallOptions(), request);
    }
    /**
     * <pre>
     * Returns metrics (such as impressions, clicks, total cost) of a keyword
     * forecast for the given campaign.
     * List of thrown errors:
     *   [AuthenticationError]()
     *   [AuthorizationError]()
     *   [CollectionSizeError]()
     *   [HeaderError]()
     *   [InternalError]()
     *   [QuotaError]()
     *   [RequestError]()
     * </pre>
     */
    public com.google.ads.googleads.v20.services.GenerateKeywordForecastMetricsResponse generateKeywordForecastMetrics(com.google.ads.googleads.v20.services.GenerateKeywordForecastMetricsRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getGenerateKeywordForecastMetricsMethod(), getCallOptions(), request);
    }
  }
  /**
   * A stub to allow clients to do limited synchronous rpc calls to service KeywordPlanIdeaService.
   * <pre>
   * Service to generate keyword ideas.
   * </pre>
   */
  public static final class KeywordPlanIdeaServiceBlockingStub
      extends io.grpc.stub.AbstractBlockingStub<KeywordPlanIdeaServiceBlockingStub> {
    // Private: instances are created through newBlockingStub(channel).
    private KeywordPlanIdeaServiceBlockingStub(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }
    @java.lang.Override
    protected KeywordPlanIdeaServiceBlockingStub build(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new KeywordPlanIdeaServiceBlockingStub(channel, callOptions);
    }
    /**
     * <pre>
     * Returns a list of keyword ideas.
     * List of thrown errors:
     *   [AuthenticationError]()
     *   [AuthorizationError]()
     *   [CollectionSizeError]()
     *   [HeaderError]()
     *   [InternalError]()
     *   [KeywordPlanIdeaError]()
     *   [QuotaError]()
     *   [RequestError]()
     * </pre>
     */
    public com.google.ads.googleads.v20.services.GenerateKeywordIdeaResponse generateKeywordIdeas(com.google.ads.googleads.v20.services.GenerateKeywordIdeasRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getGenerateKeywordIdeasMethod(), getCallOptions(), request);
    }
    /**
     * <pre>
     * Returns a list of keyword historical metrics.
     * List of thrown errors:
     *   [AuthenticationError]()
     *   [AuthorizationError]()
     *   [CollectionSizeError]()
     *   [HeaderError]()
     *   [InternalError]()
     *   [QuotaError]()
     *   [RequestError]()
     * </pre>
     */
    public com.google.ads.googleads.v20.services.GenerateKeywordHistoricalMetricsResponse generateKeywordHistoricalMetrics(com.google.ads.googleads.v20.services.GenerateKeywordHistoricalMetricsRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getGenerateKeywordHistoricalMetricsMethod(), getCallOptions(), request);
    }
    /**
     * <pre>
     * Returns a list of suggested AdGroups and suggested modifications
     * (text, match type) for the given keywords.
     * List of thrown errors:
     *   [AuthenticationError]()
     *   [AuthorizationError]()
     *   [CollectionSizeError]()
     *   [HeaderError]()
     *   [InternalError]()
     *   [QuotaError]()
     *   [RequestError]()
     * </pre>
     */
    public com.google.ads.googleads.v20.services.GenerateAdGroupThemesResponse generateAdGroupThemes(com.google.ads.googleads.v20.services.GenerateAdGroupThemesRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getGenerateAdGroupThemesMethod(), getCallOptions(), request);
    }
    /**
     * <pre>
     * Returns metrics (such as impressions, clicks, total cost) of a keyword
     * forecast for the given campaign.
     * List of thrown errors:
     *   [AuthenticationError]()
     *   [AuthorizationError]()
     *   [CollectionSizeError]()
     *   [HeaderError]()
     *   [InternalError]()
     *   [QuotaError]()
     *   [RequestError]()
     * </pre>
     */
    public com.google.ads.googleads.v20.services.GenerateKeywordForecastMetricsResponse generateKeywordForecastMetrics(com.google.ads.googleads.v20.services.GenerateKeywordForecastMetricsRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getGenerateKeywordForecastMetricsMethod(), getCallOptions(), request);
    }
  }
/**
 * A stub to allow clients to do ListenableFuture-style rpc calls to service KeywordPlanIdeaService.
 * <pre>
 * Service to generate keyword ideas.
 * </pre>
 */
public static final class KeywordPlanIdeaServiceFutureStub
    extends io.grpc.stub.AbstractFutureStub<KeywordPlanIdeaServiceFutureStub> {
  // Instances are created only through newFutureStub(...) / build(...).
  private KeywordPlanIdeaServiceFutureStub(
      io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
    super(channel, callOptions);
  }

  // Called by AbstractStub to derive a stub carrying updated call options.
  @java.lang.Override
  protected KeywordPlanIdeaServiceFutureStub build(
      io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
    return new KeywordPlanIdeaServiceFutureStub(channel, callOptions);
  }

  /**
   * <pre>
   * Returns a list of keyword ideas.
   * List of thrown errors:
   * [AuthenticationError]()
   * [AuthorizationError]()
   * [CollectionSizeError]()
   * [HeaderError]()
   * [InternalError]()
   * [KeywordPlanIdeaError]()
   * [QuotaError]()
   * [RequestError]()
   * </pre>
   */
  public com.google.common.util.concurrent.ListenableFuture<com.google.ads.googleads.v20.services.GenerateKeywordIdeaResponse> generateKeywordIdeas(
      com.google.ads.googleads.v20.services.GenerateKeywordIdeasRequest request) {
    // Asynchronous unary call; the returned future completes with the response.
    return io.grpc.stub.ClientCalls.futureUnaryCall(
        getChannel().newCall(getGenerateKeywordIdeasMethod(), getCallOptions()), request);
  }

  /**
   * <pre>
   * Returns a list of keyword historical metrics.
   * List of thrown errors:
   * [AuthenticationError]()
   * [AuthorizationError]()
   * [CollectionSizeError]()
   * [HeaderError]()
   * [InternalError]()
   * [QuotaError]()
   * [RequestError]()
   * </pre>
   */
  public com.google.common.util.concurrent.ListenableFuture<com.google.ads.googleads.v20.services.GenerateKeywordHistoricalMetricsResponse> generateKeywordHistoricalMetrics(
      com.google.ads.googleads.v20.services.GenerateKeywordHistoricalMetricsRequest request) {
    return io.grpc.stub.ClientCalls.futureUnaryCall(
        getChannel().newCall(getGenerateKeywordHistoricalMetricsMethod(), getCallOptions()), request);
  }

  /**
   * <pre>
   * Returns a list of suggested AdGroups and suggested modifications
   * (text, match type) for the given keywords.
   * List of thrown errors:
   * [AuthenticationError]()
   * [AuthorizationError]()
   * [CollectionSizeError]()
   * [HeaderError]()
   * [InternalError]()
   * [QuotaError]()
   * [RequestError]()
   * </pre>
   */
  public com.google.common.util.concurrent.ListenableFuture<com.google.ads.googleads.v20.services.GenerateAdGroupThemesResponse> generateAdGroupThemes(
      com.google.ads.googleads.v20.services.GenerateAdGroupThemesRequest request) {
    return io.grpc.stub.ClientCalls.futureUnaryCall(
        getChannel().newCall(getGenerateAdGroupThemesMethod(), getCallOptions()), request);
  }

  /**
   * <pre>
   * Returns metrics (such as impressions, clicks, total cost) of a keyword
   * forecast for the given campaign.
   * List of thrown errors:
   * [AuthenticationError]()
   * [AuthorizationError]()
   * [CollectionSizeError]()
   * [HeaderError]()
   * [InternalError]()
   * [QuotaError]()
   * [RequestError]()
   * </pre>
   */
  public com.google.common.util.concurrent.ListenableFuture<com.google.ads.googleads.v20.services.GenerateKeywordForecastMetricsResponse> generateKeywordForecastMetrics(
      com.google.ads.googleads.v20.services.GenerateKeywordForecastMetricsRequest request) {
    return io.grpc.stub.ClientCalls.futureUnaryCall(
        getChannel().newCall(getGenerateKeywordForecastMetricsMethod(), getCallOptions()), request);
  }
}
// Dispatch ids used by MethodHandlers.invoke to route a request to the
// matching AsyncService method; must stay aligned with the switch statement
// in MethodHandlers and the handler registrations in bindService.
private static final int METHODID_GENERATE_KEYWORD_IDEAS = 0;
private static final int METHODID_GENERATE_KEYWORD_HISTORICAL_METRICS = 1;
private static final int METHODID_GENERATE_AD_GROUP_THEMES = 2;
private static final int METHODID_GENERATE_KEYWORD_FORECAST_METRICS = 3;
// Routes server-side calls to the AsyncService implementation. One handler
// instance is registered per RPC method (see bindService), with methodId
// selecting the target method via the METHODID_* constants.
private static final class MethodHandlers<Req, Resp> implements
    io.grpc.stub.ServerCalls.UnaryMethod<Req, Resp>,
    io.grpc.stub.ServerCalls.ServerStreamingMethod<Req, Resp>,
    io.grpc.stub.ServerCalls.ClientStreamingMethod<Req, Resp>,
    io.grpc.stub.ServerCalls.BidiStreamingMethod<Req, Resp> {
  private final AsyncService serviceImpl;
  private final int methodId;

  MethodHandlers(AsyncService serviceImpl, int methodId) {
    this.serviceImpl = serviceImpl;
    this.methodId = methodId;
  }

  @java.lang.Override
  @java.lang.SuppressWarnings("unchecked")
  public void invoke(Req request, io.grpc.stub.StreamObserver<Resp> responseObserver) {
    // Unary dispatch. The unchecked casts are safe because each handler
    // instance is registered only for the method matching its methodId.
    switch (methodId) {
      case METHODID_GENERATE_KEYWORD_IDEAS:
        serviceImpl.generateKeywordIdeas((com.google.ads.googleads.v20.services.GenerateKeywordIdeasRequest) request,
            (io.grpc.stub.StreamObserver<com.google.ads.googleads.v20.services.GenerateKeywordIdeaResponse>) responseObserver);
        break;
      case METHODID_GENERATE_KEYWORD_HISTORICAL_METRICS:
        serviceImpl.generateKeywordHistoricalMetrics((com.google.ads.googleads.v20.services.GenerateKeywordHistoricalMetricsRequest) request,
            (io.grpc.stub.StreamObserver<com.google.ads.googleads.v20.services.GenerateKeywordHistoricalMetricsResponse>) responseObserver);
        break;
      case METHODID_GENERATE_AD_GROUP_THEMES:
        serviceImpl.generateAdGroupThemes((com.google.ads.googleads.v20.services.GenerateAdGroupThemesRequest) request,
            (io.grpc.stub.StreamObserver<com.google.ads.googleads.v20.services.GenerateAdGroupThemesResponse>) responseObserver);
        break;
      case METHODID_GENERATE_KEYWORD_FORECAST_METRICS:
        serviceImpl.generateKeywordForecastMetrics((com.google.ads.googleads.v20.services.GenerateKeywordForecastMetricsRequest) request,
            (io.grpc.stub.StreamObserver<com.google.ads.googleads.v20.services.GenerateKeywordForecastMetricsResponse>) responseObserver);
        break;
      default:
        throw new AssertionError();
    }
  }

  @java.lang.Override
  @java.lang.SuppressWarnings("unchecked")
  public io.grpc.stub.StreamObserver<Req> invoke(
      io.grpc.stub.StreamObserver<Resp> responseObserver) {
    // No client- or bidi-streaming methods are registered for this service,
    // so reaching this overload indicates a wiring bug.
    switch (methodId) {
      default:
        throw new AssertionError();
    }
  }
}
/**
 * Builds the server-side service definition, binding each unary RPC of this
 * service to {@code service} through a {@code MethodHandlers} instance keyed
 * by its METHODID_* constant.
 */
public static final io.grpc.ServerServiceDefinition bindService(AsyncService service) {
  return io.grpc.ServerServiceDefinition.builder(getServiceDescriptor())
      .addMethod(
          getGenerateKeywordIdeasMethod(),
          io.grpc.stub.ServerCalls.asyncUnaryCall(
              new MethodHandlers<
                  com.google.ads.googleads.v20.services.GenerateKeywordIdeasRequest,
                  com.google.ads.googleads.v20.services.GenerateKeywordIdeaResponse>(
                  service, METHODID_GENERATE_KEYWORD_IDEAS)))
      .addMethod(
          getGenerateKeywordHistoricalMetricsMethod(),
          io.grpc.stub.ServerCalls.asyncUnaryCall(
              new MethodHandlers<
                  com.google.ads.googleads.v20.services.GenerateKeywordHistoricalMetricsRequest,
                  com.google.ads.googleads.v20.services.GenerateKeywordHistoricalMetricsResponse>(
                  service, METHODID_GENERATE_KEYWORD_HISTORICAL_METRICS)))
      .addMethod(
          getGenerateAdGroupThemesMethod(),
          io.grpc.stub.ServerCalls.asyncUnaryCall(
              new MethodHandlers<
                  com.google.ads.googleads.v20.services.GenerateAdGroupThemesRequest,
                  com.google.ads.googleads.v20.services.GenerateAdGroupThemesResponse>(
                  service, METHODID_GENERATE_AD_GROUP_THEMES)))
      .addMethod(
          getGenerateKeywordForecastMetricsMethod(),
          io.grpc.stub.ServerCalls.asyncUnaryCall(
              new MethodHandlers<
                  com.google.ads.googleads.v20.services.GenerateKeywordForecastMetricsRequest,
                  com.google.ads.googleads.v20.services.GenerateKeywordForecastMetricsResponse>(
                  service, METHODID_GENERATE_KEYWORD_FORECAST_METRICS)))
      .build();
}
// Shared base for the schema-descriptor suppliers below: exposes the proto
// file descriptor and resolves this service's descriptor from it by name.
private static abstract class KeywordPlanIdeaServiceBaseDescriptorSupplier
    implements io.grpc.protobuf.ProtoFileDescriptorSupplier, io.grpc.protobuf.ProtoServiceDescriptorSupplier {
  KeywordPlanIdeaServiceBaseDescriptorSupplier() {}

  @java.lang.Override
  public com.google.protobuf.Descriptors.FileDescriptor getFileDescriptor() {
    return com.google.ads.googleads.v20.services.KeywordPlanIdeaServiceProto.getDescriptor();
  }

  @java.lang.Override
  public com.google.protobuf.Descriptors.ServiceDescriptor getServiceDescriptor() {
    return getFileDescriptor().findServiceByName("KeywordPlanIdeaService");
  }
}
// File-level schema descriptor supplier; attached to the ServiceDescriptor
// built in getServiceDescriptor().
private static final class KeywordPlanIdeaServiceFileDescriptorSupplier
    extends KeywordPlanIdeaServiceBaseDescriptorSupplier {
  KeywordPlanIdeaServiceFileDescriptorSupplier() {}
}
// Method-level schema descriptor supplier: resolves one method's proto
// descriptor by name; attached to each MethodDescriptor when it is built.
private static final class KeywordPlanIdeaServiceMethodDescriptorSupplier
    extends KeywordPlanIdeaServiceBaseDescriptorSupplier
    implements io.grpc.protobuf.ProtoMethodDescriptorSupplier {
  private final java.lang.String methodName;

  KeywordPlanIdeaServiceMethodDescriptorSupplier(java.lang.String methodName) {
    this.methodName = methodName;
  }

  @java.lang.Override
  public com.google.protobuf.Descriptors.MethodDescriptor getMethodDescriptor() {
    return getServiceDescriptor().findMethodByName(methodName);
  }
}
// Lazily built descriptor for the whole service, cached for the lifetime of
// the class; initialized with double-checked locking on this volatile field.
private static volatile io.grpc.ServiceDescriptor serviceDescriptor;

public static io.grpc.ServiceDescriptor getServiceDescriptor() {
  io.grpc.ServiceDescriptor result = serviceDescriptor;
  if (result == null) {
    synchronized (KeywordPlanIdeaServiceGrpc.class) {
      result = serviceDescriptor;
      if (result == null) {
        // First caller builds the descriptor and registers all four unary
        // methods plus the file-level schema descriptor supplier.
        serviceDescriptor = result = io.grpc.ServiceDescriptor.newBuilder(SERVICE_NAME)
            .setSchemaDescriptor(new KeywordPlanIdeaServiceFileDescriptorSupplier())
            .addMethod(getGenerateKeywordIdeasMethod())
            .addMethod(getGenerateKeywordHistoricalMetricsMethod())
            .addMethod(getGenerateAdGroupThemesMethod())
            .addMethod(getGenerateKeywordForecastMetricsMethod())
            .build();
      }
    }
  }
  return result;
}
}
|
googleads/google-ads-java | 38,327 | google-ads-stubs-v21/src/main/java/com/google/ads/googleads/v21/services/KeywordPlanIdeaServiceGrpc.java | package com.google.ads.googleads.v21.services;
import static io.grpc.MethodDescriptor.generateFullMethodName;
/**
* <pre>
* Service to generate keyword ideas.
* </pre>
*/
@javax.annotation.Generated(
value = "by gRPC proto compiler",
comments = "Source: google/ads/googleads/v21/services/keyword_plan_idea_service.proto")
@io.grpc.stub.annotations.GrpcGenerated
public final class KeywordPlanIdeaServiceGrpc {
// Static holder of stubs and descriptors; never instantiated.
private KeywordPlanIdeaServiceGrpc() {}

// Fully-qualified service name as declared in the .proto file.
public static final java.lang.String SERVICE_NAME = "google.ads.googleads.v21.services.KeywordPlanIdeaService";
// Static method descriptors that strictly reflect the proto.
// Each descriptor is created lazily with double-checked locking on its
// volatile field and then cached for the lifetime of the class.
private static volatile io.grpc.MethodDescriptor<com.google.ads.googleads.v21.services.GenerateKeywordIdeasRequest,
    com.google.ads.googleads.v21.services.GenerateKeywordIdeaResponse> getGenerateKeywordIdeasMethod;

@io.grpc.stub.annotations.RpcMethod(
    fullMethodName = SERVICE_NAME + '/' + "GenerateKeywordIdeas",
    requestType = com.google.ads.googleads.v21.services.GenerateKeywordIdeasRequest.class,
    responseType = com.google.ads.googleads.v21.services.GenerateKeywordIdeaResponse.class,
    methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
public static io.grpc.MethodDescriptor<com.google.ads.googleads.v21.services.GenerateKeywordIdeasRequest,
    com.google.ads.googleads.v21.services.GenerateKeywordIdeaResponse> getGenerateKeywordIdeasMethod() {
  io.grpc.MethodDescriptor<com.google.ads.googleads.v21.services.GenerateKeywordIdeasRequest, com.google.ads.googleads.v21.services.GenerateKeywordIdeaResponse> getGenerateKeywordIdeasMethod;
  if ((getGenerateKeywordIdeasMethod = KeywordPlanIdeaServiceGrpc.getGenerateKeywordIdeasMethod) == null) {
    synchronized (KeywordPlanIdeaServiceGrpc.class) {
      if ((getGenerateKeywordIdeasMethod = KeywordPlanIdeaServiceGrpc.getGenerateKeywordIdeasMethod) == null) {
        // Build once: unary method with protobuf marshallers and a
        // per-method schema descriptor supplier.
        KeywordPlanIdeaServiceGrpc.getGenerateKeywordIdeasMethod = getGenerateKeywordIdeasMethod =
            io.grpc.MethodDescriptor.<com.google.ads.googleads.v21.services.GenerateKeywordIdeasRequest, com.google.ads.googleads.v21.services.GenerateKeywordIdeaResponse>newBuilder()
                .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                .setFullMethodName(generateFullMethodName(SERVICE_NAME, "GenerateKeywordIdeas"))
                .setSampledToLocalTracing(true)
                .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(
                    com.google.ads.googleads.v21.services.GenerateKeywordIdeasRequest.getDefaultInstance()))
                .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(
                    com.google.ads.googleads.v21.services.GenerateKeywordIdeaResponse.getDefaultInstance()))
                .setSchemaDescriptor(new KeywordPlanIdeaServiceMethodDescriptorSupplier("GenerateKeywordIdeas"))
                .build();
      }
    }
  }
  return getGenerateKeywordIdeasMethod;
}
// Lazily built descriptor for GenerateKeywordHistoricalMetrics; see the
// double-checked-locking pattern in the getter below.
private static volatile io.grpc.MethodDescriptor<com.google.ads.googleads.v21.services.GenerateKeywordHistoricalMetricsRequest,
    com.google.ads.googleads.v21.services.GenerateKeywordHistoricalMetricsResponse> getGenerateKeywordHistoricalMetricsMethod;

@io.grpc.stub.annotations.RpcMethod(
    fullMethodName = SERVICE_NAME + '/' + "GenerateKeywordHistoricalMetrics",
    requestType = com.google.ads.googleads.v21.services.GenerateKeywordHistoricalMetricsRequest.class,
    responseType = com.google.ads.googleads.v21.services.GenerateKeywordHistoricalMetricsResponse.class,
    methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
public static io.grpc.MethodDescriptor<com.google.ads.googleads.v21.services.GenerateKeywordHistoricalMetricsRequest,
    com.google.ads.googleads.v21.services.GenerateKeywordHistoricalMetricsResponse> getGenerateKeywordHistoricalMetricsMethod() {
  io.grpc.MethodDescriptor<com.google.ads.googleads.v21.services.GenerateKeywordHistoricalMetricsRequest, com.google.ads.googleads.v21.services.GenerateKeywordHistoricalMetricsResponse> getGenerateKeywordHistoricalMetricsMethod;
  if ((getGenerateKeywordHistoricalMetricsMethod = KeywordPlanIdeaServiceGrpc.getGenerateKeywordHistoricalMetricsMethod) == null) {
    synchronized (KeywordPlanIdeaServiceGrpc.class) {
      if ((getGenerateKeywordHistoricalMetricsMethod = KeywordPlanIdeaServiceGrpc.getGenerateKeywordHistoricalMetricsMethod) == null) {
        KeywordPlanIdeaServiceGrpc.getGenerateKeywordHistoricalMetricsMethod = getGenerateKeywordHistoricalMetricsMethod =
            io.grpc.MethodDescriptor.<com.google.ads.googleads.v21.services.GenerateKeywordHistoricalMetricsRequest, com.google.ads.googleads.v21.services.GenerateKeywordHistoricalMetricsResponse>newBuilder()
                .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                .setFullMethodName(generateFullMethodName(SERVICE_NAME, "GenerateKeywordHistoricalMetrics"))
                .setSampledToLocalTracing(true)
                .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(
                    com.google.ads.googleads.v21.services.GenerateKeywordHistoricalMetricsRequest.getDefaultInstance()))
                .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(
                    com.google.ads.googleads.v21.services.GenerateKeywordHistoricalMetricsResponse.getDefaultInstance()))
                .setSchemaDescriptor(new KeywordPlanIdeaServiceMethodDescriptorSupplier("GenerateKeywordHistoricalMetrics"))
                .build();
      }
    }
  }
  return getGenerateKeywordHistoricalMetricsMethod;
}
// Lazily built descriptor for GenerateAdGroupThemes; double-checked locking
// as in the other descriptor getters.
private static volatile io.grpc.MethodDescriptor<com.google.ads.googleads.v21.services.GenerateAdGroupThemesRequest,
    com.google.ads.googleads.v21.services.GenerateAdGroupThemesResponse> getGenerateAdGroupThemesMethod;

@io.grpc.stub.annotations.RpcMethod(
    fullMethodName = SERVICE_NAME + '/' + "GenerateAdGroupThemes",
    requestType = com.google.ads.googleads.v21.services.GenerateAdGroupThemesRequest.class,
    responseType = com.google.ads.googleads.v21.services.GenerateAdGroupThemesResponse.class,
    methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
public static io.grpc.MethodDescriptor<com.google.ads.googleads.v21.services.GenerateAdGroupThemesRequest,
    com.google.ads.googleads.v21.services.GenerateAdGroupThemesResponse> getGenerateAdGroupThemesMethod() {
  io.grpc.MethodDescriptor<com.google.ads.googleads.v21.services.GenerateAdGroupThemesRequest, com.google.ads.googleads.v21.services.GenerateAdGroupThemesResponse> getGenerateAdGroupThemesMethod;
  if ((getGenerateAdGroupThemesMethod = KeywordPlanIdeaServiceGrpc.getGenerateAdGroupThemesMethod) == null) {
    synchronized (KeywordPlanIdeaServiceGrpc.class) {
      if ((getGenerateAdGroupThemesMethod = KeywordPlanIdeaServiceGrpc.getGenerateAdGroupThemesMethod) == null) {
        KeywordPlanIdeaServiceGrpc.getGenerateAdGroupThemesMethod = getGenerateAdGroupThemesMethod =
            io.grpc.MethodDescriptor.<com.google.ads.googleads.v21.services.GenerateAdGroupThemesRequest, com.google.ads.googleads.v21.services.GenerateAdGroupThemesResponse>newBuilder()
                .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                .setFullMethodName(generateFullMethodName(SERVICE_NAME, "GenerateAdGroupThemes"))
                .setSampledToLocalTracing(true)
                .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(
                    com.google.ads.googleads.v21.services.GenerateAdGroupThemesRequest.getDefaultInstance()))
                .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(
                    com.google.ads.googleads.v21.services.GenerateAdGroupThemesResponse.getDefaultInstance()))
                .setSchemaDescriptor(new KeywordPlanIdeaServiceMethodDescriptorSupplier("GenerateAdGroupThemes"))
                .build();
      }
    }
  }
  return getGenerateAdGroupThemesMethod;
}
// Lazily built descriptor for GenerateKeywordForecastMetrics; double-checked
// locking as in the other descriptor getters.
private static volatile io.grpc.MethodDescriptor<com.google.ads.googleads.v21.services.GenerateKeywordForecastMetricsRequest,
    com.google.ads.googleads.v21.services.GenerateKeywordForecastMetricsResponse> getGenerateKeywordForecastMetricsMethod;

@io.grpc.stub.annotations.RpcMethod(
    fullMethodName = SERVICE_NAME + '/' + "GenerateKeywordForecastMetrics",
    requestType = com.google.ads.googleads.v21.services.GenerateKeywordForecastMetricsRequest.class,
    responseType = com.google.ads.googleads.v21.services.GenerateKeywordForecastMetricsResponse.class,
    methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
public static io.grpc.MethodDescriptor<com.google.ads.googleads.v21.services.GenerateKeywordForecastMetricsRequest,
    com.google.ads.googleads.v21.services.GenerateKeywordForecastMetricsResponse> getGenerateKeywordForecastMetricsMethod() {
  io.grpc.MethodDescriptor<com.google.ads.googleads.v21.services.GenerateKeywordForecastMetricsRequest, com.google.ads.googleads.v21.services.GenerateKeywordForecastMetricsResponse> getGenerateKeywordForecastMetricsMethod;
  if ((getGenerateKeywordForecastMetricsMethod = KeywordPlanIdeaServiceGrpc.getGenerateKeywordForecastMetricsMethod) == null) {
    synchronized (KeywordPlanIdeaServiceGrpc.class) {
      if ((getGenerateKeywordForecastMetricsMethod = KeywordPlanIdeaServiceGrpc.getGenerateKeywordForecastMetricsMethod) == null) {
        KeywordPlanIdeaServiceGrpc.getGenerateKeywordForecastMetricsMethod = getGenerateKeywordForecastMetricsMethod =
            io.grpc.MethodDescriptor.<com.google.ads.googleads.v21.services.GenerateKeywordForecastMetricsRequest, com.google.ads.googleads.v21.services.GenerateKeywordForecastMetricsResponse>newBuilder()
                .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                .setFullMethodName(generateFullMethodName(SERVICE_NAME, "GenerateKeywordForecastMetrics"))
                .setSampledToLocalTracing(true)
                .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(
                    com.google.ads.googleads.v21.services.GenerateKeywordForecastMetricsRequest.getDefaultInstance()))
                .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(
                    com.google.ads.googleads.v21.services.GenerateKeywordForecastMetricsResponse.getDefaultInstance()))
                .setSchemaDescriptor(new KeywordPlanIdeaServiceMethodDescriptorSupplier("GenerateKeywordForecastMetrics"))
                .build();
      }
    }
  }
  return getGenerateKeywordForecastMetricsMethod;
}
/**
 * Creates a new async stub for KeywordPlanIdeaService that supports all call
 * types on the given channel.
 */
public static KeywordPlanIdeaServiceStub newStub(io.grpc.Channel channel) {
  // StubFactory has a single abstract method, so a lambda replaces the
  // anonymous class; the explicitly typed local fixes the lambda's target type.
  io.grpc.stub.AbstractStub.StubFactory<KeywordPlanIdeaServiceStub> factory =
      (ch, opts) -> new KeywordPlanIdeaServiceStub(ch, opts);
  return KeywordPlanIdeaServiceStub.newStub(factory, channel);
}
/**
 * Creates a new blocking-style stub for KeywordPlanIdeaService that supports
 * all types of calls on the given channel.
 */
public static KeywordPlanIdeaServiceBlockingV2Stub newBlockingV2Stub(
    io.grpc.Channel channel) {
  // Lambda form of the single-method StubFactory; behavior is unchanged.
  io.grpc.stub.AbstractStub.StubFactory<KeywordPlanIdeaServiceBlockingV2Stub> factory =
      (ch, opts) -> new KeywordPlanIdeaServiceBlockingV2Stub(ch, opts);
  return KeywordPlanIdeaServiceBlockingV2Stub.newStub(factory, channel);
}
/**
 * Creates a new blocking-style stub for KeywordPlanIdeaService that supports
 * unary and streaming-output calls on the given channel.
 */
public static KeywordPlanIdeaServiceBlockingStub newBlockingStub(
    io.grpc.Channel channel) {
  // Lambda form of the single-method StubFactory; behavior is unchanged.
  io.grpc.stub.AbstractStub.StubFactory<KeywordPlanIdeaServiceBlockingStub> factory =
      (ch, opts) -> new KeywordPlanIdeaServiceBlockingStub(ch, opts);
  return KeywordPlanIdeaServiceBlockingStub.newStub(factory, channel);
}
/**
 * Creates a new ListenableFuture-style stub for KeywordPlanIdeaService that
 * supports unary calls on the given channel.
 */
public static KeywordPlanIdeaServiceFutureStub newFutureStub(
    io.grpc.Channel channel) {
  // Lambda form of the single-method StubFactory; behavior is unchanged.
  io.grpc.stub.AbstractStub.StubFactory<KeywordPlanIdeaServiceFutureStub> factory =
      (ch, opts) -> new KeywordPlanIdeaServiceFutureStub(ch, opts);
  return KeywordPlanIdeaServiceFutureStub.newStub(factory, channel);
}
/**
 * <pre>
 * Service to generate keyword ideas.
 * </pre>
 */
public interface AsyncService {
  // Each default implementation reports the call as unimplemented via
  // ServerCalls.asyncUnimplementedUnaryCall; implementations override only
  // the methods they support.

  /**
   * <pre>
   * Returns a list of keyword ideas.
   * List of thrown errors:
   * [AuthenticationError]()
   * [AuthorizationError]()
   * [CollectionSizeError]()
   * [HeaderError]()
   * [InternalError]()
   * [KeywordPlanIdeaError]()
   * [QuotaError]()
   * [RequestError]()
   * </pre>
   */
  default void generateKeywordIdeas(com.google.ads.googleads.v21.services.GenerateKeywordIdeasRequest request,
      io.grpc.stub.StreamObserver<com.google.ads.googleads.v21.services.GenerateKeywordIdeaResponse> responseObserver) {
    io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getGenerateKeywordIdeasMethod(), responseObserver);
  }

  /**
   * <pre>
   * Returns a list of keyword historical metrics.
   * List of thrown errors:
   * [AuthenticationError]()
   * [AuthorizationError]()
   * [CollectionSizeError]()
   * [HeaderError]()
   * [InternalError]()
   * [QuotaError]()
   * [RequestError]()
   * </pre>
   */
  default void generateKeywordHistoricalMetrics(com.google.ads.googleads.v21.services.GenerateKeywordHistoricalMetricsRequest request,
      io.grpc.stub.StreamObserver<com.google.ads.googleads.v21.services.GenerateKeywordHistoricalMetricsResponse> responseObserver) {
    io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getGenerateKeywordHistoricalMetricsMethod(), responseObserver);
  }

  /**
   * <pre>
   * Returns a list of suggested AdGroups and suggested modifications
   * (text, match type) for the given keywords.
   * List of thrown errors:
   * [AuthenticationError]()
   * [AuthorizationError]()
   * [CollectionSizeError]()
   * [HeaderError]()
   * [InternalError]()
   * [QuotaError]()
   * [RequestError]()
   * </pre>
   */
  default void generateAdGroupThemes(com.google.ads.googleads.v21.services.GenerateAdGroupThemesRequest request,
      io.grpc.stub.StreamObserver<com.google.ads.googleads.v21.services.GenerateAdGroupThemesResponse> responseObserver) {
    io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getGenerateAdGroupThemesMethod(), responseObserver);
  }

  /**
   * <pre>
   * Returns metrics (such as impressions, clicks, total cost) of a keyword
   * forecast for the given campaign.
   * List of thrown errors:
   * [AuthenticationError]()
   * [AuthorizationError]()
   * [CollectionSizeError]()
   * [HeaderError]()
   * [InternalError]()
   * [QuotaError]()
   * [RequestError]()
   * </pre>
   */
  default void generateKeywordForecastMetrics(com.google.ads.googleads.v21.services.GenerateKeywordForecastMetricsRequest request,
      io.grpc.stub.StreamObserver<com.google.ads.googleads.v21.services.GenerateKeywordForecastMetricsResponse> responseObserver) {
    io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getGenerateKeywordForecastMetricsMethod(), responseObserver);
  }
}
/**
 * Base class for the server implementation of the service KeywordPlanIdeaService.
 * <pre>
 * Service to generate keyword ideas.
 * </pre>
 */
public static abstract class KeywordPlanIdeaServiceImplBase
    implements io.grpc.BindableService, AsyncService {

  // Binds this implementation through the shared static bindService helper.
  @java.lang.Override public final io.grpc.ServerServiceDefinition bindService() {
    return KeywordPlanIdeaServiceGrpc.bindService(this);
  }
}
/**
 * A stub to allow clients to do asynchronous rpc calls to service KeywordPlanIdeaService.
 * <pre>
 * Service to generate keyword ideas.
 * </pre>
 */
public static final class KeywordPlanIdeaServiceStub
    extends io.grpc.stub.AbstractAsyncStub<KeywordPlanIdeaServiceStub> {
  // Instances are created only through newStub(...) / build(...).
  private KeywordPlanIdeaServiceStub(
      io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
    super(channel, callOptions);
  }

  // Called by AbstractStub to derive a stub carrying updated call options.
  @java.lang.Override
  protected KeywordPlanIdeaServiceStub build(
      io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
    return new KeywordPlanIdeaServiceStub(channel, callOptions);
  }

  /**
   * <pre>
   * Returns a list of keyword ideas.
   * List of thrown errors:
   * [AuthenticationError]()
   * [AuthorizationError]()
   * [CollectionSizeError]()
   * [HeaderError]()
   * [InternalError]()
   * [KeywordPlanIdeaError]()
   * [QuotaError]()
   * [RequestError]()
   * </pre>
   */
  public void generateKeywordIdeas(com.google.ads.googleads.v21.services.GenerateKeywordIdeasRequest request,
      io.grpc.stub.StreamObserver<com.google.ads.googleads.v21.services.GenerateKeywordIdeaResponse> responseObserver) {
    // Asynchronous unary call; the response is delivered to responseObserver.
    io.grpc.stub.ClientCalls.asyncUnaryCall(
        getChannel().newCall(getGenerateKeywordIdeasMethod(), getCallOptions()), request, responseObserver);
  }

  /**
   * <pre>
   * Returns a list of keyword historical metrics.
   * List of thrown errors:
   * [AuthenticationError]()
   * [AuthorizationError]()
   * [CollectionSizeError]()
   * [HeaderError]()
   * [InternalError]()
   * [QuotaError]()
   * [RequestError]()
   * </pre>
   */
  public void generateKeywordHistoricalMetrics(com.google.ads.googleads.v21.services.GenerateKeywordHistoricalMetricsRequest request,
      io.grpc.stub.StreamObserver<com.google.ads.googleads.v21.services.GenerateKeywordHistoricalMetricsResponse> responseObserver) {
    io.grpc.stub.ClientCalls.asyncUnaryCall(
        getChannel().newCall(getGenerateKeywordHistoricalMetricsMethod(), getCallOptions()), request, responseObserver);
  }

  /**
   * <pre>
   * Returns a list of suggested AdGroups and suggested modifications
   * (text, match type) for the given keywords.
   * List of thrown errors:
   * [AuthenticationError]()
   * [AuthorizationError]()
   * [CollectionSizeError]()
   * [HeaderError]()
   * [InternalError]()
   * [QuotaError]()
   * [RequestError]()
   * </pre>
   */
  public void generateAdGroupThemes(com.google.ads.googleads.v21.services.GenerateAdGroupThemesRequest request,
      io.grpc.stub.StreamObserver<com.google.ads.googleads.v21.services.GenerateAdGroupThemesResponse> responseObserver) {
    io.grpc.stub.ClientCalls.asyncUnaryCall(
        getChannel().newCall(getGenerateAdGroupThemesMethod(), getCallOptions()), request, responseObserver);
  }

  /**
   * <pre>
   * Returns metrics (such as impressions, clicks, total cost) of a keyword
   * forecast for the given campaign.
   * List of thrown errors:
   * [AuthenticationError]()
   * [AuthorizationError]()
   * [CollectionSizeError]()
   * [HeaderError]()
   * [InternalError]()
   * [QuotaError]()
   * [RequestError]()
   * </pre>
   */
  public void generateKeywordForecastMetrics(com.google.ads.googleads.v21.services.GenerateKeywordForecastMetricsRequest request,
      io.grpc.stub.StreamObserver<com.google.ads.googleads.v21.services.GenerateKeywordForecastMetricsResponse> responseObserver) {
    io.grpc.stub.ClientCalls.asyncUnaryCall(
        getChannel().newCall(getGenerateKeywordForecastMetricsMethod(), getCallOptions()), request, responseObserver);
  }
}
/**
 * A stub to allow clients to do synchronous rpc calls to service KeywordPlanIdeaService.
 * <pre>
 * Service to generate keyword ideas.
 * </pre>
 */
public static final class KeywordPlanIdeaServiceBlockingV2Stub
    extends io.grpc.stub.AbstractBlockingStub<KeywordPlanIdeaServiceBlockingV2Stub> {
  // Instances are created only through newBlockingV2Stub(...) / build(...).
  private KeywordPlanIdeaServiceBlockingV2Stub(
      io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
    super(channel, callOptions);
  }

  // Called by AbstractStub to derive a stub carrying updated call options.
  @java.lang.Override
  protected KeywordPlanIdeaServiceBlockingV2Stub build(
      io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
    return new KeywordPlanIdeaServiceBlockingV2Stub(channel, callOptions);
  }

  /**
   * <pre>
   * Returns a list of keyword ideas.
   * List of thrown errors:
   * [AuthenticationError]()
   * [AuthorizationError]()
   * [CollectionSizeError]()
   * [HeaderError]()
   * [InternalError]()
   * [KeywordPlanIdeaError]()
   * [QuotaError]()
   * [RequestError]()
   * </pre>
   */
  public com.google.ads.googleads.v21.services.GenerateKeywordIdeaResponse generateKeywordIdeas(com.google.ads.googleads.v21.services.GenerateKeywordIdeasRequest request) {
    // Blocking unary RPC; waits for the single response.
    return io.grpc.stub.ClientCalls.blockingUnaryCall(
        getChannel(), getGenerateKeywordIdeasMethod(), getCallOptions(), request);
  }

  /**
   * <pre>
   * Returns a list of keyword historical metrics.
   * List of thrown errors:
   * [AuthenticationError]()
   * [AuthorizationError]()
   * [CollectionSizeError]()
   * [HeaderError]()
   * [InternalError]()
   * [QuotaError]()
   * [RequestError]()
   * </pre>
   */
  public com.google.ads.googleads.v21.services.GenerateKeywordHistoricalMetricsResponse generateKeywordHistoricalMetrics(com.google.ads.googleads.v21.services.GenerateKeywordHistoricalMetricsRequest request) {
    return io.grpc.stub.ClientCalls.blockingUnaryCall(
        getChannel(), getGenerateKeywordHistoricalMetricsMethod(), getCallOptions(), request);
  }

  /**
   * <pre>
   * Returns a list of suggested AdGroups and suggested modifications
   * (text, match type) for the given keywords.
   * List of thrown errors:
   * [AuthenticationError]()
   * [AuthorizationError]()
   * [CollectionSizeError]()
   * [HeaderError]()
   * [InternalError]()
   * [QuotaError]()
   * [RequestError]()
   * </pre>
   */
  public com.google.ads.googleads.v21.services.GenerateAdGroupThemesResponse generateAdGroupThemes(com.google.ads.googleads.v21.services.GenerateAdGroupThemesRequest request) {
    return io.grpc.stub.ClientCalls.blockingUnaryCall(
        getChannel(), getGenerateAdGroupThemesMethod(), getCallOptions(), request);
  }

  /**
   * <pre>
   * Returns metrics (such as impressions, clicks, total cost) of a keyword
   * forecast for the given campaign.
   * List of thrown errors:
   * [AuthenticationError]()
   * [AuthorizationError]()
   * [CollectionSizeError]()
   * [HeaderError]()
   * [InternalError]()
   * [QuotaError]()
   * [RequestError]()
   * </pre>
   */
  public com.google.ads.googleads.v21.services.GenerateKeywordForecastMetricsResponse generateKeywordForecastMetrics(com.google.ads.googleads.v21.services.GenerateKeywordForecastMetricsRequest request) {
    return io.grpc.stub.ClientCalls.blockingUnaryCall(
        getChannel(), getGenerateKeywordForecastMetricsMethod(), getCallOptions(), request);
  }
}
/**
 * A stub to allow clients to do limited synchronous rpc calls to service KeywordPlanIdeaService.
 * <pre>
 * Service to generate keyword ideas.
 * </pre>
 */
public static final class KeywordPlanIdeaServiceBlockingStub
    extends io.grpc.stub.AbstractBlockingStub<KeywordPlanIdeaServiceBlockingStub> {
  // Instances are created only through newBlockingStub(...) / build(...).
  private KeywordPlanIdeaServiceBlockingStub(
      io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
    super(channel, callOptions);
  }

  // Called by AbstractStub to derive a stub carrying updated call options.
  @java.lang.Override
  protected KeywordPlanIdeaServiceBlockingStub build(
      io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
    return new KeywordPlanIdeaServiceBlockingStub(channel, callOptions);
  }

  /**
   * <pre>
   * Returns a list of keyword ideas.
   * List of thrown errors:
   * [AuthenticationError]()
   * [AuthorizationError]()
   * [CollectionSizeError]()
   * [HeaderError]()
   * [InternalError]()
   * [KeywordPlanIdeaError]()
   * [QuotaError]()
   * [RequestError]()
   * </pre>
   */
  public com.google.ads.googleads.v21.services.GenerateKeywordIdeaResponse generateKeywordIdeas(com.google.ads.googleads.v21.services.GenerateKeywordIdeasRequest request) {
    // Blocking unary RPC; waits for the single response.
    return io.grpc.stub.ClientCalls.blockingUnaryCall(
        getChannel(), getGenerateKeywordIdeasMethod(), getCallOptions(), request);
  }

  /**
   * <pre>
   * Returns a list of keyword historical metrics.
   * List of thrown errors:
   * [AuthenticationError]()
   * [AuthorizationError]()
   * [CollectionSizeError]()
   * [HeaderError]()
   * [InternalError]()
   * [QuotaError]()
   * [RequestError]()
   * </pre>
   */
  public com.google.ads.googleads.v21.services.GenerateKeywordHistoricalMetricsResponse generateKeywordHistoricalMetrics(com.google.ads.googleads.v21.services.GenerateKeywordHistoricalMetricsRequest request) {
    return io.grpc.stub.ClientCalls.blockingUnaryCall(
        getChannel(), getGenerateKeywordHistoricalMetricsMethod(), getCallOptions(), request);
  }

  /**
   * <pre>
   * Returns a list of suggested AdGroups and suggested modifications
   * (text, match type) for the given keywords.
   * List of thrown errors:
   * [AuthenticationError]()
   * [AuthorizationError]()
   * [CollectionSizeError]()
   * [HeaderError]()
   * [InternalError]()
   * [QuotaError]()
   * [RequestError]()
   * </pre>
   */
  public com.google.ads.googleads.v21.services.GenerateAdGroupThemesResponse generateAdGroupThemes(com.google.ads.googleads.v21.services.GenerateAdGroupThemesRequest request) {
    return io.grpc.stub.ClientCalls.blockingUnaryCall(
        getChannel(), getGenerateAdGroupThemesMethod(), getCallOptions(), request);
  }

  /**
   * <pre>
   * Returns metrics (such as impressions, clicks, total cost) of a keyword
   * forecast for the given campaign.
   * List of thrown errors:
   * [AuthenticationError]()
   * [AuthorizationError]()
   * [CollectionSizeError]()
   * [HeaderError]()
   * [InternalError]()
   * [QuotaError]()
   * [RequestError]()
   * </pre>
   */
  public com.google.ads.googleads.v21.services.GenerateKeywordForecastMetricsResponse generateKeywordForecastMetrics(com.google.ads.googleads.v21.services.GenerateKeywordForecastMetricsRequest request) {
    return io.grpc.stub.ClientCalls.blockingUnaryCall(
        getChannel(), getGenerateKeywordForecastMetricsMethod(), getCallOptions(), request);
  }
}
  /**
   * A stub to allow clients to do ListenableFuture-style rpc calls to service KeywordPlanIdeaService.
   * <pre>
   * Service to generate keyword ideas.
   * </pre>
   */
  public static final class KeywordPlanIdeaServiceFutureStub
      extends io.grpc.stub.AbstractFutureStub<KeywordPlanIdeaServiceFutureStub> {
    // Private: callers obtain stubs through the enclosing class's factory methods;
    // reconfiguration (deadline, credentials) goes through build().
    private KeywordPlanIdeaServiceFutureStub(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }
    @java.lang.Override
    protected KeywordPlanIdeaServiceFutureStub build(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new KeywordPlanIdeaServiceFutureStub(channel, callOptions);
    }
    /**
     * <pre>
     * Returns a list of keyword ideas.
     * List of thrown errors:
     * [AuthenticationError]()
     * [AuthorizationError]()
     * [CollectionSizeError]()
     * [HeaderError]()
     * [InternalError]()
     * [KeywordPlanIdeaError]()
     * [QuotaError]()
     * [RequestError]()
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<com.google.ads.googleads.v21.services.GenerateKeywordIdeaResponse> generateKeywordIdeas(
        com.google.ads.googleads.v21.services.GenerateKeywordIdeasRequest request) {
      // Non-blocking: starts the call immediately and exposes the response as a ListenableFuture.
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getGenerateKeywordIdeasMethod(), getCallOptions()), request);
    }
    /**
     * <pre>
     * Returns a list of keyword historical metrics.
     * List of thrown errors:
     * [AuthenticationError]()
     * [AuthorizationError]()
     * [CollectionSizeError]()
     * [HeaderError]()
     * [InternalError]()
     * [QuotaError]()
     * [RequestError]()
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<com.google.ads.googleads.v21.services.GenerateKeywordHistoricalMetricsResponse> generateKeywordHistoricalMetrics(
        com.google.ads.googleads.v21.services.GenerateKeywordHistoricalMetricsRequest request) {
      // Asynchronous unary call; the future completes with the response or the RPC's failure.
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getGenerateKeywordHistoricalMetricsMethod(), getCallOptions()), request);
    }
    /**
     * <pre>
     * Returns a list of suggested AdGroups and suggested modifications
     * (text, match type) for the given keywords.
     * List of thrown errors:
     * [AuthenticationError]()
     * [AuthorizationError]()
     * [CollectionSizeError]()
     * [HeaderError]()
     * [InternalError]()
     * [QuotaError]()
     * [RequestError]()
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<com.google.ads.googleads.v21.services.GenerateAdGroupThemesResponse> generateAdGroupThemes(
        com.google.ads.googleads.v21.services.GenerateAdGroupThemesRequest request) {
      // Asynchronous unary call built on a fresh ClientCall from the channel.
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getGenerateAdGroupThemesMethod(), getCallOptions()), request);
    }
    /**
     * <pre>
     * Returns metrics (such as impressions, clicks, total cost) of a keyword
     * forecast for the given campaign.
     * List of thrown errors:
     * [AuthenticationError]()
     * [AuthorizationError]()
     * [CollectionSizeError]()
     * [HeaderError]()
     * [InternalError]()
     * [QuotaError]()
     * [RequestError]()
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<com.google.ads.googleads.v21.services.GenerateKeywordForecastMetricsResponse> generateKeywordForecastMetrics(
        com.google.ads.googleads.v21.services.GenerateKeywordForecastMetricsRequest request) {
      // Asynchronous unary call; callers attach listeners or block on the returned future.
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getGenerateKeywordForecastMetricsMethod(), getCallOptions()), request);
    }
  }
private static final int METHODID_GENERATE_KEYWORD_IDEAS = 0;
private static final int METHODID_GENERATE_KEYWORD_HISTORICAL_METRICS = 1;
private static final int METHODID_GENERATE_AD_GROUP_THEMES = 2;
private static final int METHODID_GENERATE_KEYWORD_FORECAST_METRICS = 3;
  // Single dispatch shim shared by all four RPCs: the integer methodId selects which
  // AsyncService method a given server call is routed to.
  private static final class MethodHandlers<Req, Resp> implements
      io.grpc.stub.ServerCalls.UnaryMethod<Req, Resp>,
      io.grpc.stub.ServerCalls.ServerStreamingMethod<Req, Resp>,
      io.grpc.stub.ServerCalls.ClientStreamingMethod<Req, Resp>,
      io.grpc.stub.ServerCalls.BidiStreamingMethod<Req, Resp> {
    private final AsyncService serviceImpl;
    private final int methodId;
    MethodHandlers(AsyncService serviceImpl, int methodId) {
      this.serviceImpl = serviceImpl;
      this.methodId = methodId;
    }
    @java.lang.Override
    @java.lang.SuppressWarnings("unchecked")
    public void invoke(Req request, io.grpc.stub.StreamObserver<Resp> responseObserver) {
      // The unchecked casts are safe: bindService() pairs each methodId with the
      // matching request/response types when it constructs a MethodHandlers instance.
      switch (methodId) {
        case METHODID_GENERATE_KEYWORD_IDEAS:
          serviceImpl.generateKeywordIdeas((com.google.ads.googleads.v21.services.GenerateKeywordIdeasRequest) request,
              (io.grpc.stub.StreamObserver<com.google.ads.googleads.v21.services.GenerateKeywordIdeaResponse>) responseObserver);
          break;
        case METHODID_GENERATE_KEYWORD_HISTORICAL_METRICS:
          serviceImpl.generateKeywordHistoricalMetrics((com.google.ads.googleads.v21.services.GenerateKeywordHistoricalMetricsRequest) request,
              (io.grpc.stub.StreamObserver<com.google.ads.googleads.v21.services.GenerateKeywordHistoricalMetricsResponse>) responseObserver);
          break;
        case METHODID_GENERATE_AD_GROUP_THEMES:
          serviceImpl.generateAdGroupThemes((com.google.ads.googleads.v21.services.GenerateAdGroupThemesRequest) request,
              (io.grpc.stub.StreamObserver<com.google.ads.googleads.v21.services.GenerateAdGroupThemesResponse>) responseObserver);
          break;
        case METHODID_GENERATE_KEYWORD_FORECAST_METRICS:
          serviceImpl.generateKeywordForecastMetrics((com.google.ads.googleads.v21.services.GenerateKeywordForecastMetricsRequest) request,
              (io.grpc.stub.StreamObserver<com.google.ads.googleads.v21.services.GenerateKeywordForecastMetricsResponse>) responseObserver);
          break;
        default:
          // Unknown methodId is a programming error in the generated binding code.
          throw new AssertionError();
      }
    }
    @java.lang.Override
    @java.lang.SuppressWarnings("unchecked")
    public io.grpc.stub.StreamObserver<Req> invoke(
        io.grpc.stub.StreamObserver<Resp> responseObserver) {
      // This overload serves client/bidi-streaming methods; this service has none,
      // so every methodId falls through to the error case.
      switch (methodId) {
        default:
          throw new AssertionError();
      }
    }
  }
  // Binds an AsyncService implementation to the service's method descriptors,
  // producing the ServerServiceDefinition that a gRPC server registers. Each RPC is
  // wired to a MethodHandlers instance carrying the matching METHODID_* constant.
  public static final io.grpc.ServerServiceDefinition bindService(AsyncService service) {
    return io.grpc.ServerServiceDefinition.builder(getServiceDescriptor())
        .addMethod(
          getGenerateKeywordIdeasMethod(),
          io.grpc.stub.ServerCalls.asyncUnaryCall(
            new MethodHandlers<
              com.google.ads.googleads.v21.services.GenerateKeywordIdeasRequest,
              com.google.ads.googleads.v21.services.GenerateKeywordIdeaResponse>(
                service, METHODID_GENERATE_KEYWORD_IDEAS)))
        .addMethod(
          getGenerateKeywordHistoricalMetricsMethod(),
          io.grpc.stub.ServerCalls.asyncUnaryCall(
            new MethodHandlers<
              com.google.ads.googleads.v21.services.GenerateKeywordHistoricalMetricsRequest,
              com.google.ads.googleads.v21.services.GenerateKeywordHistoricalMetricsResponse>(
                service, METHODID_GENERATE_KEYWORD_HISTORICAL_METRICS)))
        .addMethod(
          getGenerateAdGroupThemesMethod(),
          io.grpc.stub.ServerCalls.asyncUnaryCall(
            new MethodHandlers<
              com.google.ads.googleads.v21.services.GenerateAdGroupThemesRequest,
              com.google.ads.googleads.v21.services.GenerateAdGroupThemesResponse>(
                service, METHODID_GENERATE_AD_GROUP_THEMES)))
        .addMethod(
          getGenerateKeywordForecastMetricsMethod(),
          io.grpc.stub.ServerCalls.asyncUnaryCall(
            new MethodHandlers<
              com.google.ads.googleads.v21.services.GenerateKeywordForecastMetricsRequest,
              com.google.ads.googleads.v21.services.GenerateKeywordForecastMetricsResponse>(
                service, METHODID_GENERATE_KEYWORD_FORECAST_METRICS)))
        .build();
  }
  // Supplies proto descriptor metadata (file + service) for reflection/debugging tooling.
  private static abstract class KeywordPlanIdeaServiceBaseDescriptorSupplier
      implements io.grpc.protobuf.ProtoFileDescriptorSupplier, io.grpc.protobuf.ProtoServiceDescriptorSupplier {
    KeywordPlanIdeaServiceBaseDescriptorSupplier() {}
    @java.lang.Override
    public com.google.protobuf.Descriptors.FileDescriptor getFileDescriptor() {
      return com.google.ads.googleads.v21.services.KeywordPlanIdeaServiceProto.getDescriptor();
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.ServiceDescriptor getServiceDescriptor() {
      // Looks the service up by its simple name within the proto file's descriptor.
      return getFileDescriptor().findServiceByName("KeywordPlanIdeaService");
    }
  }
  // File-level schema descriptor: attached to the ServiceDescriptor in getServiceDescriptor().
  private static final class KeywordPlanIdeaServiceFileDescriptorSupplier
      extends KeywordPlanIdeaServiceBaseDescriptorSupplier {
    KeywordPlanIdeaServiceFileDescriptorSupplier() {}
  }
  // Per-method schema descriptor: resolves one named RPC method within the service descriptor.
  private static final class KeywordPlanIdeaServiceMethodDescriptorSupplier
      extends KeywordPlanIdeaServiceBaseDescriptorSupplier
      implements io.grpc.protobuf.ProtoMethodDescriptorSupplier {
    private final java.lang.String methodName;
    KeywordPlanIdeaServiceMethodDescriptorSupplier(java.lang.String methodName) {
      this.methodName = methodName;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.MethodDescriptor getMethodDescriptor() {
      return getServiceDescriptor().findMethodByName(methodName);
    }
  }
private static volatile io.grpc.ServiceDescriptor serviceDescriptor;
  // Lazily builds and caches the ServiceDescriptor using double-checked locking on the
  // volatile serviceDescriptor field (declared just above): the first unsynchronized read
  // avoids lock overhead on the hot path; the re-check inside synchronized guarantees
  // the descriptor is constructed at most once.
  public static io.grpc.ServiceDescriptor getServiceDescriptor() {
    io.grpc.ServiceDescriptor result = serviceDescriptor;
    if (result == null) {
      synchronized (KeywordPlanIdeaServiceGrpc.class) {
        result = serviceDescriptor;
        if (result == null) {
          serviceDescriptor = result = io.grpc.ServiceDescriptor.newBuilder(SERVICE_NAME)
              .setSchemaDescriptor(new KeywordPlanIdeaServiceFileDescriptorSupplier())
              .addMethod(getGenerateKeywordIdeasMethod())
              .addMethod(getGenerateKeywordHistoricalMetricsMethod())
              .addMethod(getGenerateAdGroupThemesMethod())
              .addMethod(getGenerateKeywordForecastMetricsMethod())
              .build();
        }
      }
    }
    return result;
  }
}
|
googleapis/google-cloud-java | 37,941 | java-workflows/proto-google-cloud-workflows-v1/src/main/java/com/google/cloud/workflows/v1/CreateWorkflowRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/workflows/v1/workflows.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.workflows.v1;
/**
*
*
* <pre>
* Request for the
* [CreateWorkflow][google.cloud.workflows.v1.Workflows.CreateWorkflow]
* method.
* </pre>
*
* Protobuf type {@code google.cloud.workflows.v1.CreateWorkflowRequest}
*/
public final class CreateWorkflowRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.workflows.v1.CreateWorkflowRequest)
CreateWorkflowRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use CreateWorkflowRequest.newBuilder() to construct.
private CreateWorkflowRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private CreateWorkflowRequest() {
parent_ = "";
workflowId_ = "";
}
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    // Hook used by the protobuf runtime to create fresh instances during parsing.
    return new CreateWorkflowRequest();
  }
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.workflows.v1.WorkflowsProto
.internal_static_google_cloud_workflows_v1_CreateWorkflowRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.workflows.v1.WorkflowsProto
.internal_static_google_cloud_workflows_v1_CreateWorkflowRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.workflows.v1.CreateWorkflowRequest.class,
com.google.cloud.workflows.v1.CreateWorkflowRequest.Builder.class);
}
private int bitField0_;
public static final int PARENT_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. Project and location in which the workflow should be created.
* Format: projects/{project}/locations/{location}
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
@java.lang.Override
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. Project and location in which the workflow should be created.
* Format: projects/{project}/locations/{location}
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
  @java.lang.Override
  public com.google.protobuf.ByteString getParentBytes() {
    // Mirror of getParent(): converts a cached String to UTF-8 bytes on demand and
    // caches the ByteString representation back into the field.
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      parent_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
public static final int WORKFLOW_FIELD_NUMBER = 2;
private com.google.cloud.workflows.v1.Workflow workflow_;
/**
*
*
* <pre>
* Required. Workflow to be created.
* </pre>
*
* <code>
* .google.cloud.workflows.v1.Workflow workflow = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the workflow field is set.
*/
  @java.lang.Override
  public boolean hasWorkflow() {
    // Bit 0 of bitField0_ tracks explicit presence of the workflow message field.
    return ((bitField0_ & 0x00000001) != 0);
  }
/**
*
*
* <pre>
* Required. Workflow to be created.
* </pre>
*
* <code>
* .google.cloud.workflows.v1.Workflow workflow = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The workflow.
*/
@java.lang.Override
public com.google.cloud.workflows.v1.Workflow getWorkflow() {
return workflow_ == null
? com.google.cloud.workflows.v1.Workflow.getDefaultInstance()
: workflow_;
}
/**
*
*
* <pre>
* Required. Workflow to be created.
* </pre>
*
* <code>
* .google.cloud.workflows.v1.Workflow workflow = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.workflows.v1.WorkflowOrBuilder getWorkflowOrBuilder() {
return workflow_ == null
? com.google.cloud.workflows.v1.Workflow.getDefaultInstance()
: workflow_;
}
public static final int WORKFLOW_ID_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object workflowId_ = "";
/**
*
*
* <pre>
* Required. The ID of the workflow to be created. It has to fulfill the
* following requirements:
*
* * Must contain only letters, numbers, underscores and hyphens.
* * Must start with a letter.
* * Must be between 1-64 characters.
* * Must end with a number or a letter.
* * Must be unique within the customer project and location.
* </pre>
*
* <code>string workflow_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The workflowId.
*/
  @java.lang.Override
  public java.lang.String getWorkflowId() {
    // Lazy UTF-8 decode of the wire ByteString, cached back into workflowId_.
    java.lang.Object ref = workflowId_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      workflowId_ = s;
      return s;
    }
  }
/**
*
*
* <pre>
* Required. The ID of the workflow to be created. It has to fulfill the
* following requirements:
*
* * Must contain only letters, numbers, underscores and hyphens.
* * Must start with a letter.
* * Must be between 1-64 characters.
* * Must end with a number or a letter.
* * Must be unique within the customer project and location.
* </pre>
*
* <code>string workflow_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for workflowId.
*/
  @java.lang.Override
  public com.google.protobuf.ByteString getWorkflowIdBytes() {
    // Converts a cached String to its UTF-8 ByteString form on demand and caches it.
    java.lang.Object ref = workflowId_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      workflowId_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Serializes fields in field-number order (1: parent, 2: workflow, 3: workflow_id),
    // skipping empty strings and the unset workflow, then appends unknown fields.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(2, getWorkflow());
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(workflowId_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, workflowId_);
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // Memoized in memoizedSize (-1 = not computed); must mirror writeTo()'s
    // field-by-field emission exactly so sizes match the serialized output.
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getWorkflow());
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(workflowId_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, workflowId_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.workflows.v1.CreateWorkflowRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.workflows.v1.CreateWorkflowRequest other =
        (com.google.cloud.workflows.v1.CreateWorkflowRequest) obj;
    // Field-by-field comparison; the workflow message is compared only when both
    // sides agree it is present. Unknown fields participate in equality too.
    if (!getParent().equals(other.getParent())) return false;
    if (hasWorkflow() != other.hasWorkflow()) return false;
    if (hasWorkflow()) {
      if (!getWorkflow().equals(other.getWorkflow())) return false;
    }
    if (!getWorkflowId().equals(other.getWorkflowId())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // Memoized (0 = not computed). Mixes each set field's number and value so the
    // result is consistent with equals(); the message is immutable, so caching is safe.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    if (hasWorkflow()) {
      hash = (37 * hash) + WORKFLOW_FIELD_NUMBER;
      hash = (53 * hash) + getWorkflow().hashCode();
    }
    hash = (37 * hash) + WORKFLOW_ID_FIELD_NUMBER;
    hash = (53 * hash) + getWorkflowId().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard protobuf parse entry points: one overload per input source
  // (ByteBuffer, ByteString, byte[], InputStream, CodedInputStream), each with and
  // without an ExtensionRegistry. All delegate to the message's PARSER.
  public static com.google.cloud.workflows.v1.CreateWorkflowRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.workflows.v1.CreateWorkflowRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.workflows.v1.CreateWorkflowRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.workflows.v1.CreateWorkflowRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.workflows.v1.CreateWorkflowRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.workflows.v1.CreateWorkflowRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.workflows.v1.CreateWorkflowRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.workflows.v1.CreateWorkflowRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Delimited variants read a varint length prefix first, allowing several messages
  // to be framed back-to-back on one stream.
  public static com.google.cloud.workflows.v1.CreateWorkflowRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.workflows.v1.CreateWorkflowRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.workflows.v1.CreateWorkflowRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.workflows.v1.CreateWorkflowRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  // Fresh builder seeded from the (all-defaults) singleton instance.
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  // Builder pre-populated with the given prototype's field values.
  public static Builder newBuilder(com.google.cloud.workflows.v1.CreateWorkflowRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // The default instance yields an empty builder; any other instance copies its state in.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Request for the
* [CreateWorkflow][google.cloud.workflows.v1.Workflows.CreateWorkflow]
* method.
* </pre>
*
* Protobuf type {@code google.cloud.workflows.v1.CreateWorkflowRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.workflows.v1.CreateWorkflowRequest)
com.google.cloud.workflows.v1.CreateWorkflowRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.workflows.v1.WorkflowsProto
.internal_static_google_cloud_workflows_v1_CreateWorkflowRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.workflows.v1.WorkflowsProto
.internal_static_google_cloud_workflows_v1_CreateWorkflowRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.workflows.v1.CreateWorkflowRequest.class,
com.google.cloud.workflows.v1.CreateWorkflowRequest.Builder.class);
}
// Construct using com.google.cloud.workflows.v1.CreateWorkflowRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getWorkflowFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
parent_ = "";
workflow_ = null;
if (workflowBuilder_ != null) {
workflowBuilder_.dispose();
workflowBuilder_ = null;
}
workflowId_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.workflows.v1.WorkflowsProto
.internal_static_google_cloud_workflows_v1_CreateWorkflowRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.workflows.v1.CreateWorkflowRequest getDefaultInstanceForType() {
return com.google.cloud.workflows.v1.CreateWorkflowRequest.getDefaultInstance();
}
    @java.lang.Override
    public com.google.cloud.workflows.v1.CreateWorkflowRequest build() {
      com.google.cloud.workflows.v1.CreateWorkflowRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.workflows.v1.CreateWorkflowRequest buildPartial() {
      com.google.cloud.workflows.v1.CreateWorkflowRequest result =
          new com.google.cloud.workflows.v1.CreateWorkflowRequest(this);
      // Only copy fields over when at least one builder bit is set.
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    private void buildPartial0(com.google.cloud.workflows.v1.CreateWorkflowRequest result) {
      // Translates builder-side presence bits (0x1 parent, 0x2 workflow, 0x4 workflow_id)
      // into the message's own bitField0_, where only the workflow message (bit 0x1)
      // tracks presence.
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.parent_ = parent_;
      }
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.workflow_ = workflowBuilder_ == null ? workflow_ : workflowBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.workflowId_ = workflowId_;
      }
      result.bitField0_ |= to_bitField0_;
    }
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.workflows.v1.CreateWorkflowRequest) {
return mergeFrom((com.google.cloud.workflows.v1.CreateWorkflowRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
    // Merges another CreateWorkflowRequest into this builder: non-empty strings and a
    // present workflow overwrite/merge into the current state; defaults are ignored.
    public Builder mergeFrom(com.google.cloud.workflows.v1.CreateWorkflowRequest other) {
      if (other == com.google.cloud.workflows.v1.CreateWorkflowRequest.getDefaultInstance())
        return this;
      if (!other.getParent().isEmpty()) {
        parent_ = other.parent_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (other.hasWorkflow()) {
        // Sub-messages merge recursively rather than being replaced wholesale.
        mergeWorkflow(other.getWorkflow());
      }
      if (!other.getWorkflowId().isEmpty()) {
        workflowId_ = other.workflowId_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
@java.lang.Override
public final boolean isInitialized() {
return true;
}
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          // Wire-format tag = (field_number << 3) | wire_type; tag 0 marks end of input.
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                // Field 1 (parent), length-delimited string; must be valid UTF-8.
                parent_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                // Field 2 (workflow), embedded message; merged into the field builder.
                input.readMessage(getWorkflowFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 26:
              {
                // Field 3 (workflow_id), length-delimited string; must be valid UTF-8.
                workflowId_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            default:
              {
                // Unrecognized tags are preserved as unknown fields for round-tripping.
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        // Notify parent builders even on failure so partially-read state is observed.
        onChanged();
      } // finally
      return this;
    }
private int bitField0_;
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. Project and location in which the workflow should be created.
* Format: projects/{project}/locations/{location}
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
/**
 * Required. Project and location in which the workflow should be created.
 * Format: projects/{project}/locations/{location}
 *
 * <code>string parent = 1 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @return The parent.
 */
public java.lang.String getParent() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
    }
    // Field is still cached as a ByteString: decode once and memoize the String form.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String decoded = bs.toStringUtf8();
    parent_ = decoded;
    return decoded;
}

/**
 * Required. Project and location in which the workflow should be created.
 * Format: projects/{project}/locations/{location}
 *
 * <code>string parent = 1 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @return The bytes for parent.
 */
public com.google.protobuf.ByteString getParentBytes() {
    java.lang.Object ref = parent_;
    if (!(ref instanceof java.lang.String)) {
        return (com.google.protobuf.ByteString) ref;
    }
    // Encode the cached String lazily and keep the ByteString for later calls.
    com.google.protobuf.ByteString encoded =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    parent_ = encoded;
    return encoded;
}

/**
 * Sets the required parent (project/location) in which the workflow is created.
 *
 * @param value The parent to set; must not be null.
 * @return This builder for chaining.
 */
public Builder setParent(java.lang.String value) {
    java.util.Objects.requireNonNull(value);
    parent_ = value;
    bitField0_ |= 0x00000001;
    onChanged();
    return this;
}

/**
 * Resets the parent to its default (empty) value and clears its presence bit.
 *
 * @return This builder for chaining.
 */
public Builder clearParent() {
    bitField0_ = (bitField0_ & ~0x00000001);
    parent_ = getDefaultInstance().getParent();
    onChanged();
    return this;
}

/**
 * Sets the parent from raw UTF-8 bytes.
 *
 * @param value The bytes for parent to set; must be non-null, valid UTF-8.
 * @return This builder for chaining.
 */
public Builder setParentBytes(com.google.protobuf.ByteString value) {
    java.util.Objects.requireNonNull(value);
    checkByteStringIsUtf8(value);
    parent_ = value;
    bitField0_ |= 0x00000001;
    onChanged();
    return this;
}
// Plain-field storage for the workflow, used until a nested field builder exists.
private com.google.cloud.workflows.v1.Workflow workflow_;
// Once created, this builder owns the field state and workflow_ is nulled out.
private com.google.protobuf.SingleFieldBuilderV3<
        com.google.cloud.workflows.v1.Workflow,
        com.google.cloud.workflows.v1.Workflow.Builder,
        com.google.cloud.workflows.v1.WorkflowOrBuilder>
    workflowBuilder_;

/**
 * Required. Workflow to be created.
 *
 * @return Whether the workflow field is set.
 */
public boolean hasWorkflow() {
    return (bitField0_ & 0x00000002) != 0;
}

/**
 * Required. Workflow to be created.
 *
 * @return The workflow, or the default instance when unset.
 */
public com.google.cloud.workflows.v1.Workflow getWorkflow() {
    if (workflowBuilder_ != null) {
        return workflowBuilder_.getMessage();
    }
    return workflow_ == null ? com.google.cloud.workflows.v1.Workflow.getDefaultInstance() : workflow_;
}

/**
 * Sets the workflow message.
 *
 * @param value the workflow; must not be null
 * @return This builder for chaining.
 */
public Builder setWorkflow(com.google.cloud.workflows.v1.Workflow value) {
    if (workflowBuilder_ != null) {
        workflowBuilder_.setMessage(value);
    } else {
        if (value == null) {
            throw new NullPointerException();
        }
        workflow_ = value;
    }
    bitField0_ |= 0x00000002;
    onChanged();
    return this;
}

/**
 * Sets the workflow from a builder; the builder is built exactly once.
 *
 * @return This builder for chaining.
 */
public Builder setWorkflow(com.google.cloud.workflows.v1.Workflow.Builder builderForValue) {
    com.google.cloud.workflows.v1.Workflow built = builderForValue.build();
    if (workflowBuilder_ != null) {
        workflowBuilder_.setMessage(built);
    } else {
        workflow_ = built;
    }
    bitField0_ |= 0x00000002;
    onChanged();
    return this;
}

/**
 * Merges {@code value} into any existing workflow using proto merge semantics.
 *
 * @return This builder for chaining.
 */
public Builder mergeWorkflow(com.google.cloud.workflows.v1.Workflow value) {
    if (workflowBuilder_ != null) {
        workflowBuilder_.mergeFrom(value);
    } else if ((bitField0_ & 0x00000002) != 0
            && workflow_ != null
            && workflow_ != com.google.cloud.workflows.v1.Workflow.getDefaultInstance()) {
        // Existing non-default message: merge field-by-field through the nested builder.
        getWorkflowBuilder().mergeFrom(value);
    } else {
        workflow_ = value;
    }
    // Generated-code pattern: only flip the presence bit when the plain reference
    // holds the state (when a field builder exists, it owns the state instead).
    if (workflow_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
    }
    return this;
}

/**
 * Clears the workflow field and disposes of any nested field builder.
 *
 * @return This builder for chaining.
 */
public Builder clearWorkflow() {
    bitField0_ = (bitField0_ & ~0x00000002);
    workflow_ = null;
    if (workflowBuilder_ != null) {
        workflowBuilder_.dispose();
        workflowBuilder_ = null;
    }
    onChanged();
    return this;
}

/** Returns a mutable builder for the workflow field, marking the field as set. */
public com.google.cloud.workflows.v1.Workflow.Builder getWorkflowBuilder() {
    bitField0_ |= 0x00000002;
    onChanged();
    return getWorkflowFieldBuilder().getBuilder();
}

/** Read-only view of the workflow without forcing a field builder into existence. */
public com.google.cloud.workflows.v1.WorkflowOrBuilder getWorkflowOrBuilder() {
    if (workflowBuilder_ != null) {
        return workflowBuilder_.getMessageOrBuilder();
    }
    return workflow_ == null ? com.google.cloud.workflows.v1.Workflow.getDefaultInstance() : workflow_;
}

/**
 * Lazily creates the SingleFieldBuilderV3 for the workflow field. The current
 * message (or default) is handed to the new builder before {@code workflow_} is
 * dropped, so no state is lost in the transfer.
 */
private com.google.protobuf.SingleFieldBuilderV3<
        com.google.cloud.workflows.v1.Workflow,
        com.google.cloud.workflows.v1.Workflow.Builder,
        com.google.cloud.workflows.v1.WorkflowOrBuilder>
    getWorkflowFieldBuilder() {
    if (workflowBuilder_ == null) {
        workflowBuilder_ =
                new com.google.protobuf.SingleFieldBuilderV3<
                        com.google.cloud.workflows.v1.Workflow,
                        com.google.cloud.workflows.v1.Workflow.Builder,
                        com.google.cloud.workflows.v1.WorkflowOrBuilder>(
                        getWorkflow(), getParentForChildren(), isClean());
        workflow_ = null;
    }
    return workflowBuilder_;
}
// Lazily-decoded String/ByteString cache for the workflow_id field.
private java.lang.Object workflowId_ = "";

/**
 * Required. The ID of the workflow to be created. It must contain only letters,
 * numbers, underscores and hyphens; start with a letter; end with a letter or
 * number; be 1-64 characters; and be unique within the project and location.
 *
 * <code>string workflow_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @return The workflowId.
 */
public java.lang.String getWorkflowId() {
    java.lang.Object ref = workflowId_;
    if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
    }
    // Still cached as a ByteString: decode once and memoize the String form.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String decoded = bs.toStringUtf8();
    workflowId_ = decoded;
    return decoded;
}

/**
 * Required. The ID of the workflow to be created (see {@link #getWorkflowId()}).
 *
 * <code>string workflow_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @return The bytes for workflowId.
 */
public com.google.protobuf.ByteString getWorkflowIdBytes() {
    java.lang.Object ref = workflowId_;
    if (!(ref instanceof java.lang.String)) {
        return (com.google.protobuf.ByteString) ref;
    }
    // Encode lazily and keep the ByteString for subsequent calls.
    com.google.protobuf.ByteString encoded =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    workflowId_ = encoded;
    return encoded;
}

/**
 * Sets the required workflow ID.
 *
 * @param value The workflowId to set; must not be null.
 * @return This builder for chaining.
 */
public Builder setWorkflowId(java.lang.String value) {
    java.util.Objects.requireNonNull(value);
    workflowId_ = value;
    bitField0_ |= 0x00000004;
    onChanged();
    return this;
}

/**
 * Resets the workflow ID to its default (empty) value and clears its presence bit.
 *
 * @return This builder for chaining.
 */
public Builder clearWorkflowId() {
    bitField0_ = (bitField0_ & ~0x00000004);
    workflowId_ = getDefaultInstance().getWorkflowId();
    onChanged();
    return this;
}

/**
 * Sets the workflow ID from raw UTF-8 bytes.
 *
 * @param value The bytes for workflowId to set; must be non-null, valid UTF-8.
 * @return This builder for chaining.
 */
public Builder setWorkflowIdBytes(com.google.protobuf.ByteString value) {
    java.util.Objects.requireNonNull(value);
    checkByteStringIsUtf8(value);
    workflowId_ = value;
    bitField0_ |= 0x00000004;
    onChanged();
    return this;
}
// Unknown-field handling is delegated unchanged to the generated superclass.
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.setUnknownFields(unknownFields);
}

@java.lang.Override
public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.workflows.v1.CreateWorkflowRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.workflows.v1.CreateWorkflowRequest)
// Shared immutable default instance; initialized eagerly at class-load time.
private static final com.google.cloud.workflows.v1.CreateWorkflowRequest DEFAULT_INSTANCE;

static {
    DEFAULT_INSTANCE = new com.google.cloud.workflows.v1.CreateWorkflowRequest();
}

// Returns the singleton default (all-fields-unset) instance of this message.
public static com.google.cloud.workflows.v1.CreateWorkflowRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
}
// Singleton parser. On failure it attaches whatever was parsed so far
// (buildPartial) to the thrown InvalidProtocolBufferException for diagnostics.
private static final com.google.protobuf.Parser<CreateWorkflowRequest> PARSER =
        new com.google.protobuf.AbstractParser<CreateWorkflowRequest>() {
            @java.lang.Override
            public CreateWorkflowRequest parsePartialFrom(
                    com.google.protobuf.CodedInputStream input,
                    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
                    throws com.google.protobuf.InvalidProtocolBufferException {
                Builder builder = newBuilder();
                try {
                    builder.mergeFrom(input, extensionRegistry);
                } catch (com.google.protobuf.InvalidProtocolBufferException e) {
                    // Preserve the partially-read message on the exception.
                    throw e.setUnfinishedMessage(builder.buildPartial());
                } catch (com.google.protobuf.UninitializedMessageException e) {
                    throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
                } catch (java.io.IOException e) {
                    // Wrap plain I/O failures in the protobuf exception type callers expect.
                    throw new com.google.protobuf.InvalidProtocolBufferException(e)
                            .setUnfinishedMessage(builder.buildPartial());
                }
                return builder.buildPartial();
            }
        };

// Static accessor for the shared parser instance.
public static com.google.protobuf.Parser<CreateWorkflowRequest> parser() {
    return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<CreateWorkflowRequest> getParserForType() {
    return PARSER;
}

@java.lang.Override
public com.google.cloud.workflows.v1.CreateWorkflowRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
}
}
|
apache/commons-imaging | 38,192 | src/main/java/org/apache/commons/imaging/formats/jpeg/JpegImageParser.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.imaging.formats.jpeg;
import static org.apache.commons.imaging.common.BinaryFunctions.remainingBytes;
import java.awt.Dimension;
import java.awt.image.BufferedImage;
import java.io.IOException;
import java.io.PrintWriter;
import java.nio.charset.StandardCharsets;
import java.text.NumberFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.apache.commons.imaging.AbstractImageParser;
import org.apache.commons.imaging.ImageFormat;
import org.apache.commons.imaging.ImageFormats;
import org.apache.commons.imaging.ImageInfo;
import org.apache.commons.imaging.ImagingException;
import org.apache.commons.imaging.bytesource.ByteSource;
import org.apache.commons.imaging.common.Allocator;
import org.apache.commons.imaging.common.ImageMetadata;
import org.apache.commons.imaging.common.XmpEmbeddable;
import org.apache.commons.imaging.common.XmpImagingParameters;
import org.apache.commons.imaging.formats.jpeg.decoder.JpegDecoder;
import org.apache.commons.imaging.formats.jpeg.iptc.IptcParser;
import org.apache.commons.imaging.formats.jpeg.iptc.PhotoshopApp13Data;
import org.apache.commons.imaging.formats.jpeg.segments.AbstractGenericSegment;
import org.apache.commons.imaging.formats.jpeg.segments.AbstractSegment;
import org.apache.commons.imaging.formats.jpeg.segments.App13Segment;
import org.apache.commons.imaging.formats.jpeg.segments.App14Segment;
import org.apache.commons.imaging.formats.jpeg.segments.App2Segment;
import org.apache.commons.imaging.formats.jpeg.segments.ComSegment;
import org.apache.commons.imaging.formats.jpeg.segments.DqtSegment;
import org.apache.commons.imaging.formats.jpeg.segments.JfifSegment;
import org.apache.commons.imaging.formats.jpeg.segments.SofnSegment;
import org.apache.commons.imaging.formats.jpeg.segments.UnknownSegment;
import org.apache.commons.imaging.formats.jpeg.xmp.JpegXmpParser;
import org.apache.commons.imaging.formats.tiff.TiffField;
import org.apache.commons.imaging.formats.tiff.TiffImageMetadata;
import org.apache.commons.imaging.formats.tiff.TiffImageParser;
import org.apache.commons.imaging.formats.tiff.TiffImagingParameters;
import org.apache.commons.imaging.formats.tiff.constants.TiffTagConstants;
import org.apache.commons.imaging.internal.Debug;
import org.apache.commons.lang3.ArrayUtils;
public class JpegImageParser extends AbstractImageParser<JpegImagingParameters> implements XmpEmbeddable<JpegImagingParameters> {
// Class-wide logger; used below for FINEST-level parse tracing.
private static final Logger LOGGER = Logger.getLogger(JpegImageParser.class.getName());

// Extension metadata is delegated to the ImageFormats.JPEG enum entry.
private static final String DEFAULT_EXTENSION = ImageFormats.JPEG.getDefaultExtension();
private static final String[] ACCEPTED_EXTENSIONS = ImageFormats.JPEG.getExtensions();
/**
 * Tests whether the given segment's payload begins with the EXIF identifier code,
 * i.e. whether this APP1 segment carries EXIF data rather than, say, XMP.
 *
 * @param segment the segment to inspect
 * @return true if the segment data starts with the EXIF identifier
 */
public static boolean isExifApp1Segment(final AbstractGenericSegment segment) {
    final byte[] segmentData = segment.getSegmentData();
    return JpegConstants.EXIF_IDENTIFIER_CODE.isStartOf(segmentData);
}
/**
 * Constructs a new instance with the big-endian byte order.
 */
public JpegImageParser() {
    // empty -- parser holds no per-instance state beyond the superclass defaults
}
// Reassembles a multi-chunk ICC profile from its APP2 segments.
// First assumes the spec-conformant 1-based chunk numbering; if that fails
// validation, retries assuming a (non-conformant) 0-based numbering, which
// some writers emit.
private byte[] assembleSegments(final List<App2Segment> segments) throws ImagingException {
    try {
        return assembleSegments(segments, false);
    } catch (final ImagingException e) {
        return assembleSegments(segments, true);
    }
}
/**
 * Validates and concatenates the ICC-profile payloads of a set of APP2 segments.
 * <p>
 * Note: sorts {@code segments} in place (by chunk marker) before validating.
 *
 * @param segments      the APP2 segments carrying ICC profile chunks
 * @param startWithZero true to interpret chunk markers as 0-based instead of the
 *                      spec's 1-based numbering
 * @return the concatenated ICC profile bytes
 * @throws ImagingException if the segment list is empty, incomplete, or the
 *                          chunk numbering is inconsistent
 */
private byte[] assembleSegments(final List<App2Segment> segments, final boolean startWithZero) throws ImagingException {
    if (segments.isEmpty()) {
        throw new ImagingException("No App2 Segments Found.");
    }
    // Every chunk declares the total chunk count; all must agree and all must be present.
    final int markerCount = segments.get(0).numMarkers;
    if (segments.size() != markerCount) {
        throw new ImagingException("App2 Segments Missing. Found: " + segments.size() + ", Expected: " + markerCount + ".");
    }
    Collections.sort(segments);
    final int offset = startWithZero ? 0 : 1;
    int total = 0;
    for (int i = 0; i < segments.size(); i++) {
        final App2Segment segment = segments.get(i);
        // Chunk markers must form a contiguous sequence in the chosen numbering.
        if (i + offset != segment.curMarker) {
            dumpSegments(segments);
            throw new ImagingException("Incoherent App2 Segment Ordering. i: " + i + ", segment[" + i + "].curMarker: " + segment.curMarker + ".");
        }
        if (markerCount != segment.numMarkers) {
            dumpSegments(segments);
            throw new ImagingException(
                    "Inconsistent App2 Segment Count info. markerCount: " + markerCount + ", segment[" + i + "].numMarkers: " + segment.numMarkers + ".");
        }
        if (segment.getIccBytes() != null) {
            total += segment.getIccBytes().length;
        }
    }
    final byte[] result = Allocator.byteArray(total);
    int progress = 0;
    for (final App2Segment segment : segments) {
        final byte[] iccBytes = segment.getIccBytes();
        // BUGFIX: mirror the null guard of the length-accumulation loop above.
        // Previously this dereferenced getIccBytes() unconditionally, so a
        // segment with a null ICC payload caused a NullPointerException here.
        if (iccBytes != null) {
            System.arraycopy(iccBytes, 0, result, progress, iccBytes.length);
            progress += iccBytes.length;
        }
    }
    return result;
}
/**
 * Dumps a human-readable description of the JPEG file to {@code pw}: first the
 * ImageInfo summary, then one line per segment (marker, description, length).
 *
 * @return false if no ImageInfo could be produced, true otherwise
 */
@Override
public boolean dumpImageFile(final PrintWriter pw, final ByteSource byteSource) throws ImagingException, IOException {
    pw.println("jpeg.dumpImageFile");
    {
        final ImageInfo imageInfo = getImageInfo(byteSource);
        if (imageInfo == null) {
            return false;
        }
        imageInfo.toString(pw, "");
    }
    pw.println("");
    {
        // Re-read all segments (no marker filter) for the per-segment listing.
        final List<AbstractSegment> abstractSegments = readSegments(byteSource, null, false);
        if (abstractSegments == null) {
            throw new ImagingException("No Segments Found.");
        }
        for (int d = 0; d < abstractSegments.size(); d++) {
            final AbstractSegment abstractSegment = abstractSegments.get(d);
            final NumberFormat nf = NumberFormat.getIntegerInstance();
            // this.debugNumber("found, marker: ", marker, 4);
            pw.println(d + ": marker: " + Integer.toHexString(abstractSegment.marker) + ", " + abstractSegment.getDescription() + " (length: "
                    + nf.format(abstractSegment.length) + ")");
            abstractSegment.dump(pw);
        }
        pw.println("");
    }
    return true;
}
// Debug helper: prints each segment's chunk marker / total-marker counts.
private void dumpSegments(final List<? extends AbstractSegment> v) {
    Debug.debug();
    Debug.debug("dumpSegments: " + v.size());
    for (int i = 0; i < v.size(); i++) {
        // NOTE(review): unchecked downcast — callers in this file only pass App2
        // segments, but any other element type would throw ClassCastException here.
        final App2Segment segment = (App2Segment) v.get(i);
        Debug.debug(i + ": " + segment.curMarker + " / " + segment.numMarkers);
    }
    Debug.debug();
}
/**
 * Returns only those APP1 segments whose payload starts with the EXIF
 * identifier, discarding XMP and other non-EXIF APP1 segments.
 *
 * @param abstractSegments APP1 segments (must all be AbstractGenericSegment instances)
 * @return a new list containing just the EXIF segments, in original order
 */
private List<AbstractSegment> filterApp1Segments(final List<AbstractSegment> abstractSegments) {
    final List<AbstractSegment> exifSegments = new ArrayList<>();
    for (final AbstractSegment abstractSegment : abstractSegments) {
        final AbstractGenericSegment candidate = (AbstractGenericSegment) abstractSegment;
        if (isExifApp1Segment(candidate)) {
            exifSegments.add(candidate);
        }
    }
    return exifSegments;
}
// File extensions this parser accepts (delegated to ImageFormats.JPEG).
@Override
protected String[] getAcceptedExtensions() {
    return ACCEPTED_EXTENSIONS;
}
// The only image format this parser handles is JPEG.
@Override
protected ImageFormat[] getAcceptedTypes() {
    return new ImageFormat[] { ImageFormats.JPEG, //
    };
}
/**
 * Decodes the JPEG image data into a BufferedImage.
 * <p>
 * The {@code params} argument is not consulted here; the decoder derives
 * everything it needs from the byte source itself.
 */
@Override
public final BufferedImage getBufferedImage(final ByteSource byteSource, final JpegImagingParameters params) throws ImagingException, IOException {
    return new JpegDecoder().decode(byteSource);
}
// Default file extension for JPEG (from ImageFormats.JPEG).
@Override
public String getDefaultExtension() {
    return DEFAULT_EXTENSION;
}
// Fresh, default-configured parameter object for this parser.
@Override
public JpegImagingParameters getDefaultParameters() {
    return new JpegImagingParameters();
}
/**
 * Extracts the EXIF (TIFF) metadata embedded in the JPEG's APP1 segment, if any.
 *
 * @param byteSource the JPEG data
 * @param params     optional TIFF parameters; a default instance is used when null.
 *                   Thumbnail reading is always enabled on the effective parameters.
 * @return the parsed TIFF metadata, or null when no EXIF data is present
 */
public TiffImageMetadata getExifMetadata(final ByteSource byteSource, TiffImagingParameters params) throws ImagingException, IOException {
    final byte[] exifBytes = getExifRawData(byteSource);
    if (exifBytes == null) {
        return null;
    }
    final TiffImagingParameters tiffParams = params != null ? params : new TiffImagingParameters();
    tiffParams.setReadThumbnails(Boolean.TRUE);
    return (TiffImageMetadata) new TiffImageParser().getMetadata(exifBytes, tiffParams);
}
/**
 * Returns the raw EXIF payload (a TIFF stream) from the JPEG's APP1 segment,
 * with the 6-byte "Exif\0\0" identifier prefix stripped, or null when the
 * image carries no EXIF data.
 *
 * @throws ImagingException if the EXIF data is split across multiple APP1 segments
 */
public byte[] getExifRawData(final ByteSource byteSource) throws ImagingException, IOException {
    final List<AbstractSegment> app1Segments = readSegments(byteSource, new int[] { JpegConstants.JPEG_APP1_MARKER, }, false);
    if (app1Segments == null || app1Segments.isEmpty()) {
        return null;
    }
    // APP1 may also hold XMP; keep only segments starting with the EXIF identifier.
    final List<AbstractSegment> exifSegments = filterApp1Segments(app1Segments);
    if (LOGGER.isLoggable(Level.FINEST)) {
        LOGGER.finest("exifSegments.size(): " + exifSegments.size());
    }
    if (exifSegments.isEmpty()) {
        return null;
    }
    // TODO: concatenate if multiple segments, need example.
    if (exifSegments.size() > 1) {
        throw new ImagingException(
                "Imaging currently can't parse EXIF metadata split across multiple APP1 segments. Please send this image to the Imaging project.");
    }
    final AbstractGenericSegment exifSegment = (AbstractGenericSegment) exifSegments.get(0);
    final byte[] segmentData = exifSegment.getSegmentData();
    // Skip the 6-byte EXIF identifier code; the TIFF header starts right after it.
    return remainingBytes("trimmed exif bytes", segmentData, 6);
}
/**
 * Extracts the embedded ICC color profile, reassembling it from one or more
 * APP2 segments, or returns null when no ICC profile is present.
 */
@Override
public byte[] getIccProfileBytes(final ByteSource byteSource, final JpegImagingParameters params) throws ImagingException, IOException {
    final List<AbstractSegment> app2Segments = readSegments(byteSource, new int[] { JpegConstants.JPEG_APP2_MARKER, }, false);
    // Keep only APP2 segments that actually carry ICC profile chunks.
    final List<App2Segment> iccSegments = new ArrayList<>();
    if (app2Segments != null) {
        for (final AbstractSegment abstractSegment : app2Segments) {
            final App2Segment app2Segment = (App2Segment) abstractSegment;
            if (app2Segment.getIccBytes() != null) {
                iccSegments.add(app2Segment);
            }
        }
    }
    if (iccSegments.isEmpty()) {
        return null;
    }
    final byte[] iccProfile = assembleSegments(iccSegments);
    if (LOGGER.isLoggable(Level.FINEST)) {
        LOGGER.finest("bytes: " + iccProfile.length);
    }
    return iccProfile;
}
/**
 * Builds an {@link ImageInfo} summary for the JPEG: pixel dimensions from the
 * SOFn segment, density/DPI from JFIF (falling back to EXIF), COM-segment
 * comments, and a best-effort color-type guess driven by the Adobe APP14
 * transform, JFIF presence, and the SOFn component layout.
 */
@Override
public ImageInfo getImageInfo(final ByteSource byteSource, final JpegImagingParameters params) throws ImagingException, IOException {
    // List allSegments = readSegments(byteSource, null, false);
    // Accept any SOFn variant (baseline, progressive, lossless, ...).
    final List<AbstractSegment> SOF_segments = readSegments(byteSource, new int[] {
            // kJFIFMarker,
            JpegConstants.SOF0_MARKER, JpegConstants.SOF1_MARKER, JpegConstants.SOF2_MARKER, JpegConstants.SOF3_MARKER, JpegConstants.SOF5_MARKER,
            JpegConstants.SOF6_MARKER, JpegConstants.SOF7_MARKER, JpegConstants.SOF9_MARKER, JpegConstants.SOF10_MARKER, JpegConstants.SOF11_MARKER,
            JpegConstants.SOF13_MARKER, JpegConstants.SOF14_MARKER, JpegConstants.SOF15_MARKER,
    }, false);
    if (SOF_segments == null) {
        throw new ImagingException("No SOFN Data Found.");
    }
    // if (SOF_segments.size() != 1)
    // System.out.println("Incoherent SOFN Data Found: "
    // + SOF_segments.size());
    final List<AbstractSegment> jfifSegments = readSegments(byteSource, new int[] { JpegConstants.JFIF_MARKER, }, true);
    final SofnSegment fSOFNSegment = (SofnSegment) SOF_segments.get(0);
    // SofnSegment fSOFNSegment = (SofnSegment) findSegment(segments,
    // SOFNmarkers);
    if (fSOFNSegment == null) {
        throw new ImagingException("No SOFN Data Found.");
    }
    final int width = fSOFNSegment.width;
    final int height = fSOFNSegment.height;
    JfifSegment jfifSegment = null;
    if (jfifSegments != null && !jfifSegments.isEmpty()) {
        jfifSegment = (JfifSegment) jfifSegments.get(0);
    }
    final List<AbstractSegment> app14Segments = readSegments(byteSource, new int[] { JpegConstants.JPEG_APP14_MARKER }, true);
    App14Segment app14Segment = null;
    if (app14Segments != null && !app14Segments.isEmpty()) {
        app14Segment = (App14Segment) app14Segments.get(0);
    }
    // JfifSegment fTheJFIFSegment = (JfifSegment) findSegment(segments,
    // kJFIFMarker);
    // Density defaults of -1 signal "unknown" to the DPI math below.
    double xDensity = -1.0;
    double yDensity = -1.0;
    double unitsPerInch = -1.0;
    // int JFIF_major_version;
    // int JFIF_minor_version;
    final String formatDetails;
    if (jfifSegment != null) {
        // Preferred path: density comes straight from the JFIF APP0 segment.
        xDensity = jfifSegment.xDensity;
        yDensity = jfifSegment.yDensity;
        final int densityUnits = jfifSegment.densityUnits;
        // JFIF_major_version = fTheJFIFSegment.JFIF_major_version;
        // JFIF_minor_version = fTheJFIFSegment.JFIF_minor_version;
        formatDetails = "Jpeg/JFIF v." + jfifSegment.jfifMajorVersion + "." + jfifSegment.jfifMinorVersion;
        switch (densityUnits) {
        case 0:
            // aspect-ratio only; no absolute unit
            break;
        case 1: // inches
            unitsPerInch = 1.0;
            break;
        case 2: // cms
            unitsPerInch = 2.54;
            break;
        default:
            break;
        }
    } else {
        // No JFIF segment: fall back to the EXIF resolution tags.
        final JpegImageMetadata metadata = (JpegImageMetadata) getMetadata(byteSource, params);
        if (metadata != null) {
            {
                final TiffField field = metadata.findExifValue(TiffTagConstants.TIFF_TAG_XRESOLUTION);
                if (field != null) {
                    xDensity = ((Number) field.getValue()).doubleValue();
                }
            }
            {
                final TiffField field = metadata.findExifValue(TiffTagConstants.TIFF_TAG_YRESOLUTION);
                if (field != null) {
                    yDensity = ((Number) field.getValue()).doubleValue();
                }
            }
            {
                final TiffField field = metadata.findExifValue(TiffTagConstants.TIFF_TAG_RESOLUTION_UNIT);
                if (field != null) {
                    // Note: TIFF resolution-unit codes differ from JFIF's (1=none, 2=inch, 3=cm).
                    final int densityUnits = ((Number) field.getValue()).intValue();
                    switch (densityUnits) {
                    case 1:
                        break;
                    case 2: // inches
                        unitsPerInch = 1.0;
                        break;
                    case 3: // cms
                        unitsPerInch = 2.54;
                        break;
                    default:
                        break;
                    }
                }
            }
        }
        formatDetails = "Jpeg/DCM";
    }
    int physicalHeightDpi = -1;
    float physicalHeightInch = -1;
    int physicalWidthDpi = -1;
    float physicalWidthInch = -1;
    if (unitsPerInch > 0) {
        // Convert density to DPI and derive the physical print size in inches.
        physicalWidthDpi = (int) Math.round(xDensity * unitsPerInch);
        physicalWidthInch = (float) (width / (xDensity * unitsPerInch));
        physicalHeightDpi = (int) Math.round(yDensity * unitsPerInch);
        physicalHeightInch = (float) (height / (yDensity * unitsPerInch));
    }
    // Collect all COM-segment comments, decoded as UTF-8.
    final List<AbstractSegment> commentSegments = readSegments(byteSource, new int[] { JpegConstants.COM_MARKER }, false);
    final List<String> comments = Allocator.arrayList(commentSegments.size());
    for (final AbstractSegment commentSegment : commentSegments) {
        final ComSegment comSegment = (ComSegment) commentSegment;
        comments.add(new String(comSegment.getComment(), StandardCharsets.UTF_8));
    }
    final int numberOfComponents = fSOFNSegment.numberOfComponents;
    final int precision = fSOFNSegment.precision;
    final int bitsPerPixel = numberOfComponents * precision;
    final ImageFormat format = ImageFormats.JPEG;
    final String formatName = "JPEG (Joint Photographic Experts Group) Format";
    final String mimeType = "image/jpeg";
    // TODO: we ought to count images, but don't yet.
    final int numberOfImages = 1;
    // not accurate ... only reflects first
    final boolean progressive = fSOFNSegment.marker == JpegConstants.SOF2_MARKER;
    boolean transparent = false;
    final boolean usesPalette = false; // TODO: inaccurate.
    // See https://docs.oracle.com/javase/8/docs/api/javax/imageio/metadata/doc-files/jpeg_metadata.html#color
    ImageInfo.ColorType colorType = ImageInfo.ColorType.UNKNOWN;
    // Some images have both JFIF/APP0 and APP14.
    // JFIF is meant to win but in them APP14 is clearly right, so make it win.
    if (app14Segment != null && app14Segment.isAdobeJpegSegment()) {
        // Adobe APP14: the transform code states the color model explicitly.
        final int colorTransform = app14Segment.getAdobeColorTransform();
        switch (colorTransform) {
        case App14Segment.ADOBE_COLOR_TRANSFORM_UNKNOWN:
            if (numberOfComponents == 3) {
                colorType = ImageInfo.ColorType.RGB;
            } else if (numberOfComponents == 4) {
                colorType = ImageInfo.ColorType.CMYK;
            }
            break;
        case App14Segment.ADOBE_COLOR_TRANSFORM_YCbCr:
            colorType = ImageInfo.ColorType.YCbCr;
            break;
        case App14Segment.ADOBE_COLOR_TRANSFORM_YCCK:
            colorType = ImageInfo.ColorType.YCCK;
            break;
        default:
            break;
        }
    } else if (jfifSegment != null) {
        // JFIF mandates grayscale or YCbCr.
        if (numberOfComponents == 1) {
            colorType = ImageInfo.ColorType.GRAYSCALE;
        } else if (numberOfComponents == 3) {
            colorType = ImageInfo.ColorType.YCbCr;
        }
    } else {
        // Neither APP14 nor JFIF: infer from the SOFn component identifiers.
        switch (numberOfComponents) {
        case 1:
            colorType = ImageInfo.ColorType.GRAYSCALE;
            break;
        case 2:
            colorType = ImageInfo.ColorType.GRAYSCALE;
            transparent = true;
            break;
        case 3:
        case 4:
            // Check for the conventional numeric component ids 1..4.
            boolean have1 = false;
            boolean have2 = false;
            boolean have3 = false;
            boolean have4 = false;
            boolean haveOther = false;
            for (final SofnSegment.Component component : fSOFNSegment.getComponents()) {
                final int id = component.componentIdentifier;
                switch (id) {
                case 1:
                    have1 = true;
                    break;
                case 2:
                    have2 = true;
                    break;
                case 3:
                    have3 = true;
                    break;
                case 4:
                    have4 = true;
                    break;
                default:
                    haveOther = true;
                    break;
                }
            }
            if (numberOfComponents == 3 && have1 && have2 && have3 && !have4 && !haveOther) {
                colorType = ImageInfo.ColorType.YCbCr;
            } else if (numberOfComponents == 4 && have1 && have2 && have3 && have4 && !haveOther) {
                colorType = ImageInfo.ColorType.YCbCr;
                transparent = true;
            } else {
                // Check for ASCII component ids ('R','G','B','A','Y','C','c').
                boolean haveR = false;
                boolean haveG = false;
                boolean haveB = false;
                boolean haveA = false;
                boolean haveC = false;
                boolean havec = false;
                boolean haveY = false;
                for (final SofnSegment.Component component : fSOFNSegment.getComponents()) {
                    final int id = component.componentIdentifier;
                    switch (id) {
                    case 'R':
                        haveR = true;
                        break;
                    case 'G':
                        haveG = true;
                        break;
                    case 'B':
                        haveB = true;
                        break;
                    case 'A':
                        haveA = true;
                        break;
                    case 'C':
                        haveC = true;
                        break;
                    case 'c':
                        havec = true;
                        break;
                    case 'Y':
                        haveY = true;
                        break;
                    default:
                        break;
                    }
                }
                if (haveR && haveG && haveB && !haveA && !haveC && !havec && !haveY) {
                    colorType = ImageInfo.ColorType.RGB;
                } else if (haveR && haveG && haveB && haveA && !haveC && !havec && !haveY) {
                    colorType = ImageInfo.ColorType.RGB;
                    transparent = true;
                } else if (haveY && haveC && havec && !haveR && !haveG && !haveB && !haveA) {
                    colorType = ImageInfo.ColorType.YCC;
                } else if (haveY && haveC && havec && haveA && !haveR && !haveG && !haveB) {
                    colorType = ImageInfo.ColorType.YCC;
                    transparent = true;
                } else {
                    // Last resort: chroma subsampling usually implies a luma/chroma model.
                    int minHorizontalSamplingFactor = Integer.MAX_VALUE;
                    int maxHorizontalSmaplingFactor = Integer.MIN_VALUE;
                    int minVerticalSamplingFactor = Integer.MAX_VALUE;
                    int maxVerticalSamplingFactor = Integer.MIN_VALUE;
                    for (final SofnSegment.Component component : fSOFNSegment.getComponents()) {
                        if (minHorizontalSamplingFactor > component.horizontalSamplingFactor) {
                            minHorizontalSamplingFactor = component.horizontalSamplingFactor;
                        }
                        if (maxHorizontalSmaplingFactor < component.horizontalSamplingFactor) {
                            maxHorizontalSmaplingFactor = component.horizontalSamplingFactor;
                        }
                        if (minVerticalSamplingFactor > component.verticalSamplingFactor) {
                            minVerticalSamplingFactor = component.verticalSamplingFactor;
                        }
                        if (maxVerticalSamplingFactor < component.verticalSamplingFactor) {
                            maxVerticalSamplingFactor = component.verticalSamplingFactor;
                        }
                    }
                    final boolean isSubsampled = minHorizontalSamplingFactor != maxHorizontalSmaplingFactor
                            || minVerticalSamplingFactor != maxVerticalSamplingFactor;
                    if (numberOfComponents == 3) {
                        if (isSubsampled) {
                            colorType = ImageInfo.ColorType.YCbCr;
                        } else {
                            colorType = ImageInfo.ColorType.RGB;
                        }
                    } else if (numberOfComponents == 4) {
                        if (isSubsampled) {
                            colorType = ImageInfo.ColorType.YCCK;
                        } else {
                            colorType = ImageInfo.ColorType.CMYK;
                        }
                    }
                }
            }
            break;
        default:
            break;
        }
    }
    final ImageInfo.CompressionAlgorithm compressionAlgorithm = ImageInfo.CompressionAlgorithm.JPEG;
    return new ImageInfo(formatDetails, bitsPerPixel, comments, format, formatName, height, mimeType, numberOfImages, physicalHeightDpi, physicalHeightInch,
            physicalWidthDpi, physicalWidthInch, width, progressive, transparent, usesPalette, colorType, compressionAlgorithm);
}
/**
 * Reads the pixel dimensions from the image's SOFn segment.
 *
 * @return the image width and height in pixels
 * @throws ImagingException if no SOFn segment is found, or more than one is
 */
@Override
public Dimension getImageSize(final ByteSource byteSource, final JpegImagingParameters params) throws ImagingException, IOException {
    // Any SOFn variant carries the frame dimensions; stop after the first hit.
    final int[] sofnMarkers = {
            // kJFIFMarker,
            JpegConstants.SOF0_MARKER, JpegConstants.SOF1_MARKER, JpegConstants.SOF2_MARKER, JpegConstants.SOF3_MARKER, JpegConstants.SOF5_MARKER,
            JpegConstants.SOF6_MARKER, JpegConstants.SOF7_MARKER, JpegConstants.SOF9_MARKER, JpegConstants.SOF10_MARKER, JpegConstants.SOF11_MARKER,
            JpegConstants.SOF13_MARKER, JpegConstants.SOF14_MARKER, JpegConstants.SOF15_MARKER, };
    final List<AbstractSegment> sofnSegments = readSegments(byteSource, sofnMarkers, true);
    if (sofnSegments == null || sofnSegments.isEmpty()) {
        throw new ImagingException("No JFIF Data Found.");
    }
    if (sofnSegments.size() > 1) {
        throw new ImagingException("Redundant JFIF Data Found.");
    }
    final SofnSegment sofnSegment = (SofnSegment) sofnSegments.get(0);
    return new Dimension(sofnSegment.width, sofnSegment.height);
}
/**
 * Combines EXIF (TIFF) and Photoshop (IPTC) metadata into a JpegImageMetadata.
 *
 * @return the combined metadata, or null when neither source is present
 */
@Override
public ImageMetadata getMetadata(final ByteSource byteSource, JpegImagingParameters params) throws ImagingException, IOException {
    if (params == null) {
        params = new JpegImagingParameters();
    }
    final TiffImageMetadata exif = getExifMetadata(byteSource, new TiffImagingParameters());
    final JpegPhotoshopMetadata photoshop = getPhotoshopMetadata(byteSource, params);
    // Only wrap when at least one metadata source exists.
    if (exif == null && photoshop == null) {
        return null;
    }
    return new JpegImageMetadata(photoshop, exif);
}
// Human-readable parser name (not the format name).
@Override
public String getName() {
    return "Jpeg-Custom";
}
/**
 * Reads the Photoshop App13 (IPTC) metadata from a JPEG image.
 *
 * @param byteSource source containing the JPEG data
 * @param params imaging parameters passed through to segment parsing
 * @return the Photoshop metadata, or {@code null} if none is present
 * @throws ImagingException if the file contains more than one parseable
 *     Photoshop App13 segment
 * @throws IOException on read failure
 */
public JpegPhotoshopMetadata getPhotoshopMetadata(final ByteSource byteSource, final JpegImagingParameters params) throws ImagingException, IOException {
    final List<AbstractSegment> app13Segments = readSegments(byteSource, new int[] { JpegConstants.JPEG_APP13_MARKER, }, false);
    if (app13Segments == null || app13Segments.isEmpty()) {
        return null;
    }
    PhotoshopApp13Data photoshopApp13Data = null;
    for (final AbstractSegment abstractSegment : app13Segments) {
        final PhotoshopApp13Data parsed = ((App13Segment) abstractSegment).parsePhotoshopSegment(params);
        if (parsed == null) {
            continue;
        }
        // Exactly one parseable App13 segment is allowed.
        if (photoshopApp13Data != null) {
            throw new ImagingException("JPEG contains more than one Photoshop App13 segment.");
        }
        photoshopApp13Data = parsed;
    }
    return photoshopApp13Data == null ? null : new JpegPhotoshopMetadata(photoshopApp13Data);
}
/**
 * Extracts the embedded XMP metadata of a JPEG image as an XML string.
 *
 * @param byteSource source containing the JPEG data
 * @param params imaging parameters (not consulted during traversal)
 * @return the XMP XML as a String if present, otherwise {@code null}
 * @throws ImagingException if more than one XMP segment is found
 * @throws IOException on read failure
 */
@Override
public String getXmpXml(final ByteSource byteSource, final XmpImagingParameters<JpegImagingParameters> params) throws ImagingException, IOException {
    final List<String> xmpStrings = new ArrayList<>();
    new JpegUtils().traverseJfif(byteSource, new JpegUtils.Visitor() {
        @Override
        public boolean beginSos() {
            // XMP lives before the image data; no need to scan it.
            return false;
        }

        @Override
        public boolean visitSegment(final int marker, final byte[] markerBytes, final int markerLength, final byte[] markerLengthBytes,
                final byte[] segmentData) throws ImagingException {
            if (marker == JpegConstants.JPEG_APP1_MARKER) {
                final JpegXmpParser xmpParser = new JpegXmpParser();
                if (xmpParser.isXmpJpegSegment(segmentData)) {
                    xmpStrings.add(xmpParser.parseXmpJpegSegment(segmentData));
                    // First XMP segment found; stop traversing.
                    return false;
                }
            }
            // Stop at EOI; otherwise keep scanning.
            return marker != 0xffd9;
        }

        @Override
        public void visitSos(final int marker, final byte[] markerBytes, final byte[] imageData) {
            // Image data is irrelevant for metadata extraction.
        }
    });
    if (xmpStrings.isEmpty()) {
        return null;
    }
    if (xmpStrings.size() > 1) {
        throw new ImagingException("JPEG file contains more than one XMP segment.");
    }
    return xmpStrings.get(0);
}
/**
 * Returns whether the JPEG contains an EXIF (APP1) segment.
 *
 * @param byteSource source containing the JPEG data
 * @return {@code true} if an EXIF segment is present
 * @throws ImagingException on malformed data
 * @throws IOException on read failure
 */
public boolean hasExifSegment(final ByteSource byteSource) throws ImagingException, IOException {
    final boolean[] found = { false };
    new JpegUtils().traverseJfif(byteSource, new JpegUtils.Visitor() {
        @Override
        public boolean beginSos() {
            // Metadata segments precede the image data; no need to scan it.
            return false;
        }

        @Override
        public boolean visitSegment(final int marker, final byte[] markerBytes, final int markerLength, final byte[] markerLengthBytes,
                final byte[] segmentData) {
            if (marker == JpegConstants.JPEG_APP1_MARKER && JpegConstants.EXIF_IDENTIFIER_CODE.isStartOf(segmentData)) {
                found[0] = true;
                return false;
            }
            // Stop at EOI; otherwise keep scanning.
            return marker != 0xffd9;
        }

        @Override
        public void visitSos(final int marker, final byte[] markerBytes, final byte[] imageData) {
            // Image data is irrelevant for this check.
        }
    });
    return found[0];
}
/**
 * Returns whether the JPEG contains a Photoshop/IPTC (APP13) segment.
 *
 * @param byteSource source containing the JPEG data
 * @return {@code true} if a Photoshop APP13 segment is present
 * @throws ImagingException on malformed data
 * @throws IOException on read failure
 */
public boolean hasIptcSegment(final ByteSource byteSource) throws ImagingException, IOException {
    final boolean[] found = { false };
    new JpegUtils().traverseJfif(byteSource, new JpegUtils.Visitor() {
        @Override
        public boolean beginSos() {
            // Metadata segments precede the image data; no need to scan it.
            return false;
        }

        @Override
        public boolean visitSegment(final int marker, final byte[] markerBytes, final int markerLength, final byte[] markerLengthBytes,
                final byte[] segmentData) {
            if (marker == JpegConstants.JPEG_APP13_MARKER && new IptcParser().isPhotoshopJpegSegment(segmentData)) {
                found[0] = true;
                return false;
            }
            // Stop at EOI; otherwise keep scanning.
            return marker != 0xffd9;
        }

        @Override
        public void visitSos(final int marker, final byte[] markerBytes, final byte[] imageData) {
            // Image data is irrelevant for this check.
        }
    });
    return found[0];
}
/**
 * Returns whether the JPEG contains an XMP (APP1) segment.
 *
 * @param byteSource source containing the JPEG data
 * @return {@code true} if an XMP segment is present
 * @throws ImagingException on malformed data
 * @throws IOException on read failure
 */
public boolean hasXmpSegment(final ByteSource byteSource) throws ImagingException, IOException {
    final boolean[] found = { false };
    new JpegUtils().traverseJfif(byteSource, new JpegUtils.Visitor() {
        @Override
        public boolean beginSos() {
            // Metadata segments precede the image data; no need to scan it.
            return false;
        }

        @Override
        public boolean visitSegment(final int marker, final byte[] markerBytes, final int markerLength, final byte[] markerLengthBytes,
                final byte[] segmentData) {
            if (marker == JpegConstants.JPEG_APP1_MARKER && new JpegXmpParser().isXmpJpegSegment(segmentData)) {
                found[0] = true;
                return false;
            }
            // Stop at EOI; otherwise keep scanning.
            return marker != 0xffd9;
        }

        @Override
        public void visitSos(final int marker, final byte[] markerBytes, final byte[] imageData) {
            // Image data is irrelevant for this check.
        }
    });
    return found[0];
}
/**
 * Returns whether {@code marker} is one of the requested {@code markers}.
 * Tolerates a {@code null} markers array (treated as "keep nothing").
 */
private boolean keepMarker(final int marker, final int[] markers) {
    return ArrayUtils.contains(markers, marker);
}
/**
 * Reads the JPEG segments whose markers match {@code markers}, wrapping each
 * matching segment's payload in the appropriate {@link AbstractSegment}
 * subclass.
 *
 * @param byteSource source containing the JPEG data
 * @param markers the segment markers to collect
 * @param returnAfterFirst if {@code true}, stop traversal after the first
 *     matching segment has been collected
 * @return the collected segments, in file order (possibly empty)
 * @throws ImagingException on malformed data
 * @throws IOException on read failure
 */
public List<AbstractSegment> readSegments(final ByteSource byteSource, final int[] markers, final boolean returnAfterFirst)
        throws ImagingException, IOException {
    final List<AbstractSegment> result = new ArrayList<>();
    // SOFn frame-header markers, used below to recognize frame segments.
    final int[] sofnSegments = {
            // kJFIFMarker,
            JpegConstants.SOF0_MARKER, JpegConstants.SOF1_MARKER, JpegConstants.SOF2_MARKER, JpegConstants.SOF3_MARKER, JpegConstants.SOF5_MARKER,
            JpegConstants.SOF6_MARKER, JpegConstants.SOF7_MARKER, JpegConstants.SOF9_MARKER, JpegConstants.SOF10_MARKER, JpegConstants.SOF11_MARKER,
            JpegConstants.SOF13_MARKER, JpegConstants.SOF14_MARKER, JpegConstants.SOF15_MARKER, };
    final JpegUtils.Visitor visitor = new JpegUtils.Visitor() {
        // Return false to exit before reading image data.
        @Override
        public boolean beginSos() {
            return false;
        }

        // Return false to exit traversal.
        @Override
        public boolean visitSegment(final int marker, final byte[] markerBytes, final int markerLength, final byte[] markerLengthBytes,
                final byte[] segmentData) throws ImagingException, IOException {
            // End of image: stop traversing.
            if (marker == JpegConstants.EOI_MARKER) {
                return false;
            }
            // Skip segments the caller did not ask for.
            if (!keepMarker(marker, markers)) {
                return true;
            }
            // Wrap the segment payload in the most specific segment type known.
            switch (marker) {
            case JpegConstants.JPEG_APP13_MARKER:
                result.add(new App13Segment(marker, segmentData));
                break;
            case JpegConstants.JPEG_APP14_MARKER:
                result.add(new App14Segment(marker, segmentData));
                break;
            case JpegConstants.JPEG_APP2_MARKER:
                result.add(new App2Segment(marker, segmentData));
                break;
            case JpegConstants.JFIF_MARKER:
                result.add(new JfifSegment(marker, segmentData));
                break;
            default:
                // SOFn frame headers, quantization tables, other APPn
                // segments, and comments fall through to here.
                if (Arrays.binarySearch(sofnSegments, marker) >= 0) {
                    result.add(new SofnSegment(marker, segmentData));
                } else if (marker == JpegConstants.DQT_MARKER) {
                    result.add(new DqtSegment(marker, segmentData));
                } else if (marker >= JpegConstants.JPEG_APP1_MARKER && marker <= JpegConstants.JPEG_APP15_MARKER) {
                    result.add(new UnknownSegment(marker, segmentData));
                } else if (marker == JpegConstants.COM_MARKER) {
                    result.add(new ComSegment(marker, segmentData));
                }
                break;
            }
            // Continue unless the caller only wanted the first match.
            return !returnAfterFirst;
        }

        @Override
        public void visitSos(final int marker, final byte[] markerBytes, final byte[] imageData) {
            // Image data is not needed when collecting metadata segments.
        }
    };
    new JpegUtils().traverseJfif(byteSource, visitor);
    return result;
}
}
|
apache/hadoop-common | 37,427 | hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/CacheAdmin.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.tools;
import java.io.IOException;
import java.util.EnumSet;
import java.util.LinkedList;
import java.util.List;
import org.apache.commons.lang.WordUtils;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.CacheFlag;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hdfs.DFSUtil;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.protocol.CacheDirectiveEntry;
import org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo;
import org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo.Expiration;
import org.apache.hadoop.hdfs.protocol.CacheDirectiveStats;
import org.apache.hadoop.hdfs.protocol.CachePoolEntry;
import org.apache.hadoop.hdfs.protocol.CachePoolInfo;
import org.apache.hadoop.hdfs.protocol.CachePoolStats;
import org.apache.hadoop.tools.TableListing;
import org.apache.hadoop.tools.TableListing.Justification;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.Tool;
import com.google.common.base.Joiner;
/**
* This class implements command-line operations on the HDFS Cache.
*/
@InterfaceAudience.Private
public class CacheAdmin extends Configured implements Tool {
/**
 * Maximum length for printed lines
 */
private static final int MAX_LINE_WIDTH = 80;

/** Creates a CacheAdmin with a null configuration. */
public CacheAdmin() {
    this(null);
}

/**
 * Creates a CacheAdmin backed by the given configuration.
 *
 * @param conf the Hadoop configuration to use (may be null)
 */
public CacheAdmin(Configuration conf) {
    super(conf);
}
/**
 * Dispatches a command-line invocation to the matching {@link Command}.
 *
 * @param args the full argument vector; {@code args[0]} names the command
 * @return the command's exit code, 1 on usage errors, -1 on bad arguments
 * @throws IOException on communication failure with the cluster
 */
@Override
public int run(String[] args) throws IOException {
    if (args.length == 0) {
        printUsage(false);
        return 1;
    }
    final Command command = determineCommand(args[0]);
    if (command == null) {
        System.err.println("Can't understand command '" + args[0] + "'");
        if (!args[0].startsWith("-")) {
            System.err.println("Command names must start with dashes.");
        }
        printUsage(false);
        return 1;
    }
    // Everything after the command name belongs to the command itself.
    final List<String> commandArgs = new LinkedList<String>(
        java.util.Arrays.asList(args).subList(1, args.length));
    try {
        return command.run(getConf(), commandArgs);
    } catch (IllegalArgumentException e) {
        System.err.println(prettifyException(e));
        return -1;
    }
}
/** Command-line entry point; exits with the command's return code. */
public static void main(String[] argsArray) throws IOException {
    System.exit(new CacheAdmin(new Configuration()).run(argsArray));
}
/**
 * Returns the default file system as a {@link DistributedFileSystem}.
 *
 * @param conf configuration used to locate the default file system
 * @return the HDFS file system
 * @throws IllegalArgumentException if the default file system is not HDFS
 * @throws IOException on failure to contact the file system
 */
private static DistributedFileSystem getDFS(Configuration conf)
    throws IOException {
    final FileSystem fileSystem = FileSystem.get(conf);
    if (fileSystem instanceof DistributedFileSystem) {
        return (DistributedFileSystem) fileSystem;
    }
    throw new IllegalArgumentException("FileSystem " + fileSystem.getUri() +
        " is not an HDFS file system");
}
/**
 * NN exceptions contain the stack trace as part of the exception message.
 * When it's a known error, pretty-print the error and squish the stack trace.
 * <p>
 * Tolerates exceptions constructed without a message, for which
 * {@link Exception#getLocalizedMessage()} returns {@code null} (the original
 * code would throw a NullPointerException here).
 *
 * @param e the exception to summarize
 * @return the exception's simple class name followed by the first line of its
 *     message (empty when the exception has no message)
 */
private static String prettifyException(Exception e) {
    String message = e.getLocalizedMessage();
    // Keep only the first line; NN errors append the stack trace below it.
    String firstLine = (message == null) ? "" : message.split("\n")[0];
    return e.getClass().getSimpleName() + ": " + firstLine;
}
/**
 * Builds a two-column, headerless table used to render option descriptions,
 * wrapped at {@link #MAX_LINE_WIDTH} columns.
 */
private static TableListing getOptionDescriptionListing() {
    return new TableListing.Builder()
        .addField("").addField("", true)
        .wrapWidth(MAX_LINE_WIDTH).hideHeaders().build();
}
/**
 * Parses a time-to-live value from a string.
 *
 * @param maxTtlString the string to parse; "never" (case-insensitive) means
 *     no expiry; {@code null} means unspecified
 * @return the ttl in milliseconds, or {@code null} when unspecified
 * @throws IOException if the string could not be parsed
 */
private static Long parseTtlString(String maxTtlString) throws IOException {
    if (maxTtlString == null) {
        return null;
    }
    if (maxTtlString.equalsIgnoreCase("never")) {
        return CachePoolInfo.RELATIVE_EXPIRY_NEVER;
    }
    return DFSUtil.parseRelativeTime(maxTtlString);
}
/**
 * Parses a cache-pool byte limit from a string.
 *
 * @param limitString the string to parse; "unlimited" (case-insensitive)
 *     means no limit; {@code null} means unspecified
 * @return the limit in bytes, or {@code null} when unspecified
 */
private static Long parseLimitString(String limitString) {
    if (limitString == null) {
        return null;
    }
    if (limitString.equalsIgnoreCase("unlimited")) {
        return CachePoolInfo.LIMIT_UNLIMITED;
    }
    return Long.parseLong(limitString);
}
/**
 * Parses a directive expiration from a string.
 *
 * @param ttlString the string to parse; "never" (case-insensitive) means the
 *     directive never expires; {@code null} means unspecified
 * @return the expiration, or {@code null} when unspecified
 * @throws IOException if the string could not be parsed as a relative time
 */
private static Expiration parseExpirationString(String ttlString)
    throws IOException {
    if (ttlString == null) {
        return null;
    }
    if (ttlString.equalsIgnoreCase("never")) {
        return CacheDirectiveInfo.Expiration.NEVER;
    }
    return CacheDirectiveInfo.Expiration.newRelative(
        DFSUtil.parseRelativeTime(ttlString));
}
/**
 * A single cacheadmin subcommand (e.g. -addDirective). Implementations parse
 * their own argument list and execute the operation against the cluster.
 */
interface Command {
    /** Returns the command name, including the leading dash. */
    String getName();
    /** Returns the one-line usage summary for this command. */
    String getShortUsage();
    /** Returns the full usage text, including per-option descriptions. */
    String getLongUsage();
    /**
     * Runs the command.
     *
     * @param conf configuration for contacting the cluster
     * @param args the command's arguments, excluding the command name
     * @return 0 on success, non-zero on error
     */
    int run(Configuration conf, List<String> args) throws IOException;
}
/**
 * Implements "-addDirective": creates a new cache directive for a path in a
 * cache pool.
 */
private static class AddCacheDirectiveInfoCommand implements Command {
    @Override
    public String getName() {
        return "-addDirective";
    }

    @Override
    public String getShortUsage() {
        return "[" + getName() +
            " -path <path> -pool <pool-name> " +
            "[-force] " +
            "[-replication <replication>] [-ttl <time-to-live>]]\n";
    }

    @Override
    public String getLongUsage() {
        TableListing listing = getOptionDescriptionListing();
        listing.addRow("<path>", "A path to cache. The path can be " +
            "a directory or a file.");
        listing.addRow("<pool-name>", "The pool to which the directive will be " +
            "added. You must have write permission on the cache pool "
            + "in order to add new directives.");
        listing.addRow("-force",
            "Skips checking of cache pool resource limits.");
        listing.addRow("<replication>", "The cache replication factor to use. " +
            "Defaults to 1.");
        listing.addRow("<time-to-live>", "How long the directive is " +
            "valid. Can be specified in minutes, hours, and days, e.g. " +
            "30m, 4h, 2d. Valid units are [smhd]." +
            " \"never\" indicates a directive that never expires." +
            " If unspecified, the directive never expires.");
        return getShortUsage() + "\n" +
            "Add a new cache directive.\n\n" +
            listing.toString();
    }

    @Override
    public int run(Configuration conf, List<String> args) throws IOException {
        CacheDirectiveInfo.Builder builder = new CacheDirectiveInfo.Builder();
        // -path and -pool are required; everything else is optional.
        String path = StringUtils.popOptionWithArgument("-path", args);
        if (path == null) {
            System.err.println("You must specify a path with -path.");
            return 1;
        }
        builder.setPath(new Path(path));
        String poolName = StringUtils.popOptionWithArgument("-pool", args);
        if (poolName == null) {
            System.err.println("You must specify a pool name with -pool.");
            return 1;
        }
        builder.setPool(poolName);
        boolean force = StringUtils.popOption("-force", args);
        String replicationString =
            StringUtils.popOptionWithArgument("-replication", args);
        if (replicationString != null) {
            Short replication = Short.parseShort(replicationString);
            builder.setReplication(replication);
        }
        String ttlString = StringUtils.popOptionWithArgument("-ttl", args);
        try {
            Expiration ex = parseExpirationString(ttlString);
            if (ex != null) {
                builder.setExpiration(ex);
            }
        } catch (IOException e) {
            System.err.println(
                "Error while parsing ttl value: " + e.getMessage());
            return 1;
        }
        // Any argument left over at this point was not recognized.
        if (!args.isEmpty()) {
            System.err.println("Can't understand argument: " + args.get(0));
            return 1;
        }
        DistributedFileSystem dfs = getDFS(conf);
        CacheDirectiveInfo directive = builder.build();
        // -force bypasses pool resource-limit checks on the NameNode.
        EnumSet<CacheFlag> flags = EnumSet.noneOf(CacheFlag.class);
        if (force) {
            flags.add(CacheFlag.FORCE);
        }
        try {
            long id = dfs.addCacheDirective(directive, flags);
            System.out.println("Added cache directive " + id);
        } catch (IOException e) {
            System.err.println(prettifyException(e));
            return 2;
        }
        return 0;
    }
}
/**
 * Implements "-removeDirective": removes a single cache directive by ID.
 */
private static class RemoveCacheDirectiveInfoCommand implements Command {
    @Override
    public String getName() {
        return "-removeDirective";
    }

    @Override
    public String getShortUsage() {
        return "[" + getName() + " <id>]\n";
    }

    @Override
    public String getLongUsage() {
        TableListing listing = getOptionDescriptionListing();
        listing.addRow("<id>", "The id of the cache directive to remove. " +
            "You must have write permission on the pool of the " +
            "directive in order to remove it. To see a list " +
            "of cache directive IDs, use the -listDirectives command.");
        return getShortUsage() + "\n" +
            "Remove a cache directive.\n\n" +
            listing.toString();
    }

    @Override
    public int run(Configuration conf, List<String> args) throws IOException {
        String idString = StringUtils.popFirstNonOption(args);
        if (idString == null) {
            System.err.println("You must specify a directive ID to remove.");
            return 1;
        }
        // Directive IDs are positive longs; reject anything else up front.
        long id;
        try {
            id = Long.parseLong(idString);
        } catch (NumberFormatException e) {
            System.err.println("Invalid directive ID " + idString + ": expected " +
                "a numeric value.");
            return 1;
        }
        if (id <= 0) {
            System.err.println("Invalid directive ID " + id + ": ids must " +
                "be greater than 0.");
            return 1;
        }
        if (!args.isEmpty()) {
            System.err.println("Can't understand argument: " + args.get(0));
            System.err.println("Usage is " + getShortUsage());
            return 1;
        }
        DistributedFileSystem dfs = getDFS(conf);
        try {
            dfs.getClient().removeCacheDirective(id);
            // Message fixed from "Removed cached directive" to match the help
            // text and the -removeDirectives command's output.
            System.out.println("Removed cache directive " + id);
        } catch (IOException e) {
            System.err.println(prettifyException(e));
            return 2;
        }
        return 0;
    }
}
/**
 * Implements "-modifyDirective": changes attributes (path, replication, pool,
 * ttl) of an existing cache directive identified by ID.
 */
private static class ModifyCacheDirectiveInfoCommand implements Command {
    @Override
    public String getName() {
        return "-modifyDirective";
    }

    @Override
    public String getShortUsage() {
        return "[" + getName() +
            " -id <id> [-path <path>] [-force] [-replication <replication>] " +
            "[-pool <pool-name>] [-ttl <time-to-live>]]\n";
    }

    @Override
    public String getLongUsage() {
        TableListing listing = getOptionDescriptionListing();
        listing.addRow("<id>", "The ID of the directive to modify (required)");
        listing.addRow("<path>", "A path to cache. The path can be " +
            "a directory or a file. (optional)");
        listing.addRow("-force",
            "Skips checking of cache pool resource limits.");
        listing.addRow("<replication>", "The cache replication factor to use. " +
            "(optional)");
        listing.addRow("<pool-name>", "The pool to which the directive will be " +
            "added. You must have write permission on the cache pool "
            + "in order to move a directive into it. (optional)");
        listing.addRow("<time-to-live>", "How long the directive is " +
            "valid. Can be specified in minutes, hours, and days, e.g. " +
            "30m, 4h, 2d. Valid units are [smhd]." +
            " \"never\" indicates a directive that never expires.");
        return getShortUsage() + "\n" +
            "Modify a cache directive.\n\n" +
            listing.toString();
    }

    @Override
    public int run(Configuration conf, List<String> args) throws IOException {
        CacheDirectiveInfo.Builder builder =
            new CacheDirectiveInfo.Builder();
        // Tracks whether at least one mutable attribute was supplied.
        boolean modified = false;
        String idString = StringUtils.popOptionWithArgument("-id", args);
        if (idString == null) {
            System.err.println("You must specify a directive ID with -id.");
            return 1;
        }
        builder.setId(Long.parseLong(idString));
        String path = StringUtils.popOptionWithArgument("-path", args);
        if (path != null) {
            builder.setPath(new Path(path));
            modified = true;
        }
        boolean force = StringUtils.popOption("-force", args);
        String replicationString =
            StringUtils.popOptionWithArgument("-replication", args);
        if (replicationString != null) {
            builder.setReplication(Short.parseShort(replicationString));
            modified = true;
        }
        String poolName =
            StringUtils.popOptionWithArgument("-pool", args);
        if (poolName != null) {
            builder.setPool(poolName);
            modified = true;
        }
        String ttlString = StringUtils.popOptionWithArgument("-ttl", args);
        try {
            Expiration ex = parseExpirationString(ttlString);
            if (ex != null) {
                builder.setExpiration(ex);
                modified = true;
            }
        } catch (IOException e) {
            System.err.println(
                "Error while parsing ttl value: " + e.getMessage());
            return 1;
        }
        // Any argument left over at this point was not recognized.
        if (!args.isEmpty()) {
            System.err.println("Can't understand argument: " + args.get(0));
            System.err.println("Usage is " + getShortUsage());
            return 1;
        }
        if (!modified) {
            System.err.println("No modifications were specified.");
            return 1;
        }
        DistributedFileSystem dfs = getDFS(conf);
        // -force bypasses pool resource-limit checks on the NameNode.
        EnumSet<CacheFlag> flags = EnumSet.noneOf(CacheFlag.class);
        if (force) {
            flags.add(CacheFlag.FORCE);
        }
        try {
            dfs.modifyCacheDirective(builder.build(), flags);
            System.out.println("Modified cache directive " + idString);
        } catch (IOException e) {
            System.err.println(prettifyException(e));
            return 2;
        }
        return 0;
    }
}
/**
 * Implements "-removeDirectives": removes every cache directive whose path
 * matches the given path. Continues past individual failures and reports the
 * worst exit code seen.
 */
private static class RemoveCacheDirectiveInfosCommand implements Command {
    @Override
    public String getName() {
        return "-removeDirectives";
    }

    @Override
    public String getShortUsage() {
        return "[" + getName() + " -path <path>]\n";
    }

    @Override
    public String getLongUsage() {
        TableListing listing = getOptionDescriptionListing();
        listing.addRow("-path <path>", "The path of the cache directives to remove. " +
            "You must have write permission on the pool of the directive in order " +
            "to remove it. To see a list of cache directives, use the " +
            "-listDirectives command.");
        return getShortUsage() + "\n" +
            "Remove every cache directive with the specified path.\n\n" +
            listing.toString();
    }

    @Override
    public int run(Configuration conf, List<String> args) throws IOException {
        String path = StringUtils.popOptionWithArgument("-path", args);
        if (path == null) {
            System.err.println("You must specify a path with -path.");
            return 1;
        }
        if (!args.isEmpty()) {
            System.err.println("Can't understand argument: " + args.get(0));
            System.err.println("Usage is " + getShortUsage());
            return 1;
        }
        int exitCode = 0;
        try {
            DistributedFileSystem dfs = getDFS(conf);
            // List all directives matching the path, then remove each one.
            RemoteIterator<CacheDirectiveEntry> iter =
                dfs.listCacheDirectives(
                    new CacheDirectiveInfo.Builder().
                        setPath(new Path(path)).build());
            while (iter.hasNext()) {
                CacheDirectiveEntry entry = iter.next();
                try {
                    dfs.removeCacheDirective(entry.getInfo().getId());
                    System.out.println("Removed cache directive " +
                        entry.getInfo().getId());
                } catch (IOException e) {
                    // Keep going; report the failure and remember the error code.
                    System.err.println(prettifyException(e));
                    exitCode = 2;
                }
            }
        } catch (IOException e) {
            System.err.println(prettifyException(e));
            exitCode = 2;
        }
        if (exitCode == 0) {
            System.out.println("Removed every cache directive with path " +
                path);
        }
        return exitCode;
    }
}
/**
 * Implements "-listDirectives": prints cache directives, optionally filtered
 * by path, pool, or ID, with optional per-directive statistics.
 */
private static class ListCacheDirectiveInfoCommand implements Command {
    @Override
    public String getName() {
        return "-listDirectives";
    }

    @Override
    public String getShortUsage() {
        // Closing "]" added to balance the opening bracket, matching the
        // usage strings of the other commands.
        return "[" + getName()
            + " [-stats] [-path <path>] [-pool <pool>] [-id <id>]]\n";
    }

    @Override
    public String getLongUsage() {
        TableListing listing = getOptionDescriptionListing();
        listing.addRow("-stats", "List path-based cache directive statistics.");
        listing.addRow("<path>", "List only " +
            "cache directives with this path. " +
            "Note that if there is a cache directive for <path> " +
            "in a cache pool that we don't have read access for, it " +
            "will not be listed.");
        listing.addRow("<pool>", "List only path cache directives in that pool.");
        listing.addRow("<id>", "List the cache directive with this id.");
        return getShortUsage() + "\n" +
            "List cache directives.\n\n" +
            listing.toString();
    }

    @Override
    public int run(Configuration conf, List<String> args) throws IOException {
        // Build a server-side filter from the optional arguments.
        CacheDirectiveInfo.Builder builder =
            new CacheDirectiveInfo.Builder();
        String pathFilter = StringUtils.popOptionWithArgument("-path", args);
        if (pathFilter != null) {
            builder.setPath(new Path(pathFilter));
        }
        String poolFilter = StringUtils.popOptionWithArgument("-pool", args);
        if (poolFilter != null) {
            builder.setPool(poolFilter);
        }
        boolean printStats = StringUtils.popOption("-stats", args);
        String idFilter = StringUtils.popOptionWithArgument("-id", args);
        if (idFilter != null) {
            builder.setId(Long.parseLong(idFilter));
        }
        if (!args.isEmpty()) {
            System.err.println("Can't understand argument: " + args.get(0));
            return 1;
        }
        TableListing.Builder tableBuilder = new TableListing.Builder().
            addField("ID", Justification.RIGHT).
            addField("POOL", Justification.LEFT).
            addField("REPL", Justification.RIGHT).
            addField("EXPIRY", Justification.LEFT).
            addField("PATH", Justification.LEFT);
        if (printStats) {
            tableBuilder.addField("BYTES_NEEDED", Justification.RIGHT).
                addField("BYTES_CACHED", Justification.RIGHT).
                addField("FILES_NEEDED", Justification.RIGHT).
                addField("FILES_CACHED", Justification.RIGHT);
        }
        TableListing tableListing = tableBuilder.build();
        try {
            DistributedFileSystem dfs = getDFS(conf);
            RemoteIterator<CacheDirectiveEntry> iter =
                dfs.listCacheDirectives(builder.build());
            int numEntries = 0;
            while (iter.hasNext()) {
                CacheDirectiveEntry entry = iter.next();
                CacheDirectiveInfo directive = entry.getInfo();
                CacheDirectiveStats stats = entry.getStats();
                List<String> row = new LinkedList<String>();
                row.add("" + directive.getId());
                row.add(directive.getPool());
                row.add("" + directive.getReplication());
                String expiry;
                // This is effectively never, round for nice printing
                if (directive.getExpiration().getMillis() >
                    Expiration.MAX_RELATIVE_EXPIRY_MS / 2) {
                    expiry = "never";
                } else {
                    expiry = directive.getExpiration().toString();
                }
                row.add(expiry);
                row.add(directive.getPath().toUri().getPath());
                if (printStats) {
                    row.add("" + stats.getBytesNeeded());
                    row.add("" + stats.getBytesCached());
                    row.add("" + stats.getFilesNeeded());
                    row.add("" + stats.getFilesCached());
                }
                tableListing.addRow(row.toArray(new String[0]));
                numEntries++;
            }
            System.out.print(String.format("Found %d entr%s%n",
                numEntries, numEntries == 1 ? "y" : "ies"));
            if (numEntries > 0) {
                System.out.print(tableListing);
            }
        } catch (IOException e) {
            System.err.println(prettifyException(e));
            return 2;
        }
        return 0;
    }
}
/**
 * Implements "-addPool": creates a new cache pool with optional owner, group,
 * mode, byte limit, and maximum directive time-to-live.
 */
private static class AddCachePoolCommand implements Command {
    private static final String NAME = "-addPool";

    @Override
    public String getName() {
        return NAME;
    }

    @Override
    public String getShortUsage() {
        // Closing "]" added to balance the opening bracket, matching the
        // usage string of -modifyPool.
        return "[" + NAME + " <name> [-owner <owner>] " +
            "[-group <group>] [-mode <mode>] [-limit <limit>] " +
            "[-maxTtl <maxTtl>]]\n";
    }

    @Override
    public String getLongUsage() {
        TableListing listing = getOptionDescriptionListing();
        listing.addRow("<name>", "Name of the new pool.");
        listing.addRow("<owner>", "Username of the owner of the pool. " +
            "Defaults to the current user.");
        listing.addRow("<group>", "Group of the pool. " +
            "Defaults to the primary group name of the current user.");
        listing.addRow("<mode>", "UNIX-style permissions for the pool. " +
            "Permissions are specified in octal, e.g. 0755. " +
            "By default, this is set to " + String.format("0%03o",
            FsPermission.getCachePoolDefault().toShort()) + ".");
        listing.addRow("<limit>", "The maximum number of bytes that can be " +
            "cached by directives in this pool, in aggregate. By default, " +
            "no limit is set.");
        listing.addRow("<maxTtl>", "The maximum allowed time-to-live for " +
            "directives being added to the pool. This can be specified in " +
            "seconds, minutes, hours, and days, e.g. 120s, 30m, 4h, 2d. " +
            "Valid units are [smhd]. By default, no maximum is set. " +
            "A value of \"never\" specifies that there is no limit.");
        return getShortUsage() + "\n" +
            "Add a new cache pool.\n\n" +
            listing.toString();
    }

    @Override
    public int run(Configuration conf, List<String> args) throws IOException {
        // The pool name is the sole required, positional argument.
        String name = StringUtils.popFirstNonOption(args);
        if (name == null) {
            System.err.println("You must specify a name when creating a " +
                "cache pool.");
            return 1;
        }
        CachePoolInfo info = new CachePoolInfo(name);
        String owner = StringUtils.popOptionWithArgument("-owner", args);
        if (owner != null) {
            info.setOwnerName(owner);
        }
        String group = StringUtils.popOptionWithArgument("-group", args);
        if (group != null) {
            info.setGroupName(group);
        }
        String modeString = StringUtils.popOptionWithArgument("-mode", args);
        if (modeString != null) {
            // Modes are given in octal, e.g. 0755.
            short mode = Short.parseShort(modeString, 8);
            info.setMode(new FsPermission(mode));
        }
        String limitString = StringUtils.popOptionWithArgument("-limit", args);
        Long limit = parseLimitString(limitString);
        if (limit != null) {
            info.setLimit(limit);
        }
        String maxTtlString = StringUtils.popOptionWithArgument("-maxTtl", args);
        try {
            Long maxTtl = parseTtlString(maxTtlString);
            if (maxTtl != null) {
                info.setMaxRelativeExpiryMs(maxTtl);
            }
        } catch (IOException e) {
            System.err.println(
                "Error while parsing maxTtl value: " + e.getMessage());
            return 1;
        }
        if (!args.isEmpty()) {
            System.err.print("Can't understand arguments: " +
                Joiner.on(" ").join(args) + "\n");
            System.err.println("Usage is " + getShortUsage());
            return 1;
        }
        DistributedFileSystem dfs = getDFS(conf);
        try {
            dfs.addCachePool(info);
        } catch (IOException e) {
            System.err.println(prettifyException(e));
            return 2;
        }
        System.out.println("Successfully added cache pool " + name + ".");
        return 0;
    }
}
/**
 * Implements "-modifyPool": changes the metadata (owner, group, mode, limit,
 * maxTtl) of an existing cache pool, then echoes which attributes changed.
 */
private static class ModifyCachePoolCommand implements Command {
    @Override
    public String getName() {
        return "-modifyPool";
    }

    @Override
    public String getShortUsage() {
        return "[" + getName() + " <name> [-owner <owner>] " +
            "[-group <group>] [-mode <mode>] [-limit <limit>] " +
            "[-maxTtl <maxTtl>]]\n";
    }

    @Override
    public String getLongUsage() {
        TableListing listing = getOptionDescriptionListing();
        listing.addRow("<name>", "Name of the pool to modify.");
        listing.addRow("<owner>", "Username of the owner of the pool");
        listing.addRow("<group>", "Groupname of the group of the pool.");
        listing.addRow("<mode>", "Unix-style permissions of the pool in octal.");
        listing.addRow("<limit>", "Maximum number of bytes that can be cached " +
            "by this pool.");
        listing.addRow("<maxTtl>", "The maximum allowed time-to-live for " +
            "directives being added to the pool.");
        return getShortUsage() + "\n" +
            WordUtils.wrap("Modifies the metadata of an existing cache pool. " +
                "See usage of " + AddCachePoolCommand.NAME + " for more details.",
                MAX_LINE_WIDTH) + "\n\n" +
            listing.toString();
    }

    @Override
    public int run(Configuration conf, List<String> args) throws IOException {
        // Pop all options first; the remaining non-option token is the name.
        String owner = StringUtils.popOptionWithArgument("-owner", args);
        String group = StringUtils.popOptionWithArgument("-group", args);
        String modeString = StringUtils.popOptionWithArgument("-mode", args);
        // Modes are given in octal, e.g. 0755.
        Integer mode = (modeString == null) ?
            null : Integer.parseInt(modeString, 8);
        String limitString = StringUtils.popOptionWithArgument("-limit", args);
        Long limit = parseLimitString(limitString);
        String maxTtlString = StringUtils.popOptionWithArgument("-maxTtl", args);
        Long maxTtl = null;
        try {
            maxTtl = parseTtlString(maxTtlString);
        } catch (IOException e) {
            System.err.println(
                "Error while parsing maxTtl value: " + e.getMessage());
            return 1;
        }
        String name = StringUtils.popFirstNonOption(args);
        if (name == null) {
            System.err.println("You must specify a name when creating a " +
                "cache pool.");
            return 1;
        }
        if (!args.isEmpty()) {
            System.err.print("Can't understand arguments: " +
                Joiner.on(" ").join(args) + "\n");
            System.err.println("Usage is " + getShortUsage());
            return 1;
        }
        // Apply only the attributes the user supplied; require at least one.
        boolean changed = false;
        CachePoolInfo info = new CachePoolInfo(name);
        if (owner != null) {
            info.setOwnerName(owner);
            changed = true;
        }
        if (group != null) {
            info.setGroupName(group);
            changed = true;
        }
        if (mode != null) {
            info.setMode(new FsPermission(mode.shortValue()));
            changed = true;
        }
        if (limit != null) {
            info.setLimit(limit);
            changed = true;
        }
        if (maxTtl != null) {
            info.setMaxRelativeExpiryMs(maxTtl);
            changed = true;
        }
        if (!changed) {
            System.err.println("You must specify at least one attribute to " +
                "change in the cache pool.");
            return 1;
        }
        DistributedFileSystem dfs = getDFS(conf);
        try {
            dfs.modifyCachePool(info);
        } catch (IOException e) {
            System.err.println(prettifyException(e));
            return 2;
        }
        // Echo back exactly which attributes were modified.
        System.out.print("Successfully modified cache pool " + name);
        String prefix = " to have ";
        if (owner != null) {
            System.out.print(prefix + "owner name " + owner);
            prefix = " and ";
        }
        if (group != null) {
            System.out.print(prefix + "group name " + group);
            prefix = " and ";
        }
        if (mode != null) {
            System.out.print(prefix + "mode " + new FsPermission(mode.shortValue()));
            prefix = " and ";
        }
        if (limit != null) {
            System.out.print(prefix + "limit " + limit);
            prefix = " and ";
        }
        if (maxTtl != null) {
            System.out.print(prefix + "max time-to-live " + maxTtlString);
        }
        System.out.print("\n");
        return 0;
    }
}
/**
 * Implements "-removePool": removes a cache pool, which also uncaches the
 * paths associated with it.
 */
private static class RemoveCachePoolCommand implements Command {
    @Override
    public String getName() {
        return "-removePool";
    }

    @Override
    public String getShortUsage() {
        return "[" + getName() + " <name>]\n";
    }

    @Override
    public String getLongUsage() {
        return getShortUsage() + "\n" +
            WordUtils.wrap("Remove a cache pool. This also uncaches paths " +
                "associated with the pool.\n\n", MAX_LINE_WIDTH) +
            "<name>  Name of the cache pool to remove.\n";
    }

    @Override
    public int run(Configuration conf, List<String> args) throws IOException {
        // The pool name is the sole required, positional argument.
        String name = StringUtils.popFirstNonOption(args);
        if (name == null) {
            System.err.println("You must specify a name when deleting a " +
                "cache pool.");
            return 1;
        }
        if (!args.isEmpty()) {
            System.err.print("Can't understand arguments: " +
                Joiner.on(" ").join(args) + "\n");
            System.err.println("Usage is " + getShortUsage());
            return 1;
        }
        DistributedFileSystem dfs = getDFS(conf);
        try {
            dfs.removeCachePool(name);
        } catch (IOException e) {
            System.err.println(prettifyException(e));
            return 2;
        }
        System.out.println("Successfully removed cache pool " + name + ".");
        return 0;
    }
}
/**
 * Implements the {@code -listPools} subcommand: prints a table describing
 * one or all cache pools, optionally including usage statistics.
 */
private static class ListCachePoolsCommand implements Command {
  @Override
  public String getName() {
    return "-listPools";
  }

  @Override
  public String getShortUsage() {
    return "[" + getName() + " [-stats] [<name>]]\n";
  }

  @Override
  public String getLongUsage() {
    TableListing listing = getOptionDescriptionListing();
    listing.addRow("-stats", "Display additional cache pool statistics.");
    listing.addRow("<name>", "If specified, list only the named cache pool.");
    return getShortUsage() + "\n" +
        WordUtils.wrap("Display information about one or more cache pools, " +
            "e.g. name, owner, group, permissions, etc.", MAX_LINE_WIDTH) +
        "\n\n" +
        listing.toString();
  }

  /**
   * Runs the command.
   *
   * @param conf configuration used to contact the filesystem
   * @param args remaining CLI arguments; an optional pool name filter and
   *             the optional {@code -stats} flag
   * @return 0 on success (even with zero matches), 1 on usage error,
   *         2 on a server-side failure
   */
  @Override
  public int run(Configuration conf, List<String> args) throws IOException {
    final String poolFilter = StringUtils.popFirstNonOption(args);
    final boolean printStats = StringUtils.popOption("-stats", args);
    if (!args.isEmpty()) {
      System.err.print("Can't understand arguments: " +
          Joiner.on(" ").join(args) + "\n");
      System.err.println("Usage is " + getShortUsage());
      return 1;
    }
    DistributedFileSystem dfs = getDFS(conf);
    // The base columns are always shown; the statistics columns only when
    // -stats was given.
    TableListing.Builder builder = new TableListing.Builder()
        .addField("NAME", Justification.LEFT)
        .addField("OWNER", Justification.LEFT)
        .addField("GROUP", Justification.LEFT)
        .addField("MODE", Justification.LEFT)
        .addField("LIMIT", Justification.RIGHT)
        .addField("MAXTTL", Justification.RIGHT);
    if (printStats) {
      builder.addField("BYTES_NEEDED", Justification.RIGHT)
          .addField("BYTES_CACHED", Justification.RIGHT)
          .addField("BYTES_OVERLIMIT", Justification.RIGHT)
          .addField("FILES_NEEDED", Justification.RIGHT)
          .addField("FILES_CACHED", Justification.RIGHT);
    }
    TableListing listing = builder.build();
    int numResults = 0;
    try {
      RemoteIterator<CachePoolEntry> iter = dfs.listCachePools();
      while (iter.hasNext()) {
        CachePoolEntry entry = iter.next();
        CachePoolInfo info = entry.getInfo();
        if (poolFilter != null && !info.getPoolName().equals(poolFilter)) {
          continue;  // a filter was given and this pool is not it
        }
        LinkedList<String> row = new LinkedList<String>();
        row.add(info.getPoolName());
        row.add(info.getOwnerName());
        row.add(info.getGroupName());
        row.add(info.getMode() == null ? null : info.getMode().toString());
        Long limit = info.getLimit();
        // LIMIT_UNLIMITED is rendered as "unlimited"; anything else
        // (including null) is rendered via string concatenation.
        String limitString =
            (limit != null && limit.equals(CachePoolInfo.LIMIT_UNLIMITED))
                ? "unlimited" : ("" + limit);
        row.add(limitString);
        Long maxTtl = info.getMaxRelativeExpiryMs();
        String maxTtlString = null;
        if (maxTtl != null) {
          maxTtlString =
              (maxTtl.longValue() == CachePoolInfo.RELATIVE_EXPIRY_NEVER)
                  ? "never" : DFSUtil.durationToString(maxTtl);
        }
        row.add(maxTtlString);
        if (printStats) {
          CachePoolStats stats = entry.getStats();
          row.add(Long.toString(stats.getBytesNeeded()));
          row.add(Long.toString(stats.getBytesCached()));
          row.add(Long.toString(stats.getBytesOverlimit()));
          row.add(Long.toString(stats.getFilesNeeded()));
          row.add(Long.toString(stats.getFilesCached()));
        }
        listing.addRow(row.toArray(new String[0]));
        ++numResults;
        if (poolFilter != null) {
          break;  // names are unique, so at most one pool can match
        }
      }
    } catch (IOException e) {
      System.err.println(prettifyException(e));
      return 2;
    }
    System.out.print(String.format("Found %d result%s.%n", numResults,
        (numResults == 1 ? "" : "s")));
    if (numResults > 0) {
      System.out.print(listing);
    }
    // If list pools succeed, we return 0 (success exit code)
    return 0;
  }
}
/**
 * Implements the {@code -help} subcommand: prints detailed help for one
 * command, or for every command when no argument is given.
 */
private static class HelpCommand implements Command {
  @Override
  public String getName() {
    return "-help";
  }

  @Override
  public String getShortUsage() {
    return "[-help <command-name>]\n";
  }

  @Override
  public String getLongUsage() {
    TableListing listing = getOptionDescriptionListing();
    listing.addRow("<command-name>", "The command for which to get " +
        "detailed help. If no command is specified, print detailed help for " +
        "all commands");
    return getShortUsage() + "\n" +
        "Get detailed help about a command.\n\n" +
        listing.toString();
  }

  /**
   * Runs the command.
   *
   * @param conf configuration (unused by this command)
   * @param args remaining CLI arguments; at most one command name expected
   * @return 0 on success, 1 when the named command does not exist
   */
  @Override
  public int run(Configuration conf, List<String> args) throws IOException {
    if (args.isEmpty()) {
      // No argument: dump the long usage of every registered command.
      for (Command command : COMMANDS) {
        System.err.println(command.getLongUsage());
      }
      return 0;
    }
    if (args.size() != 1) {
      // NOTE(review): this message goes to stdout and still returns 0,
      // unlike the other error paths — preserved as-is for compatibility.
      System.out.println("You must give exactly one argument to -help.");
      return 0;
    }
    String commandName = args.get(0);
    // Command names are stored with a leading dash, so prepend one before
    // looking the name up.
    Command command = determineCommand("-" + commandName);
    if (command != null) {
      System.err.print(command.getLongUsage());
      return 0;
    }
    System.err.print("Sorry, I don't know the command '" +
        commandName + "'.\n");
    System.err.print("Valid help command names are:\n");
    String separator = "";
    for (Command c : COMMANDS) {
      // strip the leading dash when listing valid names
      System.err.print(separator + c.getName().substring(1));
      separator = ", ";
    }
    System.err.print("\n");
    return 1;
  }
}
// Registry of every cacheadmin subcommand. Searched linearly by
// determineCommand() and iterated (in this order) for usage/help output.
private static final Command[] COMMANDS = {
  new AddCacheDirectiveInfoCommand(),
  new ModifyCacheDirectiveInfoCommand(),
  new ListCacheDirectiveInfoCommand(),
  new RemoveCacheDirectiveInfoCommand(),
  new RemoveCacheDirectiveInfosCommand(),
  new AddCachePoolCommand(),
  new ModifyCachePoolCommand(),
  new RemoveCachePoolCommand(),
  new ListCachePoolsCommand(),
  new HelpCommand(),
};
/**
 * Prints the tool's usage banner to stderr.
 *
 * @param longUsage when true, print each command's long usage; otherwise
 *                  print the indented one-line short usage
 */
private static void printUsage(boolean longUsage) {
  System.err.println(
      "Usage: bin/hdfs cacheadmin [COMMAND]");
  for (Command command : COMMANDS) {
    System.err.print(longUsage
        ? command.getLongUsage()
        : " " + command.getShortUsage());
  }
  System.err.println();
}
/**
 * Looks up a command by its dash-prefixed name (e.g. "-listPools").
 *
 * @param commandName name to match against Command.getName()
 * @return the matching command, or null if none matches
 */
private static Command determineCommand(String commandName) {
  // A linear scan is fine here: the command table is tiny.
  for (Command candidate : COMMANDS) {
    if (candidate.getName().equals(commandName)) {
      return candidate;
    }
  }
  return null;
}
}
|
googleapis/google-cloud-java | 37,960 | java-container/proto-google-cloud-container-v1beta1/src/main/java/com/google/container/v1beta1/WorkloadMetadataConfig.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/container/v1beta1/cluster_service.proto
// Protobuf Java Version: 3.25.8
package com.google.container.v1beta1;
/**
*
*
* <pre>
* WorkloadMetadataConfig defines the metadata configuration to expose to
* workloads on the node pool.
* </pre>
*
* Protobuf type {@code google.container.v1beta1.WorkloadMetadataConfig}
*/
public final class WorkloadMetadataConfig extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.container.v1beta1.WorkloadMetadataConfig)
WorkloadMetadataConfigOrBuilder {
// NOTE: generated protobuf code (see the "DO NOT EDIT" file header); hand
// edits will be lost on regeneration.
private static final long serialVersionUID = 0L;

// Use WorkloadMetadataConfig.newBuilder() to construct.
private WorkloadMetadataConfig(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}

// Default instance: both enum fields start at their zero (UNSPECIFIED) value.
private WorkloadMetadataConfig() {
  nodeMetadata_ = 0;
  mode_ = 0;
}

@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  return new WorkloadMetadataConfig();
}

// Descriptor for this message type, taken from the generated file descriptor.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.container.v1beta1.ClusterServiceProto
      .internal_static_google_container_v1beta1_WorkloadMetadataConfig_descriptor;
}

@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.container.v1beta1.ClusterServiceProto
      .internal_static_google_container_v1beta1_WorkloadMetadataConfig_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.container.v1beta1.WorkloadMetadataConfig.class,
          com.google.container.v1beta1.WorkloadMetadataConfig.Builder.class);
}
/**
 *
 *
 * <pre>
 * NodeMetadata is the configuration for if and how to expose the node
 * metadata to the workload running on the node.
 * </pre>
 *
 * Protobuf enum {@code google.container.v1beta1.WorkloadMetadataConfig.NodeMetadata}
 */
// NOTE: generated protobuf enum (see the "DO NOT EDIT" file header).
public enum NodeMetadata implements com.google.protobuf.ProtocolMessageEnum {
  /**
   *
   *
   * <pre>
   * Not set.
   * </pre>
   *
   * <code>UNSPECIFIED = 0;</code>
   */
  UNSPECIFIED(0),
  /**
   *
   *
   * <pre>
   * Prevent workloads not in hostNetwork from accessing certain VM metadata,
   * specifically kube-env, which contains Kubelet credentials, and the
   * instance identity token.
   *
   * Metadata concealment is a temporary security solution available while the
   * bootstrapping process for cluster nodes is being redesigned with
   * significant security improvements. This feature is scheduled to be
   * deprecated in the future and later removed.
   * </pre>
   *
   * <code>SECURE = 1;</code>
   */
  SECURE(1),
  /**
   *
   *
   * <pre>
   * Expose all VM metadata to pods.
   * </pre>
   *
   * <code>EXPOSE = 2;</code>
   */
  EXPOSE(2),
  /**
   *
   *
   * <pre>
   * Run the GKE Metadata Server on this node. The GKE Metadata Server exposes
   * a metadata API to workloads that is compatible with the V1 Compute
   * Metadata APIs exposed by the Compute Engine and App Engine Metadata
   * Servers. This feature can only be enabled if Workload Identity is enabled
   * at the cluster level.
   * </pre>
   *
   * <code>GKE_METADATA_SERVER = 3;</code>
   */
  GKE_METADATA_SERVER(3),
  // Sentinel for wire values not known to this generated code version.
  UNRECOGNIZED(-1),
  ;

  /**
   *
   *
   * <pre>
   * Not set.
   * </pre>
   *
   * <code>UNSPECIFIED = 0;</code>
   */
  public static final int UNSPECIFIED_VALUE = 0;
  /**
   *
   *
   * <pre>
   * Prevent workloads not in hostNetwork from accessing certain VM metadata,
   * specifically kube-env, which contains Kubelet credentials, and the
   * instance identity token.
   *
   * Metadata concealment is a temporary security solution available while the
   * bootstrapping process for cluster nodes is being redesigned with
   * significant security improvements. This feature is scheduled to be
   * deprecated in the future and later removed.
   * </pre>
   *
   * <code>SECURE = 1;</code>
   */
  public static final int SECURE_VALUE = 1;
  /**
   *
   *
   * <pre>
   * Expose all VM metadata to pods.
   * </pre>
   *
   * <code>EXPOSE = 2;</code>
   */
  public static final int EXPOSE_VALUE = 2;
  /**
   *
   *
   * <pre>
   * Run the GKE Metadata Server on this node. The GKE Metadata Server exposes
   * a metadata API to workloads that is compatible with the V1 Compute
   * Metadata APIs exposed by the Compute Engine and App Engine Metadata
   * Servers. This feature can only be enabled if Workload Identity is enabled
   * at the cluster level.
   * </pre>
   *
   * <code>GKE_METADATA_SERVER = 3;</code>
   */
  public static final int GKE_METADATA_SERVER_VALUE = 3;

  // UNRECOGNIZED has no wire number, so asking for one is an error.
  public final int getNumber() {
    if (this == UNRECOGNIZED) {
      throw new java.lang.IllegalArgumentException(
          "Can't get the number of an unknown enum value.");
    }
    return value;
  }

  /**
   * @param value The numeric wire value of the corresponding enum entry.
   * @return The enum associated with the given numeric wire value.
   * @deprecated Use {@link #forNumber(int)} instead.
   */
  @java.lang.Deprecated
  public static NodeMetadata valueOf(int value) {
    return forNumber(value);
  }

  /**
   * @param value The numeric wire value of the corresponding enum entry.
   * @return The enum associated with the given numeric wire value.
   */
  public static NodeMetadata forNumber(int value) {
    switch (value) {
      case 0:
        return UNSPECIFIED;
      case 1:
        return SECURE;
      case 2:
        return EXPOSE;
      case 3:
        return GKE_METADATA_SERVER;
      default:
        // Unknown wire values yield null here; callers map that to
        // UNRECOGNIZED.
        return null;
    }
  }

  public static com.google.protobuf.Internal.EnumLiteMap<NodeMetadata> internalGetValueMap() {
    return internalValueMap;
  }

  private static final com.google.protobuf.Internal.EnumLiteMap<NodeMetadata> internalValueMap =
      new com.google.protobuf.Internal.EnumLiteMap<NodeMetadata>() {
        public NodeMetadata findValueByNumber(int number) {
          return NodeMetadata.forNumber(number);
        }
      };

  public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() {
    if (this == UNRECOGNIZED) {
      throw new java.lang.IllegalStateException(
          "Can't get the descriptor of an unrecognized enum value.");
    }
    return getDescriptor().getValues().get(ordinal());
  }

  public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() {
    return getDescriptor();
  }

  // This enum is the first (index 0) enum type declared on the message.
  public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() {
    return com.google.container.v1beta1.WorkloadMetadataConfig.getDescriptor()
        .getEnumTypes()
        .get(0);
  }

  private static final NodeMetadata[] VALUES = values();

  public static NodeMetadata valueOf(com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
    if (desc.getType() != getDescriptor()) {
      throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
    }
    if (desc.getIndex() == -1) {
      return UNRECOGNIZED;
    }
    return VALUES[desc.getIndex()];
  }

  // Wire number of this entry (-1 for UNRECOGNIZED).
  private final int value;

  private NodeMetadata(int value) {
    this.value = value;
  }

  // @@protoc_insertion_point(enum_scope:google.container.v1beta1.WorkloadMetadataConfig.NodeMetadata)
}
/**
 *
 *
 * <pre>
 * Mode is the configuration for how to expose metadata to workloads running
 * on the node.
 * </pre>
 *
 * Protobuf enum {@code google.container.v1beta1.WorkloadMetadataConfig.Mode}
 */
// NOTE: generated protobuf enum (see the "DO NOT EDIT" file header).
public enum Mode implements com.google.protobuf.ProtocolMessageEnum {
  /**
   *
   *
   * <pre>
   * Not set.
   * </pre>
   *
   * <code>MODE_UNSPECIFIED = 0;</code>
   */
  MODE_UNSPECIFIED(0),
  /**
   *
   *
   * <pre>
   * Expose all Compute Engine metadata to pods.
   * </pre>
   *
   * <code>GCE_METADATA = 1;</code>
   */
  GCE_METADATA(1),
  /**
   *
   *
   * <pre>
   * Run the GKE Metadata Server on this node. The GKE Metadata Server exposes
   * a metadata API to workloads that is compatible with the V1 Compute
   * Metadata APIs exposed by the Compute Engine and App Engine Metadata
   * Servers. This feature can only be enabled if Workload Identity is enabled
   * at the cluster level.
   * </pre>
   *
   * <code>GKE_METADATA = 2;</code>
   */
  GKE_METADATA(2),
  // Sentinel for wire values not known to this generated code version.
  UNRECOGNIZED(-1),
  ;

  /**
   *
   *
   * <pre>
   * Not set.
   * </pre>
   *
   * <code>MODE_UNSPECIFIED = 0;</code>
   */
  public static final int MODE_UNSPECIFIED_VALUE = 0;
  /**
   *
   *
   * <pre>
   * Expose all Compute Engine metadata to pods.
   * </pre>
   *
   * <code>GCE_METADATA = 1;</code>
   */
  public static final int GCE_METADATA_VALUE = 1;
  /**
   *
   *
   * <pre>
   * Run the GKE Metadata Server on this node. The GKE Metadata Server exposes
   * a metadata API to workloads that is compatible with the V1 Compute
   * Metadata APIs exposed by the Compute Engine and App Engine Metadata
   * Servers. This feature can only be enabled if Workload Identity is enabled
   * at the cluster level.
   * </pre>
   *
   * <code>GKE_METADATA = 2;</code>
   */
  public static final int GKE_METADATA_VALUE = 2;

  // UNRECOGNIZED has no wire number, so asking for one is an error.
  public final int getNumber() {
    if (this == UNRECOGNIZED) {
      throw new java.lang.IllegalArgumentException(
          "Can't get the number of an unknown enum value.");
    }
    return value;
  }

  /**
   * @param value The numeric wire value of the corresponding enum entry.
   * @return The enum associated with the given numeric wire value.
   * @deprecated Use {@link #forNumber(int)} instead.
   */
  @java.lang.Deprecated
  public static Mode valueOf(int value) {
    return forNumber(value);
  }

  /**
   * @param value The numeric wire value of the corresponding enum entry.
   * @return The enum associated with the given numeric wire value.
   */
  public static Mode forNumber(int value) {
    switch (value) {
      case 0:
        return MODE_UNSPECIFIED;
      case 1:
        return GCE_METADATA;
      case 2:
        return GKE_METADATA;
      default:
        // Unknown wire values yield null here; callers map that to
        // UNRECOGNIZED.
        return null;
    }
  }

  public static com.google.protobuf.Internal.EnumLiteMap<Mode> internalGetValueMap() {
    return internalValueMap;
  }

  private static final com.google.protobuf.Internal.EnumLiteMap<Mode> internalValueMap =
      new com.google.protobuf.Internal.EnumLiteMap<Mode>() {
        public Mode findValueByNumber(int number) {
          return Mode.forNumber(number);
        }
      };

  public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() {
    if (this == UNRECOGNIZED) {
      throw new java.lang.IllegalStateException(
          "Can't get the descriptor of an unrecognized enum value.");
    }
    return getDescriptor().getValues().get(ordinal());
  }

  public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() {
    return getDescriptor();
  }

  // This enum is the second (index 1) enum type declared on the message.
  public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() {
    return com.google.container.v1beta1.WorkloadMetadataConfig.getDescriptor()
        .getEnumTypes()
        .get(1);
  }

  private static final Mode[] VALUES = values();

  public static Mode valueOf(com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
    if (desc.getType() != getDescriptor()) {
      throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
    }
    if (desc.getIndex() == -1) {
      return UNRECOGNIZED;
    }
    return VALUES[desc.getIndex()];
  }

  // Wire number of this entry (-1 for UNRECOGNIZED).
  private final int value;

  private Mode(int value) {
    this.value = value;
  }

  // @@protoc_insertion_point(enum_scope:google.container.v1beta1.WorkloadMetadataConfig.Mode)
}
// Field 1: node_metadata (deprecated in the proto; superseded by mode).
public static final int NODE_METADATA_FIELD_NUMBER = 1;
// Stored as the raw wire integer so unknown enum values survive a round trip.
private int nodeMetadata_ = 0;

/**
 *
 *
 * <pre>
 * NodeMetadata is the configuration for how to expose metadata to the
 * workloads running on the node.
 * </pre>
 *
 * <code>
 * .google.container.v1beta1.WorkloadMetadataConfig.NodeMetadata node_metadata = 1 [deprecated = true];
 * </code>
 *
 * @deprecated google.container.v1beta1.WorkloadMetadataConfig.node_metadata is deprecated. See
 *     google/container/v1beta1/cluster_service.proto;l=5804
 * @return The enum numeric value on the wire for nodeMetadata.
 */
@java.lang.Override
@java.lang.Deprecated
public int getNodeMetadataValue() {
  return nodeMetadata_;
}

/**
 *
 *
 * <pre>
 * NodeMetadata is the configuration for how to expose metadata to the
 * workloads running on the node.
 * </pre>
 *
 * <code>
 * .google.container.v1beta1.WorkloadMetadataConfig.NodeMetadata node_metadata = 1 [deprecated = true];
 * </code>
 *
 * @deprecated google.container.v1beta1.WorkloadMetadataConfig.node_metadata is deprecated. See
 *     google/container/v1beta1/cluster_service.proto;l=5804
 * @return The nodeMetadata.
 */
@java.lang.Override
@java.lang.Deprecated
public com.google.container.v1beta1.WorkloadMetadataConfig.NodeMetadata getNodeMetadata() {
  com.google.container.v1beta1.WorkloadMetadataConfig.NodeMetadata result =
      com.google.container.v1beta1.WorkloadMetadataConfig.NodeMetadata.forNumber(nodeMetadata_);
  // Unknown wire values map to UNRECOGNIZED rather than null.
  return result == null
      ? com.google.container.v1beta1.WorkloadMetadataConfig.NodeMetadata.UNRECOGNIZED
      : result;
}

// Field 2: mode.
public static final int MODE_FIELD_NUMBER = 2;
// Stored as the raw wire integer so unknown enum values survive a round trip.
private int mode_ = 0;

/**
 *
 *
 * <pre>
 * Mode is the configuration for how to expose metadata to workloads running
 * on the node pool.
 * </pre>
 *
 * <code>.google.container.v1beta1.WorkloadMetadataConfig.Mode mode = 2;</code>
 *
 * @return The enum numeric value on the wire for mode.
 */
@java.lang.Override
public int getModeValue() {
  return mode_;
}

/**
 *
 *
 * <pre>
 * Mode is the configuration for how to expose metadata to workloads running
 * on the node pool.
 * </pre>
 *
 * <code>.google.container.v1beta1.WorkloadMetadataConfig.Mode mode = 2;</code>
 *
 * @return The mode.
 */
@java.lang.Override
public com.google.container.v1beta1.WorkloadMetadataConfig.Mode getMode() {
  com.google.container.v1beta1.WorkloadMetadataConfig.Mode result =
      com.google.container.v1beta1.WorkloadMetadataConfig.Mode.forNumber(mode_);
  // Unknown wire values map to UNRECOGNIZED rather than null.
  return result == null
      ? com.google.container.v1beta1.WorkloadMetadataConfig.Mode.UNRECOGNIZED
      : result;
}
// Memoized isInitialized() result: -1 = not computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;

@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  // No required fields in this message, so it is always initialized.
  memoizedIsInitialized = 1;
  return true;
}

@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  // Proto3 semantics: enum fields equal to their zero/UNSPECIFIED default
  // are omitted from the wire.
  if (nodeMetadata_
      != com.google.container.v1beta1.WorkloadMetadataConfig.NodeMetadata.UNSPECIFIED
          .getNumber()) {
    output.writeEnum(1, nodeMetadata_);
  }
  if (mode_
      != com.google.container.v1beta1.WorkloadMetadataConfig.Mode.MODE_UNSPECIFIED.getNumber()) {
    output.writeEnum(2, mode_);
  }
  getUnknownFields().writeTo(output);
}

@java.lang.Override
public int getSerializedSize() {
  // Size is memoized; -1 means not yet computed.
  int size = memoizedSize;
  if (size != -1) return size;

  size = 0;
  if (nodeMetadata_
      != com.google.container.v1beta1.WorkloadMetadataConfig.NodeMetadata.UNSPECIFIED
          .getNumber()) {
    size += com.google.protobuf.CodedOutputStream.computeEnumSize(1, nodeMetadata_);
  }
  if (mode_
      != com.google.container.v1beta1.WorkloadMetadataConfig.Mode.MODE_UNSPECIFIED.getNumber()) {
    size += com.google.protobuf.CodedOutputStream.computeEnumSize(2, mode_);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}

@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.container.v1beta1.WorkloadMetadataConfig)) {
    return super.equals(obj);
  }
  com.google.container.v1beta1.WorkloadMetadataConfig other =
      (com.google.container.v1beta1.WorkloadMetadataConfig) obj;

  // Field-by-field comparison on the raw wire values, plus unknown fields.
  if (nodeMetadata_ != other.nodeMetadata_) return false;
  if (mode_ != other.mode_) return false;
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}

@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  hash = (37 * hash) + NODE_METADATA_FIELD_NUMBER;
  hash = (53 * hash) + nodeMetadata_;
  hash = (37 * hash) + MODE_FIELD_NUMBER;
  hash = (53 * hash) + mode_;
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
// Standard generated parse entry points: one overload per supported input
// representation (ByteBuffer, ByteString, byte[], streams), each with and
// without an extension registry.
public static com.google.container.v1beta1.WorkloadMetadataConfig parseFrom(
    java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.container.v1beta1.WorkloadMetadataConfig parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.container.v1beta1.WorkloadMetadataConfig parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.container.v1beta1.WorkloadMetadataConfig parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.container.v1beta1.WorkloadMetadataConfig parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.container.v1beta1.WorkloadMetadataConfig parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.container.v1beta1.WorkloadMetadataConfig parseFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.container.v1beta1.WorkloadMetadataConfig parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

// Delimited variants read a varint length prefix before the message body.
public static com.google.container.v1beta1.WorkloadMetadataConfig parseDelimitedFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}

public static com.google.container.v1beta1.WorkloadMetadataConfig parseDelimitedFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.container.v1beta1.WorkloadMetadataConfig parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.container.v1beta1.WorkloadMetadataConfig parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

// Builder factory plumbing.
@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}

public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}

public static Builder newBuilder(com.google.container.v1beta1.WorkloadMetadataConfig prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}

@java.lang.Override
public Builder toBuilder() {
  // Avoid an allocation when this is the (empty) default instance.
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}

@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
*
*
* <pre>
* WorkloadMetadataConfig defines the metadata configuration to expose to
* workloads on the node pool.
* </pre>
*
* Protobuf type {@code google.container.v1beta1.WorkloadMetadataConfig}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.container.v1beta1.WorkloadMetadataConfig)
com.google.container.v1beta1.WorkloadMetadataConfigOrBuilder {
// NOTE: generated Builder plumbing (see the "DO NOT EDIT" file header).
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.container.v1beta1.ClusterServiceProto
      .internal_static_google_container_v1beta1_WorkloadMetadataConfig_descriptor;
}

@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.container.v1beta1.ClusterServiceProto
      .internal_static_google_container_v1beta1_WorkloadMetadataConfig_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.container.v1beta1.WorkloadMetadataConfig.class,
          com.google.container.v1beta1.WorkloadMetadataConfig.Builder.class);
}

// Construct using com.google.container.v1beta1.WorkloadMetadataConfig.newBuilder()
private Builder() {}

private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  super(parent);
}

@java.lang.Override
public Builder clear() {
  super.clear();
  // Reset the has-bits and both enum fields to their defaults.
  bitField0_ = 0;
  nodeMetadata_ = 0;
  mode_ = 0;
  return this;
}

@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
  return com.google.container.v1beta1.ClusterServiceProto
      .internal_static_google_container_v1beta1_WorkloadMetadataConfig_descriptor;
}

@java.lang.Override
public com.google.container.v1beta1.WorkloadMetadataConfig getDefaultInstanceForType() {
  return com.google.container.v1beta1.WorkloadMetadataConfig.getDefaultInstance();
}

@java.lang.Override
public com.google.container.v1beta1.WorkloadMetadataConfig build() {
  com.google.container.v1beta1.WorkloadMetadataConfig result = buildPartial();
  if (!result.isInitialized()) {
    throw newUninitializedMessageException(result);
  }
  return result;
}

@java.lang.Override
public com.google.container.v1beta1.WorkloadMetadataConfig buildPartial() {
  com.google.container.v1beta1.WorkloadMetadataConfig result =
      new com.google.container.v1beta1.WorkloadMetadataConfig(this);
  // Only copy fields whose has-bit is set.
  if (bitField0_ != 0) {
    buildPartial0(result);
  }
  onBuilt();
  return result;
}

private void buildPartial0(com.google.container.v1beta1.WorkloadMetadataConfig result) {
  int from_bitField0_ = bitField0_;
  if (((from_bitField0_ & 0x00000001) != 0)) {
    result.nodeMetadata_ = nodeMetadata_;
  }
  if (((from_bitField0_ & 0x00000002) != 0)) {
    result.mode_ = mode_;
  }
}

@java.lang.Override
public Builder clone() {
  return super.clone();
}

// Reflection-based field mutation delegates straight to the superclass.
@java.lang.Override
public Builder setField(
    com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
  return super.setField(field, value);
}

@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
  return super.clearField(field);
}

@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
  return super.clearOneof(oneof);
}

@java.lang.Override
public Builder setRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
  return super.setRepeatedField(field, index, value);
}

@java.lang.Override
public Builder addRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
  return super.addRepeatedField(field, value);
}

@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
  if (other instanceof com.google.container.v1beta1.WorkloadMetadataConfig) {
    return mergeFrom((com.google.container.v1beta1.WorkloadMetadataConfig) other);
  } else {
    super.mergeFrom(other);
    return this;
  }
}

public Builder mergeFrom(com.google.container.v1beta1.WorkloadMetadataConfig other) {
  if (other == com.google.container.v1beta1.WorkloadMetadataConfig.getDefaultInstance())
    return this;
  // Proto3 merge: only non-zero (non-default) fields overwrite ours.
  if (other.nodeMetadata_ != 0) {
    setNodeMetadataValue(other.getNodeMetadataValue());
  }
  if (other.mode_ != 0) {
    setModeValue(other.getModeValue());
  }
  this.mergeUnknownFields(other.getUnknownFields());
  onChanged();
  return this;
}

@java.lang.Override
public final boolean isInitialized() {
  // No required fields, so a builder is always initialized.
  return true;
}

@java.lang.Override
public Builder mergeFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          done = true;
          break;
        case 8:
          {
            // field 1 (node_metadata), varint wire type
            nodeMetadata_ = input.readEnum();
            bitField0_ |= 0x00000001;
            break;
          } // case 8
        case 16:
          {
            // field 2 (mode), varint wire type
            mode_ = input.readEnum();
            bitField0_ |= 0x00000002;
            break;
          } // case 16
        default:
          {
            if (!super.parseUnknownField(input, extensionRegistry, tag)) {
              done = true; // was an endgroup tag
            }
            break;
          } // default:
      } // switch (tag)
    } // while (!done)
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.unwrapIOException();
  } finally {
    onChanged();
  } // finally
  return this;
}
// Has-bits for the builder: bit 0 = node_metadata, bit 1 = mode.
private int bitField0_;

// Raw wire value of field 1 (node_metadata).
private int nodeMetadata_ = 0;

/**
 *
 *
 * <pre>
 * NodeMetadata is the configuration for how to expose metadata to the
 * workloads running on the node.
 * </pre>
 *
 * <code>
 * .google.container.v1beta1.WorkloadMetadataConfig.NodeMetadata node_metadata = 1 [deprecated = true];
 * </code>
 *
 * @deprecated google.container.v1beta1.WorkloadMetadataConfig.node_metadata is deprecated. See
 *     google/container/v1beta1/cluster_service.proto;l=5804
 * @return The enum numeric value on the wire for nodeMetadata.
 */
@java.lang.Override
@java.lang.Deprecated
public int getNodeMetadataValue() {
  return nodeMetadata_;
}

/**
 *
 *
 * <pre>
 * NodeMetadata is the configuration for how to expose metadata to the
 * workloads running on the node.
 * </pre>
 *
 * <code>
 * .google.container.v1beta1.WorkloadMetadataConfig.NodeMetadata node_metadata = 1 [deprecated = true];
 * </code>
 *
 * @deprecated google.container.v1beta1.WorkloadMetadataConfig.node_metadata is deprecated. See
 *     google/container/v1beta1/cluster_service.proto;l=5804
 * @param value The enum numeric value on the wire for nodeMetadata to set.
 * @return This builder for chaining.
 */
@java.lang.Deprecated
public Builder setNodeMetadataValue(int value) {
  // Accepts any integer, even values with no matching enum entry.
  nodeMetadata_ = value;
  bitField0_ |= 0x00000001;
  onChanged();
  return this;
}

/**
 *
 *
 * <pre>
 * NodeMetadata is the configuration for how to expose metadata to the
 * workloads running on the node.
 * </pre>
 *
 * <code>
 * .google.container.v1beta1.WorkloadMetadataConfig.NodeMetadata node_metadata = 1 [deprecated = true];
 * </code>
 *
 * @deprecated google.container.v1beta1.WorkloadMetadataConfig.node_metadata is deprecated. See
 *     google/container/v1beta1/cluster_service.proto;l=5804
 * @return The nodeMetadata.
 */
@java.lang.Override
@java.lang.Deprecated
public com.google.container.v1beta1.WorkloadMetadataConfig.NodeMetadata getNodeMetadata() {
  com.google.container.v1beta1.WorkloadMetadataConfig.NodeMetadata result =
      com.google.container.v1beta1.WorkloadMetadataConfig.NodeMetadata.forNumber(nodeMetadata_);
  // Unknown wire values map to UNRECOGNIZED rather than null.
  return result == null
      ? com.google.container.v1beta1.WorkloadMetadataConfig.NodeMetadata.UNRECOGNIZED
      : result;
}
/**
*
*
* <pre>
* NodeMetadata is the configuration for how to expose metadata to the
* workloads running on the node.
* </pre>
*
* <code>
* .google.container.v1beta1.WorkloadMetadataConfig.NodeMetadata node_metadata = 1 [deprecated = true];
* </code>
*
* @deprecated google.container.v1beta1.WorkloadMetadataConfig.node_metadata is deprecated. See
* google/container/v1beta1/cluster_service.proto;l=5804
* @param value The nodeMetadata to set.
* @return This builder for chaining.
*/
@java.lang.Deprecated
public Builder setNodeMetadata(
com.google.container.v1beta1.WorkloadMetadataConfig.NodeMetadata value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
nodeMetadata_ = value.getNumber();
onChanged();
return this;
}
/**
*
*
* <pre>
* NodeMetadata is the configuration for how to expose metadata to the
* workloads running on the node.
* </pre>
*
* <code>
* .google.container.v1beta1.WorkloadMetadataConfig.NodeMetadata node_metadata = 1 [deprecated = true];
* </code>
*
* @deprecated google.container.v1beta1.WorkloadMetadataConfig.node_metadata is deprecated. See
* google/container/v1beta1/cluster_service.proto;l=5804
* @return This builder for chaining.
*/
@java.lang.Deprecated
public Builder clearNodeMetadata() {
bitField0_ = (bitField0_ & ~0x00000001);
nodeMetadata_ = 0;
onChanged();
return this;
}
    // Stored as the raw wire value so unrecognized enum numbers survive round-trips.
    private int mode_ = 0;
    /**
     *
     *
     * <pre>
     * Mode is the configuration for how to expose metadata to workloads running
     * on the node pool.
     * </pre>
     *
     * <code>.google.container.v1beta1.WorkloadMetadataConfig.Mode mode = 2;</code>
     *
     * @return The enum numeric value on the wire for mode.
     */
    @java.lang.Override
    public int getModeValue() {
      return mode_;
    }
    /**
     *
     *
     * <pre>
     * Mode is the configuration for how to expose metadata to workloads running
     * on the node pool.
     * </pre>
     *
     * <code>.google.container.v1beta1.WorkloadMetadataConfig.Mode mode = 2;</code>
     *
     * @param value The enum numeric value on the wire for mode to set.
     * @return This builder for chaining.
     */
    public Builder setModeValue(int value) {
      mode_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Mode is the configuration for how to expose metadata to workloads running
     * on the node pool.
     * </pre>
     *
     * <code>.google.container.v1beta1.WorkloadMetadataConfig.Mode mode = 2;</code>
     *
     * @return The mode.
     */
    @java.lang.Override
    public com.google.container.v1beta1.WorkloadMetadataConfig.Mode getMode() {
      // UNRECOGNIZED is returned when the stored wire value has no known enum constant.
      com.google.container.v1beta1.WorkloadMetadataConfig.Mode result =
          com.google.container.v1beta1.WorkloadMetadataConfig.Mode.forNumber(mode_);
      return result == null
          ? com.google.container.v1beta1.WorkloadMetadataConfig.Mode.UNRECOGNIZED
          : result;
    }
    /**
     *
     *
     * <pre>
     * Mode is the configuration for how to expose metadata to workloads running
     * on the node pool.
     * </pre>
     *
     * <code>.google.container.v1beta1.WorkloadMetadataConfig.Mode mode = 2;</code>
     *
     * @param value The mode to set.
     * @return This builder for chaining.
     */
    public Builder setMode(com.google.container.v1beta1.WorkloadMetadataConfig.Mode value) {
      if (value == null) {
        throw new NullPointerException();
      }
      bitField0_ |= 0x00000002;
      mode_ = value.getNumber();
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Mode is the configuration for how to expose metadata to workloads running
     * on the node pool.
     * </pre>
     *
     * <code>.google.container.v1beta1.WorkloadMetadataConfig.Mode mode = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearMode() {
      bitField0_ = (bitField0_ & ~0x00000002);
      mode_ = 0;
      onChanged();
      return this;
    }
    // Delegate unknown-field handling to the generated superclass; declared final so the way
    // unrecognized wire data is preserved cannot be altered.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.container.v1beta1.WorkloadMetadataConfig)
}
// @@protoc_insertion_point(class_scope:google.container.v1beta1.WorkloadMetadataConfig)
  // Singleton instance with every field at its proto3 default; shared by all callers.
  private static final com.google.container.v1beta1.WorkloadMetadataConfig DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.container.v1beta1.WorkloadMetadataConfig();
  }

  public static com.google.container.v1beta1.WorkloadMetadataConfig getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Stateless shared parser. Parsing is delegated to the Builder's mergeFrom so that a partial
  // message can be attached to any thrown InvalidProtocolBufferException.
  private static final com.google.protobuf.Parser<WorkloadMetadataConfig> PARSER =
      new com.google.protobuf.AbstractParser<WorkloadMetadataConfig>() {
        @java.lang.Override
        public WorkloadMetadataConfig parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O failures so callers see a single exception type.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<WorkloadMetadataConfig> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<WorkloadMetadataConfig> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.container.v1beta1.WorkloadMetadataConfig getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.processors.slack.consume;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.slack.api.methods.SlackApiException;
import com.slack.api.methods.request.conversations.ConversationsHistoryRequest;
import com.slack.api.methods.request.conversations.ConversationsRepliesRequest;
import com.slack.api.methods.response.conversations.ConversationsHistoryResponse;
import com.slack.api.methods.response.conversations.ConversationsRepliesResponse;
import com.slack.api.model.Message;
import java.io.IOException;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
import java.util.function.Predicate;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.nifi.components.ConfigVerificationResult;
import org.apache.nifi.components.state.Scope;
import org.apache.nifi.components.state.StateMap;
import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.flowfile.attributes.CoreAttributes;
import org.apache.nifi.logging.ComponentLog;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.Relationship;
import org.apache.nifi.processors.slack.util.SlackResponseUtil;
public class ConsumeChannel {
    // Transit URI reported to provenance for received FlowFiles.
    private static final String CONVERSATION_HISTORY_URL = "https://slack.com/api/conversations.history";
    // Value persisted under StateKeys.ACTION while a reply scan is in progress.
    private static final String CHECK_FOR_REPLIES = "check for replies";
    // Persisted paging directions: BACKWARD during the initial historical load, FORWARD afterwards.
    private static final String BACKWARD = "backward";
    private static final String FORWARD = "forward";
    // Matches embedded user mentions such as <@U012345>; group 1 captures the user ID.
    private static final Pattern MENTION_PATTERN = Pattern.compile("<@(U.*?)>");
    // How long isYielded() reports true after yield() is called.
    private static final long YIELD_MILLIS = 3_000L;

    private final ConsumeSlackClient client;
    private final String channelId;
    private final String channelName;
    private final int batchSize;
    private final long replyMonitorFrequencyMillis;
    private final long replyMonitorWindowMillis;
    private final boolean resolveUsernames;
    private final boolean includeMessageBlocks;
    private final UsernameLookup usernameLookup;
    private final Relationship successRelationship;
    private final ComponentLog logger;
    private final ObjectMapper objectMapper;
    private final StateKeys stateKeys;

    // Epoch millis until which this channel asks not to be polled; 0 means not yielded.
    private volatile long yieldExpiration;
    private volatile long lastReplyMonitorPollEnd = System.currentTimeMillis();
    // Epoch millis of the earliest allowed next request (rate-limit backoff); 0 means no backoff.
    private final AtomicLong nextRequestTime = new AtomicLong(0L);
    // Private constructor: instances are created only via the Builder.
    private ConsumeChannel(final Builder builder) {
        this.client = builder.client;
        this.channelId = builder.channelId;
        this.channelName = builder.channelName;
        this.batchSize = builder.batchSize;
        this.replyMonitorFrequencyMillis = builder.replyMonitorFrequencyMillis;
        this.replyMonitorWindowMillis = builder.replyMonitorWindowMillis;
        this.logger = builder.logger;
        this.resolveUsernames = builder.resolveUsernames;
        this.includeMessageBlocks = builder.includeMessageBlocks;
        this.successRelationship = builder.successRelationship;
        this.usernameLookup = builder.usernameLookup;
        this.objectMapper = builder.objectMapper;
        stateKeys = new StateKeys(channelId);
    }
    /**
     * @return the Slack channel ID this consumer is bound to
     */
    public String getChannelId() {
        return channelId;
    }
public ConfigVerificationResult verify() {
final ConversationsHistoryRequest request = ConversationsHistoryRequest.builder()
.channel(channelId)
.limit(1)
.build();
final ConversationsHistoryResponse response;
try {
response = client.fetchConversationsHistory(request);
} catch (final Exception e) {
return new ConfigVerificationResult.Builder()
.verificationStepName("Check authorization for Channel " + channelId)
.outcome(ConfigVerificationResult.Outcome.FAILED)
.explanation("Failed to obtain a message due to: " + e)
.build();
}
if (response.isOk()) {
final List<Message> messages = response.getMessages();
final Message firstMessage = messages.get(0);
enrichMessage(firstMessage);
final String username = firstMessage.getUsername();
if (resolveUsernames && username == null) {
return new ConfigVerificationResult.Builder()
.verificationStepName("Check authorization for Channel " + channelId)
.outcome(ConfigVerificationResult.Outcome.FAILED)
.explanation("Successfully retrieved a message but failed to resolve the username")
.build();
}
final String user = username == null ? firstMessage.getUser() : username;
final String explanation = response.getMessages().isEmpty() ? "Successfully requested messages for channel but got no messages" : "Successfully retrieved a message from " + user;
return new ConfigVerificationResult.Builder()
.verificationStepName("Check authorization for Channel " + channelId)
.outcome(ConfigVerificationResult.Outcome.SUCCESSFUL)
.explanation(explanation)
.build();
}
final String errorMessage = SlackResponseUtil.getErrorMessage(response.getError(), response.getNeeded(), response.getProvided(), response.getWarning());
return new ConfigVerificationResult.Builder()
.verificationStepName("Check authorization for Channel " + channelId)
.outcome(ConfigVerificationResult.Outcome.FAILED)
.explanation("Failed to obtain a message due to: " + errorMessage)
.build();
}
public void consume(final ProcessContext context, final ProcessSession session) throws IOException, SlackApiException {
final long minTimestamp = nextRequestTime.get();
if (minTimestamp > 0 && System.currentTimeMillis() < minTimestamp) {
context.yield();
return;
}
// Get the current state
final StateMap stateMap;
try {
stateMap = session.getState(Scope.CLUSTER);
} catch (final IOException ioe) {
logger.error("Failed to determine current offset for channel {}; will not retrieve any messages until this is resolved", channelId, ioe);
context.yield();
return;
}
// Determine if we need to check historical messages for replies, or if we need to consume the latest messages.
final boolean checkForReplies = isCheckForReplies(stateMap);
if (checkForReplies) {
consumeReplies(context, session, stateMap);
} else {
consumeLatestMessages(context, session, stateMap);
}
}
private boolean isCheckForReplies(final StateMap stateMap) {
final String currentAction = stateMap.get(stateKeys.ACTION);
if (CHECK_FOR_REPLIES.equals(currentAction)) {
return true;
}
final long nextCheckRepliesTime = lastReplyMonitorPollEnd + replyMonitorFrequencyMillis;
if (System.currentTimeMillis() > nextCheckRepliesTime) {
return true;
}
return false;
}
    /**
     * Scans channel history within the reply-monitor window for thread replies that arrived
     * since the last scan, writing qualifying replies to FlowFiles. Progress is checkpointed
     * in cluster state so a scan interrupted by rate limiting can resume; all scan-specific
     * keys are removed when the scan completes.
     */
    private void consumeReplies(final ProcessContext context, final ProcessSession session, final StateMap stateMap) throws IOException, SlackApiException {
        // Make sure that we've completed our initial "load" of messages. If not, we want to load the messages before we start
        // monitoring for updates to threads.
        final String direction = stateMap.get(stateKeys.DIRECTION);
        if (!FORWARD.equals(direction)) {
            onCompletedRepliesScan(session, new HashMap<>(stateMap.toMap()), null);
            return;
        }

        // We want to use the latest timestamp we've seen as the threshold for replies.
        final String latestTs = stateMap.get(stateKeys.LATEST_TS);
        if (latestTs == null) {
            onCompletedRepliesScan(session, new HashMap<>(stateMap.toMap()), null);
            return;
        }

        // If the action has not been set to denote that we're in the process of checking for replies, do so now.
        final Map<String, String> updatedStateMap = new HashMap<>(stateMap.toMap());
        final String currentAction = stateMap.get(stateKeys.ACTION);
        if (!CHECK_FOR_REPLIES.equals(currentAction)) {
            updatedStateMap.put(stateKeys.ACTION, CHECK_FOR_REPLIES);
            session.setState(updatedStateMap, Scope.CLUSTER);
        }

        // Replies between minTs (exclusive) and maxTs (exclusive) are candidates for output.
        String minTsValue = stateMap.get(stateKeys.REPLY_MIN_TS);
        if (minTsValue == null) {
            minTsValue = latestTs;
        }
        final String maxTsValue = stateMap.get(stateKeys.REPLY_MAX_TS);

        final SlackTimestamp minTs = new SlackTimestamp(minTsValue);
        final SlackTimestamp maxTs = maxTsValue == null ? new SlackTimestamp() : new SlackTimestamp(maxTsValue);
        final SlackTimestamp maxParentTs = new SlackTimestamp(latestTs);

        // Only threads whose parent falls within the monitor window are examined.
        final String oldestThreadTs = new SlackTimestamp(System.currentTimeMillis() - replyMonitorWindowMillis).getRawValue();
        String earliestThreadTs = stateMap.get(stateKeys.HISTORICAL_REPLIES_EARLIEST_THREAD_TS);
        if (earliestThreadTs == null) {
            earliestThreadTs = new SlackTimestamp(System.currentTimeMillis()).getRawValue();
        }

        String repliesCursor = stateMap.get(stateKeys.HISTORICAL_MESSAGES_REPLIES_CURSOR);
        while (true) {
            final ConversationsHistoryRequest request = ConversationsHistoryRequest.builder()
                .channel(channelId)
                .limit(500)
                .latest(earliestThreadTs)
                .oldest(oldestThreadTs)
                .inclusive(true)
                .build();

            // Never write the messages because we are only interested in replies
            final Predicate<Message> messageFilter = message -> false;
            final Predicate<Message> replyFilter = reply -> {
                final SlackTimestamp replyTs = new SlackTimestamp(reply.getTs());
                // If the timestamp of the reply is newer than our cutoff, don't include it.
                if (replyTs.afterOrEqualTo(maxTs)) {
                    return false;
                }

                // If the timestamp is before our min timestamp, we should have already output it.
                if (replyTs.beforeOrEqualTo(minTs)) {
                    return false;
                }

                // If the parent message of the thread is newer than the latest parent message we've listed, ignore it.
                // We'll output it the next time we output parent messages.
                final SlackTimestamp replyThreadTs = new SlackTimestamp(reply.getThreadTs());
                if (replyThreadTs.after(maxParentTs)) {
                    return false;
                }

                return true;
            };

            final ConsumptionResults results = consumeMessages(context, session, request, messageFilter, repliesCursor, minTs, replyFilter);

            // If finished consuming replies, remove all keys related to this action
            if (!results.isMore() && !results.isFailure()) {
                onCompletedRepliesScan(session, updatedStateMap, maxTs);
                return;
            }

            final SlackTimestamp earliest = results.getEarliestTimestamp();
            earliestThreadTs = earliest == null ? null : earliest.getRawValue();
            repliesCursor = results.getRepliesCursor();
            if (earliestThreadTs == null) {
                break;
            }

            // Update state
            updatedStateMap.put(stateKeys.HISTORICAL_REPLIES_EARLIEST_THREAD_TS, earliestThreadTs);
            if (repliesCursor != null) {
                updatedStateMap.put(stateKeys.HISTORICAL_MESSAGES_REPLIES_CURSOR, repliesCursor);
            }

            session.setState(updatedStateMap, Scope.CLUSTER);
            session.commitAsync();

            if (!results.isContinuePolling()) {
                break;
            }
        }
    }
private void onCompletedRepliesScan(final ProcessSession session, final Map<String, String> updatedStateMap, final SlackTimestamp replyTsCutoff) throws IOException {
updatedStateMap.remove(stateKeys.ACTION);
updatedStateMap.remove(stateKeys.HISTORICAL_REPLIES_EARLIEST_THREAD_TS);
updatedStateMap.remove(stateKeys.HISTORICAL_MESSAGES_REPLIES_CURSOR);
updatedStateMap.remove(stateKeys.REPLY_MAX_TS);
if (replyTsCutoff != null) {
updatedStateMap.put(stateKeys.REPLY_MIN_TS, replyTsCutoff.getRawValue());
}
session.setState(updatedStateMap, Scope.CLUSTER);
lastReplyMonitorPollEnd = System.currentTimeMillis();
}
    /**
     * Consumes channel messages, paging BACKWARD through history on first run until the
     * channel's beginning is reached, then switching permanently to FORWARD consumption of
     * new messages. Offsets, direction, and any reply-pagination cursor are checkpointed in
     * cluster state after each page.
     */
    private void consumeLatestMessages(final ProcessContext context, final ProcessSession session, final StateMap stateMap) throws IOException, SlackApiException {
        final String startingRepliesCursor = stateMap.get(stateKeys.LATEST_REPLIES_CURSOR);

        String direction = stateMap.get(stateKeys.DIRECTION);
        if (direction == null) {
            direction = BACKWARD;
        }

        final String startingTimestampKey = BACKWARD.equals(direction) ? stateKeys.EARLIEST_TS : stateKeys.LATEST_TS;
        String ts = stateMap.get(startingTimestampKey);

        // If there's a cursor for replies, we want to include the last message as a duplicate, so that we can easily
        // fetch its replies. We'll not write out the message itself, as it was already written in a previous FlowFile.
        boolean includeLastMessage = startingRepliesCursor != null;
        String repliesCursor = startingRepliesCursor;

        final Map<String, String> updatedStateMap = new HashMap<>(stateMap.toMap());
        while (true) {
            // The logic here for building these requests gets complex, unfortunately
            // When a request is made to Slack to retrieve Conversation History, it allows specifying
            // the max number of messages and an oldest and/or latest timestamp. However, the messages that
            // are returned are always in the order of newest to oldest. So, if we were to set the oldest timestamp
            // to say 0, with a limit of 5, we would get the 5 newest messages - not the 5 oldest.
            // But when the Processor first runs, we want the ability to retrieve all messages from a given channel.
            // Because of that, we have to set the 'latest' timestamp on the request. Unfortunately, there's
            // no way to start at the beginning and progress forward to the current time. So, instead, we must start
            // at the current time and progress backward until we reach the oldest message in the channel. This is done
            // by setting the latest timestamp to null, initially, and with every batch of messages received, update that
            // to the earliest timestamp seen.
            //
            // However, once we've reached the beginning of the Channel history, we want to begin now only listing newer
            // messages. So we set the direction to 'forward'. We then set the timestamp in the request such that there is
            // no 'latest' and the 'oldest' timestamp is set to the most recent timestamp we've seen.
            // So during the 'initial load' of messages we have to keep track of the direction (so that we know in the next
            // invocation that we've not yet finished the initial load), the earliest timestamp that we've seen (so that
            // we can continue going further back in history), and the latest timestamp that we've seen (so that we know
            // where to start off once we finish going back and start going forward).
            final ConversationsHistoryRequest request = ConversationsHistoryRequest.builder()
                .channel(channelId)
                .limit(batchSize)
                .inclusive(includeLastMessage)
                .build();

            if (direction.equals(FORWARD)) {
                request.setOldest(ts);
                request.setLatest(null);
            } else {
                request.setOldest(null);
                request.setLatest(ts);
            }

            // When the boundary message is included for reply fetching, don't write it again.
            final String firstMessageTs = ts;
            final Predicate<Message> messageFilter = message -> !Objects.equals(message.getTs(), firstMessageTs);
            final Predicate<Message> replyFilter = message -> true; // Include all replies

            final ConsumptionResults results = consumeMessages(context, session, request, messageFilter, repliesCursor, null, replyFilter);

            final String timestampKeyName;
            final SlackTimestamp resultTimestamp;
            if (direction.equals(FORWARD)) {
                resultTimestamp = results.getLatestTimestamp();
                timestampKeyName = stateKeys.LATEST_TS;
            } else {
                resultTimestamp = results.getEarliestTimestamp();
                timestampKeyName = stateKeys.EARLIEST_TS;
            }

            if (resultTimestamp == null) {
                break;
            }

            // Update state
            ts = resultTimestamp.getRawValue();
            repliesCursor = results.getRepliesCursor();
            includeLastMessage = repliesCursor != null;
            updatedStateMap.put(timestampKeyName, ts);

            // If the latest timestamp hasn't yet been set, set it. This allows us to know the latest timestamp when
            // we switch the direction from BACKWARD to FORWARD.
            if (updatedStateMap.get(stateKeys.LATEST_TS) == null) {
                final SlackTimestamp latestTimestamp = results.getLatestTimestamp();
                updatedStateMap.put(stateKeys.LATEST_TS, latestTimestamp == null ? null : latestTimestamp.getRawValue());
            }

            if (repliesCursor != null) {
                updatedStateMap.put(stateKeys.LATEST_REPLIES_CURSOR, repliesCursor);
            }

            // Set the direction to forward only once we reach the end of all messages
            if (!results.isMore() && !results.isFailure()) {
                updatedStateMap.put(stateKeys.DIRECTION, FORWARD);

                // This key is only relevant during the initial loading of messages, when direction is BACKWARD.
                updatedStateMap.remove(stateKeys.EARLIEST_TS);
                logger.info("Successfully completed initial load of messages for channel {}", channelId);
            }

            session.setState(updatedStateMap, Scope.CLUSTER);
            session.commitAsync();

            if (!results.isContinuePolling()) {
                break;
            }
        }
    }
private ConsumptionResults consumeMessages(final ProcessContext context, final ProcessSession session, final ConversationsHistoryRequest request, final Predicate<Message> messageFilter,
final String startingRepliesCursor, final SlackTimestamp oldestReplyTs, final Predicate<Message> replyFilter) throws IOException, SlackApiException {
// Gather slack conversation history
final ConversationsHistoryResponse response = client.fetchConversationsHistory(request);
if (!response.isOk()) {
final String error = SlackResponseUtil.getErrorMessage(response.getError(), response.getNeeded(), response.getProvided(), response.getWarning());
logger.error("Received unexpected response from Slack when attempting to retrieve messages for channel {}: {}", channelId, error);
context.yield();
return new StandardConsumptionResults(null, null, null, true, false, false);
}
// If no messages, we're done.
final List<Message> messages = response.getMessages();
if (messages.isEmpty()) {
logger.debug("Received no new messages from Slack for channel {}", channelId);
this.yield();
return new StandardConsumptionResults(null, null, null, false, false, false);
}
// Write the results out to a FlowFile. This includes optionally gathering the threaded messages / replies.
FlowFile flowFile = session.create();
int messageCount = 0;
PartialThreadException partialThreadException = null;
SlackTimestamp earliestTimestamp = null;
SlackTimestamp latestTimestamp = null;
try (final OutputStream out = session.write(flowFile);
final JsonGenerator generator = objectMapper.createGenerator(out)) {
generator.writeStartArray();
final Iterator<Message> messageItr = messages.iterator();
while (messageItr.hasNext()) {
final Message message = messageItr.next();
// Do not include the message if it's the channel oldest. In this case, we only have fetched it
// in order to make the code simpler so that we can just handle the next section, where we
// deal with replies.
boolean enrichFailed = false;
final boolean includeMessage = messageFilter.test(message);
if (includeMessage) {
// Slack appears to populate the 'team' field but not the channel for Messages for some reason. We need the channel to be populated
// in order to fetch replies, and it makes sense to have it populated, regardless. So we populate it ourselves.
final boolean success = enrichMessage(message);
enrichFailed = !success;
generator.writeObject(message);
messageCount++;
final SlackTimestamp msgTimestamp = new SlackTimestamp(message.getTs());
if (earliestTimestamp == null || msgTimestamp.before(earliestTimestamp)) {
earliestTimestamp = msgTimestamp;
}
if (latestTimestamp == null || msgTimestamp.after(latestTimestamp)) {
latestTimestamp = msgTimestamp;
}
} else {
messageItr.remove();
}
// Simple case is that we need to output only the message.
if (!SlackResponseUtil.hasReplies(message)) {
continue;
}
// Gather replies for the message. We handle the case of PartialThreadException
// carefully because we may well be rate limited, or there could be a server error, etc.
// In this case, we want to output the messages that were received, and keep track of the
// cursor so that we can gather the next chunk in the next iteration.
List<Message> replies;
try {
replies = fetchReplies(message, startingRepliesCursor, oldestReplyTs);
} catch (final PartialThreadException e) {
yieldOnException(e, channelId, message, context);
partialThreadException = e;
replies = e.getRetrieved();
}
// Write out each of the replies as an individual JSON message
for (final Message reply : replies) {
// The first message in the thread is the message itself. We don't want to include it again,
// but we check the timestamp to ensure that this is the case, since the documentation does
// not explicitly call this out.
if (reply.getTs().equals(message.getTs())) {
continue;
}
if (replyFilter.test(reply)) {
final boolean success = enrichMessage(reply);
enrichFailed = enrichFailed || !success;
generator.writeObject(reply);
}
}
messageCount += replies.size();
// If we encountered an Exception while pulling Threaded messages, or couldn't perform enrichment (which generally means rate limiting),
// stop iterating through messages and remove the rest from the list of Messages. This allows us to properly keep track of the offsets, etc.
if (partialThreadException != null || enrichFailed) {
while (messageItr.hasNext()) {
messageItr.next();
messageItr.remove();
}
break;
}
}
generator.writeEndArray();
}
if (!response.isHasMore()) {
this.yield();
}
if (messageCount == 0) {
session.remove(flowFile);
// We consider there to be more messages if an PartialThreadException was thrown, as it means there may be additional
// messages in the thread.
final boolean moreMessages = partialThreadException != null || response.isHasMore();
return new StandardConsumptionResults(null, null, null, partialThreadException != null, false, moreMessages);
}
// Determine attributes for outbound FlowFile
final Map<String, String> attributes = new HashMap<>();
attributes.put("slack.channel.id", channelId);
attributes.put("slack.channel.name", channelName);
attributes.put("slack.message.count", Integer.toString(messageCount));
attributes.put(CoreAttributes.MIME_TYPE.key(), "application/json");
// Update provenance
flowFile = session.putAllAttributes(flowFile, attributes);
session.getProvenanceReporter().receive(flowFile, CONVERSATION_HISTORY_URL);
session.transfer(flowFile, successRelationship);
// Update state based on the next cursor, preferably, or the timestamp of the last message if either we didn't get back
// a cursor, or if we got back a cursor but our original query was made using the 'oldest' parameter.
String repliesCursor = null;
if (partialThreadException != null) {
repliesCursor = partialThreadException.getNextCursor();
}
final boolean hasMoreReplies = repliesCursor != null;
final boolean moreMessages = response.isHasMore() || hasMoreReplies;
final boolean continuePolling = partialThreadException == null && moreMessages;
return new StandardConsumptionResults(earliestTimestamp, latestTimestamp, repliesCursor, partialThreadException != null, continuePolling, moreMessages);
}
private boolean enrichMessage(final Message message) {
message.setChannel(channelId);
if (!includeMessageBlocks) {
message.setBlocks(null);
}
final AtomicBoolean lookupFailed = new AtomicBoolean(false);
if (resolveUsernames) {
if (message.getUsername() == null && message.getUser() != null) {
final String username = usernameLookup.getUsername(message.getUser());
if (username == null) {
lookupFailed.set(true);
}
message.setUsername(username);
}
final String text = message.getText();
if (text != null) {
final Matcher matcher = MENTION_PATTERN.matcher(text);
final String updatedText = matcher.replaceAll(matchResult -> {
final String id = matchResult.group(1);
final String username = usernameLookup.getUsername(id);
if (username == null) {
lookupFailed.set(true);
matchResult.group(0);
}
return "<@" + username + ">";
});
message.setText(updatedText);
}
}
return !lookupFailed.get();
}
private void yieldOnException(final PartialThreadException e, final String channelId, final Message message, final ProcessContext context) {
if (SlackResponseUtil.isRateLimited(e.getCause())) {
final int retryAfterSeconds = SlackResponseUtil.getRetryAfterSeconds(e);
logger.warn("Slack indicated that the Rate Limit has been exceeded when attempting to retrieve messages for channel {}; will continue in {} seconds",
channelId, retryAfterSeconds);
} else {
logger.error("Encountered unexpected response from Slack when retrieving replies to message with thread timestamp {} due to: {}",
message.getThreadTs(), e.getMessage(), e);
}
final int retryAfterSeconds = SlackResponseUtil.getRetryAfterSeconds(e);
final long timeOfNextRequest = System.currentTimeMillis() + (retryAfterSeconds * 1000L);
nextRequestTime.getAndUpdate(currentTime -> Math.max(currentTime, timeOfNextRequest));
context.yield();
}
/**
 * Fetches all replies in the thread rooted at the given message, paging through the Slack
 * conversations.replies API via its cursor until no more pages remain.
 *
 * <p>Partial-failure semantics: if any page after the first fails (API error or non-OK
 * response), the replies gathered so far plus the current cursor are wrapped in a
 * {@link PartialThreadException} so the caller can persist progress and resume. A failure
 * on the very first page is rethrown as-is.
 *
 * @param message the thread parent message
 * @param startCursor the cursor to resume from, or null to start from the beginning
 * @param oldestTs optional cutoff; replies older than this are not requested
 * @return all replies fetched, possibly empty
 */
private List<Message> fetchReplies(final Message message, final String startCursor, final SlackTimestamp oldestTs) throws SlackApiException, IOException, PartialThreadException {
    final List<Message> replies = new ArrayList<>();

    // If the message's latest reply is before our cutoff, don't bother polling for replies
    if (oldestTs != null) {
        final String latestReply = message.getLatestReply();
        if (latestReply != null && new SlackTimestamp(latestReply).before(oldestTs)) {
            return Collections.emptyList();
        }
    }

    String cursor = startCursor;
    while (true) {
        final ConversationsRepliesRequest request = ConversationsRepliesRequest.builder()
            .channel(channelId)
            .ts(message.getThreadTs())
            .includeAllMetadata(true)
            .limit(1000)
            .oldest(oldestTs == null ? null : oldestTs.getRawValue())
            .cursor(cursor)
            .build();

        final ConversationsRepliesResponse response;
        try {
            response = client.fetchConversationsReplies(request);
        } catch (final Exception e) {
            // First page failed: nothing to preserve, propagate directly.
            if (replies.isEmpty()) {
                throw e;
            }
            // Later page failed: preserve what we have plus the cursor for resumption.
            throw new PartialThreadException(replies, cursor, e);
        }

        if (!response.isOk()) {
            final String errorMessage = SlackResponseUtil.getErrorMessage(response.getError(), response.getNeeded(), response.getProvided(), response.getWarning());
            throw new PartialThreadException(replies, cursor, errorMessage);
        }

        replies.addAll(response.getMessages());

        if (!response.isHasMore()) {
            break;
        }

        // Advance to the next page.
        cursor = response.getResponseMetadata().getNextCursor();
    }

    return replies;
}
/**
 * Pauses consumption of this channel: marks the channel as yielded until
 * {@code YIELD_MILLIS} from now.
 */
public void yield() {
    final long now = System.currentTimeMillis();
    yieldExpiration = now + YIELD_MILLIS;
}
/**
 * Indicates whether this channel is currently yielded (i.e., consumption is paused).
 *
 * @return {@code true} if the yield period has not yet elapsed, {@code false} otherwise
 */
public boolean isYielded() {
    // Read the field exactly once so the null-check and the comparison below use a
    // consistent value, even if another thread updates yieldExpiration concurrently.
    // (The original re-read the field at the comparison, a racy inconsistent read.)
    final long expiration = this.yieldExpiration;
    if (expiration == 0) {
        return false;
    }

    if (System.currentTimeMillis() < expiration) {
        return true;
    }

    // Reset yield expiration to 0 so that next time we don't need to make the system call to get current time.
    yieldExpiration = 0L;
    return false;
}
/**
 * Fluent builder for {@code ConsumeChannel}. All setters return {@code this} for chaining;
 * {@link #build()} produces the configured instance.
 */
public static class Builder {
    // Required collaborators.
    private ConsumeSlackClient client;
    private ComponentLog logger;
    private UsernameLookup usernameLookup;
    private ObjectMapper objectMapper;
    private Relationship successRelationship;

    // Channel identification.
    private String channelId;
    private String channelName;

    // Tunables with sensible defaults.
    private boolean includeMessageBlocks;
    private boolean resolveUsernames;
    private int batchSize = 50;
    private long replyMonitorFrequencyMillis = TimeUnit.SECONDS.toMillis(60);
    private long replyMonitorWindowMillis = TimeUnit.DAYS.toMillis(7);

    /** Sets the Slack client used to fetch channel history and replies. */
    public Builder client(final ConsumeSlackClient client) {
        this.client = client;
        return this;
    }

    /** Sets the ID of the channel to consume. */
    public Builder channelId(final String channelId) {
        this.channelId = channelId;
        return this;
    }

    /** Sets the human-readable name of the channel to consume. */
    public Builder channelName(final String channelName) {
        this.channelName = channelName;
        return this;
    }

    /** Sets the maximum number of messages to fetch per request. */
    public Builder batchSize(final int batchSize) {
        this.batchSize = batchSize;
        return this;
    }

    /** Sets the component logger. */
    public Builder logger(final ComponentLog logger) {
        this.logger = logger;
        return this;
    }

    /** Sets how often to check historical threads for new replies. */
    public Builder replyMonitorFrequency(final long value, final TimeUnit timeUnit) {
        this.replyMonitorFrequencyMillis = timeUnit.toMillis(value);
        return this;
    }

    /** Sets how far back in time threads are monitored for new replies. */
    public Builder replyMonitorWindow(final long value, final TimeUnit timeUnit) {
        this.replyMonitorWindowMillis = timeUnit.toMillis(value);
        return this;
    }

    /** Sets whether Block Kit payloads are retained on consumed messages. */
    public Builder includeMessageBlocks(final boolean includeMessageBlocks) {
        this.includeMessageBlocks = includeMessageBlocks;
        return this;
    }

    /** Sets whether user IDs are resolved to usernames. */
    public Builder resolveUsernames(final boolean resolveUsernames) {
        this.resolveUsernames = resolveUsernames;
        return this;
    }

    /** Sets the relationship to which successfully consumed messages are routed. */
    public Builder successRelationship(final Relationship relationship) {
        this.successRelationship = relationship;
        return this;
    }

    /** Sets the lookup service used to resolve user IDs to usernames. */
    public Builder usernameLookup(final UsernameLookup lookup) {
        this.usernameLookup = lookup;
        return this;
    }

    /** Sets the Jackson mapper used for message serialization. */
    public Builder objectMapper(final ObjectMapper objectMapper) {
        this.objectMapper = objectMapper;
        return this;
    }

    /** Builds the configured {@code ConsumeChannel}. */
    public ConsumeChannel build() {
        return new ConsumeChannel(this);
    }
}
/**
 * Summary of a single polling pass over a channel: the timestamp range consumed, the
 * replies cursor to resume from, and flags describing whether the pass failed, whether
 * polling should continue immediately, and whether more messages remain.
 */
private interface ConsumptionResults {
    // Earliest message timestamp seen during this pass, or null if none.
    SlackTimestamp getEarliestTimestamp();
    // Latest message timestamp seen during this pass, or null if none.
    SlackTimestamp getLatestTimestamp();
    // Cursor to resume fetching thread replies from, or null if complete.
    String getRepliesCursor();
    // Whether any part of the pass failed.
    boolean isFailure();
    // Whether the caller should continue polling without waiting.
    boolean isContinuePolling();
    // Whether Slack indicated that more messages remain.
    boolean isMore();
}
/**
 * Immutable value object implementing {@link ConsumptionResults}; simply carries the
 * values supplied at construction time.
 */
private static class StandardConsumptionResults implements ConsumptionResults {
    private final SlackTimestamp earliestTimestamp;
    private final SlackTimestamp latestTimestamp;
    private final String repliesCursor;
    private final boolean failure;
    private final boolean continuePolling;
    private final boolean moreMessages;

    public StandardConsumptionResults(final SlackTimestamp earliestTimestamp, final SlackTimestamp latestTimestamp, final String repliesCursor,
                                      final boolean failure, final boolean continuePolling, final boolean moreMessages) {
        this.earliestTimestamp = earliestTimestamp;
        this.latestTimestamp = latestTimestamp;
        this.repliesCursor = repliesCursor;
        this.failure = failure;
        this.continuePolling = continuePolling;
        this.moreMessages = moreMessages;
    }

    @Override
    public SlackTimestamp getEarliestTimestamp() {
        return earliestTimestamp;
    }

    @Override
    public SlackTimestamp getLatestTimestamp() {
        return latestTimestamp;
    }

    @Override
    public String getRepliesCursor() {
        return repliesCursor;
    }

    @Override
    public boolean isFailure() {
        return failure;
    }

    @Override
    public boolean isContinuePolling() {
        return continuePolling;
    }

    @Override
    public boolean isMore() {
        return moreMessages;
    }
}
/**
 * Holder for the per-channel state-map keys used to persist consumption progress.
 * Each key is namespaced by the channel ID so multiple channels can share one state map.
 */
private static class StateKeys {
    public final String ACTION;
    public final String LATEST_TS;
    public final String EARLIEST_TS;
    public final String DIRECTION;
    public final String LATEST_REPLIES_CURSOR;
    public final String HISTORICAL_MESSAGES_REPLIES_CURSOR;
    public final String HISTORICAL_REPLIES_EARLIEST_THREAD_TS;
    public final String REPLY_MIN_TS;
    public final String REPLY_MAX_TS;

    public StateKeys(final String channelId) {
        ACTION = keyFor(channelId, ".action");
        LATEST_TS = keyFor(channelId, ".latest");
        EARLIEST_TS = keyFor(channelId, ".earliest");
        DIRECTION = keyFor(channelId, ".direction");
        LATEST_REPLIES_CURSOR = keyFor(channelId, ".latest.replies.cursor");
        HISTORICAL_MESSAGES_REPLIES_CURSOR = keyFor(channelId, ".historical.replies.cursor");
        HISTORICAL_REPLIES_EARLIEST_THREAD_TS = keyFor(channelId, ".historical.replies.ts");
        REPLY_MIN_TS = keyFor(channelId, ".historical.reply.min.ts");
        REPLY_MAX_TS = keyFor(channelId, ".historical.reply.max.ts");
    }

    // Builds a state-map key namespaced by the channel ID.
    private static String keyFor(final String channelId, final String suffix) {
        return channelId + suffix;
    }
}
}
|
googleapis/google-cloud-java | 37,809 | java-vmwareengine/proto-google-cloud-vmwareengine-v1/src/main/java/com/google/cloud/vmwareengine/v1/Vcenter.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/vmwareengine/v1/vmwareengine_resources.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.vmwareengine.v1;
/**
*
*
* <pre>
* Details about a vCenter Server management appliance.
* </pre>
*
* Protobuf type {@code google.cloud.vmwareengine.v1.Vcenter}
*/
public final class Vcenter extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.vmwareengine.v1.Vcenter)
VcenterOrBuilder {
private static final long serialVersionUID = 0L;
// Use Vcenter.newBuilder() to construct.
private Vcenter(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private Vcenter() {
internalIp_ = "";
version_ = "";
state_ = 0;
fqdn_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new Vcenter();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.vmwareengine.v1.VmwareengineResourcesProto
.internal_static_google_cloud_vmwareengine_v1_Vcenter_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.vmwareengine.v1.VmwareengineResourcesProto
.internal_static_google_cloud_vmwareengine_v1_Vcenter_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.vmwareengine.v1.Vcenter.class,
com.google.cloud.vmwareengine.v1.Vcenter.Builder.class);
}
/**
*
*
* <pre>
* State of the appliance
* </pre>
*
* Protobuf enum {@code google.cloud.vmwareengine.v1.Vcenter.State}
*/
public enum State implements com.google.protobuf.ProtocolMessageEnum {
/**
*
*
* <pre>
* Unspecified appliance state. This is the default value.
* </pre>
*
* <code>STATE_UNSPECIFIED = 0;</code>
*/
STATE_UNSPECIFIED(0),
/**
*
*
* <pre>
* The appliance is operational and can be used.
* </pre>
*
* <code>ACTIVE = 1;</code>
*/
ACTIVE(1),
/**
*
*
* <pre>
* The appliance is being deployed.
* </pre>
*
* <code>CREATING = 2;</code>
*/
CREATING(2),
UNRECOGNIZED(-1),
;
/**
*
*
* <pre>
* Unspecified appliance state. This is the default value.
* </pre>
*
* <code>STATE_UNSPECIFIED = 0;</code>
*/
public static final int STATE_UNSPECIFIED_VALUE = 0;
/**
*
*
* <pre>
* The appliance is operational and can be used.
* </pre>
*
* <code>ACTIVE = 1;</code>
*/
public static final int ACTIVE_VALUE = 1;
/**
*
*
* <pre>
* The appliance is being deployed.
* </pre>
*
* <code>CREATING = 2;</code>
*/
public static final int CREATING_VALUE = 2;
public final int getNumber() {
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalArgumentException(
"Can't get the number of an unknown enum value.");
}
return value;
}
/**
* @param value The numeric wire value of the corresponding enum entry.
* @return The enum associated with the given numeric wire value.
* @deprecated Use {@link #forNumber(int)} instead.
*/
@java.lang.Deprecated
public static State valueOf(int value) {
return forNumber(value);
}
/**
* @param value The numeric wire value of the corresponding enum entry.
* @return The enum associated with the given numeric wire value.
*/
public static State forNumber(int value) {
switch (value) {
case 0:
return STATE_UNSPECIFIED;
case 1:
return ACTIVE;
case 2:
return CREATING;
default:
return null;
}
}
public static com.google.protobuf.Internal.EnumLiteMap<State> internalGetValueMap() {
return internalValueMap;
}
private static final com.google.protobuf.Internal.EnumLiteMap<State> internalValueMap =
new com.google.protobuf.Internal.EnumLiteMap<State>() {
public State findValueByNumber(int number) {
return State.forNumber(number);
}
};
public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() {
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalStateException(
"Can't get the descriptor of an unrecognized enum value.");
}
return getDescriptor().getValues().get(ordinal());
}
public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() {
return getDescriptor();
}
public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() {
return com.google.cloud.vmwareengine.v1.Vcenter.getDescriptor().getEnumTypes().get(0);
}
private static final State[] VALUES = values();
public static State valueOf(com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
}
if (desc.getIndex() == -1) {
return UNRECOGNIZED;
}
return VALUES[desc.getIndex()];
}
private final int value;
private State(int value) {
this.value = value;
}
// @@protoc_insertion_point(enum_scope:google.cloud.vmwareengine.v1.Vcenter.State)
}
public static final int INTERNAL_IP_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object internalIp_ = "";
/**
*
*
* <pre>
* Internal IP address of the appliance.
* </pre>
*
* <code>string internal_ip = 2;</code>
*
* @return The internalIp.
*/
@java.lang.Override
public java.lang.String getInternalIp() {
java.lang.Object ref = internalIp_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
internalIp_ = s;
return s;
}
}
/**
*
*
* <pre>
* Internal IP address of the appliance.
* </pre>
*
* <code>string internal_ip = 2;</code>
*
* @return The bytes for internalIp.
*/
@java.lang.Override
public com.google.protobuf.ByteString getInternalIpBytes() {
java.lang.Object ref = internalIp_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
internalIp_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int VERSION_FIELD_NUMBER = 4;
@SuppressWarnings("serial")
private volatile java.lang.Object version_ = "";
/**
*
*
* <pre>
* Version of the appliance.
* </pre>
*
* <code>string version = 4;</code>
*
* @return The version.
*/
@java.lang.Override
public java.lang.String getVersion() {
java.lang.Object ref = version_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
version_ = s;
return s;
}
}
/**
*
*
* <pre>
* Version of the appliance.
* </pre>
*
* <code>string version = 4;</code>
*
* @return The bytes for version.
*/
@java.lang.Override
public com.google.protobuf.ByteString getVersionBytes() {
java.lang.Object ref = version_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
version_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int STATE_FIELD_NUMBER = 5;
private int state_ = 0;
/**
*
*
* <pre>
* Output only. The state of the appliance.
* </pre>
*
* <code>
* .google.cloud.vmwareengine.v1.Vcenter.State state = 5 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return The enum numeric value on the wire for state.
*/
@java.lang.Override
public int getStateValue() {
return state_;
}
/**
*
*
* <pre>
* Output only. The state of the appliance.
* </pre>
*
* <code>
* .google.cloud.vmwareengine.v1.Vcenter.State state = 5 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return The state.
*/
@java.lang.Override
public com.google.cloud.vmwareengine.v1.Vcenter.State getState() {
com.google.cloud.vmwareengine.v1.Vcenter.State result =
com.google.cloud.vmwareengine.v1.Vcenter.State.forNumber(state_);
return result == null ? com.google.cloud.vmwareengine.v1.Vcenter.State.UNRECOGNIZED : result;
}
public static final int FQDN_FIELD_NUMBER = 6;
@SuppressWarnings("serial")
private volatile java.lang.Object fqdn_ = "";
/**
*
*
* <pre>
* Fully qualified domain name of the appliance.
* </pre>
*
* <code>string fqdn = 6;</code>
*
* @return The fqdn.
*/
@java.lang.Override
public java.lang.String getFqdn() {
java.lang.Object ref = fqdn_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
fqdn_ = s;
return s;
}
}
/**
*
*
* <pre>
* Fully qualified domain name of the appliance.
* </pre>
*
* <code>string fqdn = 6;</code>
*
* @return The bytes for fqdn.
*/
@java.lang.Override
public com.google.protobuf.ByteString getFqdnBytes() {
java.lang.Object ref = fqdn_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
fqdn_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// Memoized result of isInitialized(): -1 = not computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;

// Proto3 message with no required fields: always initialized. Generated code — do not hand-edit.
@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  memoizedIsInitialized = 1;
  return true;
}
// Serializes this message to the wire. Proto3 semantics: fields equal to their default
// value (empty string / enum 0) are omitted. Generated code — do not hand-edit.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(internalIp_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 2, internalIp_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(version_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 4, version_);
  }
  if (state_ != com.google.cloud.vmwareengine.v1.Vcenter.State.STATE_UNSPECIFIED.getNumber()) {
    output.writeEnum(5, state_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(fqdn_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 6, fqdn_);
  }
  // Preserve any fields this binary did not know how to parse.
  getUnknownFields().writeTo(output);
}
// Computes (and memoizes in memoizedSize) the serialized byte size; mirrors writeTo's
// default-value skipping exactly. Generated code — do not hand-edit.
@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;
  size = 0;
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(internalIp_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, internalIp_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(version_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, version_);
  }
  if (state_ != com.google.cloud.vmwareengine.v1.Vcenter.State.STATE_UNSPECIFIED.getNumber()) {
    size += com.google.protobuf.CodedOutputStream.computeEnumSize(5, state_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(fqdn_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(6, fqdn_);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}
// Field-by-field equality, including unknown fields. Generated code — do not hand-edit.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.cloud.vmwareengine.v1.Vcenter)) {
    return super.equals(obj);
  }
  com.google.cloud.vmwareengine.v1.Vcenter other = (com.google.cloud.vmwareengine.v1.Vcenter) obj;
  if (!getInternalIp().equals(other.getInternalIp())) return false;
  if (!getVersion().equals(other.getVersion())) return false;
  if (state_ != other.state_) return false;
  if (!getFqdn().equals(other.getFqdn())) return false;
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}
// Memoized hash over all fields (consistent with equals). Generated code — do not hand-edit.
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  hash = (37 * hash) + INTERNAL_IP_FIELD_NUMBER;
  hash = (53 * hash) + getInternalIp().hashCode();
  hash = (37 * hash) + VERSION_FIELD_NUMBER;
  hash = (53 * hash) + getVersion().hashCode();
  hash = (37 * hash) + STATE_FIELD_NUMBER;
  hash = (53 * hash) + state_;
  hash = (37 * hash) + FQDN_FIELD_NUMBER;
  hash = (53 * hash) + getFqdn().hashCode();
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
public static com.google.cloud.vmwareengine.v1.Vcenter parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.vmwareengine.v1.Vcenter parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.vmwareengine.v1.Vcenter parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.vmwareengine.v1.Vcenter parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.vmwareengine.v1.Vcenter parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.vmwareengine.v1.Vcenter parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.vmwareengine.v1.Vcenter parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.vmwareengine.v1.Vcenter parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.vmwareengine.v1.Vcenter parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.vmwareengine.v1.Vcenter parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.vmwareengine.v1.Vcenter parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.vmwareengine.v1.Vcenter parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.vmwareengine.v1.Vcenter prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Details about a vCenter Server management appliance.
* </pre>
*
* Protobuf type {@code google.cloud.vmwareengine.v1.Vcenter}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.vmwareengine.v1.Vcenter)
com.google.cloud.vmwareengine.v1.VcenterOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.vmwareengine.v1.VmwareengineResourcesProto
.internal_static_google_cloud_vmwareengine_v1_Vcenter_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.vmwareengine.v1.VmwareengineResourcesProto
.internal_static_google_cloud_vmwareengine_v1_Vcenter_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.vmwareengine.v1.Vcenter.class,
com.google.cloud.vmwareengine.v1.Vcenter.Builder.class);
}
// Construct using com.google.cloud.vmwareengine.v1.Vcenter.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
internalIp_ = "";
version_ = "";
state_ = 0;
fqdn_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.vmwareengine.v1.VmwareengineResourcesProto
.internal_static_google_cloud_vmwareengine_v1_Vcenter_descriptor;
}
@java.lang.Override
public com.google.cloud.vmwareengine.v1.Vcenter getDefaultInstanceForType() {
return com.google.cloud.vmwareengine.v1.Vcenter.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.vmwareengine.v1.Vcenter build() {
com.google.cloud.vmwareengine.v1.Vcenter result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.vmwareengine.v1.Vcenter buildPartial() {
com.google.cloud.vmwareengine.v1.Vcenter result =
new com.google.cloud.vmwareengine.v1.Vcenter(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.vmwareengine.v1.Vcenter result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.internalIp_ = internalIp_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.version_ = version_;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.state_ = state_;
}
if (((from_bitField0_ & 0x00000008) != 0)) {
result.fqdn_ = fqdn_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.vmwareengine.v1.Vcenter) {
return mergeFrom((com.google.cloud.vmwareengine.v1.Vcenter) other);
} else {
super.mergeFrom(other);
return this;
}
}
// Merges a Vcenter message into this builder: only non-default fields of `other`
// overwrite this builder's values (proto3 merge semantics). Generated code — do not hand-edit.
public Builder mergeFrom(com.google.cloud.vmwareengine.v1.Vcenter other) {
  if (other == com.google.cloud.vmwareengine.v1.Vcenter.getDefaultInstance()) return this;
  if (!other.getInternalIp().isEmpty()) {
    internalIp_ = other.internalIp_;
    bitField0_ |= 0x00000001;
    onChanged();
  }
  if (!other.getVersion().isEmpty()) {
    version_ = other.version_;
    bitField0_ |= 0x00000002;
    onChanged();
  }
  if (other.state_ != 0) {
    setStateValue(other.getStateValue());
  }
  if (!other.getFqdn().isEmpty()) {
    fqdn_ = other.fqdn_;
    bitField0_ |= 0x00000008;
    onChanged();
  }
  this.mergeUnknownFields(other.getUnknownFields());
  onChanged();
  return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 18:
{
internalIp_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 18
case 34:
{
version_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 34
case 40:
{
state_ = input.readEnum();
bitField0_ |= 0x00000004;
break;
} // case 40
case 50:
{
fqdn_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000008;
break;
} // case 50
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object internalIp_ = "";
/**
*
*
* <pre>
* Internal IP address of the appliance.
* </pre>
*
* <code>string internal_ip = 2;</code>
*
* @return The internalIp.
*/
public java.lang.String getInternalIp() {
java.lang.Object ref = internalIp_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
internalIp_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Internal IP address of the appliance.
* </pre>
*
* <code>string internal_ip = 2;</code>
*
* @return The bytes for internalIp.
*/
public com.google.protobuf.ByteString getInternalIpBytes() {
java.lang.Object ref = internalIp_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
internalIp_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Internal IP address of the appliance.
* </pre>
*
* <code>string internal_ip = 2;</code>
*
* @param value The internalIp to set.
* @return This builder for chaining.
*/
public Builder setInternalIp(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
internalIp_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Internal IP address of the appliance.
* </pre>
*
* <code>string internal_ip = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearInternalIp() {
internalIp_ = getDefaultInstance().getInternalIp();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Internal IP address of the appliance.
* </pre>
*
* <code>string internal_ip = 2;</code>
*
* @param value The bytes for internalIp to set.
* @return This builder for chaining.
*/
public Builder setInternalIpBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
internalIp_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private java.lang.Object version_ = "";
/**
*
*
* <pre>
* Version of the appliance.
* </pre>
*
* <code>string version = 4;</code>
*
* @return The version.
*/
public java.lang.String getVersion() {
java.lang.Object ref = version_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
version_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Version of the appliance.
* </pre>
*
* <code>string version = 4;</code>
*
* @return The bytes for version.
*/
public com.google.protobuf.ByteString getVersionBytes() {
java.lang.Object ref = version_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
version_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Version of the appliance.
* </pre>
*
* <code>string version = 4;</code>
*
* @param value The version to set.
* @return This builder for chaining.
*/
public Builder setVersion(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
version_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Version of the appliance.
* </pre>
*
* <code>string version = 4;</code>
*
* @return This builder for chaining.
*/
public Builder clearVersion() {
version_ = getDefaultInstance().getVersion();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* Version of the appliance.
* </pre>
*
* <code>string version = 4;</code>
*
* @param value The bytes for version to set.
* @return This builder for chaining.
*/
public Builder setVersionBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
version_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
    // Stored as the raw enum wire number so unrecognized values survive a
    // round trip; presence is tracked by bit 0x00000004 of bitField0_.
    private int state_ = 0;

    /**
     *
     *
     * <pre>
     * Output only. The state of the appliance.
     * </pre>
     *
     * <code>
     * .google.cloud.vmwareengine.v1.Vcenter.State state = 5 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     *
     * @return The enum numeric value on the wire for state.
     */
    @java.lang.Override
    public int getStateValue() {
      return state_;
    }

    /**
     *
     *
     * <pre>
     * Output only. The state of the appliance.
     * </pre>
     *
     * <code>
     * .google.cloud.vmwareengine.v1.Vcenter.State state = 5 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     *
     * @param value The enum numeric value on the wire for state to set.
     * @return This builder for chaining.
     */
    public Builder setStateValue(int value) {
      // No range check: raw numbers are accepted so unknown enum values
      // can be written.
      state_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Output only. The state of the appliance.
     * </pre>
     *
     * <code>
     * .google.cloud.vmwareengine.v1.Vcenter.State state = 5 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     *
     * @return The state.
     */
    @java.lang.Override
    public com.google.cloud.vmwareengine.v1.Vcenter.State getState() {
      // Numbers not defined in the .proto map to UNRECOGNIZED.
      com.google.cloud.vmwareengine.v1.Vcenter.State result =
          com.google.cloud.vmwareengine.v1.Vcenter.State.forNumber(state_);
      return result == null ? com.google.cloud.vmwareengine.v1.Vcenter.State.UNRECOGNIZED : result;
    }

    /**
     *
     *
     * <pre>
     * Output only. The state of the appliance.
     * </pre>
     *
     * <code>
     * .google.cloud.vmwareengine.v1.Vcenter.State state = 5 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     *
     * @param value The state to set.
     * @return This builder for chaining.
     */
    public Builder setState(com.google.cloud.vmwareengine.v1.Vcenter.State value) {
      if (value == null) {
        throw new NullPointerException();
      }
      bitField0_ |= 0x00000004;
      state_ = value.getNumber();
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Output only. The state of the appliance.
     * </pre>
     *
     * <code>
     * .google.cloud.vmwareengine.v1.Vcenter.State state = 5 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearState() {
      // Clear the presence bit and restore the default (number 0).
      bitField0_ = (bitField0_ & ~0x00000004);
      state_ = 0;
      onChanged();
      return this;
    }
    // Holds either a String or a ByteString; converted lazily in either
    // direction and cached. Presence bit: 0x00000008 of bitField0_.
    private java.lang.Object fqdn_ = "";

    /**
     *
     *
     * <pre>
     * Fully qualified domain name of the appliance.
     * </pre>
     *
     * <code>string fqdn = 6;</code>
     *
     * @return The fqdn.
     */
    public java.lang.String getFqdn() {
      java.lang.Object ref = fqdn_;
      if (!(ref instanceof java.lang.String)) {
        // Cached as ByteString (e.g. straight off the wire); decode and
        // cache the String form.
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        fqdn_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Fully qualified domain name of the appliance.
     * </pre>
     *
     * <code>string fqdn = 6;</code>
     *
     * @return The bytes for fqdn.
     */
    public com.google.protobuf.ByteString getFqdnBytes() {
      java.lang.Object ref = fqdn_;
      if (ref instanceof String) {
        // Cached as String; encode once and cache the ByteString form.
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        fqdn_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Fully qualified domain name of the appliance.
     * </pre>
     *
     * <code>string fqdn = 6;</code>
     *
     * @param value The fqdn to set.
     * @return This builder for chaining.
     */
    public Builder setFqdn(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      fqdn_ = value;
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Fully qualified domain name of the appliance.
     * </pre>
     *
     * <code>string fqdn = 6;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearFqdn() {
      // Restore the default ("") and clear the presence bit.
      fqdn_ = getDefaultInstance().getFqdn();
      bitField0_ = (bitField0_ & ~0x00000008);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Fully qualified domain name of the appliance.
     * </pre>
     *
     * <code>string fqdn = 6;</code>
     *
     * @param value The bytes for fqdn to set.
     * @return This builder for chaining.
     */
    public Builder setFqdnBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      // proto3 strings must be valid UTF-8; reject invalid bytes early.
      checkByteStringIsUtf8(value);
      fqdn_ = value;
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }
    // Both overrides simply delegate to the generated superclass; they
    // exist so the return type is this concrete Builder for chaining.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.vmwareengine.v1.Vcenter)
}
// @@protoc_insertion_point(class_scope:google.cloud.vmwareengine.v1.Vcenter)
  // Shared immutable singleton: in proto3 every message type has one
  // default instance with all fields at their default values.
  private static final com.google.cloud.vmwareengine.v1.Vcenter DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.vmwareengine.v1.Vcenter();
  }

  public static com.google.cloud.vmwareengine.v1.Vcenter getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Wire-format parser; every failure path attaches the partially-built
  // message so callers can inspect whatever was successfully parsed.
  private static final com.google.protobuf.Parser<Vcenter> PARSER =
      new com.google.protobuf.AbstractParser<Vcenter>() {
        @java.lang.Override
        public Vcenter parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Plain I/O failures are wrapped in the protobuf exception type.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<Vcenter> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<Vcenter> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.vmwareengine.v1.Vcenter getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/*
* Copyright (c) 1994, 2003, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package sun.tools.java;
import java.util.Stack;
import java.io.IOException;
import sun.tools.tree.Context;
//JCOV
import java.io.File;
//end JCOV
/**
* This class defines the environment for a compilation.
* It is used to load classes, resolve class names and
* report errors. It is an abstract class, a subclass
* must define implementations for some of the functions.<p>
*
* An environment has a source object associated with it.
* This is the thing against which errors are reported, it
* is usually a file name, a field or a class.<p>
*
* Environments can be nested to change the source object.<p>
*
* WARNING: The contents of this source file are not part of any
* supported API. Code that depends on them does so at its own risk:
* they are subject to change or removal without notice.
*
* @author Arthur van Hoff
*/
public class Environment implements Constants {
    /**
     * The actual environment to which everything is forwarded.
     */
    Environment env;

    /**
     * External character encoding name
     */
    String encoding;

    /**
     * The object that is currently being parsed/compiled.
     * It is either a file name (String) or a field (MemberDefinition)
     * or a class (ClassDeclaration or ClassDefinition).
     */
    Object source;

    /**
     * Creates an environment that forwards to 'env' and reports errors
     * against 'source'.  When the given environment is itself a plain
     * forwarding wrapper of the same class, its delegate is used
     * directly so chains of wrappers stay short.
     */
    public Environment(Environment env, Object source) {
        if (env != null && env.env != null && env.getClass() == this.getClass())
            env = env.env;          // a small optimization
        this.env = env;
        this.source = source;
    }

    /** Creates a root environment with no delegate and no source. */
    public Environment() {
        this(null, null);
    }
/**
* Tells whether an Identifier refers to a package which should be
* exempt from the "exists" check in Imports#resolve().
*/
public boolean isExemptPackage(Identifier id) {
return env.isExemptPackage(id);
}
/**
* Return a class declaration given a fully qualified class name.
*/
public ClassDeclaration getClassDeclaration(Identifier nm) {
return env.getClassDeclaration(nm);
}
    /**
     * Return a class definition given a fully qualified class name.
     * <p>
     * Should be called only with 'internal' class names, i.e., the result
     * of a call to 'resolveName' or a synthetic class name.
     * <p>
     * For an inner name, the outermost class is loaded first and the
     * remaining components of the flat name are then resolved one at a
     * time against the enclosing class found so far.
     */
    public final ClassDefinition getClassDefinition(Identifier nm) throws ClassNotFound {
        if (nm.isInner()) {
            ClassDefinition c = getClassDefinition(nm.getTopName());
            Identifier tail = nm.getFlatName();
        walkTail:
            while (tail.isQualified()) {
                tail = tail.getTail();
                Identifier head = tail.getHead();
                //System.out.println("CLASS: " + c + " HEAD: " + head + " TAIL: " + tail);
                String hname = head.toString();
                // If the name is of the form 'ClassName.N$localName', where N is
                // a number, the field 'N$localName' may not necessarily be a member
                // of the class named by 'ClassName', but might be a member of some
                // inaccessible class contained within it.  We use 'getLocalClass'
                // to do the lookup in this case.  This is part of a fix for bugid
                // 4054523 and 4030421.  See also 'BatchEnvironment.makeClassDefinition'.
                // This should also work for anonymous class names of the form
                // 'ClassName.N'.  Note that the '.' qualifications get converted to
                // '$' characters when determining the external name of the class and
                // the name of the class file.
                if (hname.length() > 0
                    && Character.isDigit(hname.charAt(0))) {
                    ClassDefinition localClass = c.getLocalClass(hname);
                    if (localClass != null) {
                        c = localClass;
                        continue walkTail;
                    }
                } else {
                    // Otherwise look for an inner-class member of 'c'
                    // whose simple name matches this component.
                    for (MemberDefinition f = c.getFirstMatch(head);
                         f != null; f = f.getNextMatch()) {
                        if (f.isInnerClass()) {
                            c = f.getInnerClass();
                            continue walkTail;
                        }
                    }
                }
                // Neither a local class nor an inner-class member matched.
                throw new ClassNotFound(Identifier.lookupInner(c.getName(), head));
            }
            //System.out.println("FOUND " + c + " FOR " + nm);
            return c;
        }
        return getClassDeclaration(nm).getClassDefinition(this);
    }
/**
* Return a class declaration given a type. Only works for
* class types.
*/
public ClassDeclaration getClassDeclaration(Type t) {
return getClassDeclaration(t.getClassName());
}
/**
* Return a class definition given a type. Only works for
* class types.
*/
public final ClassDefinition getClassDefinition(Type t) throws ClassNotFound {
return getClassDefinition(t.getClassName());
}
/**
* Check if a class exists (without actually loading it).
* (Since inner classes cannot in general be examined without
* loading source, this method does not accept inner names.)
*/
public boolean classExists(Identifier nm) {
return env.classExists(nm);
}
public final boolean classExists(Type t) {
return !t.isType(TC_CLASS) || classExists(t.getClassName());
}
/**
* Get the package path for a package
*/
public Package getPackage(Identifier pkg) throws IOException {
return env.getPackage(pkg);
}
/**
* Load the definition of a class.
*/
public void loadDefinition(ClassDeclaration c) {
env.loadDefinition(c);
}
/**
* Return the source of the environment (ie: the thing being compiled/parsed).
*/
public final Object getSource() {
return source;
}
    /**
     * Resolve a type.  Make sure that all the classes referred to by
     * the type have a definition.  Report errors.  Return true if
     * the type is well-formed.  Presently used for types appearing
     * in member declarations, which represent named types internally as
     * qualified identifiers.  Type names appearing in local variable
     * declarations and within expressions are represented as identifier
     * or field expressions, and are resolved by 'toType', which delegates
     * handling of the non-inner portion of the name to this method.
     * <p>
     * In 'toType', the various stages of qualification are represented by
     * separate AST nodes.  Here, we are given a single identifier which
     * contains the entire qualification structure.  It is not possible in
     * general to set the error location to the exact position of a component
     * that is in error, so an error message must refer to the entire qualified
     * name.  An attempt to keep track of the string length of the components of
     * the name and to offset the location accordingly fails because the initial
     * prefix of the name may have been rewritten by an earlier call to
     * 'resolveName'.  See 'SourceMember.resolveTypeStructure'.  The situation
     * is actually even worse than this, because only a single location is
     * passed in for an entire declaration, which may contain many type names.
     * All error messages are thus poorly localized.  These checks should be
     * done while traversing the parse tree for the type, not the type descriptor.
     * <p>
     * DESIGN NOTE:
     * As far as I can tell, the two-stage resolution of names represented in
     * string form is an artifact of the late implementation of inner classes
     * and the use of mangled names internally within the compiler.  All
     * qualified names should have their hiearchical structure made explicit
     * in the parse tree at the phase at which they are presented for static
     * semantic checking.  This would affect class names appearing in 'extends',
     * 'implements', and 'throws' clauses, as well as in member declarations.
     */
    public boolean resolve(long where, ClassDefinition c, Type t) {
        switch (t.getTypeCode()) {
          case TC_CLASS: {
            ClassDefinition def;
            try {
                Identifier nm = t.getClassName();
                if (!nm.isQualified() && !nm.isInner() && !classExists(nm)) {
                    resolve(nm); // elicit complaints about ambiguity
                }
                def = getQualifiedClassDefinition(where, nm, c, false);
                if (!c.canAccess(this, def.getClassDeclaration())) {
                    // Reported error location may be imprecise
                    // if the name is qualified.
                    error(where, "cant.access.class", def);
                    return true; // return false later
                }
                def.noteUsedBy(c, where, env);
            } catch (AmbiguousClass ee) {
                error(where, "ambig.class", ee.name1, ee.name2);
                return false;
            } catch (ClassNotFound e) {
                // For now, report "class.and.package" only when the code
                // is going to fail anyway.
                try {
                    if (e.name.isInner() &&
                        getPackage(e.name.getTopName()).exists()) {
                        env.error(where, "class.and.package",
                                  e.name.getTopName());
                    }
                } catch (IOException ee) {
                    env.error(where, "io.exception", "package check");
                }
                // This error message is also emitted for 'new' expressions.
                // error(where, "class.not.found", e.name, "declaration");
                error(where, "class.not.found.no.context", e.name);
                return false;
            }
            return true;
          }
          case TC_ARRAY:
            // An array type is well-formed iff its element type is.
            return resolve(where, c, t.getElementType());
          case TC_METHOD:
            // Check the return type and all argument types; all errors
            // are reported, not just the first.
            boolean ok = resolve(where, c, t.getReturnType());
            Type args[] = t.getArgumentTypes();
            for (int i = args.length ; i-- > 0 ; ) {
                ok &= resolve(where, c, args[i]);
            }
            return ok;
        }
        // Primitive types need no resolution.
        return true;
    }
/**
* Given its fully-qualified name, verify that a class is defined and accessible.
* Used to check components of qualified names in contexts where a class is expected.
* Like 'resolve', but is given a single type name, not a type descriptor.
*/
public boolean resolveByName(long where, ClassDefinition c, Identifier nm) {
return resolveByName(where, c, nm, false);
}
public boolean resolveExtendsByName(long where, ClassDefinition c, Identifier nm) {
return resolveByName(where, c, nm, true);
}
    /**
     * Shared implementation of 'resolveByName' and
     * 'resolveExtendsByName'.  Verifies that 'nm' names a defined class
     * accessible from 'c', reporting any problem at 'where'.
     */
    private boolean resolveByName(long where, ClassDefinition c,
                                  Identifier nm, boolean isExtends) {
        ClassDefinition def;
        try {
            if (!nm.isQualified() && !nm.isInner() && !classExists(nm)) {
                resolve(nm); // elicit complaints about ambiguity
            }
            def = getQualifiedClassDefinition(where, nm, c, isExtends);
            ClassDeclaration decl = def.getClassDeclaration();
            // Apply the ordinary access check, or the 'extends' variant
            // when resolving a superclass/superinterface name.
            if (!((!isExtends && c.canAccess(this, decl))
                  ||
                  (isExtends && c.extendsCanAccess(this, decl)))) {
                error(where, "cant.access.class", def);
                return true; // return false later
            }
        } catch (AmbiguousClass ee) {
            error(where, "ambig.class", ee.name1, ee.name2);
            return false;
        } catch (ClassNotFound e) {
            // For now, report "class.and.package" only when the code
            // is going to fail anyway.
            try {
                if (e.name.isInner() &&
                    getPackage(e.name.getTopName()).exists()) {
                    env.error(where, "class.and.package",
                              e.name.getTopName());
                }
            } catch (IOException ee) {
                env.error(where, "io.exception", "package check");
            }
            error(where, "class.not.found", e.name, "type name");
            return false;
        }
        return true;
    }
    /**
     * Like 'getClassDefinition(env)', but check access on each component.
     * Currently called only by 'resolve' above.  It is doubtful that calls
     * to 'getClassDefinition(env)' are appropriate now.
     * <p>
     * Walks the flat name component by component, descending into inner
     * classes and checking that 'ctxClass' may access each one.
     */
    public final ClassDefinition
    getQualifiedClassDefinition(long where,
                                Identifier nm,
                                ClassDefinition ctxClass,
                                boolean isExtends) throws ClassNotFound {
        if (nm.isInner()) {
            ClassDefinition c = getClassDefinition(nm.getTopName());
            Identifier tail = nm.getFlatName();
        walkTail:
            while (tail.isQualified()) {
                tail = tail.getTail();
                Identifier head = tail.getHead();
                // System.out.println("CLASS: " + c + " HEAD: " + head + " TAIL: " + tail);
                String hname = head.toString();
                // Handle synthesized names of local and anonymous classes.
                // See 'getClassDefinition(env)' above.
                if (hname.length() > 0
                    && Character.isDigit(hname.charAt(0))) {
                    ClassDefinition localClass = c.getLocalClass(hname);
                    if (localClass != null) {
                        c = localClass;
                        continue walkTail;
                    }
                } else {
                    for (MemberDefinition f = c.getFirstMatch(head);
                         f != null; f = f.getNextMatch()) {
                        if (f.isInnerClass()) {
                            ClassDeclaration rdecl = c.getClassDeclaration();
                            c = f.getInnerClass();
                            ClassDeclaration fdecl = c.getClassDeclaration();
                            // This check is presumably applicable even if the
                            // original source-code name (expanded by 'resolveNames')
                            // was a simple, unqualified name.  Hopefully, JLS 2e
                            // will clarify the matter.
                            if ((!isExtends
                                 && !ctxClass.canAccess(env, fdecl))
                                ||
                                (isExtends
                                 && !ctxClass.extendsCanAccess(env, fdecl))) {
                                // Reported error location is imprecise.
                                env.error(where, "no.type.access", head, rdecl, ctxClass);
                            }
                            // The JLS 6.6.2 restrictions on access to protected members
                            // depend in an essential way upon the syntactic form of the name.
                            // Since the compiler has previously expanded the class names
                            // here into fully-qualified form ('resolveNames'), this check
                            // cannot be performed here.  Unfortunately, the original names
                            // are clobbered during 'basicCheck', which is also the phase that
                            // resolves the inheritance structure, required to implement the
                            // access restrictions.  Pending a large-scale revision of the
                            // name-resolution machinery, we forgo this check, with the result
                            // that the JLS 6.6.2 restrictions are not enforced for some cases
                            // of qualified access to inner classes.  Some qualified names are
                            // resolved elsewhere via a different mechanism, and will be
                            // treated correctly -- see 'FieldExpression.checkCommon'.
                            /*---------------------------------------*
                            if (f.isProtected()) {
                                Type rty = Type.tClass(rdecl.getName()); // hack
                                if (!ctxClass.protectedAccess(env, f, rty)) {
                                    // Reported error location is imprecise.
                                    env.error(where, "invalid.protected.type.use",
                                              head, ctxClass, rty);
                                }
                            }
                            *---------------------------------------*/
                            continue walkTail;
                        }
                    }
                }
                // No matching local or inner class for this component.
                throw new ClassNotFound(Identifier.lookupInner(c.getName(), head));
            }
            //System.out.println("FOUND " + c + " FOR " + nm);
            return c;
        }
        return getClassDeclaration(nm).getClassDefinition(this);
    }
    /**
     * Resolve the names within a type, returning the adjusted type.
     * Adjust class names to reflect scoping.
     * Do not report errors.
     * <p>
     * NOTE: It would be convenient to check for errors here, such as
     * verifying that each component of a qualified name exists and is
     * accessible.  Why must this be done in a separate phase?
     * <p>
     * If the 'synth' argument is true, indicating that the member whose
     * type is being resolved is synthetic, names are resolved with respect
     * to the package scope.  (Fix for 4097882)
     */
    public Type resolveNames(ClassDefinition c, Type t, boolean synth) {
        if (tracing) dtEvent("Environment.resolveNames: " + c + ", " + t);
        switch (t.getTypeCode()) {
          case TC_CLASS: {
            Identifier name = t.getClassName();
            Identifier rname;
            if (synth) {
                // Synthetic members resolve against package scope only.
                rname = resolvePackageQualifiedName(name);
            } else {
                rname = c.resolveName(this, name);
            }
            if (name != rname) {
                // Only rebuild the type when the name actually changed,
                // so unchanged types keep their identity.
                t = Type.tClass(rname);
            }
            break;
          }
          case TC_ARRAY:
            // Resolve the element type and rewrap it as an array type.
            t = Type.tArray(resolveNames(c, t.getElementType(), synth));
            break;
          case TC_METHOD: {
            // Resolve the return type and every argument type; rebuild
            // the method type only if something changed.
            Type ret = t.getReturnType();
            Type rret = resolveNames(c, ret, synth);
            Type args[] = t.getArgumentTypes();
            Type rargs[] = new Type[args.length];
            boolean changed = (ret != rret);
            for (int i = args.length ; i-- > 0 ; ) {
                Type arg = args[i];
                Type rarg = resolveNames(c, arg, synth);
                rargs[i] = rarg;
                if (arg != rarg) {
                    changed = true;
                }
            }
            if (changed) {
                t = Type.tMethod(rret, rargs);
            }
            break;
          }
        }
        return t;
    }
    /**
     * Resolve a class name, using only package and import directives.
     * Report no errors.
     * <p>
     * For a qualified name, the head is resolved first because inner
     * class names take precedence over package prefixes.
     */
    public Identifier resolveName(Identifier name) {
        // This logic is pretty exactly parallel to that of
        // ClassDefinition.resolveName().
        if (name.isQualified()) {
            // Try to resolve the first identifier component,
            // because inner class names take precedence over
            // package prefixes.  (Cf. ClassDefinition.resolveName.)
            Identifier rhead = resolveName(name.getHead());
            if (rhead.hasAmbigPrefix()) {
                // The first identifier component refers to an
                // ambiguous class.  Limp on.  We throw away the
                // rest of the classname as it is irrelevant.
                // (part of solution for 4059855).
                return rhead;
            }
            if (!this.classExists(rhead)) {
                // Head is not a known class: try to interpret the whole
                // name as a package-qualified class name instead.
                return this.resolvePackageQualifiedName(name);
            }
            try {
                return this.getClassDefinition(rhead).
                    resolveInnerClass(this, name.getTail());
            } catch (ClassNotFound ee) {
                // return partially-resolved name someone else can fail on
                return Identifier.lookupInner(rhead, name.getTail());
            }
        }
        try {
            return resolve(name);
        } catch (AmbiguousClass ee) {
            // Don't force a resolution of the name if it is ambiguous.
            // Forcing the resolution would tack the current package
            // name onto the front of the class, which would be wrong.
            // Instead, mark the name as ambiguous and let a later stage
            // find the error by calling env.resolve(name).
            // (part of solution for 4059855).
            if (name.hasAmbigPrefix()) {
                return name;
            } else {
                return name.addAmbigPrefix();
            }
        } catch (ClassNotFound ee) {
            // last chance to make something halfway sensible
            Imports imports = getImports();
            if (imports != null)
                return imports.forceResolve(this, name);
        }
        return name;
    }
    /**
     * Discover if name consists of a package prefix, followed by the
     * name of a class (that actually exists), followed possibly by
     * some inner class names.  If we can't find a class that exists,
     * return the name unchanged.
     * <p>
     * This routine is used after a class name fails to
     * be resolved by means of imports or inner classes.
     * However, import processing uses this routine directly,
     * since import names must be exactly qualified to start with.
     */
    public final Identifier resolvePackageQualifiedName(Identifier name) {
        // Strip trailing components into 'tail' one at a time until the
        // remaining prefix names an existing class, or no qualifiers
        // remain.
        Identifier tail = null;
        for (;;) {
            if (classExists(name)) {
                break;
            }
            if (!name.isQualified()) {
                // Nothing matched: reassemble and return the original.
                name = (tail == null) ? name : Identifier.lookup(name, tail);
                tail = null;
                break;
            }
            Identifier nm = name.getName();
            tail = (tail == null)? nm: Identifier.lookup(nm, tail);
            name = name.getQualifier();
        }
        if (tail != null)
            // Reattach the stripped components as inner-class names.
            name = Identifier.lookupInner(name, tail);
        return name;
    }
/**
* Resolve a class name, using only package and import directives.
*/
public Identifier resolve(Identifier nm) throws ClassNotFound {
if (env == null) return nm; // a pretty useless no-op
return env.resolve(nm);
}
/**
* Get the imports used to resolve class names.
*/
public Imports getImports() {
if (env == null) return null; // lame default
return env.getImports();
}
/**
* Create a new class.
*/
public ClassDefinition makeClassDefinition(Environment origEnv, long where,
IdentifierToken name,
String doc, int modifiers,
IdentifierToken superClass,
IdentifierToken interfaces[],
ClassDefinition outerClass) {
if (env == null) return null; // lame default
return env.makeClassDefinition(origEnv, where, name,
doc, modifiers,
superClass, interfaces, outerClass);
}
/**
* Create a new field.
*/
public MemberDefinition makeMemberDefinition(Environment origEnv, long where,
ClassDefinition clazz,
String doc, int modifiers,
Type type, Identifier name,
IdentifierToken argNames[],
IdentifierToken expIds[],
Object value) {
if (env == null) return null; // lame default
return env.makeMemberDefinition(origEnv, where, clazz, doc, modifiers,
type, name, argNames, expIds, value);
}
/**
* Returns true if the given method is applicable to the given arguments
*/
public boolean isApplicable(MemberDefinition m, Type args[]) throws ClassNotFound {
Type mType = m.getType();
if (!mType.isType(TC_METHOD))
return false;
Type mArgs[] = mType.getArgumentTypes();
if (args.length != mArgs.length)
return false;
for (int i = args.length ; --i >= 0 ;)
if (!isMoreSpecific(args[i], mArgs[i]))
return false;
return true;
}
/**
* Returns true if "best" is in every argument at least as good as "other"
*/
public boolean isMoreSpecific(MemberDefinition best, MemberDefinition other)
throws ClassNotFound {
Type bestType = best.getClassDeclaration().getType();
Type otherType = other.getClassDeclaration().getType();
boolean result = isMoreSpecific(bestType, otherType)
&& isApplicable(other, best.getType().getArgumentTypes());
// System.out.println("isMoreSpecific: " + best + "/" + other
// + " => " + result);
return result;
}
/**
* Returns true if "from" is a more specific type than "to"
*/
public boolean isMoreSpecific(Type from, Type to) throws ClassNotFound {
return implicitCast(from, to);
}
    /**
     * Return true if an implicit cast from this type to
     * the given type is allowed.
     * <p>
     * For primitive numeric types this implements the widening
     * conversions: the case labels below intentionally fall through so
     * that, e.g., TC_BYTE reaches the TC_SHORT, TC_INT, TC_LONG,
     * TC_FLOAT and TC_DOUBLE tests in turn.
     */
    public boolean implicitCast(Type from, Type to) throws ClassNotFound {
        if (from == to)
            return true;
        int toTypeCode = to.getTypeCode();
        switch(from.getTypeCode()) {
        case TC_BYTE:
            if (toTypeCode == TC_SHORT)
                return true;
            // falls through
        case TC_SHORT:
        case TC_CHAR:
            if (toTypeCode == TC_INT) return true;
            // falls through
        case TC_INT:
            if (toTypeCode == TC_LONG) return true;
            // falls through
        case TC_LONG:
            if (toTypeCode == TC_FLOAT) return true;
            // falls through
        case TC_FLOAT:
            if (toTypeCode == TC_DOUBLE) return true;
            // falls through
        case TC_DOUBLE:
        default:
            return false;
        case TC_NULL:
            // null widens to any reference type.
            return to.inMask(TM_REFERENCE);
        case TC_ARRAY:
            if (!to.isType(TC_ARRAY)) {
                // An array widens only to Object, Cloneable, or
                // java.io.Serializable.
                return (to == Type.tObject || to == Type.tCloneable
                        || to == Type.tSerializable);
            } else {
                // both are arrays.  recurse down both until one isn't an array
                do {
                    from = from.getElementType();
                    to = to.getElementType();
                } while (from.isType(TC_ARRAY) && to.isType(TC_ARRAY));
                if ( from.inMask(TM_ARRAY|TM_CLASS)
                     && to.inMask(TM_ARRAY|TM_CLASS)) {
                    return isMoreSpecific(from, to);
                } else {
                    // Primitive element types must match exactly.
                    return (from.getTypeCode() == to.getTypeCode());
                }
            }
        case TC_CLASS:
            if (toTypeCode == TC_CLASS) {
                ClassDefinition fromDef = getClassDefinition(from);
                ClassDefinition toDef = getClassDefinition(to);
                return toDef.implementedBy(this,
                                           fromDef.getClassDeclaration());
            } else {
                return false;
            }
        }
    }
    /**
     * Return true if an explicit cast from this type to
     * the given type is allowed.
     */
    public boolean explicitCast(Type from, Type to) throws ClassNotFound {
        // Anything implicitly convertible is trivially castable.
        if (implicitCast(from, to)) {
            return true;
        }
        // Any numeric type casts to any other numeric type.
        if (from.inMask(TM_NUMBER)) {
            return to.inMask(TM_NUMBER);
        }
        if (from.isType(TC_CLASS) && to.isType(TC_CLASS)) {
            ClassDefinition fromClass = getClassDefinition(from);
            ClassDefinition toClass = getClassDefinition(to);
            // A cast to/from a final class succeeds only along the
            // existing subtype relation.
            if (toClass.isFinal()) {
                return fromClass.implementedBy(this,
                                               toClass.getClassDeclaration());
            }
            if (fromClass.isFinal()) {
                return toClass.implementedBy(this,
                                             fromClass.getClassDeclaration());
            }
            // The code here used to omit this case.  If both types
            // involved in a cast are interfaces, then JLS 5.5 requires
            // that we do a simple test -- make sure none of the methods
            // in toClass and fromClass have the same signature but
            // different return types.  (bug number 4028359)
            if (toClass.isInterface() && fromClass.isInterface()) {
                return toClass.couldImplement(fromClass);
            }
            return toClass.isInterface() ||
                   fromClass.isInterface() ||
                   fromClass.superClassOf(this, toClass.getClassDeclaration());
        }
        if (to.isType(TC_ARRAY)) {
            if (from.isType(TC_ARRAY)) {
                // Strip matching array dimensions, then compare the
                // remaining element types.
                Type t1 = from.getElementType();
                Type t2 = to.getElementType();
                while ((t1.getTypeCode() == TC_ARRAY)
                       && (t2.getTypeCode() == TC_ARRAY)) {
                    t1 = t1.getElementType();
                    t2 = t2.getElementType();
                }
                if (t1.inMask(TM_ARRAY|TM_CLASS) &&
                    t2.inMask(TM_ARRAY|TM_CLASS)) {
                    return explicitCast(t1, t2);
                }
            } else if (from == Type.tObject || from == Type.tCloneable
                       || from == Type.tSerializable)
                return true;
        }
        return false;
    }
/**
* Flags.
*/
public int getFlags() {
return env.getFlags();
}
/**
* Debugging flags. There used to be a method debug()
* that has been replaced because -g has changed meaning
* (it now cooperates with -O and line number, variable
* range and source file info can be toggled separately).
*/
public final boolean debug_lines() {
return (getFlags() & F_DEBUG_LINES) != 0;
}
public final boolean debug_vars() {
return (getFlags() & F_DEBUG_VARS) != 0;
}
public final boolean debug_source() {
return (getFlags() & F_DEBUG_SOURCE) != 0;
}
/**
* Optimization flags. There used to be a method optimize()
* that has been replaced because -O has changed meaning in
* javac to be replaced with -O and -O:interclass.
*/
public final boolean opt() {
return (getFlags() & F_OPT) != 0;
}
public final boolean opt_interclass() {
return (getFlags() & F_OPT_INTERCLASS) != 0;
}
/**
* Verbose
*/
public final boolean verbose() {
return (getFlags() & F_VERBOSE) != 0;
}
/**
* Dump debugging stuff
*/
public final boolean dump() {
return (getFlags() & F_DUMP) != 0;
}
/**
* Verbose
*/
public final boolean warnings() {
return (getFlags() & F_WARNINGS) != 0;
}
/**
* Dependencies
*/
public final boolean dependencies() {
return (getFlags() & F_DEPENDENCIES) != 0;
}
/**
* Print Dependencies to stdout
*/
public final boolean print_dependencies() {
return (getFlags() & F_PRINT_DEPENDENCIES) != 0;
}
/**
* Deprecation warnings are enabled.
*/
public final boolean deprecation() {
return (getFlags() & F_DEPRECATION) != 0;
}
/**
* Do not support virtual machines before version 1.2.
* This option is not supported and is only here for testing purposes.
*/
public final boolean version12() {
return (getFlags() & F_VERSION12) != 0;
}
/**
* Floating point is strict by default
*/
public final boolean strictdefault() {
return (getFlags() & F_STRICTDEFAULT) != 0;
}
/**
* Release resources, if any.
*/
public void shutdown() {
if (env != null) {
env.shutdown();
}
}
/**
* Issue an error.
* source - the input source, usually a file name string
* offset - the offset in the source of the error
* err - the error number (as defined in this interface)
* arg1 - an optional argument to the error (null if not applicable)
* arg2 - a second optional argument to the error (null if not applicable)
* arg3 - a third optional argument to the error (null if not applicable)
*/
public void error(Object source, long where, String err, Object arg1, Object arg2, Object arg3) {
env.error(source, where, err, arg1, arg2, arg3);
}
public final void error(long where, String err, Object arg1, Object arg2, Object arg3) {
error(source, where, err, arg1, arg2, arg3);
}
public final void error(long where, String err, Object arg1, Object arg2) {
error(source, where, err, arg1, arg2, null);
}
public final void error(long where, String err, Object arg1) {
error(source, where, err, arg1, null, null);
}
public final void error(long where, String err) {
error(source, where, err, null, null, null);
}
/**
 * Output a string via the enclosing environment. This can either be an
 * error message or something for debugging. This should be used instead
 * of println so output is routed consistently.
 */
public void output(String msg) {
env.output(msg);
}
/** True when the "javac.debug" system property is set; gates debugOutput. */
private static boolean debugging = (System.getProperty("javac.debug") != null);
/**
 * Print a debugging message to stdout; a no-op unless the
 * "javac.debug" system property is set.
 */
public static void debugOutput(Object msg) {
    if (!Environment.debugging) {
        return;
    }
    System.out.println(msg.toString());
}
/**
 * Set the character encoding name used when reading sources.
 */
public void setCharacterEncoding(String encoding) {
this.encoding = encoding;
}
/**
 * Return the character encoding name, or null if never set.
 */
public String getCharacterEncoding() {
return encoding;
}
/**
 * Return the major version to use in generated class files.
 * Falls back to JAVA_DEFAULT_VERSION when no environment is attached.
 */
public short getMajorVersion() {
if (env==null) return JAVA_DEFAULT_VERSION; // needed for javah
return env.getMajorVersion();
}
/**
 * Return the minor version to use in generated class files.
 * Falls back to JAVA_DEFAULT_MINOR_VERSION when no environment is attached.
 */
public short getMinorVersion() {
if (env==null) return JAVA_DEFAULT_MINOR_VERSION; // needed for javah
return env.getMinorVersion();
}
// JCOV
/**
 * Whether JCOV coverage instrumentation is enabled (the F_COVERAGE flag is set).
 */
public final boolean coverage() {
return (getFlags() & F_COVERAGE) != 0;
}
/**
 * Whether generation of the JCOV coverage data file is enabled
 * (the F_COVDATA flag is set).
 */
public final boolean covdata() {
return (getFlags() & F_COVDATA) != 0;
}
/**
 * Return the JCOV coverage data file, as supplied by the environment.
 */
public File getcovFile() {
return env.getcovFile();
}
// end JCOV
/**
* Debug tracing.
* Currently, this code is used only for tracing the loading and
* checking of classes, particularly the demand-driven aspects.
* This code should probably be integrated with 'debugOutput' above,
* but we need to give more thought to the issue of classifying debugging
* messages and allowing those only those of interest to be enabled.
*
* Calls to these methods are generally conditioned on the final variable
* 'Constants.tracing', which allows the calls to be completely omitted
* in a production release to avoid space and time overhead.
*/
/** True when the "javac.trace.depend" system property is set; gates the dt* trace methods. */
private static boolean dependtrace =
(System.getProperty("javac.trace.depend") != null);
/** Trace entry into a dependency-checking step; no-op unless dependtrace is on. */
public void dtEnter(String s) {
if (dependtrace) System.out.println(">>> " + s);
}
/** Trace exit from a dependency-checking step; no-op unless dependtrace is on. */
public void dtExit(String s) {
if (dependtrace) System.out.println("<<< " + s);
}
/** Trace a single dependency-checking event; no-op unless dependtrace is on. */
public void dtEvent(String s) {
if (dependtrace) System.out.println(s);
}
/**
* Enable diagnostic dump of class modifier bits, including those
* in InnerClasses attributes, as they are written to the classfile.
* In the future, may also enable dumping field and method modifiers.
*/
/** True when the "javac.dump.modifiers" system property is set. */
private static boolean dumpmodifiers =
(System.getProperty("javac.dump.modifiers") != null);
/** Whether to dump class modifier bits as they are written to the classfile. */
public boolean dumpModifiers() { return dumpmodifiers; }
}
|
oracle/graal | 38,239 | truffle/src/com.oracle.truffle.api.bytecode.test/src/com/oracle/truffle/api/bytecode/test/CustomYieldTest.java | /*
* Copyright (c) 2025, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* The Universal Permissive License (UPL), Version 1.0
*
* Subject to the condition set forth below, permission is hereby granted to any
* person obtaining a copy of this software, associated documentation and/or
* data (collectively the "Software"), free of charge and under any and all
* copyright rights in the Software, and any and all patent rights owned or
* freely licensable by each licensor hereunder covering either (i) the
* unmodified Software as contributed to or provided by such licensor, or (ii)
* the Larger Works (as defined below), to deal in both
*
* (a) the Software, and
*
* (b) any piece of software and/or hardware listed in the lrgrwrks.txt file if
* one is included with the Software (each a "Larger Work" to which the Software
* is contributed by such licensors),
*
* without restriction, including without limitation the rights to copy, create
* derivative works of, display, perform, and distribute the Software and make,
* use, sell, offer for sale, import, export, have made, and have sold the
* Software and the Larger Work(s), and to sublicense the foregoing rights on
* either these or other terms.
*
* This license is subject to the following condition:
*
* The above copyright notice and either this complete permission notice or at a
* minimum a reference to the UPL must be included in all copies or substantial
* portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.oracle.truffle.api.bytecode.test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
import java.io.ByteArrayOutputStream;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.DataOutputStream;
import java.io.IOException;
import java.lang.reflect.Field;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.BiConsumer;
import java.util.function.Supplier;
import java.util.stream.Stream;
import org.graalvm.polyglot.Context;
import org.junit.Test;
import com.oracle.truffle.api.bytecode.BytecodeConfig;
import com.oracle.truffle.api.bytecode.BytecodeLocal;
import com.oracle.truffle.api.bytecode.BytecodeRootNode;
import com.oracle.truffle.api.bytecode.BytecodeRootNodes;
import com.oracle.truffle.api.bytecode.BytecodeTier;
import com.oracle.truffle.api.bytecode.ConstantOperand;
import com.oracle.truffle.api.bytecode.ContinuationResult;
import com.oracle.truffle.api.bytecode.ContinuationRootNode;
import com.oracle.truffle.api.bytecode.GenerateBytecode;
import com.oracle.truffle.api.bytecode.Operation;
import com.oracle.truffle.api.bytecode.Variadic;
import com.oracle.truffle.api.bytecode.Yield;
import com.oracle.truffle.api.bytecode.serialization.BytecodeDeserializer;
import com.oracle.truffle.api.bytecode.serialization.BytecodeSerializer;
import com.oracle.truffle.api.bytecode.serialization.SerializationUtils;
import com.oracle.truffle.api.bytecode.test.error_tests.ExpectError;
import com.oracle.truffle.api.bytecode.test.error_tests.ErrorTests.ErrorLanguage;
import com.oracle.truffle.api.dsl.Bind;
import com.oracle.truffle.api.dsl.Fallback;
import com.oracle.truffle.api.dsl.Specialization;
import com.oracle.truffle.api.frame.FrameDescriptor;
import com.oracle.truffle.api.frame.MaterializedFrame;
import com.oracle.truffle.api.frame.VirtualFrame;
import com.oracle.truffle.api.instrumentation.ExecutionEventNode;
import com.oracle.truffle.api.instrumentation.ExecutionEventNodeFactory;
import com.oracle.truffle.api.instrumentation.Instrumenter;
import com.oracle.truffle.api.instrumentation.SourceSectionFilter;
import com.oracle.truffle.api.instrumentation.TruffleInstrument;
import com.oracle.truffle.api.instrumentation.StandardTags.StatementTag;
import com.oracle.truffle.api.interop.TruffleObject;
import com.oracle.truffle.api.nodes.RootNode;
public class CustomYieldTest {
/**
* Tests basic usage of a custom yield.
*/
@Test
public void testBasic() {
BytecodeRootNodes<CustomYieldTestRootNode> nodes = CustomYieldTestRootNodeGen.create(null, BytecodeConfig.DEFAULT, b -> {
b.beginRoot();
b.beginReturn();
b.beginCustomYield();
b.emitLoadArgument(0);
b.endCustomYield();
b.endReturn();
b.endRoot();
});
CustomYieldTestRootNode root = nodes.getNode(0);
CustomYieldResult result = (CustomYieldResult) root.getCallTarget().call(42);
assertEquals(42, result.value());
assertEquals(123, result.continueWith(123));
}
/**
* Tests that a custom yield can be serialized/deserialized.
*/
@Test
public void testSerialization() throws IOException {
BytecodeRootNodes<CustomYieldTestRootNode> nodes = CustomYieldTestRootNodeGen.create(null, BytecodeConfig.DEFAULT, b -> {
b.beginRoot();
b.beginReturn();
b.beginCustomYield();
b.emitLoadArgument(0);
b.endCustomYield();
b.endReturn();
b.endRoot();
});
ByteArrayOutputStream output = new ByteArrayOutputStream();
nodes.serialize(new DataOutputStream(output), SERIALIZER);
Supplier<DataInput> input = () -> SerializationUtils.createDataInput(ByteBuffer.wrap(output.toByteArray()));
BytecodeRootNodes<CustomYieldTestRootNode> deserialized = CustomYieldTestRootNodeGen.deserialize(null, BytecodeConfig.DEFAULT, input, DESERIALIZER);
CustomYieldTestRootNode root = deserialized.getNode(0);
CustomYieldResult result = (CustomYieldResult) root.getCallTarget().call(42);
assertEquals(42, result.value());
assertEquals(123, result.continueWith(123));
}
/**
* Tests that stack state from ongoing operations is preserved and used in the resumed frame.
*/
@Test
public void testResumeOngoingOperation() {
BytecodeRootNodes<CustomYieldTestRootNode> nodes = CustomYieldTestRootNodeGen.create(null, BytecodeConfig.DEFAULT, b -> {
b.beginRoot();
b.beginReturn();
b.beginAdd();
b.emitLoadArgument(0);
b.beginCustomYield();
b.emitLoadArgument(0);
b.endCustomYield();
b.endAdd();
b.endReturn();
b.endRoot();
});
CustomYieldTestRootNode root = nodes.getNode(0);
CustomYieldResult result = (CustomYieldResult) root.getCallTarget().call(42);
assertEquals(42, result.value());
assertEquals(63, result.continueWith(21));
}
/**
* A simple root node with a custom yield (and no built-in yield).
*/
@GenerateBytecode(languageClass = BytecodeDSLTestLanguage.class, enableSerialization = true)
public abstract static class CustomYieldTestRootNode extends DebugBytecodeRootNode implements BytecodeRootNode {
protected CustomYieldTestRootNode(BytecodeDSLTestLanguage language, FrameDescriptor frameDescriptor) {
super(language, frameDescriptor);
}
@Yield(javadoc = "A simple custom yield operation.")
public static final class CustomYield {
@Specialization
public static Object doYield(Object result, @Bind ContinuationRootNode root, @Bind MaterializedFrame frame) {
return new CustomYieldResult(root, frame, result);
}
}
@Operation
public static final class Add {
@Specialization
public static int doAdd(int x, int y) {
return x + y;
}
}
}
/**
* Tests that custom yields can be used in uncached.
*/
@Test
public void testUncached() {
BytecodeRootNodes<CustomYieldUncachedRootNode> nodes = CustomYieldUncachedRootNodeGen.create(null, BytecodeConfig.DEFAULT, b -> {
b.beginRoot();
b.beginReturn();
b.beginCustomYield();
b.emitLoadArgument(0);
b.endCustomYield();
b.endReturn();
b.endRoot();
});
CustomYieldUncachedRootNode root = nodes.getNode(0);
final int uncachedThreshold = 10;
root.getBytecodeNode().setUncachedThreshold(uncachedThreshold);
for (int i = 0; i < uncachedThreshold / 2; i++) {
assertEquals(BytecodeTier.UNCACHED, root.getBytecodeNode().getTier());
CustomYieldResult result = (CustomYieldResult) root.getCallTarget().call(42);
assertEquals(42, result.value());
assertEquals(BytecodeTier.UNCACHED, result.root().getSourceRootNode().getBytecodeNode().getTier());
assertEquals(123, result.continueWith(123));
}
assertEquals(BytecodeTier.CACHED, root.getBytecodeNode().getTier());
CustomYieldResult result = (CustomYieldResult) root.getCallTarget().call(42);
assertEquals(42, result.value());
assertEquals(BytecodeTier.CACHED, result.root().getSourceRootNode().getBytecodeNode().getTier());
assertEquals(123, result.continueWith(123));
}
/**
* Tests that stack state from ongoing operations is preserved and used in the resumed frame in
* uncached.
*/
@Test
public void testUncachedResumeOngoingOperation() {
BytecodeRootNodes<CustomYieldUncachedRootNode> nodes = CustomYieldUncachedRootNodeGen.create(null, BytecodeConfig.DEFAULT, b -> {
b.beginRoot();
b.beginReturn();
b.beginAdd();
b.emitLoadArgument(0);
b.beginCustomYield();
b.emitLoadArgument(0);
b.endCustomYield();
b.endAdd();
b.endReturn();
b.endRoot();
});
CustomYieldUncachedRootNode root = nodes.getNode(0);
root.getBytecodeNode().setUncachedThreshold(10);
assertEquals(BytecodeTier.UNCACHED, root.getBytecodeNode().getTier());
CustomYieldResult result = (CustomYieldResult) root.getCallTarget().call(42);
assertEquals(42, result.value());
assertEquals(BytecodeTier.UNCACHED, result.root().getSourceRootNode().getBytecodeNode().getTier());
assertEquals(63, result.continueWith(21));
}
/**
* Tests that forceCached works as expected.
*/
@Test
public void testForceCached() {
BytecodeRootNodes<CustomYieldUncachedRootNode> nodes = CustomYieldUncachedRootNodeGen.create(null, BytecodeConfig.DEFAULT, b -> {
b.beginRoot();
b.beginReturn();
b.beginCachedOnlyCustomYield();
b.beginCustomYield();
b.emitLoadArgument(0);
b.endCustomYield();
b.endCachedOnlyCustomYield();
b.endReturn();
b.endRoot();
});
CustomYieldUncachedRootNode root = nodes.getNode(0);
root.getBytecodeNode().setUncachedThreshold(10);
assertEquals(BytecodeTier.UNCACHED, root.getBytecodeNode().getTier());
CustomYieldResult result = (CustomYieldResult) root.getCallTarget().call(42);
assertEquals(42, result.value());
// The inner yield does not force the transition to cached.
assertEquals(BytecodeTier.UNCACHED, result.root().getSourceRootNode().getBytecodeNode().getTier());
result = (CustomYieldResult) result.continueWith(123);
// The outer yield forces the transition to cached.
assertEquals(BytecodeTier.CACHED, result.root().getSourceRootNode().getBytecodeNode().getTier());
assertEquals(123, result.value());
assertEquals(456, result.continueWith(456));
}
/**
* A root node with custom yield and uncached support.
*/
@GenerateBytecode(languageClass = BytecodeDSLTestLanguage.class, enableUncachedInterpreter = true)
public abstract static class CustomYieldUncachedRootNode extends DebugBytecodeRootNode implements BytecodeRootNode {
protected CustomYieldUncachedRootNode(BytecodeDSLTestLanguage language, FrameDescriptor frameDescriptor) {
super(language, frameDescriptor);
}
@Yield
public static final class CustomYield {
@Specialization
public static Object doYield(Object result, @Bind ContinuationRootNode root, @Bind MaterializedFrame frame) {
return new CustomYieldResult(root, frame, result);
}
}
@SuppressWarnings("truffle-force-cached")
@Yield(forceCached = true)
public static final class CachedOnlyCustomYield {
@Specialization
public static Object doYield(Object result, @Bind ContinuationRootNode root, @Bind MaterializedFrame frame) {
return new CustomYieldResult(root, frame, result);
}
}
@Operation
public static final class Add {
@Specialization
public static int doAdd(int x, int y) {
return x + y;
}
}
}
/**
* Tests that custom yields can have multiple specializations (including fallbacks), in both
* cached and uncached.
*/
@Test
public void testMultiSpecializationCustomYield() {
BytecodeRootNodes<SpecializingCustomYieldTestRootNode> nodes = SpecializingCustomYieldTestRootNodeGen.create(null, BytecodeConfig.DEFAULT, b -> {
b.beginRoot();
b.beginReturn();
b.beginDoubleYield();
b.emitLoadArgument(0);
b.endDoubleYield();
b.endReturn();
b.endRoot();
});
SpecializingCustomYieldTestRootNode root = nodes.getNode(0);
root.getBytecodeNode().setUncachedThreshold(6);
assertEquals(BytecodeTier.UNCACHED, root.getBytecodeNode().getTier());
CustomYieldResult result = (CustomYieldResult) root.getCallTarget().call(21);
assertEquals(42, result.value());
assertEquals(BytecodeTier.UNCACHED, result.root().getSourceRootNode().getBytecodeNode().getTier());
assertEquals(123, result.continueWith(123));
assertEquals(BytecodeTier.UNCACHED, root.getBytecodeNode().getTier());
result = (CustomYieldResult) root.getCallTarget().call("hello");
assertEquals("hellohello", result.value());
assertEquals(BytecodeTier.UNCACHED, result.root().getSourceRootNode().getBytecodeNode().getTier());
assertEquals(123, result.continueWith(123));
assertEquals(BytecodeTier.UNCACHED, root.getBytecodeNode().getTier());
result = (CustomYieldResult) root.getCallTarget().call(new int[0]);
assertEquals(null, result.value());
assertEquals(BytecodeTier.UNCACHED, result.root().getSourceRootNode().getBytecodeNode().getTier());
assertEquals(123, result.continueWith(123));
assertEquals(BytecodeTier.CACHED, root.getBytecodeNode().getTier());
result = (CustomYieldResult) root.getCallTarget().call(21);
assertEquals(42, result.value());
assertEquals(BytecodeTier.CACHED, result.root().getSourceRootNode().getBytecodeNode().getTier());
assertEquals(123, result.continueWith(123));
assertEquals(BytecodeTier.CACHED, root.getBytecodeNode().getTier());
result = (CustomYieldResult) root.getCallTarget().call("hello");
assertEquals("hellohello", result.value());
assertEquals(BytecodeTier.CACHED, result.root().getSourceRootNode().getBytecodeNode().getTier());
assertEquals(123, result.continueWith(123));
assertEquals(BytecodeTier.CACHED, root.getBytecodeNode().getTier());
result = (CustomYieldResult) root.getCallTarget().call(new int[0]);
assertEquals(null, result.value());
assertEquals(BytecodeTier.CACHED, result.root().getSourceRootNode().getBytecodeNode().getTier());
assertEquals(123, result.continueWith(123));
}
/**
* An uncachable root node with a multi-specialization custom yield.
*/
@GenerateBytecode(languageClass = BytecodeDSLTestLanguage.class, enableUncachedInterpreter = true)
public abstract static class SpecializingCustomYieldTestRootNode extends DebugBytecodeRootNode implements BytecodeRootNode {
protected SpecializingCustomYieldTestRootNode(BytecodeDSLTestLanguage language, FrameDescriptor frameDescriptor) {
super(language, frameDescriptor);
}
@Yield
public static final class DoubleYield {
@Specialization
public static Object doInt(int result, @Bind ContinuationRootNode root, @Bind MaterializedFrame frame) {
return new CustomYieldResult(root, frame, result * 2);
}
@Specialization
public static Object doString(String result, @Bind ContinuationRootNode root, @Bind MaterializedFrame frame) {
return new CustomYieldResult(root, frame, result + result);
}
@Fallback
public static Object doFallback(@SuppressWarnings("unused") Object result, @Bind ContinuationRootNode root, @Bind MaterializedFrame frame) {
return new CustomYieldResult(root, frame, null);
}
}
}
/**
* Tests that multiple different yields -- including the built-in yield -- work as expected.
*/
@Test
public void testMultipleYields() {
ComplexCustomYieldTestRootNode root = ComplexCustomYieldTestRootNodeGen.create(null, BytecodeConfig.DEFAULT, b -> {
b.beginRoot();
b.beginReturn();
b.beginYield(); // C
b.beginAddConstantsYield(1); // B
b.emitNoResultYield(); // A
b.endAddConstantsYield(10);
b.endYield();
b.endReturn();
b.endRoot();
}).getNode(0);
// A: no result
CustomYieldResult result = (CustomYieldResult) root.getCallTarget().call(2);
assertEquals("no result", result.value());
// B: 1 + 10 + 100
ContinuationResult contResult = (ContinuationResult) result.continueWith(100);
assertEquals(111, contResult.getResult());
// C: 77
contResult = (ContinuationResult) contResult.continueWith(77);
assertEquals(77, contResult.getResult());
// return
assertEquals(39, contResult.continueWith(39));
}
/**
* Tests that multiple different yields work with serialization.
*/
@Test
public void testMultipleYieldsSerialization() throws IOException {
BytecodeRootNodes<ComplexCustomYieldTestRootNode> nodes = ComplexCustomYieldTestRootNodeGen.create(null, BytecodeConfig.DEFAULT, b -> {
b.beginRoot();
b.beginReturn();
b.beginYield(); // C
b.beginAddConstantsYield(1); // B
b.emitNoResultYield(); // A
b.endAddConstantsYield(10);
b.endYield();
b.endReturn();
b.endRoot();
});
ByteArrayOutputStream output = new ByteArrayOutputStream();
nodes.serialize(new DataOutputStream(output), SERIALIZER);
Supplier<DataInput> input = () -> SerializationUtils.createDataInput(ByteBuffer.wrap(output.toByteArray()));
BytecodeRootNodes<ComplexCustomYieldTestRootNode> deserialized = ComplexCustomYieldTestRootNodeGen.deserialize(null, BytecodeConfig.DEFAULT, input, DESERIALIZER);
ComplexCustomYieldTestRootNode root = deserialized.getNode(0);
// A: no result
CustomYieldResult result = (CustomYieldResult) root.getCallTarget().call(2);
assertEquals("no result", result.value());
// B: 1 + 10 + 100
ContinuationResult contResult = (ContinuationResult) result.continueWith(100);
assertEquals(111, contResult.getResult());
// C: 77
contResult = (ContinuationResult) contResult.continueWith(77);
assertEquals(77, contResult.getResult());
// return
assertEquals(39, contResult.continueWith(39));
}
/**
* Tests that stack state from ongoing operations is preserved and used in the resumed frame.
*/
@Test
public void testMultipleYieldsResumeOngoingOperation() {
BytecodeRootNodes<ComplexCustomYieldTestRootNode> nodes = ComplexCustomYieldTestRootNodeGen.create(null, BytecodeConfig.DEFAULT, b -> {
b.beginRoot();
BytecodeLocal local = b.createLocal();
b.beginStoreLocal(local);
b.emitLoadConstant(42);
b.endStoreLocal();
b.beginReturn();
b.beginAddMany();
b.emitLoadArgument(0);
b.emitNoResultYield(); // A
b.beginYield(); // B
b.emitLoadLocal(local);
b.endYield();
b.beginAddConstantsYield(10); // C
b.emitLoadArgument(0);
b.endAddConstantsYield(20);
b.endAddMany();
b.endReturn();
b.endRoot();
});
ComplexCustomYieldTestRootNode root = nodes.getNode(0);
// Test behaviour for uncached and cached (and a transition in between iterations).
root.getBytecodeNode().setUncachedThreshold(6);
for (int i = 0; i < 3; i++) {
// A: no result
CustomYieldResult result = (CustomYieldResult) root.getCallTarget().call(7);
assertEquals("no result", result.value());
// B: 42
ContinuationResult cont = (ContinuationResult) result.continueWith(70);
assertEquals(42, cont.getResult());
// C: 10 + 7 + 20
cont = (ContinuationResult) cont.continueWith(700);
assertEquals(37, cont.getResult());
// return: 7 + 70 + 700 + 7000
assertEquals(7777, cont.continueWith(7000));
}
}
@Test
public void testBoxingElimination() {
BytecodeRootNodes<ComplexCustomYieldTestRootNode> nodes = ComplexCustomYieldTestRootNodeGen.create(null, BytecodeConfig.DEFAULT, b -> {
b.beginRoot();
b.beginReturn();
b.beginBlock();
b.beginAddConstantsYield(30);
b.emitLoadArgument(0);
b.endAddConstantsYield(10);
b.endBlock();
b.endReturn();
b.endRoot();
});
ComplexCustomYieldTestRootNode root = nodes.getNode(0);
root.getBytecodeNode().setUncachedThreshold(0);
AbstractInstructionTest.assertInstructions(root,
"load.argument",
"c.AddConstantsYield",
"return");
ContinuationResult cont = (ContinuationResult) root.getCallTarget().call(2);
assertEquals(42, cont.getResult());
AbstractInstructionTest.assertInstructions(root,
"load.argument$Int",
"c.AddConstantsYield$Int",
"return");
assertEquals(123, cont.continueWith(123));
cont = (ContinuationResult) root.getCallTarget().call("foo");
assertEquals("30foo10", cont.getResult());
AbstractInstructionTest.assertInstructions(root,
"load.argument",
"c.AddConstantsYield",
"return");
assertEquals(123, cont.continueWith(123));
}
/**
* Tests that tag instrumentation works as expected.
*/
@Test
public void testTagInstrumentation() {
runInstrumentationTest((context, instrumenter) -> {
BytecodeRootNodes<ComplexCustomYieldTestRootNode> nodes = ComplexCustomYieldTestRootNodeGen.create(BytecodeDSLTestLanguage.REF.get(null), BytecodeConfig.DEFAULT, b -> {
b.beginRoot();
b.beginReturn();
b.beginTag(StatementTag.class);
b.emitNoResultYield();
b.endTag(StatementTag.class);
b.endReturn();
b.endRoot();
});
ComplexCustomYieldTestRootNode root = nodes.getNode(0);
root.getBytecodeNode().setUncachedThreshold(4);
assertEquals(BytecodeTier.UNCACHED, root.getBytecodeNode().getTier());
CustomYieldResult result = (CustomYieldResult) root.getCallTarget().call();
assertEquals("no result", result.value());
assertEquals(BytecodeTier.UNCACHED, result.root().getSourceRootNode().getBytecodeNode().getTier());
assertEquals(123, result.continueWith(123));
List<Object> yieldValues = new ArrayList<>();
AtomicInteger resumeCount = new AtomicInteger();
instrumenter.attachExecutionEventFactory(SourceSectionFilter.newBuilder().tagIs(StatementTag.class).build(), createFactory(yieldValues, resumeCount));
assertEquals(BytecodeTier.UNCACHED, root.getBytecodeNode().getTier());
result = (CustomYieldResult) root.getCallTarget().call();
assertEquals("no result", result.value());
assertEquals(BytecodeTier.UNCACHED, result.root().getSourceRootNode().getBytecodeNode().getTier());
assertEquals(123, result.continueWith(123));
assertEquals(BytecodeTier.CACHED, root.getBytecodeNode().getTier());
result = (CustomYieldResult) root.getCallTarget().call(19);
assertEquals("no result", result.value());
assertEquals(BytecodeTier.CACHED, result.root().getSourceRootNode().getBytecodeNode().getTier());
assertEquals(456, result.continueWith(456));
assertEquals(Arrays.asList(null, null), yieldValues);
assertEquals(2, resumeCount.get());
});
}
/**
* Tests that implicit tag instrumentation works as expected.
*/
@Test
public void testImplicitTagInstrumentation() {
runInstrumentationTest((context, instrumenter) -> {
BytecodeRootNodes<ComplexCustomYieldTestRootNode> nodes = ComplexCustomYieldTestRootNodeGen.create(BytecodeDSLTestLanguage.REF.get(null), BytecodeConfig.DEFAULT, b -> {
b.beginRoot();
b.beginReturn();
b.beginAddConstantsYield(1);
b.emitLoadArgument(0);
b.endAddConstantsYield(1);
b.endReturn();
b.endRoot();
});
ComplexCustomYieldTestRootNode root = nodes.getNode(0);
root.getBytecodeNode().setUncachedThreshold(4);
assertEquals(BytecodeTier.UNCACHED, root.getBytecodeNode().getTier());
ContinuationResult cont = (ContinuationResult) root.getCallTarget().call(40);
assertEquals(42, cont.getResult());
assertEquals(BytecodeTier.UNCACHED, cont.getContinuationRootNode().getSourceRootNode().getBytecodeNode().getTier());
assertEquals(123, cont.continueWith(123));
List<Object> yieldValues = new ArrayList<>();
AtomicInteger resumeCount = new AtomicInteger();
instrumenter.attachExecutionEventFactory(SourceSectionFilter.newBuilder().tagIs(StatementTag.class).build(), createFactory(yieldValues, resumeCount));
assertEquals(BytecodeTier.UNCACHED, root.getBytecodeNode().getTier());
cont = (ContinuationResult) root.getCallTarget().call(40);
assertEquals(42, cont.getResult());
assertEquals(BytecodeTier.UNCACHED, cont.getContinuationRootNode().getSourceRootNode().getBytecodeNode().getTier());
assertEquals(123, cont.continueWith(123));
assertEquals(BytecodeTier.CACHED, root.getBytecodeNode().getTier());
cont = (ContinuationResult) root.getCallTarget().call(19);
assertEquals(21, cont.getResult());
assertEquals(BytecodeTier.CACHED, cont.getContinuationRootNode().getSourceRootNode().getBytecodeNode().getTier());
assertEquals(456, cont.continueWith(456));
assertEquals(List.of(40, 19), yieldValues);
assertEquals(2, resumeCount.get());
});
}
@Test
public void testYieldQuickeningRegressionTest() {
/*
* Regression test for a quickening bug. The yield's childBci calculation did not account
* for tag.resume instructions, and under the right circumstances, the parent operation
* would "quicken" an instruction operand, leading to unexpected results.
*/
runInstrumentationTest((context, instrumenter) -> {
BytecodeRootNodes<ComplexCustomYieldTestRootNode> nodes = ComplexCustomYieldTestRootNodeGen.create(BytecodeDSLTestLanguage.REF.get(null), BytecodeConfig.DEFAULT, b -> {
b.beginRoot();
b.emitLoadConstant(42);
b.beginReturn();
b.beginAddConstantsYield(1);
b.emitLoadArgument(0);
b.endAddConstantsYield(1);
b.endReturn();
b.endRoot();
});
ComplexCustomYieldTestRootNode root = nodes.getNode(0);
root.getBytecodeNode().setUncachedThreshold(0);
List<Object> yieldValues = new ArrayList<>();
AtomicInteger resumeCount = new AtomicInteger();
instrumenter.attachExecutionEventFactory(SourceSectionFilter.newBuilder().tagIs(StatementTag.class).build(), createFactory(yieldValues, resumeCount));
ContinuationResult cont = (ContinuationResult) root.getCallTarget().call(0);
assertEquals(2, cont.getResult());
assertEquals(123, cont.continueWith(123));
cont = (ContinuationResult) root.getCallTarget().call(0);
assertEquals(2, cont.getResult());
assertEquals(123, cont.continueWith(123));
});
}
private static void runInstrumentationTest(BiConsumer<Context, Instrumenter> test) {
Context context = Context.create(BytecodeDSLTestLanguage.ID);
try {
context.initialize(BytecodeDSLTestLanguage.ID);
context.enter();
Instrumenter instrumenter = context.getEngine().getInstruments().get(CustomYieldTestInstrument.ID).lookup(Instrumenter.class);
// run the test
test.accept(context, instrumenter);
} finally {
context.close();
}
}
private static ExecutionEventNodeFactory createFactory(List<Object> yieldValues, AtomicInteger resumeCount) {
return (e) -> {
return new ExecutionEventNode() {
@Override
public void onYield(VirtualFrame frame, Object value) {
yieldValues.add(value);
}
@Override
protected void onResume(VirtualFrame frame) {
resumeCount.getAndIncrement();
}
};
};
}
/**
* A root node with most Bytecode DSL features enabled (uncached, tag instrumentation,
* serialization, BE).
*/
@GenerateBytecode(languageClass = BytecodeDSLTestLanguage.class, enableYield = true, enableUncachedInterpreter = true, enableTagInstrumentation = true, enableSerialization = true, boxingEliminationTypes = {
int.class})
public abstract static class ComplexCustomYieldTestRootNode extends DebugBytecodeRootNode implements BytecodeRootNode {
protected ComplexCustomYieldTestRootNode(BytecodeDSLTestLanguage language, FrameDescriptor frameDescriptor) {
super(language, frameDescriptor);
}
@Operation
public static final class AddMany {
@Specialization
static int add(int w, int x, int y, int z) {
return w + x + y + z;
}
}
@Yield
public static final class NoResultYield {
@Specialization
public static Object doNoArgs(@Bind ContinuationRootNode root, @Bind MaterializedFrame frame) {
return new CustomYieldResult(root, frame, "no result");
}
}
@Yield(tags = {StatementTag.class})
@ConstantOperand(type = int.class, name = "addend1")
@ConstantOperand(type = int.class, name = "addend2", specifyAtEnd = true)
public static final class AddConstantsYield {
@Specialization
public static Object doInt(int addend1, int result, int addend2, @Bind ContinuationRootNode root, @Bind MaterializedFrame frame) {
return ContinuationResult.create(root, frame, addend1 + result + addend2);
}
@Specialization
public static Object doString(int addend1, String result, int addend2, @Bind ContinuationRootNode root, @Bind MaterializedFrame frame) {
return ContinuationResult.create(root, frame, addend1 + result + addend2);
}
}
}
@TruffleInstrument.Registration(id = CustomYieldTestInstrument.ID, services = Instrumenter.class)
public static class CustomYieldTestInstrument extends TruffleInstrument {
public static final String ID = "CustomYieldTestInstrument";
@Override
protected void onCreate(Env env) {
env.registerService(env.getInstrumenter());
}
}
record CustomYieldResult(ContinuationRootNode root, MaterializedFrame frame, Object value) implements TruffleObject {
public Object continueWith(Object resumeValue) {
return root.getCallTarget().call(frame, resumeValue);
}
}
static final BytecodeSerializer SERIALIZER = new BytecodeSerializer() {
public void serialize(SerializerContext context, DataOutput buffer, Object object) throws IOException {
if (object instanceof Integer i) {
buffer.writeInt(i.intValue());
} else {
throw new IllegalArgumentException("Unsupported constant " + object);
}
}
};
static final BytecodeDeserializer DESERIALIZER = new BytecodeDeserializer() {
public Object deserialize(DeserializerContext context, DataInput buffer) throws IOException {
return buffer.readInt();
}
};
@Test
public void testCustomYieldWithBEableReturnType() {
for (var instructionsClass : CustomYieldWithBEableReturnTypeTestGen.class.getDeclaredClasses()) {
if (!instructionsClass.getSimpleName().equals("Instructions")) {
continue;
}
String baseName = "CUSTOM_YIELD_WITH_BEABLE_RETURN_TYPE";
List<String> yieldInstructions = Stream.of(instructionsClass.getDeclaredFields()).map(Field::getName).filter(name -> name.contains(baseName)).toList();
if (yieldInstructions.size() != 1) {
fail("Expected one instruction for custom yield, but %d were found (%s). Was a return-type BE variant generated?".formatted(yieldInstructions.size(), yieldInstructions));
}
return;
}
fail("Could not find Instructions class");
}
    // Interpreter spec used by testCustomYieldWithBEableReturnType: declares a custom yield
    // whose return type (int) is in the boxing-elimination set, to verify the processor does
    // NOT generate a BE instruction variant for it. Code must stay as-is — the annotation
    // processor consumes this class verbatim.
    @GenerateBytecode(languageClass = ErrorLanguage.class, boxingEliminationTypes = {int.class})
    public abstract static class CustomYieldWithBEableReturnTypeTest extends RootNode implements BytecodeRootNode {
        protected CustomYieldWithBEableReturnTypeTest(ErrorLanguage language, FrameDescriptor frameDescriptor) {
            super(language, frameDescriptor);
        }
        @Yield
        public static final class CustomYieldWithBEableReturnType {
            @Specialization
            public static int doYield() {
                return 42;
            }
        }
    }
    // Negative-test spec: each nested operation is intentionally malformed and annotated with
    // @ExpectError carrying the exact diagnostic the annotation processor must emit. The
    // @ExpectError message strings must match the processor output byte-for-byte.
    @SuppressWarnings("unused")
    @GenerateBytecode(languageClass = ErrorLanguage.class)
    public abstract static class InvalidYieldTest extends RootNode implements BytecodeRootNode {
        protected InvalidYieldTest(ErrorLanguage language, FrameDescriptor frameDescriptor) {
            super(language, frameDescriptor);
        }
        // Valid operation included so the class still has at least one well-formed operation.
        @Operation
        public static final class Add {
            @Specialization
            static int add(int x, int y) {
                return x + y;
            }
        }
        // Invalid: a yield must produce a value to hand to the caller.
        @ExpectError("A @Yield cannot be void. It must return a value, which becomes the result yielded to the caller.")
        @Yield
        public static final class CustomYieldBadReturnType {
            @Specialization
            public static void doYield(Object result, @Bind ContinuationRootNode root, @Bind MaterializedFrame frame) {
            }
        }
        // Invalid: yields accept at most one dynamic operand; this declares two.
        @ExpectError("A @Yield must take zero or one dynamic operands.")
        @Yield
        public static final class CustomYieldTooManyOperands {
            @Specialization
            public static Object doInt(int arg1, int arg2, @Bind ContinuationRootNode root, @Bind MaterializedFrame frame) {
                return null;
            }
        }
        // Invalid: @Variadic is restricted to plain @Operation classes.
        @ExpectError("@Variadic can only be used on @Operation classes.")
        @Variadic
        @Yield
        public static final class CustomVariadicYield {
            @Specialization
            public static Object[] doYield(Object[] result) {
                return result;
            }
        }
        // Invalid: binding a ContinuationRootNode is only meaningful inside a @Yield.
        @Operation
        public static final class OperationBadBind {
            @Specialization
            public static void doYield(
                            @ExpectError("This expression binds a continuation root node, which can only be bound in a @Yield. " +
                                            "Remove this bind expression or redefine the operation as a @Yield to resolve this error.")//
                            @Bind ContinuationRootNode root) {
            }
        }
    }
}
|
googleapis/google-cloud-java | 37,974 | java-retail/proto-google-cloud-retail-v2/src/main/java/com/google/cloud/retail/v2/CreateServingConfigRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/retail/v2/serving_config_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.retail.v2;
/**
*
*
* <pre>
* Request for CreateServingConfig method.
* </pre>
*
* Protobuf type {@code google.cloud.retail.v2.CreateServingConfigRequest}
*/
public final class CreateServingConfigRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.retail.v2.CreateServingConfigRequest)
CreateServingConfigRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use CreateServingConfigRequest.newBuilder() to construct.
private CreateServingConfigRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private CreateServingConfigRequest() {
parent_ = "";
servingConfigId_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new CreateServingConfigRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.retail.v2.ServingConfigServiceProto
.internal_static_google_cloud_retail_v2_CreateServingConfigRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.retail.v2.ServingConfigServiceProto
.internal_static_google_cloud_retail_v2_CreateServingConfigRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.retail.v2.CreateServingConfigRequest.class,
com.google.cloud.retail.v2.CreateServingConfigRequest.Builder.class);
}
private int bitField0_;
public static final int PARENT_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. Full resource name of parent. Format:
* `projects/{project_number}/locations/{location_id}/catalogs/{catalog_id}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
@java.lang.Override
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. Full resource name of parent. Format:
* `projects/{project_number}/locations/{location_id}/catalogs/{catalog_id}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int SERVING_CONFIG_FIELD_NUMBER = 2;
private com.google.cloud.retail.v2.ServingConfig servingConfig_;
/**
*
*
* <pre>
* Required. The ServingConfig to create.
* </pre>
*
* <code>
* .google.cloud.retail.v2.ServingConfig serving_config = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the servingConfig field is set.
*/
@java.lang.Override
public boolean hasServingConfig() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The ServingConfig to create.
* </pre>
*
* <code>
* .google.cloud.retail.v2.ServingConfig serving_config = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The servingConfig.
*/
@java.lang.Override
public com.google.cloud.retail.v2.ServingConfig getServingConfig() {
return servingConfig_ == null
? com.google.cloud.retail.v2.ServingConfig.getDefaultInstance()
: servingConfig_;
}
/**
*
*
* <pre>
* Required. The ServingConfig to create.
* </pre>
*
* <code>
* .google.cloud.retail.v2.ServingConfig serving_config = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.retail.v2.ServingConfigOrBuilder getServingConfigOrBuilder() {
return servingConfig_ == null
? com.google.cloud.retail.v2.ServingConfig.getDefaultInstance()
: servingConfig_;
}
public static final int SERVING_CONFIG_ID_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object servingConfigId_ = "";
/**
*
*
* <pre>
* Required. The ID to use for the ServingConfig, which will become the final
* component of the ServingConfig's resource name.
*
* This value should be 4-63 characters, and valid characters
* are /[a-z][0-9]-_/.
* </pre>
*
* <code>string serving_config_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The servingConfigId.
*/
@java.lang.Override
public java.lang.String getServingConfigId() {
java.lang.Object ref = servingConfigId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
servingConfigId_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. The ID to use for the ServingConfig, which will become the final
* component of the ServingConfig's resource name.
*
* This value should be 4-63 characters, and valid characters
* are /[a-z][0-9]-_/.
* </pre>
*
* <code>string serving_config_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for servingConfigId.
*/
@java.lang.Override
public com.google.protobuf.ByteString getServingConfigIdBytes() {
java.lang.Object ref = servingConfigId_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
servingConfigId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
  // -1 = not yet computed, 1 = initialized, 0 = not initialized (memoization cache).
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    // proto3 message with no required fields: always initialized; result is cached.
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // proto3 semantics: scalar fields at their default (empty string) are not serialized.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    // serving_config (field 2) is a message; presence is tracked via bitField0_ bit 0.
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(2, getServingConfig());
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(servingConfigId_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, servingConfigId_);
    }
    // Preserve any fields parsed from a newer schema version.
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // Size is memoized; -1 marks "not yet computed". Safe because the message is immutable.
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    // Mirrors writeTo: only fields that would actually be written contribute to the size.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getServingConfig());
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(servingConfigId_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, servingConfigId_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.retail.v2.CreateServingConfigRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.retail.v2.CreateServingConfigRequest other =
        (com.google.cloud.retail.v2.CreateServingConfigRequest) obj;
    // Field-by-field comparison; serving_config is compared only when present on both.
    if (!getParent().equals(other.getParent())) return false;
    if (hasServingConfig() != other.hasServingConfig()) return false;
    if (hasServingConfig()) {
      if (!getServingConfig().equals(other.getServingConfig())) return false;
    }
    if (!getServingConfigId().equals(other.getServingConfigId())) return false;
    // Unknown fields participate in equality so round-tripped messages compare equal.
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // Memoized; 0 doubles as the "not computed" sentinel (recomputed if hash is truly 0).
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    // Folds in field number + value for each field, matching the structure of equals().
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    if (hasServingConfig()) {
      hash = (37 * hash) + SERVING_CONFIG_FIELD_NUMBER;
      hash = (53 * hash) + getServingConfig().hashCode();
    }
    hash = (37 * hash) + SERVING_CONFIG_ID_FIELD_NUMBER;
    hash = (53 * hash) + getServingConfigId().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
public static com.google.cloud.retail.v2.CreateServingConfigRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.retail.v2.CreateServingConfigRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.retail.v2.CreateServingConfigRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.retail.v2.CreateServingConfigRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.retail.v2.CreateServingConfigRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.retail.v2.CreateServingConfigRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.retail.v2.CreateServingConfigRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.retail.v2.CreateServingConfigRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.retail.v2.CreateServingConfigRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.retail.v2.CreateServingConfigRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.retail.v2.CreateServingConfigRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.retail.v2.CreateServingConfigRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.retail.v2.CreateServingConfigRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request for CreateServingConfig method.
* </pre>
*
* Protobuf type {@code google.cloud.retail.v2.CreateServingConfigRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.retail.v2.CreateServingConfigRequest)
com.google.cloud.retail.v2.CreateServingConfigRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.retail.v2.ServingConfigServiceProto
.internal_static_google_cloud_retail_v2_CreateServingConfigRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.retail.v2.ServingConfigServiceProto
.internal_static_google_cloud_retail_v2_CreateServingConfigRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.retail.v2.CreateServingConfigRequest.class,
com.google.cloud.retail.v2.CreateServingConfigRequest.Builder.class);
}
// Construct using com.google.cloud.retail.v2.CreateServingConfigRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getServingConfigFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
parent_ = "";
servingConfig_ = null;
if (servingConfigBuilder_ != null) {
servingConfigBuilder_.dispose();
servingConfigBuilder_ = null;
}
servingConfigId_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.retail.v2.ServingConfigServiceProto
.internal_static_google_cloud_retail_v2_CreateServingConfigRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.retail.v2.CreateServingConfigRequest getDefaultInstanceForType() {
return com.google.cloud.retail.v2.CreateServingConfigRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.retail.v2.CreateServingConfigRequest build() {
com.google.cloud.retail.v2.CreateServingConfigRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.retail.v2.CreateServingConfigRequest buildPartial() {
com.google.cloud.retail.v2.CreateServingConfigRequest result =
new com.google.cloud.retail.v2.CreateServingConfigRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
    // Copies only the builder fields whose "has been set" bit is on into the result.
    // Builder bits: 0x1 = parent, 0x2 = serving_config, 0x4 = serving_config_id.
    // Only serving_config (a message field) carries presence into the message's bitField0_.
    private void buildPartial0(com.google.cloud.retail.v2.CreateServingConfigRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.parent_ = parent_;
      }
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.servingConfig_ =
            servingConfigBuilder_ == null ? servingConfig_ : servingConfigBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.servingConfigId_ = servingConfigId_;
      }
      result.bitField0_ |= to_bitField0_;
    }
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.retail.v2.CreateServingConfigRequest) {
return mergeFrom((com.google.cloud.retail.v2.CreateServingConfigRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
    // Merges a typed message into this builder. Non-empty scalars from `other` overwrite
    // this builder's values; the serving_config message is merged field-by-field.
    public Builder mergeFrom(com.google.cloud.retail.v2.CreateServingConfigRequest other) {
      if (other == com.google.cloud.retail.v2.CreateServingConfigRequest.getDefaultInstance())
        return this;
      if (!other.getParent().isEmpty()) {
        parent_ = other.parent_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (other.hasServingConfig()) {
        mergeServingConfig(other.getServingConfig());
      }
      if (!other.getServingConfigId().isEmpty()) {
        servingConfigId_ = other.servingConfigId_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
@java.lang.Override
public final boolean isInitialized() {
return true;
}
    // Streaming parse loop. Tag values encode (field_number << 3) | wire_type, so
    // 10/18/26 are fields 1/2/3 with length-delimited wire type 2.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                parent_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                input.readMessage(getServingConfigFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 26:
              {
                servingConfigId_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            default:
              {
                // Unrecognized tags are stashed in unknown fields for forward compatibility.
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        // onChanged runs even on failure so nested builders observe partial updates.
        onChanged();
      } // finally
      return this;
    }
private int bitField0_;
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. Full resource name of parent. Format:
* `projects/{project_number}/locations/{location_id}/catalogs/{catalog_id}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. Full resource name of parent. Format:
* `projects/{project_number}/locations/{location_id}/catalogs/{catalog_id}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. Full resource name of parent. Format:
* `projects/{project_number}/locations/{location_id}/catalogs/{catalog_id}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
public Builder setParent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Full resource name of parent. Format:
* `projects/{project_number}/locations/{location_id}/catalogs/{catalog_id}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearParent() {
parent_ = getDefaultInstance().getParent();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Full resource name of parent. Format:
* `projects/{project_number}/locations/{location_id}/catalogs/{catalog_id}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
*/
public Builder setParentBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private com.google.cloud.retail.v2.ServingConfig servingConfig_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.retail.v2.ServingConfig,
com.google.cloud.retail.v2.ServingConfig.Builder,
com.google.cloud.retail.v2.ServingConfigOrBuilder>
servingConfigBuilder_;
/**
*
*
* <pre>
* Required. The ServingConfig to create.
* </pre>
*
* <code>
* .google.cloud.retail.v2.ServingConfig serving_config = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the servingConfig field is set.
*/
public boolean hasServingConfig() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. The ServingConfig to create.
* </pre>
*
* <code>
* .google.cloud.retail.v2.ServingConfig serving_config = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The servingConfig.
*/
public com.google.cloud.retail.v2.ServingConfig getServingConfig() {
if (servingConfigBuilder_ == null) {
return servingConfig_ == null
? com.google.cloud.retail.v2.ServingConfig.getDefaultInstance()
: servingConfig_;
} else {
return servingConfigBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. The ServingConfig to create.
* </pre>
*
* <code>
* .google.cloud.retail.v2.ServingConfig serving_config = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setServingConfig(com.google.cloud.retail.v2.ServingConfig value) {
if (servingConfigBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
servingConfig_ = value;
} else {
servingConfigBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The ServingConfig to create.
* </pre>
*
* <code>
* .google.cloud.retail.v2.ServingConfig serving_config = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setServingConfig(
com.google.cloud.retail.v2.ServingConfig.Builder builderForValue) {
if (servingConfigBuilder_ == null) {
servingConfig_ = builderForValue.build();
} else {
servingConfigBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The ServingConfig to create.
* </pre>
*
* <code>
* .google.cloud.retail.v2.ServingConfig serving_config = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    /**
     *
     *
     * <pre>
     * Required. The ServingConfig to create.
     * </pre>
     *
     * <code>
     * .google.cloud.retail.v2.ServingConfig serving_config = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder mergeServingConfig(com.google.cloud.retail.v2.ServingConfig value) {
      if (servingConfigBuilder_ == null) {
        // Merge into the existing message only if one is already set and non-default;
        // otherwise adopt `value` wholesale.
        if (((bitField0_ & 0x00000002) != 0)
            && servingConfig_ != null
            && servingConfig_ != com.google.cloud.retail.v2.ServingConfig.getDefaultInstance()) {
          getServingConfigBuilder().mergeFrom(value);
        } else {
          servingConfig_ = value;
        }
      } else {
        servingConfigBuilder_.mergeFrom(value);
      }
      if (servingConfig_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }
/**
*
*
* <pre>
* Required. The ServingConfig to create.
* </pre>
*
* <code>
* .google.cloud.retail.v2.ServingConfig serving_config = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearServingConfig() {
bitField0_ = (bitField0_ & ~0x00000002);
servingConfig_ = null;
if (servingConfigBuilder_ != null) {
servingConfigBuilder_.dispose();
servingConfigBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The ServingConfig to create.
* </pre>
*
* <code>
* .google.cloud.retail.v2.ServingConfig serving_config = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.retail.v2.ServingConfig.Builder getServingConfigBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getServingConfigFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. The ServingConfig to create.
* </pre>
*
* <code>
* .google.cloud.retail.v2.ServingConfig serving_config = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.retail.v2.ServingConfigOrBuilder getServingConfigOrBuilder() {
if (servingConfigBuilder_ != null) {
return servingConfigBuilder_.getMessageOrBuilder();
} else {
return servingConfig_ == null
? com.google.cloud.retail.v2.ServingConfig.getDefaultInstance()
: servingConfig_;
}
}
/**
*
*
* <pre>
* Required. The ServingConfig to create.
* </pre>
*
* <code>
* .google.cloud.retail.v2.ServingConfig serving_config = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    // Lazily creates the nested-builder wrapper for serving_config. Once created, the
    // builder owns the value, so the plain servingConfig_ reference is cleared to null.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.retail.v2.ServingConfig,
            com.google.cloud.retail.v2.ServingConfig.Builder,
            com.google.cloud.retail.v2.ServingConfigOrBuilder>
        getServingConfigFieldBuilder() {
      if (servingConfigBuilder_ == null) {
        servingConfigBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.retail.v2.ServingConfig,
                com.google.cloud.retail.v2.ServingConfig.Builder,
                com.google.cloud.retail.v2.ServingConfigOrBuilder>(
                getServingConfig(), getParentForChildren(), isClean());
        servingConfig_ = null;
      }
      return servingConfigBuilder_;
    }
private java.lang.Object servingConfigId_ = "";
/**
*
*
* <pre>
* Required. The ID to use for the ServingConfig, which will become the final
* component of the ServingConfig's resource name.
*
* This value should be 4-63 characters, and valid characters
* are /[a-z][0-9]-_/.
* </pre>
*
* <code>string serving_config_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The servingConfigId.
*/
public java.lang.String getServingConfigId() {
java.lang.Object ref = servingConfigId_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
servingConfigId_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The ID to use for the ServingConfig, which will become the final
* component of the ServingConfig's resource name.
*
* This value should be 4-63 characters, and valid characters
* are /[a-z][0-9]-_/.
* </pre>
*
* <code>string serving_config_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for servingConfigId.
*/
public com.google.protobuf.ByteString getServingConfigIdBytes() {
java.lang.Object ref = servingConfigId_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
servingConfigId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The ID to use for the ServingConfig, which will become the final
* component of the ServingConfig's resource name.
*
* This value should be 4-63 characters, and valid characters
* are /[a-z][0-9]-_/.
* </pre>
*
* <code>string serving_config_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The servingConfigId to set.
* @return This builder for chaining.
*/
public Builder setServingConfigId(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
servingConfigId_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The ID to use for the ServingConfig, which will become the final
* component of the ServingConfig's resource name.
*
* This value should be 4-63 characters, and valid characters
* are /[a-z][0-9]-_/.
* </pre>
*
* <code>string serving_config_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return This builder for chaining.
*/
public Builder clearServingConfigId() {
servingConfigId_ = getDefaultInstance().getServingConfigId();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
    /**
     *
     *
     * <pre>
     * Required. The ID to use for the ServingConfig, which will become the final
     * component of the ServingConfig's resource name.
     *
     * This value should be 4-63 characters, and valid characters
     * are /[a-z][0-9]-_/.
     * </pre>
     *
     * <code>string serving_config_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @param value The bytes for servingConfigId to set.
     * @return This builder for chaining.
     * @throws NullPointerException if {@code value} is null.
     */
    public Builder setServingConfigIdBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      // proto3 string fields must carry valid UTF-8; reject malformed bytes early.
      checkByteStringIsUtf8(value);
      servingConfigId_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      // Generated pass-through to the protobuf superclass; kept final per codegen contract.
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      // Merges wire-format fields this schema does not know about into the builder.
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.retail.v2.CreateServingConfigRequest)
}
  // @@protoc_insertion_point(class_scope:google.cloud.retail.v2.CreateServingConfigRequest)
  /** Shared immutable default instance; all unset message fields resolve to its values. */
  private static final com.google.cloud.retail.v2.CreateServingConfigRequest DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.retail.v2.CreateServingConfigRequest();
  }
  /** @return the singleton default (all-fields-default) instance of this message. */
  public static com.google.cloud.retail.v2.CreateServingConfigRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  /** Wire-format parser; delegates to the builder's mergeFrom for the actual field decoding. */
  private static final com.google.protobuf.Parser<CreateServingConfigRequest> PARSER =
      new com.google.protobuf.AbstractParser<CreateServingConfigRequest>() {
        @java.lang.Override
        public CreateServingConfigRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            // Attach whatever was successfully parsed so callers can inspect partial data.
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O failures in the protobuf exception type the Parser API declares.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  /** @return the shared parser for this message type. */
  public static com.google.protobuf.Parser<CreateServingConfigRequest> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<CreateServingConfigRequest> getParserForType() {
    // Instance accessor required by the MessageLite interface; same singleton as parser().
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.retail.v2.CreateServingConfigRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/systemds | 37,214 | src/main/java/org/apache/sysds/runtime/compress/colgroup/AColGroup.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.sysds.runtime.compress.colgroup;
import java.io.DataOutput;
import java.io.IOException;
import java.io.Serializable;
import java.util.Collection;
import java.util.List;
import java.util.concurrent.ExecutorService;
import org.apache.commons.lang3.NotImplementedException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.sysds.runtime.compress.colgroup.ColGroupUtils.P;
import org.apache.sysds.runtime.compress.CompressionSettings;
import org.apache.sysds.runtime.compress.CompressionSettingsBuilder;
import org.apache.sysds.runtime.compress.colgroup.indexes.ColIndexFactory;
import org.apache.sysds.runtime.compress.colgroup.indexes.IColIndex;
import org.apache.sysds.runtime.compress.colgroup.indexes.IColIndex.SliceResult;
import org.apache.sysds.runtime.compress.colgroup.scheme.ICLAScheme;
import org.apache.sysds.runtime.compress.cost.ComputationCostEstimator;
import org.apache.sysds.runtime.compress.estim.CompressedSizeInfo;
import org.apache.sysds.runtime.compress.estim.CompressedSizeInfoColGroup;
import org.apache.sysds.runtime.compress.estim.encoding.IEncode;
import org.apache.sysds.runtime.compress.lib.CLALibCombineGroups;
import org.apache.sysds.runtime.data.DenseBlock;
import org.apache.sysds.runtime.data.SparseBlock;
import org.apache.sysds.runtime.data.SparseBlockMCSR;
import org.apache.sysds.runtime.functionobjects.Plus;
import org.apache.sysds.runtime.instructions.cp.CM_COV_Object;
import org.apache.sysds.runtime.matrix.data.MatrixBlock;
import org.apache.sysds.runtime.matrix.operators.AggregateUnaryOperator;
import org.apache.sysds.runtime.matrix.operators.BinaryOperator;
import org.apache.sysds.runtime.matrix.operators.CMOperator;
import org.apache.sysds.runtime.matrix.operators.ScalarOperator;
import org.apache.sysds.runtime.matrix.operators.UnaryOperator;
/**
* Abstract Class that is the lowest class type for the Compression framework.
*
* AColGroup store information about a number of columns.
*
*/
public abstract class AColGroup implements Serializable {
	/** Shared logger for all column-group implementations. */
	protected static final Log LOG = LogFactory.getLog(AColGroup.class.getName());
	// Fixed id for Java serialization; the compressed on-disk format uses write(DataOutput) instead.
	private static final long serialVersionUID = -1318908671481L;
/** Public super types of compression ColGroups supported */
public static enum CompressionType {
UNCOMPRESSED, RLE, OLE, DDC, CONST, EMPTY, SDC, SDCFOR, DDCFOR, DeltaDDC, LinearFunctional;
public boolean isDense() {
return this == DDC || this == CONST || this == DDCFOR || this == DDCFOR;
}
public boolean isConst() {
return this == CONST || this == EMPTY;
}
public boolean isSDC() {
return this == SDC;
}
}
	/**
	 * Concrete ColGroupType
	 *
	 * Protected such that outside the ColGroup package it should be unknown which specific subtype is used.
	 *
	 * NOTE: the enum ordinal is written to disk as the type tag in write(DataOutput);
	 * do not reorder or remove entries without a serialization version change.
	 */
	protected static enum ColGroupType {
		UNCOMPRESSED, RLE, OLE, DDC, CONST, EMPTY, SDC, SDCSingle, SDCSingleZeros, SDCZeros, SDCFOR, DDCFOR, DeltaDDC,
		LinearFunctional;
	}
	/** The ColGroup indexes contained in the ColGroup */
	protected final IColIndex _colIndexes;
	/**
	 * Main constructor.
	 *
	 * @param colIndices offsets of the columns in the matrix block that make up the group
	 */
	protected AColGroup(IColIndex colIndices) {
		_colIndexes = colIndices;
	}
	/**
	 * Obtain the offsets of the columns in the matrix block that make up the group
	 *
	 * @return offsets of the columns in the matrix block that make up the group
	 */
	public final IColIndex getColIndices() {
		return _colIndexes;
	}
	/**
	 * Obtain the number of columns in this column group.
	 *
	 * @return number of columns in this column group
	 */
	public final int getNumCols() {
		return _colIndexes.size();
	}
	/**
	 * Shift all column indexes contained by an offset.
	 *
	 * This is used for rbind to combine compressed matrices.
	 *
	 * NOTE(review): shifting column indexes is what a column-wise (cbind) combine needs;
	 * the "rbind" mention above may be a documentation slip -- confirm against callers.
	 *
	 * Since column indexes are reused between operations, we allocate a new list here to be safe
	 *
	 * @param offset The offset to move all columns
	 * @return A new column group object with the shifted columns
	 */
	public final AColGroup shiftColIndices(int offset) {
		return copyAndSet(_colIndexes.shift(offset));
	}
	/**
	 * Copy the content of the column group with pointers to the previous content but with new column given Note this
	 * method does not verify if the colIndexes specified are valid and correct dimensions for the underlying column
	 * groups.
	 *
	 * @param colIndexes the new indexes to use in the copy
	 * @return a new object with pointers to underlying data.
	 */
	public abstract AColGroup copyAndSet(IColIndex colIndexes);
/**
* Get the upper bound estimate of in memory allocation for the column group.
*
* @return an upper bound on the number of bytes used to store this ColGroup in memory.
*/
public long estimateInMemorySize() {
long size = 16; // object header
size += _colIndexes.estimateInMemorySize();
return size;
}
	/**
	 * Decompress a range of rows into a sparse block
	 *
	 * Note that this is using append, so the sparse column indexes need to be sorted afterwards.
	 *
	 * @param sb Sparse Target block
	 * @param rl Row to start at
	 * @param ru Row to end at
	 */
	public final void decompressToSparseBlock(SparseBlock sb, int rl, int ru) {
		// Delegate with zero row/column offsets into the target block.
		decompressToSparseBlock(sb, rl, ru, 0, 0);
	}
	/**
	 * Decompress a range of rows into a dense block
	 *
	 * @param db Dense target block
	 * @param rl Row to start at
	 * @param ru Row to end at
	 */
	public final void decompressToDenseBlock(DenseBlock db, int rl, int ru) {
		// Delegate with zero row/column offsets into the target block.
		decompressToDenseBlock(db, rl, ru, 0, 0);
	}
	/**
	 * Decompress a range of rows into a dense transposed block.
	 *
	 * @param db Dense target block
	 * @param rl Row in this column group to start at.
	 * @param ru Row in this column group to end at.
	 */
	public abstract void decompressToDenseBlockTransposed(DenseBlock db, int rl, int ru);
	/**
	 * Decompress the column group to the sparse transposed block. Note that the column groups would only need to
	 * decompress into specific sub rows of the Sparse block
	 *
	 * @param sb Sparse target block
	 * @param nColOut The number of columns in the sb.
	 */
	public abstract void decompressToSparseBlockTransposed(SparseBlockMCSR sb, int nColOut);
/**
* Serializes column group to data output.
*
* @param out data output
* @throws IOException if IOException occurs
*/
protected void write(DataOutput out) throws IOException {
final byte[] o = new byte[1];
o[0] = (byte) getColGroupType().ordinal();
out.write(o);
_colIndexes.write(out);
}
/**
* Returns the exact serialized size of column group. This can be used for example for buffer preallocation.
*
* @return exact serialized size for column group
*/
public long getExactSizeOnDisk() {
long ret = 0;
ret += 1; // type info (colGroup ordinal)
ret += _colIndexes.getExactSizeOnDisk();
return ret;
}
	/**
	 * Slice out the columns within the range of cl and cu to remove the dictionary values related to these columns. If
	 * the ColGroup slicing from does not contain any columns within the range null is returned.
	 *
	 * @param cl The lower bound of the columns to select
	 * @param cu The upper bound of the columns to select (not inclusive).
	 * @return A cloned Column Group, with a copied pointer to the old column groups index structure, but reduced
	 *         dictionary and _columnIndexes correctly aligned with the expected sliced compressed matrix.
	 */
	public final AColGroup sliceColumns(int cl, int cu) {
		// Fast path: the requested range fully covers all columns of this group.
		if(cl <= _colIndexes.get(0) && cu > _colIndexes.get(_colIndexes.size() - 1)) {
			if(cl == 0)
				return this; // indexes already aligned with the sliced output
			else
				return this.shiftColIndices(-cl); // realign indexes to start of the sliced output
		}
		else if(cu - cl == 1)
			return sliceColumn(cl); // single-column request
		else
			return sliceMultiColumns(cl, cu);
	}
/**
* Slice out a single column from the column group.
*
* @param col The column to slice, the column could potentially not be inside the column group
* @return A new column group that is a single column, if the column requested is not in this column group null is
* returned.
*/
public final AColGroup sliceColumn(int col) {
int idx = _colIndexes.findIndex(col);
if(idx >= 0)
return sliceSingleColumn(idx);
else
return null;
}
/**
* Slice out multiple columns within the interval between the given indexes.
*
* @param cl The lower column index to slice from
* @param cu The upper column index to slice to, (not included)
* @return A column group of this containing the columns specified, returns null if the columns specified is not
* contained in the column group
*/
protected final AColGroup sliceMultiColumns(int cl, int cu) {
SliceResult sr = _colIndexes.slice(cl, cu);
if(sr.ret != null)
return sliceMultiColumns(sr.idStart, sr.idEnd, sr.ret);
else
return null;
}
/**
* Compute the column sum of the given list of groups
*
* @param groups The Groups to sum
* @param res The result to put the values into
* @param nRows The number of rows in the groups
* @return The given res list, where the sum of the column groups is added
*/
public static double[] colSum(Collection<AColGroup> groups, double[] res, int nRows) {
for(AColGroup g : groups)
g.computeColSums(res, nRows);
return res;
}
/**
* Get the value at a global row/column position.
*
* In general this performs since a binary search of colIndexes is performed for each lookup.
*
* @param r row
* @param c column
* @return value at the row/column position
*/
public double get(int r, int c) {
final int colIdx = _colIndexes.findIndex(c);
if(colIdx < 0)
return 0;
else
return getIdx(r, colIdx);
}
	/**
	 * Get the value at a colGroup specific row/column index position.
	 *
	 * @param r row
	 * @param colIdx column index in the _colIndexes.
	 * @return value at the row/column index position
	 */
	public abstract double getIdx(int r, int colIdx);
	/**
	 * Obtain number of distinct tuples in contained sets of values associated with this column group.
	 *
	 * If the column group is uncompressed the number of rows is returned.
	 *
	 * @return the number of distinct sets of values associated with the bitmaps in this column group
	 */
	public abstract int getNumValues();
	/**
	 * Obtain the compression type.
	 *
	 * @return How the elements of the column group are compressed.
	 */
	public abstract CompressionType getCompType();
	/**
	 * Internally get the specific type of ColGroup, this could be extracted from the object but that does not allow for
	 * nice switches in the code.
	 *
	 * @return ColGroupType of the object.
	 */
	protected abstract ColGroupType getColGroupType();
	/**
	 * Decompress into the DenseBlock. (no NNZ handling)
	 *
	 * @param db Target DenseBlock
	 * @param rl Row to start decompression from
	 * @param ru Row to end decompression at (not inclusive)
	 * @param offR Row offset into the target to decompress
	 * @param offC Column offset into the target to decompress
	 */
	public abstract void decompressToDenseBlock(DenseBlock db, int rl, int ru, int offR, int offC);
	/**
	 * Decompress into the SparseBlock. (no NNZ handling)
	 *
	 * Note this method allows calls to append since it is assumed that the sparse column indexes are sorted
	 * afterwards
	 *
	 * @param sb Target SparseBlock
	 * @param rl Row to start decompression from
	 * @param ru Row to end decompression at (not inclusive)
	 * @param offR Row offset into the target to decompress
	 * @param offC Column offset into the target to decompress
	 */
	public abstract void decompressToSparseBlock(SparseBlock sb, int rl, int ru, int offR, int offC);
	/**
	 * Right matrix multiplication with this column group.
	 *
	 * This method can return null, meaning that the output overlapping group would have been empty.
	 *
	 * @param right The MatrixBlock on the right of this matrix multiplication
	 * @return The new Column Group or null that is the result of the matrix multiplication.
	 */
	public final AColGroup rightMultByMatrix(MatrixBlock right) {
		// Single-threaded convenience overload, without a pre-materialized column list.
		return rightMultByMatrix(right, null, 1);
	}
	/**
	 * Right matrix multiplication with this column group.
	 *
	 * This method can return null, meaning that the output overlapping group would have been empty.
	 *
	 * @param right The MatrixBlock on the right of this matrix multiplication
	 * @param allCols A pre-materialized list of all col indexes, that can be shared across all column groups if use
	 *           full, can be set to null.
	 * @param k The parallelization degree allowed internally in this operation.
	 * @return The new Column Group or null that is the result of the matrix multiplication.
	 */
	public abstract AColGroup rightMultByMatrix(MatrixBlock right, IColIndex allCols, int k);
	/**
	 * Right side Matrix multiplication, iterating though this column group and adding to the ret
	 *
	 * @param right Right side matrix to multiply with.
	 * @param ret The return matrix to add results to
	 * @param rl The row of this column group to multiply from
	 * @param ru The row of this column group to multiply to (not inclusive)
	 * @param crl The right hand side column lower
	 * @param cru The right hand side column upper
	 * @param nRows The number of rows in this column group
	 */
	public void rightDecompressingMult(MatrixBlock right, MatrixBlock ret, int rl, int ru, int nRows, int crl, int cru){
		// Default: unsupported; subclasses that can multiply while decompressing override this.
		throw new NotImplementedException("not supporting right Decompressing Multiply on class: " + this.getClass().getSimpleName());
	}
	/**
	 * Do a transposed self matrix multiplication on the left side t(x) %*% x. but only with this column group.
	 *
	 * This gives better performance since there is no need to iterate through all the rows of the matrix, but the
	 * execution can be limited to its number of distinct values.
	 *
	 * Note it only calculate the upper triangle
	 *
	 * @param ret The return matrix block [numColumns x numColumns]
	 * @param nRows The number of rows in the column group
	 */
	public abstract void tsmm(MatrixBlock ret, int nRows);
	/**
	 * Left multiply with this column group.
	 *
	 * @param matrix The matrix to multiply with on the left
	 * @param result The result to output the values into, always dense for the purpose of the column groups
	 *           parallelizing
	 * @param rl The row to begin the multiplication from on the lhs matrix
	 * @param ru The row to end the multiplication at on the lhs matrix
	 * @param cl The column to begin the multiplication from on the lhs matrix
	 * @param cu The column to end the multiplication at on the lhs matrix
	 */
	public abstract void leftMultByMatrixNoPreAgg(MatrixBlock matrix, MatrixBlock result, int rl, int ru, int cl,
		int cu);
	/**
	 * Left side matrix multiplication with a column group that is transposed.
	 *
	 * @param lhs The left hand side Column group to multiply with, the left hand side should be considered
	 *           transposed. Also it should be guaranteed that this column group is not empty.
	 * @param result The result matrix to insert the result of the multiplication into
	 * @param nRows Number of rows in the lhs colGroup
	 */
	public abstract void leftMultByAColGroup(AColGroup lhs, MatrixBlock result, int nRows);
	/**
	 * Matrix multiply with this other column group, but:
	 *
	 * 1. Only output upper triangle values.
	 *
	 * 2. Multiply both ways with "this" being on the left and on the right.
	 *
	 * It should be guaranteed that the input is not the same as the caller of the method.
	 *
	 * The second step is achievable by treating the initial multiplied matrix, and adding its values to the correct
	 * locations in the output.
	 *
	 * @param other The other Column group to multiply with
	 * @param result The result matrix to put the results into
	 */
	public abstract void tsmmAColGroup(AColGroup other, MatrixBlock result);
	/**
	 * Perform the specified scalar operation directly on the compressed column group, without decompressing individual
	 * cells if possible.
	 *
	 * @param op operation to perform
	 * @return version of this column group with the operation applied
	 */
	public abstract AColGroup scalarOperation(ScalarOperator op);
	/**
	 * Perform a binary row operation.
	 *
	 * @param op The operation to execute
	 * @param v The vector of values to apply the values contained should be at least the length of the highest
	 *           value in the column index
	 * @param isRowSafe True if the binary op is applied to an entire zero row and all results are zero
	 * @return A updated column group with the new values.
	 */
	public abstract AColGroup binaryRowOpLeft(BinaryOperator op, double[] v, boolean isRowSafe);
	/**
	 * Short hand add operator call on column group to add a row vector to the column group
	 *
	 * @param v The vector to add
	 * @return A new column group where the vector is added.
	 */
	public AColGroup addVector(double[] v) {
		// Plus applied row-wise; isRowSafe=false since adding v can turn zeros nonzero.
		// NOTE(review): the constructor argument "1" appears to be BinaryOperator's
		// parallelism/k parameter -- confirm against BinaryOperator's definition.
		return binaryRowOpRight(new BinaryOperator(Plus.getPlusFnObject(), 1), v, false);
	}
	/**
	 * Perform a binary row operation.
	 *
	 * @param op The operation to execute
	 * @param v The vector of values to apply the values contained should be at least the length of the highest
	 *           value in the column index
	 * @param isRowSafe True if the binary op is applied to an entire zero row and all results are zero
	 * @return A updated column group with the new values.
	 */
	public abstract AColGroup binaryRowOpRight(BinaryOperator op, double[] v, boolean isRowSafe);
	/**
	 * Unary Aggregate operator, since aggregate operators require new object output, the output becomes an uncompressed
	 * matrix.
	 *
	 * The range of rl to ru only applies to row aggregates. (ReduceCol)
	 *
	 * @param op The operator used
	 * @param c The pre-allocated output array to aggregate into
	 * @param nRows The total number of rows in the Column Group
	 * @param rl The starting row to do aggregation from
	 * @param ru The last row to do aggregation to (not included)
	 */
	public abstract void unaryAggregateOperations(AggregateUnaryOperator op, double[] c, int nRows, int rl, int ru);
	/**
	 * Slice out column at specific index of this column group.
	 *
	 * It is guaranteed that the column to slice is contained in this columnGroup.
	 *
	 * @param idx The column index to slice out.
	 * @return A new column group containing the columns inside. (never null)
	 */
	protected abstract AColGroup sliceSingleColumn(int idx);
	/**
	 * Slice range of columns inside this column group.
	 *
	 * It is guaranteed that the columns to slice is contained in this columnGroup.
	 *
	 * @param idStart The column index to start at
	 * @param idEnd The column index to end at (not included)
	 * @param outputCols The output columns to extract materialized for ease of implementation
	 * @return The sliced ColGroup from this. (never null)
	 */
	protected abstract AColGroup sliceMultiColumns(int idStart, int idEnd, IColIndex outputCols);
	/**
	 * Slice range of rows out of the column group and return a new column group only containing the row segment.
	 *
	 * Note that this slice should maintain pointers back to the original dictionaries and only modify index structures.
	 *
	 * @param rl The row to start at
	 * @param ru The row to end at (not included)
	 * @return A new column group containing the specified row range.
	 */
	public abstract AColGroup sliceRows(int rl, int ru);
	/**
	 * Short hand method for getting minimum value contained in this column group.
	 *
	 * @return The minimum value contained in this ColumnGroup
	 */
	public abstract double getMin();
	/**
	 * Short hand method for getting maximum value contained in this column group.
	 *
	 * @return The maximum value contained in this ColumnGroup
	 */
	public abstract double getMax();
	/**
	 * Short hand method for getting the sum of this column group
	 *
	 * @param nRows The number of rows in the column group
	 * @return The sum of this column group
	 */
	public abstract double getSum(int nRows);
	/**
	 * Detect if the column group contains a specific value.
	 *
	 * @param pattern The value to look for.
	 * @return boolean saying true if the value is contained.
	 */
	public abstract boolean containsValue(double pattern);
	/**
	 * Get the number of nonZeros contained in this column group.
	 *
	 * @param nRows The number of rows in the column group, this is used for groups that does not contain information
	 *           about how many rows they have.
	 * @return The nnz.
	 */
	public abstract long getNumberNonZeros(int nRows);
	/**
	 * Make a copy of the column group values, and replace all values that match pattern with replacement value.
	 *
	 * @param pattern The value to look for
	 * @param replace The value to replace the other value with
	 * @return A new Column Group, reusing the index structure but with new values.
	 */
	public abstract AColGroup replace(double pattern, double replace);
	/**
	 * Compute the column sum
	 *
	 * @param c The array to add the column sum to.
	 * @param nRows The number of rows in the column group.
	 */
	public abstract void computeColSums(double[] c, int nRows);
	/**
	 * Central Moment instruction executed on a column group.
	 *
	 * @param op The Operator to use.
	 * @param nRows The number of rows contained in the ColumnGroup.
	 * @return A Central Moment object.
	 */
	public abstract CM_COV_Object centralMoment(CMOperator op, int nRows);
	/**
	 * Expand the column group to multiple columns. (one hot encode the column group)
	 *
	 * @param max The number of columns to expand to and cutoff values at.
	 * @param ignore If zero and negative values should be ignored.
	 * @param cast If the double values contained should be cast to whole numbers.
	 * @param nRows The number of rows in the column group.
	 * @return A new column group containing max number of columns.
	 */
	public abstract AColGroup rexpandCols(int max, boolean ignore, boolean cast, int nRows);
	/**
	 * Get the computation cost associated with this column group.
	 *
	 * @param e The computation cost estimator
	 * @param nRows the number of rows in the column group
	 * @return The cost of this column group
	 */
	public abstract double getCost(ComputationCostEstimator e, int nRows);
	/**
	 * Perform unary operation on the column group and return a new column group
	 *
	 * @param op The operation to perform
	 * @return The new column group
	 */
	public abstract AColGroup unaryOperation(UnaryOperator op);
	/**
	 * Get if the group is only containing zero
	 *
	 * @return true if empty
	 */
	public abstract boolean isEmpty();
	/**
	 * Append the other column group to this column group. This method tries to combine them to return a new column group
	 * containing both. In some cases it is possible in reasonable time, in others it is not.
	 *
	 * The result is first this column group followed by the other column group in higher row values.
	 *
	 * If it is not possible or very inefficient null is returned.
	 *
	 * @param g The other column group
	 * @return A combined column group or null
	 */
	public abstract AColGroup append(AColGroup g);
	/**
	 * Append all column groups in the list provided together in one go allocating the output once.
	 *
	 * If it is not possible or very inefficient null is returned.
	 *
	 * NOTE(review): assumes groups is non-empty; groups[0] dispatches the combine -- confirm callers guarantee this.
	 *
	 * @param groups The groups to combine.
	 * @param blen The normal number of rows in the groups
	 * @param rlen The total number of rows of all combined.
	 * @return A combined column group or null
	 */
	public static AColGroup appendN(AColGroup[] groups, int blen, int rlen) {
		return groups[0].appendNInternal(groups, blen, rlen);
	}
	/**
	 * Append all column groups in the list provided together with this.
	 *
	 * A Important detail is the first entry in the group == this, and should not be appended twice.
	 *
	 * If it is not possible or very inefficient null is returned.
	 *
	 * @param groups The groups to combine.
	 * @param blen The normal number of rows in the groups
	 * @param rlen The total number of rows of all combined.
	 * @return A combined column group or null
	 */
	protected abstract AColGroup appendNInternal(AColGroup[] groups, int blen, int rlen);
	/**
	 * Get the compression scheme for this column group to enable compression of other data.
	 *
	 * @return The compression scheme of this column group
	 */
	public abstract ICLAScheme getCompressionScheme();
	/**
	 * Clear variables that can be recomputed from the allocation of this column group.
	 */
	public void clear() {
		// do nothing -- default for groups without cached derived state; subclasses may override.
	}
	/**
	 * Recompress this column group into a new column group.
	 *
	 * @return A new or the same column group depending on optimization goal.
	 */
	public abstract AColGroup recompress();
	/**
	 * Recompress this column group into a new column group of the given type.
	 *
	 * @param ct The compressionType that the column group should morph into
	 * @param nRow The number of rows in this columngroup.
	 * @return A new column group
	 */
	public AColGroup morph(CompressionType ct, int nRow) {
		if(ct == getCompType())
			return this; // already the requested type
		else if(ct == CompressionType.DDCFOR)
			return this; // it does not make sense to change to FOR.
		else if(ct == CompressionType.UNCOMPRESSED) {
			// Move the group to canonical columns [0..nCol) so decompression fills the block densely.
			AColGroup cgMoved = this.copyAndSet(ColIndexFactory.create(_colIndexes.size()));
			final long nnz = getNumberNonZeros(nRow);
			MatrixBlock newDict = new MatrixBlock(nRow, _colIndexes.size(), nnz);
			newDict.allocateBlock();
			// Decompress into whichever representation the nnz estimate selected.
			if(newDict.isInSparseFormat())
				cgMoved.decompressToSparseBlock(newDict.getSparseBlock(), 0, nRow);
			else
				cgMoved.decompressToDenseBlock(newDict.getDenseBlock(), 0, nRow);
			newDict.setNonZeros(nnz);
			AColGroup cgUC = ColGroupUncompressed.create(newDict);
			return cgUC.copyAndSet(_colIndexes); // restore the original column indexes
		}
		else {
			throw new NotImplementedException("Morphing from : " + getCompType() + " to " + ct + " is not implemented");
		}
	}
	/**
	 * Get the compression info for this column group.
	 *
	 * @param nRow The number of rows in this column group.
	 * @return The compression info for this group.
	 */
	public abstract CompressedSizeInfoColGroup getCompressionInfo(int nRow);
	/**
	 * Combine this column group with another
	 *
	 * @param other The other column group to combine with.
	 * @param nRow The number of rows in both column groups.
	 * @return A combined representation as a column group.
	 */
	public AColGroup combine(AColGroup other, int nRow) {
		// Delegates to the combine library implementation.
		return CLALibCombineGroups.combine(this, other, nRow);
	}
	/**
	 * Get encoding of this column group.
	 *
	 * @return The encoding of the index structure.
	 * @throws NotImplementedException by default, for group types without an encoding.
	 */
	public IEncode getEncoding() {
		throw new NotImplementedException();
	}
public AColGroup sortColumnIndexes() {
if(_colIndexes.isSorted())
return this;
else {
int[] reorderingIndex = _colIndexes.getReorderingIndex();
IColIndex ni = _colIndexes.sort();
return fixColIndexes(ni, reorderingIndex);
}
}
	/**
	 * Realign the underlying data to a sorted column index, using the permutation obtained from
	 * IColIndex.getReorderingIndex() (see sortColumnIndexes).
	 *
	 * @param newColIndex The sorted column index to use.
	 * @param reordering The permutation realigning the data to the sorted index order.
	 * @return A column group with sorted column indexes.
	 */
	protected abstract AColGroup fixColIndexes(IColIndex newColIndex, int[] reordering);
	/**
	 * Perform row sum on the internal dictionaries, and return the same index structure.
	 *
	 * This method returns null on empty column groups.
	 *
	 * Note this method does not guarantee correct behavior if the given group is AMorphingGroup, instead it should be
	 * morphed to a valid columngroup via extractCommon first.
	 *
	 * @return The reduced colgroup.
	 */
	public abstract AColGroup reduceCols();
/**
* Selection (left matrix multiply)
*
* @param selection A sparse matrix with "max" a single one in each row all other values are zero.
* @param points The coordinates in the selection matrix to extract.
* @param ret The MatrixBlock to decompress the selected rows into
* @param rl The row to start at in the selection matrix
* @param ru the row to end at in the selection matrix (not inclusive)
*/
public final void selectionMultiply(MatrixBlock selection, P[] points, MatrixBlock ret, int rl, int ru) {
if(ret.isInSparseFormat())
sparseSelection(selection, points, ret, rl, ru);
else
denseSelection(selection, points, ret, rl, ru);
}
	/**
	 * Get an approximate sparsity of this column group
	 *
	 * @return the approximate sparsity of this columngroup
	 */
	public abstract double getSparsity();
	/**
	 * Sparse selection (left matrix multiply)
	 *
	 * @param selection A sparse matrix with "max" a single one in each row all other values are zero.
	 * @param points The coordinates in the selection matrix to extract.
	 * @param ret The Sparse MatrixBlock to decompress the selected rows into
	 * @param rl The row to start at in the selection matrix
	 * @param ru the row to end at in the selection matrix (not inclusive)
	 */
	protected abstract void sparseSelection(MatrixBlock selection, P[] points, MatrixBlock ret, int rl, int ru);
	/**
	 * Dense selection (left matrix multiply)
	 *
	 * @param selection A sparse matrix with "max" a single one in each row all other values are zero.
	 * @param points The coordinates in the selection matrix to extract.
	 * @param ret The Dense MatrixBlock to decompress the selected rows into
	 * @param rl The row to start at in the selection matrix
	 * @param ru the row to end at in the selection matrix (not inclusive)
	 */
	protected abstract void denseSelection(MatrixBlock selection, P[] points, MatrixBlock ret, int rl, int ru);
	/**
	 * Method to determine if the columnGroup have the same index structure as another. Note that the column indexes and
	 * dictionaries are allowed to be different.
	 *
	 * @param that the other column group
	 * @return if the index is the same.
	 */
	public boolean sameIndexStructure(AColGroup that) {
		// Default: conservatively false; subclasses with comparable index structures override.
		return false;
	}
/**
* C bind the list of column groups with this column group. the list of elements provided in the index of each list
* is guaranteed to have the same index structures
*
* @param nRow The number of rows contained in all right and this column group.
* @param nCol The number of columns to shift the right hand side column groups over when combining, this should
* only effect the column indexes
* @param right The right hand side column groups to combine. NOTE only the index offset of the second nested list
* should be used. The reason for providing this nested list is to avoid redundant allocations in
* calling methods.
* @return A combined compressed column group of the same type as this!.
*/
public AColGroup combineWithSameIndex(int nRow, int nCol, List<AColGroup> right) {
// default decompress... nasty !
IColIndex combinedColIndex = combineColIndexes(nCol, right);
MatrixBlock decompressTarget = new MatrixBlock(nRow, combinedColIndex.size(), false);
decompressTarget.allocateDenseBlock();
DenseBlock db = decompressTarget.getDenseBlock();
final int nColInThisGroup = _colIndexes.size();
this.copyAndSet(ColIndexFactory.create(nColInThisGroup)).decompressToDenseBlock(db, 0, nRow);
for(int i = 0; i < right.size(); i++) {
right.get(i).copyAndSet(ColIndexFactory.create(i * nColInThisGroup, i * nColInThisGroup + nColInThisGroup))
.decompressToDenseBlock(db, 0, nRow);
}
decompressTarget.setNonZeros(nRow * combinedColIndex.size());
CompressedSizeInfoColGroup ci = new CompressedSizeInfoColGroup(ColIndexFactory.create(combinedColIndex.size()),
nRow, nRow, CompressionType.DDC);
CompressedSizeInfo csi = new CompressedSizeInfo(ci);
CompressionSettings cs = new CompressionSettingsBuilder().create();
return ColGroupFactory.compressColGroups(decompressTarget, csi, cs).get(0).copyAndSet(combinedColIndex);
}
/**
* C bind the given column group to this.
*
* @param nRow The number of rows contained in the right and this column group.
* @param nCol The number of columns in this.
* @param right The column group to c-bind.
* @return a new combined column groups.
*/
public AColGroup combineWithSameIndex(int nRow, int nCol, AColGroup right) {
IColIndex combinedColIndex = _colIndexes.combine(right._colIndexes.shift(nCol));
MatrixBlock decompressTarget = new MatrixBlock(nRow, combinedColIndex.size(), false);
decompressTarget.allocateDenseBlock();
DenseBlock db = decompressTarget.getDenseBlock();
final int nColInThisGroup = _colIndexes.size();
this.copyAndSet(ColIndexFactory.create(nColInThisGroup)).decompressToDenseBlock(db, 0, nRow);
right.copyAndSet(ColIndexFactory.create(nColInThisGroup, nColInThisGroup + nColInThisGroup))
.decompressToDenseBlock(db, 0, nRow);
decompressTarget.setNonZeros(nRow * combinedColIndex.size());
CompressedSizeInfoColGroup ci = new CompressedSizeInfoColGroup(ColIndexFactory.create(combinedColIndex.size()),
nRow, nRow, CompressionType.DDC);
CompressedSizeInfo csi = new CompressedSizeInfo(ci);
CompressionSettings cs = new CompressionSettingsBuilder().create();
return ColGroupFactory.compressColGroups(decompressTarget, csi, cs).get(0).copyAndSet(combinedColIndex);
// throw new NotImplementedException("Combine of : " + this.getClass().getSimpleName() + " not implemented");
}
protected IColIndex combineColIndexes(final int nCol, List<AColGroup> right) {
IColIndex combinedColIndex = _colIndexes;
for(int i = 0; i < right.size(); i++)
combinedColIndex = combinedColIndex.combine(right.get(i).getColIndices().shift(nCol * i + nCol));
return combinedColIndex;
}
	/**
	 * This method returns a list of column groups that are naive splits of this column group as if it is reshaped.
	 *
	 * This means the column group's rows are split into x number of other column groups where x is the multiplier.
	 *
	 * The rows are assigned round robin to each of the output groups, meaning the first row goes to the first group.
	 *
	 * If for instance the 4th column group is split by a multiplier of 2 and there were 5 columns in total
	 * originally, the output becomes 2 column groups: one at column index 4 and one at 9.
	 *
	 * If possible the split column groups should reuse pointers back to the original dictionaries!
	 *
	 * @param multiplier The number of column groups to split into
	 * @param nRow       The number of rows in this column group in case the underlying column group does not know
	 * @param nColOrg    The number of overall columns in the host CompressedMatrixBlock.
	 * @return a list of split column groups
	 */
	public abstract AColGroup[] splitReshape(final int multiplier, final int nRow, final int nColOrg);
	/**
	 * This method returns a list of column groups that are naive splits of this column group as if it is reshaped.
	 *
	 * This means the column group's rows are split into x number of other column groups where x is the multiplier.
	 *
	 * The rows are assigned round robin to each of the output groups, meaning the first row goes to the first group.
	 *
	 * If for instance the 4th column group is split by a multiplier of 2 and there were 5 columns in total
	 * originally, the output becomes 2 column groups: one at column index 4 and one at 9.
	 *
	 * If possible the split column groups should reuse pointers back to the original dictionaries!
	 *
	 * This specific variation pushes down the parallelization via the executor service provided. If not overridden,
	 * the default is to call the normal (sequential) split reshape and ignore the pool.
	 *
	 * @param multiplier The number of column groups to split into
	 * @param nRow       The number of rows in this column group in case the underlying column group does not know
	 * @param nColOrg    The number of overall columns in the host CompressedMatrixBlock
	 * @param pool       The executor service to submit parallel tasks to
	 * @throws Exception In case there is an error we throw the exception out instead of handling it
	 * @return a list of split column groups
	 */
	public AColGroup[] splitReshapePushDown(final int multiplier, final int nRow, final int nColOrg,
		final ExecutorService pool) throws Exception {
		return splitReshape(multiplier, nRow, nColOrg);
	}
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append(String.format("\n%15s", "Type: "));
sb.append(this.getClass().getSimpleName());
sb.append(String.format("\n%15s", "Columns: "));
sb.append(_colIndexes);
return sb.toString();
}
}
|
googleapis/google-cloud-java | 37,937 | java-dataflow/proto-google-cloud-dataflow-v1beta3/src/main/java/com/google/dataflow/v1beta3/RuntimeMetadata.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/dataflow/v1beta3/templates.proto
// Protobuf Java Version: 3.25.8
package com.google.dataflow.v1beta3;
/**
*
*
* <pre>
* RuntimeMetadata describing a runtime environment.
* </pre>
*
* Protobuf type {@code google.dataflow.v1beta3.RuntimeMetadata}
*/
public final class RuntimeMetadata extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.dataflow.v1beta3.RuntimeMetadata)
RuntimeMetadataOrBuilder {
private static final long serialVersionUID = 0L;
// Use RuntimeMetadata.newBuilder() to construct.
private RuntimeMetadata(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private RuntimeMetadata() {
parameters_ = java.util.Collections.emptyList();
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new RuntimeMetadata();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.dataflow.v1beta3.TemplatesProto
.internal_static_google_dataflow_v1beta3_RuntimeMetadata_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.dataflow.v1beta3.TemplatesProto
.internal_static_google_dataflow_v1beta3_RuntimeMetadata_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.dataflow.v1beta3.RuntimeMetadata.class,
com.google.dataflow.v1beta3.RuntimeMetadata.Builder.class);
}
private int bitField0_;
public static final int SDK_INFO_FIELD_NUMBER = 1;
private com.google.dataflow.v1beta3.SDKInfo sdkInfo_;
/**
*
*
* <pre>
* SDK Info for the template.
* </pre>
*
* <code>.google.dataflow.v1beta3.SDKInfo sdk_info = 1;</code>
*
* @return Whether the sdkInfo field is set.
*/
@java.lang.Override
public boolean hasSdkInfo() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* SDK Info for the template.
* </pre>
*
* <code>.google.dataflow.v1beta3.SDKInfo sdk_info = 1;</code>
*
* @return The sdkInfo.
*/
@java.lang.Override
public com.google.dataflow.v1beta3.SDKInfo getSdkInfo() {
return sdkInfo_ == null ? com.google.dataflow.v1beta3.SDKInfo.getDefaultInstance() : sdkInfo_;
}
/**
*
*
* <pre>
* SDK Info for the template.
* </pre>
*
* <code>.google.dataflow.v1beta3.SDKInfo sdk_info = 1;</code>
*/
@java.lang.Override
public com.google.dataflow.v1beta3.SDKInfoOrBuilder getSdkInfoOrBuilder() {
return sdkInfo_ == null ? com.google.dataflow.v1beta3.SDKInfo.getDefaultInstance() : sdkInfo_;
}
public static final int PARAMETERS_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private java.util.List<com.google.dataflow.v1beta3.ParameterMetadata> parameters_;
/**
*
*
* <pre>
* The parameters for the template.
* </pre>
*
* <code>repeated .google.dataflow.v1beta3.ParameterMetadata parameters = 2;</code>
*/
@java.lang.Override
public java.util.List<com.google.dataflow.v1beta3.ParameterMetadata> getParametersList() {
return parameters_;
}
/**
*
*
* <pre>
* The parameters for the template.
* </pre>
*
* <code>repeated .google.dataflow.v1beta3.ParameterMetadata parameters = 2;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.dataflow.v1beta3.ParameterMetadataOrBuilder>
getParametersOrBuilderList() {
return parameters_;
}
/**
*
*
* <pre>
* The parameters for the template.
* </pre>
*
* <code>repeated .google.dataflow.v1beta3.ParameterMetadata parameters = 2;</code>
*/
@java.lang.Override
public int getParametersCount() {
return parameters_.size();
}
/**
*
*
* <pre>
* The parameters for the template.
* </pre>
*
* <code>repeated .google.dataflow.v1beta3.ParameterMetadata parameters = 2;</code>
*/
@java.lang.Override
public com.google.dataflow.v1beta3.ParameterMetadata getParameters(int index) {
return parameters_.get(index);
}
/**
*
*
* <pre>
* The parameters for the template.
* </pre>
*
* <code>repeated .google.dataflow.v1beta3.ParameterMetadata parameters = 2;</code>
*/
@java.lang.Override
public com.google.dataflow.v1beta3.ParameterMetadataOrBuilder getParametersOrBuilder(int index) {
return parameters_.get(index);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getSdkInfo());
}
for (int i = 0; i < parameters_.size(); i++) {
output.writeMessage(2, parameters_.get(i));
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getSdkInfo());
}
for (int i = 0; i < parameters_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, parameters_.get(i));
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.dataflow.v1beta3.RuntimeMetadata)) {
return super.equals(obj);
}
com.google.dataflow.v1beta3.RuntimeMetadata other =
(com.google.dataflow.v1beta3.RuntimeMetadata) obj;
if (hasSdkInfo() != other.hasSdkInfo()) return false;
if (hasSdkInfo()) {
if (!getSdkInfo().equals(other.getSdkInfo())) return false;
}
if (!getParametersList().equals(other.getParametersList())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasSdkInfo()) {
hash = (37 * hash) + SDK_INFO_FIELD_NUMBER;
hash = (53 * hash) + getSdkInfo().hashCode();
}
if (getParametersCount() > 0) {
hash = (37 * hash) + PARAMETERS_FIELD_NUMBER;
hash = (53 * hash) + getParametersList().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.dataflow.v1beta3.RuntimeMetadata parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.dataflow.v1beta3.RuntimeMetadata parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.dataflow.v1beta3.RuntimeMetadata parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.dataflow.v1beta3.RuntimeMetadata parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.dataflow.v1beta3.RuntimeMetadata parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.dataflow.v1beta3.RuntimeMetadata parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.dataflow.v1beta3.RuntimeMetadata parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.dataflow.v1beta3.RuntimeMetadata parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.dataflow.v1beta3.RuntimeMetadata parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.dataflow.v1beta3.RuntimeMetadata parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.dataflow.v1beta3.RuntimeMetadata parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.dataflow.v1beta3.RuntimeMetadata parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.dataflow.v1beta3.RuntimeMetadata prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* RuntimeMetadata describing a runtime environment.
* </pre>
*
* Protobuf type {@code google.dataflow.v1beta3.RuntimeMetadata}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.dataflow.v1beta3.RuntimeMetadata)
com.google.dataflow.v1beta3.RuntimeMetadataOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.dataflow.v1beta3.TemplatesProto
.internal_static_google_dataflow_v1beta3_RuntimeMetadata_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.dataflow.v1beta3.TemplatesProto
.internal_static_google_dataflow_v1beta3_RuntimeMetadata_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.dataflow.v1beta3.RuntimeMetadata.class,
com.google.dataflow.v1beta3.RuntimeMetadata.Builder.class);
}
// Construct using com.google.dataflow.v1beta3.RuntimeMetadata.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getSdkInfoFieldBuilder();
getParametersFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
sdkInfo_ = null;
if (sdkInfoBuilder_ != null) {
sdkInfoBuilder_.dispose();
sdkInfoBuilder_ = null;
}
if (parametersBuilder_ == null) {
parameters_ = java.util.Collections.emptyList();
} else {
parameters_ = null;
parametersBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000002);
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.dataflow.v1beta3.TemplatesProto
.internal_static_google_dataflow_v1beta3_RuntimeMetadata_descriptor;
}
@java.lang.Override
public com.google.dataflow.v1beta3.RuntimeMetadata getDefaultInstanceForType() {
return com.google.dataflow.v1beta3.RuntimeMetadata.getDefaultInstance();
}
@java.lang.Override
public com.google.dataflow.v1beta3.RuntimeMetadata build() {
com.google.dataflow.v1beta3.RuntimeMetadata result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.dataflow.v1beta3.RuntimeMetadata buildPartial() {
com.google.dataflow.v1beta3.RuntimeMetadata result =
new com.google.dataflow.v1beta3.RuntimeMetadata(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(com.google.dataflow.v1beta3.RuntimeMetadata result) {
if (parametersBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)) {
parameters_ = java.util.Collections.unmodifiableList(parameters_);
bitField0_ = (bitField0_ & ~0x00000002);
}
result.parameters_ = parameters_;
} else {
result.parameters_ = parametersBuilder_.build();
}
}
private void buildPartial0(com.google.dataflow.v1beta3.RuntimeMetadata result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.sdkInfo_ = sdkInfoBuilder_ == null ? sdkInfo_ : sdkInfoBuilder_.build();
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.dataflow.v1beta3.RuntimeMetadata) {
return mergeFrom((com.google.dataflow.v1beta3.RuntimeMetadata) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.dataflow.v1beta3.RuntimeMetadata other) {
if (other == com.google.dataflow.v1beta3.RuntimeMetadata.getDefaultInstance()) return this;
if (other.hasSdkInfo()) {
mergeSdkInfo(other.getSdkInfo());
}
if (parametersBuilder_ == null) {
if (!other.parameters_.isEmpty()) {
if (parameters_.isEmpty()) {
parameters_ = other.parameters_;
bitField0_ = (bitField0_ & ~0x00000002);
} else {
ensureParametersIsMutable();
parameters_.addAll(other.parameters_);
}
onChanged();
}
} else {
if (!other.parameters_.isEmpty()) {
if (parametersBuilder_.isEmpty()) {
parametersBuilder_.dispose();
parametersBuilder_ = null;
parameters_ = other.parameters_;
bitField0_ = (bitField0_ & ~0x00000002);
parametersBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getParametersFieldBuilder()
: null;
} else {
parametersBuilder_.addAllMessages(other.parameters_);
}
}
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
input.readMessage(getSdkInfoFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
com.google.dataflow.v1beta3.ParameterMetadata m =
input.readMessage(
com.google.dataflow.v1beta3.ParameterMetadata.parser(), extensionRegistry);
if (parametersBuilder_ == null) {
ensureParametersIsMutable();
parameters_.add(m);
} else {
parametersBuilder_.addMessage(m);
}
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private com.google.dataflow.v1beta3.SDKInfo sdkInfo_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.dataflow.v1beta3.SDKInfo,
com.google.dataflow.v1beta3.SDKInfo.Builder,
com.google.dataflow.v1beta3.SDKInfoOrBuilder>
sdkInfoBuilder_;
/**
*
*
* <pre>
* SDK Info for the template.
* </pre>
*
* <code>.google.dataflow.v1beta3.SDKInfo sdk_info = 1;</code>
*
* @return Whether the sdkInfo field is set.
*/
public boolean hasSdkInfo() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* SDK Info for the template.
* </pre>
*
* <code>.google.dataflow.v1beta3.SDKInfo sdk_info = 1;</code>
*
* @return The sdkInfo.
*/
public com.google.dataflow.v1beta3.SDKInfo getSdkInfo() {
if (sdkInfoBuilder_ == null) {
return sdkInfo_ == null
? com.google.dataflow.v1beta3.SDKInfo.getDefaultInstance()
: sdkInfo_;
} else {
return sdkInfoBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* SDK Info for the template.
* </pre>
*
* <code>.google.dataflow.v1beta3.SDKInfo sdk_info = 1;</code>
*/
public Builder setSdkInfo(com.google.dataflow.v1beta3.SDKInfo value) {
if (sdkInfoBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
sdkInfo_ = value;
} else {
sdkInfoBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* SDK Info for the template.
* </pre>
*
* <code>.google.dataflow.v1beta3.SDKInfo sdk_info = 1;</code>
*/
public Builder setSdkInfo(com.google.dataflow.v1beta3.SDKInfo.Builder builderForValue) {
if (sdkInfoBuilder_ == null) {
sdkInfo_ = builderForValue.build();
} else {
sdkInfoBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* SDK Info for the template.
* </pre>
*
* <code>.google.dataflow.v1beta3.SDKInfo sdk_info = 1;</code>
*/
public Builder mergeSdkInfo(com.google.dataflow.v1beta3.SDKInfo value) {
if (sdkInfoBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)
&& sdkInfo_ != null
&& sdkInfo_ != com.google.dataflow.v1beta3.SDKInfo.getDefaultInstance()) {
getSdkInfoBuilder().mergeFrom(value);
} else {
sdkInfo_ = value;
}
} else {
sdkInfoBuilder_.mergeFrom(value);
}
if (sdkInfo_ != null) {
bitField0_ |= 0x00000001;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* SDK Info for the template.
* </pre>
*
* <code>.google.dataflow.v1beta3.SDKInfo sdk_info = 1;</code>
*/
public Builder clearSdkInfo() {
bitField0_ = (bitField0_ & ~0x00000001);
sdkInfo_ = null;
if (sdkInfoBuilder_ != null) {
sdkInfoBuilder_.dispose();
sdkInfoBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* SDK Info for the template.
* </pre>
*
* <code>.google.dataflow.v1beta3.SDKInfo sdk_info = 1;</code>
*/
public com.google.dataflow.v1beta3.SDKInfo.Builder getSdkInfoBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getSdkInfoFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* SDK Info for the template.
* </pre>
*
* <code>.google.dataflow.v1beta3.SDKInfo sdk_info = 1;</code>
*/
public com.google.dataflow.v1beta3.SDKInfoOrBuilder getSdkInfoOrBuilder() {
if (sdkInfoBuilder_ != null) {
return sdkInfoBuilder_.getMessageOrBuilder();
} else {
return sdkInfo_ == null
? com.google.dataflow.v1beta3.SDKInfo.getDefaultInstance()
: sdkInfo_;
}
}
/**
*
*
* <pre>
* SDK Info for the template.
* </pre>
*
* <code>.google.dataflow.v1beta3.SDKInfo sdk_info = 1;</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.dataflow.v1beta3.SDKInfo,
com.google.dataflow.v1beta3.SDKInfo.Builder,
com.google.dataflow.v1beta3.SDKInfoOrBuilder>
getSdkInfoFieldBuilder() {
if (sdkInfoBuilder_ == null) {
sdkInfoBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.dataflow.v1beta3.SDKInfo,
com.google.dataflow.v1beta3.SDKInfo.Builder,
com.google.dataflow.v1beta3.SDKInfoOrBuilder>(
getSdkInfo(), getParentForChildren(), isClean());
sdkInfo_ = null;
}
return sdkInfoBuilder_;
}
private java.util.List<com.google.dataflow.v1beta3.ParameterMetadata> parameters_ =
java.util.Collections.emptyList();
private void ensureParametersIsMutable() {
if (!((bitField0_ & 0x00000002) != 0)) {
parameters_ =
new java.util.ArrayList<com.google.dataflow.v1beta3.ParameterMetadata>(parameters_);
bitField0_ |= 0x00000002;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.dataflow.v1beta3.ParameterMetadata,
com.google.dataflow.v1beta3.ParameterMetadata.Builder,
com.google.dataflow.v1beta3.ParameterMetadataOrBuilder>
parametersBuilder_;
/**
*
*
* <pre>
* The parameters for the template.
* </pre>
*
* <code>repeated .google.dataflow.v1beta3.ParameterMetadata parameters = 2;</code>
*/
public java.util.List<com.google.dataflow.v1beta3.ParameterMetadata> getParametersList() {
if (parametersBuilder_ == null) {
return java.util.Collections.unmodifiableList(parameters_);
} else {
return parametersBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* The parameters for the template.
* </pre>
*
* <code>repeated .google.dataflow.v1beta3.ParameterMetadata parameters = 2;</code>
*/
public int getParametersCount() {
if (parametersBuilder_ == null) {
return parameters_.size();
} else {
return parametersBuilder_.getCount();
}
}
/**
*
*
* <pre>
* The parameters for the template.
* </pre>
*
* <code>repeated .google.dataflow.v1beta3.ParameterMetadata parameters = 2;</code>
*/
public com.google.dataflow.v1beta3.ParameterMetadata getParameters(int index) {
if (parametersBuilder_ == null) {
return parameters_.get(index);
} else {
return parametersBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* The parameters for the template.
* </pre>
*
* <code>repeated .google.dataflow.v1beta3.ParameterMetadata parameters = 2;</code>
*/
public Builder setParameters(int index, com.google.dataflow.v1beta3.ParameterMetadata value) {
if (parametersBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureParametersIsMutable();
parameters_.set(index, value);
onChanged();
} else {
parametersBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The parameters for the template.
* </pre>
*
* <code>repeated .google.dataflow.v1beta3.ParameterMetadata parameters = 2;</code>
*/
public Builder setParameters(
int index, com.google.dataflow.v1beta3.ParameterMetadata.Builder builderForValue) {
if (parametersBuilder_ == null) {
ensureParametersIsMutable();
parameters_.set(index, builderForValue.build());
onChanged();
} else {
parametersBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The parameters for the template.
* </pre>
*
* <code>repeated .google.dataflow.v1beta3.ParameterMetadata parameters = 2;</code>
*/
public Builder addParameters(com.google.dataflow.v1beta3.ParameterMetadata value) {
if (parametersBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureParametersIsMutable();
parameters_.add(value);
onChanged();
} else {
parametersBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* The parameters for the template.
* </pre>
*
* <code>repeated .google.dataflow.v1beta3.ParameterMetadata parameters = 2;</code>
*/
    public Builder addParameters(int index, com.google.dataflow.v1beta3.ParameterMetadata value) {
      if (parametersBuilder_ == null) {
        // Eager null check so the backing list never holds a null element.
        if (value == null) {
          throw new NullPointerException();
        }
        ensureParametersIsMutable();
        parameters_.add(index, value);
        onChanged();
      } else {
        parametersBuilder_.addMessage(index, value);
      }
      return this;
    }

    /**
     * Appends a parameter built from {@code builderForValue}.
     *
     * <p>Field: {@code repeated .google.dataflow.v1beta3.ParameterMetadata parameters = 2} — the
     * parameters for the template.
     */
    public Builder addParameters(
        com.google.dataflow.v1beta3.ParameterMetadata.Builder builderForValue) {
      if (parametersBuilder_ == null) {
        ensureParametersIsMutable();
        parameters_.add(builderForValue.build());
        onChanged();
      } else {
        parametersBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }

    /**
     * Inserts a parameter built from {@code builderForValue} at {@code index}.
     *
     * <p>Field: {@code repeated .google.dataflow.v1beta3.ParameterMetadata parameters = 2} — the
     * parameters for the template.
     */
    public Builder addParameters(
        int index, com.google.dataflow.v1beta3.ParameterMetadata.Builder builderForValue) {
      if (parametersBuilder_ == null) {
        ensureParametersIsMutable();
        parameters_.add(index, builderForValue.build());
        onChanged();
      } else {
        parametersBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }

    /**
     * Appends all of {@code values}.
     *
     * <p>Field: {@code repeated .google.dataflow.v1beta3.ParameterMetadata parameters = 2} — the
     * parameters for the template.
     */
    public Builder addAllParameters(
        java.lang.Iterable<? extends com.google.dataflow.v1beta3.ParameterMetadata> values) {
      if (parametersBuilder_ == null) {
        ensureParametersIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, parameters_);
        onChanged();
      } else {
        parametersBuilder_.addAllMessages(values);
      }
      return this;
    }

    /**
     * Removes every parameter and clears the field's "has been mutated" bit.
     *
     * <p>Field: {@code repeated .google.dataflow.v1beta3.ParameterMetadata parameters = 2} — the
     * parameters for the template.
     */
    public Builder clearParameters() {
      if (parametersBuilder_ == null) {
        parameters_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000002);
        onChanged();
      } else {
        parametersBuilder_.clear();
      }
      return this;
    }

    /**
     * Removes the parameter at {@code index}.
     *
     * <p>Field: {@code repeated .google.dataflow.v1beta3.ParameterMetadata parameters = 2} — the
     * parameters for the template.
     */
    public Builder removeParameters(int index) {
      if (parametersBuilder_ == null) {
        ensureParametersIsMutable();
        parameters_.remove(index);
        onChanged();
      } else {
        parametersBuilder_.remove(index);
      }
      return this;
    }
    /**
     * Returns a mutable builder for the parameter at {@code index}. Forces the lazy field builder
     * into existence (see {@link #getParametersFieldBuilder()}).
     *
     * <p>Field: {@code repeated .google.dataflow.v1beta3.ParameterMetadata parameters = 2} — the
     * parameters for the template.
     */
    public com.google.dataflow.v1beta3.ParameterMetadata.Builder getParametersBuilder(int index) {
      return getParametersFieldBuilder().getBuilder(index);
    }

    /**
     * Returns a read-only view of the parameter at {@code index}, without forcing creation of the
     * field builder.
     *
     * <p>Field: {@code repeated .google.dataflow.v1beta3.ParameterMetadata parameters = 2} — the
     * parameters for the template.
     */
    public com.google.dataflow.v1beta3.ParameterMetadataOrBuilder getParametersOrBuilder(
        int index) {
      if (parametersBuilder_ == null) {
        return parameters_.get(index);
      } else {
        return parametersBuilder_.getMessageOrBuilder(index);
      }
    }

    /**
     * Returns read-only views of all parameters.
     *
     * <p>Field: {@code repeated .google.dataflow.v1beta3.ParameterMetadata parameters = 2} — the
     * parameters for the template.
     */
    public java.util.List<? extends com.google.dataflow.v1beta3.ParameterMetadataOrBuilder>
        getParametersOrBuilderList() {
      if (parametersBuilder_ != null) {
        return parametersBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(parameters_);
      }
    }

    /**
     * Appends a default-valued parameter and returns its builder for in-place editing.
     *
     * <p>Field: {@code repeated .google.dataflow.v1beta3.ParameterMetadata parameters = 2} — the
     * parameters for the template.
     */
    public com.google.dataflow.v1beta3.ParameterMetadata.Builder addParametersBuilder() {
      return getParametersFieldBuilder()
          .addBuilder(com.google.dataflow.v1beta3.ParameterMetadata.getDefaultInstance());
    }

    /**
     * Inserts a default-valued parameter at {@code index} and returns its builder.
     *
     * <p>Field: {@code repeated .google.dataflow.v1beta3.ParameterMetadata parameters = 2} — the
     * parameters for the template.
     */
    public com.google.dataflow.v1beta3.ParameterMetadata.Builder addParametersBuilder(int index) {
      return getParametersFieldBuilder()
          .addBuilder(index, com.google.dataflow.v1beta3.ParameterMetadata.getDefaultInstance());
    }

    /**
     * Returns builders for every parameter, forcing creation of the field builder.
     *
     * <p>Field: {@code repeated .google.dataflow.v1beta3.ParameterMetadata parameters = 2} — the
     * parameters for the template.
     */
    public java.util.List<com.google.dataflow.v1beta3.ParameterMetadata.Builder>
        getParametersBuilderList() {
      return getParametersFieldBuilder().getBuilderList();
    }

    // Lazily switches this repeated field from plain-list mode to builder mode: once created,
    // the RepeatedFieldBuilderV3 takes ownership of the elements and parameters_ is nulled out.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.dataflow.v1beta3.ParameterMetadata,
            com.google.dataflow.v1beta3.ParameterMetadata.Builder,
            com.google.dataflow.v1beta3.ParameterMetadataOrBuilder>
        getParametersFieldBuilder() {
      if (parametersBuilder_ == null) {
        parametersBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.dataflow.v1beta3.ParameterMetadata,
                com.google.dataflow.v1beta3.ParameterMetadata.Builder,
                com.google.dataflow.v1beta3.ParameterMetadataOrBuilder>(
                parameters_, ((bitField0_ & 0x00000002) != 0), getParentForChildren(), isClean());
        parameters_ = null;
      }
      return parametersBuilder_;
    }
    // Standard generated pass-throughs to GeneratedMessageV3.Builder for unknown-field handling.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.dataflow.v1beta3.RuntimeMetadata)
}
// @@protoc_insertion_point(class_scope:google.dataflow.v1beta3.RuntimeMetadata)
  // Singleton default instance, created eagerly at class-load time.
  private static final com.google.dataflow.v1beta3.RuntimeMetadata DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.dataflow.v1beta3.RuntimeMetadata();
  }

  public static com.google.dataflow.v1beta3.RuntimeMetadata getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Parser that delegates to Builder.mergeFrom and, on any failure, attaches the partially
  // parsed message to the thrown InvalidProtocolBufferException.
  private static final com.google.protobuf.Parser<RuntimeMetadata> PARSER =
      new com.google.protobuf.AbstractParser<RuntimeMetadata>() {
        @java.lang.Override
        public RuntimeMetadata parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<RuntimeMetadata> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<RuntimeMetadata> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.dataflow.v1beta3.RuntimeMetadata getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
google/guava | 37,565 | android/guava-tests/test/com/google/common/collect/ImmutableMapTest.java | /*
* Copyright (C) 2008 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.collect;
import static com.google.common.collect.Maps.immutableEntry;
import static com.google.common.collect.ReflectionFreeAssertThrows.assertThrows;
import static com.google.common.testing.SerializableTester.reserialize;
import static com.google.common.truth.Truth.assertThat;
import static com.google.common.truth.Truth.assertWithMessage;
import static java.util.Arrays.asList;
import static java.util.Collections.singletonMap;
import com.google.common.annotations.GwtCompatible;
import com.google.common.annotations.GwtIncompatible;
import com.google.common.annotations.J2ktIncompatible;
import com.google.common.collect.ImmutableMap.Builder;
import com.google.common.collect.testing.CollectionTestSuiteBuilder;
import com.google.common.collect.testing.ListTestSuiteBuilder;
import com.google.common.collect.testing.MapTestSuiteBuilder;
import com.google.common.collect.testing.features.CollectionFeature;
import com.google.common.collect.testing.features.CollectionSize;
import com.google.common.collect.testing.features.MapFeature;
import com.google.common.collect.testing.google.MapGenerators.ImmutableMapCopyOfEntriesGenerator;
import com.google.common.collect.testing.google.MapGenerators.ImmutableMapCopyOfEnumMapGenerator;
import com.google.common.collect.testing.google.MapGenerators.ImmutableMapCopyOfGenerator;
import com.google.common.collect.testing.google.MapGenerators.ImmutableMapEntryListGenerator;
import com.google.common.collect.testing.google.MapGenerators.ImmutableMapGenerator;
import com.google.common.collect.testing.google.MapGenerators.ImmutableMapKeyListGenerator;
import com.google.common.collect.testing.google.MapGenerators.ImmutableMapUnhashableValuesGenerator;
import com.google.common.collect.testing.google.MapGenerators.ImmutableMapValueListGenerator;
import com.google.common.collect.testing.google.MapGenerators.ImmutableMapValuesAsSingletonSetGenerator;
import com.google.common.testing.EqualsTester;
import com.google.common.testing.NullPointerTester;
import java.io.ByteArrayOutputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.util.AbstractMap;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;
import org.jspecify.annotations.NullMarked;
import org.jspecify.annotations.Nullable;
/**
* Tests for {@link ImmutableMap}.
*
* @author Kevin Bourrillion
* @author Jesse Wilson
*/
@GwtCompatible
@SuppressWarnings("AlwaysThrows")
@NullMarked
public class ImmutableMapTest extends TestCase {
@J2ktIncompatible
@GwtIncompatible // suite
@AndroidIncompatible // test-suite builders
public static Test suite() {
TestSuite suite = new TestSuite();
suite.addTestSuite(ImmutableMapTest.class);
suite.addTest(
MapTestSuiteBuilder.using(new ImmutableMapGenerator())
.withFeatures(
CollectionSize.ANY,
CollectionFeature.SERIALIZABLE_INCLUDING_VIEWS,
CollectionFeature.KNOWN_ORDER,
MapFeature.REJECTS_DUPLICATES_AT_CREATION,
CollectionFeature.ALLOWS_NULL_QUERIES)
.named("ImmutableMap")
.createTestSuite());
suite.addTest(
MapTestSuiteBuilder.using(new ImmutableMapCopyOfGenerator())
.withFeatures(
CollectionSize.ANY,
CollectionFeature.SERIALIZABLE_INCLUDING_VIEWS,
CollectionFeature.KNOWN_ORDER,
CollectionFeature.ALLOWS_NULL_QUERIES)
.named("ImmutableMap.copyOf[Map]")
.createTestSuite());
suite.addTest(
MapTestSuiteBuilder.using(new ImmutableMapCopyOfEntriesGenerator())
.withFeatures(
CollectionSize.ANY,
MapFeature.REJECTS_DUPLICATES_AT_CREATION,
CollectionFeature.SERIALIZABLE_INCLUDING_VIEWS,
CollectionFeature.KNOWN_ORDER,
CollectionFeature.ALLOWS_NULL_QUERIES)
.named("ImmutableMap.copyOf[Iterable<Entry>]")
.createTestSuite());
suite.addTest(
MapTestSuiteBuilder.using(new ImmutableMapCopyOfEnumMapGenerator())
.withFeatures(
CollectionSize.ANY,
CollectionFeature.SERIALIZABLE_INCLUDING_VIEWS,
CollectionFeature.KNOWN_ORDER,
CollectionFeature.ALLOWS_NULL_QUERIES)
.named("ImmutableMap.copyOf[EnumMap]")
.createTestSuite());
suite.addTest(
MapTestSuiteBuilder.using(new ImmutableMapValuesAsSingletonSetGenerator())
.withFeatures(
CollectionSize.ANY,
MapFeature.REJECTS_DUPLICATES_AT_CREATION,
CollectionFeature.KNOWN_ORDER,
CollectionFeature.ALLOWS_NULL_QUERIES)
.named("ImmutableMap.asMultimap.asMap")
.createTestSuite());
suite.addTest(
CollectionTestSuiteBuilder.using(new ImmutableMapUnhashableValuesGenerator())
.withFeatures(
CollectionSize.ANY,
CollectionFeature.KNOWN_ORDER,
CollectionFeature.ALLOWS_NULL_QUERIES)
.named("ImmutableMap.values, unhashable")
.createTestSuite());
suite.addTest(
ListTestSuiteBuilder.using(new ImmutableMapKeyListGenerator())
.named("ImmutableMap.keySet.asList")
.withFeatures(
CollectionSize.ANY,
CollectionFeature.SERIALIZABLE,
CollectionFeature.REJECTS_DUPLICATES_AT_CREATION,
CollectionFeature.ALLOWS_NULL_QUERIES)
.createTestSuite());
suite.addTest(
ListTestSuiteBuilder.using(new ImmutableMapEntryListGenerator())
.named("ImmutableMap.entrySet.asList")
.withFeatures(
CollectionSize.ANY,
CollectionFeature.SERIALIZABLE,
CollectionFeature.REJECTS_DUPLICATES_AT_CREATION,
CollectionFeature.ALLOWS_NULL_QUERIES)
.createTestSuite());
suite.addTest(
ListTestSuiteBuilder.using(new ImmutableMapValueListGenerator())
.named("ImmutableMap.values.asList")
.withFeatures(
CollectionSize.ANY,
CollectionFeature.SERIALIZABLE,
CollectionFeature.ALLOWS_NULL_QUERIES)
.createTestSuite());
return suite;
}
// Creation tests
public void testEmptyBuilder() {
ImmutableMap<String, Integer> map = new Builder<String, Integer>().buildOrThrow();
assertEquals(Collections.<String, Integer>emptyMap(), map);
}
public void testSingletonBuilder() {
ImmutableMap<String, Integer> map = new Builder<String, Integer>().put("one", 1).buildOrThrow();
assertMapEquals(map, "one", 1);
}
public void testBuilder() {
ImmutableMap<String, Integer> map =
new Builder<String, Integer>()
.put("one", 1)
.put("two", 2)
.put("three", 3)
.put("four", 4)
.put("five", 5)
.buildOrThrow();
assertMapEquals(map, "one", 1, "two", 2, "three", 3, "four", 4, "five", 5);
}
@GwtIncompatible
public void testBuilderExactlySizedReusesArray() {
ImmutableMap.Builder<Integer, Integer> builder = ImmutableMap.builderWithExpectedSize(10);
Object[] builderArray = builder.alternatingKeysAndValues;
for (int i = 0; i < 10; i++) {
builder.put(i, i);
}
Object[] builderArrayAfterPuts = builder.alternatingKeysAndValues;
RegularImmutableMap<Integer, Integer> map =
(RegularImmutableMap<Integer, Integer>) builder.buildOrThrow();
Object[] mapInternalArray = map.alternatingKeysAndValues;
assertSame(builderArray, builderArrayAfterPuts);
assertSame(builderArray, mapInternalArray);
}
public void testBuilder_orderEntriesByValue() {
ImmutableMap<String, Integer> map =
new Builder<String, Integer>()
.orderEntriesByValue(Ordering.natural())
.put("three", 3)
.put("one", 1)
.put("five", 5)
.put("four", 4)
.put("two", 2)
.buildOrThrow();
assertMapEquals(map, "one", 1, "two", 2, "three", 3, "four", 4, "five", 5);
}
public void testBuilder_orderEntriesByValueAfterExactSizeBuild() {
Builder<String, Integer> builder = new Builder<String, Integer>(2).put("four", 4).put("one", 1);
ImmutableMap<String, Integer> keyOrdered = builder.buildOrThrow();
ImmutableMap<String, Integer> valueOrdered =
builder.orderEntriesByValue(Ordering.natural()).buildOrThrow();
assertMapEquals(keyOrdered, "four", 4, "one", 1);
assertMapEquals(valueOrdered, "one", 1, "four", 4);
}
public void testBuilder_orderEntriesByValue_usedTwiceFails() {
ImmutableMap.Builder<String, Integer> builder =
new Builder<String, Integer>().orderEntriesByValue(Ordering.natural());
assertThrows(
IllegalStateException.class, () -> builder.orderEntriesByValue(Ordering.natural()));
}
@GwtIncompatible // we haven't implemented this
public void testBuilder_orderEntriesByValue_keepingLast() {
ImmutableMap.Builder<String, Integer> builder =
new Builder<String, Integer>()
.orderEntriesByValue(Ordering.natural())
.put("three", 3)
.put("one", 1)
.put("five", 5)
.put("four", 3)
.put("four", 5)
.put("four", 4) // this should win because it's last
.put("two", 2);
assertMapEquals(
builder.buildKeepingLast(), "one", 1, "two", 2, "three", 3, "four", 4, "five", 5);
assertThrows(IllegalArgumentException.class, () -> builder.buildOrThrow());
}
@GwtIncompatible // we haven't implemented this
public void testBuilder_orderEntriesByValueAfterExactSizeBuild_keepingLastWithoutDuplicates() {
ImmutableMap.Builder<String, Integer> builder =
new Builder<String, Integer>(3)
.orderEntriesByValue(Ordering.natural())
.put("three", 3)
.put("one", 1);
assertMapEquals(builder.buildKeepingLast(), "one", 1, "three", 3);
}
@GwtIncompatible // we haven't implemented this
public void testBuilder_orderEntriesByValue_keepingLast_builderSizeFieldPreserved() {
ImmutableMap.Builder<String, Integer> builder =
new Builder<String, Integer>()
.orderEntriesByValue(Ordering.natural())
.put("one", 1)
.put("one", 1);
assertMapEquals(builder.buildKeepingLast(), "one", 1);
assertThrows(IllegalArgumentException.class, () -> builder.buildOrThrow());
}
public void testBuilder_withImmutableEntry() {
ImmutableMap<String, Integer> map =
new Builder<String, Integer>().put(immutableEntry("one", 1)).buildOrThrow();
assertMapEquals(map, "one", 1);
}
public void testBuilder_withImmutableEntryAndNullContents() {
Builder<String, Integer> builder = new Builder<>();
assertThrows(
NullPointerException.class, () -> builder.put(immutableEntry("one", (Integer) null)));
assertThrows(NullPointerException.class, () -> builder.put(immutableEntry((String) null, 1)));
}
private static class StringHolder {
@Nullable String string;
}
public void testBuilder_withMutableEntry() {
ImmutableMap.Builder<String, Integer> builder = new Builder<>();
StringHolder holder = new StringHolder();
holder.string = "one";
Entry<String, Integer> entry =
new AbstractMapEntry<String, Integer>() {
@Override
public String getKey() {
return holder.string;
}
@Override
public Integer getValue() {
return 1;
}
};
builder.put(entry);
holder.string = "two";
assertMapEquals(builder.buildOrThrow(), "one", 1);
}
public void testBuilderPutAllWithEmptyMap() {
ImmutableMap<String, Integer> map =
new Builder<String, Integer>()
.putAll(Collections.<String, Integer>emptyMap())
.buildOrThrow();
assertEquals(Collections.<String, Integer>emptyMap(), map);
}
public void testBuilderPutAll() {
Map<String, Integer> toPut = new LinkedHashMap<>();
toPut.put("one", 1);
toPut.put("two", 2);
toPut.put("three", 3);
Map<String, Integer> moreToPut = new LinkedHashMap<>();
moreToPut.put("four", 4);
moreToPut.put("five", 5);
ImmutableMap<String, Integer> map =
new Builder<String, Integer>().putAll(toPut).putAll(moreToPut).buildOrThrow();
assertMapEquals(map, "one", 1, "two", 2, "three", 3, "four", 4, "five", 5);
}
public void testBuilderReuse() {
Builder<String, Integer> builder = new Builder<>();
ImmutableMap<String, Integer> mapOne = builder.put("one", 1).put("two", 2).buildOrThrow();
ImmutableMap<String, Integer> mapTwo = builder.put("three", 3).put("four", 4).buildOrThrow();
assertMapEquals(mapOne, "one", 1, "two", 2);
assertMapEquals(mapTwo, "one", 1, "two", 2, "three", 3, "four", 4);
}
public void testBuilderPutNullKeyFailsAtomically() {
Builder<String, Integer> builder = new Builder<>();
assertThrows(NullPointerException.class, () -> builder.put(null, 1));
builder.put("foo", 2);
assertMapEquals(builder.buildOrThrow(), "foo", 2);
}
public void testBuilderPutImmutableEntryWithNullKeyFailsAtomically() {
Builder<String, Integer> builder = new Builder<>();
assertThrows(NullPointerException.class, () -> builder.put(immutableEntry((String) null, 1)));
builder.put("foo", 2);
assertMapEquals(builder.buildOrThrow(), "foo", 2);
}
// for GWT compatibility
static class SimpleEntry<K, V> extends AbstractMapEntry<K, V> {
public K key;
public V value;
SimpleEntry(K key, V value) {
this.key = key;
this.value = value;
}
@Override
public K getKey() {
return key;
}
@Override
public V getValue() {
return value;
}
}
public void testBuilderPutMutableEntryWithNullKeyFailsAtomically() {
Builder<String, Integer> builder = new Builder<>();
assertThrows(
NullPointerException.class, () -> builder.put(new SimpleEntry<String, Integer>(null, 1)));
builder.put("foo", 2);
assertMapEquals(builder.buildOrThrow(), "foo", 2);
}
public void testBuilderPutNullKey() {
Builder<String, Integer> builder = new Builder<>();
assertThrows(NullPointerException.class, () -> builder.put(null, 1));
}
public void testBuilderPutNullValue() {
Builder<String, Integer> builder = new Builder<>();
assertThrows(NullPointerException.class, () -> builder.put("one", null));
}
public void testBuilderPutNullKeyViaPutAll() {
Builder<String, Integer> builder = new Builder<>();
assertThrows(
NullPointerException.class,
() -> builder.putAll(Collections.<String, Integer>singletonMap(null, 1)));
}
public void testBuilderPutNullValueViaPutAll() {
Builder<String, Integer> builder = new Builder<>();
assertThrows(
NullPointerException.class,
() -> builder.putAll(Collections.<String, Integer>singletonMap("one", null)));
}
public void testPuttingTheSameKeyTwiceThrowsOnBuild() {
Builder<String, Integer> builder =
new Builder<String, Integer>()
.put("one", 1)
.put("one", 1); // throwing on this line might be better but it's too late to change
assertThrows(IllegalArgumentException.class, () -> builder.buildOrThrow());
}
public void testBuildKeepingLast_allowsOverwrite() {
Builder<Integer, String> builder =
new Builder<Integer, String>()
.put(1, "un")
.put(2, "deux")
.put(70, "soixante-dix")
.put(70, "septante")
.put(70, "seventy")
.put(1, "one")
.put(2, "two");
ImmutableMap<Integer, String> map = builder.buildKeepingLast();
assertMapEquals(map, 1, "one", 2, "two", 70, "seventy");
}
public void testBuildKeepingLast_smallTableSameHash() {
String key1 = "QED";
String key2 = "R&D";
assertThat(key1.hashCode()).isEqualTo(key2.hashCode());
ImmutableMap<String, Integer> map =
ImmutableMap.<String, Integer>builder()
.put(key1, 1)
.put(key2, 2)
.put(key1, 3)
.put(key2, 4)
.buildKeepingLast();
assertMapEquals(map, key1, 3, key2, 4);
}
// The java7 branch has different code depending on whether the entry indexes fit in a byte,
// short, or int. The small table in testBuildKeepingLast_allowsOverwrite will test the byte
// case. This method tests the short case.
public void testBuildKeepingLast_shortTable() {
Builder<Integer, String> builder = ImmutableMap.builder();
Map<Integer, String> expected = new LinkedHashMap<>();
for (int i = 0; i < 1000; i++) {
// Truncate to even key, so we have put(0, "0") then put(0, "1"). Half the entries are
// duplicates.
Integer key = i & ~1;
String value = String.valueOf(i);
builder.put(key, value);
expected.put(key, value);
}
ImmutableMap<Integer, String> map = builder.buildKeepingLast();
assertThat(map).hasSize(500);
assertThat(map).containsExactlyEntriesIn(expected).inOrder();
}
// This method tests the int case.
public void testBuildKeepingLast_bigTable() {
Builder<Integer, String> builder = ImmutableMap.builder();
Map<Integer, String> expected = new LinkedHashMap<>();
for (int i = 0; i < 200_000; i++) {
// Truncate to even key, so we have put(0, "0") then put(0, "1"). Half the entries are
// duplicates.
Integer key = i & ~1;
String value = String.valueOf(i);
builder.put(key, value);
expected.put(key, value);
}
ImmutableMap<Integer, String> map = builder.buildKeepingLast();
assertThat(map).hasSize(100_000);
assertThat(map).containsExactlyEntriesIn(expected).inOrder();
}
private static class ClassWithTerribleHashCode implements Comparable<ClassWithTerribleHashCode> {
private final int value;
ClassWithTerribleHashCode(int value) {
this.value = value;
}
@Override
public int compareTo(ClassWithTerribleHashCode that) {
return Integer.compare(this.value, that.value);
}
@Override
public boolean equals(@Nullable Object x) {
return x instanceof ClassWithTerribleHashCode
&& ((ClassWithTerribleHashCode) x).value == value;
}
@Override
public int hashCode() {
return 23;
}
@Override
public String toString() {
return "ClassWithTerribleHashCode(" + value + ")";
}
}
@GwtIncompatible
public void testBuildKeepingLast_collisions() {
Map<ClassWithTerribleHashCode, Integer> expected = new LinkedHashMap<>();
Builder<ClassWithTerribleHashCode, Integer> builder = new Builder<>();
int size = 18;
for (int i = 0; i < size; i++) {
ClassWithTerribleHashCode key = new ClassWithTerribleHashCode(i);
builder.put(key, i);
builder.put(key, -i);
expected.put(key, -i);
}
ImmutableMap<ClassWithTerribleHashCode, Integer> map = builder.buildKeepingLast();
assertThat(map).containsExactlyEntriesIn(expected).inOrder();
}
  @GwtIncompatible // Pattern, Matcher
  public void testBuilder_keepingLast_thenOrThrow() {
    ImmutableMap.Builder<String, Integer> builder =
        new Builder<String, Integer>()
            .put("three", 3)
            .put("one", 1)
            .put("five", 5)
            .put("four", 3)
            .put("four", 5)
            .put("four", 4) // this should win because it's last
            .put("two", 2);
    assertMapEquals(
        builder.buildKeepingLast(), "three", 3, "one", 1, "five", 5, "four", 4, "two", 2);
    // buildKeepingLast() must not have collapsed the duplicates inside the builder, so a
    // subsequent buildOrThrow() on the same builder still rejects them.
    IllegalArgumentException expected =
        assertThrows(IllegalArgumentException.class, () -> builder.buildOrThrow());
    // We don't really care which values the exception message contains, but they should be
    // different from each other. If buildKeepingLast() collapsed duplicates, that might end up not
    // being true.
    Pattern pattern = Pattern.compile("Multiple entries with same key: four=(.*) and four=(.*)");
    assertThat(expected).hasMessageThat().matches(pattern);
    Matcher matcher = pattern.matcher(expected.getMessage());
    assertThat(matcher.matches()).isTrue();
    assertThat(matcher.group(1)).isNotEqualTo(matcher.group(2));
  }
public void testOf() {
assertMapEquals(ImmutableMap.of("one", 1), "one", 1);
assertMapEquals(ImmutableMap.of("one", 1, "two", 2), "one", 1, "two", 2);
assertMapEquals(
ImmutableMap.of("one", 1, "two", 2, "three", 3), "one", 1, "two", 2, "three", 3);
assertMapEquals(
ImmutableMap.of("one", 1, "two", 2, "three", 3, "four", 4),
"one",
1,
"two",
2,
"three",
3,
"four",
4);
assertMapEquals(
ImmutableMap.of("one", 1, "two", 2, "three", 3, "four", 4, "five", 5),
"one",
1,
"two",
2,
"three",
3,
"four",
4,
"five",
5);
assertMapEquals(
ImmutableMap.of(
"one", 1,
"two", 2,
"three", 3,
"four", 4,
"five", 5,
"six", 6),
"one",
1,
"two",
2,
"three",
3,
"four",
4,
"five",
5,
"six",
6);
assertMapEquals(
ImmutableMap.of(
"one", 1,
"two", 2,
"three", 3,
"four", 4,
"five", 5,
"six", 6,
"seven", 7),
"one",
1,
"two",
2,
"three",
3,
"four",
4,
"five",
5,
"six",
6,
"seven",
7);
assertMapEquals(
ImmutableMap.of(
"one", 1,
"two", 2,
"three", 3,
"four", 4,
"five", 5,
"six", 6,
"seven", 7,
"eight", 8),
"one",
1,
"two",
2,
"three",
3,
"four",
4,
"five",
5,
"six",
6,
"seven",
7,
"eight",
8);
assertMapEquals(
ImmutableMap.of(
"one", 1,
"two", 2,
"three", 3,
"four", 4,
"five", 5,
"six", 6,
"seven", 7,
"eight", 8,
"nine", 9),
"one",
1,
"two",
2,
"three",
3,
"four",
4,
"five",
5,
"six",
6,
"seven",
7,
"eight",
8,
"nine",
9);
assertMapEquals(
ImmutableMap.of(
"one", 1,
"two", 2,
"three", 3,
"four", 4,
"five", 5,
"six", 6,
"seven", 7,
"eight", 8,
"nine", 9,
"ten", 10),
"one",
1,
"two",
2,
"three",
3,
"four",
4,
"five",
5,
"six",
6,
"seven",
7,
"eight",
8,
"nine",
9,
"ten",
10);
}
public void testOfNullKey() {
assertThrows(NullPointerException.class, () -> ImmutableMap.of(null, 1));
assertThrows(NullPointerException.class, () -> ImmutableMap.of("one", 1, null, 2));
}
public void testOfNullValue() {
assertThrows(NullPointerException.class, () -> ImmutableMap.of("one", null));
assertThrows(NullPointerException.class, () -> ImmutableMap.of("one", 1, "two", null));
}
public void testOfWithDuplicateKey() {
assertThrows(IllegalArgumentException.class, () -> ImmutableMap.of("one", 1, "one", 1));
}
public void testCopyOfEmptyMap() {
ImmutableMap<String, Integer> copy =
ImmutableMap.copyOf(Collections.<String, Integer>emptyMap());
assertEquals(Collections.<String, Integer>emptyMap(), copy);
assertSame(copy, ImmutableMap.copyOf(copy));
}
public void testCopyOfSingletonMap() {
ImmutableMap<String, Integer> copy = ImmutableMap.copyOf(singletonMap("one", 1));
assertMapEquals(copy, "one", 1);
assertSame(copy, ImmutableMap.copyOf(copy));
}
public void testCopyOf() {
Map<String, Integer> original = new LinkedHashMap<>();
original.put("one", 1);
original.put("two", 2);
original.put("three", 3);
ImmutableMap<String, Integer> copy = ImmutableMap.copyOf(original);
assertMapEquals(copy, "one", 1, "two", 2, "three", 3);
assertSame(copy, ImmutableMap.copyOf(copy));
}
// TODO(b/172823566): Use mainline testToImmutableMap once CollectorTester is usable to java7.
public void testToImmutableMap_java7_combine() {
ImmutableMap.Builder<String, Integer> zis =
ImmutableMap.<String, Integer>builder().put("one", 1);
ImmutableMap.Builder<String, Integer> zat =
ImmutableMap.<String, Integer>builder().put("two", 2).put("three", 3);
assertMapEquals(zis.combine(zat).build(), "one", 1, "two", 2, "three", 3);
}
// TODO(b/172823566): Use mainline testToImmutableMap once CollectorTester is usable to java7.
public void testToImmutableMap_exceptionOnDuplicateKey_java7_combine() {
ImmutableMap.Builder<String, Integer> zis =
ImmutableMap.<String, Integer>builder().put("one", 1).put("two", 2);
ImmutableMap.Builder<String, Integer> zat =
ImmutableMap.<String, Integer>builder().put("two", 22).put("three", 3);
assertThrows(IllegalArgumentException.class, () -> zis.combine(zat).build());
}
public static void hashtableTestHelper(ImmutableList<Integer> sizes) {
for (int size : sizes) {
Builder<Integer, Integer> builder = ImmutableMap.builderWithExpectedSize(size);
for (int i = 0; i < size; i++) {
Integer integer = i;
builder.put(integer, integer);
}
ImmutableMap<Integer, Integer> map = builder.build();
assertEquals(size, map.size());
int entries = 0;
for (Integer key : map.keySet()) {
assertEquals(entries, key.intValue());
assertSame(key, map.get(key));
entries++;
}
assertEquals(size, entries);
}
}
public void testByteArrayHashtable() {
hashtableTestHelper(ImmutableList.of(2, 89));
}
public void testShortArrayHashtable() {
hashtableTestHelper(ImmutableList.of(90, 22937));
}
public void testIntArrayHashtable() {
hashtableTestHelper(ImmutableList.of(22938));
}
// Non-creation tests
public void testNullGet() {
ImmutableMap<String, Integer> map = ImmutableMap.of("one", 1);
assertNull(map.get(null));
}
public void testAsMultimap() {
ImmutableMap<String, Integer> map =
ImmutableMap.of("one", 1, "won", 1, "two", 2, "too", 2, "three", 3);
ImmutableSetMultimap<String, Integer> expected =
ImmutableSetMultimap.of("one", 1, "won", 1, "two", 2, "too", 2, "three", 3);
assertEquals(expected, map.asMultimap());
}
public void testAsMultimapWhenEmpty() {
ImmutableMap<String, Integer> map = ImmutableMap.of();
ImmutableSetMultimap<String, Integer> expected = ImmutableSetMultimap.of();
assertEquals(expected, map.asMultimap());
}
public void testAsMultimapCaches() {
ImmutableMap<String, Integer> map = ImmutableMap.of("one", 1);
ImmutableSetMultimap<String, Integer> multimap1 = map.asMultimap();
ImmutableSetMultimap<String, Integer> multimap2 = map.asMultimap();
assertEquals(1, multimap1.asMap().size());
assertSame(multimap1, multimap2);
}
@J2ktIncompatible
@GwtIncompatible // NullPointerTester
public void testNullPointers() {
NullPointerTester tester = new NullPointerTester();
tester.testAllPublicStaticMethods(ImmutableMap.class);
tester.testAllPublicInstanceMethods(new ImmutableMap.Builder<Object, Object>());
tester.testAllPublicInstanceMethods(ImmutableMap.of());
tester.testAllPublicInstanceMethods(ImmutableMap.of("one", 1));
tester.testAllPublicInstanceMethods(ImmutableMap.of("one", 1, "two", 2, "three", 3));
}
private static <K, V> void assertMapEquals(Map<K, V> map, Object... alternatingKeysAndValues) {
Map<Object, Object> expected = new LinkedHashMap<>();
for (int i = 0; i < alternatingKeysAndValues.length; i += 2) {
expected.put(alternatingKeysAndValues[i], alternatingKeysAndValues[i + 1]);
}
assertThat(map).containsExactlyEntriesIn(expected).inOrder();
}
private static class IntHolder implements Serializable {
private int value;
IntHolder(int value) {
this.value = value;
}
@Override
public boolean equals(@Nullable Object o) {
return (o instanceof IntHolder) && ((IntHolder) o).value == value;
}
@Override
public int hashCode() {
return value;
}
@GwtIncompatible @J2ktIncompatible private static final long serialVersionUID = 5;
}
public void testMutableValues() {
IntHolder holderA = new IntHolder(1);
IntHolder holderB = new IntHolder(2);
Map<String, IntHolder> map = ImmutableMap.of("a", holderA, "b", holderB);
holderA.value = 3;
assertTrue(map.entrySet().contains(immutableEntry("a", new IntHolder(3))));
Map<String, Integer> intMap = ImmutableMap.of("a", 3, "b", 2);
assertEquals(intMap.hashCode(), map.entrySet().hashCode());
assertEquals(intMap.hashCode(), map.hashCode());
}
@J2ktIncompatible
@GwtIncompatible // SerializableTester
public void testViewSerialization() {
Map<String, Integer> map = ImmutableMap.of("one", 1, "two", 2, "three", 3);
LenientSerializableTester.reserializeAndAssertLenient(map.entrySet());
LenientSerializableTester.reserializeAndAssertLenient(map.keySet());
Collection<Integer> reserializedValues = reserialize(map.values());
assertEquals(new ArrayList<>(map.values()), new ArrayList<>(reserializedValues));
assertTrue(reserializedValues instanceof ImmutableCollection);
}
@J2ktIncompatible
@GwtIncompatible // SerializableTester
public void testKeySetIsSerializable_regularImmutableMap() {
class NonSerializableClass {}
Map<String, NonSerializableClass> map =
RegularImmutableMap.create(1, new Object[] {"one", new NonSerializableClass()});
Set<String> set = map.keySet();
LenientSerializableTester.reserializeAndAssertLenient(set);
}
@J2ktIncompatible
@GwtIncompatible // SerializableTester
public void testValuesCollectionIsSerializable_regularImmutableMap() {
class NonSerializableClass {}
Map<NonSerializableClass, String> map =
RegularImmutableMap.create(1, new Object[] {new NonSerializableClass(), "value"});
Collection<String> collection = map.values();
LenientSerializableTester.reserializeAndAssertElementsEqual(collection);
}
// TODO: Re-enable this test after moving to new serialization format in ImmutableMap.
@J2ktIncompatible
@GwtIncompatible // SerializableTester
@SuppressWarnings("unchecked")
public void ignore_testSerializationNoDuplication_regularImmutableMap() throws Exception {
// Tests that serializing a map, its keySet, and values only writes the underlying data once.
Object[] entries = new Object[2000];
for (int i = 0; i < entries.length; i++) {
entries[i] = i;
}
ImmutableMap<Integer, Integer> map = RegularImmutableMap.create(entries.length / 2, entries);
Set<Integer> keySet = map.keySet();
Collection<Integer> values = map.values();
ByteArrayOutputStream bytes = new ByteArrayOutputStream();
ObjectOutputStream oos = new ObjectOutputStream(bytes);
oos.writeObject(map);
oos.flush();
int mapSize = bytes.size();
oos.writeObject(keySet);
oos.writeObject(values);
oos.close();
int finalSize = bytes.size();
assertThat(finalSize - mapSize).isLessThan(100);
}
  public void testEquals() {
    // Each group contains four equivalent construction paths (of, builder, ofEntries, and the
    // local map() helper); groups differ by size, by one value, or by a value rotation, so
    // equals/hashCode must agree within a group and differ across groups.
    new EqualsTester()
        .addEqualityGroup(
            ImmutableMap.of(),
            ImmutableMap.builder().buildOrThrow(),
            ImmutableMap.ofEntries(),
            map())
        .addEqualityGroup(
            ImmutableMap.of(1, 1),
            ImmutableMap.builder().put(1, 1).buildOrThrow(),
            ImmutableMap.ofEntries(entry(1, 1)),
            map(1, 1))
        .addEqualityGroup(
            ImmutableMap.of(1, 1, 2, 2),
            ImmutableMap.builder().put(1, 1).put(2, 2).buildOrThrow(),
            ImmutableMap.ofEntries(entry(1, 1), entry(2, 2)),
            map(1, 1, 2, 2))
        .addEqualityGroup(
            ImmutableMap.of(1, 1, 2, 2, 3, 3),
            ImmutableMap.builder().put(1, 1).put(2, 2).put(3, 3).buildOrThrow(),
            ImmutableMap.ofEntries(entry(1, 1), entry(2, 2), entry(3, 3)),
            map(1, 1, 2, 2, 3, 3))
        // Same keys as above, but one value differs per group (first, second, then third).
        .addEqualityGroup(
            ImmutableMap.of(1, 4, 2, 2, 3, 3),
            ImmutableMap.builder().put(1, 4).put(2, 2).put(3, 3).buildOrThrow(),
            ImmutableMap.ofEntries(entry(1, 4), entry(2, 2), entry(3, 3)),
            map(1, 4, 2, 2, 3, 3))
        .addEqualityGroup(
            ImmutableMap.of(1, 1, 2, 4, 3, 3),
            ImmutableMap.builder().put(1, 1).put(2, 4).put(3, 3).buildOrThrow(),
            ImmutableMap.ofEntries(entry(1, 1), entry(2, 4), entry(3, 3)),
            map(1, 1, 2, 4, 3, 3))
        .addEqualityGroup(
            ImmutableMap.of(1, 1, 2, 2, 3, 4),
            ImmutableMap.builder().put(1, 1).put(2, 2).put(3, 4).buildOrThrow(),
            ImmutableMap.ofEntries(entry(1, 1), entry(2, 2), entry(3, 4)),
            map(1, 1, 2, 2, 3, 4))
        // All values rotated: same key set and value set, different associations.
        .addEqualityGroup(
            ImmutableMap.of(1, 2, 2, 3, 3, 1),
            ImmutableMap.builder().put(1, 2).put(2, 3).put(3, 1).buildOrThrow(),
            ImmutableMap.ofEntries(entry(1, 2), entry(2, 3), entry(3, 1)),
            map(1, 2, 2, 3, 3, 1))
        .addEqualityGroup(
            ImmutableMap.of(1, 1, 2, 2, 3, 3, 4, 4),
            ImmutableMap.builder().put(1, 1).put(2, 2).put(3, 3).put(4, 4).buildOrThrow(),
            ImmutableMap.ofEntries(entry(1, 1), entry(2, 2), entry(3, 3), entry(4, 4)),
            map(1, 1, 2, 2, 3, 3, 4, 4))
        .addEqualityGroup(
            ImmutableMap.of(1, 1, 2, 2, 3, 3, 4, 4, 5, 5),
            ImmutableMap.builder().put(1, 1).put(2, 2).put(3, 3).put(4, 4).put(5, 5).buildOrThrow(),
            ImmutableMap.ofEntries(entry(1, 1), entry(2, 2), entry(3, 3), entry(4, 4), entry(5, 5)),
            map(1, 1, 2, 2, 3, 3, 4, 4, 5, 5))
        .testEquals();
  }
public void testOfEntriesNull() {
Entry<@Nullable Integer, @Nullable Integer> nullKey = entry(null, 23);
assertThrows(
NullPointerException.class,
() -> ImmutableMap.ofEntries((Entry<Integer, Integer>) nullKey));
Entry<@Nullable Integer, @Nullable Integer> nullValue = entry(23, null);
assertThrows(
NullPointerException.class,
() -> ImmutableMap.ofEntries((Entry<Integer, Integer>) nullValue));
}
private static <T> Map<T, T> map(T... keysAndValues) {
assertThat(keysAndValues.length % 2).isEqualTo(0);
LinkedHashMap<T, T> map = new LinkedHashMap<>();
for (int i = 0; i < keysAndValues.length; i += 2) {
T key = keysAndValues[i];
T value = keysAndValues[i + 1];
T old = map.put(key, value);
assertWithMessage("Key %s set to %s and %s", key, value, old).that(old).isNull();
}
return map;
}
  /** Returns a simple immutable map entry; both key and value may be null. */
  private static <T extends @Nullable Object> Entry<T, T> entry(T key, T value) {
    return new AbstractMap.SimpleImmutableEntry<>(key, value);
  }
public void testCopyOfMutableEntryList() {
List<Entry<String, String>> entryList =
asList(new AbstractMap.SimpleEntry<>("a", "1"), new AbstractMap.SimpleEntry<>("b", "2"));
ImmutableMap<String, String> map = ImmutableMap.copyOf(entryList);
assertThat(map).containsExactly("a", "1", "b", "2").inOrder();
entryList.get(0).setValue("3");
assertThat(map).containsExactly("a", "1", "b", "2").inOrder();
}
public void testBuilderPutAllEntryList() {
List<Entry<String, String>> entryList =
asList(new AbstractMap.SimpleEntry<>("a", "1"), new AbstractMap.SimpleEntry<>("b", "2"));
ImmutableMap<String, String> map =
ImmutableMap.<String, String>builder().putAll(entryList).buildOrThrow();
assertThat(map).containsExactly("a", "1", "b", "2").inOrder();
entryList.get(0).setValue("3");
assertThat(map).containsExactly("a", "1", "b", "2").inOrder();
}
}
|
googleapis/google-cloud-java | 37,940 | java-workflow-executions/proto-google-cloud-workflow-executions-v1beta/src/main/java/com/google/cloud/workflows/executions/v1beta/ListExecutionsRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/workflows/executions/v1beta/executions.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.workflows.executions.v1beta;
/**
*
*
* <pre>
* Request for the
* [ListExecutions][google.cloud.workflows.executions.v1beta.Executions.ListExecutions]
* method.
* </pre>
*
* Protobuf type {@code google.cloud.workflows.executions.v1beta.ListExecutionsRequest}
*/
public final class ListExecutionsRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.workflows.executions.v1beta.ListExecutionsRequest)
ListExecutionsRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListExecutionsRequest.newBuilder() to construct.
private ListExecutionsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListExecutionsRequest() {
parent_ = "";
pageToken_ = "";
view_ = 0;
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListExecutionsRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.workflows.executions.v1beta.ExecutionsProto
.internal_static_google_cloud_workflows_executions_v1beta_ListExecutionsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.workflows.executions.v1beta.ExecutionsProto
.internal_static_google_cloud_workflows_executions_v1beta_ListExecutionsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.workflows.executions.v1beta.ListExecutionsRequest.class,
com.google.cloud.workflows.executions.v1beta.ListExecutionsRequest.Builder.class);
}
public static final int PARENT_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. Name of the workflow for which the executions should be listed.
* Format: projects/{project}/locations/{location}/workflows/{workflow}
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
@java.lang.Override
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. Name of the workflow for which the executions should be listed.
* Format: projects/{project}/locations/{location}/workflows/{workflow}
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int PAGE_SIZE_FIELD_NUMBER = 2;
private int pageSize_ = 0;
/**
*
*
* <pre>
* Maximum number of executions to return per call.
* Max supported value depends on the selected Execution view: it's 10000 for
* BASIC and 100 for FULL. The default value used if the field is not
* specified is 100, regardless of the selected view. Values greater than
* the max value will be coerced down to it.
* </pre>
*
* <code>int32 page_size = 2;</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
public static final int PAGE_TOKEN_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* A page token, received from a previous `ListExecutions` call.
* Provide this to retrieve the subsequent page.
*
* When paginating, all other parameters provided to `ListExecutions` must
* match the call that provided the page token.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return The pageToken.
*/
@java.lang.Override
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* A page token, received from a previous `ListExecutions` call.
* Provide this to retrieve the subsequent page.
*
* When paginating, all other parameters provided to `ListExecutions` must
* match the call that provided the page token.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return The bytes for pageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int VIEW_FIELD_NUMBER = 4;
private int view_ = 0;
/**
*
*
* <pre>
* Optional. A view defining which fields should be filled in the returned executions.
* The API will default to the BASIC view.
* </pre>
*
* <code>
* .google.cloud.workflows.executions.v1beta.ExecutionView view = 4 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return The enum numeric value on the wire for view.
*/
@java.lang.Override
public int getViewValue() {
return view_;
}
  /**
   *
   *
   * <pre>
   * Optional. A view defining which fields should be filled in the returned executions.
   * The API will default to the BASIC view.
   * </pre>
   *
   * <code>
   * .google.cloud.workflows.executions.v1beta.ExecutionView view = 4 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return The view.
   */
  @java.lang.Override
  public com.google.cloud.workflows.executions.v1beta.ExecutionView getView() {
    // Map the raw wire value to the enum; values unknown to this proto version surface
    // as UNRECOGNIZED rather than null.
    com.google.cloud.workflows.executions.v1beta.ExecutionView result =
        com.google.cloud.workflows.executions.v1beta.ExecutionView.forNumber(view_);
    return result == null
        ? com.google.cloud.workflows.executions.v1beta.ExecutionView.UNRECOGNIZED
        : result;
  }
private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    // memoizedIsInitialized is a cached tri-state: -1 = not yet computed, 0 = false, 1 = true.
    // This proto3 message has no required fields, so the answer is always true once memoized.
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Proto3 wire format: fields equal to their default value are omitted entirely.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    if (pageSize_ != 0) {
      output.writeInt32(2, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_);
    }
    if (view_
        != com.google.cloud.workflows.executions.v1beta.ExecutionView.EXECUTION_VIEW_UNSPECIFIED
            .getNumber()) {
      output.writeEnum(4, view_);
    }
    // Re-emit any fields that were unknown when this message was parsed.
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // Size is memoized; -1 marks "not yet computed". Mirrors writeTo: default-valued
    // fields contribute nothing.
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (pageSize_ != 0) {
      size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_);
    }
    if (view_
        != com.google.cloud.workflows.executions.v1beta.ExecutionView.EXECUTION_VIEW_UNSPECIFIED
            .getNumber()) {
      size += com.google.protobuf.CodedOutputStream.computeEnumSize(4, view_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.workflows.executions.v1beta.ListExecutionsRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.workflows.executions.v1beta.ListExecutionsRequest other =
        (com.google.cloud.workflows.executions.v1beta.ListExecutionsRequest) obj;
    // Field-by-field comparison; the enum is compared by its raw wire value so
    // unrecognized values still compare correctly.
    if (!getParent().equals(other.getParent())) return false;
    if (getPageSize() != other.getPageSize()) return false;
    if (!getPageToken().equals(other.getPageToken())) return false;
    if (view_ != other.view_) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // memoizedHashCode of 0 means "not yet computed"; the mixing constants below are
    // the standard protobuf-generated scheme and must stay consistent with equals().
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
    hash = (53 * hash) + getPageSize();
    hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getPageToken().hashCode();
    hash = (37 * hash) + VIEW_FIELD_NUMBER;
    hash = (53 * hash) + view_;
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
public static com.google.cloud.workflows.executions.v1beta.ListExecutionsRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.workflows.executions.v1beta.ListExecutionsRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.workflows.executions.v1beta.ListExecutionsRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.workflows.executions.v1beta.ListExecutionsRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.workflows.executions.v1beta.ListExecutionsRequest parseFrom(
byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.workflows.executions.v1beta.ListExecutionsRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.workflows.executions.v1beta.ListExecutionsRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.workflows.executions.v1beta.ListExecutionsRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.workflows.executions.v1beta.ListExecutionsRequest
parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.workflows.executions.v1beta.ListExecutionsRequest
parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.workflows.executions.v1beta.ListExecutionsRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.workflows.executions.v1beta.ListExecutionsRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.workflows.executions.v1beta.ListExecutionsRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request for the
* [ListExecutions][google.cloud.workflows.executions.v1beta.Executions.ListExecutions]
* method.
* </pre>
*
* Protobuf type {@code google.cloud.workflows.executions.v1beta.ListExecutionsRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.workflows.executions.v1beta.ListExecutionsRequest)
com.google.cloud.workflows.executions.v1beta.ListExecutionsRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.workflows.executions.v1beta.ExecutionsProto
.internal_static_google_cloud_workflows_executions_v1beta_ListExecutionsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.workflows.executions.v1beta.ExecutionsProto
.internal_static_google_cloud_workflows_executions_v1beta_ListExecutionsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.workflows.executions.v1beta.ListExecutionsRequest.class,
com.google.cloud.workflows.executions.v1beta.ListExecutionsRequest.Builder.class);
}
// Construct using
// com.google.cloud.workflows.executions.v1beta.ListExecutionsRequest.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
    @java.lang.Override
    public Builder clear() {
      super.clear();
      // Reset the has-bits and every field to its proto3 default.
      bitField0_ = 0;
      parent_ = "";
      pageSize_ = 0;
      pageToken_ = "";
      view_ = 0;
      return this;
    }
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.workflows.executions.v1beta.ExecutionsProto
.internal_static_google_cloud_workflows_executions_v1beta_ListExecutionsRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.workflows.executions.v1beta.ListExecutionsRequest
getDefaultInstanceForType() {
return com.google.cloud.workflows.executions.v1beta.ListExecutionsRequest
.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.workflows.executions.v1beta.ListExecutionsRequest build() {
com.google.cloud.workflows.executions.v1beta.ListExecutionsRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.workflows.executions.v1beta.ListExecutionsRequest buildPartial() {
com.google.cloud.workflows.executions.v1beta.ListExecutionsRequest result =
new com.google.cloud.workflows.executions.v1beta.ListExecutionsRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
    // Copies into the result only the fields whose has-bit is set on this builder;
    // untouched fields keep the freshly-constructed message's defaults.
    private void buildPartial0(
        com.google.cloud.workflows.executions.v1beta.ListExecutionsRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.parent_ = parent_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.pageSize_ = pageSize_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.pageToken_ = pageToken_;
      }
      if (((from_bitField0_ & 0x00000008) != 0)) {
        result.view_ = view_;
      }
    }
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.workflows.executions.v1beta.ListExecutionsRequest) {
return mergeFrom(
(com.google.cloud.workflows.executions.v1beta.ListExecutionsRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
    // Merges non-default fields from another message into this builder, per proto3
    // merge semantics: scalar fields at their default in `other` are left untouched.
    public Builder mergeFrom(
        com.google.cloud.workflows.executions.v1beta.ListExecutionsRequest other) {
      if (other
          == com.google.cloud.workflows.executions.v1beta.ListExecutionsRequest
              .getDefaultInstance()) return this;
      if (!other.getParent().isEmpty()) {
        parent_ = other.parent_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (other.getPageSize() != 0) {
        setPageSize(other.getPageSize());
      }
      if (!other.getPageToken().isEmpty()) {
        pageToken_ = other.pageToken_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      if (other.view_ != 0) {
        setViewValue(other.getViewValue());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
@java.lang.Override
public final boolean isInitialized() {
return true;
}
    // Parses fields from the wire into this builder. Tag values encode
    // (field_number << 3) | wire_type: 10 = field 1 string, 16 = field 2 varint,
    // 26 = field 3 string, 32 = field 4 enum varint.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                parent_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 16:
              {
                pageSize_ = input.readInt32();
                bitField0_ |= 0x00000002;
                break;
              } // case 16
            case 26:
              {
                pageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            case 32:
              {
                view_ = input.readEnum();
                bitField0_ |= 0x00000008;
                break;
              } // case 32
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        // Notify parents of changes even when parsing aborts partway through.
        onChanged();
      } // finally
      return this;
    }
private int bitField0_;
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. Name of the workflow for which the executions should be listed.
* Format: projects/{project}/locations/{location}/workflows/{workflow}
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. Name of the workflow for which the executions should be listed.
* Format: projects/{project}/locations/{location}/workflows/{workflow}
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. Name of the workflow for which the executions should be listed.
* Format: projects/{project}/locations/{location}/workflows/{workflow}
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
public Builder setParent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Name of the workflow for which the executions should be listed.
* Format: projects/{project}/locations/{location}/workflows/{workflow}
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearParent() {
parent_ = getDefaultInstance().getParent();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Name of the workflow for which the executions should be listed.
* Format: projects/{project}/locations/{location}/workflows/{workflow}
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
*/
public Builder setParentBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private int pageSize_;
/**
*
*
* <pre>
* Maximum number of executions to return per call.
* Max supported value depends on the selected Execution view: it's 10000 for
* BASIC and 100 for FULL. The default value used if the field is not
* specified is 100, regardless of the selected view. Values greater than
* the max value will be coerced down to it.
* </pre>
*
* <code>int32 page_size = 2;</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
/**
*
*
* <pre>
* Maximum number of executions to return per call.
* Max supported value depends on the selected Execution view: it's 10000 for
* BASIC and 100 for FULL. The default value used if the field is not
* specified is 100, regardless of the selected view. Values greater than
* the max value will be coerced down to it.
* </pre>
*
* <code>int32 page_size = 2;</code>
*
* @param value The pageSize to set.
* @return This builder for chaining.
*/
public Builder setPageSize(int value) {
pageSize_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Maximum number of executions to return per call.
* Max supported value depends on the selected Execution view: it's 10000 for
* BASIC and 100 for FULL. The default value used if the field is not
* specified is 100, regardless of the selected view. Values greater than
* the max value will be coerced down to it.
* </pre>
*
* <code>int32 page_size = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearPageSize() {
bitField0_ = (bitField0_ & ~0x00000002);
pageSize_ = 0;
onChanged();
return this;
}
private java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* A page token, received from a previous `ListExecutions` call.
* Provide this to retrieve the subsequent page.
*
* When paginating, all other parameters provided to `ListExecutions` must
* match the call that provided the page token.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return The pageToken.
*/
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* A page token, received from a previous `ListExecutions` call.
* Provide this to retrieve the subsequent page.
*
* When paginating, all other parameters provided to `ListExecutions` must
* match the call that provided the page token.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return The bytes for pageToken.
*/
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* A page token, received from a previous `ListExecutions` call.
* Provide this to retrieve the subsequent page.
*
* When paginating, all other parameters provided to `ListExecutions` must
* match the call that provided the page token.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @param value The pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
pageToken_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
    /**
     * Clears the page token back to its default (empty) value.
     *
     * <code>string page_token = 3;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearPageToken() {
      pageToken_ = getDefaultInstance().getPageToken();
      bitField0_ = (bitField0_ & ~0x00000004); // drop the has-bit for page_token
      onChanged();
      return this;
    }
    /**
     * Sets the page token from raw bytes; the bytes must be valid UTF-8.
     *
     * <code>string page_token = 3;</code>
     *
     * @param value The bytes for pageToken to set.
     * @return This builder for chaining.
     */
    public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value); // proto3 string fields must be UTF-8
      pageToken_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    // Wire value of the ExecutionView enum; 0 is the proto default (unspecified).
    private int view_ = 0;

    /**
     * Optional. A view defining which fields should be filled in the returned executions.
     * The API will default to the BASIC view.
     *
     * <code>
     * .google.cloud.workflows.executions.v1beta.ExecutionView view = 4 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @return The enum numeric value on the wire for view.
     */
    @java.lang.Override
    public int getViewValue() {
      return view_;
    }
    /**
     * Sets the view by raw wire value; values unknown to this enum are preserved as-is.
     *
     * <code>
     * .google.cloud.workflows.executions.v1beta.ExecutionView view = 4 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @param value The enum numeric value on the wire for view to set.
     * @return This builder for chaining.
     */
    public Builder setViewValue(int value) {
      view_ = value;
      bitField0_ |= 0x00000008; // record that view was explicitly set
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* Optional. A view defining which fields should be filled in the returned executions.
* The API will default to the BASIC view.
* </pre>
*
* <code>
* .google.cloud.workflows.executions.v1beta.ExecutionView view = 4 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return The view.
*/
@java.lang.Override
public com.google.cloud.workflows.executions.v1beta.ExecutionView getView() {
com.google.cloud.workflows.executions.v1beta.ExecutionView result =
com.google.cloud.workflows.executions.v1beta.ExecutionView.forNumber(view_);
return result == null
? com.google.cloud.workflows.executions.v1beta.ExecutionView.UNRECOGNIZED
: result;
}
    /**
     * Sets the view from the typed enum value.
     *
     * <code>
     * .google.cloud.workflows.executions.v1beta.ExecutionView view = 4 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @param value The view to set.
     * @return This builder for chaining.
     */
    public Builder setView(com.google.cloud.workflows.executions.v1beta.ExecutionView value) {
      if (value == null) {
        throw new NullPointerException();
      }
      bitField0_ |= 0x00000008;
      view_ = value.getNumber(); // store the wire value, not the enum reference
      onChanged();
      return this;
    }
    /**
     * Clears the view back to its default (unspecified) value.
     *
     * <code>
     * .google.cloud.workflows.executions.v1beta.ExecutionView view = 4 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearView() {
      bitField0_ = (bitField0_ & ~0x00000008); // drop the has-bit for view
      view_ = 0;
      onChanged();
      return this;
    }
    // Both overrides below delegate to GeneratedMessageV3.Builder and exist only so
    // the return type is this concrete Builder (for method chaining).
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.workflows.executions.v1beta.ListExecutionsRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.workflows.executions.v1beta.ListExecutionsRequest)
private static final com.google.cloud.workflows.executions.v1beta.ListExecutionsRequest
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.workflows.executions.v1beta.ListExecutionsRequest();
}
public static com.google.cloud.workflows.executions.v1beta.ListExecutionsRequest
getDefaultInstance() {
return DEFAULT_INSTANCE;
}
  // Stateless parser shared by all callers; exposed via parser()/getParserForType().
  private static final com.google.protobuf.Parser<ListExecutionsRequest> PARSER =
      new com.google.protobuf.AbstractParser<ListExecutionsRequest>() {
        @java.lang.Override
        public ListExecutionsRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            // Attach whatever was parsed so far so callers can inspect partial data.
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O failures in the protobuf exception type callers expect.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  /** Returns the shared parser for this message type. */
  public static com.google.protobuf.Parser<ListExecutionsRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<ListExecutionsRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.workflows.executions.v1beta.ListExecutionsRequest
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 38,051 | java-video-intelligence/proto-google-cloud-video-intelligence-v1p3beta1/src/main/java/com/google/cloud/videointelligence/v1p3beta1/StreamingAnnotateVideoRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/videointelligence/v1p3beta1/video_intelligence.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.videointelligence.v1p3beta1;
/**
*
*
* <pre>
* The top-level message sent by the client for the `StreamingAnnotateVideo`
* method. Multiple `StreamingAnnotateVideoRequest` messages are sent.
* The first message must only contain a `StreamingVideoConfig` message.
* All subsequent messages must only contain `input_content` data.
* </pre>
*
* Protobuf type {@code google.cloud.videointelligence.v1p3beta1.StreamingAnnotateVideoRequest}
*/
public final class StreamingAnnotateVideoRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.videointelligence.v1p3beta1.StreamingAnnotateVideoRequest)
StreamingAnnotateVideoRequestOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use StreamingAnnotateVideoRequest.newBuilder() to construct.
  private StreamingAnnotateVideoRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // No field initialization needed: the oneof starts in the NOT_SET state.
  private StreamingAnnotateVideoRequest() {}

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new StreamingAnnotateVideoRequest();
  }
  /** Returns the proto descriptor for this message type. */
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.videointelligence.v1p3beta1.VideoIntelligenceServiceProto
        .internal_static_google_cloud_videointelligence_v1p3beta1_StreamingAnnotateVideoRequest_descriptor;
  }

  // Wires up reflective field access used by the protobuf runtime.
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.videointelligence.v1p3beta1.VideoIntelligenceServiceProto
        .internal_static_google_cloud_videointelligence_v1p3beta1_StreamingAnnotateVideoRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.videointelligence.v1p3beta1.StreamingAnnotateVideoRequest.class,
            com.google.cloud.videointelligence.v1p3beta1.StreamingAnnotateVideoRequest.Builder
                .class);
  }
  // Which member of the streaming_request oneof is set (field number; 0 = none).
  private int streamingRequestCase_ = 0;

  // Holds the value of whichever oneof member is set; its runtime type depends on the case.
  @SuppressWarnings("serial")
  private java.lang.Object streamingRequest_;

  /** Identifies which member of the {@code streaming_request} oneof is populated. */
  public enum StreamingRequestCase
      implements
          com.google.protobuf.Internal.EnumLite,
          com.google.protobuf.AbstractMessage.InternalOneOfEnum {
    VIDEO_CONFIG(1),
    INPUT_CONTENT(2),
    STREAMINGREQUEST_NOT_SET(0);
    private final int value;

    private StreamingRequestCase(int value) {
      this.value = value;
    }

    /**
     * @param value The number of the enum to look for.
     * @return The enum associated with the given number.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static StreamingRequestCase valueOf(int value) {
      return forNumber(value);
    }

    /** Maps a oneof field number to its case constant, or null if unknown. */
    public static StreamingRequestCase forNumber(int value) {
      switch (value) {
        case 1:
          return VIDEO_CONFIG;
        case 2:
          return INPUT_CONTENT;
        case 0:
          return STREAMINGREQUEST_NOT_SET;
        default:
          return null;
      }
    }

    public int getNumber() {
      return this.value;
    }
  };

  /** Returns which member of the {@code streaming_request} oneof is currently set. */
  public StreamingRequestCase getStreamingRequestCase() {
    return StreamingRequestCase.forNumber(streamingRequestCase_);
  }
  public static final int VIDEO_CONFIG_FIELD_NUMBER = 1;

  /**
   * Configuration for the stream; the first request of the stream must carry only this field.
   *
   * <code>.google.cloud.videointelligence.v1p3beta1.StreamingVideoConfig video_config = 1;</code>
   *
   * @return Whether the videoConfig field is set.
   */
  @java.lang.Override
  public boolean hasVideoConfig() {
    return streamingRequestCase_ == 1;
  }
/**
*
*
* <pre>
* Provides information to the annotator, specifing how to process the
* request. The first `AnnotateStreamingVideoRequest` message must only
* contain a `video_config` message.
* </pre>
*
* <code>.google.cloud.videointelligence.v1p3beta1.StreamingVideoConfig video_config = 1;</code>
*
* @return The videoConfig.
*/
@java.lang.Override
public com.google.cloud.videointelligence.v1p3beta1.StreamingVideoConfig getVideoConfig() {
if (streamingRequestCase_ == 1) {
return (com.google.cloud.videointelligence.v1p3beta1.StreamingVideoConfig) streamingRequest_;
}
return com.google.cloud.videointelligence.v1p3beta1.StreamingVideoConfig.getDefaultInstance();
}
/**
*
*
* <pre>
* Provides information to the annotator, specifing how to process the
* request. The first `AnnotateStreamingVideoRequest` message must only
* contain a `video_config` message.
* </pre>
*
* <code>.google.cloud.videointelligence.v1p3beta1.StreamingVideoConfig video_config = 1;</code>
*/
@java.lang.Override
public com.google.cloud.videointelligence.v1p3beta1.StreamingVideoConfigOrBuilder
getVideoConfigOrBuilder() {
if (streamingRequestCase_ == 1) {
return (com.google.cloud.videointelligence.v1p3beta1.StreamingVideoConfig) streamingRequest_;
}
return com.google.cloud.videointelligence.v1p3beta1.StreamingVideoConfig.getDefaultInstance();
}
  public static final int INPUT_CONTENT_FIELD_NUMBER = 2;

  /**
   * A chunk of raw video bytes; carried by every request after the initial config message.
   * As with all bytes fields, this is a pure binary representation (not base64).
   *
   * <code>bytes input_content = 2;</code>
   *
   * @return Whether the inputContent field is set.
   */
  @java.lang.Override
  public boolean hasInputContent() {
    return streamingRequestCase_ == 2;
  }
/**
*
*
* <pre>
* The video data to be annotated. Chunks of video data are sequentially
* sent in `StreamingAnnotateVideoRequest` messages. Except the initial
* `StreamingAnnotateVideoRequest` message containing only
* `video_config`, all subsequent `AnnotateStreamingVideoRequest`
* messages must only contain `input_content` field.
* Note: as with all bytes fields, protobuffers use a pure binary
* representation (not base64).
* </pre>
*
* <code>bytes input_content = 2;</code>
*
* @return The inputContent.
*/
@java.lang.Override
public com.google.protobuf.ByteString getInputContent() {
if (streamingRequestCase_ == 2) {
return (com.google.protobuf.ByteString) streamingRequest_;
}
return com.google.protobuf.ByteString.EMPTY;
}
  // Memoized initialization check: -1 = not yet computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // This message declares no required fields, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  /** Serializes the populated oneof member (if any) followed by unknown fields. */
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (streamingRequestCase_ == 1) {
      output.writeMessage(
          1, (com.google.cloud.videointelligence.v1p3beta1.StreamingVideoConfig) streamingRequest_);
    }
    if (streamingRequestCase_ == 2) {
      output.writeBytes(2, (com.google.protobuf.ByteString) streamingRequest_);
    }
    getUnknownFields().writeTo(output);
  }
  /** Computes (and memoizes) the serialized byte size; -1 marks "not yet computed". */
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (streamingRequestCase_ == 1) {
      size +=
          com.google.protobuf.CodedOutputStream.computeMessageSize(
              1,
              (com.google.cloud.videointelligence.v1p3beta1.StreamingVideoConfig)
                  streamingRequest_);
    }
    if (streamingRequestCase_ == 2) {
      size +=
          com.google.protobuf.CodedOutputStream.computeBytesSize(
              2, (com.google.protobuf.ByteString) streamingRequest_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj
instanceof com.google.cloud.videointelligence.v1p3beta1.StreamingAnnotateVideoRequest)) {
return super.equals(obj);
}
com.google.cloud.videointelligence.v1p3beta1.StreamingAnnotateVideoRequest other =
(com.google.cloud.videointelligence.v1p3beta1.StreamingAnnotateVideoRequest) obj;
if (!getStreamingRequestCase().equals(other.getStreamingRequestCase())) return false;
switch (streamingRequestCase_) {
case 1:
if (!getVideoConfig().equals(other.getVideoConfig())) return false;
break;
case 2:
if (!getInputContent().equals(other.getInputContent())) return false;
break;
case 0:
default:
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
switch (streamingRequestCase_) {
case 1:
hash = (37 * hash) + VIDEO_CONFIG_FIELD_NUMBER;
hash = (53 * hash) + getVideoConfig().hashCode();
break;
case 2:
hash = (37 * hash) + INPUT_CONTENT_FIELD_NUMBER;
hash = (53 * hash) + getInputContent().hashCode();
break;
case 0:
default:
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
  // -------------------------------------------------------------------------
  // Standard static parse entry points; all delegate to the shared PARSER or
  // the GeneratedMessageV3 parse helpers.
  // -------------------------------------------------------------------------
  public static com.google.cloud.videointelligence.v1p3beta1.StreamingAnnotateVideoRequest
      parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.videointelligence.v1p3beta1.StreamingAnnotateVideoRequest
      parseFrom(
          java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.videointelligence.v1p3beta1.StreamingAnnotateVideoRequest
      parseFrom(com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.videointelligence.v1p3beta1.StreamingAnnotateVideoRequest
      parseFrom(
          com.google.protobuf.ByteString data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.videointelligence.v1p3beta1.StreamingAnnotateVideoRequest
      parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.videointelligence.v1p3beta1.StreamingAnnotateVideoRequest
      parseFrom(byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.videointelligence.v1p3beta1.StreamingAnnotateVideoRequest
      parseFrom(java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.videointelligence.v1p3beta1.StreamingAnnotateVideoRequest
      parseFrom(
          java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // The "delimited" variants read a varint length prefix before the message bytes.
  public static com.google.cloud.videointelligence.v1p3beta1.StreamingAnnotateVideoRequest
      parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.videointelligence.v1p3beta1.StreamingAnnotateVideoRequest
      parseDelimitedFrom(
          java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.videointelligence.v1p3beta1.StreamingAnnotateVideoRequest
      parseFrom(com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.videointelligence.v1p3beta1.StreamingAnnotateVideoRequest
      parseFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  /** Creates a new builder initialized to default values. */
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  /** Creates a builder pre-populated with {@code prototype}'s fields. */
  public static Builder newBuilder(
      com.google.cloud.videointelligence.v1p3beta1.StreamingAnnotateVideoRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    // The default instance yields a fresh builder; any other instance seeds it with itself.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* The top-level message sent by the client for the `StreamingAnnotateVideo`
* method. Multiple `StreamingAnnotateVideoRequest` messages are sent.
* The first message must only contain a `StreamingVideoConfig` message.
* All subsequent messages must only contain `input_content` data.
* </pre>
*
* Protobuf type {@code google.cloud.videointelligence.v1p3beta1.StreamingAnnotateVideoRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.videointelligence.v1p3beta1.StreamingAnnotateVideoRequest)
com.google.cloud.videointelligence.v1p3beta1.StreamingAnnotateVideoRequestOrBuilder {
    /** Returns the proto descriptor for this message type. */
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.videointelligence.v1p3beta1.VideoIntelligenceServiceProto
          .internal_static_google_cloud_videointelligence_v1p3beta1_StreamingAnnotateVideoRequest_descriptor;
    }

    // Wires up reflective field access used by the protobuf runtime.
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.videointelligence.v1p3beta1.VideoIntelligenceServiceProto
          .internal_static_google_cloud_videointelligence_v1p3beta1_StreamingAnnotateVideoRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.videointelligence.v1p3beta1.StreamingAnnotateVideoRequest.class,
              com.google.cloud.videointelligence.v1p3beta1.StreamingAnnotateVideoRequest.Builder
                  .class);
    }
    // Construct using
    // com.google.cloud.videointelligence.v1p3beta1.StreamingAnnotateVideoRequest.newBuilder()
    private Builder() {}

    // Invoked by the runtime when this builder is created as a child of another builder.
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    /** Resets every field (and the oneof) back to its default state. */
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (videoConfigBuilder_ != null) {
        videoConfigBuilder_.clear();
      }
      // Forget which oneof member was set.
      streamingRequestCase_ = 0;
      streamingRequest_ = null;
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.videointelligence.v1p3beta1.VideoIntelligenceServiceProto
          .internal_static_google_cloud_videointelligence_v1p3beta1_StreamingAnnotateVideoRequest_descriptor;
    }

    /** Returns the shared default instance of the message this builder produces. */
    @java.lang.Override
    public com.google.cloud.videointelligence.v1p3beta1.StreamingAnnotateVideoRequest
        getDefaultInstanceForType() {
      return com.google.cloud.videointelligence.v1p3beta1.StreamingAnnotateVideoRequest
          .getDefaultInstance();
    }
    /**
     * Builds the message, throwing if any required field is unset (this message declares
     * none, so the check always passes).
     */
    @java.lang.Override
    public com.google.cloud.videointelligence.v1p3beta1.StreamingAnnotateVideoRequest build() {
      com.google.cloud.videointelligence.v1p3beta1.StreamingAnnotateVideoRequest result =
          buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    /** Builds without the required-field check; copies scalar fields, then the oneof. */
    @java.lang.Override
    public com.google.cloud.videointelligence.v1p3beta1.StreamingAnnotateVideoRequest
        buildPartial() {
      com.google.cloud.videointelligence.v1p3beta1.StreamingAnnotateVideoRequest result =
          new com.google.cloud.videointelligence.v1p3beta1.StreamingAnnotateVideoRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      buildPartialOneofs(result);
      onBuilt();
      return result;
    }
private void buildPartial0(
com.google.cloud.videointelligence.v1p3beta1.StreamingAnnotateVideoRequest result) {
int from_bitField0_ = bitField0_;
}
    // Copies the oneof case and value into the result. When the video_config member is
    // backed by a nested builder, build it now so the result holds a finished message
    // rather than a builder.
    private void buildPartialOneofs(
        com.google.cloud.videointelligence.v1p3beta1.StreamingAnnotateVideoRequest result) {
      result.streamingRequestCase_ = streamingRequestCase_;
      result.streamingRequest_ = this.streamingRequest_;
      if (streamingRequestCase_ == 1 && videoConfigBuilder_ != null) {
        result.streamingRequest_ = videoConfigBuilder_.build();
      }
    }
    // The overrides below delegate to GeneratedMessageV3.Builder; they are regenerated
    // so the declared return type is this concrete Builder (method chaining).
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other
instanceof com.google.cloud.videointelligence.v1p3beta1.StreamingAnnotateVideoRequest) {
return mergeFrom(
(com.google.cloud.videointelligence.v1p3beta1.StreamingAnnotateVideoRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
    /** Merges {@code other} into this builder; a set oneof member in other wins. */
    public Builder mergeFrom(
        com.google.cloud.videointelligence.v1p3beta1.StreamingAnnotateVideoRequest other) {
      if (other
          == com.google.cloud.videointelligence.v1p3beta1.StreamingAnnotateVideoRequest
              .getDefaultInstance()) return this;
      switch (other.getStreamingRequestCase()) {
        case VIDEO_CONFIG:
          {
            // Sub-message members merge field-by-field.
            mergeVideoConfig(other.getVideoConfig());
            break;
          }
        case INPUT_CONTENT:
          {
            // Scalar members simply overwrite.
            setInputContent(other.getInputContent());
            break;
          }
        case STREAMINGREQUEST_NOT_SET:
          {
            break;
          }
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    /** Always true: this message declares no required fields. */
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    /**
     * Parses fields from the wire, merging them into this builder. Recognized tags:
     * 10 = field 1 (video_config, length-delimited message),
     * 18 = field 2 (input_content, length-delimited bytes).
     */
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                // Parse into the nested builder, then flip the oneof to the config case.
                input.readMessage(getVideoConfigFieldBuilder().getBuilder(), extensionRegistry);
                streamingRequestCase_ = 1;
                break;
              } // case 10
            case 18:
              {
                streamingRequest_ = input.readBytes();
                streamingRequestCase_ = 2;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Mirrors the message's oneof state while building (field number; 0 = none).
    private int streamingRequestCase_ = 0;
    private java.lang.Object streamingRequest_;

    /** Returns which member of the {@code streaming_request} oneof is currently set. */
    public StreamingRequestCase getStreamingRequestCase() {
      return StreamingRequestCase.forNumber(streamingRequestCase_);
    }

    /** Clears the oneof, leaving no member set. */
    public Builder clearStreamingRequest() {
      streamingRequestCase_ = 0;
      streamingRequest_ = null;
      onChanged();
      return this;
    }

    // Has-bits for singular fields (none on this message beyond the oneof).
    private int bitField0_;

    // Lazily created nested builder for the video_config member; non-null once
    // getVideoConfigFieldBuilder() has been called.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.videointelligence.v1p3beta1.StreamingVideoConfig,
            com.google.cloud.videointelligence.v1p3beta1.StreamingVideoConfig.Builder,
            com.google.cloud.videointelligence.v1p3beta1.StreamingVideoConfigOrBuilder>
        videoConfigBuilder_;
/**
*
*
* <pre>
* Provides information to the annotator, specifing how to process the
* request. The first `AnnotateStreamingVideoRequest` message must only
* contain a `video_config` message.
* </pre>
*
* <code>.google.cloud.videointelligence.v1p3beta1.StreamingVideoConfig video_config = 1;</code>
*
* @return Whether the videoConfig field is set.
*/
@java.lang.Override
public boolean hasVideoConfig() {
return streamingRequestCase_ == 1;
}
/**
*
*
* <pre>
* Provides information to the annotator, specifing how to process the
* request. The first `AnnotateStreamingVideoRequest` message must only
* contain a `video_config` message.
* </pre>
*
* <code>.google.cloud.videointelligence.v1p3beta1.StreamingVideoConfig video_config = 1;</code>
*
* @return The videoConfig.
*/
@java.lang.Override
public com.google.cloud.videointelligence.v1p3beta1.StreamingVideoConfig getVideoConfig() {
if (videoConfigBuilder_ == null) {
if (streamingRequestCase_ == 1) {
return (com.google.cloud.videointelligence.v1p3beta1.StreamingVideoConfig)
streamingRequest_;
}
return com.google.cloud.videointelligence.v1p3beta1.StreamingVideoConfig
.getDefaultInstance();
} else {
if (streamingRequestCase_ == 1) {
return videoConfigBuilder_.getMessage();
}
return com.google.cloud.videointelligence.v1p3beta1.StreamingVideoConfig
.getDefaultInstance();
}
}
    /**
     * Sets the stream configuration and switches the oneof to the video_config case.
     *
     * <code>.google.cloud.videointelligence.v1p3beta1.StreamingVideoConfig video_config = 1;</code>
     */
    public Builder setVideoConfig(
        com.google.cloud.videointelligence.v1p3beta1.StreamingVideoConfig value) {
      if (videoConfigBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        streamingRequest_ = value;
        onChanged();
      } else {
        videoConfigBuilder_.setMessage(value);
      }
      // Flip the case after storing the value.
      streamingRequestCase_ = 1;
      return this;
    }
    /**
     * Sets the stream configuration from a builder (built immediately) and switches the
     * oneof to the video_config case.
     *
     * <code>.google.cloud.videointelligence.v1p3beta1.StreamingVideoConfig video_config = 1;</code>
     */
    public Builder setVideoConfig(
        com.google.cloud.videointelligence.v1p3beta1.StreamingVideoConfig.Builder builderForValue) {
      if (videoConfigBuilder_ == null) {
        streamingRequest_ = builderForValue.build();
        onChanged();
      } else {
        videoConfigBuilder_.setMessage(builderForValue.build());
      }
      streamingRequestCase_ = 1;
      return this;
    }
    /**
     * Merges {@code value} into the current video_config (field-by-field) when that case
     * is already set and non-default; otherwise replaces it. Switches the oneof to the
     * video_config case either way.
     *
     * <code>.google.cloud.videointelligence.v1p3beta1.StreamingVideoConfig video_config = 1;</code>
     */
    public Builder mergeVideoConfig(
        com.google.cloud.videointelligence.v1p3beta1.StreamingVideoConfig value) {
      if (videoConfigBuilder_ == null) {
        if (streamingRequestCase_ == 1
            && streamingRequest_
                != com.google.cloud.videointelligence.v1p3beta1.StreamingVideoConfig
                    .getDefaultInstance()) {
          // Merge into a copy of the existing config via a temporary builder.
          streamingRequest_ =
              com.google.cloud.videointelligence.v1p3beta1.StreamingVideoConfig.newBuilder(
                      (com.google.cloud.videointelligence.v1p3beta1.StreamingVideoConfig)
                          streamingRequest_)
                  .mergeFrom(value)
                  .buildPartial();
        } else {
          streamingRequest_ = value;
        }
        onChanged();
      } else {
        if (streamingRequestCase_ == 1) {
          videoConfigBuilder_.mergeFrom(value);
        } else {
          videoConfigBuilder_.setMessage(value);
        }
      }
      streamingRequestCase_ = 1;
      return this;
    }
    /**
     * Clears the video_config member; the oneof becomes NOT_SET only if it currently
     * holds this case.
     *
     * <code>.google.cloud.videointelligence.v1p3beta1.StreamingVideoConfig video_config = 1;</code>
     */
    public Builder clearVideoConfig() {
      if (videoConfigBuilder_ == null) {
        if (streamingRequestCase_ == 1) {
          streamingRequestCase_ = 0;
          streamingRequest_ = null;
          onChanged();
        }
      } else {
        if (streamingRequestCase_ == 1) {
          streamingRequestCase_ = 0;
          streamingRequest_ = null;
        }
        // The nested builder's clear() notifies the parent, so no explicit onChanged().
        videoConfigBuilder_.clear();
      }
      return this;
    }
    /**
     * Returns a mutable builder for the video_config member, switching the oneof to that
     * case as a side effect (see {@code getVideoConfigFieldBuilder()}).
     *
     * <code>.google.cloud.videointelligence.v1p3beta1.StreamingVideoConfig video_config = 1;</code>
     */
    public com.google.cloud.videointelligence.v1p3beta1.StreamingVideoConfig.Builder
        getVideoConfigBuilder() {
      return getVideoConfigFieldBuilder().getBuilder();
    }
/**
*
*
* <pre>
* Provides information to the annotator, specifing how to process the
* request. The first `AnnotateStreamingVideoRequest` message must only
* contain a `video_config` message.
* </pre>
*
* <code>.google.cloud.videointelligence.v1p3beta1.StreamingVideoConfig video_config = 1;</code>
*/
@java.lang.Override
public com.google.cloud.videointelligence.v1p3beta1.StreamingVideoConfigOrBuilder
getVideoConfigOrBuilder() {
if ((streamingRequestCase_ == 1) && (videoConfigBuilder_ != null)) {
return videoConfigBuilder_.getMessageOrBuilder();
} else {
if (streamingRequestCase_ == 1) {
return (com.google.cloud.videointelligence.v1p3beta1.StreamingVideoConfig)
streamingRequest_;
}
return com.google.cloud.videointelligence.v1p3beta1.StreamingVideoConfig
.getDefaultInstance();
}
}
    /**
     * Lazily creates the nested single-field builder for video_config, seeding it with the
     * current value (or the default instance if the oneof holds a different member). Note:
     * this forces the oneof to the video_config case and notifies the parent builder.
     *
     * <code>.google.cloud.videointelligence.v1p3beta1.StreamingVideoConfig video_config = 1;</code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.videointelligence.v1p3beta1.StreamingVideoConfig,
            com.google.cloud.videointelligence.v1p3beta1.StreamingVideoConfig.Builder,
            com.google.cloud.videointelligence.v1p3beta1.StreamingVideoConfigOrBuilder>
        getVideoConfigFieldBuilder() {
      if (videoConfigBuilder_ == null) {
        if (!(streamingRequestCase_ == 1)) {
          streamingRequest_ =
              com.google.cloud.videointelligence.v1p3beta1.StreamingVideoConfig
                  .getDefaultInstance();
        }
        videoConfigBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.videointelligence.v1p3beta1.StreamingVideoConfig,
                com.google.cloud.videointelligence.v1p3beta1.StreamingVideoConfig.Builder,
                com.google.cloud.videointelligence.v1p3beta1.StreamingVideoConfigOrBuilder>(
                (com.google.cloud.videointelligence.v1p3beta1.StreamingVideoConfig)
                    streamingRequest_,
                getParentForChildren(),
                isClean());
        // Ownership moves to the nested builder; the raw field is no longer used.
        streamingRequest_ = null;
      }
      streamingRequestCase_ = 1;
      onChanged();
      return videoConfigBuilder_;
    }
    /**
     *
     *
     * <pre>
     * The video data to be annotated. Chunks of video data are sequentially
     * sent in `StreamingAnnotateVideoRequest` messages. Except the initial
     * `StreamingAnnotateVideoRequest` message containing only
     * `video_config`, all subsequent `AnnotateStreamingVideoRequest`
     * messages must only contain `input_content` field.
     * Note: as with all bytes fields, protobuffers use a pure binary
     * representation (not base64).
     * </pre>
     *
     * <code>bytes input_content = 2;</code>
     *
     * @return Whether the inputContent field is set.
     */
    public boolean hasInputContent() {
      // input_content occupies oneof case 2.
      return streamingRequestCase_ == 2;
    }
    /**
     *
     *
     * <pre>
     * The video data to be annotated. Chunks of video data are sequentially
     * sent in `StreamingAnnotateVideoRequest` messages. Except the initial
     * `StreamingAnnotateVideoRequest` message containing only
     * `video_config`, all subsequent `AnnotateStreamingVideoRequest`
     * messages must only contain `input_content` field.
     * Note: as with all bytes fields, protobuffers use a pure binary
     * representation (not base64).
     * </pre>
     *
     * <code>bytes input_content = 2;</code>
     *
     * @return The inputContent.
     */
    public com.google.protobuf.ByteString getInputContent() {
      // Returns the empty ByteString (never null) when this oneof member is
      // not the one currently set.
      if (streamingRequestCase_ == 2) {
        return (com.google.protobuf.ByteString) streamingRequest_;
      }
      return com.google.protobuf.ByteString.EMPTY;
    }
    /**
     *
     *
     * <pre>
     * The video data to be annotated. Chunks of video data are sequentially
     * sent in `StreamingAnnotateVideoRequest` messages. Except the initial
     * `StreamingAnnotateVideoRequest` message containing only
     * `video_config`, all subsequent `AnnotateStreamingVideoRequest`
     * messages must only contain `input_content` field.
     * Note: as with all bytes fields, protobuffers use a pure binary
     * representation (not base64).
     * </pre>
     *
     * <code>bytes input_content = 2;</code>
     *
     * @param value The inputContent to set.
     * @return This builder for chaining.
     */
    public Builder setInputContent(com.google.protobuf.ByteString value) {
      // Rejects null explicitly; setting this member displaces any previously
      // set video_config (same oneof).
      if (value == null) {
        throw new NullPointerException();
      }
      streamingRequestCase_ = 2;
      streamingRequest_ = value;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The video data to be annotated. Chunks of video data are sequentially
     * sent in `StreamingAnnotateVideoRequest` messages. Except the initial
     * `StreamingAnnotateVideoRequest` message containing only
     * `video_config`, all subsequent `AnnotateStreamingVideoRequest`
     * messages must only contain `input_content` field.
     * Note: as with all bytes fields, protobuffers use a pure binary
     * representation (not base64).
     * </pre>
     *
     * <code>bytes input_content = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearInputContent() {
      // No-op when a different oneof member (or none) is set.
      if (streamingRequestCase_ == 2) {
        streamingRequestCase_ = 0;
        streamingRequest_ = null;
        onChanged();
      }
      return this;
    }
    // Delegates unknown-field handling to the generated superclass.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    // Delegates unknown-field merging to the generated superclass.
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.videointelligence.v1p3beta1.StreamingAnnotateVideoRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1p3beta1.StreamingAnnotateVideoRequest)
  // Singleton default (all-fields-unset) instance, created eagerly at class load.
  private static final com.google.cloud.videointelligence.v1p3beta1.StreamingAnnotateVideoRequest
      DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE =
        new com.google.cloud.videointelligence.v1p3beta1.StreamingAnnotateVideoRequest();
  }
  // Returns the shared immutable default instance of this message.
  public static com.google.cloud.videointelligence.v1p3beta1.StreamingAnnotateVideoRequest
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Wire-format parser: builds via the Builder, attaching the partially-built
  // message to any parse exception so callers can inspect what was read.
  private static final com.google.protobuf.Parser<StreamingAnnotateVideoRequest> PARSER =
      new com.google.protobuf.AbstractParser<StreamingAnnotateVideoRequest>() {
        @java.lang.Override
        public StreamingAnnotateVideoRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  // Static accessor for the shared parser instance.
  public static com.google.protobuf.Parser<StreamingAnnotateVideoRequest> parser() {
    return PARSER;
  }
  // Instance accessor for the shared parser (required by the Message contract).
  @java.lang.Override
  public com.google.protobuf.Parser<StreamingAnnotateVideoRequest> getParserForType() {
    return PARSER;
  }
  // Instance accessor for the shared default instance.
  @java.lang.Override
  public com.google.cloud.videointelligence.v1p3beta1.StreamingAnnotateVideoRequest
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/hop | 38,007 | plugins/transforms/mongodb/src/main/java/org/apache/hop/pipeline/transforms/mongodboutput/MongoDbOutputData.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hop.pipeline.transforms.mongodboutput;
import com.mongodb.BasicDBList;
import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;
import com.mongodb.MongoException;
import com.mongodb.util.JSON;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import org.apache.commons.lang.StringUtils;
import org.apache.hop.core.exception.HopException;
import org.apache.hop.core.exception.HopValueException;
import org.apache.hop.core.logging.ILogChannel;
import org.apache.hop.core.row.IRowMeta;
import org.apache.hop.core.row.IValueMeta;
import org.apache.hop.core.row.value.ValueMetaFactory;
import org.apache.hop.core.util.Utils;
import org.apache.hop.core.variables.IVariables;
import org.apache.hop.i18n.BaseMessages;
import org.apache.hop.mongo.MongoDbException;
import org.apache.hop.mongo.metadata.MongoDbConnection;
import org.apache.hop.mongo.wrapper.MongoClientWrapper;
import org.apache.hop.mongo.wrapper.collection.MongoCollectionWrapper;
import org.apache.hop.mongo.wrapper.cursor.MongoCursorWrapper;
import org.apache.hop.pipeline.transform.BaseTransformData;
import org.apache.hop.pipeline.transform.ITransformData;
/** Data class for the MongoDbOutput transform */
public class MongoDbOutputData extends BaseTransformData implements ITransformData {
  // i18n message bundle anchor class used by BaseMessages lookups below.
  private static final Class<?> PKG = MongoDbOutputMeta.class;

  public static final int MONGO_DEFAULT_PORT = 27017;

  public static final String LOCAL_DB = "local";
  public static final String REPL_SET_COLLECTION = "system.replset";
  public static final String REPL_SET_SETTINGS = "settings";
  public static final String REPL_SET_LAST_ERROR_MODES = "getLastErrorModes";
  public static final String CONST_MONGO_DB_OUTPUT_MESSAGES_ERROR_NO_FIELD_NAME_SPECIFIED_FOR_PATH =
      "MongoDbOutput.Messages.Error.NoFieldNameSpecifiedForPath";
  // MongoDB "$push" modifier operator name.
  public static final String CONST_PUSH = "$push";

  /** Shared connection in metadata */
  public MongoDbConnection connection;

  /** Enum for the type of the top level object of the document structure */
  public enum MongoTopLevel {
    RECORD,
    ARRAY,
    INCONSISTENT;
  }

  /** The output row format */
  protected IRowMeta outputRowMeta;

  /** Main entry point to the mongo driver */
  protected MongoClientWrapper clientWrapper;

  /** Collection object for the user-specified document collection */
  protected MongoCollectionWrapper collection;

  /** Deep copy of the user-defined field path definitions (see setMongoFields). */
  protected List<MongoDbOutputMeta.MongoField> userFields;

  /**
   * Map for grouping together $set operations that involve setting complex array-based objects. Key
   * = dot path to array name; value = DBObject specifying array to set to
   */
  protected Map<String, List<MongoDbOutputMeta.MongoField>> setComplexArrays = new HashMap<>();

  /**
   * Map for grouping together $push operations that involve complex objects. Use [] to indicate the
   * start of the complex object to push. Key - dot path to the array name to push to; value -
   * DBObject specifying the complex object to push
   */
  protected Map<String, List<MongoDbOutputMeta.MongoField>> pushComplexStructures = new HashMap<>();

  /** all other modifier updates that involve primitive leaf fields */
  protected Map<String, Object[]> primitiveLeafModifiers = new LinkedHashMap<>();

  /**
   * True if the list of paths specifies an incoming Hop field that contains a JSON doc that is
   * intended to be inserted as is (i.e. not added to a field in the document structure defined by
   * the mongo paths)
   */
  protected boolean hasTopLevelJsonDocInsert = false;

  /** Creates an empty data object; state is populated by the transform at init time. */
  public MongoDbOutputData() {
    super();
  }
public static boolean scanForInsertTopLevelJSONDoc(List<MongoDbOutputMeta.MongoField> fieldDefs)
throws HopException {
int countNonMatchFields = 0;
boolean hasTopLevelJSONDocInsert = false;
for (MongoDbOutputMeta.MongoField f : fieldDefs) {
if (f.inputJson
&& !f.updateMatchField
&& StringUtils.isEmpty(f.mongoDocPath)
&& !f.useIncomingFieldNameAsMongoFieldName) {
hasTopLevelJSONDocInsert = true;
}
if (!f.updateMatchField) {
countNonMatchFields++;
}
}
// Invalid path specification would be one where there is a top level
// JSON doc to be inserted (as is) but other field paths have been defined.
// TODO we could allow exactly one top level JSON doc and then have other
// paths punch data into this document I guess (which is kind of the
// opposite of the current functionality that allows the document to be
// defined from the specified paths and then allows non top-level JSON docs
// to punched into this structure)
if (hasTopLevelJSONDocInsert && countNonMatchFields > 1) {
// TODO
throw new HopException(
"Path specifications contains a top-level document in "
+ "JSON format to be inserted as is, but there are other insert paths "
+ "defined. When a top-level JSON document is to be inserted it must be "
+ "the only non-match field defined in the path specifications");
}
return hasTopLevelJSONDocInsert;
}
/**
* Set the field paths to use for creating the document structure
*
* @param fields the field paths to use
*/
public void setMongoFields(List<MongoDbOutputMeta.MongoField> fields) {
// copy this list
userFields = new ArrayList<>();
for (MongoDbOutputMeta.MongoField f : fields) {
userFields.add(f.copy());
}
}
  /**
   * Gets the field paths to use for creating the document structure.
   *
   * @return the copied field path definitions, or null if none have been set
   */
  public List<MongoDbOutputMeta.MongoField> getMongoFields() {
    return userFields;
  }
/**
* Initialize field paths
*
* @param vars variables to use
* @throws HopException if a problem occurs
*/
public void init(IVariables vars) throws HopException {
if (userFields != null) {
for (MongoDbOutputMeta.MongoField f : userFields) {
f.init(vars);
}
}
}
  /**
   * Get the current connection or null if not connected.
   *
   * @return the client wrapper in use, or null if no connection has been set
   */
  public MongoClientWrapper getConnection() {
    return clientWrapper;
  }
  /**
   * Set the current connection.
   *
   * @param clientWrapper the connection to use (no null check is performed; null clears it)
   */
  public void setConnection(MongoClientWrapper clientWrapper) {
    this.clientWrapper = clientWrapper;
  }
  /**
   * Create a collection in the named database.
   *
   * @param db the name of the database that the collection belongs to
   * @param collectionName the name of the collection to create
   * @throws Exception if no connection has been established, or creation fails
   */
  public void createCollection(String db, String collectionName) throws Exception {
    if (clientWrapper == null) {
      throw new Exception(
          BaseMessages.getString(PKG, "MongoDbOutput.Messages.Error.NoDatabaseSet"));
    }
    clientWrapper.createCollection(db, collectionName);
  }
  /**
   * Set the collection to use.
   *
   * @param col the collection wrapper to use
   */
  public void setCollection(MongoCollectionWrapper col) {
    collection = col;
  }
  /**
   * Get the collection in use.
   *
   * @return the collection wrapper, or null if none has been set
   */
  public MongoCollectionWrapper getCollection() {
    return collection;
  }
  /**
   * Set the output row format.
   *
   * @param outM the output row format to use
   */
  public void setOutputRowMeta(IRowMeta outM) {
    outputRowMeta = outM;
  }
  /**
   * Get the output row format.
   *
   * @return the output row format, or null if none has been set
   */
  public IRowMeta getOutputRowMeta() {
    return outputRowMeta;
  }
/**
* Apply the supplied index operations to the collection. Indexes can be defined on one or more
* fields in the document. Operation is either create or drop.
*
* @param indexes a list of index operations
* @param log the logging object
* @param truncate true if the collection was truncated in the current execution - in this case
* drop operations are not necessary
* @throws com.mongodb.MongoException if something goes wrong
* @throws org.apache.hop.mongo.MongoDbException
*/
public void applyIndexes(
List<MongoDbOutputMeta.MongoIndex> indexes, ILogChannel log, boolean truncate)
throws MongoException, MongoDbException {
for (MongoDbOutputMeta.MongoIndex index : indexes) {
String[] indexParts = index.pathToFields.split(",");
BasicDBObject mongoIndex = new BasicDBObject();
for (String indexKey : indexParts) {
String[] nameAndDirection = indexKey.split(":");
int direction = 1;
if (nameAndDirection.length == 2) {
direction = Integer.parseInt(nameAndDirection[1].trim());
}
String name = nameAndDirection[0];
// strip off brackets to get actual object name if terminal object
// is an array
if (name.contains("[")) {
name = name.substring(name.indexOf('[') + 1);
}
mongoIndex.put(name, direction);
}
if (index.drop) {
if (truncate) {
log.logBasic(
BaseMessages.getString(PKG, "MongoDbOutput.Messages.TruncateBeforeInsert", index));
} else {
collection.dropIndex(mongoIndex);
}
log.logBasic(BaseMessages.getString(PKG, "MongoDbOutput.Messages.DropIndex", index));
} else {
BasicDBObject options = new BasicDBObject();
// create indexes in the background
options.put("background", true);
options.put("unique", index.unique);
options.put("sparse", index.sparse);
collection.createIndex(mongoIndex, options);
log.logBasic(BaseMessages.getString(PKG, "MongoDbOutput.Messages.CreateIndex", index));
}
}
}
/**
* Get an object that encapsulates the fields and modifier operations to use for a modifier
* update.
*
* <p>NOTE: that with modifier upserts the query conditions get created if the record does not
* exist (i.e. insert). This is different than straight non- modifier upsert where the query
* conditions just locate a matching record (if any) and then a complete object replacement is
* done. So for standard upsert it is necessary to duplicate the query condition paths in order
* for these fields to be in the object that is inserted/updated.
*
* <p>This also means that certain modifier upserts are not possible in the case of insert. E.g.
* here we are wanting to test if the field "f1" in record "rec1" in the first element of array
* "two" is set to "george". If so, then we want to push a new record to the end of the array;
* otherwise create a new document with the array containing just the new record:
*
* <p>
*
* <p>
*
* <pre>
* db.collection.update({ "one.two.0.rec1.f1" : "george"},
* { "$push" : { "one.two" : { "rec1" : { "f1" : "bob" , "f2" : "fred"}}}},
* true)
* </pre>
*
* <p>This does not work and results in a "Cannot apply $push/$pushAll modifier to non-array"
* error if there is no match (i.e. insert condition). This is because the query conditions get
* created as well as the modifier opps and, furthermore, they get created first. Since mongo
* doesn't know whether ".0." indicates an array index or a field name it defaults to creating a
* field with name "0". This means that "one.two" gets created as a record (not an array) before
* the $push operation is executed. Hence the error.
*
* @param fieldDefs the list of document field definitions
* @param inputMeta the input row format
* @param row the current incoming row
* @param vars environment variables
* @param topLevelStructure the top level structure of the document
* @return a DBObject encapsulating the update to make
* @throws HopException if a problem occurs
*/
  protected DBObject getModifierUpdateObject(
      List<MongoDbOutputMeta.MongoField> fieldDefs,
      IRowMeta inputMeta,
      Object[] row,
      IVariables vars,
      MongoTopLevel topLevelStructure)
      throws HopException, MongoDbException {

    boolean haveUpdateFields = false;
    boolean hasNonNullUpdateValues = false;
    // MongoDB "all array elements" positional operator - paths containing it
    // are passed through as-is rather than being decomposed below.
    String mongoOperatorUpdateAllArray = "$[]";

    // main update object, keyed by $ operator
    BasicDBObject updateObject = new BasicDBObject();

    // per-row scratch state: cleared on every call, so this method is not
    // thread-safe (consistent with one data object per transform copy)
    setComplexArrays.clear();
    primitiveLeafModifiers.clear();
    pushComplexStructures.clear();

    // do we need to determine whether this will be an insert or an update?
    boolean checkForMatch = false;
    for (MongoDbOutputMeta.MongoField field : fieldDefs) {
      if (!field.updateMatchField
          && (field.modifierOperationApplyPolicy.equals("Insert")
              || field.modifierOperationApplyPolicy.equals("Update"))) {
        checkForMatch = true;
        break;
      }
    }

    boolean isUpdate = false;
    if (checkForMatch) {
      // probe the collection with the match-field query: a hit means the
      // upsert will behave as an update, a miss means an insert
      DBObject query = getQueryObject(fieldDefs, inputMeta, row, vars, topLevelStructure);

      MongoCursorWrapper cursor = getCollection().find(query).limit(1);
      if (cursor.hasNext()) {
        isUpdate = true;
      }
    }

    for (MongoDbOutputMeta.MongoField field : fieldDefs) {
      // skip query match fields
      if (field.updateMatchField) {
        continue;
      }

      String modifierUpdateOpp = field.environUpdateModifierOperation;

      if (!StringUtils.isEmpty(modifierUpdateOpp) && !modifierUpdateOpp.equals("N/A")) {
        if (checkForMatch) {
          if (isUpdate && field.modifierOperationApplyPolicy.equals("Insert")) {
            continue; // don't apply this opp
          }

          if (!isUpdate && field.modifierOperationApplyPolicy.equals("Update")) {
            continue; // don't apply this opp
          }
        }

        haveUpdateFields = true;

        String incomingFieldName = field.environUpdatedFieldName;
        int index = inputMeta.indexOfValue(incomingFieldName);
        IValueMeta vm = inputMeta.getValueMeta(index);

        if (!vm.isNull(row[index]) || field.insertNull) {
          hasNonNullUpdateValues = true;

          // modifier update objects have fields using "dot" notation to reach
          // into embedded documents
          String path =
              (field.environUpdateMongoDocPath != null) ? field.environUpdateMongoDocPath : "";

          if (path.endsWith("]")
              && modifierUpdateOpp.equals(CONST_PUSH)
              && !field.useIncomingFieldNameAsMongoFieldName) {

            // strip off the brackets as push appends to the end of the named
            // array
            path = path.substring(0, path.indexOf('['));
          }

          boolean hasPath = !StringUtils.isEmpty(path);
          path +=
              ((field.useIncomingFieldNameAsMongoFieldName)
                  ? (hasPath ? "." + incomingFieldName : incomingFieldName)
                  : "");

          // check for array creation
          if (modifierUpdateOpp.equals("$set")
              && path.contains("[")
              && !path.contains(mongoOperatorUpdateAllArray)) {
            // split "a.b[2].c" into the array path "a.b" and the spec "[2].c",
            // grouped by array path so one array value can be built per array
            String arrayPath = path.substring(0, path.indexOf('['));
            String arraySpec = path.substring(path.indexOf('['));
            MongoDbOutputMeta.MongoField a = new MongoDbOutputMeta.MongoField();
            a.incomingFieldName = field.incomingFieldName;
            a.environUpdatedFieldName = field.environUpdatedFieldName;
            a.mongoDocPath = arraySpec;
            a.environUpdateMongoDocPath = arraySpec;
            // incoming field name has already been appended (if necessary)
            a.useIncomingFieldNameAsMongoFieldName = false;
            a.inputJson = field.inputJson;

            a.init(vars, false);
            List<MongoDbOutputMeta.MongoField> fds = setComplexArrays.get(arrayPath);
            if (fds == null) {
              fds = new ArrayList<>();
              setComplexArrays.put(arrayPath, fds);
            }
            fds.add(a);
          } else if (modifierUpdateOpp.equals(CONST_PUSH)
              && path.contains("[")
              && !path.contains(mongoOperatorUpdateAllArray)) {
            // we ignore any index that might have been specified as $push
            // always appends to the end of the array.
            String arrayPath = path.substring(0, path.indexOf('['));
            String structureToPush = path.substring(path.indexOf(']') + 1);

            // check to see if we're pushing a record at this point in the path
            // or another array...
            if (structureToPush.charAt(0) == '.') {
              // skip the dot
              structureToPush = structureToPush.substring(1);
            }

            MongoDbOutputMeta.MongoField a = new MongoDbOutputMeta.MongoField();
            a.incomingFieldName = field.incomingFieldName;
            a.environUpdatedFieldName = field.environUpdatedFieldName;
            a.mongoDocPath = structureToPush;
            a.environUpdateMongoDocPath = structureToPush;
            // incoming field name has already been appended (if necessary)
            a.useIncomingFieldNameAsMongoFieldName = false;
            a.inputJson = field.inputJson;

            a.init(vars, false);
            List<MongoDbOutputMeta.MongoField> fds = pushComplexStructures.get(arrayPath);
            if (fds == null) {
              fds = new ArrayList<>();
              pushComplexStructures.put(arrayPath, fds);
            }
            fds.add(a);
          } else {
            // simple leaf value: remember the operator, row index and flags so
            // it can be folded into the update object below
            Object[] params = new Object[4];
            params[0] = modifierUpdateOpp;
            params[1] = index;
            params[2] = field.inputJson;
            params[3] = field.insertNull;
            primitiveLeafModifiers.put(path, params);
          }
        }
      }
    }

    // do the array $sets
    for (String path : setComplexArrays.keySet()) {
      List<MongoDbOutputMeta.MongoField> fds = setComplexArrays.get(path);
      DBObject valueToSet = hopRowToMongo(fds, inputMeta, row, MongoTopLevel.ARRAY, false);

      DBObject fieldsToUpdateWithValues;

      if (updateObject.get("$set") != null) {
        // if we have some field(s) already associated with this type of
        // modifier
        // operation then just add to them
        fieldsToUpdateWithValues = (DBObject) updateObject.get("$set");
      } else {
        // otherwise create a new DBObject for this modifier operation
        fieldsToUpdateWithValues = new BasicDBObject();
      }

      fieldsToUpdateWithValues.put(path, valueToSet);
      updateObject.put("$set", fieldsToUpdateWithValues);
    }

    // now do the $push complex
    for (String path : pushComplexStructures.keySet()) {
      List<MongoDbOutputMeta.MongoField> fds = pushComplexStructures.get(path);

      // check our top-level structure
      MongoTopLevel topLevel = MongoTopLevel.RECORD;
      // NOTE(review): both branches assign RECORD here - the '[' case looks
      // like it was meant to be MongoTopLevel.ARRAY. Confirm before changing.
      if (fds.get(0).mongoDocPath.charAt(0) == '[') {
        topLevel = MongoTopLevel.RECORD;
      }

      DBObject valueToSet = hopRowToMongo(fds, inputMeta, row, topLevel, false);

      DBObject fieldsToUpdateWithValues = null;

      if (updateObject.get(CONST_PUSH) != null) {
        // if we have some field(s) already associated with this type of
        // modifier
        // operation then just add to them
        fieldsToUpdateWithValues = (DBObject) updateObject.get(CONST_PUSH);
      } else {
        // otherwise create a new DBObject for this modifier operation
        fieldsToUpdateWithValues = new BasicDBObject();
      }

      fieldsToUpdateWithValues.put(path, valueToSet);
      updateObject.put(CONST_PUSH, fieldsToUpdateWithValues);
    }

    // do the modifiers that involve primitive field values
    for (Map.Entry<String, Object[]> entry : primitiveLeafModifiers.entrySet()) {
      String path = entry.getKey();
      Object[] params = entry.getValue();
      String modifierUpdateOpp = params[0].toString();
      int index = (Integer) params[1];
      boolean isJSON = (Boolean) params[2];
      boolean allowNull = (Boolean) params[3];
      IValueMeta vm = inputMeta.getValueMeta(index);

      DBObject fieldsToUpdateWithValues = null;

      if (updateObject.get(modifierUpdateOpp) != null) {
        // if we have some field(s) already associated with this type of
        // modifier
        // operation then just add to them
        fieldsToUpdateWithValues = (DBObject) updateObject.get(modifierUpdateOpp);
      } else {
        // otherwise create a new DBObject for this modifier operation
        fieldsToUpdateWithValues = new BasicDBObject();
      }
      setMongoValueFromHopValue(fieldsToUpdateWithValues, path, vm, row[index], isJSON, allowNull);

      updateObject.put(modifierUpdateOpp, fieldsToUpdateWithValues);
    }

    if (!haveUpdateFields) {
      throw new HopException(
          BaseMessages.getString(
              PKG, "MongoDbOutput.Messages.Error.NoFieldsToUpdateSpecifiedForModifierOpp"));
    }

    if (!hasNonNullUpdateValues) {
      return null;
    }

    return updateObject;
  }
  /**
   * Get an object that encapsulates the query to make for an update/upsert operation
   *
   * @param fieldDefs the list of document field definitions
   * @param inputMeta the input row format
   * @param row the current incoming row
   * @param vars environment variables
   * @param topLevelStructure the top level structure of the document (not referenced by the current
   *     implementation)
   * @return a DBObject encapsulating the query, or null if every match value in this row was null
   *     and nulls are not being inserted
   * @throws HopException if something goes wrong
   */
  protected static DBObject getQueryObject(
      List<MongoDbOutputMeta.MongoField> fieldDefs,
      IRowMeta inputMeta,
      Object[] row,
      IVariables vars,
      MongoTopLevel topLevelStructure)
      throws HopException {
    BasicDBObject query = new BasicDBObject();

    boolean haveMatchFields = false;
    boolean hasNonNullMatchValues = false;

    for (MongoDbOutputMeta.MongoField field : fieldDefs) {
      if (field.updateMatchField) {
        haveMatchFields = true;
        String incomingFieldName = field.environUpdatedFieldName;
        int index = inputMeta.indexOfValue(incomingFieldName);
        IValueMeta vm = inputMeta.getValueMeta(index);

        // skip null match values unless inserting nulls is explicitly allowed
        if (vm.isNull(row[index]) && !field.insertNull) {
          continue;
        }

        hasNonNullMatchValues = true;

        if (field.inputJson
            && StringUtils.isEmpty(field.mongoDocPath)
            && !field.useIncomingFieldNameAsMongoFieldName) {
          // We have a query based on a complete incoming JSON doc -
          // i.e. no field processing necessary; this REPLACES anything built
          // from earlier match fields and stops the scan
          if (vm.isString()) {
            String val = vm.getString(row[index]);
            query = (BasicDBObject) JSON.parse(val);
          } else {
            throw new HopException(
                BaseMessages.getString(
                    PKG, "MongoDbOutput.Messages.MatchFieldJSONButIncomingValueNotString"));
          }
          break;
        }

        // query objects have fields using "dot" notation to reach into embedded
        // documents
        String path =
            (field.environUpdateMongoDocPath != null) ? field.environUpdateMongoDocPath : ""; //
        boolean hasPath = !StringUtils.isEmpty(path);
        path +=
            ((field.useIncomingFieldNameAsMongoFieldName)
                ? (hasPath
                    ? "." //
                        + incomingFieldName
                    : incomingFieldName)
                : ""); //

        // post process arrays to fit the dot notation (if not already done
        // by the user), e.g. "a[0].b" becomes "a.0.b"
        if (path.contains("[")) {
          path = path.replace("[", ".").replace("]", ""); // // //
        }

        setMongoValueFromHopValue(query, path, vm, row[index], field.inputJson, field.insertNull);
      }
    }

    if (!haveMatchFields) {
      throw new HopException(
          BaseMessages.getString(
              PKG, "MongoDbOutput.Messages.Error.NoFieldsToUpdateSpecifiedForMatch")); //
    }

    if (!hasNonNullMatchValues) {
      // indicates that we don't have anything to match with respect to
      // this row
      return null;
    }

    return query;
  }
/**
* Converts a Hop row to a Mongo Object for inserting/updating
*
* @param fieldDefs the document field definitions
* @param inputMeta the incoming row format
* @param row the current incoming row
* @param topLevelStructure the top level structure of the Mongo document
* @param hasTopLevelJSONDocInsert true if the user-specified paths include a single incoming Hop
* field value that contains a JSON document that is to be inserted as is
* @return a DBObject encapsulating the document to insert/upsert or null if there are no non-null
* incoming fields
* @throws HopException if a problem occurs
*/
  protected static DBObject hopRowToMongo(
      List<MongoDbOutputMeta.MongoField> fieldDefs,
      IRowMeta inputMeta,
      Object[] row,
      MongoTopLevel topLevelStructure,
      boolean hasTopLevelJSONDocInsert)
      throws HopException {

    // the easy case: a single incoming field holds the entire JSON document,
    // so just parse it and return it as-is
    if (hasTopLevelJSONDocInsert) {
      for (MongoDbOutputMeta.MongoField f : fieldDefs) {
        if (f.inputJson
            && StringUtils.isEmpty(f.mongoDocPath)
            && !f.useIncomingFieldNameAsMongoFieldName) {
          String incomingFieldName = f.environUpdatedFieldName;
          int index = inputMeta.indexOfValue(incomingFieldName);
          IValueMeta vm = inputMeta.getValueMeta(index);
          if (!vm.isNull(row[index])) {
            String jsonDoc = vm.getString(row[index]);
            return (DBObject) JSON.parse(jsonDoc);
          } else {
            return null;
          }
        }
      }
    }

    // otherwise build the document by walking each field's path and creating
    // records/arrays as needed; root stays null for MongoTopLevel.INCONSISTENT
    DBObject root = null;
    if (topLevelStructure == MongoTopLevel.RECORD) {
      root = new BasicDBObject();
    } else if (topLevelStructure == MongoTopLevel.ARRAY) {
      root = new BasicDBList();
    }

    boolean haveNonNullFields = false;
    for (MongoDbOutputMeta.MongoField field : fieldDefs) {
      DBObject current = root;

      field.reset();
      List<String> pathParts = field.tempPathList;
      String incomingFieldName = field.environUpdatedFieldName;
      int index = inputMeta.indexOfValue(incomingFieldName);
      IValueMeta vm = inputMeta.getValueMeta(index);

      Object lookup =
          getPathElementName(pathParts, current, field.useIncomingFieldNameAsMongoFieldName);
      do {
        // array? (an Integer lookup is an array index; the null test is
        // redundant with instanceof but harmless)
        if (lookup != null && lookup instanceof Integer) {
          BasicDBList temp = (BasicDBList) current;
          if (temp.get(lookup.toString()) == null) {
            if (pathParts.isEmpty() && !field.useIncomingFieldNameAsMongoFieldName) {
              // leaf - primitive element of the array (unless Hop field
              // value is JSON)
              boolean res =
                  setMongoValueFromHopValue(
                      temp, lookup, vm, row[index], field.inputJson, field.insertNull);
              haveNonNullFields = (haveNonNullFields || res);
            } else {
              // must be a record here (since multi-dimensional array creation
              // is handled
              // in getPathElementName())

              // need to create this record/object
              BasicDBObject newRec = new BasicDBObject();
              temp.put(lookup.toString(), newRec);
              current = newRec;

              // end of the path?
              if (pathParts.isEmpty()) {
                if (field.useIncomingFieldNameAsMongoFieldName) {
                  boolean res =
                      setMongoValueFromHopValue(
                          current,
                          incomingFieldName,
                          vm,
                          row[index],
                          field.inputJson,
                          field.insertNull);
                  haveNonNullFields = (haveNonNullFields || res);
                } else {
                  throw new HopException(
                      BaseMessages.getString(
                          PKG,
                          CONST_MONGO_DB_OUTPUT_MESSAGES_ERROR_NO_FIELD_NAME_SPECIFIED_FOR_PATH)); //
                }
              }
            }
          } else {
            // existing element of the array
            current = (DBObject) temp.get(lookup.toString());

            // no more path parts so we must be setting a field in an array
            // element
            // that is a record
            if ((Utils.isEmpty(pathParts)) && current instanceof BasicDBObject) {
              if (field.useIncomingFieldNameAsMongoFieldName) {
                boolean res =
                    setMongoValueFromHopValue(
                        current,
                        incomingFieldName,
                        vm,
                        row[index],
                        field.inputJson,
                        field.insertNull);
                haveNonNullFields = (haveNonNullFields || res);
              } else {
                throw new HopException(
                    BaseMessages.getString(
                        PKG,
                        CONST_MONGO_DB_OUTPUT_MESSAGES_ERROR_NO_FIELD_NAME_SPECIFIED_FOR_PATH)); //
              }
            }
          }
        } else {
          // record/object
          if (lookup == null && pathParts.isEmpty()) {
            if (field.useIncomingFieldNameAsMongoFieldName) {
              boolean res =
                  setMongoValueFromHopValue(
                      current,
                      incomingFieldName,
                      vm,
                      row[index],
                      field.inputJson,
                      field.insertNull);
              haveNonNullFields = (haveNonNullFields || res);
            } else {
              throw new HopException(
                  BaseMessages.getString(
                      PKG,
                      CONST_MONGO_DB_OUTPUT_MESSAGES_ERROR_NO_FIELD_NAME_SPECIFIED_FOR_PATH)); //
            }
          } else {
            if (pathParts.isEmpty()) {
              if (!field.useIncomingFieldNameAsMongoFieldName) {
                boolean res =
                    setMongoValueFromHopValue(
                        current,
                        lookup.toString(),
                        vm,
                        row[index],
                        field.inputJson,
                        field.insertNull);
                haveNonNullFields = (haveNonNullFields || res);
              } else {
                current = (DBObject) current.get(lookup.toString());
                boolean res =
                    setMongoValueFromHopValue(
                        current,
                        incomingFieldName,
                        vm,
                        row[index],
                        field.inputJson,
                        field.insertNull);
                haveNonNullFields = (haveNonNullFields || res);
              }
            } else {
              // descend one level into the existing record
              current = (DBObject) current.get(lookup.toString());
            }
          }
        }

        lookup = getPathElementName(pathParts, current, field.useIncomingFieldNameAsMongoFieldName);
      } while (lookup != null);
    }

    if (!haveNonNullFields) {
      return null; // nothing has been set!
    }

    return root;
  }
private static boolean setMongoValueFromHopValue(
DBObject mongoObject,
Object lookup,
IValueMeta hopType,
Object hopValue,
boolean hopValueIsJSON,
boolean allowNull)
throws HopValueException {
if (hopType.isNull(hopValue)) {
if (allowNull) {
mongoObject.put(lookup.toString(), null);
return true;
} else {
return false;
}
}
if (hopType.isString()) {
String val = hopType.getString(hopValue);
if (hopValueIsJSON) {
Object mongoO = JSON.parse(val);
mongoObject.put(lookup.toString(), mongoO);
} else {
mongoObject.put(lookup.toString(), val);
}
return true;
}
if (hopType.isBoolean()) {
Boolean val = hopType.getBoolean(hopValue);
mongoObject.put(lookup.toString(), val);
return true;
}
if (hopType.isInteger()) {
Long val = hopType.getInteger(hopValue);
mongoObject.put(lookup.toString(), val.longValue());
return true;
}
if (hopType.isDate()) {
Date val = hopType.getDate(hopValue);
mongoObject.put(lookup.toString(), val);
return true;
}
if (hopType.isNumber()) {
Double val = hopType.getNumber(hopValue);
mongoObject.put(lookup.toString(), val.doubleValue());
return true;
}
if (hopType.isBigNumber()) {
// use string value - user can use Hop to convert back
String val = hopType.getString(hopValue);
mongoObject.put(lookup.toString(), val);
return true;
}
if (hopType.isBinary()) {
byte[] val = hopType.getBinary(hopValue);
mongoObject.put(lookup.toString(), val);
return true;
}
// UUID
try {
int uuidTypeId = ValueMetaFactory.getIdForValueMeta("UUID");
if (hopType.getType() == uuidTypeId) {
UUID val = (UUID) hopType.convertData(hopType, hopValue);
mongoObject.put(lookup.toString(), val);
return true;
}
} catch (Exception ignore) {
// UUID plugin not present, fall through
}
if (hopType.isSerializableType()) {
throw new HopValueException(
BaseMessages.getString(
PKG, "MongoDbOutput.Messages.Error.CantStoreHopSerializableVals")); //
}
return false;
}
  /**
   * Resolves the next element of the Mongo document path, creating intermediate structures
   * (arrays/records) in {@code current} as required.
   *
   * <p>Returns an {@code Integer} array index when the next part addresses an array element, a
   * {@code String} field name otherwise, or {@code null} when the path is exhausted.
   *
   * <p>Note: {@code pathParts} is mutated by this call — fully consumed parts are removed and
   * partially consumed parts (e.g. multi-dimensional array segments) are rewritten in place.
   *
   * @param pathParts remaining path parts (mutated)
   * @param current the Mongo object/array the next part is resolved against
   * @param incomingAsFieldName whether the incoming Hop field name is used as the Mongo leaf name
   * @return the next path element, or null when the path is exhausted
   * @throws HopException if an existing Mongo field has an incompatible type
   */
  private static Object getPathElementName(
      List<String> pathParts, DBObject current, boolean incomingAsFieldName) throws HopException {
    if (Utils.isEmpty(pathParts)) {
      return null;
    }

    String part = pathParts.get(0);
    // Case 1: "[idx]..." — the current level is an array and we address element idx.
    if (part.startsWith("[")) { //
      String index = part.substring(1, part.indexOf(']')).trim();
      part = part.substring(part.indexOf(']') + 1).trim();
      if (!part.isEmpty()) {
        // any remaining characters must indicate a multi-dimensional array
        pathParts.set(0, part);

        // does this next array exist?
        if (current.get(index) == null) {
          BasicDBList newArr = new BasicDBList();
          current.put(index, newArr);
        }
      } else {
        // remove - we're finished with this part
        pathParts.remove(0);
      }
      return Integer.valueOf(index);
    } else if (part.endsWith("]")) { //
      // Case 2: "name[idx]" — a named field holding an array; return the name and re-queue
      // "[idx]" so the next call handles the index.
      String fieldName = part.substring(0, part.indexOf('['));
      Object mongoField = current.get(fieldName);
      if (mongoField == null) {
        // create this field
        BasicDBList newField = new BasicDBList();
        current.put(fieldName, newField);
      } else {
        // check type - should be an array
        if (!(mongoField instanceof BasicDBList)) {
          throw new HopException(
              BaseMessages.getString(
                  PKG, "MongoDbOutput.Messages.Error.FieldExistsButIsntAnArray", part)); //
        }
      }
      part = part.substring(part.indexOf('['));
      pathParts.set(0, part);

      return fieldName;
    }

    // otherwise this path part is a record (object) or possibly a leaf (if we
    // are not
    // using the incoming Hop field name as the mongo field name)
    Object mongoField = current.get(part);
    if (mongoField == null) {
      // Only create an intermediate record when there are more parts to consume (or when the
      // incoming field name supplies the leaf), otherwise the leaf is set by the caller.
      if (incomingAsFieldName || pathParts.size() > 1) {
        // create this field
        BasicDBObject newField = new BasicDBObject();
        current.put(part, newField);
      }
    } else {
      // check type = should be a record (object)
      if (!(mongoField instanceof BasicDBObject) && pathParts.size() > 1) {
        throw new HopException(
            BaseMessages.getString(
                PKG, "MongoDbOutput.Messages.Error.FieldExistsButIsntARecord", part)); //
      }
    }
    pathParts.remove(0);
    return part;
  }
/**
* Determines the top level structure of the outgoing Mongo document from the user-specified field
* paths. This can be either RECORD ( for a top level structure that is an object), ARRAY or
* INCONSISTENT (if the user has some field paths that start with an array and some that start
* with an object).
*
* @param fieldDefs the list of document field paths
* @param vars environment variables
* @return the top level structure
*/
protected static MongoTopLevel checkTopLevelConsistency(
List<MongoDbOutputMeta.MongoField> fieldDefs, IVariables vars) throws HopException {
if (Utils.isEmpty(fieldDefs)) {
throw new HopException(
BaseMessages.getString(PKG, "MongoDbOutput.Messages.Error.NoMongoPathsDefined"));
}
int numRecords = 0;
int numArrays = 0;
for (MongoDbOutputMeta.MongoField field : fieldDefs) {
String mongoPath = vars.resolve(field.mongoDocPath);
if (StringUtils.isEmpty(mongoPath)) {
numRecords++;
} else if (mongoPath.startsWith("[")) { //
numArrays++;
} else {
numRecords++;
}
}
if (numRecords < fieldDefs.size() && numArrays < fieldDefs.size()) {
return MongoTopLevel.INCONSISTENT;
}
if (numRecords > 0) {
return MongoTopLevel.RECORD;
}
return MongoTopLevel.ARRAY;
}
}
|
apache/hadoop | 38,123 | hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/TestAccountConfiguration.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.fs.azurebfs;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.azurebfs.contracts.exceptions.ConfigurationPropertyNotFoundException;
import org.apache.hadoop.fs.azurebfs.contracts.exceptions.InvalidConfigurationValueException;
import org.apache.hadoop.fs.azurebfs.contracts.exceptions.TokenAccessProviderException;
import org.apache.hadoop.fs.azurebfs.oauth2.AccessTokenProvider;
import org.apache.hadoop.fs.azurebfs.oauth2.ClientCredsTokenProvider;
import org.apache.hadoop.fs.azurebfs.oauth2.CustomTokenProviderAdapter;
import org.apache.hadoop.fs.azurebfs.oauth2.MsiTokenProvider;
import org.apache.hadoop.fs.azurebfs.oauth2.RefreshTokenBasedTokenProvider;
import org.apache.hadoop.fs.azurebfs.oauth2.UserPasswordTokenProvider;
import org.apache.hadoop.fs.azurebfs.oauth2.WorkloadIdentityTokenProvider;
import org.apache.hadoop.fs.azurebfs.oauth2.ClientAssertionProvider;
import org.apache.hadoop.fs.azurebfs.services.AuthType;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.test.LambdaTestUtils;
import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
import static org.apache.hadoop.fs.azurebfs.constants.ConfigurationKeys.FS_AZURE_ACCOUNT_AUTH_TYPE_PROPERTY_NAME;
import static org.apache.hadoop.fs.azurebfs.constants.ConfigurationKeys.FS_AZURE_ACCOUNT_OAUTH_CLIENT_ENDPOINT;
import static org.apache.hadoop.fs.azurebfs.constants.ConfigurationKeys.FS_AZURE_ACCOUNT_OAUTH_CLIENT_ID;
import static org.apache.hadoop.fs.azurebfs.constants.ConfigurationKeys.FS_AZURE_ACCOUNT_OAUTH_CLIENT_SECRET;
import static org.apache.hadoop.fs.azurebfs.constants.ConfigurationKeys.FS_AZURE_ACCOUNT_OAUTH_MSI_TENANT;
import static org.apache.hadoop.fs.azurebfs.constants.ConfigurationKeys.FS_AZURE_ACCOUNT_OAUTH_REFRESH_TOKEN;
import static org.apache.hadoop.fs.azurebfs.constants.ConfigurationKeys.FS_AZURE_ACCOUNT_OAUTH_USER_NAME;
import static org.apache.hadoop.fs.azurebfs.constants.ConfigurationKeys.FS_AZURE_ACCOUNT_OAUTH_USER_PASSWORD;
import static org.apache.hadoop.fs.azurebfs.constants.ConfigurationKeys.FS_AZURE_ACCOUNT_OAUTH_CLIENT_ASSERTION_PROVIDER_TYPE;
import static org.apache.hadoop.fs.azurebfs.constants.ConfigurationKeys.FS_AZURE_ACCOUNT_TOKEN_PROVIDER_TYPE_PROPERTY_NAME;
import static org.apache.hadoop.fs.azurebfs.constants.ConfigurationKeys.FS_AZURE_SAS_TOKEN_PROVIDER_TYPE;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNull;
/**
* Tests correct precedence of various configurations that might be returned.
* Configuration can be specified with the account name as a suffix to the
* config key, or without one. Account-specific values should be returned
* whenever they exist. Account-agnostic values are returned if they do not.
* Default values are returned if neither exists.
*
 * These tests are in 2 main groups: tests of methods that allow default values
 * (such as get and getPasswordString) are of one form, while tests of methods
 * that do not allow default values (all others) follow another form.
*/
public class TestAccountConfiguration {
  // Fully-qualified provider class names used to populate provider-type configuration keys.
  private static final String TEST_OAUTH_PROVIDER_CLASS_CONFIG = "org.apache.hadoop.fs.azurebfs.oauth2.ClientCredsTokenProvider";
  private static final String TEST_OAUTH_MSI_TOKEN_PROVIDER_CLASS_CONFIG = "org.apache.hadoop.fs.azurebfs.oauth2.MsiTokenProvider";
  private static final String TEST_CUSTOM_PROVIDER_CLASS_CONFIG = "org.apache.hadoop.fs.azurebfs.oauth2.RetryTestTokenProvider";
  private static final String TEST_SAS_PROVIDER_CLASS_CONFIG_1 = "org.apache.hadoop.fs.azurebfs.extensions.MockErrorSASTokenProvider";
  private static final String TEST_SAS_PROVIDER_CLASS_CONFIG_2 = "org.apache.hadoop.fs.azurebfs.extensions.MockSASTokenProvider";

  // Placeholder identity/secret values used to populate the various OAuth flows under test.
  private static final String TEST_OAUTH_ENDPOINT = "oauthEndpoint";
  private static final String TEST_CLIENT_ID = "clientId";
  private static final String TEST_CLIENT_SECRET = "clientSecret";
  private static final String TEST_USER_NAME = "userName";
  private static final String TEST_USER_PASSWORD = "userPassword";
  private static final String TEST_MSI_TENANT = "msiTenant";
  private static final String TEST_REFRESH_TOKEN = "refreshToken";
  private static final String TEST_CUSTOM_CLIENT_ASSERTION_PROVIDER = "org.apache.hadoop.fs.azurebfs.TestAccountConfiguration$MockClientAssertionProvider";
  // NOTE(review): TEST_TOKEN_FILE is not referenced in this portion of the file — confirm it is
  // still used before removing.
  private static final String TEST_TOKEN_FILE = "/tmp/test-token-file";

  // Per-provider sets of configuration keys that must all be present for token-provider creation.
  private static final List<String> CLIENT_CREDENTIAL_OAUTH_CONFIG_KEYS =
      Collections.unmodifiableList(Arrays.asList(
          FS_AZURE_ACCOUNT_OAUTH_CLIENT_ENDPOINT,
          FS_AZURE_ACCOUNT_OAUTH_CLIENT_ID,
          FS_AZURE_ACCOUNT_OAUTH_CLIENT_SECRET));

  private static final List<String> USER_PASSWORD_OAUTH_CONFIG_KEYS =
      Collections.unmodifiableList(Arrays.asList(
          FS_AZURE_ACCOUNT_OAUTH_CLIENT_ENDPOINT,
          FS_AZURE_ACCOUNT_OAUTH_USER_NAME,
          FS_AZURE_ACCOUNT_OAUTH_USER_PASSWORD));

  private static final List<String> REFRESH_TOKEN_OAUTH_CONFIG_KEYS =
      Collections.unmodifiableList(Arrays.asList(
          FS_AZURE_ACCOUNT_OAUTH_REFRESH_TOKEN,
          FS_AZURE_ACCOUNT_OAUTH_CLIENT_ID));

  private static final List<String> WORKLOAD_IDENTITY_OAUTH_CONFIG_KEYS =
      Collections.unmodifiableList(Arrays.asList(
          FS_AZURE_ACCOUNT_OAUTH_MSI_TENANT,
          FS_AZURE_ACCOUNT_OAUTH_CLIENT_ID));
@Test
public void testStringPrecedence()
throws IllegalAccessException, IOException, InvalidConfigurationValueException {
AbfsConfiguration abfsConf;
final Configuration conf = new Configuration();
final String accountName1 = "account1";
final String accountName2 = "account2";
final String accountName3 = "account3";
final String globalKey = "fs.azure.configuration";
final String accountKey1 = globalKey + "." + accountName1;
final String accountKey2 = globalKey + "." + accountName2;
final String accountKey3 = globalKey + "." + accountName3;
final String globalValue = "global";
final String accountValue1 = "one";
final String accountValue2 = "two";
conf.set(accountKey1, accountValue1);
conf.set(accountKey2, accountValue2);
conf.set(globalKey, globalValue);
abfsConf = new AbfsConfiguration(conf, accountName1);
assertEquals(abfsConf.get(accountKey1), accountValue1,
"Wrong value returned when account-specific value was requested");
assertEquals(abfsConf.get(globalKey), accountValue1,
"Account-specific value was not returned when one existed");
abfsConf = new AbfsConfiguration(conf, accountName2);
assertEquals(abfsConf.get(accountKey1), accountValue1,
"Wrong value returned when a different account-specific value was requested");
assertEquals(abfsConf.get(accountKey2), accountValue2,
"Wrong value returned when account-specific value was requested");
assertEquals(abfsConf.get(globalKey), accountValue2,
"Account-agnostic value return even though account-specific value was set");
abfsConf = new AbfsConfiguration(conf, accountName3);
assertNull(
abfsConf.get(accountKey3), "Account-specific value returned when none was set");
assertEquals(abfsConf.get(globalKey), globalValue,
"Account-agnostic value not returned when no account-specific value was set");
}
@Test
public void testPasswordPrecedence()
throws IllegalAccessException, IOException, InvalidConfigurationValueException {
AbfsConfiguration abfsConf;
final Configuration conf = new Configuration();
final String accountName1 = "account1";
final String accountName2 = "account2";
final String accountName3 = "account3";
final String globalKey = "fs.azure.password";
final String accountKey1 = globalKey + "." + accountName1;
final String accountKey2 = globalKey + "." + accountName2;
final String accountKey3 = globalKey + "." + accountName3;
final String globalValue = "global";
final String accountValue1 = "one";
final String accountValue2 = "two";
conf.set(accountKey1, accountValue1);
conf.set(accountKey2, accountValue2);
conf.set(globalKey, globalValue);
abfsConf = new AbfsConfiguration(conf, accountName1);
assertEquals(abfsConf.getPasswordString(accountKey1), accountValue1,
"Wrong value returned when account-specific value was requested");
assertEquals(abfsConf.getPasswordString(globalKey), accountValue1,
"Account-specific value was not returned when one existed");
abfsConf = new AbfsConfiguration(conf, accountName2);
assertEquals(abfsConf.getPasswordString(accountKey1), accountValue1,
"Wrong value returned when a different account-specific value was requested");
assertEquals(abfsConf.getPasswordString(accountKey2), accountValue2,
"Wrong value returned when account-specific value was requested");
assertEquals(abfsConf.getPasswordString(globalKey), accountValue2,
"Account-agnostic value return even though account-specific value was set");
abfsConf = new AbfsConfiguration(conf, accountName3);
assertNull(abfsConf.getPasswordString(accountKey3),
"Account-specific value returned when none was set");
assertEquals(abfsConf.getPasswordString(globalKey), globalValue,
"Account-agnostic value not returned when no account-specific value was set");
}
@Test
public void testBooleanPrecedence()
throws IllegalAccessException, IOException, InvalidConfigurationValueException {
final String accountName = "account";
final String globalKey = "fs.azure.bool";
final String accountKey = globalKey + "." + accountName;
final Configuration conf = new Configuration();
final AbfsConfiguration abfsConf = new AbfsConfiguration(conf, accountName);
conf.setBoolean(globalKey, false);
assertEquals(abfsConf.getBoolean(globalKey, true), false,
"Default value returned even though account-agnostic config was set");
conf.unset(globalKey);
assertEquals(abfsConf.getBoolean(globalKey, true), true,
"Default value not returned even though config was unset");
conf.setBoolean(accountKey, false);
assertEquals(abfsConf.getBoolean(globalKey, true), false,
"Default value returned even though account-specific config was set");
conf.unset(accountKey);
assertEquals(abfsConf.getBoolean(globalKey, true), true,
"Default value not returned even though config was unset");
conf.setBoolean(accountKey, true);
conf.setBoolean(globalKey, false);
assertEquals(abfsConf.getBoolean(globalKey, false), true,
"Account-agnostic or default value returned even though account-specific config was set");
}
@Test
public void testLongPrecedence()
throws IllegalAccessException, IOException, InvalidConfigurationValueException {
final String accountName = "account";
final String globalKey = "fs.azure.long";
final String accountKey = globalKey + "." + accountName;
final Configuration conf = new Configuration();
final AbfsConfiguration abfsConf = new AbfsConfiguration(conf, accountName);
conf.setLong(globalKey, 0);
assertEquals(abfsConf.getLong(globalKey, 1), 0,
"Default value returned even though account-agnostic config was set");
conf.unset(globalKey);
assertEquals(abfsConf.getLong(globalKey, 1), 1,
"Default value not returned even though config was unset");
conf.setLong(accountKey, 0);
assertEquals(abfsConf.getLong(globalKey, 1), 0,
"Default value returned even though account-specific config was set");
conf.unset(accountKey);
assertEquals(abfsConf.getLong(globalKey, 1), 1,
"Default value not returned even though config was unset");
conf.setLong(accountKey, 1);
conf.setLong(globalKey, 0);
assertEquals(abfsConf.getLong(globalKey, 0), 1,
"Account-agnostic or default value returned even though account-specific config was set");
}
/**
* Dummy type used for testing handling of enums in configuration.
*/
public enum GetEnumType {
TRUE, FALSE
}
@Test
public void testEnumPrecedence()
throws IllegalAccessException, IOException, InvalidConfigurationValueException {
final String accountName = "account";
final String globalKey = "fs.azure.enum";
final String accountKey = globalKey + "." + accountName;
final Configuration conf = new Configuration();
final AbfsConfiguration abfsConf = new AbfsConfiguration(conf, accountName);
conf.setEnum(globalKey, GetEnumType.FALSE);
assertEquals(abfsConf.getEnum(globalKey, GetEnumType.TRUE), GetEnumType.FALSE,
"Default value returned even though account-agnostic config was set");
conf.unset(globalKey);
assertEquals(abfsConf.getEnum(globalKey, GetEnumType.TRUE), GetEnumType.TRUE,
"Default value not returned even though config was unset");
conf.setEnum(accountKey, GetEnumType.FALSE);
assertEquals(abfsConf.getEnum(globalKey, GetEnumType.TRUE), GetEnumType.FALSE,
"Default value returned even though account-specific config was set");
conf.unset(accountKey);
assertEquals(abfsConf.getEnum(globalKey, GetEnumType.TRUE), GetEnumType.TRUE,
"Default value not returned even though config was unset");
conf.setEnum(accountKey, GetEnumType.TRUE);
conf.setEnum(globalKey, GetEnumType.FALSE);
assertEquals(abfsConf.getEnum(globalKey, GetEnumType.FALSE), GetEnumType.TRUE,
"Account-agnostic or default value returned even though account-specific config was set");
}
/**
* Dummy type used for testing handling of classes in configuration.
*/
interface GetClassInterface {
}
/**
* Dummy type used for testing handling of classes in configuration.
*/
private class GetClassImpl0 implements GetClassInterface {
}
/**
* Dummy type used for testing handling of classes in configuration.
*/
private class GetClassImpl1 implements GetClassInterface {
}
@Test
public void testClass()
throws IllegalAccessException, IOException, InvalidConfigurationValueException {
final String accountName = "account";
final String globalKey = "fs.azure.class";
final String accountKey = globalKey + "." + accountName;
final Configuration conf = new Configuration();
final AbfsConfiguration abfsConf = new AbfsConfiguration(conf, accountName);
final Class class0 = GetClassImpl0.class;
final Class class1 = GetClassImpl1.class;
final Class xface = GetClassInterface.class;
conf.setClass(globalKey, class0, xface);
assertEquals(abfsConf.getAccountAgnosticClass(globalKey, class1, xface), class0,
"Default value returned even though account-agnostic config was set");
conf.unset(globalKey);
assertEquals(abfsConf.getAccountAgnosticClass(globalKey, class1, xface), class1,
"Default value not returned even though config was unset");
conf.setClass(accountKey, class0, xface);
assertEquals(abfsConf.getAccountSpecificClass(globalKey, class1, xface), class0,
"Default value returned even though account-specific config was set");
conf.unset(accountKey);
assertEquals(abfsConf.getAccountSpecificClass(globalKey, class1, xface), class1,
"Default value not returned even though config was unset");
conf.setClass(accountKey, class1, xface);
conf.setClass(globalKey, class0, xface);
assertEquals(abfsConf.getAccountSpecificClass(globalKey, class0, xface), class1,
"Account-agnostic or default value returned even though account-specific config was set");
}
@Test
public void testSASProviderPrecedence()
throws IOException, IllegalAccessException {
final String accountName = "account";
final Configuration conf = new Configuration();
final AbfsConfiguration abfsConf = new AbfsConfiguration(conf, accountName);
// AccountSpecific: SAS with provider set as SAS_Provider_1
abfsConf.set(FS_AZURE_ACCOUNT_AUTH_TYPE_PROPERTY_NAME + "." + accountName,
"SAS");
abfsConf.set(FS_AZURE_SAS_TOKEN_PROVIDER_TYPE + "." + accountName,
TEST_SAS_PROVIDER_CLASS_CONFIG_1);
// Global: SAS with provider set as SAS_Provider_2
abfsConf.set(FS_AZURE_ACCOUNT_AUTH_TYPE_PROPERTY_NAME,
AuthType.SAS.toString());
abfsConf.set(FS_AZURE_SAS_TOKEN_PROVIDER_TYPE,
TEST_SAS_PROVIDER_CLASS_CONFIG_2);
Assertions.assertThat(
abfsConf.getSASTokenProvider().getClass().getName())
.describedAs(
"Account-specific SAS token provider should be in effect.")
.isEqualTo(TEST_SAS_PROVIDER_CLASS_CONFIG_1);
}
@Test
public void testAccessTokenProviderPrecedence()
throws IllegalAccessException, IOException {
final String accountName = "account";
final Configuration conf = new Configuration();
final AbfsConfiguration abfsConf = new AbfsConfiguration(conf, accountName);
// Global: Custom , AccountSpecific: OAuth
testGlobalAndAccountOAuthPrecedence(abfsConf, AuthType.Custom,
AuthType.OAuth);
// Global: OAuth , AccountSpecific: Custom
testGlobalAndAccountOAuthPrecedence(abfsConf, AuthType.OAuth,
AuthType.Custom);
// Global: (non-oAuth) SAS , AccountSpecific: Custom
testGlobalAndAccountOAuthPrecedence(abfsConf, AuthType.SAS,
AuthType.Custom);
// Global: Custom , AccountSpecific: -
testGlobalAndAccountOAuthPrecedence(abfsConf, AuthType.Custom, null);
// Global: OAuth , AccountSpecific: -
testGlobalAndAccountOAuthPrecedence(abfsConf, AuthType.OAuth, null);
// Global: - , AccountSpecific: Custom
testGlobalAndAccountOAuthPrecedence(abfsConf, null, AuthType.Custom);
// Global: - , AccountSpecific: OAuth
testGlobalAndAccountOAuthPrecedence(abfsConf, null, AuthType.OAuth);
}
@Test
public void testOAuthConfigPropNotFound() throws Throwable {
testConfigPropNotFound(CLIENT_CREDENTIAL_OAUTH_CONFIG_KEYS, ClientCredsTokenProvider.class.getName());
testConfigPropNotFound(USER_PASSWORD_OAUTH_CONFIG_KEYS, UserPasswordTokenProvider.class.getName());
testConfigPropNotFound(REFRESH_TOKEN_OAUTH_CONFIG_KEYS, RefreshTokenBasedTokenProvider.class.getName());
testConfigPropNotFound(WORKLOAD_IDENTITY_OAUTH_CONFIG_KEYS, WorkloadIdentityTokenProvider.class.getName());
}
private void testConfigPropNotFound(List<String> configKeys,
String tokenProviderClassName)throws Throwable {
final String accountName = "account";
final Configuration conf = new Configuration();
final AbfsConfiguration abfsConf = new AbfsConfiguration(conf, accountName);
for (String key : configKeys) {
setAuthConfig(abfsConf, true, AuthType.OAuth, tokenProviderClassName);
abfsConf.unset(key);
abfsConf.unset(key + "." + accountName);
testMissingConfigKey(abfsConf, key);
}
unsetAuthConfig(abfsConf, false);
unsetAuthConfig(abfsConf, true);
}
  /**
   * Asserts that building a token provider from {@code abfsConf} fails with a
   * {@link TokenAccessProviderException} whose cause is a
   * {@link ConfigurationPropertyNotFoundException} naming {@code confKey}.
   */
  private static void testMissingConfigKey(final AbfsConfiguration abfsConf,
      final String confKey) throws Throwable {
    // intercept() captures the expected TokenAccessProviderException, verifyCause() checks its
    // cause type, and assertExceptionContains() checks the message names the missing key.
    GenericTestUtils.assertExceptionContains("Configuration property "
            + confKey + " not found.",
        LambdaTestUtils.verifyCause(
            ConfigurationPropertyNotFoundException.class,
            LambdaTestUtils.intercept(TokenAccessProviderException.class,
                () -> abfsConf.getTokenProvider().getClass().getTypeName())));
  }
@Test
public void testClientAndTenantIdOptionalWhenUsingMsiTokenProvider() throws Throwable {
final String accountName = "account";
final Configuration conf = new Configuration();
final AbfsConfiguration abfsConf = new AbfsConfiguration(conf, accountName);
final String accountNameSuffix = "." + abfsConf.getAccountName();
String authKey = FS_AZURE_ACCOUNT_AUTH_TYPE_PROPERTY_NAME + accountNameSuffix;
String providerClassKey = "";
String providerClassValue = "";
providerClassKey = FS_AZURE_ACCOUNT_TOKEN_PROVIDER_TYPE_PROPERTY_NAME + accountNameSuffix;
providerClassValue = TEST_OAUTH_MSI_TOKEN_PROVIDER_CLASS_CONFIG;
abfsConf.set(authKey, AuthType.OAuth.toString());
abfsConf.set(providerClassKey, providerClassValue);
AccessTokenProvider tokenProviderTypeName = abfsConf.getTokenProvider();
// Test that we managed to instantiate an MsiTokenProvider without having to define the tenant and client ID.
// Those 2 fields are optional as they can automatically be determined by the Azure Metadata service when
// running on an Azure VM.
Assertions.assertThat(tokenProviderTypeName).describedAs("Token Provider Should be MsiTokenProvider").isInstanceOf(MsiTokenProvider.class);
}
public void testGlobalAndAccountOAuthPrecedence(AbfsConfiguration abfsConf,
AuthType globalAuthType,
AuthType accountSpecificAuthType)
throws IOException {
if (globalAuthType == null) {
unsetAuthConfig(abfsConf, false);
} else {
setAuthConfig(abfsConf, false, globalAuthType, TEST_OAUTH_PROVIDER_CLASS_CONFIG);
}
if (accountSpecificAuthType == null) {
unsetAuthConfig(abfsConf, true);
} else {
setAuthConfig(abfsConf, true, accountSpecificAuthType, TEST_OAUTH_PROVIDER_CLASS_CONFIG);
}
// If account specific AuthType is present, precedence is always for it.
AuthType expectedEffectiveAuthType;
if (accountSpecificAuthType != null) {
expectedEffectiveAuthType = accountSpecificAuthType;
} else {
expectedEffectiveAuthType = globalAuthType;
}
Class<?> expectedEffectiveTokenProviderClassType =
(expectedEffectiveAuthType == AuthType.OAuth)
? ClientCredsTokenProvider.class
: CustomTokenProviderAdapter.class;
Assertions.assertThat(
abfsConf.getTokenProvider().getClass().getTypeName())
.describedAs(
"Account-specific settings takes precendence to global"
+ " settings. In absence of Account settings, global settings "
+ "should take effect.")
.isEqualTo(expectedEffectiveTokenProviderClassType.getTypeName());
unsetAuthConfig(abfsConf, false);
unsetAuthConfig(abfsConf, true);
}
  /**
   * Configures the auth type plus its provider-specific settings, either globally or qualified
   * with the account name of {@code abfsConf}.
   *
   * @param abfsConf configuration to populate
   * @param isAccountSetting true to write account-qualified keys, false for global keys
   * @param authType the auth type to configure (OAuth, Custom or SAS)
   * @param tokenProviderClassName OAuth token provider class name (used for OAuth only)
   */
  public void setAuthConfig(AbfsConfiguration abfsConf,
      boolean isAccountSetting,
      AuthType authType, String tokenProviderClassName) {
    final String accountNameSuffix = "." + abfsConf.getAccountName();
    String authKey = FS_AZURE_ACCOUNT_AUTH_TYPE_PROPERTY_NAME
        + (isAccountSetting ? accountNameSuffix : "");
    String providerClassKey = "";
    String providerClassValue = "";

    switch (authType) {
    case OAuth:
      providerClassKey = FS_AZURE_ACCOUNT_TOKEN_PROVIDER_TYPE_PROPERTY_NAME
          + (isAccountSetting ? accountNameSuffix : "");
      providerClassValue = tokenProviderClassName;

      setOAuthConfigs(abfsConf, isAccountSetting, tokenProviderClassName);
      // NOTE(review): the three sets below duplicate what setOAuthConfigs() already writes for
      // the ClientCredsTokenProvider case, and unconditionally add endpoint/id/secret for the
      // other providers — looks redundant; confirm before removing.
      abfsConf.set(FS_AZURE_ACCOUNT_OAUTH_CLIENT_ENDPOINT
              + ((isAccountSetting) ? accountNameSuffix : ""),
          TEST_OAUTH_ENDPOINT);
      abfsConf.set(FS_AZURE_ACCOUNT_OAUTH_CLIENT_ID
              + ((isAccountSetting) ? accountNameSuffix : ""),
          TEST_CLIENT_ID);
      abfsConf.set(FS_AZURE_ACCOUNT_OAUTH_CLIENT_SECRET
              + ((isAccountSetting) ? accountNameSuffix : ""),
          TEST_CLIENT_SECRET);
      break;

    case Custom:
      providerClassKey = FS_AZURE_ACCOUNT_TOKEN_PROVIDER_TYPE_PROPERTY_NAME
          + (isAccountSetting ? accountNameSuffix : "");
      providerClassValue = TEST_CUSTOM_PROVIDER_CLASS_CONFIG;
      break;

    case SAS:
      providerClassKey = FS_AZURE_SAS_TOKEN_PROVIDER_TYPE
          + (isAccountSetting ? accountNameSuffix : "");
      providerClassValue = TEST_SAS_PROVIDER_CLASS_CONFIG_1;
      break;

    default: // set nothing
    }

    abfsConf.set(authKey, authType.toString());
    // NOTE(review): if authType falls into the default branch, providerClassKey is still "" and
    // this writes an empty-named property; no current caller passes such a type — confirm.
    abfsConf.set(providerClassKey, providerClassValue);
  }
private void setOAuthConfigs(AbfsConfiguration abfsConfig, boolean isAccountSettings, String tokenProviderClassName) {
String accountNameSuffix = isAccountSettings ? ("." + abfsConfig.getAccountName()) : "";
if (tokenProviderClassName.equals(ClientCredsTokenProvider.class.getName())) {
abfsConfig.set(FS_AZURE_ACCOUNT_OAUTH_CLIENT_ENDPOINT + accountNameSuffix,
TEST_OAUTH_ENDPOINT);
abfsConfig.set(FS_AZURE_ACCOUNT_OAUTH_CLIENT_ID + accountNameSuffix,
TEST_CLIENT_ID);
abfsConfig.set(FS_AZURE_ACCOUNT_OAUTH_CLIENT_SECRET + accountNameSuffix,
TEST_CLIENT_SECRET);
}
if (tokenProviderClassName.equals(UserPasswordTokenProvider.class.getName())) {
abfsConfig.set(FS_AZURE_ACCOUNT_OAUTH_CLIENT_ENDPOINT + accountNameSuffix,
TEST_OAUTH_ENDPOINT);
abfsConfig.set(FS_AZURE_ACCOUNT_OAUTH_USER_NAME + accountNameSuffix,
TEST_USER_NAME);
abfsConfig.set(FS_AZURE_ACCOUNT_OAUTH_USER_PASSWORD + accountNameSuffix,
TEST_USER_PASSWORD);
}
if (tokenProviderClassName.equals(MsiTokenProvider.class.getName())) {
abfsConfig.set(FS_AZURE_ACCOUNT_OAUTH_MSI_TENANT + accountNameSuffix,
TEST_MSI_TENANT);
abfsConfig.set(FS_AZURE_ACCOUNT_OAUTH_CLIENT_ID + accountNameSuffix,
TEST_CLIENT_ID);
}
if (tokenProviderClassName.equals(RefreshTokenBasedTokenProvider.class.getName())) {
abfsConfig.set(FS_AZURE_ACCOUNT_OAUTH_REFRESH_TOKEN + accountNameSuffix,
TEST_REFRESH_TOKEN);
abfsConfig.set(FS_AZURE_ACCOUNT_OAUTH_CLIENT_ID + accountNameSuffix,
TEST_CLIENT_ID);
}
if (tokenProviderClassName.equals(WorkloadIdentityTokenProvider.class.getName())) {
abfsConfig.set(FS_AZURE_ACCOUNT_OAUTH_MSI_TENANT + accountNameSuffix,
TEST_MSI_TENANT);
abfsConfig.set(FS_AZURE_ACCOUNT_OAUTH_CLIENT_ID + accountNameSuffix,
TEST_CLIENT_ID);
}
}
private void unsetAuthConfig(AbfsConfiguration abfsConf, boolean isAccountSettings) {
String accountNameSuffix =
isAccountSettings ? ("." + abfsConf.getAccountName()) : "";
abfsConf.unset(FS_AZURE_ACCOUNT_AUTH_TYPE_PROPERTY_NAME + accountNameSuffix);
abfsConf.unset(FS_AZURE_ACCOUNT_TOKEN_PROVIDER_TYPE_PROPERTY_NAME + accountNameSuffix);
abfsConf.unset(FS_AZURE_SAS_TOKEN_PROVIDER_TYPE + accountNameSuffix);
abfsConf.unset(FS_AZURE_ACCOUNT_OAUTH_CLIENT_ENDPOINT + accountNameSuffix);
abfsConf.unset(FS_AZURE_ACCOUNT_OAUTH_CLIENT_ID + accountNameSuffix);
abfsConf.unset(FS_AZURE_ACCOUNT_OAUTH_CLIENT_SECRET + accountNameSuffix);
abfsConf.unset(FS_AZURE_ACCOUNT_OAUTH_USER_NAME + accountNameSuffix);
abfsConf.unset(FS_AZURE_ACCOUNT_OAUTH_USER_PASSWORD + accountNameSuffix);
abfsConf.unset(FS_AZURE_ACCOUNT_OAUTH_MSI_TENANT + accountNameSuffix);
abfsConf.unset(FS_AZURE_ACCOUNT_OAUTH_REFRESH_TOKEN + accountNameSuffix);
abfsConf.unset(FS_AZURE_ACCOUNT_OAUTH_CLIENT_ASSERTION_PROVIDER_TYPE + accountNameSuffix);
}
  /**
   * Mock implementation of ClientAssertionProvider for testing.
   *
   * <p>Must be public and static so it can be instantiated by name via the
   * {@code fs.azure.account.oauth2.client.assertion.provider.type} configuration.
   */
  public static class MockClientAssertionProvider implements ClientAssertionProvider {
    @Override
    public void initialize(Configuration configuration, String accountName) throws IOException {
      // No initialization needed for the mock.
    }

    @Override
    public String getClientAssertion() throws IOException {
      // Fixed token so tests can check the custom provider was wired in.
      return "mock-jwt-token";
    }
  }
/**
 * Verifies that a {@link WorkloadIdentityTokenProvider} is created when a custom
 * {@code ClientAssertionProvider} implementation is configured for the account,
 * and that the account-resolved lookup surfaces the configured class name.
 */
@Test
public void testWorkloadIdentityTokenProviderWithCustomClientAssertionProvider() throws Exception {
  final String accountName = "account";
  final AbfsConfiguration abfsConf =
      new AbfsConfiguration(new Configuration(), accountName);
  final String suffix = "." + abfsConf.getAccountName();

  // OAuth auth backed by the workload-identity token provider.
  abfsConf.set(FS_AZURE_ACCOUNT_AUTH_TYPE_PROPERTY_NAME + suffix, AuthType.OAuth.toString());
  abfsConf.set(FS_AZURE_ACCOUNT_TOKEN_PROVIDER_TYPE_PROPERTY_NAME + suffix,
      WorkloadIdentityTokenProvider.class.getName());
  // Mandatory OAuth settings.
  abfsConf.set(FS_AZURE_ACCOUNT_OAUTH_MSI_TENANT + suffix, TEST_MSI_TENANT);
  abfsConf.set(FS_AZURE_ACCOUNT_OAUTH_CLIENT_ID + suffix, TEST_CLIENT_ID);
  // Point the assertion-provider knob at the custom implementation.
  abfsConf.set(FS_AZURE_ACCOUNT_OAUTH_CLIENT_ASSERTION_PROVIDER_TYPE + suffix,
      TEST_CUSTOM_CLIENT_ASSERTION_PROVIDER);

  final AccessTokenProvider provider = abfsConf.getTokenProvider();
  Assertions.assertThat(provider)
      .describedAs("Should create WorkloadIdentityTokenProvider with custom ClientAssertionProvider")
      .isInstanceOf(WorkloadIdentityTokenProvider.class);

  // The account-resolved lookup must return the configured custom provider class.
  final String resolvedProviderType =
      abfsConf.getPasswordString(FS_AZURE_ACCOUNT_OAUTH_CLIENT_ASSERTION_PROVIDER_TYPE);
  Assertions.assertThat(resolvedProviderType)
      .describedAs("Custom provider type should be configured")
      .isEqualTo(TEST_CUSTOM_CLIENT_ASSERTION_PROVIDER);
}
/**
 * Verifies that when no custom {@code ClientAssertionProvider} is configured,
 * a {@link WorkloadIdentityTokenProvider} is still created (file-based token
 * approach) and the provider-type lookup resolves to {@code null}.
 */
@Test
public void testWorkloadIdentityTokenProviderWithFileBasedFallback() throws Exception {
  final String accountName = "account";
  final AbfsConfiguration abfsConf =
      new AbfsConfiguration(new Configuration(), accountName);
  final String suffix = "." + abfsConf.getAccountName();

  // OAuth auth backed by the workload-identity token provider.
  abfsConf.set(FS_AZURE_ACCOUNT_AUTH_TYPE_PROPERTY_NAME + suffix, AuthType.OAuth.toString());
  abfsConf.set(FS_AZURE_ACCOUNT_TOKEN_PROVIDER_TYPE_PROPERTY_NAME + suffix,
      WorkloadIdentityTokenProvider.class.getName());
  // Mandatory OAuth settings.
  abfsConf.set(FS_AZURE_ACCOUNT_OAUTH_MSI_TENANT + suffix, TEST_MSI_TENANT);
  abfsConf.set(FS_AZURE_ACCOUNT_OAUTH_CLIENT_ID + suffix, TEST_CLIENT_ID);
  // Intentionally NOT setting FS_AZURE_ACCOUNT_OAUTH_CLIENT_ASSERTION_PROVIDER_TYPE:
  // the provider is expected to fall back to the file-based approach.

  final AccessTokenProvider provider = abfsConf.getTokenProvider();
  Assertions.assertThat(provider)
      .describedAs("Should create WorkloadIdentityTokenProvider with file-based fallback")
      .isInstanceOf(WorkloadIdentityTokenProvider.class);

  // With nothing configured, the lookup should resolve to null.
  final String resolvedProviderType =
      abfsConf.getPasswordString(FS_AZURE_ACCOUNT_OAUTH_CLIENT_ASSERTION_PROVIDER_TYPE);
  Assertions.assertThat(resolvedProviderType)
      .describedAs("No custom provider should be configured for file-based fallback")
      .isNull();
}
/**
 * Verifies that configuring a non-existent custom {@code ClientAssertionProvider}
 * class name makes {@link AbfsConfiguration#getTokenProvider()} fail with a
 * {@link TokenAccessProviderException} mentioning the class-load failure.
 */
@Test
public void testWorkloadIdentityTokenProviderWithInvalidCustomProvider() throws Exception {
  final String accountName = "account";
  final AbfsConfiguration abfsConf =
      new AbfsConfiguration(new Configuration(), accountName);
  final String suffix = "." + abfsConf.getAccountName();

  // OAuth auth backed by the workload-identity token provider.
  abfsConf.set(FS_AZURE_ACCOUNT_AUTH_TYPE_PROPERTY_NAME + suffix, AuthType.OAuth.toString());
  abfsConf.set(FS_AZURE_ACCOUNT_TOKEN_PROVIDER_TYPE_PROPERTY_NAME + suffix,
      WorkloadIdentityTokenProvider.class.getName());
  // Mandatory OAuth settings.
  abfsConf.set(FS_AZURE_ACCOUNT_OAUTH_MSI_TENANT + suffix, TEST_MSI_TENANT);
  abfsConf.set(FS_AZURE_ACCOUNT_OAUTH_CLIENT_ID + suffix, TEST_CLIENT_ID);
  // A class name that cannot be resolved on the classpath.
  abfsConf.set(FS_AZURE_ACCOUNT_OAUTH_CLIENT_ASSERTION_PROVIDER_TYPE + suffix,
      "non.existent.InvalidProvider");

  final TokenAccessProviderException exception = LambdaTestUtils.intercept(
      TokenAccessProviderException.class,
      () -> abfsConf.getTokenProvider());
  Assertions.assertThat(exception.getMessage())
      .describedAs("Should contain error about unable to load OAuth token provider class")
      .contains("Unable to load OAuth token provider class");
}
/**
 * Verifies that a whitespace-only custom {@code ClientAssertionProvider}
 * setting is treated as unset: the provider falls back to the file-based
 * approach, while the raw config value remains the whitespace string.
 */
@Test
public void testWorkloadIdentityTokenProviderWithEmptyCustomProviderConfig() throws Exception {
  final String accountName = "account";
  final AbfsConfiguration abfsConf =
      new AbfsConfiguration(new Configuration(), accountName);
  final String suffix = "." + abfsConf.getAccountName();

  // OAuth auth backed by the workload-identity token provider.
  abfsConf.set(FS_AZURE_ACCOUNT_AUTH_TYPE_PROPERTY_NAME + suffix, AuthType.OAuth.toString());
  abfsConf.set(FS_AZURE_ACCOUNT_TOKEN_PROVIDER_TYPE_PROPERTY_NAME + suffix,
      WorkloadIdentityTokenProvider.class.getName());
  // Mandatory OAuth settings.
  abfsConf.set(FS_AZURE_ACCOUNT_OAUTH_MSI_TENANT + suffix, TEST_MSI_TENANT);
  abfsConf.set(FS_AZURE_ACCOUNT_OAUTH_CLIENT_ID + suffix, TEST_CLIENT_ID);
  // Whitespace-only value: should be treated as "no custom provider".
  abfsConf.set(FS_AZURE_ACCOUNT_OAUTH_CLIENT_ASSERTION_PROVIDER_TYPE + suffix, " ");

  final AccessTokenProvider provider = abfsConf.getTokenProvider();
  Assertions.assertThat(provider)
      .describedAs("Should create WorkloadIdentityTokenProvider with file-based fallback when provider config is empty")
      .isInstanceOf(WorkloadIdentityTokenProvider.class);

  // The raw config value is still the whitespace string...
  final String resolvedProviderType =
      abfsConf.getPasswordString(FS_AZURE_ACCOUNT_OAUTH_CLIENT_ASSERTION_PROVIDER_TYPE);
  Assertions.assertThat(resolvedProviderType)
      .describedAs("Empty custom provider config should be present but whitespace-only")
      .isEqualTo(" ");
  // ...but trimming it yields empty, which is what triggers the fallback.
  Assertions.assertThat(resolvedProviderType.trim())
      .describedAs("Trimmed custom provider config should be empty")
      .isEmpty();
}
/**
 * Verifies configuration precedence for the custom {@code ClientAssertionProvider}
 * setting: the account-specific key wins over the account-agnostic one, while the
 * account-agnostic value stays present in the raw configuration.
 */
@Test
public void testWorkloadIdentityCustomProviderConfigPrecedence() throws Exception {
  final String accountName = "account";
  final AbfsConfiguration abfsConf =
      new AbfsConfiguration(new Configuration(), accountName);
  final String suffix = "." + abfsConf.getAccountName();

  // OAuth auth backed by the workload-identity token provider.
  abfsConf.set(FS_AZURE_ACCOUNT_AUTH_TYPE_PROPERTY_NAME + suffix, AuthType.OAuth.toString());
  abfsConf.set(FS_AZURE_ACCOUNT_TOKEN_PROVIDER_TYPE_PROPERTY_NAME + suffix,
      WorkloadIdentityTokenProvider.class.getName());
  // Mandatory OAuth settings.
  abfsConf.set(FS_AZURE_ACCOUNT_OAUTH_MSI_TENANT + suffix, TEST_MSI_TENANT);
  abfsConf.set(FS_AZURE_ACCOUNT_OAUTH_CLIENT_ID + suffix, TEST_CLIENT_ID);
  // Account-agnostic value, expected to lose to the account-specific one.
  abfsConf.set(FS_AZURE_ACCOUNT_OAUTH_CLIENT_ASSERTION_PROVIDER_TYPE, "some.other.Provider");
  // Account-specific value, expected to win.
  abfsConf.set(FS_AZURE_ACCOUNT_OAUTH_CLIENT_ASSERTION_PROVIDER_TYPE + suffix,
      TEST_CUSTOM_CLIENT_ASSERTION_PROVIDER);

  final AccessTokenProvider provider = abfsConf.getTokenProvider();
  Assertions.assertThat(provider)
      .describedAs("Should create WorkloadIdentityTokenProvider with account-specific custom provider taking precedence")
      .isInstanceOf(WorkloadIdentityTokenProvider.class);

  // Resolution prefers the account-specific key.
  final String resolvedProviderType =
      abfsConf.getPasswordString(FS_AZURE_ACCOUNT_OAUTH_CLIENT_ASSERTION_PROVIDER_TYPE);
  Assertions.assertThat(resolvedProviderType)
      .describedAs("Account-specific custom provider should take precedence")
      .isEqualTo(TEST_CUSTOM_CLIENT_ASSERTION_PROVIDER);
  // The account-agnostic value is still present in the raw configuration.
  final String rawAgnosticValue =
      abfsConf.getRawConfiguration().get(FS_AZURE_ACCOUNT_OAUTH_CLIENT_ASSERTION_PROVIDER_TYPE);
  Assertions.assertThat(rawAgnosticValue)
      .describedAs("Account-agnostic setting should exist but not be used")
      .isEqualTo("some.other.Provider");
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.