repo_id stringclasses 875
values | size int64 974 38.9k | file_path stringlengths 10 308 | content stringlengths 974 38.9k |
|---|---|---|---|
apache/incubator-hugegraph | 36,849 | hugegraph-server/hugegraph-cassandra/src/main/java/org/apache/hugegraph/backend/store/cassandra/CassandraTables.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hugegraph.backend.store.cassandra;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;
import org.apache.hugegraph.backend.BackendException;
import org.apache.hugegraph.backend.id.EdgeId;
import org.apache.hugegraph.backend.id.Id;
import org.apache.hugegraph.backend.id.IdGenerator;
import org.apache.hugegraph.backend.id.IdUtil;
import org.apache.hugegraph.backend.query.Query;
import org.apache.hugegraph.backend.store.BackendEntry;
import org.apache.hugegraph.backend.store.BackendEntryIterator;
import org.apache.hugegraph.type.HugeType;
import org.apache.hugegraph.type.define.Directions;
import org.apache.hugegraph.type.define.HugeKeys;
import org.apache.hugegraph.util.E;
import com.datastax.driver.core.DataType;
import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.Row;
import com.datastax.driver.core.Statement;
import com.datastax.driver.core.exceptions.DriverException;
import com.datastax.driver.core.querybuilder.BuiltStatement;
import com.datastax.driver.core.querybuilder.Clause;
import com.datastax.driver.core.querybuilder.Delete;
import com.datastax.driver.core.querybuilder.Insert;
import com.datastax.driver.core.querybuilder.QueryBuilder;
import com.datastax.driver.core.querybuilder.Select;
import com.datastax.driver.core.querybuilder.Update;
import com.datastax.driver.core.querybuilder.Using;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import org.apache.hugegraph.util.HashUtil;
public class CassandraTables {
public static final String LABEL_INDEX = "label_index";
public static final String NAME_INDEX = "name_index";
private static final DataType TYPE_PK = DataType.cint();
private static final DataType TYPE_SL = DataType.cint(); // VL/EL
private static final DataType TYPE_IL = DataType.cint();
private static final DataType TYPE_UD = DataType.map(DataType.text(),
DataType.text());
private static final DataType TYPE_ID = DataType.blob();
private static final DataType TYPE_PROP = DataType.blob();
private static final DataType TYPE_TTL = DataType.bigint();
private static final DataType TYPE_EXPIRED_TIME = DataType.bigint();
private static final long COMMIT_DELETE_BATCH = Query.COMMIT_BATCH;
public static class Meta extends CassandraTable {

    public static final String TABLE = HugeType.META.string();

    public Meta() {
        super(TABLE);
    }

    /**
     * Creates the meta table: a plain name -> value text mapping used
     * for backend metadata such as the store version.
     */
    @Override
    public void init(CassandraSessionPool.Session session) {
        ImmutableMap<HugeKeys, DataType> partitionKeys = ImmutableMap.of(
                HugeKeys.NAME, DataType.text());
        ImmutableMap<HugeKeys, DataType> clusteringKeys = ImmutableMap.of();
        ImmutableMap<HugeKeys, DataType> regularColumns = ImmutableMap.of(
                HugeKeys.VALUE, DataType.text());
        this.createTable(session, partitionKeys, clusteringKeys, regularColumns);
    }

    /** Upserts the backend version string under the reserved VERSION key. */
    public void writeVersion(CassandraSessionPool.Session session,
                             String version) {
        Insert stmt = QueryBuilder.insertInto(TABLE);
        stmt.value(formatKey(HugeKeys.NAME), formatKey(HugeKeys.VERSION));
        stmt.value(formatKey(HugeKeys.VALUE), version);
        session.execute(stmt);
    }

    /**
     * Reads the stored backend version.
     *
     * @return the version string, or null when no version row exists
     */
    public String readVersion(CassandraSessionPool.Session session) {
        Select stmt = QueryBuilder.select(formatKey(HugeKeys.VALUE))
                                  .from(TABLE);
        stmt.where(formatEQ(HugeKeys.NAME, formatKey(HugeKeys.VERSION)));
        Row result = session.execute(stmt).one();
        return result == null ? null : result.getString(formatKey(HugeKeys.VALUE));
    }
}
public static class Counters extends CassandraTable {

    public static final String TABLE = HugeType.COUNTER.string();

    public Counters() {
        super(TABLE);
    }

    /** Creates the counter table: one Cassandra counter column per schema type. */
    @Override
    public void init(CassandraSessionPool.Session session) {
        ImmutableMap<HugeKeys, DataType> partitionKeys = ImmutableMap.of(
                HugeKeys.SCHEMA_TYPE, DataType.text());
        ImmutableMap<HugeKeys, DataType> clusteringKeys = ImmutableMap.of();
        ImmutableMap<HugeKeys, DataType> regularColumns = ImmutableMap.of(
                HugeKeys.ID, DataType.counter());
        this.createTable(session, partitionKeys, clusteringKeys, regularColumns);
    }

    /**
     * Reads the current counter value for the given schema type.
     *
     * @return the counter value, or 0 when no counter row exists yet
     */
    public long getCounter(CassandraSessionPool.Session session,
                           HugeType type) {
        Select query = QueryBuilder.select(formatKey(HugeKeys.ID))
                                   .from(TABLE);
        query.where(formatEQ(HugeKeys.SCHEMA_TYPE, type.name()));
        Row result = session.execute(query).one();
        return result == null ? 0L : result.getLong(formatKey(HugeKeys.ID));
    }

    /** Atomically adds {@code increment} to the counter of the schema type. */
    public void increaseCounter(CassandraSessionPool.Session session,
                                HugeType type, long increment) {
        Update stmt = QueryBuilder.update(TABLE);
        stmt.with(QueryBuilder.incr(formatKey(HugeKeys.ID), increment));
        stmt.where(formatEQ(HugeKeys.SCHEMA_TYPE, type.name()));
        session.execute(stmt);
    }
}
public static class VertexLabel extends CassandraTable {

    public static final String TABLE = HugeType.VERTEX_LABEL.string();

    public VertexLabel() {
        super(TABLE);
    }

    /**
     * Creates the vertex-label schema table.
     * Partition key: ID (int schema-label id); no clustering keys.
     * Also creates a secondary index on NAME so labels can be looked up
     * by name.
     */
    @Override
    public void init(CassandraSessionPool.Session session) {
        ImmutableMap<HugeKeys, DataType> pkeys = ImmutableMap.of(
                HugeKeys.ID, TYPE_SL
        );
        ImmutableMap<HugeKeys, DataType> ckeys = ImmutableMap.of();
        ImmutableMap<HugeKeys, DataType> columns = ImmutableMap
                .<HugeKeys, DataType>builder()
                .put(HugeKeys.NAME, DataType.text())
                .put(HugeKeys.ID_STRATEGY, DataType.tinyint())
                .put(HugeKeys.PRIMARY_KEYS, DataType.list(TYPE_PK))
                .put(HugeKeys.NULLABLE_KEYS, DataType.set(TYPE_PK))
                .put(HugeKeys.INDEX_LABELS, DataType.set(TYPE_IL))
                .put(HugeKeys.PROPERTIES, DataType.set(TYPE_PK))
                .put(HugeKeys.ENABLE_LABEL_INDEX, DataType.cboolean())
                .put(HugeKeys.USER_DATA, TYPE_UD)
                .put(HugeKeys.STATUS, DataType.tinyint())
                .put(HugeKeys.TTL, TYPE_TTL)
                // TTL_START_TIME is typed as a property-key id (TYPE_PK) —
                // presumably the property whose value marks the TTL start;
                // TODO(review): confirm against the serializer
                .put(HugeKeys.TTL_START_TIME, TYPE_PK)
                .build();
        this.createTable(session, pkeys, ckeys, columns);
        this.createIndex(session, NAME_INDEX, HugeKeys.NAME);
    }
}
public static class EdgeLabel extends CassandraTable {

    public static final String TABLE = HugeType.EDGE_LABEL.string();

    public EdgeLabel() {
        super(TABLE);
    }

    /**
     * Creates the edge-label schema table.
     * Partition key: ID (int schema-label id); no clustering keys.
     * SOURCE_LABEL/TARGET_LABEL reference vertex-label ids; a secondary
     * index on NAME enables lookup by label name.
     */
    @Override
    public void init(CassandraSessionPool.Session session) {
        ImmutableMap<HugeKeys, DataType> pkeys = ImmutableMap.of(
                HugeKeys.ID, TYPE_SL
        );
        ImmutableMap<HugeKeys, DataType> ckeys = ImmutableMap.of();
        ImmutableMap<HugeKeys, DataType> columns = ImmutableMap
                .<HugeKeys, DataType>builder()
                .put(HugeKeys.NAME, DataType.text())
                .put(HugeKeys.FREQUENCY, DataType.tinyint())
                .put(HugeKeys.SOURCE_LABEL, TYPE_SL)
                .put(HugeKeys.TARGET_LABEL, TYPE_SL)
                .put(HugeKeys.SORT_KEYS, DataType.list(TYPE_PK))
                .put(HugeKeys.NULLABLE_KEYS, DataType.set(TYPE_PK))
                .put(HugeKeys.INDEX_LABELS, DataType.set(TYPE_IL))
                .put(HugeKeys.PROPERTIES, DataType.set(TYPE_PK))
                .put(HugeKeys.ENABLE_LABEL_INDEX, DataType.cboolean())
                .put(HugeKeys.USER_DATA, TYPE_UD)
                .put(HugeKeys.STATUS, DataType.tinyint())
                .put(HugeKeys.TTL, TYPE_TTL)
                // property-key id marking TTL start, same layout as VertexLabel
                .put(HugeKeys.TTL_START_TIME, TYPE_PK)
                .build();
        this.createTable(session, pkeys, ckeys, columns);
        this.createIndex(session, NAME_INDEX, HugeKeys.NAME);
    }
}
public static class PropertyKey extends CassandraTable {

    public static final String TABLE = HugeType.PROPERTY_KEY.string();

    public PropertyKey() {
        super(TABLE);
    }

    /**
     * Creates the property-key schema table.
     * Partition key: ID (int); no clustering keys. DATA_TYPE, CARDINALITY,
     * AGGREGATE_TYPE and WRITE_TYPE are stored as tinyint enum codes.
     * A secondary index on NAME enables lookup by property-key name.
     */
    @Override
    public void init(CassandraSessionPool.Session session) {
        ImmutableMap<HugeKeys, DataType> pkeys = ImmutableMap.of(
                HugeKeys.ID, DataType.cint()
        );
        ImmutableMap<HugeKeys, DataType> ckeys = ImmutableMap.of();
        ImmutableMap<HugeKeys, DataType> columns = ImmutableMap
                .<HugeKeys, DataType>builder()
                .put(HugeKeys.NAME, DataType.text())
                .put(HugeKeys.DATA_TYPE, DataType.tinyint())
                .put(HugeKeys.CARDINALITY, DataType.tinyint())
                .put(HugeKeys.AGGREGATE_TYPE, DataType.tinyint())
                .put(HugeKeys.WRITE_TYPE, DataType.tinyint())
                .put(HugeKeys.PROPERTIES, DataType.set(TYPE_PK))
                .put(HugeKeys.USER_DATA, TYPE_UD)
                .put(HugeKeys.STATUS, DataType.tinyint())
                .build();
        this.createTable(session, pkeys, ckeys, columns);
        this.createIndex(session, NAME_INDEX, HugeKeys.NAME);
    }
}
public static class IndexLabel extends CassandraTable {

    public static final String TABLE = HugeType.INDEX_LABEL.string();

    public IndexLabel() {
        super(TABLE);
    }

    /**
     * Creates the index-label schema table.
     * Partition key: ID (int index-label id); no clustering keys.
     * BASE_TYPE/BASE_VALUE identify the vertex- or edge-label the index
     * is defined on; FIELDS lists the indexed property-key ids in order.
     */
    @Override
    public void init(CassandraSessionPool.Session session) {
        ImmutableMap<HugeKeys, DataType> pkeys = ImmutableMap.of(
                HugeKeys.ID, TYPE_IL
        );
        ImmutableMap<HugeKeys, DataType> ckeys = ImmutableMap.of();
        ImmutableMap<HugeKeys, DataType> columns = ImmutableMap
                .<HugeKeys, DataType>builder()
                .put(HugeKeys.NAME, DataType.text())
                .put(HugeKeys.BASE_TYPE, DataType.tinyint())
                .put(HugeKeys.BASE_VALUE, TYPE_SL)
                .put(HugeKeys.INDEX_TYPE, DataType.tinyint())
                .put(HugeKeys.FIELDS, DataType.list(TYPE_PK))
                .put(HugeKeys.USER_DATA, TYPE_UD)
                .put(HugeKeys.STATUS, DataType.tinyint())
                .build();
        this.createTable(session, pkeys, ckeys, columns);
        this.createIndex(session, NAME_INDEX, HugeKeys.NAME);
    }
}
public static class Vertex extends CassandraTable {

    public static final String TABLE = HugeType.VERTEX.string();

    public Vertex(String store) {
        super(joinTableName(store, TABLE));
    }

    /**
     * Creates the vertex table keyed by the binary vertex id, plus a
     * secondary index on LABEL for label-based vertex queries.
     */
    @Override
    public void init(CassandraSessionPool.Session session) {
        ImmutableMap<HugeKeys, DataType> partitionKeys = ImmutableMap.of(
                HugeKeys.ID, TYPE_ID);
        ImmutableMap<HugeKeys, DataType> clusteringKeys = ImmutableMap.of();
        ImmutableMap<HugeKeys, DataType> regularColumns = ImmutableMap.of(
                HugeKeys.LABEL, TYPE_SL,
                HugeKeys.PROPERTIES, DataType.map(TYPE_PK, TYPE_PROP),
                HugeKeys.EXPIRED_TIME, TYPE_EXPIRED_TIME);
        this.createTable(session, partitionKeys, clusteringKeys, regularColumns);
        this.createIndex(session, LABEL_INDEX, HugeKeys.LABEL);
    }

    /** Queues an INSERT for the vertex, applying the entry's TTL when set. */
    @Override
    public void insert(CassandraSessionPool.Session session,
                       CassandraBackendEntry.Row entry) {
        session.add(setTtl(this.buildInsert(entry), entry));
    }

    /** Queues an UPDATE appending properties, applying the entry's TTL when set. */
    @Override
    public void append(CassandraSessionPool.Session session,
                       CassandraBackendEntry.Row entry) {
        session.add(setTtl(this.buildAppend(entry), entry));
    }
}
/**
 * Edge table: edges are stored twice, once in the out-edges table keyed
 * by the source vertex and once in the in-edges table keyed by the target
 * vertex, so that traversals in either direction hit a single partition.
 */
public static class Edge extends CassandraTable {

    public static final String TABLE_SUFFIX = HugeType.EDGE.string();

    private final String store;
    // Direction this table instance serves: OUT or IN (one table each)
    private final Directions direction;

    protected Edge(String store, Directions direction) {
        super(joinTableName(store, table(direction)));
        this.store = store;
        this.direction = direction;
    }

    /** Returns the fully-qualified edges table name for the given direction. */
    protected String edgesTable(Directions direction) {
        return joinTableName(this.store, table(direction));
    }

    protected Directions direction() {
        return this.direction;
    }

    /** Table queried through the label index (this table itself). */
    protected String labelIndexTable() {
        return this.table();
    }

    /**
     * Creates the edges table. Partition key: OWNER_VERTEX; clustering
     * keys (DIRECTION, LABEL, SORT_VALUES, OTHER_VERTEX) together with
     * the owner form the full edge id.
     */
    @Override
    public void init(CassandraSessionPool.Session session) {
        ImmutableMap<HugeKeys, DataType> pkeys = ImmutableMap.of(
                HugeKeys.OWNER_VERTEX, TYPE_ID
        );
        ImmutableMap<HugeKeys, DataType> ckeys = ImmutableMap.of(
                HugeKeys.DIRECTION, DataType.tinyint(),
                HugeKeys.LABEL, TYPE_SL,
                HugeKeys.SORT_VALUES, DataType.text(),
                HugeKeys.OTHER_VERTEX, TYPE_ID
        );
        ImmutableMap<HugeKeys, DataType> columns = ImmutableMap.of(
                HugeKeys.PROPERTIES, DataType.map(TYPE_PK, TYPE_PROP),
                HugeKeys.EXPIRED_TIME, TYPE_EXPIRED_TIME
        );
        this.createTable(session, pkeys, ckeys, columns);
        /*
         * Only out-edges table needs label index because we query edges
         * by label from out-edges table
         */
        if (this.direction == Directions.OUT) {
            this.createIndex(session, LABEL_INDEX, HugeKeys.LABEL);
        }
    }

    /** Queues an INSERT for the edge, applying the entry's TTL when set. */
    @Override
    public void insert(CassandraSessionPool.Session session,
                       CassandraBackendEntry.Row entry) {
        Insert insert = this.buildInsert(entry);
        session.add(setTtl(insert, entry));
    }

    /** Queues an UPDATE appending edge properties, applying the entry's TTL. */
    @Override
    public void append(CassandraSessionPool.Session session,
                       CassandraBackendEntry.Row entry) {
        Update update = this.buildAppend(entry);
        session.add(setTtl(update, entry));
    }

    @Override
    protected List<HugeKeys> pkColumnName() {
        return ImmutableList.of(HugeKeys.OWNER_VERTEX);
    }

    /** Edge id columns: all EdgeId keys except SUB_LABEL (not stored). */
    @Override
    protected List<HugeKeys> idColumnName() {
        return Arrays.stream(EdgeId.KEYS)
                     .filter(key -> !Objects.equals(key, HugeKeys.SUB_LABEL))
                     .collect(Collectors.toList());
    }

    /**
     * Converts an id into the column values of the edge's primary key.
     * A one-part id means "delete edges by label" and is returned as-is.
     */
    @Override
    protected List<Object> idColumnValue(Id id) {
        EdgeId edgeId;
        if (id instanceof EdgeId) {
            edgeId = (EdgeId) id;
        } else {
            String[] idParts = EdgeId.split(id);
            if (idParts.length == 1) {
                // Delete edge by label
                return Arrays.asList(idParts);
            }
            id = IdUtil.readString(id.asString());
            edgeId = EdgeId.parse(id.asString());
        }
        E.checkState(edgeId.direction() == this.direction,
                     "Can't query %s edges from %s edges table",
                     edgeId.direction(), this.direction);
        return idColumnValue(edgeId);
    }

    protected final List<Object> idColumnValue(EdgeId edgeId) {
        // TODO: move to Serializer
        List<Object> list = new ArrayList<>(5);
        list.add(IdUtil.writeBinString(edgeId.ownerVertexId()));
        list.add(edgeId.directionCode());
        list.add(edgeId.edgeLabelId().asLong());
        list.add(edgeId.sortValues());
        list.add(IdUtil.writeBinString(edgeId.otherVertexId()));
        return list;
    }

    @Override
    public void delete(CassandraSessionPool.Session session,
                       CassandraBackendEntry.Row entry) {
        /*
         * TODO: Delete edge by label
         * Need to implement the framework that can delete with query
         * which contains id or condition.
         */
        // Let super class do delete if not deleting edge by label
        List<Object> idParts = this.idColumnValue(entry.id());
        if (idParts.size() > 1 || !entry.columns().isEmpty()) {
            super.delete(session, entry);
            return;
        }
        // The id contains only the label: delete all edges of that label
        this.deleteEdgesByLabel(session, entry.id());
    }

    /**
     * Deletes every edge with the given label, in both directions.
     * Runs from the OUT table only; each matching row produces one delete
     * against edges_out and the mirrored delete against edges_in.
     */
    protected void deleteEdgesByLabel(CassandraSessionPool.Session session,
                                      Id label) {
        // Edges in edges_in table will be deleted when direction is OUT
        if (this.direction == Directions.IN) {
            return;
        }
        final String OWNER_VERTEX = formatKey(HugeKeys.OWNER_VERTEX);
        final String SORT_VALUES = formatKey(HugeKeys.SORT_VALUES);
        final String OTHER_VERTEX = formatKey(HugeKeys.OTHER_VERTEX);
        // Query edges by label index
        Select select = QueryBuilder.select().from(this.labelIndexTable());
        select.where(formatEQ(HugeKeys.LABEL, label.asLong()));
        ResultSet rs;
        try {
            rs = session.execute(select);
        } catch (DriverException e) {
            throw new BackendException("Failed to query edges " +
                      "with label '%s' for deleting", e, label);
        }
        // Delete edges in batches to bound the pending-statement size
        long count = 0L;
        for (Row row : rs) {
            Object ownerVertex = row.getObject(OWNER_VERTEX);
            Object sortValues = row.getObject(SORT_VALUES);
            Object otherVertex = row.getObject(OTHER_VERTEX);
            // Delete OUT edges from edges_out table
            session.add(buildDelete(label, ownerVertex, Directions.OUT,
                                    sortValues, otherVertex));
            // Delete IN edges from edges_in table (owner/other swapped)
            session.add(buildDelete(label, otherVertex, Directions.IN,
                                    sortValues, ownerVertex));
            count += 2L;
            if (count >= COMMIT_DELETE_BATCH) {
                session.commit();
                count = 0;
            }
        }
        // Flush the final partial batch
        if (count > 0L) {
            session.commit();
        }
    }

    /** Builds a single-edge DELETE keyed by the full edge primary key. */
    private Delete buildDelete(Id label, Object ownerVertex,
                               Directions direction, Object sortValues,
                               Object otherVertex) {
        Delete delete = QueryBuilder.delete().from(edgesTable(direction));
        delete.where(formatEQ(HugeKeys.OWNER_VERTEX, ownerVertex));
        delete.where(formatEQ(HugeKeys.DIRECTION,
                              EdgeId.directionToCode(direction)));
        delete.where(formatEQ(HugeKeys.LABEL, label.asLong()));
        delete.where(formatEQ(HugeKeys.SORT_VALUES, sortValues));
        delete.where(formatEQ(HugeKeys.OTHER_VERTEX, otherVertex));
        return delete;
    }

    /**
     * Folds consecutive edge rows of the same owner into one vertex entry,
     * up to INLINE_BATCH_SIZE sub-rows per vertex.
     */
    @Override
    protected BackendEntry mergeEntries(BackendEntry e1, BackendEntry e2) {
        // Merge edges into vertex
        // TODO: merge rows before calling row2Entry()
        CassandraBackendEntry current = (CassandraBackendEntry) e1;
        CassandraBackendEntry next = (CassandraBackendEntry) e2;
        E.checkState(current == null || current.type().isVertex(),
                     "The current entry must be null or VERTEX");
        E.checkState(next != null && next.type().isEdge(),
                     "The next entry must be EDGE");
        long maxSize = BackendEntryIterator.INLINE_BATCH_SIZE;
        if (current != null && current.subRows().size() < maxSize) {
            Object nextVertexId = next.column(HugeKeys.OWNER_VERTEX);
            if (current.id().equals(IdGenerator.of(nextVertexId))) {
                current.subRow(next.row());
                return current;
            }
        }
        return this.wrapByVertex(next);
    }

    /** Wraps an edge row in a synthetic vertex entry for its owner vertex. */
    private CassandraBackendEntry wrapByVertex(CassandraBackendEntry edge) {
        assert edge.type().isEdge();
        Object ownerVertex = edge.column(HugeKeys.OWNER_VERTEX);
        E.checkState(ownerVertex != null, "Invalid backend entry");
        Id vertexId = IdGenerator.of(ownerVertex);
        CassandraBackendEntry vertex = new CassandraBackendEntry(
                                           HugeType.VERTEX, vertexId);
        vertex.column(HugeKeys.ID, ownerVertex);
        vertex.column(HugeKeys.PROPERTIES, ImmutableMap.of());
        vertex.subRow(edge.row());
        return vertex;
    }

    /** Table-name fragment for a direction, e.g. "out_edge" / "in_edge". */
    private static String table(Directions direction) {
        assert direction == Directions.OUT || direction == Directions.IN;
        return direction.type().string() + TABLE_SUFFIX;
    }

    public static CassandraTable out(String store) {
        return new Edge(store, Directions.OUT);
    }

    public static CassandraTable in(String store) {
        return new Edge(store, Directions.IN);
    }
}
/**
 * Secondary index table: maps a field value to the element ids that
 * carry it. Partition key: FIELD_VALUES; clustering keys:
 * INDEX_LABEL_ID, ELEMENT_IDS.
 */
public static class SecondaryIndex extends CassandraTable {

    public static final String TABLE = HugeType.SECONDARY_INDEX.string();

    public SecondaryIndex(String store) {
        this(store, TABLE);
    }

    protected SecondaryIndex(String store, String table) {
        super(joinTableName(store, table));
    }

    @Override
    public void init(CassandraSessionPool.Session session) {
        ImmutableMap<HugeKeys, DataType> pkeys = ImmutableMap.of(
                HugeKeys.FIELD_VALUES, DataType.text()
        );
        ImmutableMap<HugeKeys, DataType> ckeys = ImmutableMap.of(
                HugeKeys.INDEX_LABEL_ID, TYPE_IL,
                HugeKeys.ELEMENT_IDS, TYPE_ID
        );
        ImmutableMap<HugeKeys, DataType> columns = ImmutableMap.of(
                HugeKeys.EXPIRED_TIME, TYPE_EXPIRED_TIME
        );
        this.createTable(session, pkeys, ckeys, columns);
    }

    @Override
    protected List<HugeKeys> idColumnName() {
        return ImmutableList.of(HugeKeys.FIELD_VALUES,
                                HugeKeys.INDEX_LABEL_ID,
                                HugeKeys.ELEMENT_IDS);
    }

    // Index rows are immutable: the primary key IS the data
    @Override
    protected List<HugeKeys> modifiableColumnName() {
        return ImmutableList.of();
    }

    /**
     * Deletes index entries. With FIELD_VALUES present a single entry is
     * deleted via the super class; otherwise the whole index identified
     * by INDEX_LABEL_ID is dropped, which requires scanning for all
     * matching field values (INDEX_LABEL_ID is not the partition key).
     *
     * @throws BackendException when INDEX_LABEL_ID is missing or the
     *         scan query fails
     */
    @Override
    public void delete(CassandraSessionPool.Session session,
                       CassandraBackendEntry.Row entry) {
        String fieldValues = entry.column(HugeKeys.FIELD_VALUES);
        if (fieldValues != null) {
            super.delete(session, entry);
            return;
        }
        Long indexLabel = entry.column(HugeKeys.INDEX_LABEL_ID);
        if (indexLabel == null) {
            throw new BackendException("SecondaryIndex deletion needs " +
                                       "INDEX_LABEL_ID, but not provided.");
        }
        // INDEX_LABEL_ID is a clustering key, so allow filtering to scan
        Select select = QueryBuilder.select().from(this.table());
        select.where(formatEQ(HugeKeys.INDEX_LABEL_ID, indexLabel));
        select.allowFiltering();
        ResultSet rs;
        try {
            rs = session.execute(select);
        } catch (DriverException e) {
            // Pass the cause before the format args, consistent with the
            // BackendException usage in Edge.deleteEdgesByLabel()
            throw new BackendException("Failed to query secondary " +
                      "indexes with index label id '%s' for deleting",
                      e, indexLabel);
        }
        final String FIELD_VALUES = formatKey(HugeKeys.FIELD_VALUES);
        long count = 0L;
        for (Row r : rs) {
            fieldValues = r.get(FIELD_VALUES, String.class);
            // Delete the partition prefix (field value + index label id)
            Delete delete = QueryBuilder.delete().from(this.table());
            delete.where(formatEQ(HugeKeys.INDEX_LABEL_ID, indexLabel));
            delete.where(formatEQ(HugeKeys.FIELD_VALUES, fieldValues));
            session.add(delete);
            if (++count >= COMMIT_DELETE_BATCH) {
                session.commit();
                count = 0L;
            }
        }
        // NOTE(review): any final partial batch is left pending here —
        // presumably committed by the enclosing transaction; confirm
    }

    /** Direct insertion is not supported; entries are written via append(). */
    @Override
    public void insert(CassandraSessionPool.Session session,
                       CassandraBackendEntry.Row entry) {
        throw new BackendException("SecondaryIndex insertion is not supported.");
    }

    /** Adds one index entry (3 id columns, optionally EXPIRED_TIME). */
    @Override
    public void append(CassandraSessionPool.Session session,
                       CassandraBackendEntry.Row entry) {
        assert entry.columns().size() == 3 || entry.columns().size() == 4;
        Insert insert = this.buildInsert(entry);
        session.add(setTtl(insert, entry));
    }

    /** Removes one index entry (delegates to delete()). */
    @Override
    public void eliminate(CassandraSessionPool.Session session,
                          CassandraBackendEntry.Row entry) {
        assert entry.columns().size() == 3 || entry.columns().size() == 4;
        this.delete(session, entry);
    }
}
/**
 * Search (full-text) index table; reuses the secondary-index layout,
 * differing only in table name.
 */
public static class SearchIndex extends SecondaryIndex {

    public static final String TABLE = HugeType.SEARCH_INDEX.string();

    public SearchIndex(String store) {
        super(store, TABLE);
    }

    /** Direct insertion is not supported; entries are written via append(). */
    @Override
    public void insert(CassandraSessionPool.Session session,
                       CassandraBackendEntry.Row entry) {
        throw new BackendException("SearchIndex insertion is not supported.");
    }
}
/**
 * Unique index table; reuses the secondary-index layout, differing
 * only in table name.
 * TODO: set field value as key and set element id as value
 */
public static class UniqueIndex extends SecondaryIndex {

    public static final String TABLE = HugeType.UNIQUE_INDEX.string();

    public UniqueIndex(String store) {
        super(store, TABLE);
    }

    /** Direct insertion is not supported; entries are written via append(). */
    @Override
    public void insert(CassandraSessionPool.Session session,
                       CassandraBackendEntry.Row entry) {
        throw new BackendException("UniqueIndex insertion is not supported.");
    }
}
/**
 * Base class for range index tables. Unlike SecondaryIndex, the
 * partition key here is INDEX_LABEL_ID, with FIELD_VALUES and
 * ELEMENT_IDS as clustering keys, so range scans over field values
 * stay within one partition. Subclasses pick the FIELD_VALUES type.
 */
public abstract static class RangeIndex extends CassandraTable {

    protected RangeIndex(String store, String table) {
        super(joinTableName(store, table));
    }

    @Override
    public void init(CassandraSessionPool.Session session) {
        ImmutableMap<HugeKeys, DataType> pkeys = ImmutableMap.of(
                HugeKeys.INDEX_LABEL_ID, TYPE_IL
        );
        ImmutableMap<HugeKeys, DataType> ckeys = ImmutableMap.of(
                HugeKeys.FIELD_VALUES, this.fieldValuesType(),
                HugeKeys.ELEMENT_IDS, TYPE_ID
        );
        ImmutableMap<HugeKeys, DataType> columns = ImmutableMap.of(
                HugeKeys.EXPIRED_TIME, TYPE_EXPIRED_TIME
        );
        this.createTable(session, pkeys, ckeys, columns);
    }

    /** Column type of the indexed field values; overridden per subclass. */
    protected DataType fieldValuesType() {
        return DataType.decimal();
    }

    @Override
    protected List<HugeKeys> idColumnName() {
        return ImmutableList.of(HugeKeys.INDEX_LABEL_ID,
                                HugeKeys.FIELD_VALUES,
                                HugeKeys.ELEMENT_IDS);
    }

    // Index rows are immutable: the primary key IS the data
    @Override
    protected List<HugeKeys> modifiableColumnName() {
        return ImmutableList.of();
    }

    /**
     * Deletes index entries. With FIELD_VALUES present a single entry is
     * deleted via the super class; otherwise the entire partition for
     * INDEX_LABEL_ID is removed with one DELETE (INDEX_LABEL_ID is the
     * partition key, so no scan is needed here).
     */
    @Override
    public void delete(CassandraSessionPool.Session session,
                       CassandraBackendEntry.Row entry) {
        Object fieldValues = entry.column(HugeKeys.FIELD_VALUES);
        if (fieldValues != null) {
            super.delete(session, entry);
            return;
        }
        Long indexLabel = entry.column(HugeKeys.INDEX_LABEL_ID);
        if (indexLabel == null) {
            throw new BackendException("Range index deletion needs INDEX_LABEL_ID, " +
                                       "but not provided.");
        }
        Delete delete = QueryBuilder.delete().from(this.table());
        delete.where(formatEQ(HugeKeys.INDEX_LABEL_ID, indexLabel));
        session.add(delete);
    }

    /** Direct insertion is not supported; entries are written via append(). */
    @Override
    public void insert(CassandraSessionPool.Session session,
                       CassandraBackendEntry.Row entry) {
        throw new BackendException("RangeIndex insertion is not supported.");
    }

    /** Adds one index entry (3 id columns, optionally EXPIRED_TIME). */
    @Override
    public void append(CassandraSessionPool.Session session,
                       CassandraBackendEntry.Row entry) {
        assert entry.columns().size() == 3 || entry.columns().size() == 4;
        Insert insert = this.buildInsert(entry);
        session.add(setTtl(insert, entry));
    }

    /** Removes one index entry (delegates to delete()). */
    @Override
    public void eliminate(CassandraSessionPool.Session session,
                          CassandraBackendEntry.Row entry) {
        assert entry.columns().size() == 3 || entry.columns().size() == 4;
        this.delete(session, entry);
    }
}
/** Range index over int field values. */
public static class RangeIntIndex extends RangeIndex {

    public static final String TABLE = HugeType.RANGE_INT_INDEX.string();

    public RangeIntIndex(String store) {
        super(store, TABLE);
    }

    @Override
    protected DataType fieldValuesType() {
        return DataType.cint();
    }
}
/** Range index over float field values. */
public static class RangeFloatIndex extends RangeIndex {

    public static final String TABLE = HugeType.RANGE_FLOAT_INDEX.string();

    public RangeFloatIndex(String store) {
        super(store, TABLE);
    }

    @Override
    protected DataType fieldValuesType() {
        return DataType.cfloat();
    }
}
/** Range index over long field values. */
public static class RangeLongIndex extends RangeIndex {

    public static final String TABLE = HugeType.RANGE_LONG_INDEX.string();

    public RangeLongIndex(String store) {
        super(store, TABLE);
    }

    @Override
    protected DataType fieldValuesType() {
        // TODO: DataType.varint()
        return DataType.bigint();
    }
}
/** Range index over double field values. */
public static class RangeDoubleIndex extends RangeIndex {

    public static final String TABLE = HugeType.RANGE_DOUBLE_INDEX.string();

    public RangeDoubleIndex(String store) {
        super(store, TABLE);
    }

    @Override
    protected DataType fieldValuesType() {
        return DataType.cdouble();
    }
}
/** Shard index: range-index layout with text field values. */
public static class ShardIndex extends RangeIndex {

    public static final String TABLE = HugeType.SHARD_INDEX.string();

    public ShardIndex(String store) {
        super(store, TABLE);
    }

    @Override
    protected DataType fieldValuesType() {
        return DataType.text();
    }

    /** Direct insertion is not supported; entries are written via append(). */
    @Override
    public void insert(CassandraSessionPool.Session session,
                       CassandraBackendEntry.Row entry) {
        throw new BackendException("ShardIndex insertion is not supported.");
    }
}
/**
 * OLAP property table: one table per OLAP property key, named
 * {store}_{olap}_{propertyKeyId}, mapping a vertex id to that single
 * property's serialized value.
 */
public static class Olap extends CassandraTable {

    public static final String TABLE = HugeType.OLAP.string();

    // Property-key id this table stores; assigned once, so keep it final
    private final Id pkId;

    public Olap(String store, Id id) {
        super(joinTableName(store, joinTableName(TABLE, id.asString())));
        this.pkId = id;
    }

    @Override
    public void init(CassandraSessionPool.Session session) {
        ImmutableMap<HugeKeys, DataType> pkeys = ImmutableMap.of(
                HugeKeys.ID, TYPE_ID
        );
        ImmutableMap<HugeKeys, DataType> ckeys = ImmutableMap.of();
        ImmutableMap<HugeKeys, DataType> columns = ImmutableMap.of(
                HugeKeys.PROPERTY_VALUE, TYPE_PROP
        );
        this.createTable(session, pkeys, ckeys, columns);
    }

    /**
     * Converts result rows to entries, tagging each with this table's
     * property-key id (subId) so callers know which OLAP property the
     * value belongs to.
     */
    @Override
    protected Iterator<BackendEntry> results2Entries(Query q, ResultSet r) {
        return new CassandraEntryIterator(r, q, (e1, row) -> {
            CassandraBackendEntry e2 = row2Entry(q.resultType(), row);
            e2.subId(this.pkId);
            return this.mergeEntries(e1, e2);
        });
    }

    @Override
    public boolean isOlap() {
        return true;
    }
}
/** Secondary index for OLAP properties. */
public static class OlapSecondaryIndex extends SecondaryIndex {

    public static final String TABLE = HugeType.OLAP.string();

    public OlapSecondaryIndex(String store) {
        this(store, TABLE);
    }

    protected OlapSecondaryIndex(String store, String table) {
        // Joins store+table first, then passes the result as the "store"
        // of the single-arg super ctor, which appends the secondary-index
        // suffix — yielding {store}_{olap}_{secondary_index}
        super(joinTableName(store, table));
    }
}
/** Int range index for OLAP properties (table name prefixed with olap). */
public static class OlapRangeIntIndex extends RangeIntIndex {

    public static final String TABLE = HugeType.OLAP.string();

    public OlapRangeIntIndex(String store) {
        this(store, TABLE);
    }

    protected OlapRangeIntIndex(String store, String table) {
        // store+table joined first, then the super ctor appends its suffix
        super(joinTableName(store, table));
    }
}
/** Long range index for OLAP properties (table name prefixed with olap). */
public static class OlapRangeLongIndex extends RangeLongIndex {

    public static final String TABLE = HugeType.OLAP.string();

    public OlapRangeLongIndex(String store) {
        this(store, TABLE);
    }

    protected OlapRangeLongIndex(String store, String table) {
        // store+table joined first, then the super ctor appends its suffix
        super(joinTableName(store, table));
    }
}
/** Float range index for OLAP properties (table name prefixed with olap). */
public static class OlapRangeFloatIndex extends RangeFloatIndex {

    public static final String TABLE = HugeType.OLAP.string();

    public OlapRangeFloatIndex(String store) {
        this(store, TABLE);
    }

    protected OlapRangeFloatIndex(String store, String table) {
        // store+table joined first, then the super ctor appends its suffix
        super(joinTableName(store, table));
    }
}
/** Double range index for OLAP properties (table name prefixed with olap). */
public static class OlapRangeDoubleIndex extends RangeDoubleIndex {

    public static final String TABLE = HugeType.OLAP.string();

    public OlapRangeDoubleIndex(String store) {
        this(store, TABLE);
    }

    protected OlapRangeDoubleIndex(String store, String table) {
        // store+table joined first, then the super ctor appends its suffix
        super(joinTableName(store, table));
    }
}
/**
 * Applies the entry's TTL (milliseconds) to an INSERT or UPDATE as a
 * CQL "USING TTL" clause, rounding up to whole seconds. A TTL of 0
 * means "no expiry" and leaves the statement untouched.
 */
private static Statement setTtl(BuiltStatement statement,
                                CassandraBackendEntry.Row entry) {
    long ttlMs = entry.ttl();
    if (ttlMs == 0L) {
        return statement;
    }
    // Round up so a sub-second TTL never becomes 0 (which would disable it)
    int ttlSeconds = (int) Math.ceil(ttlMs / 1000D);
    Using usingTtl = QueryBuilder.ttl(ttlSeconds);
    if (statement instanceof Insert) {
        ((Insert) statement).using(usingTtl);
    } else {
        assert statement instanceof Update;
        ((Update) statement).using(usingTtl);
    }
    return statement;
}
}
|
apache/incubator-kie-drools | 37,031 | drools-drl/drools-drl-parser/src/main/java/org/drools/drl/parser/lang/ParserHelper.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.drools.drl.parser.lang;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Deque;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
import org.antlr.runtime.CommonToken;
import org.antlr.runtime.RecognitionException;
import org.antlr.runtime.RecognizerSharedState;
import org.antlr.runtime.Token;
import org.antlr.runtime.TokenStream;
import org.drools.drl.ast.descr.AnnotatedBaseDescr;
import org.drools.drl.ast.descr.AttributeDescr;
import org.drools.drl.ast.descr.BaseDescr;
import org.drools.drl.ast.descr.RelationalExprDescr;
import org.drools.drl.ast.descr.RuleDescr;
import org.drools.drl.ast.dsl.AbstractClassTypeDeclarationBuilder;
import org.drools.drl.ast.dsl.AccumulateDescrBuilder;
import org.drools.drl.ast.dsl.AccumulateImportDescrBuilder;
import org.drools.drl.ast.dsl.AttributeDescrBuilder;
import org.drools.drl.ast.dsl.AttributeSupportBuilder;
import org.drools.drl.ast.dsl.BehaviorDescrBuilder;
import org.drools.drl.ast.dsl.CEDescrBuilder;
import org.drools.drl.ast.dsl.CollectDescrBuilder;
import org.drools.drl.ast.dsl.ConditionalBranchDescrBuilder;
import org.drools.drl.ast.dsl.DeclareDescrBuilder;
import org.drools.drl.ast.dsl.DescrBuilder;
import org.drools.drl.ast.dsl.DescrFactory;
import org.drools.drl.ast.dsl.EntryPointDeclarationDescrBuilder;
import org.drools.drl.ast.dsl.EnumDeclarationDescrBuilder;
import org.drools.drl.ast.dsl.EnumLiteralDescrBuilder;
import org.drools.drl.ast.dsl.EvalDescrBuilder;
import org.drools.drl.ast.dsl.FieldDescrBuilder;
import org.drools.drl.ast.dsl.ForallDescrBuilder;
import org.drools.drl.ast.dsl.FunctionDescrBuilder;
import org.drools.drl.ast.dsl.GlobalDescrBuilder;
import org.drools.drl.ast.dsl.GroupByDescrBuilder;
import org.drools.drl.ast.dsl.ImportDescrBuilder;
import org.drools.drl.ast.dsl.NamedConsequenceDescrBuilder;
import org.drools.drl.ast.dsl.PackageDescrBuilder;
import org.drools.drl.ast.dsl.PatternContainerDescrBuilder;
import org.drools.drl.ast.dsl.PatternDescrBuilder;
import org.drools.drl.ast.dsl.QueryDescrBuilder;
import org.drools.drl.ast.dsl.RuleDescrBuilder;
import org.drools.drl.ast.dsl.TypeDeclarationDescrBuilder;
import org.drools.drl.ast.dsl.UnitDescrBuilder;
import org.drools.drl.ast.dsl.WindowDeclarationDescrBuilder;
import org.drools.drl.parser.DroolsParserException;
import org.drools.drl.parser.impl.Operator;
import org.kie.internal.builder.conf.LanguageLevelOption;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * This is a class to hold all the helper functions/methods used
 * by the DRL parser: editor-interface bookkeeping for IDE support,
 * soft-keyword validation, error accumulation, paraphrase handling
 * for error messages, and lifecycle management of the DescrBuilders
 * that back each grammar rule.
 */
public class ParserHelper {

    public static final Logger LOG = LoggerFactory.getLogger(ParserHelper.class);

    /** Soft keywords that may start a top-level DRL statement. */
    public final String[] statementKeywords = new String[]{
            DroolsSoftKeywords.PACKAGE,
            DroolsSoftKeywords.UNIT,
            DroolsSoftKeywords.IMPORT,
            DroolsSoftKeywords.GLOBAL,
            DroolsSoftKeywords.DECLARE,
            DroolsSoftKeywords.FUNCTION,
            DroolsSoftKeywords.RULE,
            DroolsSoftKeywords.QUERY
    };

    /** Parse errors accumulated while parsing; never null. */
    public List<DroolsParserException> errors = new ArrayList<>();
    /** Sentences collected for the editor/IDE; lazily created when enabled. */
    public LinkedList<DroolsSentence> editorInterface = null;
    public boolean isEditorInterfaceEnabled = false;

    /** Stack of paraphrase maps used to build readable error messages. */
    private Deque<Map<DroolsParaphraseTypes, String>> paraphrases = new ArrayDeque<>();

    // parameters from parser
    private DroolsParserExceptionFactory errorMessageFactory = null;
    private TokenStream input = null;
    private RecognizerSharedState state = null;
    private String leftMostExpr = null;

    // helper attribute
    private boolean hasOperator = false;

    private final LanguageLevelOption languageLevel;

    /** Symbols of the operators built into the language. */
    private static final Set<String> BUILT_IN_OPERATORS = Arrays.stream(Operator.BuiltInOperator.values())
            .map(Operator.BuiltInOperator::getSymbol)
            .collect(Collectors.toSet());

    // Counters used to rate-limit the deprecation warnings below.
    // NOTE(review): static and unsynchronized, so concurrent parses may
    // emit a few extra warnings; harmless, but confirm if that matters.
    private static int logCounterHalfConstraint = 0;
    private static int logCounterCustomOperator = 0;
    private static int logCounterInfixAnd = 0;
    private static int logCounterInfixOr = 0;
    private static int logCounterAnnotationInLhsPattern = 0;
    private static int logCounterAgendaGroup = 0;

    public ParserHelper(TokenStream input,
                        RecognizerSharedState state,
                        LanguageLevelOption languageLevel) {
        this.errorMessageFactory = new DroolsParserExceptionFactory(paraphrases, languageLevel);
        this.input = input;
        this.state = state;
        this.languageLevel = languageLevel;
    }

    public LinkedList<DroolsSentence> getEditorInterface() {
        return editorInterface;
    }

    public void setLeftMostExpr(String value) {
        this.leftMostExpr = value;
    }

    public String getLeftMostExpr() {
        return this.leftMostExpr;
    }

    public void enableEditorInterface() {
        isEditorInterfaceEnabled = true;
    }

    public void disableEditorInterface() {
        isEditorInterfaceEnabled = false;
    }

    public void setHasOperator(boolean hasOperator) {
        this.hasOperator = hasOperator;
    }

    public boolean getHasOperator() {
        return hasOperator;
    }

    /**
     * Starts a new editor sentence of the given type. Previously the add
     * was guarded by {@code editorInterface.isEmpty()}, so only the very
     * first sentence was ever created: every later statement's tokens were
     * lumped into that first sentence and its sentence type was lost. A
     * fresh sentence is now added on every call.
     */
    public void beginSentence(DroolsSentenceType sentenceType) {
        if (isEditorInterfaceEnabled) {
            if (null == editorInterface) {
                editorInterface = new LinkedList<>();
            }
            DroolsSentence sentence = new DroolsSentence();
            sentence.setType(sentenceType);
            editorInterface.add(sentence);
        }
    }

    /** Returns the sentence currently being built (the most recently begun). */
    public DroolsSentence getActiveSentence() {
        return editorInterface.getLast();
    }

    public void emit(List<?> tokens,
                     DroolsEditorType editorType) {
        if (isEditorInterfaceEnabled && tokens != null) {
            for (Object activeObject : tokens) {
                emit((Token) activeObject, editorType);
            }
        }
    }

    public void emit(Token token,
                     DroolsEditorType editorType) {
        if (isEditorInterfaceEnabled && token != null && editorType != null) {
            ((DroolsToken) token).setEditorType(editorType);
            getActiveSentence().addContent((DroolsToken) token);
        }
    }

    public void emit(int activeContext) {
        if (isEditorInterfaceEnabled) {
            getActiveSentence().addContent(activeContext);
        }
    }

    /** Returns the last DroolsToken contained in the list, or null if none. */
    public DroolsToken getLastTokenOnList(LinkedList<?> list) {
        DroolsToken lastToken = null;
        for (Object object : list) {
            if (object instanceof DroolsToken) {
                lastToken = (DroolsToken) object;
            }
        }
        return lastToken;
    }

    /** Returns the text of the lookahead token {@code LTNumber} ahead, or null. */
    public String retrieveLT(int LTNumber) {
        if (input == null) {
            return null;
        }
        Token token = input.LT(LTNumber);
        return token != null ? token.getText() : null;
    }

    public boolean validateLT(int LTNumber,
                              String text) {
        String text2Validate = retrieveLT(LTNumber);
        return validateText(text, text2Validate);
    }

    private boolean validateText(String text, String text2Validate) {
        return text2Validate != null && text2Validate.equals(text);
    }

    /** True if the token at the given lookahead offset is a (possibly negated) operator. */
    public boolean isPluggableEvaluator(int offset,
                                        boolean negated) {
        String text2Validate = retrieveLT(offset);
        return text2Validate != null && DroolsSoftKeywords.isOperator(text2Validate, negated);
    }

    public boolean isPluggableEvaluator(boolean negated) {
        return isPluggableEvaluator(1, negated);
    }

    public boolean validateIdentifierKey(String text) {
        return validateLT(1, text);
    }

    /** True if the token at the given lookahead index is a conditional-element keyword. */
    public boolean validateCEKeyword(int index) {
        String text2Validate = retrieveLT(index);
        return validateText(text2Validate, DroolsSoftKeywords.NOT) ||
               validateText(text2Validate, DroolsSoftKeywords.EXISTS) ||
               validateText(text2Validate, DroolsSoftKeywords.FORALL) ||
               validateText(text2Validate, DroolsSoftKeywords.AND) ||
               validateText(text2Validate, DroolsSoftKeywords.OR) ||
               validateText(text2Validate, DroolsSoftKeywords.COLLECT) ||
               validateText(text2Validate, DroolsSoftKeywords.FROM) ||
               validateText(text2Validate, DroolsSoftKeywords.END) ||
               validateText(text2Validate, DroolsSoftKeywords.EVAL) ||
               validateText(text2Validate, DroolsSoftKeywords.OVER) ||
               validateText(text2Validate, DroolsSoftKeywords.THEN);
    }

    /** True if the token at index starts a top-level statement or a rule attribute. */
    public boolean validateStatement(int index) {
        String text2Validate = retrieveLT(index);
        for (String st : statementKeywords) {
            if (validateText(text2Validate, st)) {
                return true;
            }
        }
        return validateAttribute(index);
    }

    /** True if the token(s) at index spell a rule attribute (incl. hyphenated ones). */
    public boolean validateAttribute(int index) {
        String text2Validate = retrieveLT(index);
        return validateText(text2Validate, DroolsSoftKeywords.SALIENCE) ||
               validateText(text2Validate, DroolsSoftKeywords.ENABLED) ||
               (validateText(text2Validate, DroolsSoftKeywords.NO) &&
                validateLT(index + 1, "-") &&
                validateLT(index + 2, DroolsSoftKeywords.LOOP)) ||
               (validateText(text2Validate, DroolsSoftKeywords.AUTO) &&
                validateLT(index + 1, "-") &&
                validateLT(index + 2, DroolsSoftKeywords.FOCUS)) ||
               (validateText(text2Validate, DroolsSoftKeywords.LOCK) &&
                validateLT(index + 1, "-") &&
                validateLT(index + 2, DroolsSoftKeywords.ON) &&
                validateLT(index + 3, "-") &&
                validateLT(index + 4, DroolsSoftKeywords.ACTIVE)) ||
               (validateText(text2Validate, DroolsSoftKeywords.AGENDA) &&
                validateLT(index + 1, "-") &&
                validateLT(index + 2, DroolsSoftKeywords.GROUP)) ||
               (validateText(text2Validate, DroolsSoftKeywords.ACTIVATION) &&
                validateLT(index + 1, "-") &&
                validateLT(index + 2, DroolsSoftKeywords.GROUP)) ||
               (validateText(text2Validate, DroolsSoftKeywords.RULEFLOW) &&
                validateLT(index + 1, "-") &&
                validateLT(index + 2, DroolsSoftKeywords.GROUP)) ||
               (validateText(text2Validate, DroolsSoftKeywords.DATE) &&
                validateLT(index + 1, "-") &&
                validateLT(index + 2, DroolsSoftKeywords.EFFECTIVE)) ||
               (validateText(text2Validate, DroolsSoftKeywords.DATE) &&
                validateLT(index + 1, "-") &&
                validateLT(index + 2, DroolsSoftKeywords.EXPIRES)) ||
               validateText(text2Validate, DroolsSoftKeywords.DIALECT) ||
               validateText(text2Validate, DroolsSoftKeywords.CALENDARS) ||
               validateText(text2Validate, DroolsSoftKeywords.TIMER) ||
               validateText(text2Validate, DroolsSoftKeywords.DURATION) ||
               validateText(text2Validate, DroolsSoftKeywords.REFRACT) ||
               validateText(text2Validate, DroolsSoftKeywords.DIRECT);
    }

    public void reportError(RecognitionException ex) {
        // if we've already reported an error and have not matched a token
        // yet successfully, don't report any errors.
        if (state.errorRecovery) {
            return;
        }
        state.errorRecovery = true;
        errors.add(errorMessageFactory.createDroolsException(ex));
    }

    public void reportError(Exception e) {
        try {
            errors.add(errorMessageFactory.createDroolsException(e, input.LT(1)));
        } catch (Exception ignored) {
            // the factory itself failed (e.g. no usable token); fall back to a plain message
            errors.add(new DroolsParserException("Unexpected error: " + e.getMessage(), e));
        }
    }

    /** return the raw DroolsParserException errors */
    public List<DroolsParserException> getErrors() {
        return errors;
    }

    /** Return a list of pretty strings summarising the errors */
    public List<String> getErrorMessages() {
        List<String> messages = new ArrayList<>(errors.size());
        for (DroolsParserException activeException : errors) {
            messages.add(activeException.getMessage());
        }
        return messages;
    }

    /** return true if any parser errors were accumulated */
    public boolean hasErrors() {
        return !errors.isEmpty();
    }

    /**
     * Method that adds a paraphrase type into paraphrases stack.
     *
     * @param type paraphrase type
     */
    public void pushParaphrases(DroolsParaphraseTypes type) {
        Map<DroolsParaphraseTypes, String> activeMap = new HashMap<>();
        activeMap.put(type, "");
        paraphrases.push(activeMap);
    }

    public Map<DroolsParaphraseTypes, String> popParaphrases() {
        return paraphrases.pop();
    }

    /**
     * Method that sets paraphrase value for a type into paraphrases stack.
     *
     * @param type  paraphrase type
     * @param value paraphrase value
     */
    public void setParaphrasesValue(DroolsParaphraseTypes type,
                                    String value) {
        paraphrases.peek().put(type, value);
    }

    /** Records the current lookahead token as the start of the builder's descr. */
    void setStart(DescrBuilder<?, ?> db) {
        setStart(db, input.LT(1));
    }

    void setStart(DescrBuilder<?, ?> db,
                  Token first) {
        if (db != null && first != null) {
            db.startCharacter(((CommonToken) first).getStartIndex())
              .startLocation(first.getLine(), first.getCharPositionInLine());
        }
    }

    void setStart(BaseDescr descr,
                  Token first) {
        if (descr != null && first != null) {
            descr.setLocation(first.getLine(), first.getCharPositionInLine());
            descr.setStartCharacter(((CommonToken) first).getStartIndex());
        }
    }

    /** Records the last consumed token as the end of the descr. */
    void setEnd(BaseDescr descr) {
        Token last = input.LT(-1);
        if (descr != null && last != null) {
            int endLocation = last.getText() != null ? last.getCharPositionInLine() + last.getText().length() - 1 : last.getCharPositionInLine();
            descr.setEndCharacter(((CommonToken) last).getStopIndex() + 1);
            descr.setEndLocation(last.getLine(), endLocation);
        }
    }

    void setEnd(DescrBuilder<?, ?> db) {
        Token last = input.LT(-1);
        if (db != null && last != null) {
            int endLocation = last.getText() != null ? last.getCharPositionInLine() + last.getText().length() - 1 : last.getCharPositionInLine();
            db.endCharacter(((CommonToken) last).getStopIndex() + 1)
              .endLocation(last.getLine(), endLocation);
        }
    }

    /**
     * Creates and starts the DescrBuilder appropriate for {@code clazz},
     * pushing paraphrases and beginning editor sentences where the grammar
     * element warrants it. Returns null while the parser is backtracking,
     * and also for the PACKAGE case (the caller keeps its own builder).
     *
     * @param ctxBuilder the enclosing builder, or null for a fresh package
     * @param clazz the builder type to create
     * @param param name parameter used by field/enum-literal/attribute builders
     */
    @SuppressWarnings("unchecked")
    public <T extends DescrBuilder<?, ?>> T start(DescrBuilder<?, ?> ctxBuilder,
                                                  Class<T> clazz,
                                                  String param) {
        if (state.backtracking == 0) {
            if (PackageDescrBuilder.class.isAssignableFrom(clazz)) {
                pushParaphrases(DroolsParaphraseTypes.PACKAGE);
                beginSentence(DroolsSentenceType.PACKAGE);
                setStart(ctxBuilder);
            } else if (ImportDescrBuilder.class.isAssignableFrom(clazz)) {
                ImportDescrBuilder imp;
                // "import function ..." / "import static ..." use a function import
                if (validateLT(2, DroolsSoftKeywords.FUNCTION) ||
                    validateLT(2, DroolsSoftKeywords.STATIC)) {
                    imp = ctxBuilder == null ?
                          DescrFactory.newPackage().newFunctionImport() :
                          ((PackageDescrBuilder) ctxBuilder).newFunctionImport();
                } else {
                    imp = ctxBuilder == null ?
                          DescrFactory.newPackage().newImport() :
                          ((PackageDescrBuilder) ctxBuilder).newImport();
                }
                pushParaphrases(DroolsParaphraseTypes.IMPORT);
                beginSentence(DroolsSentenceType.IMPORT_STATEMENT);
                setStart(imp);
                return (T) imp;
            } else if (UnitDescrBuilder.class.isAssignableFrom(clazz)) {
                UnitDescrBuilder imp = ctxBuilder == null ?
                                       DescrFactory.newPackage().newUnit() :
                                       ((PackageDescrBuilder) ctxBuilder).newUnit();
                pushParaphrases(DroolsParaphraseTypes.UNIT);
                beginSentence(DroolsSentenceType.UNIT);
                setStart(imp);
                return (T) imp;
            } else if (AccumulateImportDescrBuilder.class.isAssignableFrom(clazz)) {
                AccumulateImportDescrBuilder imp = ctxBuilder == null ?
                                                   DescrFactory.newPackage().newAccumulateImport() :
                                                   ((PackageDescrBuilder) ctxBuilder).newAccumulateImport();
                pushParaphrases(DroolsParaphraseTypes.ACCUMULATE_IMPORT);
                beginSentence(DroolsSentenceType.ACCUMULATE_IMPORT_STATEMENT);
                setStart(imp);
                return (T) imp;
            } else if (GlobalDescrBuilder.class.isAssignableFrom(clazz)) {
                GlobalDescrBuilder global = ctxBuilder == null ?
                                            DescrFactory.newPackage().newGlobal() :
                                            ((PackageDescrBuilder) ctxBuilder).newGlobal();
                pushParaphrases(DroolsParaphraseTypes.GLOBAL);
                beginSentence(DroolsSentenceType.GLOBAL);
                setStart(global);
                return (T) global;
            } else if (DeclareDescrBuilder.class.isAssignableFrom(clazz)) {
                // plain "declare" wrapper: no paraphrase/sentence of its own
                DeclareDescrBuilder declare = ctxBuilder == null ?
                                              DescrFactory.newPackage().newDeclare() :
                                              ((PackageDescrBuilder) ctxBuilder).newDeclare();
                return (T) declare;
            } else if (TypeDeclarationDescrBuilder.class.isAssignableFrom(clazz)) {
                TypeDeclarationDescrBuilder declare = ctxBuilder == null ?
                                                      DescrFactory.newPackage().newDeclare().type() :
                                                      ((DeclareDescrBuilder) ctxBuilder).type();
                pushParaphrases(DroolsParaphraseTypes.TYPE_DECLARE);
                beginSentence(DroolsSentenceType.TYPE_DECLARATION);
                setStart(declare);
                return (T) declare;
            } else if (EnumDeclarationDescrBuilder.class.isAssignableFrom(clazz)) {
                EnumDeclarationDescrBuilder declare = ctxBuilder == null ?
                                                      DescrFactory.newPackage().newDeclare().enumerative() :
                                                      ((DeclareDescrBuilder) ctxBuilder).enumerative();
                pushParaphrases(DroolsParaphraseTypes.ENUM_DECLARE);
                beginSentence(DroolsSentenceType.ENUM_DECLARATION);
                setStart(declare);
                return (T) declare;
            } else if (EntryPointDeclarationDescrBuilder.class.isAssignableFrom(clazz)) {
                EntryPointDeclarationDescrBuilder declare = ctxBuilder == null ?
                                                            DescrFactory.newPackage().newDeclare().entryPoint() :
                                                            ((DeclareDescrBuilder) ctxBuilder).entryPoint();
                pushParaphrases(DroolsParaphraseTypes.ENTRYPOINT_DECLARE);
                beginSentence(DroolsSentenceType.ENTRYPOINT_DECLARATION);
                setStart(declare);
                return (T) declare;
            } else if (WindowDeclarationDescrBuilder.class.isAssignableFrom(clazz)) {
                WindowDeclarationDescrBuilder declare = ctxBuilder == null ?
                                                        DescrFactory.newPackage().newDeclare().window() :
                                                        ((DeclareDescrBuilder) ctxBuilder).window();
                pushParaphrases(DroolsParaphraseTypes.WINDOW_DECLARE);
                beginSentence(DroolsSentenceType.WINDOW_DECLARATION);
                setStart(declare);
                return (T) declare;
            } else if (FieldDescrBuilder.class.isAssignableFrom(clazz)) {
                FieldDescrBuilder field = ((AbstractClassTypeDeclarationBuilder) ctxBuilder).newField(param);
                setStart(field);
                return (T) field;
            } else if (EnumLiteralDescrBuilder.class.isAssignableFrom(clazz)) {
                EnumLiteralDescrBuilder literal = ((EnumDeclarationDescrBuilder) ctxBuilder).newEnumLiteral(param);
                setStart(literal);
                return (T) literal;
            } else if (FunctionDescrBuilder.class.isAssignableFrom(clazz)) {
                FunctionDescrBuilder function;
                if (ctxBuilder == null) {
                    function = DescrFactory.newPackage().newFunction();
                } else {
                    PackageDescrBuilder pkg = (PackageDescrBuilder) ctxBuilder;
                    function = pkg.newFunction().namespace(pkg.getDescr().getName());
                    // functions inherit the package-level dialect, if one was set
                    AttributeDescr attribute = pkg.getDescr().getAttribute("dialect");
                    if (attribute != null) {
                        function.dialect(attribute.getValue());
                    }
                }
                pushParaphrases(DroolsParaphraseTypes.FUNCTION);
                beginSentence(DroolsSentenceType.FUNCTION);
                setStart(function);
                return (T) function;
            } else if (RuleDescrBuilder.class.isAssignableFrom(clazz)) {
                RuleDescrBuilder rule = ctxBuilder == null ?
                                        DescrFactory.newPackage().newRule() :
                                        ((PackageDescrBuilder) ctxBuilder).newRule();
                pushParaphrases(DroolsParaphraseTypes.RULE);
                beginSentence(DroolsSentenceType.RULE);
                setStart(rule);
                return (T) rule;
            } else if (QueryDescrBuilder.class.isAssignableFrom(clazz)) {
                QueryDescrBuilder query = ctxBuilder == null ?
                                          DescrFactory.newPackage().newQuery() :
                                          ((PackageDescrBuilder) ctxBuilder).newQuery();
                pushParaphrases(DroolsParaphraseTypes.QUERY);
                beginSentence(DroolsSentenceType.QUERY);
                setStart(query);
                return (T) query;
            } else if (AttributeDescrBuilder.class.isAssignableFrom(clazz)) {
                AttributeDescrBuilder<?> attribute = ((AttributeSupportBuilder<?>) ctxBuilder).attribute(param);
                setStart(attribute);
                return (T) attribute;
            } else if (EvalDescrBuilder.class.isAssignableFrom(clazz)) {
                EvalDescrBuilder<?> eval = ((CEDescrBuilder<?, ?>) ctxBuilder).eval();
                pushParaphrases(DroolsParaphraseTypes.EVAL);
                beginSentence(DroolsSentenceType.EVAL);
                setStart(eval);
                return (T) eval;
            } else if (ForallDescrBuilder.class.isAssignableFrom(clazz)) {
                ForallDescrBuilder<?> forall = ((CEDescrBuilder<?, ?>) ctxBuilder).forall();
                setStart(forall);
                return (T) forall;
            } else if (CEDescrBuilder.class.isAssignableFrom(clazz)) {
                setStart(ctxBuilder);
                return (T) ctxBuilder;
            } else if (PatternDescrBuilder.class.isAssignableFrom(clazz)) {
                PatternDescrBuilder<?> pattern = ((PatternContainerDescrBuilder<?, ?>) ctxBuilder).pattern();
                pushParaphrases(DroolsParaphraseTypes.PATTERN);
                setStart(pattern);
                return (T) pattern;
            } else if (CollectDescrBuilder.class.isAssignableFrom(clazz)) {
                CollectDescrBuilder<?> collect = ((PatternDescrBuilder<?>) ctxBuilder).from().collect();
                setStart(collect);
                return (T) collect;
            } else if (GroupByDescrBuilder.class.isAssignableFrom(clazz)) {
                // GroupBy extends Accumulate and thus need to be before it
                GroupByDescrBuilder<?> groupBy = ((PatternDescrBuilder<?>) ctxBuilder).from().groupBy();
                setStart(groupBy);
                return (T) groupBy;
            } else if (AccumulateDescrBuilder.class.isAssignableFrom(clazz)) {
                AccumulateDescrBuilder<?> accumulate = ((PatternDescrBuilder<?>) ctxBuilder).from().accumulate();
                setStart(accumulate);
                return (T) accumulate;
            } else if (BehaviorDescrBuilder.class.isAssignableFrom(clazz)) {
                BehaviorDescrBuilder<?> behavior = ((PatternDescrBuilder<?>) ctxBuilder).behavior();
                setStart(behavior);
                return (T) behavior;
            } else if (NamedConsequenceDescrBuilder.class.isAssignableFrom(clazz)) {
                NamedConsequenceDescrBuilder<?> namedConsequence = ((CEDescrBuilder<?, ?>) ctxBuilder).namedConsequence();
                setStart(namedConsequence);
                return (T) namedConsequence;
            } else if (ConditionalBranchDescrBuilder.class.isAssignableFrom(clazz)) {
                ConditionalBranchDescrBuilder<?> conditionalBranch = ((CEDescrBuilder<?, ?>) ctxBuilder).conditionalBranch();
                setStart(conditionalBranch);
                return (T) conditionalBranch;
            }
        }
        return null;
    }

    /**
     * Finishes the given builder: pops paraphrases for the builder kinds
     * that pushed one in start(), fires the after-rule-added callback for
     * rules, and records the end position. Returns null while backtracking.
     */
    @SuppressWarnings("unchecked")
    public <T extends DescrBuilder<?, ?>> T end(Class<T> clazz,
                                                DescrBuilder<?, ?> builder) {
        if (state.backtracking == 0) {
            // these builder kinds never pushed a paraphrase in start()
            if (!(FieldDescrBuilder.class.isAssignableFrom(clazz) ||
                  AttributeDescrBuilder.class.isAssignableFrom(clazz) ||
                  CEDescrBuilder.class.isAssignableFrom(clazz) ||
                  CollectDescrBuilder.class.isAssignableFrom(clazz) ||
                  AccumulateDescrBuilder.class.isAssignableFrom(clazz) ||
                  ForallDescrBuilder.class.isAssignableFrom(clazz) ||
                  BehaviorDescrBuilder.class.isAssignableFrom(clazz) ||
                  ConditionalBranchDescrBuilder.class.isAssignableFrom(clazz) ||
                  NamedConsequenceDescrBuilder.class.isAssignableFrom(clazz))) {
                popParaphrases();
            }
            if (RuleDescrBuilder.class.isAssignableFrom(clazz)) {
                RuleDescrBuilder ruleDescrBuilder = (RuleDescrBuilder) builder;
                ruleDescrBuilder.end().getDescr().afterRuleAdded(ruleDescrBuilder.getDescr());
            }
            setEnd(builder);
            return (T) builder;
        }
        return null;
    }

    public String[] getStatementKeywords() {
        return statementKeywords;
    }

    public static void logHalfConstraintWarn(String logicalConstraint, BaseDescr descr) {
        if (descr instanceof RelationalExprDescr relational) {
            String halfConstraintStr = logicalConstraint + " " + relational.getOperator() + " " + relational.getRight().toString();
            logHalfConstraintWarn("The use of a half constraint '" + halfConstraintStr + "' is deprecated" +
                                  " and will be removed in the future version (LanguageLevel.DRL10)." +
                                  " Please add a left operand.");
        }
    }

    public static void logHalfConstraintWarn(String message) {
        // was '> 10', which let an 11th warning slip out after the
        // "will be suppressed" notice had already been printed
        if (logCounterHalfConstraint >= 10) {
            return; // suppress further warnings
        }
        logCounterHalfConstraint++;
        LOG.warn(message);
        if (logCounterHalfConstraint == 10) {
            LOG.warn("Further warnings about half constraints will be suppressed.");
        }
    }

    public static void logCustomOperatorWarn(Token token) {
        if (logCounterCustomOperator > 1) {
            return; // suppress further warnings
        }
        String operator = token.getText();
        if (BUILT_IN_OPERATORS.contains(operator)) {
            return; // built-in operator
        }
        logCounterCustomOperator++;
        LOG.warn("Custom operator will require a prefix '##' in the future version (LanguageLevel.DRL10)." +
                 " If you use LanguageLevel.DRL10, you need to change '{}' to '##{}'." +
                 " You don't need to change the rule while you use the default LanguageLevel.DRL6.", operator, operator);
    }

    public static void logInfixOrWarn(CEDescrBuilder descrBuilder) {
        // was '> 5': one extra warning could follow the suppression notice
        if (logCounterInfixOr >= 5) {
            return; // suppress further warnings
        }
        Optional<RuleDescr> ruleDescrOpt = getParentRuleDescr(descrBuilder);
        if (ruleDescrOpt.isEmpty()) {
            return;
        }
        logCounterInfixOr++;
        LOG.warn("Connecting patterns with '||' is deprecated and will be removed in the future version (LanguageLevel.DRL10)." +
                 " Please replace '||' with 'or' in rule '{}'. '||' in a constraint will remain supported.", ruleDescrOpt.get().getName());
        if (logCounterInfixOr == 5) {
            LOG.warn("Further warnings about '||' will be suppressed.");
        }
    }

    public static void logInfixAndWarn(CEDescrBuilder descrBuilder) {
        // was '> 5': one extra warning could follow the suppression notice
        if (logCounterInfixAnd >= 5) {
            return; // suppress further warnings
        }
        Optional<RuleDescr> ruleDescrOpt = getParentRuleDescr(descrBuilder);
        if (ruleDescrOpt.isEmpty()) {
            return;
        }
        logCounterInfixAnd++;
        LOG.warn("Connecting patterns with '&&' is deprecated and will be removed in the future version (LanguageLevel.DRL10)." +
                 " Please replace '&&' with 'and' in rule '{}'. '&&' in a constraint will remain supported.", ruleDescrOpt.get().getName());
        if (logCounterInfixAnd == 5) {
            LOG.warn("Further warnings about '&&' will be suppressed.");
        }
    }

    /** Walks up the builder chain to the enclosing rule's descr, if any. */
    private static Optional<RuleDescr> getParentRuleDescr(DescrBuilder<?, ?> descrBuilder) {
        while (descrBuilder != null) {
            if (descrBuilder instanceof RuleDescrBuilder) {
                return Optional.of(((RuleDescrBuilder) descrBuilder).getDescr());
            } else if (descrBuilder instanceof PackageDescrBuilder) {
                return Optional.empty();
            }
            descrBuilder = descrBuilder.getParent();
        }
        return Optional.empty();
    }

    public static void logAnnotationInLhsPatternWarn(CEDescrBuilder descrBuilder) {
        // was '> 5': one extra warning could follow the suppression notice
        if (logCounterAnnotationInLhsPattern >= 5) {
            return; // suppress further warnings
        }
        Optional<RuleDescr> ruleDescrOpt = getParentRuleDescr(descrBuilder);
        if (ruleDescrOpt.isEmpty()) {
            return;
        }
        BaseDescr descr = descrBuilder.getDescr();
        if (descr instanceof AnnotatedBaseDescr annotated) {
            String annotationNames = annotated.getAnnotationNames().stream().collect(Collectors.joining(", "));
            logCounterAnnotationInLhsPattern++;
            LOG.warn("Annotation inside LHS patterns is deprecated and will be removed in the future version (LanguageLevel.DRL10)." +
                     " Found '{}' in rule '{}'. Annotation in other places will remain supported.", annotationNames, ruleDescrOpt.get().getName());
            if (logCounterAnnotationInLhsPattern == 5) {
                LOG.warn("Further warnings about Annotation inside LHS patterns will be suppressed.");
            }
        }
    }

    public static void logAgendaGroupWarn(AttributeDescr attributeDescr) {
        // was '> 5': one extra warning could follow the suppression notice
        if (logCounterAgendaGroup >= 5) {
            return; // suppress further warnings
        }
        logCounterAgendaGroup++;
        LOG.warn("'agenda-group \"{}\"' is found. 'agenda-group' is deprecated and will be dropped in the future version (LanguageLevel.DRL10)." +
                 " Please replace 'agenda-group' with 'ruleflow-group', which works as the same as 'agenda-group'.", attributeDescr.getValue());
        if (logCounterAgendaGroup == 5) {
            LOG.warn("Further warnings about 'agenda-group' will be suppressed.");
        }
    }
}
/*
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/*
* This file is available under and governed by the GNU General Public
* License version 2 only, as published by the Free Software Foundation.
* However, the following notice accompanied the original version of this
* file:
*
* Written by Doug Lea with assistance from members of JCP JSR-166
* Expert Group and released to the public domain, as explained at
* http://creativecommons.org/publicdomain/zero/1.0/
*/
package java.util.concurrent;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.ReentrantLock;
import java.util.AbstractQueue;
import java.util.Arrays;
import java.util.Collection;
import java.util.Comparator;
import java.util.Iterator;
import java.util.NoSuchElementException;
import java.util.PriorityQueue;
import java.util.Queue;
import java.util.SortedSet;
import java.util.Spliterator;
import java.util.function.Consumer;
/**
* An unbounded {@linkplain BlockingQueue blocking queue} that uses
* the same ordering rules as class {@link PriorityQueue} and supplies
* blocking retrieval operations. While this queue is logically
* unbounded, attempted additions may fail due to resource exhaustion
* (causing {@code OutOfMemoryError}). This class does not permit
* {@code null} elements. A priority queue relying on {@linkplain
* Comparable natural ordering} also does not permit insertion of
* non-comparable objects (doing so results in
* {@code ClassCastException}).
*
* <p>This class and its iterator implement all of the
* <em>optional</em> methods of the {@link Collection} and {@link
* Iterator} interfaces. The Iterator provided in method {@link
* #iterator()} is <em>not</em> guaranteed to traverse the elements of
* the PriorityBlockingQueue in any particular order. If you need
* ordered traversal, consider using
* {@code Arrays.sort(pq.toArray())}. Also, method {@code drainTo}
* can be used to <em>remove</em> some or all elements in priority
* order and place them in another collection.
*
* <p>Operations on this class make no guarantees about the ordering
* of elements with equal priority. If you need to enforce an
* ordering, you can define custom classes or comparators that use a
* secondary key to break ties in primary priority values. For
* example, here is a class that applies first-in-first-out
* tie-breaking to comparable elements. To use it, you would insert a
* {@code new FIFOEntry(anEntry)} instead of a plain entry object.
*
* <pre> {@code
* class FIFOEntry<E extends Comparable<? super E>>
* implements Comparable<FIFOEntry<E>> {
* static final AtomicLong seq = new AtomicLong(0);
* final long seqNum;
* final E entry;
* public FIFOEntry(E entry) {
* seqNum = seq.getAndIncrement();
* this.entry = entry;
* }
* public E getEntry() { return entry; }
* public int compareTo(FIFOEntry<E> other) {
* int res = entry.compareTo(other.entry);
* if (res == 0 && other.entry != this.entry)
* res = (seqNum < other.seqNum ? -1 : 1);
* return res;
* }
* }}</pre>
*
* <p>This class is a member of the
* <a href="{@docRoot}/../technotes/guides/collections/index.html">
* Java Collections Framework</a>.
*
* @since 1.5
* @author Doug Lea
* @param <E> the type of elements held in this collection
*/
@SuppressWarnings("unchecked")
public class PriorityBlockingQueue<E> extends AbstractQueue<E>
implements BlockingQueue<E>, java.io.Serializable {
    private static final long serialVersionUID = 5595510919245408276L;

    /*
     * The implementation uses an array-based binary heap, with public
     * operations protected with a single lock. However, allocation
     * during resizing uses a simple spinlock (used only while not
     * holding main lock) in order to allow takes to operate
     * concurrently with allocation. This avoids repeated
     * postponement of waiting consumers and consequent element
     * build-up. The need to back away from lock during allocation
     * makes it impossible to simply wrap delegated
     * java.util.PriorityQueue operations within a lock, as was done
     * in a previous version of this class. To maintain
     * interoperability, a plain PriorityQueue is still used during
     * serialization, which maintains compatibility at the expense of
     * transiently doubling overhead.
     */

    /**
     * Default array capacity.
     */
    private static final int DEFAULT_INITIAL_CAPACITY = 11;

    /**
     * The maximum size of array to allocate.
     * Some VMs reserve some header words in an array.
     * Attempts to allocate larger arrays may result in
     * OutOfMemoryError: Requested array size exceeds VM limit
     */
    private static final int MAX_ARRAY_SIZE = Integer.MAX_VALUE - 8;

    /**
     * Priority queue represented as a balanced binary heap: the two
     * children of queue[n] are queue[2*n+1] and queue[2*(n+1)]. The
     * priority queue is ordered by comparator, or by the elements'
     * natural ordering, if comparator is null: For each node n in the
     * heap and each descendant d of n, n <= d. The element with the
     * lowest value is in queue[0], assuming the queue is nonempty.
     */
    private transient Object[] queue;

    /**
     * The number of elements in the priority queue.
     * Guarded by {@code lock}.
     */
    private transient int size;

    /**
     * The comparator, or null if priority queue uses elements'
     * natural ordering.
     */
    private transient Comparator<? super E> comparator;

    /**
     * Lock used for all public operations
     */
    private final ReentrantLock lock;

    /**
     * Condition for blocking when empty
     */
    private final Condition notEmpty;

    /**
     * Spinlock for allocation, acquired via CAS (see tryGrow):
     * 1 means held, 0 means free.
     */
    private transient volatile int allocationSpinLock;

    /**
     * A plain PriorityQueue used only for serialization,
     * to maintain compatibility with previous versions
     * of this class. Non-null only during serialization/deserialization.
     */
    private PriorityQueue<E> q;
    /**
     * Creates a {@code PriorityBlockingQueue} with the default
     * initial capacity (11) that orders its elements according to
     * their {@linkplain Comparable natural ordering}.
     */
    public PriorityBlockingQueue() {
        // delegate: default capacity, natural ordering (null comparator)
        this(DEFAULT_INITIAL_CAPACITY, null);
    }
    /**
     * Creates a {@code PriorityBlockingQueue} with the specified
     * initial capacity that orders its elements according to their
     * {@linkplain Comparable natural ordering}.
     *
     * @param initialCapacity the initial capacity for this priority queue
     * @throws IllegalArgumentException if {@code initialCapacity} is less
     *         than 1
     */
    public PriorityBlockingQueue(int initialCapacity) {
        // delegate: natural ordering (null comparator)
        this(initialCapacity, null);
    }
/**
* Creates a {@code PriorityBlockingQueue} with the specified initial
* capacity that orders its elements according to the specified
* comparator.
*
* @param initialCapacity the initial capacity for this priority queue
* @param comparator the comparator that will be used to order this
* priority queue. If {@code null}, the {@linkplain Comparable
* natural ordering} of the elements will be used.
* @throws IllegalArgumentException if {@code initialCapacity} is less
* than 1
*/
public PriorityBlockingQueue(int initialCapacity,
Comparator<? super E> comparator) {
if (initialCapacity < 1)
throw new IllegalArgumentException();
this.lock = new ReentrantLock();
this.notEmpty = lock.newCondition();
this.comparator = comparator;
this.queue = new Object[initialCapacity];
}
    /**
     * Creates a {@code PriorityBlockingQueue} containing the elements
     * in the specified collection.  If the specified collection is a
     * {@link SortedSet} or a {@link PriorityQueue}, this
     * priority queue will be ordered according to the same ordering.
     * Otherwise, this priority queue will be ordered according to the
     * {@linkplain Comparable natural ordering} of its elements.
     *
     * @param c the collection whose elements are to be placed
     *        into this priority queue
     * @throws ClassCastException if elements of the specified collection
     *         cannot be compared to one another according to the priority
     *         queue's ordering
     * @throws NullPointerException if the specified collection or any
     *         of its elements are null
     */
    public PriorityBlockingQueue(Collection<? extends E> c) {
        this.lock = new ReentrantLock();
        this.notEmpty = lock.newCondition();
        boolean heapify = true; // true if not known to be in heap order
        boolean screen = true;  // true if must screen for nulls
        if (c instanceof SortedSet<?>) {
            // a SortedSet iterates in sorted order, which is already a
            // valid heap, so no heapify pass is needed
            SortedSet<? extends E> ss = (SortedSet<? extends E>) c;
            this.comparator = (Comparator<? super E>) ss.comparator();
            heapify = false;
        }
        else if (c instanceof PriorityBlockingQueue<?>) {
            PriorityBlockingQueue<? extends E> pq =
                (PriorityBlockingQueue<? extends E>) c;
            this.comparator = (Comparator<? super E>) pq.comparator();
            screen = false; // the source queue never contained nulls
            if (pq.getClass() == PriorityBlockingQueue.class) // exact match
                heapify = false; // subclasses might have a different layout
        }
        Object[] a = c.toArray();
        int n = a.length;
        // If c.toArray incorrectly doesn't return Object[], copy it.
        if (a.getClass() != Object[].class)
            a = Arrays.copyOf(a, n, Object[].class);
        if (screen && (n == 1 || this.comparator != null)) {
            // explicit null screening is needed only when the heapify pass
            // below would not already reject nulls via Comparable.compareTo
            // (single element, or an explicit comparator in use)
            for (int i = 0; i < n; ++i)
                if (a[i] == null)
                    throw new NullPointerException();
        }
        this.queue = a;
        this.size = n;
        if (heapify)
            heapify(); // defined later in this class
    }
    /**
     * Tries to grow array to accommodate at least one more element
     * (but normally expand by about 50%), giving up (allowing retry)
     * on contention (which we expect to be rare). Call only while
     * holding lock.
     *
     * <p>The main lock is deliberately released during allocation so other
     * threads can proceed; a separate CAS-guarded spinlock
     * ({@code allocationSpinLock}) serializes the allocation itself. The
     * caller's enqueue loop re-checks capacity after this method returns.
     *
     * @param array the heap array
     * @param oldCap the length of the array
     */
    private void tryGrow(Object[] array, int oldCap) {
        lock.unlock(); // must release and then re-acquire main lock
        Object[] newArray = null;
        // Claim the allocation spinlock via CAS; losers fall through and yield.
        if (allocationSpinLock == 0 &&
            UNSAFE.compareAndSwapInt(this, allocationSpinLockOffset,
                                     0, 1)) {
            try {
                int newCap = oldCap + ((oldCap < 64) ?
                                       (oldCap + 2) : // grow faster if small
                                       (oldCap >> 1));
                if (newCap - MAX_ARRAY_SIZE > 0) {    // possible overflow
                    int minCap = oldCap + 1;
                    if (minCap < 0 || minCap > MAX_ARRAY_SIZE)
                        throw new OutOfMemoryError();
                    newCap = MAX_ARRAY_SIZE;
                }
                // Only allocate if no other thread replaced the array meanwhile.
                if (newCap > oldCap && queue == array)
                    newArray = new Object[newCap];
            } finally {
                allocationSpinLock = 0;
            }
        }
        if (newArray == null) // back off if another thread is allocating
            Thread.yield();
        lock.lock();
        // Install the new array only if ours is still current (no races won by others).
        if (newArray != null && queue == array) {
            queue = newArray;
            System.arraycopy(array, 0, newArray, 0, oldCap);
        }
    }
/**
* Mechanics for poll(). Call only while holding lock.
*/
private E dequeue() {
int n = size - 1;
if (n < 0)
return null;
else {
Object[] array = queue;
E result = (E) array[0];
E x = (E) array[n];
array[n] = null;
Comparator<? super E> cmp = comparator;
if (cmp == null)
siftDownComparable(0, x, array, n);
else
siftDownUsingComparator(0, x, array, n, cmp);
size = n;
return result;
}
}
/**
* Inserts item x at position k, maintaining heap invariant by
* promoting x up the tree until it is greater than or equal to
* its parent, or is the root.
*
* To simplify and speed up coercions and comparisons. the
* Comparable and Comparator versions are separated into different
* methods that are otherwise identical. (Similarly for siftDown.)
* These methods are static, with heap state as arguments, to
* simplify use in light of possible comparator exceptions.
*
* @param k the position to fill
* @param x the item to insert
* @param array the heap array
*/
private static <T> void siftUpComparable(int k, T x, Object[] array) {
Comparable<? super T> key = (Comparable<? super T>) x;
while (k > 0) {
int parent = (k - 1) >>> 1;
Object e = array[parent];
if (key.compareTo((T) e) >= 0)
break;
array[k] = e;
k = parent;
}
array[k] = key;
}
private static <T> void siftUpUsingComparator(int k, T x, Object[] array,
Comparator<? super T> cmp) {
while (k > 0) {
int parent = (k - 1) >>> 1;
Object e = array[parent];
if (cmp.compare(x, (T) e) >= 0)
break;
array[k] = e;
k = parent;
}
array[k] = x;
}
/**
* Inserts item x at position k, maintaining heap invariant by
* demoting x down the tree repeatedly until it is less than or
* equal to its children or is a leaf.
*
* @param k the position to fill
* @param x the item to insert
* @param array the heap array
* @param n heap size
*/
private static <T> void siftDownComparable(int k, T x, Object[] array,
int n) {
if (n > 0) {
Comparable<? super T> key = (Comparable<? super T>)x;
int half = n >>> 1; // loop while a non-leaf
while (k < half) {
int child = (k << 1) + 1; // assume left child is least
Object c = array[child];
int right = child + 1;
if (right < n &&
((Comparable<? super T>) c).compareTo((T) array[right]) > 0)
c = array[child = right];
if (key.compareTo((T) c) <= 0)
break;
array[k] = c;
k = child;
}
array[k] = key;
}
}
private static <T> void siftDownUsingComparator(int k, T x, Object[] array,
int n,
Comparator<? super T> cmp) {
if (n > 0) {
int half = n >>> 1;
while (k < half) {
int child = (k << 1) + 1;
Object c = array[child];
int right = child + 1;
if (right < n && cmp.compare((T) c, (T) array[right]) > 0)
c = array[child = right];
if (cmp.compare(x, (T) c) <= 0)
break;
array[k] = c;
k = child;
}
array[k] = x;
}
}
/**
* Establishes the heap invariant (described above) in the entire tree,
* assuming nothing about the order of the elements prior to the call.
*/
private void heapify() {
Object[] array = queue;
int n = size;
int half = (n >>> 1) - 1;
Comparator<? super E> cmp = comparator;
if (cmp == null) {
for (int i = half; i >= 0; i--)
siftDownComparable(i, (E) array[i], array, n);
}
else {
for (int i = half; i >= 0; i--)
siftDownUsingComparator(i, (E) array[i], array, n, cmp);
}
}
    /**
     * Inserts the specified element into this priority queue.
     *
     * @param e the element to add
     * @return {@code true} (as specified by {@link Collection#add})
     * @throws ClassCastException if the specified element cannot be compared
     *         with elements currently in the priority queue according to the
     *         priority queue's ordering
     * @throws NullPointerException if the specified element is null
     */
    public boolean add(E e) {
        // Delegates to offer(): the queue is unbounded, so offer never fails.
        return offer(e);
    }
    /**
     * Inserts the specified element into this priority queue.
     * As the queue is unbounded, this method will never return {@code false}.
     *
     * @param e the element to add
     * @return {@code true} (as specified by {@link Queue#offer})
     * @throws ClassCastException if the specified element cannot be compared
     *         with elements currently in the priority queue according to the
     *         priority queue's ordering
     * @throws NullPointerException if the specified element is null
     */
    public boolean offer(E e) {
        if (e == null)
            throw new NullPointerException();
        final ReentrantLock lock = this.lock;
        lock.lock();
        int n, cap;
        Object[] array;
        // Re-read size/array each pass: tryGrow drops and re-acquires the
        // lock, so another thread may have replaced the array meanwhile.
        while ((n = size) >= (cap = (array = queue).length))
            tryGrow(array, cap);
        try {
            Comparator<? super E> cmp = comparator;
            if (cmp == null)
                siftUpComparable(n, e, array);
            else
                siftUpUsingComparator(n, e, array, cmp);
            size = n + 1;
            // Wake one consumer blocked in take()/poll(timeout).
            notEmpty.signal();
        } finally {
            lock.unlock();
        }
        return true;
    }
/**
* Inserts the specified element into this priority queue.
* As the queue is unbounded, this method will never block.
*
* @param e the element to add
* @throws ClassCastException if the specified element cannot be compared
* with elements currently in the priority queue according to the
* priority queue's ordering
* @throws NullPointerException if the specified element is null
*/
public void put(E e) {
offer(e); // never need to block
}
/**
* Inserts the specified element into this priority queue.
* As the queue is unbounded, this method will never block or
* return {@code false}.
*
* @param e the element to add
* @param timeout This parameter is ignored as the method never blocks
* @param unit This parameter is ignored as the method never blocks
* @return {@code true} (as specified by
* {@link BlockingQueue#offer(Object,long,TimeUnit) BlockingQueue.offer})
* @throws ClassCastException if the specified element cannot be compared
* with elements currently in the priority queue according to the
* priority queue's ordering
* @throws NullPointerException if the specified element is null
*/
public boolean offer(E e, long timeout, TimeUnit unit) {
return offer(e); // never need to block
}
public E poll() {
final ReentrantLock lock = this.lock;
lock.lock();
try {
return dequeue();
} finally {
lock.unlock();
}
}
public E take() throws InterruptedException {
final ReentrantLock lock = this.lock;
lock.lockInterruptibly();
E result;
try {
while ( (result = dequeue()) == null)
notEmpty.await();
} finally {
lock.unlock();
}
return result;
}
public E poll(long timeout, TimeUnit unit) throws InterruptedException {
long nanos = unit.toNanos(timeout);
final ReentrantLock lock = this.lock;
lock.lockInterruptibly();
E result;
try {
while ( (result = dequeue()) == null && nanos > 0)
nanos = notEmpty.awaitNanos(nanos);
} finally {
lock.unlock();
}
return result;
}
public E peek() {
final ReentrantLock lock = this.lock;
lock.lock();
try {
return (size == 0) ? null : (E) queue[0];
} finally {
lock.unlock();
}
}
/**
* Returns the comparator used to order the elements in this queue,
* or {@code null} if this queue uses the {@linkplain Comparable
* natural ordering} of its elements.
*
* @return the comparator used to order the elements in this queue,
* or {@code null} if this queue uses the natural
* ordering of its elements
*/
public Comparator<? super E> comparator() {
return comparator;
}
public int size() {
final ReentrantLock lock = this.lock;
lock.lock();
try {
return size;
} finally {
lock.unlock();
}
}
/**
* Always returns {@code Integer.MAX_VALUE} because
* a {@code PriorityBlockingQueue} is not capacity constrained.
* @return {@code Integer.MAX_VALUE} always
*/
public int remainingCapacity() {
return Integer.MAX_VALUE;
}
private int indexOf(Object o) {
if (o != null) {
Object[] array = queue;
int n = size;
for (int i = 0; i < n; i++)
if (o.equals(array[i]))
return i;
}
return -1;
}
    /**
     * Removes the ith element from queue.
     *
     * <p>The last element replaces the removed slot and is first sifted
     * down; if it did not move (still at index i), it may still violate the
     * invariant with respect to its new ancestors, so it is then sifted up.
     * Call only while holding the lock.
     */
    private void removeAt(int i) {
        Object[] array = queue;
        int n = size - 1;
        if (n == i) // removed last element
            array[i] = null;
        else {
            E moved = (E) array[n];
            array[n] = null; // drop reference for GC
            Comparator<? super E> cmp = comparator;
            if (cmp == null)
                siftDownComparable(i, moved, array, n);
            else
                siftDownUsingComparator(i, moved, array, n, cmp);
            // If the moved element stayed put, it may be smaller than its
            // parent; restore the invariant upward as well.
            if (array[i] == moved) {
                if (cmp == null)
                    siftUpComparable(i, moved, array);
                else
                    siftUpUsingComparator(i, moved, array, cmp);
            }
        }
        size = n;
    }
/**
* Removes a single instance of the specified element from this queue,
* if it is present. More formally, removes an element {@code e} such
* that {@code o.equals(e)}, if this queue contains one or more such
* elements. Returns {@code true} if and only if this queue contained
* the specified element (or equivalently, if this queue changed as a
* result of the call).
*
* @param o element to be removed from this queue, if present
* @return {@code true} if this queue changed as a result of the call
*/
public boolean remove(Object o) {
final ReentrantLock lock = this.lock;
lock.lock();
try {
int i = indexOf(o);
if (i == -1)
return false;
removeAt(i);
return true;
} finally {
lock.unlock();
}
}
/**
* Identity-based version for use in Itr.remove
*/
void removeEQ(Object o) {
final ReentrantLock lock = this.lock;
lock.lock();
try {
Object[] array = queue;
for (int i = 0, n = size; i < n; i++) {
if (o == array[i]) {
removeAt(i);
break;
}
}
} finally {
lock.unlock();
}
}
/**
* Returns {@code true} if this queue contains the specified element.
* More formally, returns {@code true} if and only if this queue contains
* at least one element {@code e} such that {@code o.equals(e)}.
*
* @param o object to be checked for containment in this queue
* @return {@code true} if this queue contains the specified element
*/
public boolean contains(Object o) {
final ReentrantLock lock = this.lock;
lock.lock();
try {
return indexOf(o) != -1;
} finally {
lock.unlock();
}
}
/**
* Returns an array containing all of the elements in this queue.
* The returned array elements are in no particular order.
*
* <p>The returned array will be "safe" in that no references to it are
* maintained by this queue. (In other words, this method must allocate
* a new array). The caller is thus free to modify the returned array.
*
* <p>This method acts as bridge between array-based and collection-based
* APIs.
*
* @return an array containing all of the elements in this queue
*/
public Object[] toArray() {
final ReentrantLock lock = this.lock;
lock.lock();
try {
return Arrays.copyOf(queue, size);
} finally {
lock.unlock();
}
}
public String toString() {
final ReentrantLock lock = this.lock;
lock.lock();
try {
int n = size;
if (n == 0)
return "[]";
StringBuilder sb = new StringBuilder();
sb.append('[');
for (int i = 0; i < n; ++i) {
Object e = queue[i];
sb.append(e == this ? "(this Collection)" : e);
if (i != n - 1)
sb.append(',').append(' ');
}
return sb.append(']').toString();
} finally {
lock.unlock();
}
}
/**
* @throws UnsupportedOperationException {@inheritDoc}
* @throws ClassCastException {@inheritDoc}
* @throws NullPointerException {@inheritDoc}
* @throws IllegalArgumentException {@inheritDoc}
*/
public int drainTo(Collection<? super E> c) {
return drainTo(c, Integer.MAX_VALUE);
}
/**
* @throws UnsupportedOperationException {@inheritDoc}
* @throws ClassCastException {@inheritDoc}
* @throws NullPointerException {@inheritDoc}
* @throws IllegalArgumentException {@inheritDoc}
*/
public int drainTo(Collection<? super E> c, int maxElements) {
if (c == null)
throw new NullPointerException();
if (c == this)
throw new IllegalArgumentException();
if (maxElements <= 0)
return 0;
final ReentrantLock lock = this.lock;
lock.lock();
try {
int n = Math.min(size, maxElements);
for (int i = 0; i < n; i++) {
c.add((E) queue[0]); // In this order, in case add() throws.
dequeue();
}
return n;
} finally {
lock.unlock();
}
}
/**
* Atomically removes all of the elements from this queue.
* The queue will be empty after this call returns.
*/
public void clear() {
final ReentrantLock lock = this.lock;
lock.lock();
try {
Object[] array = queue;
int n = size;
size = 0;
for (int i = 0; i < n; i++)
array[i] = null;
} finally {
lock.unlock();
}
}
/**
* Returns an array containing all of the elements in this queue; the
* runtime type of the returned array is that of the specified array.
* The returned array elements are in no particular order.
* If the queue fits in the specified array, it is returned therein.
* Otherwise, a new array is allocated with the runtime type of the
* specified array and the size of this queue.
*
* <p>If this queue fits in the specified array with room to spare
* (i.e., the array has more elements than this queue), the element in
* the array immediately following the end of the queue is set to
* {@code null}.
*
* <p>Like the {@link #toArray()} method, this method acts as bridge between
* array-based and collection-based APIs. Further, this method allows
* precise control over the runtime type of the output array, and may,
* under certain circumstances, be used to save allocation costs.
*
* <p>Suppose {@code x} is a queue known to contain only strings.
* The following code can be used to dump the queue into a newly
* allocated array of {@code String}:
*
* <pre> {@code String[] y = x.toArray(new String[0]);}</pre>
*
* Note that {@code toArray(new Object[0])} is identical in function to
* {@code toArray()}.
*
* @param a the array into which the elements of the queue are to
* be stored, if it is big enough; otherwise, a new array of the
* same runtime type is allocated for this purpose
* @return an array containing all of the elements in this queue
* @throws ArrayStoreException if the runtime type of the specified array
* is not a supertype of the runtime type of every element in
* this queue
* @throws NullPointerException if the specified array is null
*/
public <T> T[] toArray(T[] a) {
final ReentrantLock lock = this.lock;
lock.lock();
try {
int n = size;
if (a.length < n)
// Make a new array of a's runtime type, but my contents:
return (T[]) Arrays.copyOf(queue, size, a.getClass());
System.arraycopy(queue, 0, a, 0, n);
if (a.length > n)
a[n] = null;
return a;
} finally {
lock.unlock();
}
}
/**
* Returns an iterator over the elements in this queue. The
* iterator does not return the elements in any particular order.
*
* <p>The returned iterator is
* <a href="package-summary.html#Weakly"><i>weakly consistent</i></a>.
*
* @return an iterator over the elements in this queue
*/
public Iterator<E> iterator() {
return new Itr(toArray());
}
    /**
     * Snapshot iterator that works off copy of underlying q array.
     * Because it iterates a copy, it never throws
     * ConcurrentModificationException; remove() falls back to
     * identity-based removal against the live queue.
     */
    final class Itr implements Iterator<E> {
        final Object[] array; // Array of all elements
        int cursor;           // index of next element to return
        int lastRet;          // index of last element, or -1 if no such
        Itr(Object[] array) {
            lastRet = -1;
            this.array = array;
        }
        public boolean hasNext() {
            return cursor < array.length;
        }
        public E next() {
            if (cursor >= array.length)
                throw new NoSuchElementException();
            lastRet = cursor;
            return (E)array[cursor++];
        }
        public void remove() {
            if (lastRet < 0)
                throw new IllegalStateException();
            // Remove by identity: the snapshot element itself, not an equal one.
            removeEQ(array[lastRet]);
            lastRet = -1;
        }
    }
    /**
     * Saves this queue to a stream (that is, serializes it).
     *
     * For compatibility with previous version of this class, elements
     * are first copied to a java.util.PriorityQueue (the transient-like
     * field {@code q} expected by old serialized forms), which is then
     * serialized.
     *
     * @param s the stream
     * @throws java.io.IOException if an I/O error occurs
     */
    private void writeObject(java.io.ObjectOutputStream s)
        throws java.io.IOException {
        lock.lock();
        try {
            // avoid zero capacity argument
            q = new PriorityQueue<E>(Math.max(size, 1), comparator);
            q.addAll(this);
            s.defaultWriteObject();
        } finally {
            // Clear q so the helper queue does not outlive serialization.
            q = null;
            lock.unlock();
        }
    }
    /**
     * Reconstitutes this queue from a stream (that is, deserializes it).
     * The legacy PriorityQueue field {@code q} is read back, its elements
     * re-added (re-establishing the heap), and {@code q} discarded.
     *
     * @param s the stream
     * @throws ClassNotFoundException if the class of a serialized object
     *         could not be found
     * @throws java.io.IOException if an I/O error occurs
     */
    private void readObject(java.io.ObjectInputStream s)
        throws java.io.IOException, ClassNotFoundException {
        try {
            s.defaultReadObject();
            this.queue = new Object[q.size()];
            comparator = q.comparator();
            addAll(q);
        } finally {
            q = null;
        }
    }
    // Similar to Collections.ArraySnapshotSpliterator but avoids
    // commitment to toArray until needed: the snapshot is taken lazily on
    // first use (late binding), via getFence().
    static final class PBQSpliterator<E> implements Spliterator<E> {
        final PriorityBlockingQueue<E> queue;
        Object[] array; // snapshot of elements; null until first bound
        int index;      // current traversal position
        int fence;      // one past last index, or -1 until first bound
        PBQSpliterator(PriorityBlockingQueue<E> queue, Object[] array,
                       int index, int fence) {
            this.queue = queue;
            this.array = array;
            this.index = index;
            this.fence = fence;
        }
        // Binds the snapshot on first call; subsequent calls are cheap.
        final int getFence() {
            int hi;
            if ((hi = fence) < 0)
                hi = fence = (array = queue.toArray()).length;
            return hi;
        }
        public Spliterator<E> trySplit() {
            int hi = getFence(), lo = index, mid = (lo + hi) >>> 1;
            // Hand off the lower half; this spliterator keeps [mid, hi).
            return (lo >= mid) ? null :
                new PBQSpliterator<E>(queue, array, lo, index = mid);
        }
        @SuppressWarnings("unchecked")
        public void forEachRemaining(Consumer<? super E> action) {
            Object[] a; int i, hi; // hoist accesses and checks from loop
            if (action == null)
                throw new NullPointerException();
            if ((a = array) == null)
                fence = (a = queue.toArray()).length;
            if ((hi = fence) <= a.length &&
                (i = index) >= 0 && i < (index = hi)) {
                do { action.accept((E)a[i]); } while (++i < hi);
            }
        }
        public boolean tryAdvance(Consumer<? super E> action) {
            if (action == null)
                throw new NullPointerException();
            if (getFence() > index && index >= 0) {
                @SuppressWarnings("unchecked") E e = (E) array[index++];
                action.accept(e);
                return true;
            }
            return false;
        }
        public long estimateSize() { return (long)(getFence() - index); }
        public int characteristics() {
            return Spliterator.NONNULL | Spliterator.SIZED | Spliterator.SUBSIZED;
        }
    }
/**
* Returns a {@link Spliterator} over the elements in this queue.
*
* <p>The returned spliterator is
* <a href="package-summary.html#Weakly"><i>weakly consistent</i></a>.
*
* <p>The {@code Spliterator} reports {@link Spliterator#SIZED} and
* {@link Spliterator#NONNULL}.
*
* @implNote
* The {@code Spliterator} additionally reports {@link Spliterator#SUBSIZED}.
*
* @return a {@code Spliterator} over the elements in this queue
* @since 1.8
*/
public Spliterator<E> spliterator() {
return new PBQSpliterator<E>(this, null, 0, -1);
}
    // Unsafe mechanics: cache the field offset of allocationSpinLock so
    // tryGrow can CAS it directly. Initialization failure is fatal (Error).
    private static final sun.misc.Unsafe UNSAFE;
    private static final long allocationSpinLockOffset;
    static {
        try {
            UNSAFE = sun.misc.Unsafe.getUnsafe();
            Class<?> k = PriorityBlockingQueue.class;
            allocationSpinLockOffset = UNSAFE.objectFieldOffset
                (k.getDeclaredField("allocationSpinLock"));
        } catch (Exception e) {
            throw new Error(e);
        }
    }
}
|
googleapis/google-api-java-client-services | 36,869 | clients/google-api-services-mybusinessbusinesscalls/v1/2.0.0/com/google/api/services/mybusinessbusinesscalls/v1/MyBusinessBusinessCalls.java | /*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.mybusinessbusinesscalls.v1;
/**
* Service definition for MyBusinessBusinessCalls (v1).
*
* <p>
* The My Business Business Calls API manages business calls information of a location on Google and collect insights like the number of missed calls to their location. Additional information about Business calls can be found at https://support.google.com/business/answer/9688285?p=call_history. If the Google Business Profile links to a Google Ads account and call history is turned on, calls that last longer than a specific time, and that can be attributed to an ad interaction, will show in the linked Google Ads account under the "Calls from Ads" conversion. If smart bidding and call conversions are used in the optimization strategy, there could be a change in ad spend. Learn more about smart bidding. To view and perform actions on a location's calls, you need to be a `OWNER`, `CO_OWNER` or `MANAGER` of the location. Note - If you have a quota of 0 after enabling the API, please request for GBP API access.
* </p>
*
* <p>
* For more information about this service, see the
* <a href="https://developers.google.com/my-business/" target="_blank">API Documentation</a>
* </p>
*
* <p>
* This service uses {@link MyBusinessBusinessCallsRequestInitializer} to initialize global parameters via its
* {@link Builder}.
* </p>
*
* @since 1.3
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public class MyBusinessBusinessCalls extends com.google.api.client.googleapis.services.json.AbstractGoogleJsonClient {
  // Note: Leave this static initializer at the top of the file.
  // Fails fast at class-load time if the runtime google-api-client is older
  // than 1.31.1 (i.e. not 1.31.1+, 1.32+, or any 2.x), which this generated
  // library requires.
  static {
    com.google.api.client.util.Preconditions.checkState(
        (com.google.api.client.googleapis.GoogleUtils.MAJOR_VERSION == 1 &&
        (com.google.api.client.googleapis.GoogleUtils.MINOR_VERSION >= 32 ||
        (com.google.api.client.googleapis.GoogleUtils.MINOR_VERSION == 31 &&
         com.google.api.client.googleapis.GoogleUtils.BUGFIX_VERSION >= 1))) ||
        com.google.api.client.googleapis.GoogleUtils.MAJOR_VERSION >= 2,
        "You are currently running with version %s of google-api-client. " +
        "You need at least version 1.31.1 of google-api-client to run version " +
        "2.0.0 of the My Business Business Calls API library.", com.google.api.client.googleapis.GoogleUtils.VERSION);
  }
  /**
   * The default encoded root URL of the service. This is determined when the library is generated
   * and normally should not be changed.
   *
   * @since 1.7
   */
  public static final String DEFAULT_ROOT_URL = "https://mybusinessbusinesscalls.googleapis.com/";
  /**
   * The default encoded mTLS root URL of the service, used for mutual-TLS (client certificate)
   * connections. This is determined when the library is generated and normally should not be
   * changed.
   *
   * @since 1.31
   */
  public static final String DEFAULT_MTLS_ROOT_URL = "https://mybusinessbusinesscalls.mtls.googleapis.com/";
  /**
   * The default encoded service path of the service. This is determined when the library is
   * generated and normally should not be changed.
   *
   * @since 1.7
   */
  public static final String DEFAULT_SERVICE_PATH = "";
  /**
   * The default encoded batch path of the service, used when issuing batched requests. This is
   * determined when the library is generated and normally should not be changed.
   *
   * @since 1.23
   */
  public static final String DEFAULT_BATCH_PATH = "batch";
  /**
   * The default encoded base URL of the service: {@link #DEFAULT_ROOT_URL} concatenated with
   * {@link #DEFAULT_SERVICE_PATH}. This is determined when the library is generated and normally
   * should not be changed.
   */
  public static final String DEFAULT_BASE_URL = DEFAULT_ROOT_URL + DEFAULT_SERVICE_PATH;
/**
* Constructor.
*
* <p>
* Use {@link Builder} if you need to specify any of the optional parameters.
* </p>
*
* @param transport HTTP transport, which should normally be:
* <ul>
* <li>Google App Engine:
* {@code com.google.api.client.extensions.appengine.http.UrlFetchTransport}</li>
* <li>Android: {@code newCompatibleTransport} from
* {@code com.google.api.client.extensions.android.http.AndroidHttp}</li>
* <li>Java: {@link com.google.api.client.googleapis.javanet.GoogleNetHttpTransport#newTrustedTransport()}
* </li>
* </ul>
* @param jsonFactory JSON factory, which may be:
* <ul>
* <li>Jackson: {@code com.google.api.client.json.jackson2.JacksonFactory}</li>
* <li>Google GSON: {@code com.google.api.client.json.gson.GsonFactory}</li>
* <li>Android Honeycomb or higher:
* {@code com.google.api.client.extensions.android.json.AndroidJsonFactory}</li>
* </ul>
* @param httpRequestInitializer HTTP request initializer or {@code null} for none
* @since 1.7
*/
public MyBusinessBusinessCalls(com.google.api.client.http.HttpTransport transport, com.google.api.client.json.JsonFactory jsonFactory,
com.google.api.client.http.HttpRequestInitializer httpRequestInitializer) {
this(new Builder(transport, jsonFactory, httpRequestInitializer));
}
/**
* @param builder builder
*/
MyBusinessBusinessCalls(Builder builder) {
super(builder);
}
@Override
protected void initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest<?> httpClientRequest) throws java.io.IOException {
super.initialize(httpClientRequest);
}
/**
* An accessor for creating requests from the Locations collection.
*
* <p>The typical use is:</p>
* <pre>
* {@code MyBusinessBusinessCalls mybusinessbusinesscalls = new MyBusinessBusinessCalls(...);}
* {@code MyBusinessBusinessCalls.Locations.List request = mybusinessbusinesscalls.locations().list(parameters ...)}
* </pre>
*
* @return the resource collection
*/
public Locations locations() {
return new Locations();
}
/**
* The "locations" collection of methods.
*/
public class Locations {
/**
* Returns the Business calls settings resource for the given location.
*
* Create a request for the method "locations.getBusinesscallssettings".
*
* This request holds the parameters needed by the mybusinessbusinesscalls server. After setting
* any optional parameters, call the {@link GetBusinesscallssettings#execute()} method to invoke the
* remote operation.
*
* @param name Required. The BusinessCallsSettings to get. The `name` field is used to identify the business call
* settings to get. Format: locations/{location_id}/businesscallssettings.
* @return the request
*/
public GetBusinesscallssettings getBusinesscallssettings(java.lang.String name) throws java.io.IOException {
GetBusinesscallssettings result = new GetBusinesscallssettings(name);
initialize(result);
return result;
}
    /** Request for "locations.getBusinesscallssettings": fetches a location's BusinessCallsSettings. */
    public class GetBusinesscallssettings extends MyBusinessBusinessCallsRequest<com.google.api.services.mybusinessbusinesscalls.v1.model.BusinessCallsSettings> {
      private static final String REST_PATH = "v1/{+name}";
      private final java.util.regex.Pattern NAME_PATTERN =
          java.util.regex.Pattern.compile("^locations/[^/]+/businesscallssettings$");
      /**
       * Returns the Business calls settings resource for the given location.
       *
       * Create a request for the method "locations.getBusinesscallssettings".
       *
       * This request holds the parameters needed by the mybusinessbusinesscalls server. After
       * setting any optional parameters, call the {@link GetBusinesscallssettings#execute()} method to
       * invoke the remote operation. <p> {@link GetBusinesscallssettings#initialize(com.google.api.clie
       * nt.googleapis.services.AbstractGoogleClientRequest)} must be called to initialize this instance
       * immediately after invoking the constructor. </p>
       *
       * @param name Required. The BusinessCallsSettings to get. The `name` field is used to identify the business call
       *        settings to get. Format: locations/{location_id}/businesscallssettings.
       * @since 1.13
       */
      protected GetBusinesscallssettings(java.lang.String name) {
        super(MyBusinessBusinessCalls.this, "GET", REST_PATH, null, com.google.api.services.mybusinessbusinesscalls.v1.model.BusinessCallsSettings.class);
        this.name = com.google.api.client.util.Preconditions.checkNotNull(name, "Required parameter name must be specified.");
        if (!getSuppressPatternChecks()) {
          com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(),
              "Parameter name must conform to the pattern " +
              "^locations/[^/]+/businesscallssettings$");
        }
      }
      @Override
      public com.google.api.client.http.HttpResponse executeUsingHead() throws java.io.IOException {
        return super.executeUsingHead();
      }
      @Override
      public com.google.api.client.http.HttpRequest buildHttpRequestUsingHead() throws java.io.IOException {
        return super.buildHttpRequestUsingHead();
      }
      @Override
      public GetBusinesscallssettings set$Xgafv(java.lang.String $Xgafv) {
        return (GetBusinesscallssettings) super.set$Xgafv($Xgafv);
      }
      @Override
      public GetBusinesscallssettings setAccessToken(java.lang.String accessToken) {
        return (GetBusinesscallssettings) super.setAccessToken(accessToken);
      }
      @Override
      public GetBusinesscallssettings setAlt(java.lang.String alt) {
        return (GetBusinesscallssettings) super.setAlt(alt);
      }
      @Override
      public GetBusinesscallssettings setCallback(java.lang.String callback) {
        return (GetBusinesscallssettings) super.setCallback(callback);
      }
      @Override
      public GetBusinesscallssettings setFields(java.lang.String fields) {
        return (GetBusinesscallssettings) super.setFields(fields);
      }
      @Override
      public GetBusinesscallssettings setKey(java.lang.String key) {
        return (GetBusinesscallssettings) super.setKey(key);
      }
      @Override
      public GetBusinesscallssettings setOauthToken(java.lang.String oauthToken) {
        return (GetBusinesscallssettings) super.setOauthToken(oauthToken);
      }
      @Override
      public GetBusinesscallssettings setPrettyPrint(java.lang.Boolean prettyPrint) {
        return (GetBusinesscallssettings) super.setPrettyPrint(prettyPrint);
      }
      @Override
      public GetBusinesscallssettings setQuotaUser(java.lang.String quotaUser) {
        return (GetBusinesscallssettings) super.setQuotaUser(quotaUser);
      }
      @Override
      public GetBusinesscallssettings setUploadType(java.lang.String uploadType) {
        return (GetBusinesscallssettings) super.setUploadType(uploadType);
      }
      @Override
      public GetBusinesscallssettings setUploadProtocol(java.lang.String uploadProtocol) {
        return (GetBusinesscallssettings) super.setUploadProtocol(uploadProtocol);
      }
      /**
       * Required. The BusinessCallsSettings to get. The `name` field is used to identify the
       * business call settings to get. Format: locations/{location_id}/businesscallssettings.
       */
      @com.google.api.client.util.Key
      private java.lang.String name;
      /** Required. The BusinessCallsSettings to get. The `name` field is used to identify the business call
     settings to get. Format: locations/{location_id}/businesscallssettings.
       */
      public java.lang.String getName() {
        return name;
      }
      /**
       * Required. The BusinessCallsSettings to get. The `name` field is used to identify the
       * business call settings to get. Format: locations/{location_id}/businesscallssettings.
       */
      public GetBusinesscallssettings setName(java.lang.String name) {
        if (!getSuppressPatternChecks()) {
          com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(),
              "Parameter name must conform to the pattern " +
              "^locations/[^/]+/businesscallssettings$");
        }
        this.name = name;
        return this;
      }
      @Override
      public GetBusinesscallssettings set(String parameterName, Object value) {
        return (GetBusinesscallssettings) super.set(parameterName, value);
      }
    }
/**
* Updates the Business call settings for the specified location.
*
* Create a request for the method "locations.updateBusinesscallssettings".
*
* This request holds the parameters needed by the mybusinessbusinesscalls server. After setting
* any optional parameters, call the {@link UpdateBusinesscallssettings#execute()} method to invoke
* the remote operation.
*
* @param name Required. The resource name of the calls settings. Format:
* locations/{location}/businesscallssettings
* @param content the {@link com.google.api.services.mybusinessbusinesscalls.v1.model.BusinessCallsSettings}
* @return the request
*/
public UpdateBusinesscallssettings updateBusinesscallssettings(java.lang.String name, com.google.api.services.mybusinessbusinesscalls.v1.model.BusinessCallsSettings content) throws java.io.IOException {
UpdateBusinesscallssettings result = new UpdateBusinesscallssettings(name, content);
initialize(result);
return result;
}
    public class UpdateBusinesscallssettings extends MyBusinessBusinessCallsRequest<com.google.api.services.mybusinessbusinesscalls.v1.model.BusinessCallsSettings> {
      private static final String REST_PATH = "v1/{+name}";
      private final java.util.regex.Pattern NAME_PATTERN =
          java.util.regex.Pattern.compile("^locations/[^/]+/businesscallssettings$");
      /**
       * Updates the Business call settings for the specified location.
       *
       * Create a request for the method "locations.updateBusinesscallssettings".
       *
       * This request holds the parameters needed by the mybusinessbusinesscalls server. After
       * setting any optional parameters, call the {@link UpdateBusinesscallssettings#execute()} method
       * to invoke the remote operation. <p> {@link UpdateBusinesscallssettings#initialize(com.google.ap
       * i.client.googleapis.services.AbstractGoogleClientRequest)} must be called to initialize this
       * instance immediately after invoking the constructor. </p>
       *
       * @param name Required. The resource name of the calls settings. Format:
       *        locations/{location}/businesscallssettings
       * @param content the {@link com.google.api.services.mybusinessbusinesscalls.v1.model.BusinessCallsSettings}
       * @since 1.13
       */
      protected UpdateBusinesscallssettings(java.lang.String name, com.google.api.services.mybusinessbusinesscalls.v1.model.BusinessCallsSettings content) {
        super(MyBusinessBusinessCalls.this, "PATCH", REST_PATH, content, com.google.api.services.mybusinessbusinesscalls.v1.model.BusinessCallsSettings.class);
        this.name = com.google.api.client.util.Preconditions.checkNotNull(name, "Required parameter name must be specified.");
        if (!getSuppressPatternChecks()) {
          // Validate the resource-name shape unless pattern checks were suppressed on the client.
          com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(),
              "Parameter name must conform to the pattern " +
              "^locations/[^/]+/businesscallssettings$");
        }
      }
      @Override
      public UpdateBusinesscallssettings set$Xgafv(java.lang.String $Xgafv) {
        return (UpdateBusinesscallssettings) super.set$Xgafv($Xgafv);
      }
      @Override
      public UpdateBusinesscallssettings setAccessToken(java.lang.String accessToken) {
        return (UpdateBusinesscallssettings) super.setAccessToken(accessToken);
      }
      @Override
      public UpdateBusinesscallssettings setAlt(java.lang.String alt) {
        return (UpdateBusinesscallssettings) super.setAlt(alt);
      }
      @Override
      public UpdateBusinesscallssettings setCallback(java.lang.String callback) {
        return (UpdateBusinesscallssettings) super.setCallback(callback);
      }
      @Override
      public UpdateBusinesscallssettings setFields(java.lang.String fields) {
        return (UpdateBusinesscallssettings) super.setFields(fields);
      }
      @Override
      public UpdateBusinesscallssettings setKey(java.lang.String key) {
        return (UpdateBusinesscallssettings) super.setKey(key);
      }
      @Override
      public UpdateBusinesscallssettings setOauthToken(java.lang.String oauthToken) {
        return (UpdateBusinesscallssettings) super.setOauthToken(oauthToken);
      }
      @Override
      public UpdateBusinesscallssettings setPrettyPrint(java.lang.Boolean prettyPrint) {
        return (UpdateBusinesscallssettings) super.setPrettyPrint(prettyPrint);
      }
      @Override
      public UpdateBusinesscallssettings setQuotaUser(java.lang.String quotaUser) {
        return (UpdateBusinesscallssettings) super.setQuotaUser(quotaUser);
      }
      @Override
      public UpdateBusinesscallssettings setUploadType(java.lang.String uploadType) {
        return (UpdateBusinesscallssettings) super.setUploadType(uploadType);
      }
      @Override
      public UpdateBusinesscallssettings setUploadProtocol(java.lang.String uploadProtocol) {
        return (UpdateBusinesscallssettings) super.setUploadProtocol(uploadProtocol);
      }
      /**
       * Required. The resource name of the calls settings. Format:
       * locations/{location}/businesscallssettings
       */
      @com.google.api.client.util.Key
      private java.lang.String name;
      /** Required. The resource name of the calls settings. Format:
       * locations/{location}/businesscallssettings
       */
      public java.lang.String getName() {
        return name;
      }
      /**
       * Required. The resource name of the calls settings. Format:
       * locations/{location}/businesscallssettings
       */
      public UpdateBusinesscallssettings setName(java.lang.String name) {
        if (!getSuppressPatternChecks()) {
          com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(),
              "Parameter name must conform to the pattern " +
              "^locations/[^/]+/businesscallssettings$");
        }
        this.name = name;
        return this;
      }
      /** Required. The list of fields to update. */
      @com.google.api.client.util.Key
      private String updateMask;
      /** Required. The list of fields to update.
       */
      public String getUpdateMask() {
        return updateMask;
      }
      /** Required. The list of fields to update. */
      public UpdateBusinesscallssettings setUpdateMask(String updateMask) {
        this.updateMask = updateMask;
        return this;
      }
      @Override
      public UpdateBusinesscallssettings set(String parameterName, Object value) {
        return (UpdateBusinesscallssettings) super.set(parameterName, value);
      }
    }
    /**
     * An accessor for creating requests from the Businesscallsinsights collection.
     *
     * <p>The typical use is:</p>
     * <pre>
     *   {@code MyBusinessBusinessCalls mybusinessbusinesscalls = new MyBusinessBusinessCalls(...);}
     *   {@code MyBusinessBusinessCalls.Businesscallsinsights.List request = mybusinessbusinesscalls.businesscallsinsights().list(parameters ...)}
     * </pre>
     *
     * @return the resource collection
     */
    public Businesscallsinsights businesscallsinsights() {
      // A fresh accessor instance is created on every call.
      return new Businesscallsinsights();
    }
    /**
     * The "businesscallsinsights" collection of methods.
     */
    public class Businesscallsinsights {
      /**
       * Returns insights for Business calls for a location.
       *
       * Create a request for the method "businesscallsinsights.list".
       *
       * This request holds the parameters needed by the mybusinessbusinesscalls server. After setting
       * any optional parameters, call the {@link List#execute()} method to invoke the remote operation.
       *
       * @param parent Required. The parent location to fetch calls insights for. Format: locations/{location_id}
       * @return the request
       */
      public List list(java.lang.String parent) throws java.io.IOException {
        List result = new List(parent);
        initialize(result);
        return result;
      }
      public class List extends MyBusinessBusinessCallsRequest<com.google.api.services.mybusinessbusinesscalls.v1.model.ListBusinessCallsInsightsResponse> {
        private static final String REST_PATH = "v1/{+parent}/businesscallsinsights";
        private final java.util.regex.Pattern PARENT_PATTERN =
            java.util.regex.Pattern.compile("^locations/[^/]+$");
        /**
         * Returns insights for Business calls for a location.
         *
         * Create a request for the method "businesscallsinsights.list".
         *
         * This request holds the parameters needed by the mybusinessbusinesscalls server. After
         * setting any optional parameters, call the {@link List#execute()} method to invoke the remote
         * operation. <p> {@link
         * List#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)} must be
         * called to initialize this instance immediately after invoking the constructor. </p>
         *
         * @param parent Required. The parent location to fetch calls insights for. Format: locations/{location_id}
         * @since 1.13
         */
        protected List(java.lang.String parent) {
          super(MyBusinessBusinessCalls.this, "GET", REST_PATH, null, com.google.api.services.mybusinessbusinesscalls.v1.model.ListBusinessCallsInsightsResponse.class);
          this.parent = com.google.api.client.util.Preconditions.checkNotNull(parent, "Required parameter parent must be specified.");
          if (!getSuppressPatternChecks()) {
            // Validate the resource-name shape unless pattern checks were suppressed on the client.
            com.google.api.client.util.Preconditions.checkArgument(PARENT_PATTERN.matcher(parent).matches(),
                "Parameter parent must conform to the pattern " +
                "^locations/[^/]+$");
          }
        }
        @Override
        public com.google.api.client.http.HttpResponse executeUsingHead() throws java.io.IOException {
          return super.executeUsingHead();
        }
        @Override
        public com.google.api.client.http.HttpRequest buildHttpRequestUsingHead() throws java.io.IOException {
          return super.buildHttpRequestUsingHead();
        }
        @Override
        public List set$Xgafv(java.lang.String $Xgafv) {
          return (List) super.set$Xgafv($Xgafv);
        }
        @Override
        public List setAccessToken(java.lang.String accessToken) {
          return (List) super.setAccessToken(accessToken);
        }
        @Override
        public List setAlt(java.lang.String alt) {
          return (List) super.setAlt(alt);
        }
        @Override
        public List setCallback(java.lang.String callback) {
          return (List) super.setCallback(callback);
        }
        @Override
        public List setFields(java.lang.String fields) {
          return (List) super.setFields(fields);
        }
        @Override
        public List setKey(java.lang.String key) {
          return (List) super.setKey(key);
        }
        @Override
        public List setOauthToken(java.lang.String oauthToken) {
          return (List) super.setOauthToken(oauthToken);
        }
        @Override
        public List setPrettyPrint(java.lang.Boolean prettyPrint) {
          return (List) super.setPrettyPrint(prettyPrint);
        }
        @Override
        public List setQuotaUser(java.lang.String quotaUser) {
          return (List) super.setQuotaUser(quotaUser);
        }
        @Override
        public List setUploadType(java.lang.String uploadType) {
          return (List) super.setUploadType(uploadType);
        }
        @Override
        public List setUploadProtocol(java.lang.String uploadProtocol) {
          return (List) super.setUploadProtocol(uploadProtocol);
        }
        /**
         * Required. The parent location to fetch calls insights for. Format:
         * locations/{location_id}
         */
        @com.google.api.client.util.Key
        private java.lang.String parent;
        /** Required. The parent location to fetch calls insights for. Format: locations/{location_id}
         */
        public java.lang.String getParent() {
          return parent;
        }
        /**
         * Required. The parent location to fetch calls insights for. Format:
         * locations/{location_id}
         */
        public List setParent(java.lang.String parent) {
          if (!getSuppressPatternChecks()) {
            com.google.api.client.util.Preconditions.checkArgument(PARENT_PATTERN.matcher(parent).matches(),
                "Parameter parent must conform to the pattern " +
                "^locations/[^/]+$");
          }
          this.parent = parent;
          return this;
        }
        /**
         * Optional. A filter constraining the calls insights to return. The response includes only
         * entries that match the filter. If the MetricType is not provided, AGGREGATE_COUNT is
         * returned. If no end_date is provided, the last date for which data is available is used.
         * If no start_date is provided, we will default to the first date for which data is
         * available, which is currently 6 months. If start_date is before the date when data is
         * available, data is returned starting from the date when it is available. At this time we
         * support following filters. 1. start_date="DATE" where date is in YYYY-MM-DD format. 2.
         * end_date="DATE" where date is in YYYY-MM-DD format. 3. metric_type=XYZ where XYZ is a
         * valid MetricType. 4. Conjunctions(AND) of all of the above. e.g., "start_date=2021-08-01
         * AND end_date=2021-08-10 AND metric_type=AGGREGATE_COUNT" The AGGREGATE_COUNT metric_type
         * ignores the DD part of the date.
         */
        @com.google.api.client.util.Key
        private java.lang.String filter;
        /** Optional. A filter constraining the calls insights to return. The response includes only entries
         * that match the filter. If the MetricType is not provided, AGGREGATE_COUNT is returned. If no
         * end_date is provided, the last date for which data is available is used. If no start_date is
         * provided, we will default to the first date for which data is available, which is currently 6
         * months. If start_date is before the date when data is available, data is returned starting from the
         * date when it is available. At this time we support following filters. 1. start_date="DATE" where
         * date is in YYYY-MM-DD format. 2. end_date="DATE" where date is in YYYY-MM-DD format. 3.
         * metric_type=XYZ where XYZ is a valid MetricType. 4. Conjunctions(AND) of all of the above. e.g.,
         * "start_date=2021-08-01 AND end_date=2021-08-10 AND metric_type=AGGREGATE_COUNT" The AGGREGATE_COUNT
         * metric_type ignores the DD part of the date.
         */
        public java.lang.String getFilter() {
          return filter;
        }
        /**
         * Optional. A filter constraining the calls insights to return. The response includes only
         * entries that match the filter. If the MetricType is not provided, AGGREGATE_COUNT is
         * returned. If no end_date is provided, the last date for which data is available is used.
         * If no start_date is provided, we will default to the first date for which data is
         * available, which is currently 6 months. If start_date is before the date when data is
         * available, data is returned starting from the date when it is available. At this time we
         * support following filters. 1. start_date="DATE" where date is in YYYY-MM-DD format. 2.
         * end_date="DATE" where date is in YYYY-MM-DD format. 3. metric_type=XYZ where XYZ is a
         * valid MetricType. 4. Conjunctions(AND) of all of the above. e.g., "start_date=2021-08-01
         * AND end_date=2021-08-10 AND metric_type=AGGREGATE_COUNT" The AGGREGATE_COUNT metric_type
         * ignores the DD part of the date.
         */
        public List setFilter(java.lang.String filter) {
          this.filter = filter;
          return this;
        }
        /**
         * Optional. The maximum number of BusinessCallsInsights to return. If unspecified, at most
         * 20 will be returned. Some of the metric_types (e.g., AGGREGATE_COUNT) return a single
         * page. For these metrics, the page_size is ignored.
         */
        @com.google.api.client.util.Key
        private java.lang.Integer pageSize;
        /** Optional. The maximum number of BusinessCallsInsights to return. If unspecified, at most 20 will be
         * returned. Some of the metric_types (e.g., AGGREGATE_COUNT) return a single page. For these metrics,
         * the page_size is ignored.
         */
        public java.lang.Integer getPageSize() {
          return pageSize;
        }
        /**
         * Optional. The maximum number of BusinessCallsInsights to return. If unspecified, at most
         * 20 will be returned. Some of the metric_types (e.g., AGGREGATE_COUNT) return a single
         * page. For these metrics, the page_size is ignored.
         */
        public List setPageSize(java.lang.Integer pageSize) {
          this.pageSize = pageSize;
          return this;
        }
        /**
         * Optional. A page token, received from a previous `ListBusinessCallsInsights` call.
         * Provide this to retrieve the subsequent page. When paginating, all other parameters
         * provided to `ListBusinessCallsInsights` must match the call that provided the page token.
         * Some of the metric_types (e.g., AGGREGATE_COUNT) return a single page. For these metrics,
         * the page_token is ignored.
         */
        @com.google.api.client.util.Key
        private java.lang.String pageToken;
        /** Optional. A page token, received from a previous `ListBusinessCallsInsights` call. Provide this to
         * retrieve the subsequent page. When paginating, all other parameters provided to
         * `ListBusinessCallsInsights` must match the call that provided the page token. Some of the
         * metric_types (e.g., AGGREGATE_COUNT) return a single page. For these metrics, the page_token is
         * ignored.
         */
        public java.lang.String getPageToken() {
          return pageToken;
        }
        /**
         * Optional. A page token, received from a previous `ListBusinessCallsInsights` call.
         * Provide this to retrieve the subsequent page. When paginating, all other parameters
         * provided to `ListBusinessCallsInsights` must match the call that provided the page token.
         * Some of the metric_types (e.g., AGGREGATE_COUNT) return a single page. For these metrics,
         * the page_token is ignored.
         */
        public List setPageToken(java.lang.String pageToken) {
          this.pageToken = pageToken;
          return this;
        }
        @Override
        public List set(String parameterName, Object value) {
          return (List) super.set(parameterName, value);
        }
      }
    }
}
  /**
   * Builder for {@link MyBusinessBusinessCalls}.
   *
   * <p>
   * Implementation is not thread-safe.
   * </p>
   *
   * @since 1.3.0
   */
  public static final class Builder extends com.google.api.client.googleapis.services.json.AbstractGoogleJsonClient.Builder {
    /** Chooses the root URL, honoring the {@code GOOGLE_API_USE_MTLS_ENDPOINT} environment variable. */
    private static String chooseEndpoint(com.google.api.client.http.HttpTransport transport) {
      // If the GOOGLE_API_USE_MTLS_ENDPOINT environment variable value is "always", use mTLS endpoint.
      // If the env variable is "auto", use mTLS endpoint if and only if the transport is mTLS.
      // Use the regular endpoint for all other cases.
      String useMtlsEndpoint = System.getenv("GOOGLE_API_USE_MTLS_ENDPOINT");
      useMtlsEndpoint = useMtlsEndpoint == null ? "auto" : useMtlsEndpoint;
      if ("always".equals(useMtlsEndpoint) || ("auto".equals(useMtlsEndpoint) && transport != null && transport.isMtls())) {
        return DEFAULT_MTLS_ROOT_URL;
      }
      return DEFAULT_ROOT_URL;
    }
    /**
     * Returns an instance of a new builder.
     *
     * @param transport HTTP transport, which should normally be:
     *        <ul>
     *        <li>Google App Engine:
     *        {@code com.google.api.client.extensions.appengine.http.UrlFetchTransport}</li>
     *        <li>Android: {@code newCompatibleTransport} from
     *        {@code com.google.api.client.extensions.android.http.AndroidHttp}</li>
     *        <li>Java: {@link com.google.api.client.googleapis.javanet.GoogleNetHttpTransport#newTrustedTransport()}
     *        </li>
     *        </ul>
     * @param jsonFactory JSON factory, which may be:
     *        <ul>
     *        <li>Jackson: {@code com.google.api.client.json.jackson2.JacksonFactory}</li>
     *        <li>Google GSON: {@code com.google.api.client.json.gson.GsonFactory}</li>
     *        <li>Android Honeycomb or higher:
     *        {@code com.google.api.client.extensions.android.json.AndroidJsonFactory}</li>
     *        </ul>
     * @param httpRequestInitializer HTTP request initializer or {@code null} for none
     * @since 1.7
     */
    public Builder(com.google.api.client.http.HttpTransport transport, com.google.api.client.json.JsonFactory jsonFactory,
        com.google.api.client.http.HttpRequestInitializer httpRequestInitializer) {
      super(
          transport,
          jsonFactory,
          Builder.chooseEndpoint(transport),
          DEFAULT_SERVICE_PATH,
          httpRequestInitializer,
          false);
      setBatchPath(DEFAULT_BATCH_PATH);
    }
    /** Builds a new instance of {@link MyBusinessBusinessCalls}. */
    @Override
    public MyBusinessBusinessCalls build() {
      return new MyBusinessBusinessCalls(this);
    }
    @Override
    public Builder setRootUrl(String rootUrl) {
      return (Builder) super.setRootUrl(rootUrl);
    }
    @Override
    public Builder setServicePath(String servicePath) {
      return (Builder) super.setServicePath(servicePath);
    }
    @Override
    public Builder setBatchPath(String batchPath) {
      return (Builder) super.setBatchPath(batchPath);
    }
    @Override
    public Builder setHttpRequestInitializer(com.google.api.client.http.HttpRequestInitializer httpRequestInitializer) {
      return (Builder) super.setHttpRequestInitializer(httpRequestInitializer);
    }
    @Override
    public Builder setApplicationName(String applicationName) {
      return (Builder) super.setApplicationName(applicationName);
    }
    @Override
    public Builder setSuppressPatternChecks(boolean suppressPatternChecks) {
      return (Builder) super.setSuppressPatternChecks(suppressPatternChecks);
    }
    @Override
    public Builder setSuppressRequiredParameterChecks(boolean suppressRequiredParameterChecks) {
      return (Builder) super.setSuppressRequiredParameterChecks(suppressRequiredParameterChecks);
    }
    @Override
    public Builder setSuppressAllChecks(boolean suppressAllChecks) {
      return (Builder) super.setSuppressAllChecks(suppressAllChecks);
    }
    /**
     * Set the {@link MyBusinessBusinessCallsRequestInitializer}.
     *
     * @since 1.12
     */
    public Builder setMyBusinessBusinessCallsRequestInitializer(
        MyBusinessBusinessCallsRequestInitializer mybusinessbusinesscallsRequestInitializer) {
      return (Builder) super.setGoogleClientRequestInitializer(mybusinessbusinesscallsRequestInitializer);
    }
    @Override
    public Builder setGoogleClientRequestInitializer(
        com.google.api.client.googleapis.services.GoogleClientRequestInitializer googleClientRequestInitializer) {
      return (Builder) super.setGoogleClientRequestInitializer(googleClientRequestInitializer);
    }
  }
}
|
googleapis/google-cloud-java | 36,982 | java-developerconnect/google-cloud-developerconnect/src/main/java/com/google/cloud/developerconnect/v1/stub/InsightsConfigServiceStubSettings.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.developerconnect.v1.stub;
import static com.google.cloud.developerconnect.v1.InsightsConfigServiceClient.ListInsightsConfigsPagedResponse;
import static com.google.cloud.developerconnect.v1.InsightsConfigServiceClient.ListLocationsPagedResponse;
import com.google.api.core.ApiFunction;
import com.google.api.core.ApiFuture;
import com.google.api.core.BetaApi;
import com.google.api.core.ObsoleteApi;
import com.google.api.gax.core.GaxProperties;
import com.google.api.gax.core.GoogleCredentialsProvider;
import com.google.api.gax.core.InstantiatingExecutorProvider;
import com.google.api.gax.grpc.GaxGrpcProperties;
import com.google.api.gax.grpc.GrpcTransportChannel;
import com.google.api.gax.grpc.InstantiatingGrpcChannelProvider;
import com.google.api.gax.grpc.ProtoOperationTransformers;
import com.google.api.gax.httpjson.GaxHttpJsonProperties;
import com.google.api.gax.httpjson.HttpJsonTransportChannel;
import com.google.api.gax.httpjson.InstantiatingHttpJsonChannelProvider;
import com.google.api.gax.longrunning.OperationSnapshot;
import com.google.api.gax.longrunning.OperationTimedPollAlgorithm;
import com.google.api.gax.retrying.RetrySettings;
import com.google.api.gax.rpc.ApiCallContext;
import com.google.api.gax.rpc.ApiClientHeaderProvider;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.OperationCallSettings;
import com.google.api.gax.rpc.PageContext;
import com.google.api.gax.rpc.PagedCallSettings;
import com.google.api.gax.rpc.PagedListDescriptor;
import com.google.api.gax.rpc.PagedListResponseFactory;
import com.google.api.gax.rpc.StatusCode;
import com.google.api.gax.rpc.StubSettings;
import com.google.api.gax.rpc.TransportChannelProvider;
import com.google.api.gax.rpc.UnaryCallSettings;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.cloud.developerconnect.v1.CreateInsightsConfigRequest;
import com.google.cloud.developerconnect.v1.DeleteInsightsConfigRequest;
import com.google.cloud.developerconnect.v1.GetInsightsConfigRequest;
import com.google.cloud.developerconnect.v1.InsightsConfig;
import com.google.cloud.developerconnect.v1.ListInsightsConfigsRequest;
import com.google.cloud.developerconnect.v1.ListInsightsConfigsResponse;
import com.google.cloud.developerconnect.v1.OperationMetadata;
import com.google.cloud.developerconnect.v1.UpdateInsightsConfigRequest;
import com.google.cloud.location.GetLocationRequest;
import com.google.cloud.location.ListLocationsRequest;
import com.google.cloud.location.ListLocationsResponse;
import com.google.cloud.location.Location;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.google.longrunning.Operation;
import com.google.protobuf.Empty;
import java.io.IOException;
import java.time.Duration;
import java.util.List;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
* Settings class to configure an instance of {@link InsightsConfigServiceStub}.
*
* <p>The default instance has everything set to sensible defaults:
*
* <ul>
* <li>The default service address (developerconnect.googleapis.com) and default port (443) are
* used.
* <li>Credentials are acquired automatically through Application Default Credentials.
* <li>Retries are configured for idempotent methods but not for non-idempotent methods.
* </ul>
*
* <p>The builder of this class is recursive, so contained classes are themselves builders. When
* build() is called, the tree of builders is called to create the complete settings object.
*
* <p>For example, to set the
* [RetrySettings](https://cloud.google.com/java/docs/reference/gax/latest/com.google.api.gax.retrying.RetrySettings)
* of getInsightsConfig:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* InsightsConfigServiceStubSettings.Builder insightsConfigServiceSettingsBuilder =
* InsightsConfigServiceStubSettings.newBuilder();
* insightsConfigServiceSettingsBuilder
* .getInsightsConfigSettings()
* .setRetrySettings(
* insightsConfigServiceSettingsBuilder
* .getInsightsConfigSettings()
* .getRetrySettings()
* .toBuilder()
* .setInitialRetryDelayDuration(Duration.ofSeconds(1))
* .setInitialRpcTimeoutDuration(Duration.ofSeconds(5))
* .setMaxAttempts(5)
* .setMaxRetryDelayDuration(Duration.ofSeconds(30))
* .setMaxRpcTimeoutDuration(Duration.ofSeconds(60))
* .setRetryDelayMultiplier(1.3)
* .setRpcTimeoutMultiplier(1.5)
* .setTotalTimeoutDuration(Duration.ofSeconds(300))
* .build());
* InsightsConfigServiceStubSettings insightsConfigServiceSettings =
* insightsConfigServiceSettingsBuilder.build();
* }</pre>
*
* Please refer to the [Client Side Retry
* Guide](https://github.com/googleapis/google-cloud-java/blob/main/docs/client_retries.md) for
* additional support in setting retries.
*
* <p>To configure the RetrySettings of a Long Running Operation method, create an
* OperationTimedPollAlgorithm object and update the RPC's polling algorithm. For example, to
* configure the RetrySettings for createInsightsConfig:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* InsightsConfigServiceStubSettings.Builder insightsConfigServiceSettingsBuilder =
* InsightsConfigServiceStubSettings.newBuilder();
* TimedRetryAlgorithm timedRetryAlgorithm =
* OperationalTimedPollAlgorithm.create(
* RetrySettings.newBuilder()
* .setInitialRetryDelayDuration(Duration.ofMillis(500))
* .setRetryDelayMultiplier(1.5)
* .setMaxRetryDelayDuration(Duration.ofMillis(5000))
* .setTotalTimeoutDuration(Duration.ofHours(24))
* .build());
* insightsConfigServiceSettingsBuilder
* .createClusterOperationSettings()
* .setPollingAlgorithm(timedRetryAlgorithm)
* .build();
* }</pre>
*/
@Generated("by gapic-generator-java")
public class InsightsConfigServiceStubSettings
extends StubSettings<InsightsConfigServiceStubSettings> {
  /** The default scopes of the service. */
  private static final ImmutableList<String> DEFAULT_SERVICE_SCOPES =
      ImmutableList.<String>builder().add("https://www.googleapis.com/auth/cloud-platform").build();
  // Per-RPC call settings captured at construction time and exposed via the getters below.
  // Long-running RPCs have both a UnaryCallSettings (initial call) and an OperationCallSettings
  // (operation polling) entry.
  private final PagedCallSettings<
          ListInsightsConfigsRequest, ListInsightsConfigsResponse, ListInsightsConfigsPagedResponse>
      listInsightsConfigsSettings;
  private final UnaryCallSettings<CreateInsightsConfigRequest, Operation>
      createInsightsConfigSettings;
  private final OperationCallSettings<
          CreateInsightsConfigRequest, InsightsConfig, OperationMetadata>
      createInsightsConfigOperationSettings;
  private final UnaryCallSettings<GetInsightsConfigRequest, InsightsConfig>
      getInsightsConfigSettings;
  private final UnaryCallSettings<UpdateInsightsConfigRequest, Operation>
      updateInsightsConfigSettings;
  private final OperationCallSettings<
          UpdateInsightsConfigRequest, InsightsConfig, OperationMetadata>
      updateInsightsConfigOperationSettings;
  private final UnaryCallSettings<DeleteInsightsConfigRequest, Operation>
      deleteInsightsConfigSettings;
  private final OperationCallSettings<DeleteInsightsConfigRequest, Empty, OperationMetadata>
      deleteInsightsConfigOperationSettings;
  private final PagedCallSettings<
          ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>
      listLocationsSettings;
  private final UnaryCallSettings<GetLocationRequest, Location> getLocationSettings;
  // Page-streaming descriptor for ListInsightsConfigs: tells gax how to inject page tokens and
  // page sizes into requests, read the next-page token from responses, and extract the
  // InsightsConfig resources from each page.
  private static final PagedListDescriptor<
          ListInsightsConfigsRequest, ListInsightsConfigsResponse, InsightsConfig>
      LIST_INSIGHTS_CONFIGS_PAGE_STR_DESC =
          new PagedListDescriptor<
              ListInsightsConfigsRequest, ListInsightsConfigsResponse, InsightsConfig>() {
            @Override
            public String emptyToken() {
              return "";
            }
            @Override
            public ListInsightsConfigsRequest injectToken(
                ListInsightsConfigsRequest payload, String token) {
              return ListInsightsConfigsRequest.newBuilder(payload).setPageToken(token).build();
            }
            @Override
            public ListInsightsConfigsRequest injectPageSize(
                ListInsightsConfigsRequest payload, int pageSize) {
              return ListInsightsConfigsRequest.newBuilder(payload).setPageSize(pageSize).build();
            }
            @Override
            public Integer extractPageSize(ListInsightsConfigsRequest payload) {
              return payload.getPageSize();
            }
            @Override
            public String extractNextToken(ListInsightsConfigsResponse payload) {
              return payload.getNextPageToken();
            }
            @Override
            public Iterable<InsightsConfig> extractResources(ListInsightsConfigsResponse payload) {
              return payload.getInsightsConfigsList();
            }
          };
  // Page-streaming descriptor for ListLocations: injects page tokens/sizes into requests and
  // extracts the next-page token and Location resources from responses.
  private static final PagedListDescriptor<ListLocationsRequest, ListLocationsResponse, Location>
      LIST_LOCATIONS_PAGE_STR_DESC =
          new PagedListDescriptor<ListLocationsRequest, ListLocationsResponse, Location>() {
            @Override
            public String emptyToken() {
              return "";
            }
            @Override
            public ListLocationsRequest injectToken(ListLocationsRequest payload, String token) {
              return ListLocationsRequest.newBuilder(payload).setPageToken(token).build();
            }
            @Override
            public ListLocationsRequest injectPageSize(ListLocationsRequest payload, int pageSize) {
              return ListLocationsRequest.newBuilder(payload).setPageSize(pageSize).build();
            }
            @Override
            public Integer extractPageSize(ListLocationsRequest payload) {
              return payload.getPageSize();
            }
            @Override
            public String extractNextToken(ListLocationsResponse payload) {
              return payload.getNextPageToken();
            }
            @Override
            public Iterable<Location> extractResources(ListLocationsResponse payload) {
              return payload.getLocationsList();
            }
          };
  // Factory that wraps a raw ListInsightsConfigs future response into a
  // ListInsightsConfigsPagedResponse, using LIST_INSIGHTS_CONFIGS_PAGE_STR_DESC for paging.
  private static final PagedListResponseFactory<
          ListInsightsConfigsRequest, ListInsightsConfigsResponse, ListInsightsConfigsPagedResponse>
      LIST_INSIGHTS_CONFIGS_PAGE_STR_FACT =
          new PagedListResponseFactory<
              ListInsightsConfigsRequest,
              ListInsightsConfigsResponse,
              ListInsightsConfigsPagedResponse>() {
            @Override
            public ApiFuture<ListInsightsConfigsPagedResponse> getFuturePagedResponse(
                UnaryCallable<ListInsightsConfigsRequest, ListInsightsConfigsResponse> callable,
                ListInsightsConfigsRequest request,
                ApiCallContext context,
                ApiFuture<ListInsightsConfigsResponse> futureResponse) {
              PageContext<ListInsightsConfigsRequest, ListInsightsConfigsResponse, InsightsConfig>
                  pageContext =
                      PageContext.create(
                          callable, LIST_INSIGHTS_CONFIGS_PAGE_STR_DESC, request, context);
              return ListInsightsConfigsPagedResponse.createAsync(pageContext, futureResponse);
            }
          };
  // Factory that wraps a raw ListLocations future response into a ListLocationsPagedResponse,
  // using LIST_LOCATIONS_PAGE_STR_DESC for paging.
  private static final PagedListResponseFactory<
          ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>
      LIST_LOCATIONS_PAGE_STR_FACT =
          new PagedListResponseFactory<
              ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>() {
            @Override
            public ApiFuture<ListLocationsPagedResponse> getFuturePagedResponse(
                UnaryCallable<ListLocationsRequest, ListLocationsResponse> callable,
                ListLocationsRequest request,
                ApiCallContext context,
                ApiFuture<ListLocationsResponse> futureResponse) {
              PageContext<ListLocationsRequest, ListLocationsResponse, Location> pageContext =
                  PageContext.create(callable, LIST_LOCATIONS_PAGE_STR_DESC, request, context);
              return ListLocationsPagedResponse.createAsync(pageContext, futureResponse);
            }
          };
  /** Returns the object with the settings used for (paged) calls to listInsightsConfigs. */
  public PagedCallSettings<
          ListInsightsConfigsRequest, ListInsightsConfigsResponse, ListInsightsConfigsPagedResponse>
      listInsightsConfigsSettings() {
    return listInsightsConfigsSettings;
  }
/** Returns the unary-call settings used for the initial createInsightsConfig RPC. */
public UnaryCallSettings<CreateInsightsConfigRequest, Operation> createInsightsConfigSettings() {
  return createInsightsConfigSettings;
}
/** Returns the long-running-operation settings used for calls to createInsightsConfig. */
public OperationCallSettings<CreateInsightsConfigRequest, InsightsConfig, OperationMetadata>
    createInsightsConfigOperationSettings() {
  return createInsightsConfigOperationSettings;
}
/** Returns the unary-call settings used for calls to getInsightsConfig. */
public UnaryCallSettings<GetInsightsConfigRequest, InsightsConfig> getInsightsConfigSettings() {
  return getInsightsConfigSettings;
}
/** Returns the unary-call settings used for the initial updateInsightsConfig RPC. */
public UnaryCallSettings<UpdateInsightsConfigRequest, Operation> updateInsightsConfigSettings() {
  return updateInsightsConfigSettings;
}
/** Returns the long-running-operation settings used for calls to updateInsightsConfig. */
public OperationCallSettings<UpdateInsightsConfigRequest, InsightsConfig, OperationMetadata>
    updateInsightsConfigOperationSettings() {
  return updateInsightsConfigOperationSettings;
}
/** Returns the unary-call settings used for the initial deleteInsightsConfig RPC. */
public UnaryCallSettings<DeleteInsightsConfigRequest, Operation> deleteInsightsConfigSettings() {
  return deleteInsightsConfigSettings;
}
/** Returns the long-running-operation settings used for calls to deleteInsightsConfig. */
public OperationCallSettings<DeleteInsightsConfigRequest, Empty, OperationMetadata>
    deleteInsightsConfigOperationSettings() {
  return deleteInsightsConfigOperationSettings;
}
/** Returns the paged-call settings used for calls to listLocations. */
public PagedCallSettings<ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>
    listLocationsSettings() {
  return listLocationsSettings;
}
/** Returns the unary-call settings used for calls to getLocation. */
public UnaryCallSettings<GetLocationRequest, Location> getLocationSettings() {
  return getLocationSettings;
}
/**
 * Creates the transport-specific stub matching the configured channel provider.
 *
 * @throws UnsupportedOperationException if the transport is neither gRPC nor HTTP/JSON
 * @throws IOException if stub creation fails
 */
public InsightsConfigServiceStub createStub() throws IOException {
  // Resolve the transport name once and dispatch to the matching generated stub.
  final String transportName = getTransportChannelProvider().getTransportName();
  if (transportName.equals(GrpcTransportChannel.getGrpcTransportName())) {
    return GrpcInsightsConfigServiceStub.create(this);
  }
  if (transportName.equals(HttpJsonTransportChannel.getHttpJsonTransportName())) {
    return HttpJsonInsightsConfigServiceStub.create(this);
  }
  throw new UnsupportedOperationException(
      String.format("Transport not supported: %s", transportName));
}
/** Returns the default service name (generated value used by gax for endpoint resolution). */
@Override
public String getServiceName() {
  return "developerconnect";
}
/** Returns a builder for the default ExecutorProvider for this service. */
public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() {
  return InstantiatingExecutorProvider.newBuilder();
}
/** Returns the default (non-mTLS) service endpoint, host:port. */
@ObsoleteApi("Use getEndpoint() instead")
public static String getDefaultEndpoint() {
  return "developerconnect.googleapis.com:443";
}
/** Returns the default mTLS service endpoint, host:port. */
public static String getDefaultMtlsEndpoint() {
  return "developerconnect.mtls.googleapis.com:443";
}
/** Returns the default OAuth scopes requested for this service's credentials. */
public static List<String> getDefaultServiceScopes() {
  return DEFAULT_SERVICE_SCOPES;
}
/**
 * Returns a builder for the default credentials provider, pre-configured with the service
 * scopes and self-signed JWT access where supported.
 */
public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() {
  return GoogleCredentialsProvider.newBuilder()
      .setScopesToApply(DEFAULT_SERVICE_SCOPES)
      .setUseJwtAccessWithScope(true);
}
/** Returns a builder for the default gRPC ChannelProvider; inbound message size is unbounded. */
public static InstantiatingGrpcChannelProvider.Builder defaultGrpcTransportProviderBuilder() {
  return InstantiatingGrpcChannelProvider.newBuilder()
      .setMaxInboundMessageSize(Integer.MAX_VALUE);
}
/** Returns a builder for the default REST (HTTP/JSON) ChannelProvider for this service. */
@BetaApi
public static InstantiatingHttpJsonChannelProvider.Builder
    defaultHttpJsonTransportProviderBuilder() {
  return InstantiatingHttpJsonChannelProvider.newBuilder();
}
/** Returns the default transport channel provider; gRPC is the default transport. */
public static TransportChannelProvider defaultTransportChannelProvider() {
  return defaultGrpcTransportProviderBuilder().build();
}
/** Returns a builder for the API client headers (x-goog-api-client) for the gRPC transport. */
public static ApiClientHeaderProvider.Builder defaultGrpcApiClientHeaderProviderBuilder() {
  return ApiClientHeaderProvider.newBuilder()
      .setGeneratedLibToken(
          "gapic", GaxProperties.getLibraryVersion(InsightsConfigServiceStubSettings.class))
      .setTransportToken(
          GaxGrpcProperties.getGrpcTokenName(), GaxGrpcProperties.getGrpcVersion());
}
/** Returns a builder for the API client headers for the HTTP/JSON transport. */
public static ApiClientHeaderProvider.Builder defaultHttpJsonApiClientHeaderProviderBuilder() {
  return ApiClientHeaderProvider.newBuilder()
      .setGeneratedLibToken(
          "gapic", GaxProperties.getLibraryVersion(InsightsConfigServiceStubSettings.class))
      .setTransportToken(
          GaxHttpJsonProperties.getHttpJsonTokenName(),
          GaxHttpJsonProperties.getHttpJsonVersion());
}
/** Returns the default API client header builder; delegates to the gRPC variant. */
public static ApiClientHeaderProvider.Builder defaultApiClientHeaderProviderBuilder() {
  return InsightsConfigServiceStubSettings.defaultGrpcApiClientHeaderProviderBuilder();
}
/** Returns a new gRPC-transport builder for this class, populated with default settings. */
public static Builder newBuilder() {
  return Builder.createDefault();
}
/** Returns a new REST (HTTP/JSON) transport builder for this class. */
public static Builder newHttpJsonBuilder() {
  return Builder.createHttpJsonDefault();
}
/** Returns a new builder seeded from an existing ClientContext. */
public static Builder newBuilder(ClientContext clientContext) {
  return new Builder(clientContext);
}
/** Returns a builder containing all the values of this settings instance. */
public Builder toBuilder() {
  return new Builder(this);
}
/**
 * Builds the immutable settings from the given builder. Each per-RPC settings builder
 * is frozen via build(), so the resulting stub settings cannot change afterwards.
 */
protected InsightsConfigServiceStubSettings(Builder settingsBuilder) throws IOException {
  super(settingsBuilder);
  listInsightsConfigsSettings = settingsBuilder.listInsightsConfigsSettings().build();
  createInsightsConfigSettings = settingsBuilder.createInsightsConfigSettings().build();
  createInsightsConfigOperationSettings =
      settingsBuilder.createInsightsConfigOperationSettings().build();
  getInsightsConfigSettings = settingsBuilder.getInsightsConfigSettings().build();
  updateInsightsConfigSettings = settingsBuilder.updateInsightsConfigSettings().build();
  updateInsightsConfigOperationSettings =
      settingsBuilder.updateInsightsConfigOperationSettings().build();
  deleteInsightsConfigSettings = settingsBuilder.deleteInsightsConfigSettings().build();
  deleteInsightsConfigOperationSettings =
      settingsBuilder.deleteInsightsConfigOperationSettings().build();
  listLocationsSettings = settingsBuilder.listLocationsSettings().build();
  getLocationSettings = settingsBuilder.getLocationSettings().build();
}
/**
 * Builder for InsightsConfigServiceStubSettings. Holds one mutable settings builder per
 * RPC; initDefaults wires retry codes, retry params, and LRO polling for each of them.
 */
public static class Builder
    extends StubSettings.Builder<InsightsConfigServiceStubSettings, Builder> {
  // Aggregate view over all unary/paged builders so applyToAllUnaryMethods can update
  // them in one pass. OperationCallSettings builders are intentionally not included.
  private final ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders;
  private final PagedCallSettings.Builder<
          ListInsightsConfigsRequest,
          ListInsightsConfigsResponse,
          ListInsightsConfigsPagedResponse>
      listInsightsConfigsSettings;
  private final UnaryCallSettings.Builder<CreateInsightsConfigRequest, Operation>
      createInsightsConfigSettings;
  private final OperationCallSettings.Builder<
          CreateInsightsConfigRequest, InsightsConfig, OperationMetadata>
      createInsightsConfigOperationSettings;
  private final UnaryCallSettings.Builder<GetInsightsConfigRequest, InsightsConfig>
      getInsightsConfigSettings;
  private final UnaryCallSettings.Builder<UpdateInsightsConfigRequest, Operation>
      updateInsightsConfigSettings;
  private final OperationCallSettings.Builder<
          UpdateInsightsConfigRequest, InsightsConfig, OperationMetadata>
      updateInsightsConfigOperationSettings;
  private final UnaryCallSettings.Builder<DeleteInsightsConfigRequest, Operation>
      deleteInsightsConfigSettings;
  private final OperationCallSettings.Builder<
          DeleteInsightsConfigRequest, Empty, OperationMetadata>
      deleteInsightsConfigOperationSettings;
  private final PagedCallSettings.Builder<
          ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>
      listLocationsSettings;
  private final UnaryCallSettings.Builder<GetLocationRequest, Location> getLocationSettings;
  // Named retry-code sets shared by initDefaults; this service retries nothing by default.
  private static final ImmutableMap<String, ImmutableSet<StatusCode.Code>>
      RETRYABLE_CODE_DEFINITIONS;
  static {
    ImmutableMap.Builder<String, ImmutableSet<StatusCode.Code>> definitions =
        ImmutableMap.builder();
    definitions.put("no_retry_codes", ImmutableSet.copyOf(Lists.<StatusCode.Code>newArrayList()));
    RETRYABLE_CODE_DEFINITIONS = definitions.build();
  }
  // Named retry-parameter sets shared by initDefaults.
  private static final ImmutableMap<String, RetrySettings> RETRY_PARAM_DEFINITIONS;
  static {
    ImmutableMap.Builder<String, RetrySettings> definitions = ImmutableMap.builder();
    RetrySettings settings = null;
    settings = RetrySettings.newBuilder().setRpcTimeoutMultiplier(1.0).build();
    definitions.put("no_retry_params", settings);
    RETRY_PARAM_DEFINITIONS = definitions.build();
  }
  protected Builder() {
    this(((ClientContext) null));
  }
  protected Builder(ClientContext clientContext) {
    super(clientContext);
    // Create empty settings builders for every RPC, then apply defaults below.
    listInsightsConfigsSettings =
        PagedCallSettings.newBuilder(LIST_INSIGHTS_CONFIGS_PAGE_STR_FACT);
    createInsightsConfigSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
    createInsightsConfigOperationSettings = OperationCallSettings.newBuilder();
    getInsightsConfigSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
    updateInsightsConfigSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
    updateInsightsConfigOperationSettings = OperationCallSettings.newBuilder();
    deleteInsightsConfigSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
    deleteInsightsConfigOperationSettings = OperationCallSettings.newBuilder();
    listLocationsSettings = PagedCallSettings.newBuilder(LIST_LOCATIONS_PAGE_STR_FACT);
    getLocationSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
    unaryMethodSettingsBuilders =
        ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
            listInsightsConfigsSettings,
            createInsightsConfigSettings,
            getInsightsConfigSettings,
            updateInsightsConfigSettings,
            deleteInsightsConfigSettings,
            listLocationsSettings,
            getLocationSettings);
    initDefaults(this);
  }
  protected Builder(InsightsConfigServiceStubSettings settings) {
    super(settings);
    // Copy constructor: re-open each immutable settings object as a builder.
    listInsightsConfigsSettings = settings.listInsightsConfigsSettings.toBuilder();
    createInsightsConfigSettings = settings.createInsightsConfigSettings.toBuilder();
    createInsightsConfigOperationSettings =
        settings.createInsightsConfigOperationSettings.toBuilder();
    getInsightsConfigSettings = settings.getInsightsConfigSettings.toBuilder();
    updateInsightsConfigSettings = settings.updateInsightsConfigSettings.toBuilder();
    updateInsightsConfigOperationSettings =
        settings.updateInsightsConfigOperationSettings.toBuilder();
    deleteInsightsConfigSettings = settings.deleteInsightsConfigSettings.toBuilder();
    deleteInsightsConfigOperationSettings =
        settings.deleteInsightsConfigOperationSettings.toBuilder();
    listLocationsSettings = settings.listLocationsSettings.toBuilder();
    getLocationSettings = settings.getLocationSettings.toBuilder();
    unaryMethodSettingsBuilders =
        ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
            listInsightsConfigsSettings,
            createInsightsConfigSettings,
            getInsightsConfigSettings,
            updateInsightsConfigSettings,
            deleteInsightsConfigSettings,
            listLocationsSettings,
            getLocationSettings);
  }
  // Default (gRPC) builder: gRPC channel, default credentials/headers, mTLS switching on.
  private static Builder createDefault() {
    Builder builder = new Builder(((ClientContext) null));
    builder.setTransportChannelProvider(defaultTransportChannelProvider());
    builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build());
    builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build());
    builder.setMtlsEndpoint(getDefaultMtlsEndpoint());
    builder.setSwitchToMtlsEndpointAllowed(true);
    return initDefaults(builder);
  }
  // Same as createDefault but wired for the HTTP/JSON transport.
  private static Builder createHttpJsonDefault() {
    Builder builder = new Builder(((ClientContext) null));
    builder.setTransportChannelProvider(defaultHttpJsonTransportProviderBuilder().build());
    builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build());
    builder.setInternalHeaderProvider(defaultHttpJsonApiClientHeaderProviderBuilder().build());
    builder.setMtlsEndpoint(getDefaultMtlsEndpoint());
    builder.setSwitchToMtlsEndpointAllowed(true);
    return initDefaults(builder);
  }
  // Applies service-config defaults: no retries for unary calls, and exponential-backoff
  // polling (5s initial, x1.5, 45s cap, 300s total) for each long-running operation.
  private static Builder initDefaults(Builder builder) {
    builder
        .listInsightsConfigsSettings()
        .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_codes"))
        .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_params"));
    builder
        .createInsightsConfigSettings()
        .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_codes"))
        .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_params"));
    builder
        .getInsightsConfigSettings()
        .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_codes"))
        .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_params"));
    builder
        .updateInsightsConfigSettings()
        .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_codes"))
        .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_params"));
    builder
        .deleteInsightsConfigSettings()
        .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_codes"))
        .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_params"));
    builder
        .listLocationsSettings()
        .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_codes"))
        .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_params"));
    builder
        .getLocationSettings()
        .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_codes"))
        .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_params"));
    builder
        .createInsightsConfigOperationSettings()
        .setInitialCallSettings(
            UnaryCallSettings
                .<CreateInsightsConfigRequest, OperationSnapshot>newUnaryCallSettingsBuilder()
                .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_codes"))
                .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_params"))
                .build())
        .setResponseTransformer(
            ProtoOperationTransformers.ResponseTransformer.create(InsightsConfig.class))
        .setMetadataTransformer(
            ProtoOperationTransformers.MetadataTransformer.create(OperationMetadata.class))
        .setPollingAlgorithm(
            OperationTimedPollAlgorithm.create(
                RetrySettings.newBuilder()
                    .setInitialRetryDelayDuration(Duration.ofMillis(5000L))
                    .setRetryDelayMultiplier(1.5)
                    .setMaxRetryDelayDuration(Duration.ofMillis(45000L))
                    .setInitialRpcTimeoutDuration(Duration.ZERO)
                    .setRpcTimeoutMultiplier(1.0)
                    .setMaxRpcTimeoutDuration(Duration.ZERO)
                    .setTotalTimeoutDuration(Duration.ofMillis(300000L))
                    .build()));
    builder
        .updateInsightsConfigOperationSettings()
        .setInitialCallSettings(
            UnaryCallSettings
                .<UpdateInsightsConfigRequest, OperationSnapshot>newUnaryCallSettingsBuilder()
                .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_codes"))
                .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_params"))
                .build())
        .setResponseTransformer(
            ProtoOperationTransformers.ResponseTransformer.create(InsightsConfig.class))
        .setMetadataTransformer(
            ProtoOperationTransformers.MetadataTransformer.create(OperationMetadata.class))
        .setPollingAlgorithm(
            OperationTimedPollAlgorithm.create(
                RetrySettings.newBuilder()
                    .setInitialRetryDelayDuration(Duration.ofMillis(5000L))
                    .setRetryDelayMultiplier(1.5)
                    .setMaxRetryDelayDuration(Duration.ofMillis(45000L))
                    .setInitialRpcTimeoutDuration(Duration.ZERO)
                    .setRpcTimeoutMultiplier(1.0)
                    .setMaxRpcTimeoutDuration(Duration.ZERO)
                    .setTotalTimeoutDuration(Duration.ofMillis(300000L))
                    .build()));
    builder
        .deleteInsightsConfigOperationSettings()
        .setInitialCallSettings(
            UnaryCallSettings
                .<DeleteInsightsConfigRequest, OperationSnapshot>newUnaryCallSettingsBuilder()
                .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_codes"))
                .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_params"))
                .build())
        .setResponseTransformer(
            ProtoOperationTransformers.ResponseTransformer.create(Empty.class))
        .setMetadataTransformer(
            ProtoOperationTransformers.MetadataTransformer.create(OperationMetadata.class))
        .setPollingAlgorithm(
            OperationTimedPollAlgorithm.create(
                RetrySettings.newBuilder()
                    .setInitialRetryDelayDuration(Duration.ofMillis(5000L))
                    .setRetryDelayMultiplier(1.5)
                    .setMaxRetryDelayDuration(Duration.ofMillis(45000L))
                    .setInitialRpcTimeoutDuration(Duration.ZERO)
                    .setRpcTimeoutMultiplier(1.0)
                    .setMaxRpcTimeoutDuration(Duration.ZERO)
                    .setTotalTimeoutDuration(Duration.ofMillis(300000L))
                    .build()));
    return builder;
  }
  /**
   * Applies the given settings updater function to all of the unary API methods in this service.
   *
   * <p>Note: This method does not support applying settings to streaming methods.
   */
  public Builder applyToAllUnaryMethods(
      ApiFunction<UnaryCallSettings.Builder<?, ?>, Void> settingsUpdater) {
    super.applyToAllUnaryMethods(unaryMethodSettingsBuilders, settingsUpdater);
    return this;
  }
  /** Returns the live list of unary/paged settings builders (not a copy). */
  public ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders() {
    return unaryMethodSettingsBuilders;
  }
  /** Returns the builder for the settings used for calls to listInsightsConfigs. */
  public PagedCallSettings.Builder<
          ListInsightsConfigsRequest,
          ListInsightsConfigsResponse,
          ListInsightsConfigsPagedResponse>
      listInsightsConfigsSettings() {
    return listInsightsConfigsSettings;
  }
  /** Returns the builder for the settings used for calls to createInsightsConfig. */
  public UnaryCallSettings.Builder<CreateInsightsConfigRequest, Operation>
      createInsightsConfigSettings() {
    return createInsightsConfigSettings;
  }
  /** Returns the builder for the LRO settings used for calls to createInsightsConfig. */
  public OperationCallSettings.Builder<
          CreateInsightsConfigRequest, InsightsConfig, OperationMetadata>
      createInsightsConfigOperationSettings() {
    return createInsightsConfigOperationSettings;
  }
  /** Returns the builder for the settings used for calls to getInsightsConfig. */
  public UnaryCallSettings.Builder<GetInsightsConfigRequest, InsightsConfig>
      getInsightsConfigSettings() {
    return getInsightsConfigSettings;
  }
  /** Returns the builder for the settings used for calls to updateInsightsConfig. */
  public UnaryCallSettings.Builder<UpdateInsightsConfigRequest, Operation>
      updateInsightsConfigSettings() {
    return updateInsightsConfigSettings;
  }
  /** Returns the builder for the LRO settings used for calls to updateInsightsConfig. */
  public OperationCallSettings.Builder<
          UpdateInsightsConfigRequest, InsightsConfig, OperationMetadata>
      updateInsightsConfigOperationSettings() {
    return updateInsightsConfigOperationSettings;
  }
  /** Returns the builder for the settings used for calls to deleteInsightsConfig. */
  public UnaryCallSettings.Builder<DeleteInsightsConfigRequest, Operation>
      deleteInsightsConfigSettings() {
    return deleteInsightsConfigSettings;
  }
  /** Returns the builder for the LRO settings used for calls to deleteInsightsConfig. */
  public OperationCallSettings.Builder<DeleteInsightsConfigRequest, Empty, OperationMetadata>
      deleteInsightsConfigOperationSettings() {
    return deleteInsightsConfigOperationSettings;
  }
  /** Returns the builder for the settings used for calls to listLocations. */
  public PagedCallSettings.Builder<
          ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>
      listLocationsSettings() {
    return listLocationsSettings;
  }
  /** Returns the builder for the settings used for calls to getLocation. */
  public UnaryCallSettings.Builder<GetLocationRequest, Location> getLocationSettings() {
    return getLocationSettings;
  }
  @Override
  public InsightsConfigServiceStubSettings build() throws IOException {
    return new InsightsConfigServiceStubSettings(this);
  }
}
}
|
apache/nifi | 37,180 | nifi-manifest/nifi-runtime-manifest-core/src/main/java/org/apache/nifi/runtime/manifest/impl/StandardRuntimeManifestBuilder.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.runtime.manifest.impl;
import org.apache.nifi.annotation.behavior.InputRequirement;
import org.apache.nifi.c2.protocol.component.api.BuildInfo;
import org.apache.nifi.c2.protocol.component.api.Bundle;
import org.apache.nifi.c2.protocol.component.api.ConfigurableComponentDefinition;
import org.apache.nifi.c2.protocol.component.api.ControllerServiceDefinition;
import org.apache.nifi.c2.protocol.component.api.DefinedType;
import org.apache.nifi.c2.protocol.component.api.ExtensionComponent;
import org.apache.nifi.c2.protocol.component.api.FlowAnalysisRuleDefinition;
import org.apache.nifi.c2.protocol.component.api.MultiProcessorUseCase;
import org.apache.nifi.c2.protocol.component.api.ParameterProviderDefinition;
import org.apache.nifi.c2.protocol.component.api.ProcessorConfiguration;
import org.apache.nifi.c2.protocol.component.api.ProcessorDefinition;
import org.apache.nifi.c2.protocol.component.api.PropertyAllowableValue;
import org.apache.nifi.c2.protocol.component.api.PropertyDependency;
import org.apache.nifi.c2.protocol.component.api.PropertyDescriptor;
import org.apache.nifi.c2.protocol.component.api.PropertyResourceDefinition;
import org.apache.nifi.c2.protocol.component.api.Relationship;
import org.apache.nifi.c2.protocol.component.api.ReportingTaskDefinition;
import org.apache.nifi.c2.protocol.component.api.Restriction;
import org.apache.nifi.c2.protocol.component.api.RuntimeManifest;
import org.apache.nifi.c2.protocol.component.api.SchedulingDefaults;
import org.apache.nifi.c2.protocol.component.api.UseCase;
import org.apache.nifi.components.resource.ResourceCardinality;
import org.apache.nifi.components.resource.ResourceType;
import org.apache.nifi.components.state.Scope;
import org.apache.nifi.expression.ExpressionLanguageScope;
import org.apache.nifi.extension.manifest.AllowableValue;
import org.apache.nifi.extension.manifest.Attribute;
import org.apache.nifi.extension.manifest.DefaultSchedule;
import org.apache.nifi.extension.manifest.DefaultSettings;
import org.apache.nifi.extension.manifest.Dependency;
import org.apache.nifi.extension.manifest.DependentValues;
import org.apache.nifi.extension.manifest.DeprecationNotice;
import org.apache.nifi.extension.manifest.DynamicProperty;
import org.apache.nifi.extension.manifest.DynamicRelationship;
import org.apache.nifi.extension.manifest.Extension;
import org.apache.nifi.extension.manifest.ExtensionManifest;
import org.apache.nifi.extension.manifest.Property;
import org.apache.nifi.extension.manifest.ProvidedServiceAPI;
import org.apache.nifi.extension.manifest.ResourceDefinition;
import org.apache.nifi.extension.manifest.Restricted;
import org.apache.nifi.extension.manifest.Stateful;
import org.apache.nifi.extension.manifest.SystemResourceConsideration;
import org.apache.nifi.logging.LogLevel;
import org.apache.nifi.runtime.manifest.ComponentManifestBuilder;
import org.apache.nifi.runtime.manifest.ExtensionManifestContainer;
import org.apache.nifi.runtime.manifest.RuntimeManifestBuilder;
import org.apache.nifi.scheduling.SchedulingStrategy;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import java.util.stream.Collectors;
/**
* Standard builder for RuntimeManifest.
*/
public class StandardRuntimeManifestBuilder implements RuntimeManifestBuilder {
// Fallback processor defaults applied when an extension provides no DefaultSettings.
private static final String DEFAULT_YIELD_PERIOD = "1 sec";
private static final String DEFAULT_PENALIZATION_PERIOD = "30 sec";
private static final String DEFAULT_BULLETIN_LEVEL = LogLevel.WARN.name();
// Builder state accumulated by the fluent setters and assembled in build().
private String identifier;
private String version;
private String runtimeType;
private BuildInfo buildInfo;
private List<Bundle> bundles = new ArrayList<>();
private SchedulingDefaults schedulingDefaults;
/** Sets the manifest identifier and returns this builder for chaining. */
@Override
public RuntimeManifestBuilder identifier(final String identifier) {
    this.identifier = identifier;
    return this;
}
/** Sets the manifest version and returns this builder for chaining. */
@Override
public RuntimeManifestBuilder version(final String version) {
    this.version = version;
    return this;
}
/** Sets the runtime (agent) type and returns this builder for chaining. */
@Override
public RuntimeManifestBuilder runtimeType(final String runtimeType) {
    this.runtimeType = runtimeType;
    return this;
}
/** Sets the build info and returns this builder for chaining. */
@Override
public RuntimeManifestBuilder buildInfo(final BuildInfo buildInfo) {
    this.buildInfo = buildInfo;
    return this;
}
/**
 * Converts an extension manifest container into a Bundle (group/artifact/version plus
 * a component manifest for every extension it declares) and adds it to this builder.
 *
 * @throws IllegalArgumentException if the container, its manifest, or any of the
 *         required manifest coordinates (groupId, artifactId, version) is missing/blank
 */
@Override
public RuntimeManifestBuilder addBundle(final ExtensionManifestContainer extensionManifestContainer) {
    if (extensionManifestContainer == null) {
        throw new IllegalArgumentException("Extension manifest container is required");
    }
    final ExtensionManifest extensionManifest = extensionManifestContainer.getManifest();
    if (extensionManifest == null) {
        throw new IllegalArgumentException("Extension manifest is required");
    }
    // Bundle coordinates are mandatory; validate each with a shared helper.
    requireManifestField(extensionManifest.getGroupId(), "groupId");
    requireManifestField(extensionManifest.getArtifactId(), "artifactId");
    requireManifestField(extensionManifest.getVersion(), "version");
    final Bundle bundle = new Bundle();
    bundle.setGroup(extensionManifest.getGroupId());
    bundle.setArtifact(extensionManifest.getArtifactId());
    bundle.setVersion(extensionManifest.getVersion());
    if (extensionManifest.getExtensions() != null) {
        // Additional details are keyed by extension name; tolerate a null map.
        final Map<String, String> detailsByName = extensionManifestContainer.getAdditionalDetails();
        final Map<String, String> additionalDetailsMap = detailsByName == null ? Map.of() : detailsByName;
        final ComponentManifestBuilder componentManifestBuilder = new StandardComponentManifestBuilder();
        extensionManifest.getExtensions().forEach(extension -> {
            final String additionalDetails = additionalDetailsMap.get(extension.getName());
            addExtension(extensionManifest, extension, additionalDetails, componentManifestBuilder);
        });
        bundle.setComponentManifest(componentManifestBuilder.build());
    }
    bundles.add(bundle);
    return this;
}

/** Validates that a required manifest coordinate is present and non-blank. */
private static void requireManifestField(final String value, final String fieldName) {
    if (value == null || value.isBlank()) {
        throw new IllegalArgumentException("Extension manifest " + fieldName + " is required");
    }
}
/** Adds one bundle per extension manifest container, in iteration order. */
@Override
public RuntimeManifestBuilder addBundles(final Iterable<ExtensionManifestContainer> extensionManifests) {
    for (final ExtensionManifestContainer container : extensionManifests) {
        addBundle(container);
    }
    return this;
}
/**
 * Adds an already-built bundle directly to the manifest.
 *
 * @throws IllegalArgumentException if the bundle is null
 */
@Override
public RuntimeManifestBuilder addBundle(Bundle bundle) {
    if (bundle == null) {
        throw new IllegalArgumentException("Bundle is required");
    }
    bundles.add(bundle);
    return this;
}
/** Sets the scheduling defaults and returns this builder for chaining. */
@Override
public RuntimeManifestBuilder schedulingDefaults(final SchedulingDefaults schedulingDefaults) {
    this.schedulingDefaults = schedulingDefaults;
    return this;
}
/**
 * Assembles the RuntimeManifest from the accumulated builder state. The bundle list
 * is defensively copied, so later mutations of this builder do not affect the result.
 */
@Override
public RuntimeManifest build() {
    final RuntimeManifest runtimeManifest = new RuntimeManifest();
    runtimeManifest.setIdentifier(identifier);
    runtimeManifest.setVersion(version);
    // The manifest model calls the runtime type "agent type".
    runtimeManifest.setAgentType(runtimeType);
    runtimeManifest.setBuildInfo(buildInfo);
    runtimeManifest.setBundles(new ArrayList<>(bundles));
    runtimeManifest.setSchedulingDefaults(schedulingDefaults);
    return runtimeManifest;
}
/**
 * Routes a single extension to the type-specific definition builder. Unrecognized
 * extension types are silently ignored, as before.
 *
 * @throws IllegalArgumentException if the extension is null
 */
private void addExtension(final ExtensionManifest extensionManifest, final Extension extension, final String additionalDetails,
                          final ComponentManifestBuilder componentManifestBuilder) {
    if (extension == null) {
        throw new IllegalArgumentException("Extension cannot be null");
    }
    // Arrow labels: no fall-through, no break statements needed.
    switch (extension.getType()) {
        case PROCESSOR -> addProcessorDefinition(extensionManifest, extension, additionalDetails, componentManifestBuilder);
        case CONTROLLER_SERVICE -> addControllerServiceDefinition(extensionManifest, extension, additionalDetails, componentManifestBuilder);
        case REPORTING_TASK -> addReportingTaskDefinition(extensionManifest, extension, additionalDetails, componentManifestBuilder);
        case FLOW_ANALYSIS_RULE -> addFlowAnalysisRuleDefinition(extensionManifest, extension, additionalDetails, componentManifestBuilder);
        case PARAMETER_PROVIDER -> addParameterProviderDefinition(extensionManifest, extension, additionalDetails, componentManifestBuilder);
    }
}
/**
 * Builds a ProcessorDefinition from the extension manifest entry and registers it with
 * the component manifest builder. Covers common component fields plus processor-specific
 * scheduling, relationship, and attribute metadata.
 */
private void addProcessorDefinition(final ExtensionManifest extensionManifest, final Extension extension, final String additionalDetails,
                                    final ComponentManifestBuilder componentManifestBuilder) {
    final ProcessorDefinition processorDefinition = new ProcessorDefinition();
    populateDefinedType(extensionManifest, extension, processorDefinition);
    populateExtensionComponent(extensionManifest, extension, additionalDetails, processorDefinition);
    populateConfigurableComponent(extension, processorDefinition);
    // processor specific fields
    processorDefinition.setInputRequirement(getInputRequirement(extension.getInputRequirement()));
    processorDefinition.setSupportedRelationships(getSupportedRelationships(extension.getRelationships()));
    processorDefinition.setTriggerWhenEmpty(extension.getTriggerWhenEmpty());
    processorDefinition.setTriggerSerially(extension.getTriggerSerially());
    processorDefinition.setTriggerWhenAnyDestinationAvailable(extension.getTriggerWhenAnyDestinationAvailable());
    processorDefinition.setSupportsBatching(extension.getSupportsBatching());
    processorDefinition.setPrimaryNodeOnly(extension.getPrimaryNodeOnly());
    processorDefinition.setSideEffectFree(extension.getSideEffectFree());
    // A declared dynamic relationship implies the processor supports them.
    final DynamicRelationship dynamicRelationship = extension.getDynamicRelationship();
    if (dynamicRelationship != null) {
        processorDefinition.setSupportsDynamicRelationships(true);
        processorDefinition.setDynamicRelationship(getDynamicRelationship(dynamicRelationship));
    }
    // Fall back to framework-wide defaults when the extension declares none.
    final DefaultSettings defaultSettings = extension.getDefaultSettings();
    processorDefinition.setDefaultPenaltyDuration(defaultSettings == null ? DEFAULT_PENALIZATION_PERIOD : defaultSettings.getPenaltyDuration());
    processorDefinition.setDefaultYieldDuration(defaultSettings == null ? DEFAULT_YIELD_PERIOD : defaultSettings.getYieldDuration());
    processorDefinition.setDefaultBulletinLevel(defaultSettings == null ? DEFAULT_BULLETIN_LEVEL : defaultSettings.getBulletinLevel());
    final List<String> schedulingStrategies = new ArrayList<>();
    schedulingStrategies.add(SchedulingStrategy.TIMER_DRIVEN.name());
    schedulingStrategies.add(SchedulingStrategy.CRON_DRIVEN.name());
    // If a default schedule is provided then use that, otherwise default to TIMER_DRIVEN.
    // (Reuse the already-fetched defaultSchedule instead of re-fetching it from the extension.)
    final DefaultSchedule defaultSchedule = extension.getDefaultSchedule();
    final String defaultSchedulingStrategy = defaultSchedule == null
            ? SchedulingStrategy.TIMER_DRIVEN.name() : defaultSchedule.getStrategy();
    final Map<String, Integer> defaultConcurrentTasks = new LinkedHashMap<>(3);
    defaultConcurrentTasks.put(SchedulingStrategy.TIMER_DRIVEN.name(), SchedulingStrategy.TIMER_DRIVEN.getDefaultConcurrentTasks());
    defaultConcurrentTasks.put(SchedulingStrategy.CRON_DRIVEN.name(), SchedulingStrategy.CRON_DRIVEN.getDefaultConcurrentTasks());
    final Map<String, String> defaultSchedulingPeriods = new LinkedHashMap<>(2);
    defaultSchedulingPeriods.put(SchedulingStrategy.TIMER_DRIVEN.name(), SchedulingStrategy.TIMER_DRIVEN.getDefaultSchedulingPeriod());
    defaultSchedulingPeriods.put(SchedulingStrategy.CRON_DRIVEN.name(), SchedulingStrategy.CRON_DRIVEN.getDefaultSchedulingPeriod());
    // If a default schedule is provided then replace the default values for the default strategy
    if (defaultSchedule != null) {
        defaultSchedulingPeriods.put(defaultSchedule.getStrategy(), defaultSchedule.getPeriod());
        defaultConcurrentTasks.put(defaultSchedule.getStrategy(), Integer.valueOf(defaultSchedule.getConcurrentTasks()));
    }
    processorDefinition.setSupportedSchedulingStrategies(schedulingStrategies);
    processorDefinition.setDefaultSchedulingStrategy(defaultSchedulingStrategy);
    processorDefinition.setDefaultConcurrentTasksBySchedulingStrategy(defaultConcurrentTasks);
    processorDefinition.setDefaultSchedulingPeriodBySchedulingStrategy(defaultSchedulingPeriods);
    // Reads/writes attributes are optional; only set them when present.
    final List<Attribute> readsAttributes = extension.getReadsAttributes();
    if (isNotEmpty(readsAttributes)) {
        processorDefinition.setReadsAttributes(
                readsAttributes.stream()
                        .map(this::getAttribute)
                        .collect(Collectors.toList())
        );
    }
    final List<Attribute> writesAttributes = extension.getWritesAttributes();
    if (isNotEmpty(writesAttributes)) {
        processorDefinition.setWritesAttributes(
                writesAttributes.stream()
                        .map(this::getAttribute)
                        .collect(Collectors.toList())
        );
    }
    // Use cases default to empty lists rather than null for a stable manifest shape.
    final List<UseCase> useCases = extension.getUseCases() == null ? List.of() : extension.getUseCases().stream()
            .map(StandardRuntimeManifestBuilder::createUseCase)
            .toList();
    processorDefinition.setUseCases(useCases);
    final List<MultiProcessorUseCase> multiProcessorUseCases = extension.getMultiProcessorUseCases() == null ? List.of() : extension.getMultiProcessorUseCases().stream()
            .map(StandardRuntimeManifestBuilder::createMultiProcessorUseCase)
            .toList();
    processorDefinition.setMultiProcessorUseCases(multiProcessorUseCases);
    componentManifestBuilder.addProcessor(processorDefinition);
}
/**
 * Converts an extension-manifest UseCase into its C2 component API counterpart.
 */
private static UseCase createUseCase(final org.apache.nifi.extension.manifest.UseCase extensionUseCase) {
    final UseCase mapped = new UseCase();
    mapped.setDescription(extensionUseCase.getDescription());
    mapped.setKeywords(extensionUseCase.getKeywords());
    mapped.setConfiguration(extensionUseCase.getConfiguration());
    mapped.setNotes(extensionUseCase.getNotes());
    return mapped;
}

/**
 * Converts an extension-manifest MultiProcessorUseCase, including each of its
 * per-processor configurations, into the C2 component API representation.
 */
private static MultiProcessorUseCase createMultiProcessorUseCase(final org.apache.nifi.extension.manifest.MultiProcessorUseCase extensionUseCase) {
    final MultiProcessorUseCase mapped = new MultiProcessorUseCase();
    mapped.setDescription(extensionUseCase.getDescription());
    mapped.setKeywords(extensionUseCase.getKeywords());
    mapped.setNotes(extensionUseCase.getNotes());

    final List<ProcessorConfiguration> mappedConfigs = new ArrayList<>();
    for (final org.apache.nifi.extension.manifest.ProcessorConfiguration sourceConfig : extensionUseCase.getProcessorConfigurations()) {
        final ProcessorConfiguration mappedConfig = new ProcessorConfiguration();
        mappedConfig.setProcessorClassName(sourceConfig.getProcessorClassName());
        mappedConfig.setConfiguration(sourceConfig.getConfiguration());
        mappedConfigs.add(mappedConfig);
    }
    mapped.setConfigurations(mappedConfigs);
    return mapped;
}
/**
 * Maps an extension-manifest Attribute (name/description) to the C2 component API Attribute.
 */
private org.apache.nifi.c2.protocol.component.api.Attribute getAttribute(final Attribute attribute) {
    final org.apache.nifi.c2.protocol.component.api.Attribute mapped = new org.apache.nifi.c2.protocol.component.api.Attribute();
    mapped.setName(attribute.getName());
    mapped.setDescription(attribute.getDescription());
    return mapped;
}

/**
 * Maps an extension-manifest DynamicRelationship (name/description) to the C2 component API type.
 */
private org.apache.nifi.c2.protocol.component.api.DynamicRelationship getDynamicRelationship(final DynamicRelationship dynamicRelationship) {
    final org.apache.nifi.c2.protocol.component.api.DynamicRelationship mapped = new org.apache.nifi.c2.protocol.component.api.DynamicRelationship();
    mapped.setName(dynamicRelationship.getName());
    mapped.setDescription(dynamicRelationship.getDescription());
    return mapped;
}
/**
 * Translates the manifest input requirement into the C2 Requirement enum.
 * A null input (requirement not declared) passes through as null.
 */
private InputRequirement.Requirement getInputRequirement(final org.apache.nifi.extension.manifest.InputRequirement inputRequirement) {
    if (inputRequirement == null) {
        return null;
    }
    // Exhaustive over all three manifest constants; no default needed.
    final InputRequirement.Requirement mapped = switch (inputRequirement) {
        case INPUT_FORBIDDEN -> InputRequirement.Requirement.INPUT_FORBIDDEN;
        case INPUT_REQUIRED -> InputRequirement.Requirement.INPUT_REQUIRED;
        case INPUT_ALLOWED -> InputRequirement.Requirement.INPUT_ALLOWED;
    };
    return mapped;
}
/**
 * Maps the manifest relationships to C2 Relationship entries.
 * Returns null (not an empty list) when no relationships are declared,
 * matching the original serialization behavior.
 */
private List<Relationship> getSupportedRelationships(final List<org.apache.nifi.extension.manifest.Relationship> relationships) {
    if (relationships == null || relationships.isEmpty()) {
        return null;
    }
    final List<Relationship> mapped = new ArrayList<>(relationships.size());
    for (final org.apache.nifi.extension.manifest.Relationship source : relationships) {
        final Relationship target = new Relationship();
        target.setName(source.getName());
        target.setDescription(source.getDescription());
        mapped.add(target);
    }
    return mapped;
}
/**
 * Builds a ControllerServiceDefinition for the extension and registers it
 * with the component manifest builder.
 */
private void addControllerServiceDefinition(final ExtensionManifest extensionManifest, final Extension extension, final String additionalDetails,
        final ComponentManifestBuilder componentManifestBuilder) {
    final ControllerServiceDefinition definition = new ControllerServiceDefinition();
    populateDefinedType(extensionManifest, extension, definition);
    populateExtensionComponent(extensionManifest, extension, additionalDetails, definition);
    populateConfigurableComponent(extension, definition);
    componentManifestBuilder.addControllerService(definition);
}

/**
 * Builds a ParameterProviderDefinition for the extension and registers it
 * with the component manifest builder.
 */
private void addParameterProviderDefinition(final ExtensionManifest extensionManifest, final Extension extension, final String additionalDetails,
        final ComponentManifestBuilder componentManifestBuilder) {
    final ParameterProviderDefinition definition = new ParameterProviderDefinition();
    populateDefinedType(extensionManifest, extension, definition);
    populateExtensionComponent(extensionManifest, extension, additionalDetails, definition);
    populateConfigurableComponent(extension, definition);
    componentManifestBuilder.addParameterProvider(definition);
}

/**
 * Builds a FlowAnalysisRuleDefinition for the extension and registers it
 * with the component manifest builder.
 */
private void addFlowAnalysisRuleDefinition(final ExtensionManifest extensionManifest, final Extension extension, final String additionalDetails,
        final ComponentManifestBuilder componentManifestBuilder) {
    final FlowAnalysisRuleDefinition definition = new FlowAnalysisRuleDefinition();
    populateDefinedType(extensionManifest, extension, definition);
    populateExtensionComponent(extensionManifest, extension, additionalDetails, definition);
    populateConfigurableComponent(extension, definition);
    componentManifestBuilder.addFlowAnalysisRule(definition);
}
/**
 * Builds a ReportingTaskDefinition for the extension, including its supported
 * scheduling strategies and per-strategy default scheduling periods, and
 * registers it with the component manifest builder.
 *
 * Fix: use the already-captured {@code defaultSchedule} local instead of
 * calling {@code extension.getDefaultSchedule()} a second time when deriving
 * the default strategy (redundant accessor call; same null-checked value).
 */
private void addReportingTaskDefinition(final ExtensionManifest extensionManifest, final Extension extension, final String additionalDetails,
        final ComponentManifestBuilder componentManifestBuilder) {
    final ReportingTaskDefinition reportingTaskDefinition = new ReportingTaskDefinition();
    populateDefinedType(extensionManifest, extension, reportingTaskDefinition);
    populateExtensionComponent(extensionManifest, extension, additionalDetails, reportingTaskDefinition);
    populateConfigurableComponent(extension, reportingTaskDefinition);

    // Reporting tasks support both timer-driven and cron-driven scheduling.
    final List<String> schedulingStrategies = new ArrayList<>();
    schedulingStrategies.add(SchedulingStrategy.TIMER_DRIVEN.name());
    schedulingStrategies.add(SchedulingStrategy.CRON_DRIVEN.name());

    // If a default schedule is provided then use that, otherwise default to TIMER_DRIVEN
    final DefaultSchedule defaultSchedule = extension.getDefaultSchedule();
    final String defaultSchedulingStrategy = defaultSchedule == null
            ? SchedulingStrategy.TIMER_DRIVEN.name() : defaultSchedule.getStrategy();

    final Map<String, String> defaultSchedulingPeriods = new LinkedHashMap<>(2);
    defaultSchedulingPeriods.put(SchedulingStrategy.TIMER_DRIVEN.name(), SchedulingStrategy.TIMER_DRIVEN.getDefaultSchedulingPeriod());
    defaultSchedulingPeriods.put(SchedulingStrategy.CRON_DRIVEN.name(), SchedulingStrategy.CRON_DRIVEN.getDefaultSchedulingPeriod());

    // If a default schedule is provided then replace the default values for the default strategy
    if (defaultSchedule != null) {
        defaultSchedulingPeriods.put(defaultSchedule.getStrategy(), defaultSchedule.getPeriod());
    }

    reportingTaskDefinition.setSupportedSchedulingStrategies(schedulingStrategies);
    reportingTaskDefinition.setDefaultSchedulingStrategy(defaultSchedulingStrategy);
    reportingTaskDefinition.setDefaultSchedulingPeriodBySchedulingStrategy(defaultSchedulingPeriods);
    componentManifestBuilder.addReportingTask(reportingTaskDefinition);
}
/**
 * Fills the shared type coordinate on a DefinedType: the bundle's Maven
 * group/artifact/version from the manifest, plus the extension's class name
 * and description.
 */
private void populateDefinedType(final ExtensionManifest extensionManifest, final Extension extension, final DefinedType definedType) {
    // Bundle coordinate comes from the enclosing manifest...
    definedType.setGroup(extensionManifest.getGroupId());
    definedType.setArtifact(extensionManifest.getArtifactId());
    definedType.setVersion(extensionManifest.getVersion());
    // ...while the type identity comes from the extension itself.
    definedType.setType(extension.getName());
    definedType.setTypeDescription(extension.getDescription());
}
/**
 * Copies extension-level metadata from the manifest model onto the C2
 * ExtensionComponent: build revision, tags, see-also references, deprecation
 * info, provided service APIs, restrictions, stateful/scope info, system
 * resource considerations, and the additional-details flag.
 */
private void populateExtensionComponent(final ExtensionManifest extensionManifest, final Extension extension, final String additionalDetails,
final ExtensionComponent extensionComponent) {
final org.apache.nifi.extension.manifest.BuildInfo buildInfo = extensionManifest.getBuildInfo();
if (buildInfo != null) {
// Only the source revision is carried over into the component's build info.
final BuildInfo componentBuildInfo = new BuildInfo();
componentBuildInfo.setRevision(buildInfo.getRevision());
extensionComponent.setBuildInfo(componentBuildInfo);
}
final List<String> tags = extension.getTags();
if (isNotEmpty(tags)) {
// TreeSet gives a sorted, de-duplicated view of the tags.
extensionComponent.setTags(new TreeSet<>(tags));
}
final List<String> seeAlso = extension.getSeeAlso();
if (isNotEmpty(seeAlso)) {
extensionComponent.setSeeAlso(new TreeSet<>(seeAlso));
}
// the extension-manifest.xml will have <deprecationNotice/> for non-deprecated components which unmarshalls into
// a non-null DeprecationNotice, so we need to check if the reason is also non-null before setting the boolean here
final DeprecationNotice deprecationNotice = extension.getDeprecationNotice();
if (deprecationNotice != null && deprecationNotice.getReason() != null) {
extensionComponent.setDeprecated(true);
extensionComponent.setDeprecationReason(deprecationNotice.getReason());
final List<String> alternatives = deprecationNotice.getAlternatives();
if (isNotEmpty(alternatives)) {
extensionComponent.setDeprecationAlternatives(new TreeSet<>(alternatives));
}
}
final List<ProvidedServiceAPI> providedServiceApis = extension.getProvidedServiceAPIs();
if (isNotEmpty(providedServiceApis)) {
final List<DefinedType> providedApiTypes = new ArrayList<>();
providedServiceApis.forEach(providedServiceApi -> providedApiTypes.add(createProvidedApiType(providedServiceApi)));
extensionComponent.setProvidedApiImplementations(providedApiTypes);
}
final Restricted restricted = extension.getRestricted();
if (restricted != null) {
extensionComponent.setRestricted(true);
extensionComponent.setRestrictedExplanation(restricted.getGeneralRestrictionExplanation());
if (restricted.getRestrictions() != null) {
final Set<Restriction> explicitRestrictions = new HashSet<>();
restricted.getRestrictions().forEach(r -> explicitRestrictions.add(createRestriction(r)));
extensionComponent.setExplicitRestrictions(explicitRestrictions);
}
}
final Stateful stateful = extension.getStateful();
if (stateful != null) {
final org.apache.nifi.c2.protocol.component.api.Stateful componentStateful = new org.apache.nifi.c2.protocol.component.api.Stateful();
componentStateful.setDescription(stateful.getDescription());
if (stateful.getScopes() != null) {
componentStateful.setScopes(
stateful.getScopes().stream()
.map(this::getScope)
.collect(Collectors.toSet())
);
// NOTE(review): setStateful is only called when scopes are present, so a Stateful
// with a description but null scopes is dropped entirely — presumably guarding
// against an empty <stateful/> element (as with deprecationNotice above); confirm
// this is intended rather than an accidental placement inside the scopes check.
extensionComponent.setStateful(componentStateful);
}
}
final List<SystemResourceConsideration> systemResourceConsiderations = extension.getSystemResourceConsiderations();
if (isNotEmpty(systemResourceConsiderations)) {
extensionComponent.setSystemResourceConsiderations(
systemResourceConsiderations.stream()
.map(this::getSystemResourceConsideration)
.collect(Collectors.toList())
);
}
// Only a flag is recorded here; the additional-details content itself is not attached.
if (additionalDetails != null) {
extensionComponent.setAdditionalDetails(true);
}
}
/** Maps a manifest SystemResourceConsideration (resource/description) to its C2 counterpart. */
private org.apache.nifi.c2.protocol.component.api.SystemResourceConsideration getSystemResourceConsideration(final SystemResourceConsideration systemResourceConsideration) {
    final org.apache.nifi.c2.protocol.component.api.SystemResourceConsideration mapped = new org.apache.nifi.c2.protocol.component.api.SystemResourceConsideration();
    mapped.setResource(systemResourceConsideration.getResource());
    mapped.setDescription(systemResourceConsideration.getDescription());
    return mapped;
}

/** Translates a manifest state Scope into the C2 Scope; exhaustive over both constants. */
private Scope getScope(final org.apache.nifi.extension.manifest.Scope sourceScope) {
    final Scope mapped = switch (sourceScope) {
        case CLUSTER -> Scope.CLUSTER;
        case LOCAL -> Scope.LOCAL;
    };
    return mapped;
}

/** Builds a C2 Restriction from a manifest restriction entry. */
private Restriction createRestriction(final org.apache.nifi.extension.manifest.Restriction extensionRestriction) {
    final Restriction mapped = new Restriction();
    mapped.setRequiredPermission(extensionRestriction.getRequiredPermission());
    mapped.setExplanation(extensionRestriction.getExplanation());
    return mapped;
}

/** Describes a provided service API implementation as a DefinedType coordinate. */
private DefinedType createProvidedApiType(final ProvidedServiceAPI providedServiceApi) {
    final DefinedType mapped = new DefinedType();
    mapped.setType(providedServiceApi.getClassName());
    mapped.setGroup(providedServiceApi.getGroupId());
    mapped.setArtifact(providedServiceApi.getArtifactId());
    mapped.setVersion(providedServiceApi.getVersion());
    return mapped;
}
/**
 * Copies property descriptors and dynamic-property support info from the
 * extension onto the configurable component definition. Sections are only
 * populated when the corresponding lists are non-empty.
 */
private void populateConfigurableComponent(final Extension extension, final ConfigurableComponentDefinition configurableComponentDefinition) {
    final List<Property> properties = extension.getProperties();
    if (isNotEmpty(properties)) {
        // LinkedHashMap preserves the declaration order of the properties.
        final Map<String, PropertyDescriptor> propertyDescriptors = new LinkedHashMap<>();
        for (final Property property : properties) {
            addPropertyDescriptor(propertyDescriptors, property);
        }
        configurableComponentDefinition.setPropertyDescriptors(propertyDescriptors);
    }
    final List<DynamicProperty> dynamicProperties = extension.getDynamicProperties();
    if (isNotEmpty(dynamicProperties)) {
        configurableComponentDefinition.setSupportsDynamicProperties(true);
        configurableComponentDefinition.setSupportsSensitiveDynamicProperties(extension.getSupportsSensitiveDynamicProperties());
        final List<org.apache.nifi.c2.protocol.component.api.DynamicProperty> mappedDynamicProperties = new ArrayList<>();
        for (final DynamicProperty dynamicProperty : dynamicProperties) {
            mappedDynamicProperties.add(getDynamicProperty(dynamicProperty));
        }
        configurableComponentDefinition.setDynamicProperties(mappedDynamicProperties);
    }
}
/** Maps a manifest DynamicProperty (name/value/description/EL scope) to the C2 type. */
private org.apache.nifi.c2.protocol.component.api.DynamicProperty getDynamicProperty(final DynamicProperty dynamicProperty) {
    final org.apache.nifi.c2.protocol.component.api.DynamicProperty mapped = new org.apache.nifi.c2.protocol.component.api.DynamicProperty();
    mapped.setName(dynamicProperty.getName());
    mapped.setValue(dynamicProperty.getValue());
    mapped.setDescription(dynamicProperty.getDescription());
    mapped.setExpressionLanguageScope(getELScope(dynamicProperty.getExpressionLanguageScope()));
    return mapped;
}

/** Converts the property and registers the resulting descriptor under its name. */
private void addPropertyDescriptor(final Map<String, PropertyDescriptor> propertyDescriptors, final Property property) {
    final PropertyDescriptor descriptor = createPropertyDescriptor(property);
    propertyDescriptors.put(descriptor.getName(), descriptor);
}
/**
 * Builds a full PropertyDescriptor from a manifest Property, including
 * allowable values, the referenced controller-service type, resource
 * definition, and property dependencies.
 */
private PropertyDescriptor createPropertyDescriptor(final Property property) {
    final PropertyDescriptor propertyDescriptor = new PropertyDescriptor();
    // Simple scalar attributes copied straight across.
    propertyDescriptor.setName(property.getName());
    propertyDescriptor.setDisplayName(property.getDisplayName());
    propertyDescriptor.setDescription(property.getDescription());
    propertyDescriptor.setDefaultValue(property.getDefaultValue());
    propertyDescriptor.setRequired(property.isRequired());
    propertyDescriptor.setSensitive(property.isSensitive());
    propertyDescriptor.setDynamic(property.isDynamic());
    // Nested structures go through their dedicated converters (each null-safe).
    propertyDescriptor.setExpressionLanguageScope(getELScope(property.getExpressionLanguageScope()));
    propertyDescriptor.setAllowableValues(getPropertyAllowableValues(property.getAllowableValues()));
    propertyDescriptor.setTypeProvidedByValue(getControllerServiceDefinedType(property.getControllerServiceDefinition()));
    propertyDescriptor.setResourceDefinition(getPropertyResourceDefinition(property.getResourceDefinition()));
    propertyDescriptor.setDependencies(getPropertyDependencies(property.getDependencies()));
    return propertyDescriptor;
}
/**
 * Maps the manifest property dependencies to C2 PropertyDependency entries.
 * Returns null when there are no dependencies, matching serialization expectations.
 */
private List<PropertyDependency> getPropertyDependencies(final List<Dependency> dependencies) {
    if (dependencies == null || dependencies.isEmpty()) {
        return null;
    }
    final List<PropertyDependency> mapped = new ArrayList<>(dependencies.size());
    for (final Dependency dependency : dependencies) {
        final PropertyDependency propertyDependency = new PropertyDependency();
        propertyDependency.setPropertyName(dependency.getPropertyName());
        propertyDependency.setPropertyDisplayName(dependency.getPropertyDisplayName());
        // Only copy dependent values when both the wrapper and its list are present.
        final DependentValues dependentValues = dependency.getDependentValues();
        if (dependentValues != null && dependentValues.getValues() != null) {
            propertyDependency.setDependentValues(new ArrayList<>(dependentValues.getValues()));
        }
        mapped.add(propertyDependency);
    }
    return mapped;
}
/**
 * Maps a manifest ResourceDefinition to the C2 PropertyResourceDefinition,
 * or null when no cardinality is declared.
 */
private PropertyResourceDefinition getPropertyResourceDefinition(final ResourceDefinition resourceDefinition) {
    if (resourceDefinition == null || resourceDefinition.getCardinality() == null) {
        return null;
    }
    final PropertyResourceDefinition mapped = new PropertyResourceDefinition();
    switch (resourceDefinition.getCardinality()) {
        case SINGLE -> mapped.setCardinality(ResourceCardinality.SINGLE);
        case MULTIPLE -> mapped.setCardinality(ResourceCardinality.MULTIPLE);
    }
    final Set<ResourceType> resourceTypes = new HashSet<>();
    for (final org.apache.nifi.extension.manifest.ResourceType resourceType : resourceDefinition.getResourceTypes()) {
        resourceTypes.add(getResourceType(resourceType));
    }
    mapped.setResourceTypes(resourceTypes);
    return mapped;
}
/** Translates a manifest ResourceType to the C2 ResourceType; exhaustive over all constants. */
private ResourceType getResourceType(final org.apache.nifi.extension.manifest.ResourceType resourceType) {
    final ResourceType mapped = switch (resourceType) {
        case DIRECTORY -> ResourceType.DIRECTORY;
        case FILE -> ResourceType.FILE;
        case TEXT -> ResourceType.TEXT;
        case URL -> ResourceType.URL;
    };
    return mapped;
}

/** Translates a manifest EL scope to the C2 scope; a null (undeclared) scope passes through. */
private ExpressionLanguageScope getELScope(final org.apache.nifi.extension.manifest.ExpressionLanguageScope elScope) {
    if (elScope == null) {
        return null;
    }
    final ExpressionLanguageScope mapped = switch (elScope) {
        case NONE -> ExpressionLanguageScope.NONE;
        case FLOWFILE_ATTRIBUTES -> ExpressionLanguageScope.FLOWFILE_ATTRIBUTES;
        case ENVIRONMENT -> ExpressionLanguageScope.ENVIRONMENT;
    };
    return mapped;
}
/**
 * Maps manifest AllowableValues to C2 PropertyAllowableValue entries,
 * or null when none are declared.
 */
private List<PropertyAllowableValue> getPropertyAllowableValues(final List<AllowableValue> allowableValues) {
    if (allowableValues == null || allowableValues.isEmpty()) {
        return null;
    }
    final List<PropertyAllowableValue> mapped = new ArrayList<>(allowableValues.size());
    for (final AllowableValue allowableValue : allowableValues) {
        final PropertyAllowableValue propertyAllowableValue = new PropertyAllowableValue();
        propertyAllowableValue.setValue(allowableValue.getValue());
        propertyAllowableValue.setDisplayName(allowableValue.getDisplayName());
        propertyAllowableValue.setDescription(allowableValue.getDescription());
        mapped.add(propertyAllowableValue);
    }
    return mapped;
}
/**
 * Describes the controller service API required by a property as a DefinedType
 * coordinate, or null when the property does not reference a controller service.
 */
private DefinedType getControllerServiceDefinedType(
        final org.apache.nifi.extension.manifest.ControllerServiceDefinition controllerServiceDefinition) {
    if (controllerServiceDefinition == null) {
        return null;
    }
    final DefinedType definedType = new DefinedType();
    definedType.setType(controllerServiceDefinition.getClassName());
    definedType.setGroup(controllerServiceDefinition.getGroupId());
    definedType.setArtifact(controllerServiceDefinition.getArtifactId());
    definedType.setVersion(controllerServiceDefinition.getVersion());
    return definedType;
}

/** Null-safe emptiness check used throughout the mapping methods. */
private <T> boolean isNotEmpty(final Collection<T> collection) {
    if (collection == null) {
        return false;
    }
    return !collection.isEmpty();
}
}
|
googleapis/google-cloud-java | 36,623 | java-compute/proto-google-cloud-compute-v1/src/main/java/com/google/cloud/compute/v1/RemoveRuleRegionSecurityPolicyRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/compute/v1/compute.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.compute.v1;
/**
*
*
* <pre>
* A request message for RegionSecurityPolicies.RemoveRule. See the method description for details.
* </pre>
*
* Protobuf type {@code google.cloud.compute.v1.RemoveRuleRegionSecurityPolicyRequest}
*/
public final class RemoveRuleRegionSecurityPolicyRequest
extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.compute.v1.RemoveRuleRegionSecurityPolicyRequest)
RemoveRuleRegionSecurityPolicyRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use RemoveRuleRegionSecurityPolicyRequest.newBuilder() to construct.
// NOTE(review): protoc-generated code ("DO NOT EDIT" per file header). Changes here
// should be made by regenerating from google/cloud/compute/v1/compute.proto.
// Constructor used by the generated Builder.
private RemoveRuleRegionSecurityPolicyRequest(
com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
// Default-instance constructor: proto3 string fields default to "".
private RemoveRuleRegionSecurityPolicyRequest() {
project_ = "";
region_ = "";
securityPolicy_ = "";
}
// Reflection hook used by the protobuf runtime to create fresh instances.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new RemoveRuleRegionSecurityPolicyRequest();
}
// Message descriptor resolved from the generated Compute proto registry.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_RemoveRuleRegionSecurityPolicyRequest_descriptor;
}
// Field-accessor table binding this message class and its Builder to the descriptor.
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_RemoveRuleRegionSecurityPolicyRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.compute.v1.RemoveRuleRegionSecurityPolicyRequest.class,
com.google.cloud.compute.v1.RemoveRuleRegionSecurityPolicyRequest.Builder.class);
}
// Presence bits for explicit-presence optional fields; bit 0 tracks "priority".
private int bitField0_;
public static final int PRIORITY_FIELD_NUMBER = 445151652;
private int priority_ = 0;
/**
 * The priority of the rule to remove from the security policy.
 *
 * <code>optional int32 priority = 445151652;</code>
 *
 * @return Whether the priority field is set.
 */
@java.lang.Override
public boolean hasPriority() {
// Bit 0 of bitField0_ records whether priority was explicitly set.
return ((bitField0_ & 0x00000001) != 0);
}
/**
 * The priority of the rule to remove from the security policy.
 *
 * <code>optional int32 priority = 445151652;</code>
 *
 * @return The priority (0 when unset; check {@link #hasPriority()} first).
 */
@java.lang.Override
public int getPriority() {
return priority_;
}
public static final int PROJECT_FIELD_NUMBER = 227560217;
// Holds either a String or a ByteString; converted lazily in either direction.
@SuppressWarnings("serial")
private volatile java.lang.Object project_ = "";
/**
 * Project ID for this request.
 *
 * <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "project"];</code>
 *
 * @return The project.
 */
@java.lang.Override
public java.lang.String getProject() {
java.lang.Object ref = project_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
// First String access after parsing: decode UTF-8 bytes once and cache the result.
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
project_ = s;
return s;
}
}
/**
 * Project ID for this request.
 *
 * <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "project"];</code>
 *
 * @return The bytes for project.
 */
@java.lang.Override
public com.google.protobuf.ByteString getProjectBytes() {
java.lang.Object ref = project_;
if (ref instanceof java.lang.String) {
// Cache the ByteString form so later byte accesses avoid re-encoding.
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
project_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int REGION_FIELD_NUMBER = 138946292;
// Holds either a String or a ByteString; converted lazily in either direction.
@SuppressWarnings("serial")
private volatile java.lang.Object region_ = "";
/**
 * Name of the region scoping this request.
 *
 * <code>string region = 138946292 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "region"];</code>
 *
 * @return The region.
 */
@java.lang.Override
public java.lang.String getRegion() {
java.lang.Object ref = region_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
// First String access after parsing: decode UTF-8 bytes once and cache the result.
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
region_ = s;
return s;
}
}
/**
 * Name of the region scoping this request.
 *
 * <code>string region = 138946292 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "region"];</code>
 *
 * @return The bytes for region.
 */
@java.lang.Override
public com.google.protobuf.ByteString getRegionBytes() {
java.lang.Object ref = region_;
if (ref instanceof java.lang.String) {
// Cache the ByteString form so later byte accesses avoid re-encoding.
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
region_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int SECURITY_POLICY_FIELD_NUMBER = 171082513;
// Holds either a String or a ByteString; converted lazily in either direction.
@SuppressWarnings("serial")
private volatile java.lang.Object securityPolicy_ = "";
/**
 * Name of the security policy to update.
 *
 * <code>string security_policy = 171082513 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @return The securityPolicy.
 */
@java.lang.Override
public java.lang.String getSecurityPolicy() {
java.lang.Object ref = securityPolicy_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
// First String access after parsing: decode UTF-8 bytes once and cache the result.
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
securityPolicy_ = s;
return s;
}
}
/**
 * Name of the security policy to update.
 *
 * <code>string security_policy = 171082513 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @return The bytes for securityPolicy.
 */
@java.lang.Override
public com.google.protobuf.ByteString getSecurityPolicyBytes() {
java.lang.Object ref = securityPolicy_;
if (ref instanceof java.lang.String) {
// Cache the ByteString form so later byte accesses avoid re-encoding.
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
securityPolicy_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// Memoized initialization check: -1 = not yet computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
// No required fields to validate, so the message is always initialized.
memoizedIsInitialized = 1;
return true;
}
// Serializes set fields in ascending field-number order:
// region (138946292), security_policy (171082513), project (227560217), priority (445151652).
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(region_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 138946292, region_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(securityPolicy_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 171082513, securityPolicy_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(project_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 227560217, project_);
}
// priority uses explicit presence: written only when its presence bit is set.
if (((bitField0_ & 0x00000001) != 0)) {
output.writeInt32(445151652, priority_);
}
getUnknownFields().writeTo(output);
}
// Computes and memoizes the serialized size, mirroring writeTo field-by-field.
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(region_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(138946292, region_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(securityPolicy_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(171082513, securityPolicy_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(project_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(227560217, project_);
}
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeInt32Size(445151652, priority_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
// Structural equality over all fields, including unknown fields. For the optional
// priority field, presence must match before values are compared.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.compute.v1.RemoveRuleRegionSecurityPolicyRequest)) {
return super.equals(obj);
}
com.google.cloud.compute.v1.RemoveRuleRegionSecurityPolicyRequest other =
(com.google.cloud.compute.v1.RemoveRuleRegionSecurityPolicyRequest) obj;
if (hasPriority() != other.hasPriority()) return false;
if (hasPriority()) {
if (getPriority() != other.getPriority()) return false;
}
if (!getProject().equals(other.getProject())) return false;
if (!getRegion().equals(other.getRegion())) return false;
if (!getSecurityPolicy().equals(other.getSecurityPolicy())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
// Memoized hash over descriptor and set fields, using the generated
// 37 (field number) / 53 (field value) mixing scheme; consistent with equals.
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasPriority()) {
hash = (37 * hash) + PRIORITY_FIELD_NUMBER;
hash = (53 * hash) + getPriority();
}
hash = (37 * hash) + PROJECT_FIELD_NUMBER;
hash = (53 * hash) + getProject().hashCode();
hash = (37 * hash) + REGION_FIELD_NUMBER;
hash = (53 * hash) + getRegion().hashCode();
hash = (37 * hash) + SECURITY_POLICY_FIELD_NUMBER;
hash = (53 * hash) + getSecurityPolicy().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
  // Generated static parse helpers: decode a RemoveRuleRegionSecurityPolicyRequest from the
  // supported input forms. Byte-based overloads delegate directly to PARSER; stream-based
  // overloads go through the GeneratedMessageV3 helpers, which translate parser failures
  // into the appropriate exception types.
  public static com.google.cloud.compute.v1.RemoveRuleRegionSecurityPolicyRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.compute.v1.RemoveRuleRegionSecurityPolicyRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.compute.v1.RemoveRuleRegionSecurityPolicyRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.compute.v1.RemoveRuleRegionSecurityPolicyRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.compute.v1.RemoveRuleRegionSecurityPolicyRequest parseFrom(
      byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.compute.v1.RemoveRuleRegionSecurityPolicyRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  // Stream overloads: IOExceptions from the underlying stream propagate as java.io.IOException.
  public static com.google.cloud.compute.v1.RemoveRuleRegionSecurityPolicyRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.compute.v1.RemoveRuleRegionSecurityPolicyRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Delimited overloads: read a varint length prefix before the message payload.
  public static com.google.cloud.compute.v1.RemoveRuleRegionSecurityPolicyRequest
      parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.compute.v1.RemoveRuleRegionSecurityPolicyRequest
      parseDelimitedFrom(
          java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.compute.v1.RemoveRuleRegionSecurityPolicyRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.compute.v1.RemoveRuleRegionSecurityPolicyRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Builder factory methods (generated).
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  // Returns a fresh Builder seeded from the default instance.
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  // Returns a Builder pre-populated with the fields of {@code prototype}.
  public static Builder newBuilder(
      com.google.cloud.compute.v1.RemoveRuleRegionSecurityPolicyRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  // The default instance maps to an empty Builder; any other instance is copied in.
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * A request message for RegionSecurityPolicies.RemoveRule. See the method description for details.
   * </pre>
   *
   * Protobuf type {@code google.cloud.compute.v1.RemoveRuleRegionSecurityPolicyRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.compute.v1.RemoveRuleRegionSecurityPolicyRequest)
      com.google.cloud.compute.v1.RemoveRuleRegionSecurityPolicyRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.compute.v1.Compute
          .internal_static_google_cloud_compute_v1_RemoveRuleRegionSecurityPolicyRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.compute.v1.Compute
          .internal_static_google_cloud_compute_v1_RemoveRuleRegionSecurityPolicyRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.compute.v1.RemoveRuleRegionSecurityPolicyRequest.class,
              com.google.cloud.compute.v1.RemoveRuleRegionSecurityPolicyRequest.Builder.class);
    }

    // Construct using
    // com.google.cloud.compute.v1.RemoveRuleRegionSecurityPolicyRequest.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    // Resets every field to its default and clears all presence bits.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      priority_ = 0;
      project_ = "";
      region_ = "";
      securityPolicy_ = "";
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.compute.v1.Compute
          .internal_static_google_cloud_compute_v1_RemoveRuleRegionSecurityPolicyRequest_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.compute.v1.RemoveRuleRegionSecurityPolicyRequest
        getDefaultInstanceForType() {
      return com.google.cloud.compute.v1.RemoveRuleRegionSecurityPolicyRequest.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.compute.v1.RemoveRuleRegionSecurityPolicyRequest build() {
      com.google.cloud.compute.v1.RemoveRuleRegionSecurityPolicyRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.compute.v1.RemoveRuleRegionSecurityPolicyRequest buildPartial() {
      com.google.cloud.compute.v1.RemoveRuleRegionSecurityPolicyRequest result =
          new com.google.cloud.compute.v1.RemoveRuleRegionSecurityPolicyRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies set fields from this builder into {@code result}. Only the optional
    // priority field propagates a presence bit into result.bitField0_; the string
    // fields are copied unconditionally when their builder bit is set.
    private void buildPartial0(
        com.google.cloud.compute.v1.RemoveRuleRegionSecurityPolicyRequest result) {
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.priority_ = priority_;
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.project_ = project_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.region_ = region_;
      }
      if (((from_bitField0_ & 0x00000008) != 0)) {
        result.securityPolicy_ = securityPolicy_;
      }
      result.bitField0_ |= to_bitField0_;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.compute.v1.RemoveRuleRegionSecurityPolicyRequest) {
        return mergeFrom((com.google.cloud.compute.v1.RemoveRuleRegionSecurityPolicyRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Field-by-field merge: priority merges only when present on {@code other};
    // string fields merge only when non-empty (standard proto3 merge semantics).
    public Builder mergeFrom(
        com.google.cloud.compute.v1.RemoveRuleRegionSecurityPolicyRequest other) {
      if (other
          == com.google.cloud.compute.v1.RemoveRuleRegionSecurityPolicyRequest.getDefaultInstance())
        return this;
      if (other.hasPriority()) {
        setPriority(other.getPriority());
      }
      if (!other.getProject().isEmpty()) {
        project_ = other.project_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (!other.getRegion().isEmpty()) {
        region_ = other.region_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      if (!other.getSecurityPolicy().isEmpty()) {
        securityPolicy_ = other.securityPolicy_;
        bitField0_ |= 0x00000008;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    // Wire-format parse loop. Each case value is a precomputed tag
    // (field_number << 3 | wire_type) for this message's fields; unknown tags are
    // routed to parseUnknownField. Tag 0 (or an end-group tag) terminates the loop.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 1111570338: // field 138946292 (region), wire type 2 (length-delimited)
              {
                region_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000004;
                break;
              } // case 1111570338
            case 1368660106: // field 171082513 (security_policy), wire type 2
              {
                securityPolicy_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000008;
                break;
              } // case 1368660106
            case 1820481738: // field 227560217 (project), wire type 2
              {
                project_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 1820481738
            case -733754080: // field 445151652 (priority), wire type 0 (varint); tag overflows int
              {
                priority_ = input.readInt32();
                bitField0_ |= 0x00000001;
                break;
              } // case -733754080
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    // Presence bits: 0x1 = priority, 0x2 = project, 0x4 = region, 0x8 = securityPolicy.
    private int bitField0_;

    private int priority_;
    /**
     *
     *
     * <pre>
     * The priority of the rule to remove from the security policy.
     * </pre>
     *
     * <code>optional int32 priority = 445151652;</code>
     *
     * @return Whether the priority field is set.
     */
    @java.lang.Override
    public boolean hasPriority() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     *
     *
     * <pre>
     * The priority of the rule to remove from the security policy.
     * </pre>
     *
     * <code>optional int32 priority = 445151652;</code>
     *
     * @return The priority.
     */
    @java.lang.Override
    public int getPriority() {
      return priority_;
    }
    /**
     *
     *
     * <pre>
     * The priority of the rule to remove from the security policy.
     * </pre>
     *
     * <code>optional int32 priority = 445151652;</code>
     *
     * @param value The priority to set.
     * @return This builder for chaining.
     */
    public Builder setPriority(int value) {
      priority_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The priority of the rule to remove from the security policy.
     * </pre>
     *
     * <code>optional int32 priority = 445151652;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearPriority() {
      bitField0_ = (bitField0_ & ~0x00000001);
      priority_ = 0;
      onChanged();
      return this;
    }

    // Stored as String or ByteString; lazily converted and cached by the accessors below.
    private java.lang.Object project_ = "";
    /**
     *
     *
     * <pre>
     * Project ID for this request.
     * </pre>
     *
     * <code>
     * string project = 227560217 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "project"];
     * </code>
     *
     * @return The project.
     */
    public java.lang.String getProject() {
      java.lang.Object ref = project_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        project_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Project ID for this request.
     * </pre>
     *
     * <code>
     * string project = 227560217 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "project"];
     * </code>
     *
     * @return The bytes for project.
     */
    public com.google.protobuf.ByteString getProjectBytes() {
      java.lang.Object ref = project_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        project_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Project ID for this request.
     * </pre>
     *
     * <code>
     * string project = 227560217 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "project"];
     * </code>
     *
     * @param value The project to set.
     * @return This builder for chaining.
     */
    public Builder setProject(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      project_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Project ID for this request.
     * </pre>
     *
     * <code>
     * string project = 227560217 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "project"];
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearProject() {
      project_ = getDefaultInstance().getProject();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Project ID for this request.
     * </pre>
     *
     * <code>
     * string project = 227560217 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "project"];
     * </code>
     *
     * @param value The bytes for project to set.
     * @return This builder for chaining.
     */
    public Builder setProjectBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      project_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    // Stored as String or ByteString; lazily converted and cached by the accessors below.
    private java.lang.Object region_ = "";
    /**
     *
     *
     * <pre>
     * Name of the region scoping this request.
     * </pre>
     *
     * <code>
     * string region = 138946292 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "region"];
     * </code>
     *
     * @return The region.
     */
    public java.lang.String getRegion() {
      java.lang.Object ref = region_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        region_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Name of the region scoping this request.
     * </pre>
     *
     * <code>
     * string region = 138946292 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "region"];
     * </code>
     *
     * @return The bytes for region.
     */
    public com.google.protobuf.ByteString getRegionBytes() {
      java.lang.Object ref = region_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        region_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Name of the region scoping this request.
     * </pre>
     *
     * <code>
     * string region = 138946292 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "region"];
     * </code>
     *
     * @param value The region to set.
     * @return This builder for chaining.
     */
    public Builder setRegion(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      region_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Name of the region scoping this request.
     * </pre>
     *
     * <code>
     * string region = 138946292 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "region"];
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearRegion() {
      region_ = getDefaultInstance().getRegion();
      bitField0_ = (bitField0_ & ~0x00000004);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Name of the region scoping this request.
     * </pre>
     *
     * <code>
     * string region = 138946292 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "region"];
     * </code>
     *
     * @param value The bytes for region to set.
     * @return This builder for chaining.
     */
    public Builder setRegionBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      region_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    // Stored as String or ByteString; lazily converted and cached by the accessors below.
    private java.lang.Object securityPolicy_ = "";
    /**
     *
     *
     * <pre>
     * Name of the security policy to update.
     * </pre>
     *
     * <code>string security_policy = 171082513 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return The securityPolicy.
     */
    public java.lang.String getSecurityPolicy() {
      java.lang.Object ref = securityPolicy_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        securityPolicy_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Name of the security policy to update.
     * </pre>
     *
     * <code>string security_policy = 171082513 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return The bytes for securityPolicy.
     */
    public com.google.protobuf.ByteString getSecurityPolicyBytes() {
      java.lang.Object ref = securityPolicy_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        securityPolicy_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Name of the security policy to update.
     * </pre>
     *
     * <code>string security_policy = 171082513 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @param value The securityPolicy to set.
     * @return This builder for chaining.
     */
    public Builder setSecurityPolicy(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      securityPolicy_ = value;
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Name of the security policy to update.
     * </pre>
     *
     * <code>string security_policy = 171082513 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearSecurityPolicy() {
      securityPolicy_ = getDefaultInstance().getSecurityPolicy();
      bitField0_ = (bitField0_ & ~0x00000008);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Name of the security policy to update.
     * </pre>
     *
     * <code>string security_policy = 171082513 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @param value The bytes for securityPolicy to set.
     * @return This builder for chaining.
     */
    public Builder setSecurityPolicyBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      securityPolicy_ = value;
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.compute.v1.RemoveRuleRegionSecurityPolicyRequest)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.compute.v1.RemoveRuleRegionSecurityPolicyRequest)
  // Singleton default instance; all fields hold their proto3 defaults.
  private static final com.google.cloud.compute.v1.RemoveRuleRegionSecurityPolicyRequest
      DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.compute.v1.RemoveRuleRegionSecurityPolicyRequest();
  }

  public static com.google.cloud.compute.v1.RemoveRuleRegionSecurityPolicyRequest
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Shared parser instance. Delegates to Builder.mergeFrom and, on failure, attaches the
  // partially-built message to the thrown InvalidProtocolBufferException.
  private static final com.google.protobuf.Parser<RemoveRuleRegionSecurityPolicyRequest> PARSER =
      new com.google.protobuf.AbstractParser<RemoveRuleRegionSecurityPolicyRequest>() {
        @java.lang.Override
        public RemoveRuleRegionSecurityPolicyRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O failures so callers see a single exception type.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  // Accessors for the shared parser and default instance.
  public static com.google.protobuf.Parser<RemoveRuleRegionSecurityPolicyRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<RemoveRuleRegionSecurityPolicyRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.compute.v1.RemoveRuleRegionSecurityPolicyRequest
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
// ---- Concatenation artifact: boundary between two generated source files. ----
// Following file: googleapis/google-cloud-java (37,036 bytes)
// java-iam/google-iam-policy/src/main/java/com/google/iam/v3/stub/HttpJsonPrincipalAccessBoundaryPoliciesStub.java
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.iam.v3.stub;
import static com.google.iam.v3.PrincipalAccessBoundaryPoliciesClient.ListPrincipalAccessBoundaryPoliciesPagedResponse;
import static com.google.iam.v3.PrincipalAccessBoundaryPoliciesClient.SearchPrincipalAccessBoundaryPolicyBindingsPagedResponse;
import com.google.api.HttpRule;
import com.google.api.core.InternalApi;
import com.google.api.gax.core.BackgroundResource;
import com.google.api.gax.core.BackgroundResourceAggregation;
import com.google.api.gax.httpjson.ApiMethodDescriptor;
import com.google.api.gax.httpjson.HttpJsonCallSettings;
import com.google.api.gax.httpjson.HttpJsonOperationSnapshot;
import com.google.api.gax.httpjson.HttpJsonStubCallableFactory;
import com.google.api.gax.httpjson.ProtoMessageRequestFormatter;
import com.google.api.gax.httpjson.ProtoMessageResponseParser;
import com.google.api.gax.httpjson.ProtoRestSerializer;
import com.google.api.gax.httpjson.longrunning.stub.HttpJsonOperationsStub;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.OperationCallable;
import com.google.api.gax.rpc.RequestParamsBuilder;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.common.collect.ImmutableMap;
import com.google.iam.v3.CreatePrincipalAccessBoundaryPolicyRequest;
import com.google.iam.v3.DeletePrincipalAccessBoundaryPolicyRequest;
import com.google.iam.v3.GetPrincipalAccessBoundaryPolicyRequest;
import com.google.iam.v3.ListPrincipalAccessBoundaryPoliciesRequest;
import com.google.iam.v3.ListPrincipalAccessBoundaryPoliciesResponse;
import com.google.iam.v3.OperationMetadata;
import com.google.iam.v3.PrincipalAccessBoundaryPolicy;
import com.google.iam.v3.SearchPrincipalAccessBoundaryPolicyBindingsRequest;
import com.google.iam.v3.SearchPrincipalAccessBoundaryPolicyBindingsResponse;
import com.google.iam.v3.UpdatePrincipalAccessBoundaryPolicyRequest;
import com.google.longrunning.Operation;
import com.google.protobuf.Empty;
import com.google.protobuf.TypeRegistry;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
* REST stub implementation for the PrincipalAccessBoundaryPolicies service API.
*
* <p>This class is for advanced usage and reflects the underlying API directly.
*/
@Generated("by gapic-generator-java")
public class HttpJsonPrincipalAccessBoundaryPoliciesStub
extends PrincipalAccessBoundaryPoliciesStub {
  // Registry of message types that may appear packed in Any fields of Operation
  // responses (metadata/response payloads), used when parsing REST responses.
  private static final TypeRegistry typeRegistry =
      TypeRegistry.newBuilder()
          .add(PrincipalAccessBoundaryPolicy.getDescriptor())
          .add(Empty.getDescriptor())
          .add(OperationMetadata.getDescriptor())
          .build();
  // CreatePrincipalAccessBoundaryPolicy:
  //   POST /v3/{parent=organizations/*/locations/*}/principalAccessBoundaryPolicies
  //   Query params: principalAccessBoundaryPolicyId, validateOnly.
  //   Body: the principalAccessBoundaryPolicy message. Long-running: the Operation
  //   response is wrapped into an HttpJsonOperationSnapshot.
  private static final ApiMethodDescriptor<CreatePrincipalAccessBoundaryPolicyRequest, Operation>
      createPrincipalAccessBoundaryPolicyMethodDescriptor =
          ApiMethodDescriptor.<CreatePrincipalAccessBoundaryPolicyRequest, Operation>newBuilder()
              .setFullMethodName(
                  "google.iam.v3.PrincipalAccessBoundaryPolicies/CreatePrincipalAccessBoundaryPolicy")
              .setHttpMethod("POST")
              .setType(ApiMethodDescriptor.MethodType.UNARY)
              .setRequestFormatter(
                  ProtoMessageRequestFormatter
                      .<CreatePrincipalAccessBoundaryPolicyRequest>newBuilder()
                      .setPath(
                          "/v3/{parent=organizations/*/locations/*}/principalAccessBoundaryPolicies",
                          request -> {
                            Map<String, String> fields = new HashMap<>();
                            ProtoRestSerializer<CreatePrincipalAccessBoundaryPolicyRequest>
                                serializer = ProtoRestSerializer.create();
                            serializer.putPathParam(fields, "parent", request.getParent());
                            return fields;
                          })
                      .setQueryParamsExtractor(
                          request -> {
                            Map<String, List<String>> fields = new HashMap<>();
                            ProtoRestSerializer<CreatePrincipalAccessBoundaryPolicyRequest>
                                serializer = ProtoRestSerializer.create();
                            serializer.putQueryParam(
                                fields,
                                "principalAccessBoundaryPolicyId",
                                request.getPrincipalAccessBoundaryPolicyId());
                            serializer.putQueryParam(
                                fields, "validateOnly", request.getValidateOnly());
                            serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                            return fields;
                          })
                      .setRequestBodyExtractor(
                          request ->
                              ProtoRestSerializer.create()
                                  .toBody(
                                      "principalAccessBoundaryPolicy",
                                      request.getPrincipalAccessBoundaryPolicy(),
                                      true))
                      .build())
              .setResponseParser(
                  ProtoMessageResponseParser.<Operation>newBuilder()
                      .setDefaultInstance(Operation.getDefaultInstance())
                      .setDefaultTypeRegistry(typeRegistry)
                      .build())
              .setOperationSnapshotFactory(
                  (CreatePrincipalAccessBoundaryPolicyRequest request, Operation response) ->
                      HttpJsonOperationSnapshot.create(response))
              .build();
  // GetPrincipalAccessBoundaryPolicy:
  //   GET /v3/{name=organizations/*/locations/*/principalAccessBoundaryPolicies/*}
  //   No body, no extra query params; returns the policy resource directly.
  private static final ApiMethodDescriptor<
          GetPrincipalAccessBoundaryPolicyRequest, PrincipalAccessBoundaryPolicy>
      getPrincipalAccessBoundaryPolicyMethodDescriptor =
          ApiMethodDescriptor
              .<GetPrincipalAccessBoundaryPolicyRequest, PrincipalAccessBoundaryPolicy>newBuilder()
              .setFullMethodName(
                  "google.iam.v3.PrincipalAccessBoundaryPolicies/GetPrincipalAccessBoundaryPolicy")
              .setHttpMethod("GET")
              .setType(ApiMethodDescriptor.MethodType.UNARY)
              .setRequestFormatter(
                  ProtoMessageRequestFormatter.<GetPrincipalAccessBoundaryPolicyRequest>newBuilder()
                      .setPath(
                          "/v3/{name=organizations/*/locations/*/principalAccessBoundaryPolicies/*}",
                          request -> {
                            Map<String, String> fields = new HashMap<>();
                            ProtoRestSerializer<GetPrincipalAccessBoundaryPolicyRequest>
                                serializer = ProtoRestSerializer.create();
                            serializer.putPathParam(fields, "name", request.getName());
                            return fields;
                          })
                      .setQueryParamsExtractor(
                          request -> {
                            Map<String, List<String>> fields = new HashMap<>();
                            ProtoRestSerializer<GetPrincipalAccessBoundaryPolicyRequest>
                                serializer = ProtoRestSerializer.create();
                            serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                            return fields;
                          })
                      .setRequestBodyExtractor(request -> null)
                      .build())
              .setResponseParser(
                  ProtoMessageResponseParser.<PrincipalAccessBoundaryPolicy>newBuilder()
                      .setDefaultInstance(PrincipalAccessBoundaryPolicy.getDefaultInstance())
                      .setDefaultTypeRegistry(typeRegistry)
                      .build())
              .build();
  // UpdatePrincipalAccessBoundaryPolicy:
  //   PATCH /v3/{principalAccessBoundaryPolicy.name=organizations/*/locations/*/principalAccessBoundaryPolicies/*}
  //   Query params: updateMask, validateOnly. Body: the policy message.
  //   Long-running: Operation response wrapped into an HttpJsonOperationSnapshot.
  private static final ApiMethodDescriptor<UpdatePrincipalAccessBoundaryPolicyRequest, Operation>
      updatePrincipalAccessBoundaryPolicyMethodDescriptor =
          ApiMethodDescriptor.<UpdatePrincipalAccessBoundaryPolicyRequest, Operation>newBuilder()
              .setFullMethodName(
                  "google.iam.v3.PrincipalAccessBoundaryPolicies/UpdatePrincipalAccessBoundaryPolicy")
              .setHttpMethod("PATCH")
              .setType(ApiMethodDescriptor.MethodType.UNARY)
              .setRequestFormatter(
                  ProtoMessageRequestFormatter
                      .<UpdatePrincipalAccessBoundaryPolicyRequest>newBuilder()
                      .setPath(
                          "/v3/{principalAccessBoundaryPolicy.name=organizations/*/locations/*/principalAccessBoundaryPolicies/*}",
                          request -> {
                            Map<String, String> fields = new HashMap<>();
                            ProtoRestSerializer<UpdatePrincipalAccessBoundaryPolicyRequest>
                                serializer = ProtoRestSerializer.create();
                            serializer.putPathParam(
                                fields,
                                "principalAccessBoundaryPolicy.name",
                                request.getPrincipalAccessBoundaryPolicy().getName());
                            return fields;
                          })
                      .setQueryParamsExtractor(
                          request -> {
                            Map<String, List<String>> fields = new HashMap<>();
                            ProtoRestSerializer<UpdatePrincipalAccessBoundaryPolicyRequest>
                                serializer = ProtoRestSerializer.create();
                            serializer.putQueryParam(fields, "updateMask", request.getUpdateMask());
                            serializer.putQueryParam(
                                fields, "validateOnly", request.getValidateOnly());
                            serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                            return fields;
                          })
                      .setRequestBodyExtractor(
                          request ->
                              ProtoRestSerializer.create()
                                  .toBody(
                                      "principalAccessBoundaryPolicy",
                                      request.getPrincipalAccessBoundaryPolicy(),
                                      true))
                      .build())
              .setResponseParser(
                  ProtoMessageResponseParser.<Operation>newBuilder()
                      .setDefaultInstance(Operation.getDefaultInstance())
                      .setDefaultTypeRegistry(typeRegistry)
                      .build())
              .setOperationSnapshotFactory(
                  (UpdatePrincipalAccessBoundaryPolicyRequest request, Operation response) ->
                      HttpJsonOperationSnapshot.create(response))
              .build();
  // DeletePrincipalAccessBoundaryPolicy:
  //   DELETE /v3/{name=organizations/*/locations/*/principalAccessBoundaryPolicies/*}
  //   Query params: etag, force, validateOnly. No body.
  //   Long-running: Operation response wrapped into an HttpJsonOperationSnapshot.
  private static final ApiMethodDescriptor<DeletePrincipalAccessBoundaryPolicyRequest, Operation>
      deletePrincipalAccessBoundaryPolicyMethodDescriptor =
          ApiMethodDescriptor.<DeletePrincipalAccessBoundaryPolicyRequest, Operation>newBuilder()
              .setFullMethodName(
                  "google.iam.v3.PrincipalAccessBoundaryPolicies/DeletePrincipalAccessBoundaryPolicy")
              .setHttpMethod("DELETE")
              .setType(ApiMethodDescriptor.MethodType.UNARY)
              .setRequestFormatter(
                  ProtoMessageRequestFormatter
                      .<DeletePrincipalAccessBoundaryPolicyRequest>newBuilder()
                      .setPath(
                          "/v3/{name=organizations/*/locations/*/principalAccessBoundaryPolicies/*}",
                          request -> {
                            Map<String, String> fields = new HashMap<>();
                            ProtoRestSerializer<DeletePrincipalAccessBoundaryPolicyRequest>
                                serializer = ProtoRestSerializer.create();
                            serializer.putPathParam(fields, "name", request.getName());
                            return fields;
                          })
                      .setQueryParamsExtractor(
                          request -> {
                            Map<String, List<String>> fields = new HashMap<>();
                            ProtoRestSerializer<DeletePrincipalAccessBoundaryPolicyRequest>
                                serializer = ProtoRestSerializer.create();
                            serializer.putQueryParam(fields, "etag", request.getEtag());
                            serializer.putQueryParam(fields, "force", request.getForce());
                            serializer.putQueryParam(
                                fields, "validateOnly", request.getValidateOnly());
                            serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                            return fields;
                          })
                      .setRequestBodyExtractor(request -> null)
                      .build())
              .setResponseParser(
                  ProtoMessageResponseParser.<Operation>newBuilder()
                      .setDefaultInstance(Operation.getDefaultInstance())
                      .setDefaultTypeRegistry(typeRegistry)
                      .build())
              .setOperationSnapshotFactory(
                  (DeletePrincipalAccessBoundaryPolicyRequest request, Operation response) ->
                      HttpJsonOperationSnapshot.create(response))
              .build();
  // ListPrincipalAccessBoundaryPolicies:
  //   GET /v3/{parent=organizations/*/locations/*}/principalAccessBoundaryPolicies
  //   Paginated: pageSize/pageToken query params; no body.
  private static final ApiMethodDescriptor<
          ListPrincipalAccessBoundaryPoliciesRequest, ListPrincipalAccessBoundaryPoliciesResponse>
      listPrincipalAccessBoundaryPoliciesMethodDescriptor =
          ApiMethodDescriptor
              .<ListPrincipalAccessBoundaryPoliciesRequest,
                  ListPrincipalAccessBoundaryPoliciesResponse>
                  newBuilder()
              .setFullMethodName(
                  "google.iam.v3.PrincipalAccessBoundaryPolicies/ListPrincipalAccessBoundaryPolicies")
              .setHttpMethod("GET")
              .setType(ApiMethodDescriptor.MethodType.UNARY)
              .setRequestFormatter(
                  ProtoMessageRequestFormatter
                      .<ListPrincipalAccessBoundaryPoliciesRequest>newBuilder()
                      .setPath(
                          "/v3/{parent=organizations/*/locations/*}/principalAccessBoundaryPolicies",
                          request -> {
                            Map<String, String> fields = new HashMap<>();
                            ProtoRestSerializer<ListPrincipalAccessBoundaryPoliciesRequest>
                                serializer = ProtoRestSerializer.create();
                            serializer.putPathParam(fields, "parent", request.getParent());
                            return fields;
                          })
                      .setQueryParamsExtractor(
                          request -> {
                            Map<String, List<String>> fields = new HashMap<>();
                            ProtoRestSerializer<ListPrincipalAccessBoundaryPoliciesRequest>
                                serializer = ProtoRestSerializer.create();
                            serializer.putQueryParam(fields, "pageSize", request.getPageSize());
                            serializer.putQueryParam(fields, "pageToken", request.getPageToken());
                            serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                            return fields;
                          })
                      .setRequestBodyExtractor(request -> null)
                      .build())
              .setResponseParser(
                  ProtoMessageResponseParser
                      .<ListPrincipalAccessBoundaryPoliciesResponse>newBuilder()
                      .setDefaultInstance(
                          ListPrincipalAccessBoundaryPoliciesResponse.getDefaultInstance())
                      .setDefaultTypeRegistry(typeRegistry)
                      .build())
              .build();
  // REST transcoding descriptor for SearchPrincipalAccessBoundaryPolicyBindings: a unary GET
  // on a policy's :searchPolicyBindings custom verb, paged via pageSize/pageToken; no body.
  private static final ApiMethodDescriptor<
          SearchPrincipalAccessBoundaryPolicyBindingsRequest,
          SearchPrincipalAccessBoundaryPolicyBindingsResponse>
      searchPrincipalAccessBoundaryPolicyBindingsMethodDescriptor =
          ApiMethodDescriptor
              .<SearchPrincipalAccessBoundaryPolicyBindingsRequest,
                  SearchPrincipalAccessBoundaryPolicyBindingsResponse>
                  newBuilder()
              .setFullMethodName(
                  "google.iam.v3.PrincipalAccessBoundaryPolicies/SearchPrincipalAccessBoundaryPolicyBindings")
              .setHttpMethod("GET")
              .setType(ApiMethodDescriptor.MethodType.UNARY)
              .setRequestFormatter(
                  ProtoMessageRequestFormatter
                      .<SearchPrincipalAccessBoundaryPolicyBindingsRequest>newBuilder()
                      .setPath(
                          "/v3/{name=organizations/*/locations/*/principalAccessBoundaryPolicies/*}:searchPolicyBindings",
                          request -> {
                            // Bind the policy resource "name" into the URL path template.
                            Map<String, String> fields = new HashMap<>();
                            ProtoRestSerializer<SearchPrincipalAccessBoundaryPolicyBindingsRequest>
                                serializer = ProtoRestSerializer.create();
                            serializer.putPathParam(fields, "name", request.getName());
                            return fields;
                          })
                      .setQueryParamsExtractor(
                          request -> {
                            Map<String, List<String>> fields = new HashMap<>();
                            ProtoRestSerializer<SearchPrincipalAccessBoundaryPolicyBindingsRequest>
                                serializer = ProtoRestSerializer.create();
                            serializer.putQueryParam(fields, "pageSize", request.getPageSize());
                            serializer.putQueryParam(fields, "pageToken", request.getPageToken());
                            // Ask the endpoint for JSON with enums encoded as integers.
                            serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                            return fields;
                          })
                      // GET request: no HTTP body.
                      .setRequestBodyExtractor(request -> null)
                      .build())
              .setResponseParser(
                  ProtoMessageResponseParser
                      .<SearchPrincipalAccessBoundaryPolicyBindingsResponse>newBuilder()
                      .setDefaultInstance(
                          SearchPrincipalAccessBoundaryPolicyBindingsResponse.getDefaultInstance())
                      .setDefaultTypeRegistry(typeRegistry)
                      .build())
              .build();
  // Per-RPC callables, all wired exactly once in the constructor below.
  private final UnaryCallable<CreatePrincipalAccessBoundaryPolicyRequest, Operation>
      createPrincipalAccessBoundaryPolicyCallable;
  // Long-running-operation variant that resolves to the created policy resource.
  private final OperationCallable<
          CreatePrincipalAccessBoundaryPolicyRequest,
          PrincipalAccessBoundaryPolicy,
          OperationMetadata>
      createPrincipalAccessBoundaryPolicyOperationCallable;
  private final UnaryCallable<
          GetPrincipalAccessBoundaryPolicyRequest, PrincipalAccessBoundaryPolicy>
      getPrincipalAccessBoundaryPolicyCallable;
  private final UnaryCallable<UpdatePrincipalAccessBoundaryPolicyRequest, Operation>
      updatePrincipalAccessBoundaryPolicyCallable;
  private final OperationCallable<
          UpdatePrincipalAccessBoundaryPolicyRequest,
          PrincipalAccessBoundaryPolicy,
          OperationMetadata>
      updatePrincipalAccessBoundaryPolicyOperationCallable;
  private final UnaryCallable<DeletePrincipalAccessBoundaryPolicyRequest, Operation>
      deletePrincipalAccessBoundaryPolicyCallable;
  // Delete LRO resolves to Empty on success.
  private final OperationCallable<
          DeletePrincipalAccessBoundaryPolicyRequest, Empty, OperationMetadata>
      deletePrincipalAccessBoundaryPolicyOperationCallable;
  private final UnaryCallable<
          ListPrincipalAccessBoundaryPoliciesRequest, ListPrincipalAccessBoundaryPoliciesResponse>
      listPrincipalAccessBoundaryPoliciesCallable;
  private final UnaryCallable<
          ListPrincipalAccessBoundaryPoliciesRequest,
          ListPrincipalAccessBoundaryPoliciesPagedResponse>
      listPrincipalAccessBoundaryPoliciesPagedCallable;
  private final UnaryCallable<
          SearchPrincipalAccessBoundaryPolicyBindingsRequest,
          SearchPrincipalAccessBoundaryPolicyBindingsResponse>
      searchPrincipalAccessBoundaryPolicyBindingsCallable;
  private final UnaryCallable<
          SearchPrincipalAccessBoundaryPolicyBindingsRequest,
          SearchPrincipalAccessBoundaryPolicyBindingsPagedResponse>
      searchPrincipalAccessBoundaryPolicyBindingsPagedCallable;
  // Aggregates everything that must be released when close() is called.
  private final BackgroundResource backgroundResources;
  // Shared stub for polling google.longrunning Operations over HTTP/JSON.
  private final HttpJsonOperationsStub httpJsonOperationsStub;
  private final HttpJsonStubCallableFactory callableFactory;
  /** Creates a stub with a fresh {@code ClientContext} derived from the given settings. */
  public static final HttpJsonPrincipalAccessBoundaryPoliciesStub create(
      PrincipalAccessBoundaryPoliciesStubSettings settings) throws IOException {
    return new HttpJsonPrincipalAccessBoundaryPoliciesStub(
        settings, ClientContext.create(settings));
  }
  /** Creates a stub with default HTTP/JSON settings, reusing the supplied client context. */
  public static final HttpJsonPrincipalAccessBoundaryPoliciesStub create(
      ClientContext clientContext) throws IOException {
    return new HttpJsonPrincipalAccessBoundaryPoliciesStub(
        PrincipalAccessBoundaryPoliciesStubSettings.newHttpJsonBuilder().build(), clientContext);
  }
  /** Creates a stub with default settings, a supplied context and a custom callable factory. */
  public static final HttpJsonPrincipalAccessBoundaryPoliciesStub create(
      ClientContext clientContext, HttpJsonStubCallableFactory callableFactory) throws IOException {
    return new HttpJsonPrincipalAccessBoundaryPoliciesStub(
        PrincipalAccessBoundaryPoliciesStubSettings.newHttpJsonBuilder().build(),
        clientContext,
        callableFactory);
  }
  /**
   * Constructs an instance of HttpJsonPrincipalAccessBoundaryPoliciesStub, using the given
   * settings. This is protected so that it is easy to make a subclass, but otherwise, the static
   * factory methods should be preferred.
   */
  protected HttpJsonPrincipalAccessBoundaryPoliciesStub(
      PrincipalAccessBoundaryPoliciesStubSettings settings, ClientContext clientContext)
      throws IOException {
    // Delegate to the full constructor using the default callable factory.
    this(settings, clientContext, new HttpJsonPrincipalAccessBoundaryPoliciesCallableFactory());
  }
  /**
   * Constructs an instance of HttpJsonPrincipalAccessBoundaryPoliciesStub, using the given
   * settings. This is protected so that it is easy to make a subclass, but otherwise, the static
   * factory methods should be preferred.
   */
  protected HttpJsonPrincipalAccessBoundaryPoliciesStub(
      PrincipalAccessBoundaryPoliciesStubSettings settings,
      ClientContext clientContext,
      HttpJsonStubCallableFactory callableFactory)
      throws IOException {
    this.callableFactory = callableFactory;
    // Operations stub used to poll the LROs returned by create/update/delete. The HttpRule map
    // teaches it how GetOperation transcodes to HTTP for each parent resource collection.
    this.httpJsonOperationsStub =
        HttpJsonOperationsStub.create(
            clientContext,
            callableFactory,
            typeRegistry,
            ImmutableMap.<String, HttpRule>builder()
                .put(
                    "google.longrunning.Operations.GetOperation",
                    HttpRule.newBuilder()
                        .setGet("/v3/{name=projects/*/locations/*/operations/*}")
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet("/v3/{name=folders/*/locations/*/operations/*}")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet("/v3/{name=organizations/*/locations/*/operations/*}")
                                .build())
                        .build())
                .build());
    // Transport settings pair each method descriptor with a routing-header extractor; the
    // extracted params become the x-goog-request-params header for server-side routing.
    HttpJsonCallSettings<CreatePrincipalAccessBoundaryPolicyRequest, Operation>
        createPrincipalAccessBoundaryPolicyTransportSettings =
            HttpJsonCallSettings.<CreatePrincipalAccessBoundaryPolicyRequest, Operation>newBuilder()
                .setMethodDescriptor(createPrincipalAccessBoundaryPolicyMethodDescriptor)
                .setTypeRegistry(typeRegistry)
                .setParamsExtractor(
                    request -> {
                      RequestParamsBuilder builder = RequestParamsBuilder.create();
                      builder.add("parent", String.valueOf(request.getParent()));
                      return builder.build();
                    })
                .build();
    HttpJsonCallSettings<GetPrincipalAccessBoundaryPolicyRequest, PrincipalAccessBoundaryPolicy>
        getPrincipalAccessBoundaryPolicyTransportSettings =
            HttpJsonCallSettings
                .<GetPrincipalAccessBoundaryPolicyRequest, PrincipalAccessBoundaryPolicy>
                    newBuilder()
                .setMethodDescriptor(getPrincipalAccessBoundaryPolicyMethodDescriptor)
                .setTypeRegistry(typeRegistry)
                .setParamsExtractor(
                    request -> {
                      RequestParamsBuilder builder = RequestParamsBuilder.create();
                      builder.add("name", String.valueOf(request.getName()));
                      return builder.build();
                    })
                .build();
    HttpJsonCallSettings<UpdatePrincipalAccessBoundaryPolicyRequest, Operation>
        updatePrincipalAccessBoundaryPolicyTransportSettings =
            HttpJsonCallSettings.<UpdatePrincipalAccessBoundaryPolicyRequest, Operation>newBuilder()
                .setMethodDescriptor(updatePrincipalAccessBoundaryPolicyMethodDescriptor)
                .setTypeRegistry(typeRegistry)
                .setParamsExtractor(
                    request -> {
                      RequestParamsBuilder builder = RequestParamsBuilder.create();
                      // Update routes on the embedded resource's name field.
                      builder.add(
                          "principal_access_boundary_policy.name",
                          String.valueOf(request.getPrincipalAccessBoundaryPolicy().getName()));
                      return builder.build();
                    })
                .build();
    HttpJsonCallSettings<DeletePrincipalAccessBoundaryPolicyRequest, Operation>
        deletePrincipalAccessBoundaryPolicyTransportSettings =
            HttpJsonCallSettings.<DeletePrincipalAccessBoundaryPolicyRequest, Operation>newBuilder()
                .setMethodDescriptor(deletePrincipalAccessBoundaryPolicyMethodDescriptor)
                .setTypeRegistry(typeRegistry)
                .setParamsExtractor(
                    request -> {
                      RequestParamsBuilder builder = RequestParamsBuilder.create();
                      builder.add("name", String.valueOf(request.getName()));
                      return builder.build();
                    })
                .build();
    HttpJsonCallSettings<
            ListPrincipalAccessBoundaryPoliciesRequest, ListPrincipalAccessBoundaryPoliciesResponse>
        listPrincipalAccessBoundaryPoliciesTransportSettings =
            HttpJsonCallSettings
                .<ListPrincipalAccessBoundaryPoliciesRequest,
                    ListPrincipalAccessBoundaryPoliciesResponse>
                    newBuilder()
                .setMethodDescriptor(listPrincipalAccessBoundaryPoliciesMethodDescriptor)
                .setTypeRegistry(typeRegistry)
                .setParamsExtractor(
                    request -> {
                      RequestParamsBuilder builder = RequestParamsBuilder.create();
                      builder.add("parent", String.valueOf(request.getParent()));
                      return builder.build();
                    })
                .build();
    HttpJsonCallSettings<
            SearchPrincipalAccessBoundaryPolicyBindingsRequest,
            SearchPrincipalAccessBoundaryPolicyBindingsResponse>
        searchPrincipalAccessBoundaryPolicyBindingsTransportSettings =
            HttpJsonCallSettings
                .<SearchPrincipalAccessBoundaryPolicyBindingsRequest,
                    SearchPrincipalAccessBoundaryPolicyBindingsResponse>
                    newBuilder()
                .setMethodDescriptor(searchPrincipalAccessBoundaryPolicyBindingsMethodDescriptor)
                .setTypeRegistry(typeRegistry)
                .setParamsExtractor(
                    request -> {
                      RequestParamsBuilder builder = RequestParamsBuilder.create();
                      builder.add("name", String.valueOf(request.getName()));
                      return builder.build();
                    })
                .build();
    // Build the final callables from transport settings + retry/timeout settings. LRO methods
    // get both a raw Operation callable and an OperationCallable that polls to completion;
    // List/Search additionally get paged variants.
    this.createPrincipalAccessBoundaryPolicyCallable =
        callableFactory.createUnaryCallable(
            createPrincipalAccessBoundaryPolicyTransportSettings,
            settings.createPrincipalAccessBoundaryPolicySettings(),
            clientContext);
    this.createPrincipalAccessBoundaryPolicyOperationCallable =
        callableFactory.createOperationCallable(
            createPrincipalAccessBoundaryPolicyTransportSettings,
            settings.createPrincipalAccessBoundaryPolicyOperationSettings(),
            clientContext,
            httpJsonOperationsStub);
    this.getPrincipalAccessBoundaryPolicyCallable =
        callableFactory.createUnaryCallable(
            getPrincipalAccessBoundaryPolicyTransportSettings,
            settings.getPrincipalAccessBoundaryPolicySettings(),
            clientContext);
    this.updatePrincipalAccessBoundaryPolicyCallable =
        callableFactory.createUnaryCallable(
            updatePrincipalAccessBoundaryPolicyTransportSettings,
            settings.updatePrincipalAccessBoundaryPolicySettings(),
            clientContext);
    this.updatePrincipalAccessBoundaryPolicyOperationCallable =
        callableFactory.createOperationCallable(
            updatePrincipalAccessBoundaryPolicyTransportSettings,
            settings.updatePrincipalAccessBoundaryPolicyOperationSettings(),
            clientContext,
            httpJsonOperationsStub);
    this.deletePrincipalAccessBoundaryPolicyCallable =
        callableFactory.createUnaryCallable(
            deletePrincipalAccessBoundaryPolicyTransportSettings,
            settings.deletePrincipalAccessBoundaryPolicySettings(),
            clientContext);
    this.deletePrincipalAccessBoundaryPolicyOperationCallable =
        callableFactory.createOperationCallable(
            deletePrincipalAccessBoundaryPolicyTransportSettings,
            settings.deletePrincipalAccessBoundaryPolicyOperationSettings(),
            clientContext,
            httpJsonOperationsStub);
    this.listPrincipalAccessBoundaryPoliciesCallable =
        callableFactory.createUnaryCallable(
            listPrincipalAccessBoundaryPoliciesTransportSettings,
            settings.listPrincipalAccessBoundaryPoliciesSettings(),
            clientContext);
    this.listPrincipalAccessBoundaryPoliciesPagedCallable =
        callableFactory.createPagedCallable(
            listPrincipalAccessBoundaryPoliciesTransportSettings,
            settings.listPrincipalAccessBoundaryPoliciesSettings(),
            clientContext);
    this.searchPrincipalAccessBoundaryPolicyBindingsCallable =
        callableFactory.createUnaryCallable(
            searchPrincipalAccessBoundaryPolicyBindingsTransportSettings,
            settings.searchPrincipalAccessBoundaryPolicyBindingsSettings(),
            clientContext);
    this.searchPrincipalAccessBoundaryPolicyBindingsPagedCallable =
        callableFactory.createPagedCallable(
            searchPrincipalAccessBoundaryPolicyBindingsTransportSettings,
            settings.searchPrincipalAccessBoundaryPolicyBindingsSettings(),
            clientContext);
    // Register every background resource for release in close().
    this.backgroundResources =
        new BackgroundResourceAggregation(clientContext.getBackgroundResources());
  }
@InternalApi
public static List<ApiMethodDescriptor> getMethodDescriptors() {
List<ApiMethodDescriptor> methodDescriptors = new ArrayList<>();
methodDescriptors.add(createPrincipalAccessBoundaryPolicyMethodDescriptor);
methodDescriptors.add(getPrincipalAccessBoundaryPolicyMethodDescriptor);
methodDescriptors.add(updatePrincipalAccessBoundaryPolicyMethodDescriptor);
methodDescriptors.add(deletePrincipalAccessBoundaryPolicyMethodDescriptor);
methodDescriptors.add(listPrincipalAccessBoundaryPoliciesMethodDescriptor);
methodDescriptors.add(searchPrincipalAccessBoundaryPolicyBindingsMethodDescriptor);
return methodDescriptors;
}
  /** Returns the shared stub used to poll long-running operations over HTTP/JSON. */
  public HttpJsonOperationsStub getHttpJsonOperationsStub() {
    return httpJsonOperationsStub;
  }
  // Trivial accessors below return the callables that were wired in the constructor.
  @Override
  public UnaryCallable<CreatePrincipalAccessBoundaryPolicyRequest, Operation>
      createPrincipalAccessBoundaryPolicyCallable() {
    return createPrincipalAccessBoundaryPolicyCallable;
  }
  @Override
  public OperationCallable<
          CreatePrincipalAccessBoundaryPolicyRequest,
          PrincipalAccessBoundaryPolicy,
          OperationMetadata>
      createPrincipalAccessBoundaryPolicyOperationCallable() {
    return createPrincipalAccessBoundaryPolicyOperationCallable;
  }
  @Override
  public UnaryCallable<GetPrincipalAccessBoundaryPolicyRequest, PrincipalAccessBoundaryPolicy>
      getPrincipalAccessBoundaryPolicyCallable() {
    return getPrincipalAccessBoundaryPolicyCallable;
  }
  @Override
  public UnaryCallable<UpdatePrincipalAccessBoundaryPolicyRequest, Operation>
      updatePrincipalAccessBoundaryPolicyCallable() {
    return updatePrincipalAccessBoundaryPolicyCallable;
  }
  @Override
  public OperationCallable<
          UpdatePrincipalAccessBoundaryPolicyRequest,
          PrincipalAccessBoundaryPolicy,
          OperationMetadata>
      updatePrincipalAccessBoundaryPolicyOperationCallable() {
    return updatePrincipalAccessBoundaryPolicyOperationCallable;
  }
  @Override
  public UnaryCallable<DeletePrincipalAccessBoundaryPolicyRequest, Operation>
      deletePrincipalAccessBoundaryPolicyCallable() {
    return deletePrincipalAccessBoundaryPolicyCallable;
  }
  @Override
  public OperationCallable<DeletePrincipalAccessBoundaryPolicyRequest, Empty, OperationMetadata>
      deletePrincipalAccessBoundaryPolicyOperationCallable() {
    return deletePrincipalAccessBoundaryPolicyOperationCallable;
  }
  @Override
  public UnaryCallable<
          ListPrincipalAccessBoundaryPoliciesRequest, ListPrincipalAccessBoundaryPoliciesResponse>
      listPrincipalAccessBoundaryPoliciesCallable() {
    return listPrincipalAccessBoundaryPoliciesCallable;
  }
  @Override
  public UnaryCallable<
          ListPrincipalAccessBoundaryPoliciesRequest,
          ListPrincipalAccessBoundaryPoliciesPagedResponse>
      listPrincipalAccessBoundaryPoliciesPagedCallable() {
    return listPrincipalAccessBoundaryPoliciesPagedCallable;
  }
  @Override
  public UnaryCallable<
          SearchPrincipalAccessBoundaryPolicyBindingsRequest,
          SearchPrincipalAccessBoundaryPolicyBindingsResponse>
      searchPrincipalAccessBoundaryPolicyBindingsCallable() {
    return searchPrincipalAccessBoundaryPolicyBindingsCallable;
  }
  @Override
  public UnaryCallable<
          SearchPrincipalAccessBoundaryPolicyBindingsRequest,
          SearchPrincipalAccessBoundaryPolicyBindingsPagedResponse>
      searchPrincipalAccessBoundaryPolicyBindingsPagedCallable() {
    return searchPrincipalAccessBoundaryPolicyBindingsPagedCallable;
  }
  @Override
  public final void close() {
    try {
      backgroundResources.close();
    } catch (RuntimeException e) {
      // Unchecked failures propagate unchanged.
      throw e;
    } catch (Exception e) {
      // close() declares no checked exceptions, so wrap anything checked.
      throw new IllegalStateException("Failed to close resource", e);
    }
  }
  // Lifecycle methods simply delegate to the aggregated background resources.
  @Override
  public void shutdown() {
    backgroundResources.shutdown();
  }
  @Override
  public boolean isShutdown() {
    return backgroundResources.isShutdown();
  }
  @Override
  public boolean isTerminated() {
    return backgroundResources.isTerminated();
  }
  @Override
  public void shutdownNow() {
    backgroundResources.shutdownNow();
  }
  @Override
  public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException {
    return backgroundResources.awaitTermination(duration, unit);
  }
}
|
openjdk/jdk8 | 36,891 | jdk/src/share/classes/java/awt/MenuComponent.java | /*
* Copyright (c) 1995, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package java.awt;
import java.awt.peer.MenuComponentPeer;
import java.awt.event.ActionEvent;
import java.io.IOException;
import java.io.ObjectInputStream;
import sun.awt.AppContext;
import sun.awt.AWTAccessor;
import javax.accessibility.*;
import java.security.AccessControlContext;
import java.security.AccessController;
/**
* The abstract class <code>MenuComponent</code> is the superclass
* of all menu-related components. In this respect, the class
* <code>MenuComponent</code> is analogous to the abstract superclass
* <code>Component</code> for AWT components.
* <p>
* Menu components receive and process AWT events, just as components do,
* through the method <code>processEvent</code>.
*
* @author Arthur van Hoff
* @since JDK1.0
*/
public abstract class MenuComponent implements java.io.Serializable {
    static {
        /* ensure that the necessary native libraries are loaded */
        Toolkit.loadLibraries();
        if (!GraphicsEnvironment.isHeadless()) {
            // JNI field/method IDs are only needed (and resolvable) with a real toolkit.
            initIDs();
        }
    }
    // Runtime wiring; transient because peers and parents are re-established
    // after deserialization rather than persisted.
    transient MenuComponentPeer peer;
    transient MenuContainer parent;
    /**
     * The <code>AppContext</code> of the <code>MenuComponent</code>.
     * This is set in the constructor and never changes.
     */
    transient AppContext appContext;
    /**
     * The menu component's font. This value can be
     * <code>null</code> at which point a default will be used.
     * This defaults to <code>null</code>.
     *
     * @serial
     * @see #setFont(Font)
     * @see #getFont()
     */
    Font font;
    /**
     * The menu component's name, which defaults to <code>null</code>.
     * @serial
     * @see #getName()
     * @see #setName(String)
     */
    private String name;
    /**
     * A variable to indicate whether a name is explicitly set.
     * If <code>true</code> the name will be set explicitly.
     * This defaults to <code>false</code>.
     * @serial
     * @see #setName(String)
     */
    private boolean nameExplicitlySet = false;
    /**
     * Defaults to <code>false</code>.
     * @serial
     * @see #dispatchEvent(AWTEvent)
     */
    boolean newEventsOnly = false;
    /*
     * The menu's AccessControlContext. Volatile: it is re-assigned during
     * deserialization (readObject) and read without synchronization.
     */
    private transient volatile AccessControlContext acc =
        AccessController.getContext();
    /*
     * Returns the acc this menu component was constructed with.
     * Throws SecurityException rather than returning null so callers cannot
     * silently run with a missing context.
     */
    final AccessControlContext getAccessControlContext() {
        if (acc == null) {
            throw new SecurityException(
                "MenuComponent is missing AccessControlContext");
        }
        return acc;
    }
    /*
     * Internal constants for serialization.
     */
    final static String actionListenerK = Component.actionListenerK;
    final static String itemListenerK = Component.itemListenerK;
    /*
     * JDK 1.1 serialVersionUID
     */
    private static final long serialVersionUID = -4536902356223894379L;
    static {
        // Expose package-private state (appContext, parent, font) to sun.awt
        // without reflection, via the shared AWTAccessor back door.
        AWTAccessor.setMenuComponentAccessor(
            new AWTAccessor.MenuComponentAccessor() {
                public AppContext getAppContext(MenuComponent menuComp) {
                    return menuComp.appContext;
                }
                public void setAppContext(MenuComponent menuComp,
                                          AppContext appContext) {
                    menuComp.appContext = appContext;
                }
                public MenuContainer getParent(MenuComponent menuComp) {
                    return menuComp.parent;
                }
                public Font getFont_NoClientCode(MenuComponent menuComp) {
                    return menuComp.getFont_NoClientCode();
                }
            });
    }
    /**
     * Creates a <code>MenuComponent</code>.
     * @exception HeadlessException if
     *    <code>GraphicsEnvironment.isHeadless</code>
     *    returns <code>true</code>
     * @see java.awt.GraphicsEnvironment#isHeadless
     */
    public MenuComponent() throws HeadlessException {
        GraphicsEnvironment.checkHeadless();
        // Pin the component to the AppContext of the creating thread.
        appContext = AppContext.getAppContext();
    }
    /**
     * Constructs a name for this <code>MenuComponent</code>.
     * Called by <code>getName</code> when the name is <code>null</code>.
     * @return a name for this <code>MenuComponent</code>
     */
    String constructComponentName() {
        return null; // For strict compliance with prior platform versions, a MenuComponent
                     // that doesn't set its name should return null from
                     // getName()
    }
    /**
     * Gets the name of the menu component.
     * @return          the name of the menu component
     * @see             java.awt.MenuComponent#setName(java.lang.String)
     * @since           JDK1.1
     */
    public String getName() {
        // Lazily construct a default name on first access, unless a caller
        // explicitly set one (including explicitly setting null).
        // NOTE(review): classic double-checked lazy init; "name" is not
        // volatile, so an unsynchronized reader may re-enter the lock once —
        // benign here since the value only transitions null -> constructed.
        if (name == null && !nameExplicitlySet) {
            synchronized(this) {
                if (name == null && !nameExplicitlySet)
                    name = constructComponentName();
            }
        }
        return name;
    }
    /**
     * Sets the name of the component to the specified string.
     * @param         name    the name of the menu component
     * @see           java.awt.MenuComponent#getName
     * @since         JDK1.1
     */
    public void setName(String name) {
        synchronized(this) {
            this.name = name;
            // Suppress lazy default construction from now on, even for null.
            nameExplicitlySet = true;
        }
    }
    /**
     * Returns the parent container for this menu component.
     * @return    the menu component containing this menu component,
     *                 or <code>null</code> if this menu component
     *                 is the outermost component, the menu bar itself
     */
    public MenuContainer getParent() {
        return getParent_NoClientCode();
    }
    // NOTE: This method may be called by privileged threads.
    //       This functionality is implemented in a package-private method
    //       to insure that it cannot be overridden by client subclasses.
    //       DO NOT INVOKE CLIENT CODE ON THIS THREAD!
    final MenuContainer getParent_NoClientCode() {
        return parent;
    }
    /**
     * Returns this component's peer, or {@code null} if it has none.
     * @deprecated As of JDK version 1.1,
     * programs should not directly manipulate peers.
     */
    @Deprecated
    public MenuComponentPeer getPeer() {
        return peer;
    }
/**
* Gets the font used for this menu component.
* @return the font used in this menu component, if there is one;
* <code>null</code> otherwise
* @see java.awt.MenuComponent#setFont
*/
public Font getFont() {
Font font = this.font;
if (font != null) {
return font;
}
MenuContainer parent = this.parent;
if (parent != null) {
return parent.getFont();
}
return null;
}
    // NOTE: This method may be called by privileged threads.
    //       This functionality is implemented in a package-private method
    //       to insure that it cannot be overridden by client subclasses.
    //       DO NOT INVOKE CLIENT CODE ON THIS THREAD!
    final Font getFont_NoClientCode() {
        Font font = this.font;
        if (font != null) {
            return font;
        }
        // The MenuContainer interface does not have getFont_NoClientCode()
        // and it cannot, because it must be package-private. Because of
        // this, we must manually cast classes that implement
        // MenuContainer.
        Object parent = this.parent;
        if (parent != null) {
            if (parent instanceof Component) {
                font = ((Component)parent).getFont_NoClientCode();
            } else if (parent instanceof MenuComponent) {
                font = ((MenuComponent)parent).getFont_NoClientCode();
            }
            // Any other MenuContainer implementation falls through with
            // font == null.
        }
        return font;
    } // getFont_NoClientCode()
    /**
     * Sets the font to be used for this menu component to the specified
     * font. This font is also used by all subcomponents of this menu
     * component, unless those subcomponents specify a different font.
     * <p>
     * Some platforms may not support setting of all font attributes
     * of a menu component; in such cases, calling <code>setFont</code>
     * will have no effect on the unsupported font attributes of this
     * menu component. Unless subcomponents of this menu component
     * specify a different font, this font will be used by those
     * subcomponents if supported by the underlying platform.
     *
     * @param     f   the font to be set
     * @see       #getFont
     * @see       Font#getAttributes
     * @see       java.awt.font.TextAttribute
     */
    public void setFont(Font f) {
        font = f;
        //Fixed 6312943: NullPointerException in method MenuComponent.setFont(Font)
        // Snapshot the peer reference so a concurrent removeNotify() cannot
        // null it between the check and the call.
        MenuComponentPeer peer = this.peer;
        if (peer != null) {
            peer.setFont(f);
        }
    }
    /**
     * Removes the menu component's peer.  The peer allows us to modify the
     * appearance of the menu component without changing the functionality of
     * the menu component.
     */
    public void removeNotify() {
        synchronized (getTreeLock()) {
            MenuComponentPeer p = this.peer;
            if (p != null) {
                // Flush any pending events targeted at this component before
                // the peer goes away, then dispose the peer itself.
                Toolkit.getEventQueue().removeSourceEvents(this, true);
                this.peer = null;
                p.dispose();
            }
        }
    }
    /**
     * Posts the specified event to the menu.
     * This method is part of the Java&nbsp;1.0 event system
     * and it is maintained only for backwards compatibility.
     * Its use is discouraged, and it may not be supported
     * in the future.
     * @param evt the event which is to take place
     * @deprecated As of JDK version 1.1, replaced by {@link
     * #dispatchEvent(AWTEvent) dispatchEvent}.
     */
    @Deprecated
    public boolean postEvent(Event evt) {
        // Forward up the containment hierarchy; the parent's return value is
        // deliberately ignored and this method always reports false.
        MenuContainer parent = this.parent;
        if (parent != null) {
            parent.postEvent(evt);
        }
        return false;
    }
    /**
     * Delivers an event to this component or one of its sub components.
     * @param e the event
     */
    public final void dispatchEvent(AWTEvent e) {
        dispatchEventImpl(e);
    }
    // Core dispatch: routes new-style events to processEvent(), bubbles
    // unhandled ActionEvents to the parent, and converts everything else to
    // the legacy 1.0 Event/postEvent path for backward compatibility.
    void dispatchEventImpl(AWTEvent e) {
        EventQueue.setCurrentEventAndMostRecentTime(e);
        Toolkit.getDefaultToolkit().notifyAWTEventListeners(e);
        if (newEventsOnly ||
            (parent != null && parent instanceof MenuComponent &&
             ((MenuComponent)parent).newEventsOnly)) {
            if (eventEnabled(e)) {
                processEvent(e);
            } else if (e instanceof ActionEvent && parent != null) {
                // NOTE(review): the cast below assumes parent is a
                // MenuComponent; if newEventsOnly is set while parent is some
                // other MenuContainer this would throw ClassCastException —
                // confirm callers guarantee a MenuComponent parent here.
                e.setSource(parent);
                ((MenuComponent)parent).dispatchEvent(e);
            }
        } else { // backward compatibility
            Event olde = e.convertToOld();
            if (olde != null) {
                postEvent(olde);
            }
        }
    }
    // REMIND: remove when filtering is done at lower level
    // Subclasses override to opt specific event types into processEvent().
    boolean eventEnabled(AWTEvent e) {
        return false;
    }
    /**
     * Processes events occurring on this menu component.
     * <p>Note that if the event parameter is <code>null</code>
     * the behavior is unspecified and may result in an
     * exception.
     * <p>The base implementation does nothing; subclasses override this to
     * handle the event types they enable via <code>eventEnabled</code>.
     *
     * @param e the event
     * @since JDK1.1
     */
    protected void processEvent(AWTEvent e) {
    }
/**
* Returns a string representing the state of this
* <code>MenuComponent</code>. This method is intended to be used
* only for debugging purposes, and the content and format of the
* returned string may vary between implementations. The returned
* string may be empty but may not be <code>null</code>.
*
* @return the parameter string of this menu component
*/
protected String paramString() {
String thisName = getName();
return (thisName != null? thisName : "");
}
/**
* Returns a representation of this menu component as a string.
* @return a string representation of this menu component
*/
public String toString() {
return getClass().getName() + "[" + paramString() + "]";
}
    /**
     * Gets this component's locking object (the object that owns the thread
     * synchronization monitor) for AWT component-tree and layout
     * operations.
     * @return this component's locking object
     */
    protected final Object getTreeLock() {
        // Single global lock shared with the Component hierarchy.
        return Component.LOCK;
    }
    /**
     * Reads the menu component from an object input stream.
     *
     * @param s the <code>ObjectInputStream</code> to read
     * @exception HeadlessException if
     *   <code>GraphicsEnvironment.isHeadless</code> returns
     *   <code>true</code>
     * @serial
     * @see java.awt.GraphicsEnvironment#isHeadless
     */
    private void readObject(ObjectInputStream s)
        throws ClassNotFoundException, IOException, HeadlessException
    {
        GraphicsEnvironment.checkHeadless();
        // acc is transient: capture the deserializing thread's context before
        // restoring the serialized fields.
        acc = AccessController.getContext();
        s.defaultReadObject();
        // Reattach the component to the current thread's AppContext.
        appContext = AppContext.getAppContext();
    }
    /**
     * Initialize JNI field and method IDs.
     */
    private static native void initIDs();
    /*
     * --- Accessibility Support ---
     *
     *  MenuComponent will contain all of the methods in interface Accessible,
     *  though it won't actually implement the interface - that will be up
     *  to the individual objects which extend MenuComponent.
     */
    // Lazily supplied by subclasses; null means "no accessibility support".
    AccessibleContext accessibleContext = null;
    /**
     * Gets the <code>AccessibleContext</code> associated with
     * this <code>MenuComponent</code>.
     *
     * The method implemented by this base class returns <code>null</code>.
     * Classes that extend <code>MenuComponent</code>
     * should implement this method to return the
     * <code>AccessibleContext</code> associated with the subclass.
     *
     * @return the <code>AccessibleContext</code> of this
     *     <code>MenuComponent</code>
     * @since 1.3
     */
    public AccessibleContext getAccessibleContext() {
        // Base class never populates this field; subclasses assign it.
        return accessibleContext;
    }
/**
* Inner class of <code>MenuComponent</code> used to provide
* default support for accessibility. This class is not meant
* to be used directly by application developers, but is instead
* meant only to be subclassed by menu component developers.
* <p>
* The class used to obtain the accessible role for this object.
* @since 1.3
*/
protected abstract class AccessibleAWTMenuComponent
extends AccessibleContext
implements java.io.Serializable, AccessibleComponent,
AccessibleSelection
{
        /*
         * JDK 1.3 serialVersionUID
         */
        private static final long serialVersionUID = -4269533416223798698L;
        /**
         * Although the class is abstract, this should be called by
         * all sub-classes.
         */
        protected AccessibleAWTMenuComponent() {
            // No state of its own; everything delegates to the outer component.
        }
        // AccessibleContext methods
        //
        /**
         * Gets the <code>AccessibleSelection</code> associated with this
         * object which allows its <code>Accessible</code> children to be selected.
         *
         * @return <code>AccessibleSelection</code> if supported by object;
         *      else return <code>null</code>
         * @see AccessibleSelection
         */
        public AccessibleSelection getAccessibleSelection() {
            // This class implements AccessibleSelection itself.
            return this;
        }
        /**
         * Gets the accessible name of this object.  This should almost never
         * return <code>java.awt.MenuComponent.getName</code>, as that
         * generally isn't a localized name, and doesn't have meaning for the
         * user.  If the object is fundamentally a text object (e.g. a menu item), the
         * accessible name should be the text of the object (e.g. "save").
         * If the object has a tooltip, the tooltip text may also be an
         * appropriate String to return.
         *
         * @return the localized name of the object -- can be <code>null</code>
         *         if this object does not have a name
         * @see AccessibleContext#setAccessibleName
         */
        public String getAccessibleName() {
            // Returns the field inherited from AccessibleContext (set via
            // setAccessibleName), not the component's getName().
            return accessibleName;
        }
        /**
         * Gets the accessible description of this object.  This should be
         * a concise, localized description of what this object is - what
         * is its meaning to the user.  If the object has a tooltip, the
         * tooltip text may be an appropriate string to return, assuming
         * it contains a concise description of the object (instead of just
         * the name of the object - e.g. a "Save" icon on a toolbar that
         * had "save" as the tooltip text shouldn't return the tooltip
         * text as the description, but something like "Saves the current
         * text document" instead).
         *
         * @return the localized description of the object -- can be
         *     <code>null</code> if this object does not have a description
         * @see AccessibleContext#setAccessibleDescription
         */
        public String getAccessibleDescription() {
            // Returns the inherited AccessibleContext field, set externally.
            return accessibleDescription;
        }
        /**
         * Gets the role of this object.
         *
         * @return an instance of <code>AccessibleRole</code>
         *     describing the role of the object
         * @see AccessibleRole
         */
        public AccessibleRole getAccessibleRole() {
            return AccessibleRole.AWT_COMPONENT; // Non-specific -- overridden in subclasses
        }
        /**
         * Gets the state of this object.
         *
         * @return an instance of <code>AccessibleStateSet</code>
         *     containing the current state set of the object
         * @see AccessibleState
         */
        public AccessibleStateSet getAccessibleStateSet() {
            // Delegates to the outer component's package-level helper.
            return MenuComponent.this.getAccessibleStateSet();
        }
/**
* Gets the <code>Accessible</code> parent of this object.
* If the parent of this object implements <code>Accessible</code>,
* this method should simply return <code>getParent</code>.
*
* @return the <code>Accessible</code> parent of this object -- can
* be <code>null</code> if this object does not have an
* <code>Accessible</code> parent
*/
public Accessible getAccessibleParent() {
if (accessibleParent != null) {
return accessibleParent;
} else {
MenuContainer parent = MenuComponent.this.getParent();
if (parent instanceof Accessible) {
return (Accessible) parent;
}
}
return null;
}
        /**
         * Gets the index of this object in its accessible parent.
         *
         * @return the index of this object in its parent; -1 if this
         * object does not have an accessible parent
         * @see #getAccessibleParent
         */
        public int getAccessibleIndexInParent() {
            // Delegates to the enclosing MenuComponent's package-private implementation.
            return MenuComponent.this.getAccessibleIndexInParent();
        }
        /**
         * Returns the number of accessible children in the object. If all
         * of the children of this object implement <code>Accessible</code>,
         * then this method should return the number of children of this object.
         *
         * @return the number of accessible children in the object;
         * always 0 at this level
         */
        public int getAccessibleChildrenCount() {
            return 0; // MenuComponents don't have children
        }
        /**
         * Returns the nth <code>Accessible</code> child of the object.
         *
         * @param i zero-based index of child
         * @return the nth Accessible child of the object; always
         * <code>null</code> at this level
         */
        public Accessible getAccessibleChild(int i) {
            return null; // MenuComponents don't have children
        }
/**
* Returns the locale of this object.
*
* @return the locale of this object
*/
public java.util.Locale getLocale() {
MenuContainer parent = MenuComponent.this.getParent();
if (parent instanceof Component)
return ((Component)parent).getLocale();
else
return java.util.Locale.getDefault();
}
        /**
         * Gets the <code>AccessibleComponent</code> associated with
         * this object if one exists. Otherwise return <code>null</code>.
         *
         * @return the component; this object also implements
         * <code>AccessibleComponent</code>, so it returns itself
         */
        public AccessibleComponent getAccessibleComponent() {
            return this;
        }
        // AccessibleComponent methods
        //
        // Color and cursor properties are not supported for menu components:
        // the getters below return null and the setters are deliberate no-ops.
        /**
         * Gets the background color of this object.
         *
         * @return the background color, if supported, of the object;
         * otherwise, <code>null</code>
         */
        public Color getBackground() {
            return null; // Not supported for MenuComponents
        }
        /**
         * Sets the background color of this object.
         * (For transparency, see <code>isOpaque</code>.)
         *
         * @param c the new <code>Color</code> for the background
         * @see Component#isOpaque
         */
        public void setBackground(Color c) {
            // Not supported for MenuComponents
        }
        /**
         * Gets the foreground color of this object.
         *
         * @return the foreground color, if supported, of the object;
         * otherwise, <code>null</code>
         */
        public Color getForeground() {
            return null; // Not supported for MenuComponents
        }
        /**
         * Sets the foreground color of this object.
         *
         * @param c the new <code>Color</code> for the foreground
         */
        public void setForeground(Color c) {
            // Not supported for MenuComponents
        }
        /**
         * Gets the <code>Cursor</code> of this object.
         *
         * @return the <code>Cursor</code>, if supported, of the object;
         * otherwise, <code>null</code>
         */
        public Cursor getCursor() {
            return null; // Not supported for MenuComponents
        }
        /**
         * Sets the <code>Cursor</code> of this object.
         * <p>
         * The method may have no visual effect if the Java platform
         * implementation and/or the native system do not support
         * changing the mouse cursor shape.
         * @param cursor the new <code>Cursor</code> for the object
         */
        public void setCursor(Cursor cursor) {
            // Not supported for MenuComponents
        }
        /**
         * Gets the <code>Font</code> of this object.
         *
         * @return the <code>Font</code>,if supported, for the object;
         * otherwise, <code>null</code>
         */
        public Font getFont() {
            // Font is the one property menus genuinely support; delegate out.
            return MenuComponent.this.getFont();
        }
        /**
         * Sets the <code>Font</code> of this object.
         *
         * @param f the new <code>Font</code> for the object
         */
        public void setFont(Font f) {
            // Delegates to the enclosing MenuComponent.
            MenuComponent.this.setFont(f);
        }
        /**
         * Gets the <code>FontMetrics</code> of this object.
         *
         * @param f the <code>Font</code>
         * @return the FontMetrics, if supported, the object;
         * otherwise, <code>null</code>
         * @see #getFont
         */
        public FontMetrics getFontMetrics(Font f) {
            return null; // Not supported for MenuComponents
        }
        /**
         * Determines if the object is enabled.
         *
         * @return true if object is enabled; otherwise, false.
         * Menu components do not track this state, so this is a
         * fixed <code>true</code>.
         */
        public boolean isEnabled() {
            return true; // Not supported for MenuComponents
        }
        /**
         * Sets the enabled state of the object.
         *
         * @param b if true, enables this object; otherwise, disables it
         */
        public void setEnabled(boolean b) {
            // Not supported for MenuComponents
        }
        /**
         * Determines if the object is visible. Note: this means that the
         * object intends to be visible; however, it may not in fact be
         * showing on the screen because one of the objects that this object
         * is contained by is not visible. To determine if an object is
         * showing on the screen, use <code>isShowing</code>.
         *
         * @return true if object is visible; otherwise, false.
         * Fixed <code>true</code> here, since visibility is not tracked.
         */
        public boolean isVisible() {
            return true; // Not supported for MenuComponents
        }
        /**
         * Sets the visible state of the object.
         *
         * @param b if true, shows this object; otherwise, hides it
         */
        public void setVisible(boolean b) {
            // Not supported for MenuComponents
        }
        /**
         * Determines if the object is showing. This is determined by checking
         * the visibility of the object and ancestors of the object. Note:
         * this will return true even if the object is obscured by another
         * (for example, it happens to be underneath a menu that was pulled
         * down).
         *
         * @return true if object is showing; otherwise, false.
         * Fixed <code>true</code> here, since showing state is not tracked.
         */
        public boolean isShowing() {
            return true; // Not supported for MenuComponents
        }
        /**
         * Checks whether the specified point is within this object's bounds,
         * where the point's x and y coordinates are defined to be relative to
         * the coordinate system of the object.
         *
         * @param p the <code>Point</code> relative to the coordinate
         * system of the object
         * @return true if object contains <code>Point</code>; otherwise false.
         * Always false here because menu bounds are not tracked.
         */
        public boolean contains(Point p) {
            return false; // Not supported for MenuComponents
        }
        // Geometry (location, bounds, size) is not supported for menu
        // components: getters return null and setters are no-ops.
        /**
         * Returns the location of the object on the screen.
         *
         * @return location of object on screen -- can be <code>null</code>
         * if this object is not on the screen
         */
        public Point getLocationOnScreen() {
            return null; // Not supported for MenuComponents
        }
        /**
         * Gets the location of the object relative to the parent in the form
         * of a point specifying the object's top-left corner in the screen's
         * coordinate space.
         *
         * @return an instance of <code>Point</code> representing the
         * top-left corner of the object's bounds in the coordinate
         * space of the screen; <code>null</code> if
         * this object or its parent are not on the screen
         */
        public Point getLocation() {
            return null; // Not supported for MenuComponents
        }
        /**
         * Sets the location of the object relative to the parent.
         *
         * @param p the new position for the top-left corner
         */
        public void setLocation(Point p) {
            // Not supported for MenuComponents
        }
        /**
         * Gets the bounds of this object in the form of a
         * <code>Rectangle</code> object.
         * The bounds specify this object's width, height, and location
         * relative to its parent.
         *
         * @return a rectangle indicating this component's bounds;
         * <code>null</code> if this object is not on the screen
         */
        public Rectangle getBounds() {
            return null; // Not supported for MenuComponents
        }
        /**
         * Sets the bounds of this object in the form of a
         * <code>Rectangle</code> object.
         * The bounds specify this object's width, height, and location
         * relative to its parent.
         *
         * @param r a rectangle indicating this component's bounds
         */
        public void setBounds(Rectangle r) {
            // Not supported for MenuComponents
        }
        /**
         * Returns the size of this object in the form of a
         * <code>Dimension</code> object. The height field of
         * the <code>Dimension</code> object contains this object's
         * height, and the width field of the <code>Dimension</code>
         * object contains this object's width.
         *
         * @return a <code>Dimension</code> object that indicates the
         * size of this component; <code>null</code>
         * if this object is not on the screen
         */
        public Dimension getSize() {
            return null; // Not supported for MenuComponents
        }
        /**
         * Resizes this object.
         *
         * @param d - the <code>Dimension</code> specifying the
         * new size of the object
         */
        public void setSize(Dimension d) {
            // Not supported for MenuComponents
        }
        /**
         * Returns the <code>Accessible</code> child, if one exists,
         * contained at the local coordinate <code>Point</code>.
         * If there is no <code>Accessible</code> child, <code>null</code>
         * is returned.
         *
         * @param p the point defining the top-left corner of the
         * <code>Accessible</code>, given in the coordinate space
         * of the object's parent
         * @return the <code>Accessible</code>, if it exists,
         * at the specified location; else <code>null</code>
         */
        public Accessible getAccessibleAt(Point p) {
            return null; // MenuComponents don't have children
        }
        /**
         * Returns whether this object can accept focus or not.
         *
         * @return true if object can accept focus; otherwise false.
         * Fixed <code>true</code> here, since focus traversal is not tracked.
         */
        public boolean isFocusTraversable() {
            return true; // Not supported for MenuComponents
        }
        /**
         * Requests focus for this object. No-op for menu components.
         */
        public void requestFocus() {
            // Not supported for MenuComponents
        }
        /**
         * Adds the specified focus listener to receive focus events from this
         * component. No-op: menu components never fire focus events.
         *
         * @param l the focus listener
         */
        public void addFocusListener(java.awt.event.FocusListener l) {
            // Not supported for MenuComponents
        }
        /**
         * Removes the specified focus listener so it no longer receives focus
         * events from this component. No-op: listeners are never registered.
         *
         * @param l the focus listener
         */
        public void removeFocusListener(java.awt.event.FocusListener l) {
            // Not supported for MenuComponents
        }
        // AccessibleSelection methods
        //
        // The selection API below is an unimplemented stub: counts are 0,
        // lookups return null/false, and mutators are no-ops.
        /**
         * Returns the number of <code>Accessible</code> children currently selected.
         * If no children are selected, the return value will be 0.
         *
         * @return the number of items currently selected
         */
        public int getAccessibleSelectionCount() {
            return 0; // To be fully implemented in a future release
        }
        /**
         * Returns an <code>Accessible</code> representing the specified
         * selected child in the object. If there isn't a selection, or there are
         * fewer children selected than the integer passed in, the return
         * value will be <code>null</code>.
         * <p>Note that the index represents the i-th selected child, which
         * is different from the i-th child.
         *
         * @param i the zero-based index of selected children
         * @return the i-th selected child
         * @see #getAccessibleSelectionCount
         */
        public Accessible getAccessibleSelection(int i) {
            return null; // To be fully implemented in a future release
        }
        /**
         * Determines if the current child of this object is selected.
         *
         * @return true if the current child of this object is selected;
         * else false
         * @param i the zero-based index of the child in this
         * <code>Accessible</code> object
         * @see AccessibleContext#getAccessibleChild
         */
        public boolean isAccessibleChildSelected(int i) {
            return false; // To be fully implemented in a future release
        }
        /**
         * Adds the specified <code>Accessible</code> child of the object
         * to the object's selection. If the object supports multiple selections,
         * the specified child is added to any existing selection, otherwise
         * it replaces any existing selection in the object. If the
         * specified child is already selected, this method has no effect.
         *
         * @param i the zero-based index of the child
         * @see AccessibleContext#getAccessibleChild
         */
        public void addAccessibleSelection(int i) {
            // To be fully implemented in a future release
        }
        /**
         * Removes the specified child of the object from the object's
         * selection. If the specified item isn't currently selected, this
         * method has no effect.
         *
         * @param i the zero-based index of the child
         * @see AccessibleContext#getAccessibleChild
         */
        public void removeAccessibleSelection(int i) {
            // To be fully implemented in a future release
        }
        /**
         * Clears the selection in the object, so that no children in the
         * object are selected.
         */
        public void clearAccessibleSelection() {
            // To be fully implemented in a future release
        }
        /**
         * Causes every child of the object to be selected
         * if the object supports multiple selections.
         */
        public void selectAllAccessibleSelection() {
            // To be fully implemented in a future release
        }
} // inner class AccessibleAWTComponent
/**
* Gets the index of this object in its accessible parent.
*
* @return -1 if this object does not have an accessible parent;
* otherwise, the index of the child in its accessible parent.
*/
int getAccessibleIndexInParent() {
MenuContainer localParent = parent;
if (!(localParent instanceof MenuComponent)) {
// MenuComponents only have accessible index when inside MenuComponents
return -1;
}
MenuComponent localParentMenu = (MenuComponent)localParent;
return localParentMenu.getAccessibleChildIndex(this);
}
    /**
     * Gets the index of the child within this MenuComponent.
     *
     * @param child MenuComponent whose index we are interested in.
     * @return -1 if this object doesn't contain the child,
     * otherwise, index of the child.
     */
    int getAccessibleChildIndex(MenuComponent child) {
        // Base implementation has no children; containers override this.
        return -1; // Overridden in subclasses.
    }
/**
* Gets the state of this object.
*
* @return an instance of <code>AccessibleStateSet</code>
* containing the current state set of the object
* @see AccessibleState
*/
AccessibleStateSet getAccessibleStateSet() {
AccessibleStateSet states = new AccessibleStateSet();
return states;
}
}
|
apache/drill | 35,691 | exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestVarDecimalFunctions.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.drill.exec.fn.impl;
import org.apache.drill.categories.SqlFunctionTest;
import org.apache.drill.common.exceptions.UserRemoteException;
import org.apache.drill.exec.planner.physical.PlannerSettings;
import org.apache.drill.test.BaseTestQuery;
import org.hamcrest.CoreMatchers;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.ExpectedException;
import java.math.BigDecimal;
import java.math.MathContext;
import java.math.RoundingMode;
@Category(SqlFunctionTest.class)
public class TestVarDecimalFunctions extends BaseTestQuery {
  // DECIMAL support is off by default in Drill; turn it on for the whole fixture.
  @BeforeClass
  public static void enableDecimalDataType() {
    setSessionOption(PlannerSettings.ENABLE_DECIMAL_DATA_TYPE_KEY, true);
  }
  // Restore the session default so later test classes are unaffected.
  @AfterClass
  public static void disableDecimalDataType() {
    resetSessionOption(PlannerSettings.ENABLE_DECIMAL_DATA_TYPE_KEY);
  }
  // Used by the *Overflow tests to assert on UserRemoteException messages.
  @Rule
  public ExpectedException expectedException = ExpectedException.none();
  // Tests for math functions
  // Verifies VARDECIMAL addition/subtraction: scale trimming at precision 38,
  // mixed-scale operands, zero operands, and digit trimming before the point.
  @Test
  public void testDecimalAdd() throws Exception {
    String query =
        "select\n" +
        // checks trimming of scale
        "cast('999999999999999999999999999.92345678912' as DECIMAL(38, 11))\n" +
        "+ cast('0.32345678912345678912345678912345678912' as DECIMAL(38, 38)) as s1,\n" +
        // sanitary checks
        "cast('1234567891234567891234567891234567.89' as DECIMAL(36, 2))\n" +
        "+ cast('123456789123456789123456789123456.789' as DECIMAL(36, 3)) as s2,\n" +
        "cast('1234567891234567891234567891234567.89' as DECIMAL(36, 2))\n" +
        "+ cast('0' as DECIMAL(36, 3)) as s3,\n" +
        "cast('15.02' as DECIMAL(4, 2)) - cast('12.93' as DECIMAL(4, 2)) as s4,\n" +
        "cast('11.02' as DECIMAL(4, 2)) - cast('12.93' as DECIMAL(4, 2)) as s5,\n" +
        "cast('0' as DECIMAL(36, 2)) - cast('12.93' as DECIMAL(36, 2)) as s6,\n" +
        // check trimming (digits after decimal point will be trimmed from result)
        "cast('9999999999999999999999999999234567891.1' as DECIMAL(38, 1))\n" +
        "+ cast('3234567891234567891234567891234567891.1' as DECIMAL(38, 1)) as s7";
    testBuilder()
        .sqlQuery(query)
        .ordered()
        .baselineColumns("s1", "s2", "s3", "s4", "s5", "s6", "s7")
        .baselineValues(
            // s1 baseline mirrors Drill's behavior: HALF_UP rounding into 38 digits.
            new BigDecimal("999999999999999999999999999.92345678912")
                .add(new BigDecimal("0.32345678912345678912345678912345678912"))
                .round(new MathContext(38, RoundingMode.HALF_UP)),
            new BigDecimal("1358024680358024680358024680358024.679"),
            new BigDecimal("1234567891234567891234567891234567.890"),
            new BigDecimal("2.09"), new BigDecimal("-1.91"), new BigDecimal("-12.93"),
            new BigDecimal("13234567891234567891234567890469135782"))
        .go();
  }
  // Addition whose integer part cannot fit precision 38 must fail validation.
  @Test
  public void testDecimalAddOverflow() throws Exception {
    String query =
        "select\n" +
        "cast('99999999999999999999999999992345678912' as DECIMAL(38, 0))\n" +
        "+ cast('32345678912345678912345678912345678912' as DECIMAL(38, 0)) as s7";
    expectedException.expect(UserRemoteException.class);
    expectedException.expectMessage(
        CoreMatchers.containsString("VALIDATION ERROR: Value 132345678912345678912345678904691357820 " +
            "overflows specified precision 38 with scale 0."));
    test(query);
  }
  // Verifies VARDECIMAL multiplication: scale trimming, sign handling,
  // multiplication by zero and by one.
  @Test
  public void testDecimalMultiply() throws Exception {
    String query =
        "select\n" +
        // checks trimming of scale
        "cast('999999999999999999999999999.92345678912' as DECIMAL(38, 11))\n" +
        "* cast('0.32345678912345678912345678912345678912' as DECIMAL(38, 38)) as s1,\n" +
        // sanitary checks
        "cast('1234567.89' as DECIMAL(9, 2))\n" +
        "* cast('-1.789' as DECIMAL(4, 3)) as s2,\n" +
        "cast('15.02' as DECIMAL(4, 2)) * cast('0' as DECIMAL(4, 2)) as s3,\n" +
        "cast('12.93123456789' as DECIMAL(13, 11)) * cast('1' as DECIMAL(1, 0)) as s4";
    testBuilder()
        .sqlQuery(query)
        .ordered()
        .baselineColumns("s1", "s2", "s3", "s4")
        .baselineValues(new BigDecimal("999999999999999999999999999.92345678912")
                .multiply(new BigDecimal("0.32345678912345678912345678912345678912"))
                .round(new MathContext(38, RoundingMode.HALF_UP)),
            new BigDecimal("-2208641.95521"),
            new BigDecimal("0.0000"), new BigDecimal("12.93123456789"))
        .go();
  }
  // Multiplication whose integer part cannot fit precision 38 must fail validation.
  @Test
  public void testDecimalMultiplyOverflow() throws Exception {
    String query = "select\n" +
        "cast('999999999999999999999999999.92345678912' as DECIMAL(38, 11))\n" +
        " * cast('323456789123.45678912345678912345678912' as DECIMAL(38, 26)) as s1";
    expectedException.expect(UserRemoteException.class);
    expectedException.expectMessage(
        CoreMatchers.containsString("VALIDATION ERROR: Value 323456789123456789123456789098698367900 " +
            "overflows specified precision 38 with scale 0."));
    test(query);
  }
  // Verifies VARDECIMAL division: scale trimming, sign handling, self-division,
  // division by one, and a zero dividend.
  @Test
  public void testDecimalDivide() throws Exception {
    String query =
        "select\n" +
        // checks trimming of scale
        "cast('1.9999999999999999999999999999234567891' as DECIMAL(38, 37))\n" +
        "/ cast('0.0000000000000000000000000000000000001' as DECIMAL(38, 37)) as s1,\n" +
        // sanitary checks
        "cast('1234567.89' as DECIMAL(9, 2))\n" +
        "/ cast('-1.789' as DECIMAL(4, 3)) as s2,\n" +
        "cast('15.02' as DECIMAL(4, 2)) / cast('15.02' as DECIMAL(4, 2)) as s3,\n" +
        "cast('12.93123456789' as DECIMAL(13, 11)) / cast('1' as DECIMAL(1, 0)) as s4,\n" +
        "cast('0' as DECIMAL(1, 0)) / cast('15.02' as DECIMAL(4, 2)) as s5";
    testBuilder()
        .sqlQuery(query)
        .ordered()
        .baselineColumns("s1", "s2", "s3", "s4", "s5")
        .baselineValues(new BigDecimal("19999999999999999999999999999234567891"),
            new BigDecimal("-690088.2560089"),
            new BigDecimal("1.0000000"), new BigDecimal("12.9312345678900"), new BigDecimal("0.000000"))
        .go();
  }
  // Division whose result cannot fit precision 38 must fail validation.
  @Test
  public void testDecimalDivideOverflow() throws Exception {
    String query = "select\n" +
        "cast('1.9999999999999999999999999999234567891' as DECIMAL(38, 37))\n" +
        " / cast('0.00000000000000000000000000000000000001' as DECIMAL(38, 38)) as s1";
    expectedException.expect(UserRemoteException.class);
    expectedException.expectMessage(
        CoreMatchers.containsString("VALIDATION ERROR: Value 199999999999999999999999999992345678910 " +
            "overflows specified precision 38 with scale 0"));
    test(query);
  }
  // mod(): result takes the sign of the dividend, matching BigDecimal.remainder.
  @Test
  public void testDecimalMod() throws Exception {
    String query =
        "select\n" +
        "mod(cast('1111' as DECIMAL(4, 0)), cast('12' as DECIMAL(2, 0))) as s1,\n" +
        "mod(cast('1234567' as DECIMAL(7, 0)),\n" +
        "cast('-9' as DECIMAL(1, 0))) as s2,\n" +
        "mod(cast('-1502' as DECIMAL(4, 0)), cast('15' as DECIMAL(2, 0))) as s3,\n" +
        "mod(cast('-987654' as DECIMAL(6, 0)), cast('-31' as DECIMAL(2, 0))) as s4";
    testBuilder()
        .sqlQuery(query)
        .ordered()
        .baselineColumns("s1", "s2", "s3", "s4")
        .baselineValues(new BigDecimal("7"), new BigDecimal("1"),
            new BigDecimal("-2"), new BigDecimal("-25"))
        .go();
  }
  // abs(): positive, negative, and zero inputs; scale is preserved.
  @Test
  public void testDecimalAbs() throws Exception {
    String query =
        "select\n" +
        "abs(cast('1111' as DECIMAL(4, 0))) as s1,\n" +
        "abs(cast('-1234567.123456' as DECIMAL(13, 6))) as s2,\n" +
        "abs(cast('-1502' as DECIMAL(4, 0))) as s3,\n" +
        "abs(cast('0' as DECIMAL(4, 0))) as s4,\n" +
        "abs(cast('-987654' as DECIMAL(6, 0))) as s5";
    testBuilder()
        .sqlQuery(query)
        .ordered()
        .baselineColumns("s1", "s2", "s3", "s4", "s5")
        .baselineValues(new BigDecimal("1111"), new BigDecimal("1234567.123456"),
            new BigDecimal("1502"), new BigDecimal("0"), new BigDecimal("987654"))
        .go();
  }
  // ceil/ceiling: rounds toward positive infinity; integral inputs unchanged.
  @Test
  public void testDecimalCeil() throws Exception {
    String query =
        "select\n" +
        "ceil(cast('1111.35' as DECIMAL(6, 2))) as s1,\n" +
        "ceiling(cast('1234567.123456' as DECIMAL(13, 6))) as s2,\n" +
        "ceil(cast('-1502.5' as DECIMAL(5, 1))) as s3,\n" +
        "ceiling(cast('987654.5' as DECIMAL(7, 1))) as s4,\n" +
        "ceil(cast('987654.00' as DECIMAL(8, 2))) as s5";
    testBuilder()
        .sqlQuery(query)
        .ordered()
        .baselineColumns("s1", "s2", "s3", "s4", "s5")
        .baselineValues(new BigDecimal("1112"), new BigDecimal("1234568"),
            new BigDecimal("-1502"), new BigDecimal("987655"), new BigDecimal("987654"))
        .go();
  }
  // floor: rounds toward negative infinity; note -1502.5 -> -1503.
  @Test
  public void testDecimalFloor() throws Exception {
    String query =
        "select\n" +
        "floor(cast('1111.35' as DECIMAL(6, 2))) as s1,\n" +
        "floor(cast('1234567.123456' as DECIMAL(13, 6))) as s2,\n" +
        "floor(cast('-1502.5' as DECIMAL(5, 1))) as s3,\n" +
        "floor(cast('987654.5' as DECIMAL(7, 1))) as s4,\n" +
        "floor(cast('987654.00' as DECIMAL(8, 2))) as s5";
    testBuilder()
        .sqlQuery(query)
        .ordered()
        .baselineColumns("s1", "s2", "s3", "s4", "s5")
        .baselineValues(new BigDecimal("1111"), new BigDecimal("1234567"),
            new BigDecimal("-1503"), new BigDecimal("987654"), new BigDecimal("987654"))
        .go();
  }
  // trunc/truncate: rounds toward zero, so -1502.5 -> -1502 (unlike floor).
  @Test
  public void testDecimalTrunc() throws Exception {
    String query =
        "select\n" +
        "trunc(cast('1111.35' as DECIMAL(6, 2))) as s1,\n" +
        "truncate(cast('1234567.123456' as DECIMAL(13, 6))) as s2,\n" +
        "trunc(cast('-1502.5' as DECIMAL(5, 1))) as s3,\n" +
        "truncate(cast('987654.5' as DECIMAL(7, 1))) as s4,\n" +
        "trunc(cast('987654.00' as DECIMAL(8, 2))) as s5";
    testBuilder()
        .sqlQuery(query)
        .ordered()
        .baselineColumns("s1", "s2", "s3", "s4", "s5")
        .baselineValues(new BigDecimal("1111"), new BigDecimal("1234567"),
            new BigDecimal("-1502"), new BigDecimal("987654"), new BigDecimal("987654"))
        .go();
  }
  // round(x): half-away-from-zero rounding to an integral value
  // (note -1502.5 -> -1503).
  @Test
  public void testDecimalRound() throws Exception {
    String query =
        "select\n" +
        "round(cast('1111.45' as DECIMAL(6, 2))) as s1,\n" +
        "round(cast('1234567.523456' as DECIMAL(13, 6))) as s2,\n" +
        "round(cast('-1502.5' as DECIMAL(5, 1))) as s3,\n" +
        "round(cast('-987654.4' as DECIMAL(7, 1))) as s4,\n" +
        "round(cast('987654.00' as DECIMAL(8, 2))) as s5";
    testBuilder()
        .sqlQuery(query)
        .ordered()
        .baselineColumns("s1", "s2", "s3", "s4", "s5")
        .baselineValues(new BigDecimal("1111"), new BigDecimal("1234568"),
            new BigDecimal("-1503"), new BigDecimal("-987654"), new BigDecimal("987654"))
        .go();
  }
  // sign(): returns int -1/0/1; an explicit '+' prefix is accepted.
  @Test
  public void testDecimalSign() throws Exception {
    String query =
        "select\n" +
        "sign(cast('+1111.45' as DECIMAL(6, 2))) as s1,\n" +
        "sign(cast('-1234567.523456' as DECIMAL(13, 6))) as s2,\n" +
        "sign(cast('-1502.5' as DECIMAL(5, 1))) as s3,\n" +
        "sign(cast('987654.4' as DECIMAL(7, 1))) as s4,\n" +
        "sign(cast('0' as DECIMAL(8, 2))) as s5";
    testBuilder()
        .sqlQuery(query)
        .ordered()
        .baselineColumns("s1", "s2", "s3", "s4", "s5")
        .baselineValues(1, -1, -1, 1, 0)
        .go();
  }
  // round(x, d): rounds to d fractional digits; a d equal to the input
  // scale leaves the value (and its trailing zeros) unchanged.
  @Test
  public void testDecimalRoundWithScale() throws Exception {
    String query =
        "select\n" +
        "round(cast('1111.45' as DECIMAL(6, 2)), 1) as s1,\n" +
        "round(cast('1234567.523456' as DECIMAL(13, 6)), 5) as s2,\n" +
        "round(cast('-1502.5' as DECIMAL(5, 1)), 0) as s3,\n" +
        "round(cast('-987654.4' as DECIMAL(7, 1)), 1) as s4,\n" +
        "round(cast('987654.00' as DECIMAL(8, 2)), 2) as s5";
    testBuilder()
        .sqlQuery(query)
        .ordered()
        .baselineColumns("s1", "s2", "s3", "s4", "s5")
        .baselineValues(new BigDecimal("1111.5"), new BigDecimal("1234567.52346"),
            new BigDecimal("-1503"), new BigDecimal("-987654.4"), new BigDecimal("987654.00"))
        .go();
  }
  // trunc(x, d): cuts to d fractional digits toward zero (no rounding).
  @Test
  public void testDecimalTruncWithScale() throws Exception {
    String query =
        "select\n" +
        "trunc(cast('1111.45' as DECIMAL(6, 2)), 1) as s1,\n" +
        "truncate(cast('1234567.523456' as DECIMAL(13, 6)), 5) as s2,\n" +
        "trunc(cast('-1502.5' as DECIMAL(5, 1)), 0) as s3,\n" +
        "truncate(cast('-987654.4' as DECIMAL(7, 1)), 1) as s4,\n" +
        "trunc(cast('987654.00' as DECIMAL(8, 2)), 2) as s5";
    testBuilder()
        .sqlQuery(query)
        .ordered()
        .baselineColumns("s1", "s2", "s3", "s4", "s5")
        .baselineValues(new BigDecimal("1111.4"), new BigDecimal("1234567.52345"),
            new BigDecimal("-1502"), new BigDecimal("-987654.4"), new BigDecimal("987654.00"))
        .go();
  }
  // Tests for comparison functions
  // Equality compares numeric value, not representation: the same value at
  // different precision/scale is equal.
  @Test
  public void testDecimalEquals() throws Exception {
    String query =
        "select\n" +
        "cast('1.9999999999999999999999999999234567891' as DECIMAL(38, 37))\n" +
        "= cast('1.9999999999999999999999999999234567891' as DECIMAL(38, 37)) as s1,\n" +
        "cast('1.9999999999999999999999999999234567891' as DECIMAL(38, 37))\n" +
        "<> cast('1.9999999999999999999999999999234567891' as DECIMAL(38, 37)) as s2,\n" +
        // the same value but different scale and precision
        "cast('1234567.89' as DECIMAL(9, 2)) = cast('1234567.890' as DECIMAL(10, 3)) as s3,\n" +
        "cast('0' as DECIMAL(4, 2)) = cast('0' as DECIMAL(1, 0)) as s4,\n" +
        "cast('0' as DECIMAL(4, 2)) <> cast('0' as DECIMAL(1, 0)) as s5,\n" +
        "cast('12.93123456789' as DECIMAL(13, 11)) = cast('12.93123456788' as DECIMAL(13, 11)) as s6";
    testBuilder()
        .sqlQuery(query)
        .ordered()
        .baselineColumns("s1", "s2", "s3", "s4", "s5", "s6")
        .baselineValues(true, false, true, true, false, false)
        .go();
  }
  // Strict less-than: equal values (even at different scales) compare false.
  @Test
  public void testDecimalLessThan() throws Exception {
    String query =
        "select\n" +
        "cast('1.9999999999999999999999999999234567891' as DECIMAL(38, 37))\n" +
        "< cast('1.9999999999999999999999999999234567891' as DECIMAL(38, 37)) as s1,\n" +
        "cast('1.9999999999999999999999999999234567891' as DECIMAL(38, 37))\n" +
        "< cast('1.9999999999999999999999999999234567892' as DECIMAL(38, 37)) as s2,\n" +
        // the same value but different scale and precision
        "cast('1234567.89' as DECIMAL(9, 2)) < cast('1234567.890' as DECIMAL(10, 3)) as s3,\n" +
        "cast('0' as DECIMAL(4, 2)) < cast('0' as DECIMAL(1, 0)) as s4,\n" +
        "cast('12.93123456789' as DECIMAL(13, 11)) < cast('12.93123456788' as DECIMAL(13, 11)) as s5";
    testBuilder()
        .sqlQuery(query)
        .ordered()
        .baselineColumns("s1", "s2", "s3", "s4", "s5")
        .baselineValues(false, true, false, false, false)
        .go();
  }
  // Less-than-or-equal: equal values compare true.
  @Test
  public void testDecimalLessThanEquals() throws Exception {
    String query =
        "select\n" +
        "cast('1.9999999999999999999999999999234567891' as DECIMAL(38, 37))\n" +
        "<= cast('1.9999999999999999999999999999234567891' as DECIMAL(38, 37)) as s1,\n" +
        "cast('1.9999999999999999999999999999234567891' as DECIMAL(38, 37))\n" +
        "<= cast('1.9999999999999999999999999999234567892' as DECIMAL(38, 37)) as s2,\n" +
        // the same value but different scale and precision
        "cast('1234567.89' as DECIMAL(9, 2)) <= cast('1234567.890' as DECIMAL(10, 3)) as s3,\n" +
        "cast('0' as DECIMAL(4, 2)) <= cast('0' as DECIMAL(1, 0)) as s4,\n" +
        "cast('12.93123456789' as DECIMAL(13, 11)) <= cast('12.93123456788' as DECIMAL(13, 11)) as s5";
    testBuilder()
        .sqlQuery(query)
        .ordered()
        .baselineColumns("s1", "s2", "s3", "s4", "s5")
        .baselineValues(true, true, true, true, false)
        .go();
  }
  // Strict greater-than: mirror of testDecimalLessThan with operands reversed.
  @Test
  public void testDecimalGreaterThan() throws Exception {
    String query =
        "select\n" +
        "cast('1.9999999999999999999999999999234567891' as DECIMAL(38, 37))\n" +
        "> cast('1.9999999999999999999999999999234567891' as DECIMAL(38, 37)) as s1,\n" +
        "cast('1.9999999999999999999999999999234567891' as DECIMAL(38, 37))\n" +
        "> cast('1.9999999999999999999999999999234567892' as DECIMAL(38, 37)) as s2,\n" +
        // the same value but different scale and precision
        "cast('1234567.89' as DECIMAL(9, 2)) > cast('1234567.890' as DECIMAL(10, 3)) as s3,\n" +
        "cast('0' as DECIMAL(4, 2)) > cast('0' as DECIMAL(1, 0)) as s4,\n" +
        "cast('12.93123456789' as DECIMAL(13, 11)) > cast('12.93123456788' as DECIMAL(13, 11)) as s5";
    testBuilder()
        .sqlQuery(query)
        .ordered()
        .baselineColumns("s1", "s2", "s3", "s4", "s5")
        .baselineValues(false, false, false, false, true)
        .go();
  }
  // Greater-than-or-equal: equal values compare true.
  @Test
  public void testDecimalGreaterThanEquals() throws Exception {
    String query =
        "select\n" +
        "cast('1.9999999999999999999999999999234567891' as DECIMAL(38, 37))\n" +
        ">= cast('1.9999999999999999999999999999234567891' as DECIMAL(38, 37)) as s1,\n" +
        "cast('1.9999999999999999999999999999234567891' as DECIMAL(38, 37))\n" +
        ">= cast('1.9999999999999999999999999999234567892' as DECIMAL(38, 37)) as s2,\n" +
        // the same value but different scale and precision
        "cast('1234567.89' as DECIMAL(9, 2)) >= cast('1234567.890' as DECIMAL(10, 3)) as s3,\n" +
        "cast('0' as DECIMAL(4, 2)) >= cast('0' as DECIMAL(1, 0)) as s4,\n" +
        "cast('12.93123456789' as DECIMAL(13, 11)) >= cast('12.93123456788' as DECIMAL(13, 11)) as s5";
    testBuilder()
        .sqlQuery(query)
        .ordered()
        .baselineColumns("s1", "s2", "s3", "s4", "s5")
        .baselineValues(true, false, true, true, true)
        .go();
  }
  // compare_to_nulls_high: three-way compare where NULL sorts as the highest
  // value, so (non-null, null) yields -1 (s5).
  @Test
  public void testDecimalCompareToNullsHigh() throws Exception {
    String query =
        "select\n" +
        "compare_to_nulls_high(cast('1.9999999999999999999999999999234567891' as DECIMAL(38, 37)),\n" +
        "cast('1.9999999999999999999999999999234567891' as DECIMAL(38, 37))) as s1,\n" +
        "compare_to_nulls_high(cast('1.9999999999999999999999999999234567891' as DECIMAL(38, 37)),\n" +
        "cast('1.9999999999999999999999999999234567892' as DECIMAL(38, 37))) as s2,\n" +
        // the same value but different scale and precision
        "compare_to_nulls_high(cast('1234567.89' as DECIMAL(9, 2)), cast('1234567.890' as DECIMAL(10, 3))) as s3,\n" +
        "compare_to_nulls_high(cast('0' as DECIMAL(4, 2)), cast('0' as DECIMAL(1, 0))) as s4,\n" +
        "compare_to_nulls_high(cast('0' as DECIMAL(4, 2)), cast(null as DECIMAL(1, 0))) as s5,\n" +
        "compare_to_nulls_high(cast('12.93123456789' as DECIMAL(13, 11)), " +
            "cast('12.93123456788' as DECIMAL(13, 11))) as s6";
    testBuilder()
        .sqlQuery(query)
        .ordered()
        .baselineColumns("s1", "s2", "s3", "s4", "s5", "s6")
        .baselineValues(0, -1, 0, 0, -1, 1)
        .go();
  }
  // compare_to_nulls_low: same as above, but NULL sorts as the lowest value,
  // so (non-null, null) yields +1 (s5).
  @Test
  public void testDecimalCompareToNullsLow() throws Exception {
    String query =
        "select\n" +
        "compare_to_nulls_low(cast('1.9999999999999999999999999999234567891' as DECIMAL(38, 37)),\n" +
        "cast('1.9999999999999999999999999999234567891' as DECIMAL(38, 37))) as s1,\n" +
        "compare_to_nulls_low(cast('1.9999999999999999999999999999234567891' as DECIMAL(38, 37)),\n" +
        "cast('1.9999999999999999999999999999234567892' as DECIMAL(38, 37))) as s2,\n" +
        // the same value but different scale and precision
        "compare_to_nulls_low(cast('1234567.89' as DECIMAL(9, 2)), cast('1234567.890' as DECIMAL(10, 3))) as s3,\n" +
        "compare_to_nulls_low(cast('0' as DECIMAL(4, 2)), cast('0' as DECIMAL(1, 0))) as s4,\n" +
        "compare_to_nulls_low(cast('0' as DECIMAL(4, 2)), cast(null as DECIMAL(1, 0))) as s5,\n" +
        "compare_to_nulls_low(cast('12.93123456789' as DECIMAL(13, 11)), " +
            "cast('12.93123456788' as DECIMAL(13, 11))) as s6";
    testBuilder()
        .sqlQuery(query)
        .ordered()
        .baselineColumns("s1", "s2", "s3", "s4", "s5", "s6")
        .baselineValues(0, -1, 0, 0, 1, 1)
        .go();
  }
  // Tests for cast functions
  // INT -> DECIMAL cast: zero, positive, negative values, and both int
  // extremes (MAX_VALUE/MIN_VALUE substituted via the format placeholders).
  @Test
  public void testCastIntDecimal() throws Exception {
    String query =
        "select\n" +
        "cast(i1 as DECIMAL(4, 0)) as s1,\n" +
        "cast(i2 as DECIMAL(7, 0)) as s2,\n" +
        "cast(i3 as DECIMAL(8, 0)) as s3,\n" +
        "cast(i4 as DECIMAL(6, 0)) as s4,\n" +
        "cast(i5 as DECIMAL(6, 0)) as s5,\n" +
        "cast(i6 as DECIMAL(10, 0)) as s6,\n" +
        "cast(i7 as DECIMAL(10, 0)) as s7\n" +
        "from (" +
        "select\n" +
        "cast(0 as int) as i1,\n" +
        "cast(1234567 as int) as i2,\n" +
        "cast(-15022222 as int) as i3,\n" +
        "cast(-987654 as int) as i4,\n" +
        "cast(987654 as int) as i5,\n" +
        "cast(%s as int) as i6,\n" +
        "cast(%s as int) as i7)";
    testBuilder()
        .sqlQuery(query, Integer.MAX_VALUE, Integer.MIN_VALUE)
        .ordered()
        .baselineColumns("s1", "s2", "s3", "s4", "s5", "s6", "s7")
        .baselineValues(BigDecimal.valueOf(0), BigDecimal.valueOf(1234567),
            BigDecimal.valueOf(-15022222), BigDecimal.valueOf(-987654),BigDecimal.valueOf(987654),
            BigDecimal.valueOf(Integer.MAX_VALUE), BigDecimal.valueOf(Integer.MIN_VALUE))
        .go();
  }
@Test
public void testCastDecimalInt() throws Exception {
String query =
"select\n" +
"cast(i1 as int) as s1,\n" +
"cast(i2 as int) as s2,\n" +
"cast(i3 as int) as s3,\n" +
"cast(i4 as int) as s4,\n" +
"cast(i5 as int) as s5,\n" +
"cast(i6 as int) as s6,\n" +
"cast(i7 as int) as s7\n" +
"from (" +
"select\n" +
"cast('999999999999999999999999999.92345678912' as DECIMAL(38, 11)) as i1,\n" +
"cast('0.32345678912345678912345678912345678912' as DECIMAL(38, 38)) as i2,\n" +
"cast('-1234567891234567891234567891234567.89' as DECIMAL(36, 2)) as i3,\n" +
"cast('0' as DECIMAL(36, 3)) as i4,\n" +
"cast('15.02' as DECIMAL(4, 2)) as i5,\n" +
"cast('-15.02' as DECIMAL(4, 2)) as i6,\n" +
"cast('0.7877' as DECIMAL(4, 4)) as i7)";
testBuilder()
.sqlQuery(query)
.ordered()
.baselineColumns("s1", "s2", "s3", "s4", "s5", "s6", "s7")
.baselineValues(
new BigDecimal("999999999999999999999999999.92345678912")
.setScale(0, BigDecimal.ROUND_HALF_UP).intValue(),
0,
new BigDecimal("-1234567891234567891234567891234567.89")
.setScale(0, BigDecimal.ROUND_HALF_UP).intValue(),
0, 15, -15, 1)
.go();
}
@Test
public void testCastBigIntDecimal() throws Exception {
String query =
"select\n" +
"cast(i1 as DECIMAL(4, 0)) as s1,\n" +
"cast(i2 as DECIMAL(7, 0)) as s2,\n" +
"cast(i3 as DECIMAL(8, 0)) as s3,\n" +
"cast(i4 as DECIMAL(6, 0)) as s4,\n" +
"cast(i5 as DECIMAL(6, 0)) as s5,\n" +
"cast(i6 as DECIMAL(19, 0)) as s6,\n" +
"cast(i7 as DECIMAL(19, 0)) as s7\n" +
"from (" +
"select " +
"cast(0 as bigint) as i1,\n" +
"cast(1234567 as bigint) as i2,\n" +
"cast(-15022222 as bigint) as i3,\n" +
"cast(-987654 as bigint) as i4,\n" +
"cast(987654 as bigint) as i5,\n" +
"cast(%s as bigint) as i6,\n" +
"cast(%s as bigint) as i7)";
testBuilder()
.sqlQuery(query, Long.MAX_VALUE, Long.MIN_VALUE)
.ordered()
.baselineColumns("s1", "s2", "s3", "s4", "s5", "s6", "s7")
.baselineValues(new BigDecimal("0"), new BigDecimal("1234567"),
new BigDecimal("-15022222"), new BigDecimal("-987654"), new BigDecimal("987654"),
BigDecimal.valueOf(Long.MAX_VALUE), BigDecimal.valueOf(Long.MIN_VALUE))
.go();
}
@Test
public void testCastDecimalBigInt() throws Exception {
String query =
"select\n" +
"cast(i1 as bigint) as s1,\n" +
"cast(i2 as bigint) as s2,\n" +
"cast(i3 as bigint) as s3,\n" +
"cast(i4 as bigint) as s4,\n" +
"cast(i5 as bigint) as s5,\n" +
"cast(i6 as bigint) as s6,\n" +
"cast(i7 as bigint) as s7\n" +
"from (" +
"select\n" +
"cast('999999999999999999999999999.92345678912' as DECIMAL(38, 11)) as i1,\n" +
"cast('0.32345678912345678912345678912345678912' as DECIMAL(38, 38)) as i2,\n" +
"cast('-1234567891234567891234567891234567.89' as DECIMAL(36, 2)) as i3,\n" +
"cast('0' as DECIMAL(36, 3)) as i4,\n" +
"cast('15.02' as DECIMAL(4, 2)) as i5,\n" +
"cast('-15.02' as DECIMAL(4, 2)) as i6,\n" +
"cast('0.7877' as DECIMAL(4, 4)) as i7)";
testBuilder()
.sqlQuery(query)
.ordered()
.baselineColumns("s1", "s2", "s3", "s4", "s5", "s6", "s7")
.baselineValues(
new BigDecimal("999999999999999999999999999.92345678912")
.setScale(0, BigDecimal.ROUND_HALF_UP).longValue(),
0L,
new BigDecimal("-1234567891234567891234567891234567.89")
.setScale(0, BigDecimal.ROUND_HALF_UP).longValue(),
0L, 15L, -15L, 1L)
.go();
}
@Test
public void testCastFloatDecimal() throws Exception {
String query =
"select\n" +
"cast(i1 as DECIMAL(4, 0)) as s1,\n" +
"cast(i2 as DECIMAL(7, 6)) as s2,\n" +
"cast(i3 as DECIMAL(8, 7)) as s3,\n" +
"cast(i4 as DECIMAL(6, 6)) as s4,\n" +
"cast(i5 as DECIMAL(7, 0)) as s5,\n" +
"cast(i6 as DECIMAL(38, 38)) as s6\n" +
"from (" +
"select\n" +
"cast(0 as float) as i1,\n" +
"cast(1.234567 as float) as i2,\n" +
"cast(-1.5022222 as float) as i3,\n" +
"cast(-0.987654 as float) as i4,\n" +
"cast(9999999 as float) as i5,\n" +
"cast('%s' as float) as i6)";
testBuilder()
.sqlQuery(query, Float.MIN_VALUE)
.unOrdered()
.baselineColumns("s1", "s2", "s3", "s4", "s5", "s6")
.baselineValues(BigDecimal.valueOf(0), new BigDecimal("1.234567"),
new BigDecimal("-1.5022222"), new BigDecimal("-0.987654"), BigDecimal.valueOf(9999999),
new BigDecimal(Float.MIN_VALUE).setScale(38, RoundingMode.HALF_UP))
.go();
}
@Test
public void testCastDecimalFloat() throws Exception {
String query =
"select\n" +
"cast(i1 as float) as s1,\n" +
"cast(i2 as float) as s2,\n" +
"cast(i3 as float) as s3,\n" +
"cast(i4 as float) as s4,\n" +
"cast(i5 as float) as s5,\n" +
"cast(i6 as float) as s6\n" +
"from (" +
"select\n" +
"cast('999999999999999999999999999.92345678912' as DECIMAL(38, 11)) as i1,\n" +
"cast('0.32345678912345678912345678912345678912' as DECIMAL(38, 38)) as i2,\n" +
"cast('-1234567891234567891234567891234567.89' as DECIMAL(36, 2)) as i3,\n" +
"cast('0' as DECIMAL(36, 3)) as i4,\n" +
"cast('15.02' as DECIMAL(4, 2)) as i5,\n" +
"cast('%s' as DECIMAL(38, 38)) as i6)";
testBuilder()
.sqlQuery(query, Float.MIN_VALUE)
.unOrdered()
.baselineColumns("s1", "s2", "s3", "s4", "s5", "s6")
.baselineValues(new BigDecimal("999999999999999999999999999.92345678912").floatValue(),
new BigDecimal("0.32345678912345678912345678912345678912").floatValue(),
new BigDecimal("-1234567891234567891234567891234567.89").floatValue(),
0f, 15.02f, 0.0f)
.go();
}
@Test
public void testCastDoubleDecimal() throws Exception {
String query =
"select\n" +
"cast(i1 as DECIMAL(4, 0)) as s1,\n" +
"cast(i2 as DECIMAL(7, 6)) as s2,\n" +
"cast(i3 as DECIMAL(8, 7)) as s3,\n" +
"cast(i4 as DECIMAL(6, 6)) as s4,\n" +
"cast(i5 as DECIMAL(7, 0)) as s5,\n" +
"cast(i6 as DECIMAL(38, 38)) as s6\n" +
"from (" +
"select\n" +
"cast(0 as double) as i1,\n" +
"cast(1.234567 as double) as i2,\n" +
"cast(-1.5022222 as double) as i3,\n" +
"cast(-0.987654 as double) as i4,\n" +
"cast(9999999 as double) as i5,\n" +
"cast('%e' as double) as i6)";
testBuilder()
.sqlQuery(query, Double.MIN_VALUE)
.unOrdered()
.baselineColumns("s1", "s2", "s3", "s4", "s5", "s6")
.baselineValues(BigDecimal.valueOf(0), new BigDecimal("1.234567"),
new BigDecimal("-1.5022222"), new BigDecimal("-0.987654"), BigDecimal.valueOf(9999999),
new BigDecimal(String.valueOf(Double.MIN_VALUE)).setScale(38, RoundingMode.HALF_UP))
.go();
}
@Test
public void testCastDecimalDouble() throws Exception {
String query =
"select\n" +
"cast(i1 as double) as s1,\n" +
"cast(i2 as double) as s2,\n" +
"cast(i3 as double) as s3,\n" +
"cast(i4 as double) as s4,\n" +
"cast(i5 as double) as s5,\n" +
"cast(i6 as double) as s6\n" +
"from (" +
"select\n" +
"cast('999999999999999999999999999.92345678912' as DECIMAL(38, 11)) as i1,\n" +
"cast('0.32345678912345678912345678912345678912' as DECIMAL(38, 38)) as i2,\n" +
"cast('-1234567891234567891234567891234567.89' as DECIMAL(36, 2)) as i3,\n" +
"cast('0' as DECIMAL(36, 3)) as i4,\n" +
"cast('15.02' as DECIMAL(4, 2)) as i5,\n" +
"cast('%e' as DECIMAL(38, 38)) as i6)";
testBuilder()
.sqlQuery(query, Double.MIN_VALUE)
.unOrdered()
.baselineColumns("s1", "s2", "s3", "s4", "s5", "s6")
.baselineValues(new BigDecimal("999999999999999999999999999.92345678912").doubleValue(),
new BigDecimal("0.32345678912345678912345678912345678912").doubleValue(),
new BigDecimal("-1234567891234567891234567891234567.89").doubleValue(),
0d, 15.02, 0.)
.go();
}
@Test
public void testCastDecimalVarchar() throws Exception {
String query =
"select\n" +
"cast(i1 as varchar) as s1,\n" +
"cast(i2 as varchar) as s2,\n" +
"cast(i3 as varchar) as s3,\n" +
"cast(i4 as varchar) as s4,\n" +
"cast(i5 as varchar) as s5\n" +
"from (" +
"select\n" +
"cast('999999999999999999999999999.92345678912' as DECIMAL(38, 11)) as i1,\n" +
"cast('0.32345678912345678912345678912345678912' as DECIMAL(38, 38)) as i2,\n" +
"cast('-1234567891234567891234567891234567.89' as DECIMAL(36, 2)) as i3,\n" +
"cast('0' as DECIMAL(36, 3)) as i4,\n" +
"cast('15.02' as DECIMAL(4, 2)) as i5)";
testBuilder()
.sqlQuery(query)
.ordered()
.baselineColumns("s1", "s2", "s3", "s4", "s5")
.baselineValues("999999999999999999999999999.92345678912",
"0.32345678912345678912345678912345678912",
"-1234567891234567891234567891234567.89", "0.000", "15.02")
.go();
}
@Test
public void testDecimalToChar() throws Exception {
String query =
"select\n" +
"to_char(i1, '#.#') as s1,\n" +
"to_char(i2, '#.#') as s2,\n" +
"to_char(i3, '#.#') as s3,\n" +
"to_char(i4, '#.#') as s4,\n" +
"to_char(i5, '#.#') as s5\n" +
"from (" +
"select\n" +
"cast('999999999999999999999999999.92345678912' as DECIMAL(38, 11)) as i1,\n" +
"cast('0.32345678912345678912345678912345678912' as DECIMAL(38, 38)) as i2,\n" +
"cast('-1234567891234567891234567891234567.89' as DECIMAL(36, 2)) as i3,\n" +
"cast('0' as DECIMAL(36, 3)) as i4,\n" +
"cast('15.02' as DECIMAL(4, 2)) as i5)";
testBuilder()
.sqlQuery(query)
.ordered()
.baselineColumns("s1", "s2", "s3", "s4", "s5")
.baselineValues("999999999999999999999999999.9", "0.3",
"-1234567891234567891234567891234567.9", "0", "15")
.go();
}
@Test
public void testDecimalNegate() throws Exception {
String query =
"select\n" +
"negative(i1) as s1,\n" +
"-i2 as s2,\n" +
"negative(i3) as s3,\n" +
"-i4 as s4,\n" +
"negative(i5) as s5\n" +
"from (" +
"select\n" +
"cast('999999999999999999999999999.92345678912' as DECIMAL(38, 11)) as i1,\n" +
"cast('0.32345678912345678912345678912345678912' as DECIMAL(38, 38)) as i2,\n" +
"cast('-1234567891234567891234567891234567.89' as DECIMAL(36, 2)) as i3,\n" +
"cast('0' as DECIMAL(36, 3)) as i4,\n" +
"cast('15.02' as DECIMAL(4, 2)) as i5)";
testBuilder()
.sqlQuery(query)
.unOrdered()
.baselineColumns("s1", "s2", "s3", "s4", "s5")
.baselineValues(new BigDecimal("-999999999999999999999999999.92345678912"),
new BigDecimal("-0.32345678912345678912345678912345678912"),
new BigDecimal("1234567891234567891234567891234567.89"),
new BigDecimal("0.000"),
new BigDecimal("-15.02"))
.go();
}
}
|
apache/trafodion | 36,822 | dcs/src/main/java/org/trafodion/dcs/master/ServerManager.java | /**
* @@@ START COPYRIGHT @@@
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
* @@@ END COPYRIGHT @@@
*/
package org.trafodion.dcs.master;
import java.net.InetAddress;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.BufferedReader;
import java.io.FileReader;
import java.io.FileNotFoundException;
import java.util.Scanner;
import java.util.Collections;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.Queue;
import java.util.List;
import java.util.ArrayList;
import java.util.concurrent.Callable;
import java.util.concurrent.Executors;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.concurrent.ExecutionException;
import java.util.Date;
import java.util.Comparator;
import java.util.Map;
import java.util.HashMap;
import java.text.DateFormat;
import org.apache.zookeeper.*;
import org.apache.zookeeper.data.Stat;
import org.apache.hadoop.conf.Configuration;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.trafodion.dcs.master.RunningServer;
import org.trafodion.dcs.master.RegisteredServer;
import org.trafodion.dcs.master.Metrics;
import org.trafodion.dcs.script.ScriptManager;
import org.trafodion.dcs.script.ScriptContext;
import org.trafodion.dcs.Constants;
import org.trafodion.dcs.zookeeper.ZkClient;
import org.trafodion.dcs.util.*;
import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;
public class ServerManager implements Callable {
private static final Log LOG = LogFactory.getLog(ServerManager.class);
private DcsMaster master;
private Configuration conf;
private DcsNetworkConfiguration netConf;
private ZkClient zkc = null;
private long startupTimestamp;
private int maxRestartAttempts;
private int retryIntervalMillis;
private ExecutorService pool = null;
private Metrics metrics;
private String parentZnode;
private RetryCounterFactory retryCounterFactory;
private final ArrayList<String> configuredServers = new ArrayList<String>();
private final Map<String, ServerPortMap> serverPortMap = new HashMap<String, ServerPortMap>();
private final ArrayList<String> runningServers = new ArrayList<String>();
private final ArrayList<String> registeredServers = new ArrayList<String>();
private final Queue<RestartHandler> restartQueue = new LinkedList<RestartHandler>();
private final ArrayList<ServerItem> serverItemList = new ArrayList<ServerItem>();
private boolean trafodionQueryToolsEnabled;
private JdbcT4Util jdbcT4Util = null;
public ServerManager(DcsMaster master, Configuration conf, ZkClient zkc,
DcsNetworkConfiguration netConf, long startupTimestamp,
Metrics metrics) throws Exception {
try {
this.master = master;
this.conf = conf;
this.zkc = zkc;
this.netConf = netConf;
this.startupTimestamp = startupTimestamp;
this.metrics = metrics;
maxRestartAttempts = conf
.getInt(Constants.DCS_MASTER_SERVER_RESTART_HANDLER_ATTEMPTS,
Constants.DEFAULT_DCS_MASTER_SERVER_RESTART_HANDLER_ATTEMPTS);
retryIntervalMillis = conf
.getInt(Constants.DCS_MASTER_SERVER_RESTART_HANDLER_RETRY_INTERVAL_MILLIS,
Constants.DEFAULT_DCS_MASTER_SERVER_RESTART_HANDLER_RETRY_INTERVAL_MILLIS);
trafodionQueryToolsEnabled = conf.getBoolean(
Constants.DCS_MASTER_TRAFODION_QUERY_TOOLS,
Constants.DEFAULT_DCS_MASTER_TRAFODION_QUERY_TOOLS);
if (trafodionQueryToolsEnabled)
jdbcT4Util = new JdbcT4Util(conf, netConf);
retryCounterFactory = new RetryCounterFactory(maxRestartAttempts,
retryIntervalMillis);
parentZnode = conf.get(Constants.ZOOKEEPER_ZNODE_PARENT,
Constants.DEFAULT_ZOOKEEPER_ZNODE_PARENT);
pool = Executors.newSingleThreadExecutor();
} catch (Exception e) {
e.printStackTrace();
if (LOG.isErrorEnabled())
LOG.error(e);
throw e;
}
}
    /**
     * Restarts one failed DcsServer instance by running dcs-daemon.sh, either
     * directly when the target is the local host or via pdsh for a remote
     * host, retrying per the configured RetryCounterFactory. Instances are
     * queued on restartQueue and executed by the single-thread pool in
     * ServerManager.call().
     */
    class RestartHandler implements Callable<ScriptContext> {
        // Holds the command plus captured stdout/stderr/exit code of the script.
        private ScriptContext scriptContext = new ScriptContext();
        // Running-znode name: "hostName:instance:infoPort:startTimestamp".
        private String znodePath;
        // Number of child server processes to start (from the servers file).
        private int childCount;

        public RestartHandler(String znodePath, int childCount) {
            this.znodePath = znodePath;
            this.childCount = childCount;
        }

        /**
         * Runs the restart script with retries and returns its ScriptContext;
         * the caller inspects getExitCode() to decide whether to re-queue.
         */
        @Override
        public ScriptContext call() throws Exception {
            try {
                // Parse "hostName:instance:infoPort:startTimestamp".
                Scanner scn = new Scanner(znodePath);
                scn.useDelimiter(":");
                String hostName = scn.next();// host name
                String instance = scn.next();// instance
                int infoPort = Integer.parseInt(scn.next()); // UI port
                long serverStartTimestamp = Long.parseLong(scn.next());
                scn.close();

                // Get the --config property from classpath...it's always first
                // in the classpath
                String cp = System.getProperty("java.class.path");
                scn = new Scanner(cp);
                scn.useDelimiter(":");
                String confDir = scn.next();
                scn.close();
                if (LOG.isDebugEnabled())
                    LOG.debug("conf dir [" + confDir + "]");

                // Get -Ddcs.home.dir
                String dcsHome = System.getProperty("dcs.home.dir");

                // If stop-dcs.sh is executed and DCS_MANAGES_ZK then zookeeper
                // is stopped abruptly.
                // Second scenario is when ZooKeeper fails for some reason
                // regardless of whether DCS
                // manages it. When either happens the DcsServer running znodes
                // still exist in ZooKeeper
                // and we see them at next startup. When they eventually timeout
                // we get node deleted events for a server that no longer
                // exists. So, only recognize
                // DcsServer running znodes that have timestamps after last
                // DcsMaster startup.
                //
                // But, if we are DcsMaster follower that is taking over from
                // failed one then ignore timestamp issues described above.
                // See MasterLeaderElection.elect()
                if ((master.isFollower() == false && serverStartTimestamp > startupTimestamp)
                        || (master.isFollower() && runningServers.size() < configuredServers.size())) {
                    scriptContext.setHostName(hostName);
                    scriptContext
                            .setScriptName(Constants.SYS_SHELL_SCRIPT_NAME);
                    // Local restart runs the daemon script directly; remote
                    // restart wraps it in a pdsh invocation on the target host.
                    if (hostName.equalsIgnoreCase(netConf.getHostName()))
                        scriptContext.setCommand("bin/dcs-daemon.sh --config "
                                + confDir + " start server " + instance + " "
                                + childCount);
                    else
                        scriptContext.setCommand("pdsh -w " + hostName
                                + " \"cd " + dcsHome
                                + ";bin/dcs-daemon.sh --config " + confDir
                                + " start server " + instance + " "
                                + childCount + "\"");

                    RetryCounter retryCounter = retryCounterFactory.create();
                    while (true) {
                        // Clear captured output from the previous attempt so
                        // each retry reports only its own stdout/stderr.
                        if (scriptContext.getStdOut().length() > 0)
                            scriptContext.getStdOut().delete(0,
                                    scriptContext.getStdOut().length());
                        if (scriptContext.getStdErr().length() > 0)
                            scriptContext.getStdErr().delete(0,
                                    scriptContext.getStdErr().length());
                        LOG.info("Restarting DcsServer [" + hostName + ":"
                                + instance + "], script [ "
                                + scriptContext.toString() + " ]");
                        ScriptManager.getInstance().runScript(scriptContext);

                        if (scriptContext.getExitCode() == 0) {
                            LOG.info("DcsServer [" + hostName + ":" + instance
                                    + "] restarted");
                            break;
                        } else {
                            // Failed attempt: log exit code plus any output,
                            // then retry until the counter is exhausted.
                            StringBuilder sb = new StringBuilder();
                            sb.append("exit code ["
                                    + scriptContext.getExitCode() + "]");
                            if (!scriptContext.getStdOut().toString().isEmpty())
                                sb.append(", stdout ["
                                        + scriptContext.getStdOut().toString()
                                        + "]");
                            if (!scriptContext.getStdErr().toString().isEmpty())
                                sb.append(", stderr ["
                                        + scriptContext.getStdErr().toString()
                                        + "]");
                            if (LOG.isErrorEnabled())
                                LOG.error(sb.toString());

                            if (!retryCounter.shouldRetry()) {
                                if (LOG.isErrorEnabled())
                                    LOG.error("DcsServer [" + hostName + ":"
                                            + instance
                                            + "] restart failed after "
                                            + retryCounter.getMaxRetries()
                                            + " retries");
                                break;
                            } else {
                                retryCounter.sleepUntilNextRetry();
                                retryCounter.useRetry();
                            }
                        }
                    }
                } else {
                    // Stale znode (predates this master's startup) and we are
                    // the leader: explain why no restart is attempted.
                    StringBuffer sb = new StringBuffer();
                    sb.append("No restart for ").append(znodePath).append(System.getProperty("line.separator"));
                    sb.append("DCS Master isFollower [").append(master.isFollower()).append("], ");
                    sb.append("DCS Master start time [")
                            .append(DateFormat.getDateTimeInstance().format(new Date(startupTimestamp))).append("], ");
                    sb.append("DCS Server start time [")
                            .append(DateFormat.getDateTimeInstance().format(new Date(serverStartTimestamp))).append("], ");
                    sb.append("running DCS Server num is [").append(runningServers.size())
                            .append("], registered DCS Server num is [").append(registeredServers.size()).append("].");
                    LOG.info(sb.toString());
                }
            } catch (Exception e) {
                e.printStackTrace();
                if (LOG.isErrorEnabled())
                    LOG.error(e);
            }
            return scriptContext;
        }
    }
class RunningWatcher implements Watcher {
public void process(WatchedEvent event) {
if (event.getType() == Event.EventType.NodeChildrenChanged) {
if (LOG.isDebugEnabled())
LOG.debug("Running children changed [" + event.getPath()
+ "]");
try {
getZkRunning();
} catch (Exception e) {
e.printStackTrace();
if (LOG.isErrorEnabled())
LOG.error(e);
}
} else if (event.getType() == Event.EventType.NodeDeleted) {
String znodePath = event.getPath();
if (LOG.isDebugEnabled())
LOG.debug("Running znode deleted [" + znodePath + "]");
try {
restartServer(znodePath);
} catch (Exception e) {
e.printStackTrace();
if (LOG.isErrorEnabled())
LOG.error(e);
}
}
}
}
class RegisteredWatcher implements Watcher {
public void process(WatchedEvent event) {
if (event.getType() == Event.EventType.NodeChildrenChanged) {
if (LOG.isDebugEnabled())
LOG.debug("Registered children changed [" + event.getPath()
+ "]");
try {
getZkRegistered();
} catch (Exception e) {
e.printStackTrace();
if (LOG.isErrorEnabled())
LOG.error(e);
}
}
}
}
@Override
public Boolean call() throws Exception {
long timeoutMillis = 5000;
try {
getServersFile();
createServersPortMap();
getZkRunning();
getUnwathedServers();
getZkRegistered();
while (true) {
while (!restartQueue.isEmpty()) {
if (LOG.isDebugEnabled())
LOG.debug("Restart queue size [" + restartQueue.size()
+ "]");
RestartHandler handler = restartQueue.poll();
Future<ScriptContext> runner = pool.submit(handler);
ScriptContext scriptContext = runner.get();// blocking call
// In some situation, there may restart dcs server replicated.
// Exit code == -2 means dcs server had been started,
// no needs to add to restart queue.
if (scriptContext.getExitCode() != 0 && scriptContext.getExitCode() != -2) {
restartQueue.add(handler);
}
}
try {
Thread.sleep(timeoutMillis);
} catch (InterruptedException e) {
}
}
} catch (Exception e) {
e.printStackTrace();
if (LOG.isErrorEnabled())
LOG.error(e);
pool.shutdown();
throw e;
}
}
private List<String> getChildren(String znode, Watcher watcher)
throws Exception {
List<String> children = null;
children = zkc.getChildren(znode, watcher);
if (!children.isEmpty())
Collections.sort(children);
return children;
}
private void getServersFile() throws Exception {
InputStream is = this.getClass().getResourceAsStream("/servers");
if (is == null)
throw new IOException("Cannot find servers file");
BufferedReader br = new BufferedReader(new InputStreamReader(is));
configuredServers.clear();
String line;
int lineNum = 1;
while ((line = br.readLine()) != null) {
Scanner scn = new Scanner(line);
scn.useDelimiter(" ");
String hostName = null;
String serverCount = null;
if (scn.hasNext())
hostName = scn.next();// host name
else
hostName = new String("localhost");
if (scn.hasNext())
serverCount = scn.next();// optional
else
serverCount = "1";
scn.close();
if (hostName.equalsIgnoreCase("localhost")) {
hostName = netConf.getHostName();
}
if (LOG.isDebugEnabled())
LOG.debug("Adding to configured servers [" + hostName + ":"
+ lineNum + ":" + serverCount + "]");
configuredServers.add(hostName + ":" + lineNum + ":" + serverCount);
lineNum++;
}
Collections.sort(configuredServers);
if (configuredServers.size() < 1)
throw new IOException("No entries found in servers file");
int lnum = 1;
for (int i = 0; i < configuredServers.size(); i++) {
if (LOG.isDebugEnabled())
LOG.debug("servers file line " + lnum + " ["
+ configuredServers.get(i) + "]");
lnum++;
}
}
class ServerPortMap {
int begPortNum = conf.getInt(Constants.DCS_MASTER_PORT,
Constants.DEFAULT_DCS_MASTER_PORT) + 1;
int endPortNum = begPortNum;
StringBuilder sb = new StringBuilder();
public void add(int instance, int childCount) {
for (int i = 1; i <= childCount; i++) {
if (endPortNum > begPortNum)
sb.append(":");
sb.append(instance + ":" + i + ":" + endPortNum);
endPortNum++;
}
}
public String toString() {
return sb.toString();
}
}
private void createServersPortMap() throws Exception {
serverPortMap.clear();
for (String aServer : configuredServers) {
Scanner scn = new Scanner(aServer);
scn.useDelimiter(":");
String hostName = scn.next();
int instance = Integer.parseInt(scn.next());
int childCount = Integer.parseInt(scn.next());
scn.close();
ServerPortMap spm = serverPortMap.get(hostName);
if (spm == null)
spm = new ServerPortMap();
spm.add(instance, childCount);
serverPortMap.put(hostName, spm);
}
StringBuilder sb = new StringBuilder();
for (Map.Entry<String, ServerPortMap> entry : serverPortMap.entrySet()) {
LOG.debug(("Key = " + entry.getKey() + ", Value = " + entry
.getValue()));
sb.append(entry.getValue());
sb.append(":");
}
LOG.debug("Setting " + parentZnode
+ Constants.DEFAULT_ZOOKEEPER_ZNODE_SERVERS_REGISTERED
+ " data [" + sb.toString() + "]");
byte[] data = Bytes.toBytes(sb.toString());
zkc.setData(parentZnode
+ Constants.DEFAULT_ZOOKEEPER_ZNODE_SERVERS_REGISTERED, data,
-1);
}
    /**
     * Reads the running-servers znode children (arming a RunningWatcher on
     * the parent and on each new child), adds any new servers to
     * runningServers and updates the running-count metric. Stale znodes from
     * a previous master incarnation are skipped unless this master is a
     * follower taking over.
     */
    private synchronized void getZkRunning() throws Exception {
        if (LOG.isDebugEnabled())
            LOG.debug("Reading " + parentZnode
                    + Constants.DEFAULT_ZOOKEEPER_ZNODE_SERVERS_RUNNING);
        List<String> children = getChildren(parentZnode
                + Constants.DEFAULT_ZOOKEEPER_ZNODE_SERVERS_RUNNING,
                new RunningWatcher());

        if (!children.isEmpty()) {
            for (String child : children) {
                // If stop-dcs.sh is executed and DCS_MANAGES_ZK then zookeeper
                // is stopped abruptly.
                // Second scenario is when ZooKeeper fails for some reason
                // regardless of whether DCS
                // manages it. When either happens the DcsServer running znodes
                // still exist in ZooKeeper
                // and we see them at next startup. When they eventually timeout
                // we get node deleted events for a server that no longer
                // exists. So, only recognize
                // DcsServer running znodes that have timestamps after last
                // DcsMaster startup.
                // Child format: "hostName:instance:infoPort:startTimestamp".
                Scanner scn = new Scanner(child);
                scn.useDelimiter(":");
                String hostName = scn.next();
                String instance = scn.next();
                int infoPort = Integer.parseInt(scn.next());
                long serverStartTimestamp = Long.parseLong(scn.next());
                scn.close();

                // If we are DcsMaster follower that is taking over from failed
                // one then ignore timestamp issues described above.
                // See MasterLeaderElection.elect()
                if (master.isFollower() == false) {
                    if (serverStartTimestamp < startupTimestamp)
                        continue;
                }

                if (!runningServers.contains(child)) {
                    if (LOG.isDebugEnabled())
                        LOG.debug("Watching running [" + child + "]");
                    // Arm an existence watch on the child so its deletion
                    // triggers RunningWatcher.process -> restartServer().
                    zkc.exists(parentZnode
                            + Constants.DEFAULT_ZOOKEEPER_ZNODE_SERVERS_RUNNING
                            + "/" + child, new RunningWatcher());
                    runningServers.add(child);
                }
            }
            metrics.setTotalRunning(runningServers.size());
        } else {
            metrics.setTotalRunning(0);
        }
    }
    /**
     * For a follower master taking over: queues a RestartHandler for every
     * configured server whose host has no running znode, since such servers
     * would otherwise never be watched (and never restarted).
     * NOTE(review): the method name is a long-standing typo for
     * "getUnwatchedServers"; kept because it is part of the call sequence.
     */
    private void getUnwathedServers() {
        // In some situation when open HA, if DCS Server does not have znode info in zookeeper
        // when DCS Master is starting, then server will never be watched by zookeeper,
        // and if it downs, it will never be restarted.
        // configuredServers
        //    hostName + ":" + lineNum + ":" + serverCount
        // runningServers
        //    hostName + ":" + instance + ":" + infoPort + ":" + serverStartTimestamp
        //    eg : gy26.esgyncn.local:3:24413:1515056285028
        // RestartHandler need to know hostName, instanceNum(lineNum), serverStartTimestamp(for if condition)
        if (!master.isFollower() || runningServers.size() == configuredServers.size()) {
            if (LOG.isDebugEnabled()) {
                if (!master.isFollower()) {
                    LOG.debug("dcs master start normally, no need to add watchers");
                } else {
                    LOG.debug("backup master start, all dcs servers have started, no need to add watchers");
                }
            }
            return;
        }
        boolean found = false;
        for (String configured : configuredServers) {
            Scanner configuredScn = new Scanner(configured);
            configuredScn.useDelimiter(":");
            String hostName = configuredScn.next();
            int instance = Integer.parseInt(configuredScn.next());
            int serverCount = Integer.parseInt(configuredScn.next());
            configuredScn.close();
            // NOTE(review): the match below is by host name only, so a host
            // with several configured instances counts as running when any
            // one instance is up — confirm whether per-instance matching
            // (hostName + instance) was intended here.
            for (String running : runningServers) {
                Scanner runningScn = new Scanner(running);
                runningScn.useDelimiter(":");
                String runningHostName = runningScn.next();
                runningScn.close();
                if (runningHostName.equals(hostName)) {
                    found = true;
                    break;
                }
            }
            if (found) {
                // Reset the flag for the next configured entry.
                found = false;
                continue;
            } else {
                LOG.error("DcsServer [" + hostName + ":" + instance + "] does not started when starting DcsMaster [" + master.getServerName() + "] add to restart queue.");
                // add to the restart handler
                // Synthetic znode path: infoPort 0, timestamp "now" so the
                // RestartHandler's timestamp check passes.
                String simulatePath = hostName + ":" + instance + ":0:" + System.currentTimeMillis();
                RestartHandler handler = new RestartHandler(simulatePath, serverCount);
                restartQueue.add(handler);
            }
        }
    }
private synchronized void restartServer(String znodePath) throws Exception {
String child = znodePath.replace(parentZnode
+ Constants.DEFAULT_ZOOKEEPER_ZNODE_SERVERS_RUNNING + "/", "");
Scanner scn = new Scanner(child);
scn.useDelimiter(":");
String hostName = scn.next();
String instance = scn.next();
int infoPort = Integer.parseInt(scn.next());
long serverStartTimestamp = Long.parseLong(scn.next());
scn.close();
if (LOG.isErrorEnabled())
LOG.error("DcsServer [" + hostName + ":" + instance + "] failed.");
if (runningServers.contains(child)) {
LOG.debug("Found [" + child
+ "], deleting from running servers list");
runningServers.remove(child);
metrics.setTotalRunning(runningServers.size());
}
// Extract the server count for the restarting instance
int count = 1;
boolean found = false;
for (String aServer : configuredServers) {
scn = new Scanner(aServer);
scn.useDelimiter(":");
String srvrHostName = scn.next();
String srvrInstance = scn.next();
int srvrCount = new Integer(scn.next()).intValue();
scn.close();
if (srvrHostName.equals(hostName) && srvrInstance.equals(instance)) {
LOG.debug("Found [" + srvrHostName + ":" + srvrInstance + ":"
+ srvrCount + "] in configured servers");
found = true;
if (srvrCount > 0)
count = srvrCount;
break;
}
}
// For local-servers.sh don't restart anything that's not in the servers
// file
if (!found) {
LOG.info("DcsServer [" + hostName + ":" + instance
+ "] not in servers file. Not restarting");
return;
}
RestartHandler handler = new RestartHandler(child, count);
restartQueue.add(handler);
}
private synchronized void getZkRegistered() throws Exception {
if (LOG.isDebugEnabled())
LOG.debug("Reading " + parentZnode
+ Constants.DEFAULT_ZOOKEEPER_ZNODE_SERVERS_REGISTERED);
List<String> children = getChildren(parentZnode
+ Constants.DEFAULT_ZOOKEEPER_ZNODE_SERVERS_REGISTERED,
new RegisteredWatcher());
if (!children.isEmpty()) {
registeredServers.clear();
for (String child : children) {
if (LOG.isDebugEnabled())
LOG.debug("Registered [" + child + "]");
registeredServers.add(child);
}
metrics.setTotalRegistered(registeredServers.size());
} else {
metrics.setTotalRegistered(0);
}
}
    /**
     * Builds the list of RunningServer objects from runningServers, attaching
     * to each one the RegisteredServer details parsed from the matching
     * registered znodes' data, tallying AVAILABLE/CONNECTING/CONNECTED state
     * counts into metrics, and returning the list sorted by instance number.
     */
    public synchronized List<RunningServer> getServersList() {
        ArrayList<RunningServer> serverList = new ArrayList<RunningServer>();
        Stat stat = null;
        byte[] data = null;
        int totalAvailable = 0;
        int totalConnecting = 0;
        int totalConnected = 0;
        if (LOG.isDebugEnabled())
            LOG.debug("Begin getServersList()");
        if (!runningServers.isEmpty()) {
            for (String aRunningServer : runningServers) {
                // Entry format: "hostName:instance:infoPort:startTimestamp".
                RunningServer runningServer = new RunningServer();
                Scanner scn = new Scanner(aRunningServer);
                scn.useDelimiter(":");
                runningServer.setHostname(scn.next());
                runningServer.setInstance(scn.next());
                runningServer.setInfoPort(Integer.parseInt(scn.next()));
                runningServer.setStartTime(Long.parseLong(scn.next()));
                scn.close();

                if (!registeredServers.isEmpty()) {
                    // Match registered znodes by "hostName:instance:" prefix.
                    for (String aRegisteredServer : registeredServers) {
                        if (aRegisteredServer.contains(runningServer
                                .getHostname()
                                + ":"
                                + runningServer.getInstance() + ":")) {
                            try {
                                RegisteredServer registeredServer = new RegisteredServer();
                                stat = zkc
                                        .exists(parentZnode
                                                + Constants.DEFAULT_ZOOKEEPER_ZNODE_SERVERS_REGISTERED
                                                + "/" + aRegisteredServer,
                                                false);
                                if (stat != null) {
                                    data = zkc
                                            .getData(
                                                    parentZnode
                                                            + Constants.DEFAULT_ZOOKEEPER_ZNODE_SERVERS_REGISTERED
                                                            + "/"
                                                            + aRegisteredServer,
                                                    false, stat);
                                    // Znode data is a colon-delimited record:
                                    // state:timestamp:dialogueId:nid:pid:
                                    // processName:ipAddress:port:clientName:
                                    // clientIpAddress:clientPort:clientAppl
                                    // (order established by the setters below).
                                    scn = new Scanner(new String(data));
                                    scn.useDelimiter(":");
                                    if (LOG.isDebugEnabled())
                                        LOG.debug("getDataRegistered ["
                                                + new String(data) + "]");
                                    registeredServer.setState(scn.next());
                                    String state = registeredServer.getState();
                                    if (state.equals("AVAILABLE"))
                                        totalAvailable += 1;
                                    else if (state.equals("CONNECTING"))
                                        totalConnecting += 1;
                                    else if (state.equals("CONNECTED"))
                                        totalConnected += 1;
                                    registeredServer.setTimestamp(Long
                                            .parseLong(scn.next()));
                                    registeredServer.setDialogueId(scn.next());
                                    registeredServer.setNid(scn.next());
                                    registeredServer.setPid(scn.next());
                                    registeredServer.setProcessName(scn.next());
                                    registeredServer.setIpAddress(scn.next());
                                    registeredServer.setPort(scn.next());
                                    registeredServer.setClientName(scn.next());
                                    registeredServer.setClientIpAddress(scn
                                            .next());
                                    registeredServer.setClientPort(scn.next());
                                    registeredServer.setClientAppl(scn.next());
                                    registeredServer.setIsRegistered();
                                    scn.close();
                                    runningServer.getRegistered().add(
                                            registeredServer);
                                }
                            } catch (Exception e) {
                                // Best-effort: a malformed or vanished znode
                                // skips that entry rather than aborting.
                                e.printStackTrace();
                                if (LOG.isErrorEnabled())
                                    LOG.error("Exception: " + e.getMessage());
                            }
                        }
                    }
                }

                serverList.add(runningServer);
            }
        }

        metrics.setTotalAvailable(totalAvailable);
        metrics.setTotalConnecting(totalConnecting);
        metrics.setTotalConnected(totalConnected);

        // Sort by numeric instance so the UI lists servers in stable order.
        Collections.sort(serverList, new Comparator<RunningServer>() {
            public int compare(RunningServer s1, RunningServer s2) {
                if (s1.getInstanceIntValue() == s2.getInstanceIntValue())
                    return 0;
                return s1.getInstanceIntValue() < s2.getInstanceIntValue() ? -1
                        : 1;
            }
        });

        if (LOG.isDebugEnabled())
            LOG.debug("End getServersList()");
        return serverList;
    }
/**
 * Flattens the running-server/registered-server hierarchy into a list of
 * {@code ServerItem} beans, one entry per registered server. The shared
 * {@code serverItemList} field is cleared and re-populated on every call.
 *
 * @return the refreshed, shared server item list
 */
public synchronized List<ServerItem> getServerItemList() {
    if (LOG.isDebugEnabled())
        LOG.debug("Begin getServerItemList()");
    serverItemList.clear();
    for (RunningServer running : this.getServersList()) {
        for (RegisteredServer registered : running.getRegistered()) {
            serverItemList.add(buildServerItem(running, registered));
        }
    }
    if (LOG.isDebugEnabled())
        LOG.debug("End getServerItemList()");
    return serverItemList;
}

/** Copies the fields of one running/registered server pair into a new ServerItem bean. */
private ServerItem buildServerItem(RunningServer running, RegisteredServer registered) {
    ServerItem item = new ServerItem();
    item.setHostname(running.getHostname());
    item.setinfoPort(running.getInfoPort() + "");
    item.setInstance(running.getInstance());
    item.setStartTime(running.getStartTimeAsDate());
    item.setIsRegistered(registered.getIsRegistered());
    item.setState(registered.getState());
    item.setNid(registered.getNid());
    item.setPid(registered.getPid());
    item.setProcessName(registered.getProcessName());
    item.setIpAddress(registered.getIpAddress());
    item.setPort(registered.getPort());
    item.setClientName(registered.getClientName());
    item.setClientAppl(registered.getClientAppl());
    item.setClientIpAddress(registered.getClientIpAddress());
    item.setClientPort(registered.getClientPort());
    return item;
}
/**
 * Executes the given repository command via the T4 driver and returns the
 * resulting rows as a list of JSONObjects. An empty list is returned when
 * the query produced no result.
 *
 * @param command either a well-known repository table key or a raw query string
 * @return the rows of the repository query, possibly empty, never null
 */
public synchronized List<JSONObject> getRepositoryItemList(String command) {
    if (LOG.isDebugEnabled())
        LOG.debug("Begin getRepositoryItemList()");
    List<JSONObject> rows = new ArrayList<JSONObject>();
    JSONArray reposList = getRepositoryListT4Driver(command);
    if (reposList != null) {
        try {
            for (int idx = 0; idx < reposList.length(); idx++) {
                rows.add(reposList.getJSONObject(idx));
            }
        } catch (Exception e) {
            // best effort: keep whatever rows were converted before the failure
            e.printStackTrace();
            if (LOG.isErrorEnabled())
                LOG.error(e.getMessage());
        }
    }
    if (LOG.isDebugEnabled())
        LOG.debug("End getRepositoryItemList()");
    return rows;
}
/**
 * Resolves the given command to a repository query string. Well-known table
 * keys are translated through the configuration (falling back to built-in
 * defaults); any other value is treated as a literal query.
 *
 * <p>NOTE: the actual query execution is currently commented out, so this
 * method always returns {@code null}.
 *
 * @param command repository table key or raw query
 * @return the query result, currently always {@code null}
 */
public synchronized JSONArray getRepositoryListT4Driver(String command) {
    if (LOG.isDebugEnabled())
        LOG.debug("Begin getRepositoryListT4Driver()");
    JSONArray reposList = null;
    final String query;
    if (command.equals(Constants.TRAFODION_REPOS_METRIC_SESSION_TABLE)) {
        query = conf.get(Constants.TRAFODION_REPOS_METRIC_SESSION_TABLE_QUERY,
                Constants.DEFAULT_TRAFODION_REPOS_METRIC_SESSION_TABLE_QUERY);
    } else if (command.equals(Constants.TRAFODION_REPOS_METRIC_QUERY_TABLE)) {
        query = conf.get(Constants.TRAFODION_REPOS_METRIC_QUERY_TABLE_QUERY,
                Constants.DEFAULT_TRAFODION_REPOS_METRIC_QUERY_TABLE_QUERY);
    } else if (command.equals(Constants.TRAFODION_REPOS_METRIC_QUERY_AGGR_TABLE)) {
        query = conf.get(Constants.TRAFODION_REPOS_METRIC_QUERY_AGGR_TABLE_QUERY,
                Constants.DEFAULT_TRAFODION_REPOS_METRIC_QUERY_AGGR_TABLE_QUERY);
    } else {
        query = command;
    }
    if (LOG.isDebugEnabled())
        LOG.debug("command [" + query + "]");
    // reposList = jdbcT4Util.executeQuery(query);
    if (LOG.isDebugEnabled())
        LOG.debug("End getRepositoryListT4Driver()");
    return reposList;
}
/** @return the parent znode path under which this component's ZooKeeper state is stored */
public String getZKParentZnode() {
    return parentZnode;
}
/** @return the shared ZooKeeper client used by this component */
public ZkClient getZkClient() {
    return zkc;
}
/** @return the JDBC T4 utility used for repository queries */
public JdbcT4Util getJdbcT4Util() {
    return jdbcT4Util;
}
}
|
apache/usergrid | 36,867 | stack/core/src/main/java/org/apache/usergrid/corepersistence/CpEntityManagerFactory.java | /*
* Copyright 2014 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.usergrid.corepersistence;
import com.google.common.base.Optional;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import com.google.common.util.concurrent.UncheckedExecutionException;
import com.google.inject.Injector;
import com.google.inject.Key;
import com.google.inject.TypeLiteral;
import org.apache.commons.lang.StringUtils;
import org.apache.usergrid.corepersistence.asyncevents.AsyncEventService;
import org.apache.usergrid.corepersistence.index.CollectionSettingsFactory;
import org.apache.usergrid.corepersistence.index.ReIndexRequestBuilder;
import org.apache.usergrid.corepersistence.index.ReIndexService;
import org.apache.usergrid.corepersistence.service.CollectionService;
import org.apache.usergrid.corepersistence.service.ConnectionService;
import org.apache.usergrid.corepersistence.util.CpNamingUtils;
import org.apache.usergrid.exception.ConflictException;
import org.apache.usergrid.locking.LockManager;
import org.apache.usergrid.mq.QueueManagerFactory;
import org.apache.usergrid.persistence.*;
import org.apache.usergrid.persistence.actorsystem.ActorSystemFig;
import org.apache.usergrid.persistence.actorsystem.ActorSystemManager;
import org.apache.usergrid.persistence.cassandra.CassandraService;
import org.apache.usergrid.persistence.cassandra.CounterUtils;
import org.apache.usergrid.persistence.cassandra.Setup;
import org.apache.usergrid.persistence.collection.EntityCollectionManager;
import org.apache.usergrid.persistence.collection.exception.CollectionRuntimeException;
import org.apache.usergrid.persistence.collection.serialization.impl.migration.EntityIdScope;
import org.apache.usergrid.persistence.collection.uniquevalues.UniqueValuesService;
import org.apache.usergrid.persistence.core.metrics.MetricsFactory;
import org.apache.usergrid.persistence.core.migration.data.MigrationDataProvider;
import org.apache.usergrid.persistence.core.scope.ApplicationScope;
import org.apache.usergrid.persistence.core.scope.ApplicationScopeImpl;
import org.apache.usergrid.persistence.core.util.Health;
import org.apache.usergrid.persistence.entities.Application;
import org.apache.usergrid.persistence.exceptions.ApplicationAlreadyExistsException;
import org.apache.usergrid.persistence.exceptions.DuplicateUniquePropertyExistsException;
import org.apache.usergrid.persistence.exceptions.EntityNotFoundException;
import org.apache.usergrid.persistence.graph.*;
import org.apache.usergrid.persistence.graph.impl.SimpleSearchByEdgeType;
import org.apache.usergrid.persistence.index.EntityIndex;
import org.apache.usergrid.persistence.model.entity.Id;
import org.apache.usergrid.persistence.model.entity.SimpleId;
import org.apache.usergrid.persistence.model.util.UUIDGenerator;
import org.apache.usergrid.persistence.qakka.App;
import org.apache.usergrid.persistence.qakka.distributed.DistributedQueueService;
import org.apache.usergrid.persistence.qakka.distributed.impl.QueueActorRouterProducer;
import org.apache.usergrid.persistence.qakka.distributed.impl.QueueSenderRouterProducer;
import org.apache.usergrid.persistence.qakka.distributed.impl.QueueWriterRouterProducer;
import org.apache.usergrid.utils.UUIDUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import rx.Observable;
import java.util.*;
import static java.lang.String.CASE_INSENSITIVE_ORDER;
import static org.apache.usergrid.persistence.Schema.PROPERTY_NAME;
import static org.apache.usergrid.persistence.Schema.TYPE_APPLICATION;
/**
* Implement good-old Usergrid EntityManagerFactory with the new-fangled Core Persistence API.
* This is where we keep track of applications and system properties.
*/
public class CpEntityManagerFactory implements EntityManagerFactory, ApplicationContextAware {
private static final Logger logger = LoggerFactory.getLogger( CpEntityManagerFactory.class );
private final EntityManagerFig entityManagerFig;
private final ActorSystemFig actorSystemFig;
private ApplicationContext applicationContext;
private Setup setup = null;
EntityManager managementAppEntityManager = null;
// cache of already instantiated entity managers
private final String ENTITY_MANAGER_CACHE_SIZE = "entity.manager.cache.size";
private final LoadingCache<UUID, EntityManager> entityManagers;
private final ApplicationIdCache applicationIdCache;
Application managementApp = null;
private ManagerCache managerCache;
private CassandraService cassandraService;
private CounterUtils counterUtils;
private Injector injector;
private final ReIndexService reIndexService;
private final MetricsFactory metricsFactory;
private final AsyncEventService indexService;
private final CollectionService collectionService;
private final ConnectionService connectionService;
private final GraphManagerFactory graphManagerFactory;
private final CollectionSettingsFactory collectionSettingsFactory;
private ActorSystemManager actorSystemManager;
private final LockManager lockManager;
private final QueueManagerFactory queueManagerFactory;
public static final String MANAGEMENT_APP_INIT_MAXRETRIES= "management.app.init.max-retries";
public static final String MANAGEMENT_APP_INIT_INTERVAL = "management.app.init.interval";
/**
 * Wires the factory from the Guice injector, optionally boots the Akka actor
 * system, builds the entity-manager cache, and finally blocks until the
 * management application is reachable.
 *
 * @param cassandraService low-level Cassandra access used by entity managers
 * @param counterUtils     counter helper shared with entity managers
 * @param injector         Guice injector supplying all remaining collaborators
 */
public CpEntityManagerFactory(
    final CassandraService cassandraService, final CounterUtils counterUtils, final Injector injector ) {
    this.cassandraService = cassandraService;
    this.counterUtils = counterUtils;
    this.injector = injector;
    this.reIndexService = injector.getInstance(ReIndexService.class);
    this.entityManagerFig = injector.getInstance(EntityManagerFig.class);
    this.actorSystemFig = injector.getInstance( ActorSystemFig.class );
    this.managerCache = injector.getInstance( ManagerCache.class );
    this.metricsFactory = injector.getInstance( MetricsFactory.class );
    this.indexService = injector.getInstance( AsyncEventService.class );
    this.graphManagerFactory = injector.getInstance( GraphManagerFactory.class );
    this.collectionService = injector.getInstance( CollectionService.class );
    this.connectionService = injector.getInstance( ConnectionService.class );
    this.collectionSettingsFactory = injector.getInstance( CollectionSettingsFactory.class );
    Properties properties = cassandraService.getProperties();
    this.entityManagers = createEntityManagerCache( properties );
    logger.info("EntityManagerFactoring starting...");
    // Akka-based unique values / distributed queue support is optional
    if ( actorSystemFig.getEnabled() ) {
        try {
            logger.info("Akka cluster starting...");
            // TODO: fix this kludge
            injector.getInstance( App.class );
            this.actorSystemManager = injector.getInstance( ActorSystemManager.class );
            actorSystemManager.registerRouterProducer( injector.getInstance( UniqueValuesService.class ) );
            actorSystemManager.registerRouterProducer( injector.getInstance( QueueActorRouterProducer.class ) );
            actorSystemManager.registerRouterProducer( injector.getInstance( QueueWriterRouterProducer.class ) );
            actorSystemManager.registerRouterProducer( injector.getInstance( QueueSenderRouterProducer.class ) );
            actorSystemManager.start();
            actorSystemManager.waitForClientActor();
            DistributedQueueService distributedQueueService =
                injector.getInstance( DistributedQueueService.class );
            distributedQueueService.init();
        } catch (Throwable t) {
            // a broken actor system is fatal: rethrow so the container fails fast
            logger.error("Error starting Akka", t);
            throw t;
        }
    }
    this.lockManager = injector.getInstance( LockManager.class );
    this.queueManagerFactory = injector.getInstance( QueueManagerFactory.class );
    // this line always needs to be last due to the temporary circular dependency until spring is removed
    this.applicationIdCache = injector.getInstance(ApplicationIdCacheFactory.class).getInstance(
        getManagementEntityManager() );
    checkManagementApp( properties );
}
/**
 * Builds the per-application EntityManager cache. The loader eagerly fetches the
 * application entity so that broken apps are not cached, with two exceptions:
 * the management app falls back to a previously cached instance, and a missing
 * keyspace ("bootstrapping") is tolerated because setup has not run yet.
 *
 * @param properties used to read the optional {@code entity.manager.cache.size}
 * @return a size-bounded Guava LoadingCache keyed by application UUID
 */
private LoadingCache<UUID, EntityManager> createEntityManagerCache(Properties properties) {
    int entityManagerCacheSize = 100;
    try {
        entityManagerCacheSize = Integer.parseInt( properties.getProperty( ENTITY_MANAGER_CACHE_SIZE, "100" ));
    } catch ( Exception e ) {
        logger.error("Error parsing " + ENTITY_MANAGER_CACHE_SIZE + ". Will use " + entityManagerCacheSize, e );
    }
    return CacheBuilder.newBuilder()
        .maximumSize(entityManagerCacheSize)
        .build(new CacheLoader<UUID, EntityManager>() {
            public EntityManager load( UUID appId ) { // no checked exception
                // create new entity manager and pre-fetch its application
                EntityManager entityManager = _getEntityManager( appId );
                Application app = null;
                Throwable throwable = null;
                try {
                    app = entityManager.getApplication();
                } catch (Throwable t) {
                    throwable = t;
                }
                // the management app is a special case
                if ( CpNamingUtils.MANAGEMENT_APPLICATION_ID.equals( appId ) ) {
                    if ( app != null ) {
                        // we successfully fetched up the management app, cache it for a rainy day
                        managementAppEntityManager = entityManager;
                    } else if ( managementAppEntityManager != null ) {
                        // failed to fetch management app, use cached one
                        entityManager = managementAppEntityManager;
                        logger.error("Failed to fetch management app");
                    }
                }
                // missing keyspace means we have not done bootstrap yet
                final boolean isBootstrapping;
                if ( throwable instanceof CollectionRuntimeException ) {
                    CollectionRuntimeException cre = (CollectionRuntimeException) throwable;
                    isBootstrapping = cre.isBootstrapping();
                } else {
                    isBootstrapping = false;
                }
                // work around for https://issues.apache.org/jira/browse/USERGRID-1291
                // throw exception so that we do not cache
                // TODO: determine how application name can intermittently be null
                if ( app != null && app.getName() == null ) {
                    throw new RuntimeException( "Name is null for application " + appId, throwable );
                }
                if ( app == null && !isBootstrapping ) {
                    throw new RuntimeException( "Error getting application " + appId, throwable );
                } // else keyspace is missing because setup/bootstrap not done yet
                return entityManager;
            }
        });
}
/**
 * Blocks construction until the management application can be read, retrying up
 * to {@code management.app.init.max-retries} times with
 * {@code management.app.init.interval} ms between attempts. If the store is
 * still bootstrapping (keyspace missing) the check is skipped. On final failure
 * a RuntimeException is thrown, which intentionally prevents the WAR from
 * being deployed against a broken store.
 *
 * @param properties configuration carrying the retry/interval overrides
 */
private void checkManagementApp(Properties properties) {
    int maxRetries = 100;
    try {
        maxRetries = Integer.parseInt( properties.getProperty( MANAGEMENT_APP_INIT_MAXRETRIES, "100" ));
    } catch ( Exception e ) {
        logger.error("Error parsing " + MANAGEMENT_APP_INIT_MAXRETRIES + ". Will use " + maxRetries, e );
    }
    int interval = 1000;
    try {
        interval = Integer.parseInt( properties.getProperty( MANAGEMENT_APP_INIT_INTERVAL, "1000" ));
    } catch ( Exception e ) {
        // BUGFIX: previously logged maxRetries here instead of the interval fallback
        logger.error("Error parsing " + MANAGEMENT_APP_INIT_INTERVAL + ". Will use " + interval, e );
    }
    // hold up construction until we can access the management app
    int retries = 0;
    boolean managementAppFound = false;
    boolean bootstrapping = false;
    // cause classes already logged with a stack trace; repeats log message-only
    Set<Class> seenBefore = new HashSet<>(10);
    while ( !managementAppFound && retries++ < maxRetries ) {
        try {
            // bypass entity manager cache and get managementApp
            managementApp = _getEntityManager( getManagementAppId() ).getApplication();
            managementAppFound = true;
        } catch ( Throwable t ) {
            if ( t instanceof CollectionRuntimeException ) {
                CollectionRuntimeException cre = (CollectionRuntimeException)t;
                if ( cre.isBootstrapping() ) {
                    // we're bootstrapping, ignore this and continue
                    bootstrapping = true;
                    break;
                }
            }
            Throwable cause = t;
            // there was an error, be as informative as possible
            StringBuilder sb = new StringBuilder();
            sb.append(retries).append(": Error (");
            if ( t instanceof UncheckedExecutionException ) {
                UncheckedExecutionException uee = (UncheckedExecutionException)t;
                if ( uee.getCause() instanceof RuntimeException ) {
                    // BUGFIX: the nested cause may be null; fall back to the runtime exception itself
                    cause = uee.getCause().getCause() != null ? uee.getCause().getCause() : uee.getCause();
                    sb.append(cause.getClass().getSimpleName()).append(") ")
                      .append(uee.getCause().getMessage());
                } else {
                    cause = uee.getCause();
                    sb.append(cause.getClass().getSimpleName()).append(") ").append(t.getMessage());
                }
            } else {
                // BUGFIX: t.getCause() may be null and previously caused an NPE while reporting
                Class<?> causeClass = t.getCause() != null ? t.getCause().getClass() : t.getClass();
                sb.append(causeClass.getSimpleName()).append(") ").append(t.getMessage());
            }
            String msg = sb.toString();
            if ( !seenBefore.contains( cause.getClass() ) ) {
                logger.error( msg, t);
            } else {
                logger.error(msg);
            }
            seenBefore.add( cause.getClass() );
            try { Thread.sleep( interval ); } catch (InterruptedException ignored) {}
        }
    }
    if ( !managementAppFound && !bootstrapping ) {
        // exception here will prevent WAR from being deployed
        throw new RuntimeException( "Unable to get management app after " + retries + " retries" );
    }
}
/** @return the counter helper shared with entity managers */
public CounterUtils getCounterUtils() {
    return counterUtils;
}
/** @return the low-level Cassandra service backing this factory */
public CassandraService getCassandraService() {
    return cassandraService;
}
/**
 * Ensures the management application entity exists, creating it (and queueing
 * its index initialization) if missing. Any failure is fatal and rethrown as a
 * RuntimeException because nothing works without the management app.
 */
private void initMgmtAppInternal() {
    EntityManager em = getEntityManager(getManagementAppId());
    indexService.queueInitializeApplicationIndex(CpNamingUtils.getApplicationScope(getManagementAppId()));
    try {
        if ( em.getApplication() == null ) {
            logger.info("Creating management application");
            // IDIOM: was a raw Map; use the parameterized type to avoid unchecked warnings
            Map<String, Object> mgmtAppProps = new HashMap<>();
            mgmtAppProps.put(PROPERTY_NAME, CassandraService.MANAGEMENT_APPLICATION);
            em.create( getManagementAppId(), TYPE_APPLICATION, mgmtAppProps);
            em.getApplication();
        }
    } catch (Exception ex) {
        throw new RuntimeException("Fatal error creating management application", ex);
    }
}
/** @return an observable over every entity id scope known to the migration data provider */
private Observable<EntityIdScope> getAllEntitiesObservable(){
    return injector.getInstance( Key.get(new TypeLiteral< MigrationDataProvider<EntityIdScope>>(){})).getData();
}
/**
 * Returns the cached EntityManager for the given application. If the cache
 * lookup fails (e.g. the loader refused to cache a broken app) the error is
 * logged and a fresh, uncached instance is returned instead.
 */
@Override
public EntityManager getEntityManager(UUID applicationId) {
    try {
        return entityManagers.get( applicationId );
    }
    catch ( Throwable t ) {
        logger.error("Error getting entity manager", t);
    }
    // fall back to an uncached instance rather than failing the caller
    return _getEntityManager(applicationId);
}
/**
 * Constructs a new CpEntityManager for the given application, bypassing the
 * cache. Callers that need caching should use {@link #getEntityManager(UUID)}.
 */
private EntityManager _getEntityManager( UUID applicationId ) {
    EntityManager em = new CpEntityManager(
        cassandraService,
        counterUtils,
        indexService,
        managerCache,
        metricsFactory,
        actorSystemFig,
        entityManagerFig,
        graphManagerFactory,
        collectionService,
        connectionService,
        collectionSettingsFactory,
        applicationId,
        queueManagerFactory);
    return em;
}
/** Convenience overload: creates an application with a generated id, no extra properties, not a migration. */
@Override
public Entity createApplicationV2(String organizationName, String name) throws Exception {
    return createApplicationV2( organizationName, name, null, null, false);
}
/**
 * Creates a new application under the given organization, failing with
 * ApplicationAlreadyExistsException when the org/name pair is already taken.
 * A null applicationId is replaced by a new time-based UUID.
 */
@Override
public Entity createApplicationV2(
    String orgName, String name, UUID applicationId, Map<String, Object> properties, boolean forMigration) throws Exception {
    String appName = buildAppName( orgName, name );
    final UUID appId = applicationIdCache.getApplicationId( appName );
    if ( appId != null ) {
        throw new ApplicationAlreadyExistsException( name );
    }
    applicationId = applicationId==null ? UUIDGenerator.newTimeUUID() : applicationId;
    if (logger.isDebugEnabled()) {
        logger.debug("New application orgName {} orgAppName {} id {} ",
            orgName, name, applicationId.toString());
    }
    return initializeApplicationV2( orgName, applicationId, appName, properties, forMigration);
}
/**
 * Builds the canonical lower-cased "org/app" name. A name that already
 * contains '/' is treated as fully qualified and used as-is.
 */
private String buildAppName( String organizationName, String name ) {
    String qualified = name.contains("/") ? name : organizationName + "/" + name;
    return StringUtils.lowerCase(qualified);
}
/**
 * Initializes a new application: creates its index, writes the Application
 * entity into the new app, resets roles (unless migrating, where role data
 * already exists), and records an application_info entity in the management app.
 *
 * @return the newly created application_info Entity (in the management app)
 * @throws ApplicationAlreadyExistsException when the org/app name is taken
 */
@Override
public Entity initializeApplicationV2(String organizationName, final UUID applicationId, String name,
                                      Map<String, Object> properties, boolean forMigration) throws Exception {
    // Ensure the management application is initialized
    initMgmtAppInternal();
    // Get entity managers by bypassing the entity manager cache because it expects apps to already exist
    final EntityManager managementEm = _getEntityManager( getManagementAppId() );
    EntityManager appEm = _getEntityManager(applicationId);
    final String appName = buildAppName(organizationName, name);
    // check for pre-existing application
    if ( lookupApplication( appName ) != null ) {
        throw new ApplicationAlreadyExistsException( appName );
    }
    // Initialize the index for this new application
    appEm.initializeIndex();
    indexService.queueInitializeApplicationIndex(CpNamingUtils.getApplicationScope(applicationId));
    if ( properties == null ) {
        properties = new TreeMap<>( CASE_INSENSITIVE_ORDER);
    }
    properties.put( PROPERTY_NAME, appName );
    appEm.create(applicationId, TYPE_APPLICATION, properties);
    // only reset roles if this application isn't being migrated (meaning dictionary and role data already exists)
    if(!forMigration){
        appEm.resetRoles();
    }
    // create application info entity in the management app
    Map<String, Object> appInfoMap = new HashMap<String, Object>() {{
        put( PROPERTY_NAME, appName );
        put( "org", organizationName );
    }};
    Entity appInfo;
    try {
        appInfo = managementEm.create(new SimpleId(applicationId,CpNamingUtils.APPLICATION_INFO), appInfoMap);
    } catch (DuplicateUniquePropertyExistsException e) {
        // a concurrent create won the race on the unique name
        throw new ApplicationAlreadyExistsException(appName);
    }
    // evict app Id from cache
    applicationIdCache.evictAppId(appName);
    logger.info("Initialized application {}, uuid {}", appName, appInfo.getUuid().toString());
    return appInfo;
}
/**
 * Delete Application.
 *
 * <p>The Application Entity is moved to a Deleted_Applications collection and the
 * Application index will be removed.
 *
 * <p>TODO: add scheduled task that can completely delete all deleted application data.</p>
 *
 * @param applicationId UUID of Application to be deleted.
 */
@Override
public void deleteApplication(UUID applicationId) throws Exception {
    // find application_info for application to delete; block until the migration completes
    migrateAppInfo(applicationId, CpNamingUtils.APPLICATION_INFO, CpNamingUtils.DELETED_APPLICATION_INFOS, CpNamingUtils.DELETED_APPLICATION_INFO).toBlocking()
        .lastOrDefault( null );
}
//TODO: return status for restore
/**
 * Restores a previously deleted application: moves its deleted_application_info
 * back to application_info, kicks off a full re-index, and returns the restored
 * application_info entity from the management app.
 */
@Override
public Entity restoreApplication(UUID applicationId) throws Exception {
    // get the deleted_application_info for the deleted app
    return (Entity) migrateAppInfo( applicationId, CpNamingUtils.DELETED_APPLICATION_INFO,
        CpNamingUtils.APPLICATION_INFOS , CpNamingUtils.APPLICATION_INFO ).lastOrDefault( null )
        .map( appInfo -> {
            //start the index rebuild
            final ReIndexRequestBuilder builder = reIndexService.getBuilder().withApplicationId( applicationId );
            reIndexService.rebuildIndex( builder );
            //load the entity
            final EntityManager managementEm = getEntityManager( getManagementAppId() );
            try {
                return managementEm.get( new SimpleEntityRef( CpNamingUtils.APPLICATION_INFO, applicationId ) );
            }
            catch ( Exception e ) {
                logger.error( "Failed to get entity", e );
                throw new RuntimeException( e );
            }
        } )
        .toBlocking().lastOrDefault(null);
}
// @Override
/**
 * Migrate the application from one type to another. Used in delete and restore.
 * Copies the old app-info entity (and its connections) into the target
 * collection, writes the new graph edge, deletes the old app's index, and on
 * completion deletes the old entity and refreshes the management index.
 *
 * @param applicationUUID The applicationUUID
 * @param deleteTypeName The type to use on the delete
 * @param createCollectionName The name of the collection to write the entity into
 * @param createTypeName The type to use on the create
 * @return an Observable that completes when the migration has finished
 * @throws Exception when the source entity is missing or a name conflict exists
 */
private Observable migrateAppInfo(final UUID applicationUUID, final String deleteTypeName, final String createCollectionName, final String createTypeName ) throws Exception {
    final ApplicationScope managementAppScope = CpNamingUtils.getApplicationScope(CpNamingUtils.MANAGEMENT_APPLICATION_ID);
    final EntityManager managementEm = getEntityManager(CpNamingUtils.MANAGEMENT_APPLICATION_ID);
    //the application id we will be removing
    final Id deleteApplicationId = new SimpleId(applicationUUID, deleteTypeName );
    //the application id we'll be creating
    final Id createApplicationId = new SimpleId( applicationUUID, createTypeName );
    //the application scope of the deleted app to clean it's index
    final ApplicationScope deleteApplicationScope = new ApplicationScopeImpl(deleteApplicationId);
    Entity oldAppEntity = managementEm.get(new SimpleEntityRef( deleteTypeName, applicationUUID));
    if(oldAppEntity == null){
        throw new EntityNotFoundException( String.format("Could not find application with UUID '%s'", applicationUUID) );
    }
    // ensure that there is not already a deleted app with the same name
    final EntityRef alias = managementEm.getAlias( createCollectionName, oldAppEntity.getName() );
    if ( alias != null ) {
        throw new ConflictException( "Cannot delete app with same name as already deleted app" );
    }
    // make a copy of the app to delete application_info entity
    // and put it in a deleted_application_info collection
    final Entity newAppEntity =
        managementEm.create( new SimpleId( applicationUUID, createTypeName ), oldAppEntity.getProperties() );
    // copy its connections too
    final Set<String> connectionTypes = managementEm.getConnectionTypes( oldAppEntity );
    Observable copyConnections = Observable.from( connectionTypes ).doOnNext( connType -> {
        try {
            final Results connResults =
                managementEm.getTargetEntities( oldAppEntity, connType, null, Query.Level.ALL_PROPERTIES );
            connResults.getEntities().forEach( entity -> {
                try {
                    managementEm.createConnection( newAppEntity, connType, entity );
                }
                catch ( Exception e ) {
                    throw new RuntimeException( e );
                }
            } );
        }
        catch ( Exception e ) {
            throw new RuntimeException( e );
        }
    } );
    final Id managementAppId = CpNamingUtils.getManagementApplicationId();
    final EntityIndex aei = getManagementIndex();
    final GraphManager managementGraphManager = managerCache.getGraphManager(managementAppScope);
    // edge linking the management app to the migrated app-info in its new collection
    final Edge createEdge = CpNamingUtils.createCollectionEdge(managementAppId, createCollectionName, createApplicationId);
    final Observable createNodeGraph = managementGraphManager.writeEdge(createEdge);
    final Observable deleteAppFromIndex = aei.deleteApplication();
    // run the three migration streams concurrently; cleanup happens once all complete
    return Observable
        .merge( copyConnections, createNodeGraph, deleteAppFromIndex )
        .doOnCompleted( () -> {
            try {
                if ( oldAppEntity != null ) {
                    managementEm.delete( oldAppEntity );
                    applicationIdCache.evictAppId( oldAppEntity.getName() );
                }
                EntityIndex ei = getManagementIndex();
                ei.refreshAsync().toBlocking().last();
            } catch (Exception e) {
                throw new RuntimeException(e);
            }
        } );
}
/** Importing applications is not implemented. Always throws UnsupportedOperationException. */
@Override
public UUID importApplication(
    String organization, UUID applicationId,
    String name, Map<String, Object> properties) throws Exception {
    throw new UnsupportedOperationException("Not supported yet.");
}
/** Looks up an application UUID by its "org/app" name via the id cache; null when absent. */
public UUID lookupApplication(String orgAppName ) throws Exception {
    return applicationIdCache.getApplicationId(orgAppName);
}
/** @return name-to-UUID map of all live applications */
@Override
public Map<String, UUID> getApplications() throws Exception {
    return getApplications( CpNamingUtils.getEdgeTypeFromCollectionName( CpNamingUtils.APPLICATION_INFOS ) );
}
/** @return name-to-UUID map of all deleted applications */
@Override
public Map<String, UUID> getDeletedApplications() throws Exception {
    return getApplications( CpNamingUtils.getEdgeTypeFromCollectionName( CpNamingUtils.DELETED_APPLICATION_INFOS ) );
}
/**
 * Loads all applications reachable from the management app via the given graph
 * edge type, batch-fetching their app-info entities (100 at a time) and
 * collecting name-to-UUID pairs. Blocks until the graph traversal completes.
 *
 * @param edgeType graph edge type (live or deleted application_info edges)
 * @return map of application name to application UUID
 */
private Map<String, UUID> getApplications(final String edgeType) throws Exception {
    ApplicationScope appScope =
        CpNamingUtils.getApplicationScope(CpNamingUtils.MANAGEMENT_APPLICATION_ID);
    GraphManager gm = managerCache.getGraphManager(appScope);
    EntityManager managementEM = getEntityManager(CpNamingUtils.MANAGEMENT_APPLICATION_ID);
    Application managementApp = managementEM.getApplication();
    if( managementApp == null ) {
        throw new RuntimeException("Management App "
            + CpNamingUtils.MANAGEMENT_APPLICATION_ID + " should never be null");
    }
    Id managementId = new SimpleId( managementApp.getUuid(), managementApp.getType() );
    if (logger.isDebugEnabled()) {
        logger.debug("getApplications(): Loading edges of edgeType {} from {}:{}",
            edgeType, managementId.getType(), managementId.getUuid());
    }
    Observable<MarkedEdge> edges = gm.loadEdgesFromSource(
        new SimpleSearchByEdgeType( managementId, edgeType, Long.MAX_VALUE, SearchByEdgeType.Order.DESCENDING,
            Optional.<Edge>absent() ) );
    final EntityCollectionManager ecm = managerCache.getEntityCollectionManager( appScope );
    //buffer our edges and batch fetch the app infos for faster I/O
    return edges.map( edge -> {
        return edge.getTargetNode();
    } ).buffer( 100 ).flatMap( entityIds -> {
        return ecm.load( entityIds );
    } )
    .flatMap( entitySet -> Observable.from( entitySet.getEntities() ) )
    //collect all the app infos into a single map for return
    .collect( () -> new HashMap<String, UUID>(), ( appMap, entity ) -> {
        // skip tombstoned/missing entities
        if ( !entity.getEntity().isPresent() ) {
            return;
        }
        final org.apache.usergrid.persistence.model.entity.Entity entityData =
            entity.getEntity().get();
        final UUID applicationId = entity.getId().getUuid();
        final String applicationName = ( String ) entityData.getField( PROPERTY_NAME ).getValue();
        appMap.put( applicationName , applicationId );
    } ).toBlocking().last();
}
/** Initializes the database schema (without force) and the lock manager. */
@Override
public void setup() throws Exception {
    getSetup().initSchema(false);
    lockManager.setup();
}
/** Full bootstrap: schema init, data migrations, then management-app creation and init. */
@Override
public void bootstrap() throws Exception {
    // Always make sure the database schema is initialized
    getSetup().initSchema(false);
    // Roll the new 2.x Migration classes to the latest version supported
    getSetup().runDataMigration();
    // Make sure the management application is created
    initMgmtAppInternal();
    // Ensure management app is initialized
    getSetup().initMgmtApp();
}
/**
 * Reads the system-wide service properties stored in the management app's
 * "propertymaps" collection. Returns an empty map when the collection is
 * missing, empty, or the query fails.
 *
 * @return service property name/value pairs, never null
 */
@Override
public Map<String, String> getServiceProperties() {
    Map<String, String> props = new HashMap<String,String>();
    EntityManager em = getEntityManager(getManagementAppId());
    Query q = Query.fromQL("select *");
    Results results = null;
    try {
        results = em.searchCollection( em.getApplicationRef(), "propertymaps", q);
    } catch (Exception ex) {
        logger.error("Error getting system properties", ex);
    }
    if ( results == null || results.isEmpty() ) {
        return props;
    }
    org.apache.usergrid.persistence.Entity e = results.getEntity();
    // BUGFIX: previously read props.get(key) from the still-empty target map,
    // which always threw NPE; read the entity's own property values instead.
    for ( Map.Entry<String, Object> entry : e.getProperties().entrySet() ) {
        Object value = entry.getValue();
        if ( value != null ) {
            props.put( entry.getKey(), value.toString() );
        }
    }
    return props;
}
/**
 * Creates or updates the service-properties entity in the management app's
 * "propertymaps" collection, overwriting the given keys with string values.
 *
 * @return true on success, false when the query or update failed
 */
@Override
public boolean updateServiceProperties(Map<String, String> properties) {
    EntityManager em = getEntityManager(getManagementAppId());
    Query q = Query.fromQL("select *");
    Results results = null;
    try {
        results = em.searchCollection( em.getApplicationRef(), "propertymaps", q);
    } catch (Exception ex) {
        logger.error("Error getting system properties", ex);
        return false;
    }
    org.apache.usergrid.persistence.Entity propsEntity = null;
    if ( !results.isEmpty() ) {
        propsEntity = results.getEntity();
    } else {
        // first write: create the singleton property-map entity
        propsEntity = EntityFactory.newEntity( UUIDUtils.newTimeUUID(), "propertymap");
    }
    // intentionally going only one-level deep into fields and treating all
    // values as strings because that is all we need for service properties
    for ( String key : properties.keySet() ) {
        propsEntity.setProperty(key, properties.get(key).toString());
    }
    try {
        em.update( propsEntity );
    } catch (Exception ex) {
        logger.error("Error updating service properties", ex);
        return false;
    }
    return true;
}
/** Convenience wrapper: sets a single service property via {@link #updateServiceProperties}. */
@Override
public boolean setServiceProperty(final String name, final String value) {
    return updateServiceProperties(new HashMap<String, String>() {{
        put(name, value);
    }});
}
/**
 * Removes a single service property from the property-map entity.
 *
 * @param name property key to delete
 * @return true on success, false when the lookup or update failed
 */
@Override
public boolean deleteServiceProperty(String name) {
    EntityManager em = getEntityManager(getManagementAppId());
    Query q = Query.fromQL( "select *");
    Results results = null;
    try {
        results = em.searchCollection( em.getApplicationRef(), "propertymaps", q);
    } catch (Exception ex) {
        logger.error("Error getting service property for delete of property: {}", name, ex);
        return false;
    }
    org.apache.usergrid.persistence.Entity propsEntity = null;
    if ( !results.isEmpty() ) {
        propsEntity = results.getEntity();
    } else {
        // nothing stored yet; operate on a fresh entity so the update below is a no-op create
        propsEntity = EntityFactory.newEntity( UUIDUtils.newTimeUUID(), "propertymap");
    }
    try {
        // clearDataset removes the named property from the entity's dataset
        ((AbstractEntity)propsEntity).clearDataset( name );
        em.update( propsEntity );
    } catch (Exception ex) {
        logger.error("Error deleting service property orgAppName: {}", name, ex);
        return false;
    }
    return true;
}
/** @return the Spring application context injected via {@link #setApplicationContext} */
public ApplicationContext getApplicationContext() {
    return applicationContext;
}
/** Spring callback; stores the context only — setup is intentionally not triggered here. */
@Override
public void setApplicationContext( ApplicationContext applicationContext ) throws BeansException {
    this.applicationContext = applicationContext;
    // try {
    //     setup();
    // } catch (Exception ex) {
    //     logger.error("Error setting up EMF", ex);
    // }
}
/** Counts every entity in the system by scanning all id scopes; blocks until complete. */
@Override
public long performEntityCount() {
    //TODO, this really needs to be a task that writes this data somewhere since this will get
    //progressively slower as the system expands
    return (Long) getAllEntitiesObservable().countLong().toBlocking().last();
}
/** @return the fixed UUID of the management application */
@Override
public UUID getManagementAppId() {
    return CpNamingUtils.MANAGEMENT_APPLICATION_ID;
}
/** @return the (cached) EntityManager for the management application */
@Override
public EntityManager getManagementEntityManager() {
    return getEntityManager(CpNamingUtils.MANAGEMENT_APPLICATION_ID);
}
    /**
     * Gets the setup.
     * Lazily constructs the {@code CpSetup} helper on first call and caches it.
     * NOTE(review): not synchronized — two concurrent first calls could each
     * build a CpSetup and one instance would win; confirm single-threaded
     * initialization is guaranteed by the container before relying on this.
     * @return Setup helper
     */
    public Setup getSetup() {
        if ( setup == null ) {
            setup = new CpSetup( this, cassandraService, injector );
        }
        return setup;
    }
    /**
     * TODO, these 3 methods are super janky. During refactoring we should clean this model up
     */
    // Forces an index refresh for the given application and returns the
    // refresh command's status info (delegates to the app's entity manager).
    public EntityIndex.IndexRefreshCommandInfo refreshIndex(UUID applicationId) {
        return getEntityManager(applicationId).refreshIndex();
    }
private EntityIndex getManagementIndex() {
return
managerCache.getEntityIndex( // management app
CpNamingUtils.getApplicationScope(getManagementAppId()));
}
@Override
public void flushEntityManagerCaches() {
managerCache.invalidate();
applicationIdCache.evictAll();
Map<UUID, EntityManager> entityManagersMap = entityManagers.asMap();
for ( UUID appUuid : entityManagersMap.keySet() ) {
EntityManager em = entityManagersMap.get(appUuid);
em.flushManagerCaches();
}
}
@Override
public Health getEntityStoreHealth() {
// could use any collection scope here, does not matter
EntityCollectionManager ecm = managerCache.getEntityCollectionManager(
new ApplicationScopeImpl( new SimpleId( CpNamingUtils.MANAGEMENT_APPLICATION_ID, "application" ) ) );
return ecm.getHealth();
}
    /**
     * Reports the health of the management application's search index.
     */
    @Override
    public Health getIndexHealth() {
        return getManagementIndex().getIndexHealth();
    }
    /**
     * Initializes (creates if necessary) the management application's index.
     */
    @Override
    public void initializeManagementIndex(){
        getManagementIndex().initialize();
    }
}
|
googleapis/google-cloud-java | 36,696 | java-oracledatabase/proto-google-cloud-oracledatabase-v1/src/main/java/com/google/cloud/oracledatabase/v1/ListEntitlementsResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/oracledatabase/v1/oracledatabase.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.oracledatabase.v1;
/**
*
*
* <pre>
* The response for `Entitlement.List`.
* </pre>
*
* Protobuf type {@code google.cloud.oracledatabase.v1.ListEntitlementsResponse}
*/
public final class ListEntitlementsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.oracledatabase.v1.ListEntitlementsResponse)
ListEntitlementsResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListEntitlementsResponse.newBuilder() to construct.
private ListEntitlementsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListEntitlementsResponse() {
entitlements_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListEntitlementsResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.oracledatabase.v1.V1mainProto
.internal_static_google_cloud_oracledatabase_v1_ListEntitlementsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.oracledatabase.v1.V1mainProto
.internal_static_google_cloud_oracledatabase_v1_ListEntitlementsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.oracledatabase.v1.ListEntitlementsResponse.class,
com.google.cloud.oracledatabase.v1.ListEntitlementsResponse.Builder.class);
}
public static final int ENTITLEMENTS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.oracledatabase.v1.Entitlement> entitlements_;
/**
*
*
* <pre>
* The list of Entitlements
* </pre>
*
* <code>repeated .google.cloud.oracledatabase.v1.Entitlement entitlements = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.oracledatabase.v1.Entitlement> getEntitlementsList() {
return entitlements_;
}
/**
*
*
* <pre>
* The list of Entitlements
* </pre>
*
* <code>repeated .google.cloud.oracledatabase.v1.Entitlement entitlements = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.oracledatabase.v1.EntitlementOrBuilder>
getEntitlementsOrBuilderList() {
return entitlements_;
}
/**
*
*
* <pre>
* The list of Entitlements
* </pre>
*
* <code>repeated .google.cloud.oracledatabase.v1.Entitlement entitlements = 1;</code>
*/
@java.lang.Override
public int getEntitlementsCount() {
return entitlements_.size();
}
/**
*
*
* <pre>
* The list of Entitlements
* </pre>
*
* <code>repeated .google.cloud.oracledatabase.v1.Entitlement entitlements = 1;</code>
*/
@java.lang.Override
public com.google.cloud.oracledatabase.v1.Entitlement getEntitlements(int index) {
return entitlements_.get(index);
}
/**
*
*
* <pre>
* The list of Entitlements
* </pre>
*
* <code>repeated .google.cloud.oracledatabase.v1.Entitlement entitlements = 1;</code>
*/
@java.lang.Override
public com.google.cloud.oracledatabase.v1.EntitlementOrBuilder getEntitlementsOrBuilder(
int index) {
return entitlements_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < entitlements_.size(); i++) {
output.writeMessage(1, entitlements_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < entitlements_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, entitlements_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.oracledatabase.v1.ListEntitlementsResponse)) {
return super.equals(obj);
}
com.google.cloud.oracledatabase.v1.ListEntitlementsResponse other =
(com.google.cloud.oracledatabase.v1.ListEntitlementsResponse) obj;
if (!getEntitlementsList().equals(other.getEntitlementsList())) return false;
if (!getNextPageToken().equals(other.getNextPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getEntitlementsCount() > 0) {
hash = (37 * hash) + ENTITLEMENTS_FIELD_NUMBER;
hash = (53 * hash) + getEntitlementsList().hashCode();
}
hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getNextPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.oracledatabase.v1.ListEntitlementsResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.oracledatabase.v1.ListEntitlementsResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.oracledatabase.v1.ListEntitlementsResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.oracledatabase.v1.ListEntitlementsResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.oracledatabase.v1.ListEntitlementsResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.oracledatabase.v1.ListEntitlementsResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.oracledatabase.v1.ListEntitlementsResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.oracledatabase.v1.ListEntitlementsResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.oracledatabase.v1.ListEntitlementsResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.oracledatabase.v1.ListEntitlementsResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.oracledatabase.v1.ListEntitlementsResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.oracledatabase.v1.ListEntitlementsResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.oracledatabase.v1.ListEntitlementsResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* The response for `Entitlement.List`.
* </pre>
*
* Protobuf type {@code google.cloud.oracledatabase.v1.ListEntitlementsResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.oracledatabase.v1.ListEntitlementsResponse)
com.google.cloud.oracledatabase.v1.ListEntitlementsResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.oracledatabase.v1.V1mainProto
.internal_static_google_cloud_oracledatabase_v1_ListEntitlementsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.oracledatabase.v1.V1mainProto
.internal_static_google_cloud_oracledatabase_v1_ListEntitlementsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.oracledatabase.v1.ListEntitlementsResponse.class,
com.google.cloud.oracledatabase.v1.ListEntitlementsResponse.Builder.class);
}
// Construct using com.google.cloud.oracledatabase.v1.ListEntitlementsResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (entitlementsBuilder_ == null) {
entitlements_ = java.util.Collections.emptyList();
} else {
entitlements_ = null;
entitlementsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.oracledatabase.v1.V1mainProto
.internal_static_google_cloud_oracledatabase_v1_ListEntitlementsResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.oracledatabase.v1.ListEntitlementsResponse getDefaultInstanceForType() {
return com.google.cloud.oracledatabase.v1.ListEntitlementsResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.oracledatabase.v1.ListEntitlementsResponse build() {
com.google.cloud.oracledatabase.v1.ListEntitlementsResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.oracledatabase.v1.ListEntitlementsResponse buildPartial() {
com.google.cloud.oracledatabase.v1.ListEntitlementsResponse result =
new com.google.cloud.oracledatabase.v1.ListEntitlementsResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.cloud.oracledatabase.v1.ListEntitlementsResponse result) {
if (entitlementsBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
entitlements_ = java.util.Collections.unmodifiableList(entitlements_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.entitlements_ = entitlements_;
} else {
result.entitlements_ = entitlementsBuilder_.build();
}
}
private void buildPartial0(com.google.cloud.oracledatabase.v1.ListEntitlementsResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.oracledatabase.v1.ListEntitlementsResponse) {
return mergeFrom((com.google.cloud.oracledatabase.v1.ListEntitlementsResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.oracledatabase.v1.ListEntitlementsResponse other) {
if (other == com.google.cloud.oracledatabase.v1.ListEntitlementsResponse.getDefaultInstance())
return this;
if (entitlementsBuilder_ == null) {
if (!other.entitlements_.isEmpty()) {
if (entitlements_.isEmpty()) {
entitlements_ = other.entitlements_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureEntitlementsIsMutable();
entitlements_.addAll(other.entitlements_);
}
onChanged();
}
} else {
if (!other.entitlements_.isEmpty()) {
if (entitlementsBuilder_.isEmpty()) {
entitlementsBuilder_.dispose();
entitlementsBuilder_ = null;
entitlements_ = other.entitlements_;
bitField0_ = (bitField0_ & ~0x00000001);
entitlementsBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getEntitlementsFieldBuilder()
: null;
} else {
entitlementsBuilder_.addAllMessages(other.entitlements_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.cloud.oracledatabase.v1.Entitlement m =
input.readMessage(
com.google.cloud.oracledatabase.v1.Entitlement.parser(), extensionRegistry);
if (entitlementsBuilder_ == null) {
ensureEntitlementsIsMutable();
entitlements_.add(m);
} else {
entitlementsBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.cloud.oracledatabase.v1.Entitlement> entitlements_ =
java.util.Collections.emptyList();
private void ensureEntitlementsIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
entitlements_ =
new java.util.ArrayList<com.google.cloud.oracledatabase.v1.Entitlement>(entitlements_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.oracledatabase.v1.Entitlement,
com.google.cloud.oracledatabase.v1.Entitlement.Builder,
com.google.cloud.oracledatabase.v1.EntitlementOrBuilder>
entitlementsBuilder_;
/**
*
*
* <pre>
* The list of Entitlements
* </pre>
*
* <code>repeated .google.cloud.oracledatabase.v1.Entitlement entitlements = 1;</code>
*/
public java.util.List<com.google.cloud.oracledatabase.v1.Entitlement> getEntitlementsList() {
if (entitlementsBuilder_ == null) {
return java.util.Collections.unmodifiableList(entitlements_);
} else {
return entitlementsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* The list of Entitlements
* </pre>
*
* <code>repeated .google.cloud.oracledatabase.v1.Entitlement entitlements = 1;</code>
*/
public int getEntitlementsCount() {
if (entitlementsBuilder_ == null) {
return entitlements_.size();
} else {
return entitlementsBuilder_.getCount();
}
}
/**
*
*
* <pre>
* The list of Entitlements
* </pre>
*
* <code>repeated .google.cloud.oracledatabase.v1.Entitlement entitlements = 1;</code>
*/
public com.google.cloud.oracledatabase.v1.Entitlement getEntitlements(int index) {
if (entitlementsBuilder_ == null) {
return entitlements_.get(index);
} else {
return entitlementsBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* The list of Entitlements
* </pre>
*
* <code>repeated .google.cloud.oracledatabase.v1.Entitlement entitlements = 1;</code>
*/
public Builder setEntitlements(
int index, com.google.cloud.oracledatabase.v1.Entitlement value) {
if (entitlementsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureEntitlementsIsMutable();
entitlements_.set(index, value);
onChanged();
} else {
entitlementsBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The list of Entitlements
* </pre>
*
* <code>repeated .google.cloud.oracledatabase.v1.Entitlement entitlements = 1;</code>
*/
public Builder setEntitlements(
int index, com.google.cloud.oracledatabase.v1.Entitlement.Builder builderForValue) {
if (entitlementsBuilder_ == null) {
ensureEntitlementsIsMutable();
entitlements_.set(index, builderForValue.build());
onChanged();
} else {
entitlementsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of Entitlements
* </pre>
*
* <code>repeated .google.cloud.oracledatabase.v1.Entitlement entitlements = 1;</code>
*/
public Builder addEntitlements(com.google.cloud.oracledatabase.v1.Entitlement value) {
if (entitlementsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureEntitlementsIsMutable();
entitlements_.add(value);
onChanged();
} else {
entitlementsBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* The list of Entitlements
* </pre>
*
* <code>repeated .google.cloud.oracledatabase.v1.Entitlement entitlements = 1;</code>
*/
public Builder addEntitlements(
int index, com.google.cloud.oracledatabase.v1.Entitlement value) {
if (entitlementsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureEntitlementsIsMutable();
entitlements_.add(index, value);
onChanged();
} else {
entitlementsBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The list of Entitlements
* </pre>
*
* <code>repeated .google.cloud.oracledatabase.v1.Entitlement entitlements = 1;</code>
*/
public Builder addEntitlements(
com.google.cloud.oracledatabase.v1.Entitlement.Builder builderForValue) {
if (entitlementsBuilder_ == null) {
ensureEntitlementsIsMutable();
entitlements_.add(builderForValue.build());
onChanged();
} else {
entitlementsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of Entitlements
* </pre>
*
* <code>repeated .google.cloud.oracledatabase.v1.Entitlement entitlements = 1;</code>
*/
public Builder addEntitlements(
int index, com.google.cloud.oracledatabase.v1.Entitlement.Builder builderForValue) {
if (entitlementsBuilder_ == null) {
ensureEntitlementsIsMutable();
entitlements_.add(index, builderForValue.build());
onChanged();
} else {
entitlementsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of Entitlements
* </pre>
*
* <code>repeated .google.cloud.oracledatabase.v1.Entitlement entitlements = 1;</code>
*/
public Builder addAllEntitlements(
java.lang.Iterable<? extends com.google.cloud.oracledatabase.v1.Entitlement> values) {
if (entitlementsBuilder_ == null) {
ensureEntitlementsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, entitlements_);
onChanged();
} else {
entitlementsBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* The list of Entitlements
* </pre>
*
* <code>repeated .google.cloud.oracledatabase.v1.Entitlement entitlements = 1;</code>
*/
public Builder clearEntitlements() {
if (entitlementsBuilder_ == null) {
entitlements_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
entitlementsBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* The list of Entitlements
* </pre>
*
* <code>repeated .google.cloud.oracledatabase.v1.Entitlement entitlements = 1;</code>
*/
public Builder removeEntitlements(int index) {
if (entitlementsBuilder_ == null) {
ensureEntitlementsIsMutable();
entitlements_.remove(index);
onChanged();
} else {
entitlementsBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* The list of Entitlements
* </pre>
*
* <code>repeated .google.cloud.oracledatabase.v1.Entitlement entitlements = 1;</code>
*/
public com.google.cloud.oracledatabase.v1.Entitlement.Builder getEntitlementsBuilder(
int index) {
return getEntitlementsFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* The list of Entitlements
* </pre>
*
* <code>repeated .google.cloud.oracledatabase.v1.Entitlement entitlements = 1;</code>
*/
public com.google.cloud.oracledatabase.v1.EntitlementOrBuilder getEntitlementsOrBuilder(
int index) {
if (entitlementsBuilder_ == null) {
return entitlements_.get(index);
} else {
return entitlementsBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* The list of Entitlements
* </pre>
*
* <code>repeated .google.cloud.oracledatabase.v1.Entitlement entitlements = 1;</code>
*/
public java.util.List<? extends com.google.cloud.oracledatabase.v1.EntitlementOrBuilder>
getEntitlementsOrBuilderList() {
if (entitlementsBuilder_ != null) {
return entitlementsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(entitlements_);
}
}
/**
*
*
* <pre>
* The list of Entitlements
* </pre>
*
* <code>repeated .google.cloud.oracledatabase.v1.Entitlement entitlements = 1;</code>
*/
public com.google.cloud.oracledatabase.v1.Entitlement.Builder addEntitlementsBuilder() {
return getEntitlementsFieldBuilder()
.addBuilder(com.google.cloud.oracledatabase.v1.Entitlement.getDefaultInstance());
}
/**
*
*
* <pre>
* The list of Entitlements
* </pre>
*
* <code>repeated .google.cloud.oracledatabase.v1.Entitlement entitlements = 1;</code>
*/
public com.google.cloud.oracledatabase.v1.Entitlement.Builder addEntitlementsBuilder(
int index) {
return getEntitlementsFieldBuilder()
.addBuilder(index, com.google.cloud.oracledatabase.v1.Entitlement.getDefaultInstance());
}
/**
*
*
* <pre>
* The list of Entitlements
* </pre>
*
* <code>repeated .google.cloud.oracledatabase.v1.Entitlement entitlements = 1;</code>
*/
public java.util.List<com.google.cloud.oracledatabase.v1.Entitlement.Builder>
getEntitlementsBuilderList() {
return getEntitlementsFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.oracledatabase.v1.Entitlement,
com.google.cloud.oracledatabase.v1.Entitlement.Builder,
com.google.cloud.oracledatabase.v1.EntitlementOrBuilder>
getEntitlementsFieldBuilder() {
if (entitlementsBuilder_ == null) {
entitlementsBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.oracledatabase.v1.Entitlement,
com.google.cloud.oracledatabase.v1.Entitlement.Builder,
com.google.cloud.oracledatabase.v1.EntitlementOrBuilder>(
entitlements_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
entitlements_ = null;
}
return entitlementsBuilder_;
}
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.oracledatabase.v1.ListEntitlementsResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.oracledatabase.v1.ListEntitlementsResponse)
private static final com.google.cloud.oracledatabase.v1.ListEntitlementsResponse DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.oracledatabase.v1.ListEntitlementsResponse();
}
public static com.google.cloud.oracledatabase.v1.ListEntitlementsResponse getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ListEntitlementsResponse> PARSER =
new com.google.protobuf.AbstractParser<ListEntitlementsResponse>() {
@java.lang.Override
public ListEntitlementsResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ListEntitlementsResponse> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListEntitlementsResponse> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.oracledatabase.v1.ListEntitlementsResponse getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 36,791 | java-datacatalog/proto-google-cloud-datacatalog-v1/src/main/java/com/google/cloud/datacatalog/v1/MigrationConfig.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/datacatalog/v1/datacatalog.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.datacatalog.v1;
/**
*
*
* <pre>
* The configuration related to the migration to Dataplex applied to an
* organization or project.
* It is the response message for
* [SetConfig][google.cloud.datacatalog.v1.DataCatalog.SetConfig] and
* [RetrieveEffectiveConfig][google.cloud.datacatalog.v1.DataCatalog.RetrieveEffectiveConfig].
* </pre>
*
* Protobuf type {@code google.cloud.datacatalog.v1.MigrationConfig}
*/
public final class MigrationConfig extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.datacatalog.v1.MigrationConfig)
MigrationConfigOrBuilder {
private static final long serialVersionUID = 0L;
// Use MigrationConfig.newBuilder() to construct.
private MigrationConfig(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private MigrationConfig() {
tagTemplateMigration_ = 0;
catalogUiExperience_ = 0;
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new MigrationConfig();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.datacatalog.v1.Datacatalog
.internal_static_google_cloud_datacatalog_v1_MigrationConfig_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.datacatalog.v1.Datacatalog
.internal_static_google_cloud_datacatalog_v1_MigrationConfig_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.datacatalog.v1.MigrationConfig.class,
com.google.cloud.datacatalog.v1.MigrationConfig.Builder.class);
}
private int bitField0_;
public static final int TAG_TEMPLATE_MIGRATION_FIELD_NUMBER = 1;
private int tagTemplateMigration_ = 0;
/**
*
*
* <pre>
* Opt-in status for the migration of Tag Templates to Dataplex.
* </pre>
*
* <code>.google.cloud.datacatalog.v1.TagTemplateMigration tag_template_migration = 1;</code>
*
* @return The enum numeric value on the wire for tagTemplateMigration.
*/
@java.lang.Override
public int getTagTemplateMigrationValue() {
return tagTemplateMigration_;
}
/**
*
*
* <pre>
* Opt-in status for the migration of Tag Templates to Dataplex.
* </pre>
*
* <code>.google.cloud.datacatalog.v1.TagTemplateMigration tag_template_migration = 1;</code>
*
* @return The tagTemplateMigration.
*/
@java.lang.Override
public com.google.cloud.datacatalog.v1.TagTemplateMigration getTagTemplateMigration() {
com.google.cloud.datacatalog.v1.TagTemplateMigration result =
com.google.cloud.datacatalog.v1.TagTemplateMigration.forNumber(tagTemplateMigration_);
return result == null
? com.google.cloud.datacatalog.v1.TagTemplateMigration.UNRECOGNIZED
: result;
}
public static final int CATALOG_UI_EXPERIENCE_FIELD_NUMBER = 2;
private int catalogUiExperience_ = 0;
/**
*
*
* <pre>
* Opt-in status for the UI switch to Dataplex.
* </pre>
*
* <code>.google.cloud.datacatalog.v1.CatalogUIExperience catalog_ui_experience = 2;</code>
*
* @return The enum numeric value on the wire for catalogUiExperience.
*/
@java.lang.Override
public int getCatalogUiExperienceValue() {
return catalogUiExperience_;
}
/**
*
*
* <pre>
* Opt-in status for the UI switch to Dataplex.
* </pre>
*
* <code>.google.cloud.datacatalog.v1.CatalogUIExperience catalog_ui_experience = 2;</code>
*
* @return The catalogUiExperience.
*/
@java.lang.Override
public com.google.cloud.datacatalog.v1.CatalogUIExperience getCatalogUiExperience() {
com.google.cloud.datacatalog.v1.CatalogUIExperience result =
com.google.cloud.datacatalog.v1.CatalogUIExperience.forNumber(catalogUiExperience_);
return result == null
? com.google.cloud.datacatalog.v1.CatalogUIExperience.UNRECOGNIZED
: result;
}
public static final int TEMPLATE_MIGRATION_ENABLED_TIME_FIELD_NUMBER = 3;
private com.google.protobuf.Timestamp templateMigrationEnabledTime_;
/**
*
*
* <pre>
* The time when the Tag Template migration was enabled.
* If the Tag Template migration is not enabled, this field is not set.
* </pre>
*
* <code>.google.protobuf.Timestamp template_migration_enabled_time = 3;</code>
*
* @return Whether the templateMigrationEnabledTime field is set.
*/
@java.lang.Override
public boolean hasTemplateMigrationEnabledTime() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* The time when the Tag Template migration was enabled.
* If the Tag Template migration is not enabled, this field is not set.
* </pre>
*
* <code>.google.protobuf.Timestamp template_migration_enabled_time = 3;</code>
*
* @return The templateMigrationEnabledTime.
*/
@java.lang.Override
public com.google.protobuf.Timestamp getTemplateMigrationEnabledTime() {
return templateMigrationEnabledTime_ == null
? com.google.protobuf.Timestamp.getDefaultInstance()
: templateMigrationEnabledTime_;
}
/**
*
*
* <pre>
* The time when the Tag Template migration was enabled.
* If the Tag Template migration is not enabled, this field is not set.
* </pre>
*
* <code>.google.protobuf.Timestamp template_migration_enabled_time = 3;</code>
*/
@java.lang.Override
public com.google.protobuf.TimestampOrBuilder getTemplateMigrationEnabledTimeOrBuilder() {
return templateMigrationEnabledTime_ == null
? com.google.protobuf.Timestamp.getDefaultInstance()
: templateMigrationEnabledTime_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (tagTemplateMigration_
!= com.google.cloud.datacatalog.v1.TagTemplateMigration.TAG_TEMPLATE_MIGRATION_UNSPECIFIED
.getNumber()) {
output.writeEnum(1, tagTemplateMigration_);
}
if (catalogUiExperience_
!= com.google.cloud.datacatalog.v1.CatalogUIExperience.CATALOG_UI_EXPERIENCE_UNSPECIFIED
.getNumber()) {
output.writeEnum(2, catalogUiExperience_);
}
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(3, getTemplateMigrationEnabledTime());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (tagTemplateMigration_
!= com.google.cloud.datacatalog.v1.TagTemplateMigration.TAG_TEMPLATE_MIGRATION_UNSPECIFIED
.getNumber()) {
size += com.google.protobuf.CodedOutputStream.computeEnumSize(1, tagTemplateMigration_);
}
if (catalogUiExperience_
!= com.google.cloud.datacatalog.v1.CatalogUIExperience.CATALOG_UI_EXPERIENCE_UNSPECIFIED
.getNumber()) {
size += com.google.protobuf.CodedOutputStream.computeEnumSize(2, catalogUiExperience_);
}
if (((bitField0_ & 0x00000001) != 0)) {
size +=
com.google.protobuf.CodedOutputStream.computeMessageSize(
3, getTemplateMigrationEnabledTime());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.datacatalog.v1.MigrationConfig)) {
return super.equals(obj);
}
com.google.cloud.datacatalog.v1.MigrationConfig other =
(com.google.cloud.datacatalog.v1.MigrationConfig) obj;
if (tagTemplateMigration_ != other.tagTemplateMigration_) return false;
if (catalogUiExperience_ != other.catalogUiExperience_) return false;
if (hasTemplateMigrationEnabledTime() != other.hasTemplateMigrationEnabledTime()) return false;
if (hasTemplateMigrationEnabledTime()) {
if (!getTemplateMigrationEnabledTime().equals(other.getTemplateMigrationEnabledTime()))
return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + TAG_TEMPLATE_MIGRATION_FIELD_NUMBER;
hash = (53 * hash) + tagTemplateMigration_;
hash = (37 * hash) + CATALOG_UI_EXPERIENCE_FIELD_NUMBER;
hash = (53 * hash) + catalogUiExperience_;
if (hasTemplateMigrationEnabledTime()) {
hash = (37 * hash) + TEMPLATE_MIGRATION_ENABLED_TIME_FIELD_NUMBER;
hash = (53 * hash) + getTemplateMigrationEnabledTime().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.datacatalog.v1.MigrationConfig parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.datacatalog.v1.MigrationConfig parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.datacatalog.v1.MigrationConfig parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.datacatalog.v1.MigrationConfig parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.datacatalog.v1.MigrationConfig parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.datacatalog.v1.MigrationConfig parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.datacatalog.v1.MigrationConfig parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.datacatalog.v1.MigrationConfig parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.datacatalog.v1.MigrationConfig parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.datacatalog.v1.MigrationConfig parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.datacatalog.v1.MigrationConfig parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.datacatalog.v1.MigrationConfig parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.datacatalog.v1.MigrationConfig prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* The configuration related to the migration to Dataplex applied to an
* organization or project.
* It is the response message for
* [SetConfig][google.cloud.datacatalog.v1.DataCatalog.SetConfig] and
* [RetrieveEffectiveConfig][google.cloud.datacatalog.v1.DataCatalog.RetrieveEffectiveConfig].
* </pre>
*
* Protobuf type {@code google.cloud.datacatalog.v1.MigrationConfig}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.datacatalog.v1.MigrationConfig)
com.google.cloud.datacatalog.v1.MigrationConfigOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.datacatalog.v1.Datacatalog
.internal_static_google_cloud_datacatalog_v1_MigrationConfig_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.datacatalog.v1.Datacatalog
.internal_static_google_cloud_datacatalog_v1_MigrationConfig_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.datacatalog.v1.MigrationConfig.class,
com.google.cloud.datacatalog.v1.MigrationConfig.Builder.class);
}
// Construct using com.google.cloud.datacatalog.v1.MigrationConfig.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getTemplateMigrationEnabledTimeFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
tagTemplateMigration_ = 0;
catalogUiExperience_ = 0;
templateMigrationEnabledTime_ = null;
if (templateMigrationEnabledTimeBuilder_ != null) {
templateMigrationEnabledTimeBuilder_.dispose();
templateMigrationEnabledTimeBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.datacatalog.v1.Datacatalog
.internal_static_google_cloud_datacatalog_v1_MigrationConfig_descriptor;
}
@java.lang.Override
public com.google.cloud.datacatalog.v1.MigrationConfig getDefaultInstanceForType() {
return com.google.cloud.datacatalog.v1.MigrationConfig.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.datacatalog.v1.MigrationConfig build() {
com.google.cloud.datacatalog.v1.MigrationConfig result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.datacatalog.v1.MigrationConfig buildPartial() {
com.google.cloud.datacatalog.v1.MigrationConfig result =
new com.google.cloud.datacatalog.v1.MigrationConfig(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.datacatalog.v1.MigrationConfig result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.tagTemplateMigration_ = tagTemplateMigration_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.catalogUiExperience_ = catalogUiExperience_;
}
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000004) != 0)) {
result.templateMigrationEnabledTime_ =
templateMigrationEnabledTimeBuilder_ == null
? templateMigrationEnabledTime_
: templateMigrationEnabledTimeBuilder_.build();
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.datacatalog.v1.MigrationConfig) {
return mergeFrom((com.google.cloud.datacatalog.v1.MigrationConfig) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.datacatalog.v1.MigrationConfig other) {
if (other == com.google.cloud.datacatalog.v1.MigrationConfig.getDefaultInstance())
return this;
if (other.tagTemplateMigration_ != 0) {
setTagTemplateMigrationValue(other.getTagTemplateMigrationValue());
}
if (other.catalogUiExperience_ != 0) {
setCatalogUiExperienceValue(other.getCatalogUiExperienceValue());
}
if (other.hasTemplateMigrationEnabledTime()) {
mergeTemplateMigrationEnabledTime(other.getTemplateMigrationEnabledTime());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 8:
{
tagTemplateMigration_ = input.readEnum();
bitField0_ |= 0x00000001;
break;
} // case 8
case 16:
{
catalogUiExperience_ = input.readEnum();
bitField0_ |= 0x00000002;
break;
} // case 16
case 26:
{
input.readMessage(
getTemplateMigrationEnabledTimeFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000004;
break;
} // case 26
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private int tagTemplateMigration_ = 0;
/**
*
*
* <pre>
* Opt-in status for the migration of Tag Templates to Dataplex.
* </pre>
*
* <code>.google.cloud.datacatalog.v1.TagTemplateMigration tag_template_migration = 1;</code>
*
* @return The enum numeric value on the wire for tagTemplateMigration.
*/
@java.lang.Override
public int getTagTemplateMigrationValue() {
return tagTemplateMigration_;
}
/**
*
*
* <pre>
* Opt-in status for the migration of Tag Templates to Dataplex.
* </pre>
*
* <code>.google.cloud.datacatalog.v1.TagTemplateMigration tag_template_migration = 1;</code>
*
* @param value The enum numeric value on the wire for tagTemplateMigration to set.
* @return This builder for chaining.
*/
public Builder setTagTemplateMigrationValue(int value) {
tagTemplateMigration_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Opt-in status for the migration of Tag Templates to Dataplex.
* </pre>
*
* <code>.google.cloud.datacatalog.v1.TagTemplateMigration tag_template_migration = 1;</code>
*
* @return The tagTemplateMigration.
*/
@java.lang.Override
public com.google.cloud.datacatalog.v1.TagTemplateMigration getTagTemplateMigration() {
com.google.cloud.datacatalog.v1.TagTemplateMigration result =
com.google.cloud.datacatalog.v1.TagTemplateMigration.forNumber(tagTemplateMigration_);
return result == null
? com.google.cloud.datacatalog.v1.TagTemplateMigration.UNRECOGNIZED
: result;
}
/**
*
*
* <pre>
* Opt-in status for the migration of Tag Templates to Dataplex.
* </pre>
*
* <code>.google.cloud.datacatalog.v1.TagTemplateMigration tag_template_migration = 1;</code>
*
* @param value The tagTemplateMigration to set.
* @return This builder for chaining.
*/
public Builder setTagTemplateMigration(
com.google.cloud.datacatalog.v1.TagTemplateMigration value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
tagTemplateMigration_ = value.getNumber();
onChanged();
return this;
}
/**
*
*
* <pre>
* Opt-in status for the migration of Tag Templates to Dataplex.
* </pre>
*
* <code>.google.cloud.datacatalog.v1.TagTemplateMigration tag_template_migration = 1;</code>
*
* @return This builder for chaining.
*/
public Builder clearTagTemplateMigration() {
bitField0_ = (bitField0_ & ~0x00000001);
tagTemplateMigration_ = 0;
onChanged();
return this;
}
private int catalogUiExperience_ = 0;
/**
*
*
* <pre>
* Opt-in status for the UI switch to Dataplex.
* </pre>
*
* <code>.google.cloud.datacatalog.v1.CatalogUIExperience catalog_ui_experience = 2;</code>
*
* @return The enum numeric value on the wire for catalogUiExperience.
*/
@java.lang.Override
public int getCatalogUiExperienceValue() {
return catalogUiExperience_;
}
/**
*
*
* <pre>
* Opt-in status for the UI switch to Dataplex.
* </pre>
*
* <code>.google.cloud.datacatalog.v1.CatalogUIExperience catalog_ui_experience = 2;</code>
*
* @param value The enum numeric value on the wire for catalogUiExperience to set.
* @return This builder for chaining.
*/
public Builder setCatalogUiExperienceValue(int value) {
catalogUiExperience_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Opt-in status for the UI switch to Dataplex.
* </pre>
*
* <code>.google.cloud.datacatalog.v1.CatalogUIExperience catalog_ui_experience = 2;</code>
*
* @return The catalogUiExperience.
*/
@java.lang.Override
public com.google.cloud.datacatalog.v1.CatalogUIExperience getCatalogUiExperience() {
com.google.cloud.datacatalog.v1.CatalogUIExperience result =
com.google.cloud.datacatalog.v1.CatalogUIExperience.forNumber(catalogUiExperience_);
return result == null
? com.google.cloud.datacatalog.v1.CatalogUIExperience.UNRECOGNIZED
: result;
}
/**
*
*
* <pre>
* Opt-in status for the UI switch to Dataplex.
* </pre>
*
* <code>.google.cloud.datacatalog.v1.CatalogUIExperience catalog_ui_experience = 2;</code>
*
* @param value The catalogUiExperience to set.
* @return This builder for chaining.
*/
public Builder setCatalogUiExperience(
com.google.cloud.datacatalog.v1.CatalogUIExperience value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
catalogUiExperience_ = value.getNumber();
onChanged();
return this;
}
/**
*
*
* <pre>
* Opt-in status for the UI switch to Dataplex.
* </pre>
*
* <code>.google.cloud.datacatalog.v1.CatalogUIExperience catalog_ui_experience = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearCatalogUiExperience() {
bitField0_ = (bitField0_ & ~0x00000002);
catalogUiExperience_ = 0;
onChanged();
return this;
}
private com.google.protobuf.Timestamp templateMigrationEnabledTime_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Timestamp,
com.google.protobuf.Timestamp.Builder,
com.google.protobuf.TimestampOrBuilder>
templateMigrationEnabledTimeBuilder_;
/**
*
*
* <pre>
* The time when the Tag Template migration was enabled.
* If the Tag Template migration is not enabled, this field is not set.
* </pre>
*
* <code>.google.protobuf.Timestamp template_migration_enabled_time = 3;</code>
*
* @return Whether the templateMigrationEnabledTime field is set.
*/
public boolean hasTemplateMigrationEnabledTime() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
*
*
* <pre>
* The time when the Tag Template migration was enabled.
* If the Tag Template migration is not enabled, this field is not set.
* </pre>
*
* <code>.google.protobuf.Timestamp template_migration_enabled_time = 3;</code>
*
* @return The templateMigrationEnabledTime.
*/
public com.google.protobuf.Timestamp getTemplateMigrationEnabledTime() {
if (templateMigrationEnabledTimeBuilder_ == null) {
return templateMigrationEnabledTime_ == null
? com.google.protobuf.Timestamp.getDefaultInstance()
: templateMigrationEnabledTime_;
} else {
return templateMigrationEnabledTimeBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* The time when the Tag Template migration was enabled.
* If the Tag Template migration is not enabled, this field is not set.
* </pre>
*
* <code>.google.protobuf.Timestamp template_migration_enabled_time = 3;</code>
*/
public Builder setTemplateMigrationEnabledTime(com.google.protobuf.Timestamp value) {
if (templateMigrationEnabledTimeBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
templateMigrationEnabledTime_ = value;
} else {
templateMigrationEnabledTimeBuilder_.setMessage(value);
}
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* The time when the Tag Template migration was enabled.
* If the Tag Template migration is not enabled, this field is not set.
* </pre>
*
* <code>.google.protobuf.Timestamp template_migration_enabled_time = 3;</code>
*/
public Builder setTemplateMigrationEnabledTime(
com.google.protobuf.Timestamp.Builder builderForValue) {
if (templateMigrationEnabledTimeBuilder_ == null) {
templateMigrationEnabledTime_ = builderForValue.build();
} else {
templateMigrationEnabledTimeBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* The time when the Tag Template migration was enabled.
* If the Tag Template migration is not enabled, this field is not set.
* </pre>
*
* <code>.google.protobuf.Timestamp template_migration_enabled_time = 3;</code>
*/
public Builder mergeTemplateMigrationEnabledTime(com.google.protobuf.Timestamp value) {
if (templateMigrationEnabledTimeBuilder_ == null) {
if (((bitField0_ & 0x00000004) != 0)
&& templateMigrationEnabledTime_ != null
&& templateMigrationEnabledTime_
!= com.google.protobuf.Timestamp.getDefaultInstance()) {
getTemplateMigrationEnabledTimeBuilder().mergeFrom(value);
} else {
templateMigrationEnabledTime_ = value;
}
} else {
templateMigrationEnabledTimeBuilder_.mergeFrom(value);
}
if (templateMigrationEnabledTime_ != null) {
bitField0_ |= 0x00000004;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* The time when the Tag Template migration was enabled.
* If the Tag Template migration is not enabled, this field is not set.
* </pre>
*
* <code>.google.protobuf.Timestamp template_migration_enabled_time = 3;</code>
*/
public Builder clearTemplateMigrationEnabledTime() {
bitField0_ = (bitField0_ & ~0x00000004);
templateMigrationEnabledTime_ = null;
if (templateMigrationEnabledTimeBuilder_ != null) {
templateMigrationEnabledTimeBuilder_.dispose();
templateMigrationEnabledTimeBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* The time when the Tag Template migration was enabled.
* If the Tag Template migration is not enabled, this field is not set.
* </pre>
*
* <code>.google.protobuf.Timestamp template_migration_enabled_time = 3;</code>
*/
public com.google.protobuf.Timestamp.Builder getTemplateMigrationEnabledTimeBuilder() {
bitField0_ |= 0x00000004;
onChanged();
return getTemplateMigrationEnabledTimeFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* The time when the Tag Template migration was enabled.
* If the Tag Template migration is not enabled, this field is not set.
* </pre>
*
* <code>.google.protobuf.Timestamp template_migration_enabled_time = 3;</code>
*/
public com.google.protobuf.TimestampOrBuilder getTemplateMigrationEnabledTimeOrBuilder() {
if (templateMigrationEnabledTimeBuilder_ != null) {
return templateMigrationEnabledTimeBuilder_.getMessageOrBuilder();
} else {
return templateMigrationEnabledTime_ == null
? com.google.protobuf.Timestamp.getDefaultInstance()
: templateMigrationEnabledTime_;
}
}
/**
*
*
* <pre>
* The time when the Tag Template migration was enabled.
* If the Tag Template migration is not enabled, this field is not set.
* </pre>
*
* <code>.google.protobuf.Timestamp template_migration_enabled_time = 3;</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Timestamp,
com.google.protobuf.Timestamp.Builder,
com.google.protobuf.TimestampOrBuilder>
getTemplateMigrationEnabledTimeFieldBuilder() {
if (templateMigrationEnabledTimeBuilder_ == null) {
templateMigrationEnabledTimeBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Timestamp,
com.google.protobuf.Timestamp.Builder,
com.google.protobuf.TimestampOrBuilder>(
getTemplateMigrationEnabledTime(), getParentForChildren(), isClean());
templateMigrationEnabledTime_ = null;
}
return templateMigrationEnabledTimeBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.datacatalog.v1.MigrationConfig)
}
// @@protoc_insertion_point(class_scope:google.cloud.datacatalog.v1.MigrationConfig)
private static final com.google.cloud.datacatalog.v1.MigrationConfig DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.datacatalog.v1.MigrationConfig();
}
public static com.google.cloud.datacatalog.v1.MigrationConfig getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<MigrationConfig> PARSER =
new com.google.protobuf.AbstractParser<MigrationConfig>() {
@java.lang.Override
public MigrationConfig parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<MigrationConfig> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<MigrationConfig> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.datacatalog.v1.MigrationConfig getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
hibernate/hibernate-search | 34,332 | integrationtest/mapper/orm/src/test/java/org/hibernate/search/integrationtest/mapper/orm/automaticindexing/association/bytype/onetomany/AutomaticIndexingOneToManyCollectionBaseIT.java | /*
* SPDX-License-Identifier: Apache-2.0
* Copyright Red Hat Inc. and Hibernate Authors
*/
package org.hibernate.search.integrationtest.mapper.orm.automaticindexing.association.bytype.onetomany;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Optional;
import java.util.stream.Stream;
import jakarta.persistence.Basic;
import jakarta.persistence.CollectionTable;
import jakarta.persistence.Column;
import jakarta.persistence.ElementCollection;
import jakarta.persistence.Embedded;
import jakarta.persistence.Entity;
import jakarta.persistence.Id;
import jakarta.persistence.JoinColumn;
import jakarta.persistence.JoinTable;
import jakarta.persistence.ManyToOne;
import jakarta.persistence.OneToMany;
import jakarta.persistence.OneToOne;
import jakarta.persistence.OrderBy;
import jakarta.persistence.OrderColumn;
import jakarta.persistence.Transient;
import org.hibernate.search.integrationtest.mapper.orm.automaticindexing.association.bytype.AbstractAutomaticIndexingMultiValuedAssociationBaseIT;
import org.hibernate.search.integrationtest.mapper.orm.automaticindexing.association.bytype.ContainerPrimitives;
import org.hibernate.search.integrationtest.mapper.orm.automaticindexing.association.bytype.accessor.MultiValuedPropertyAccessor;
import org.hibernate.search.integrationtest.mapper.orm.automaticindexing.association.bytype.accessor.PropertyAccessor;
import org.hibernate.search.mapper.pojo.automaticindexing.ReindexOnUpdate;
import org.hibernate.search.mapper.pojo.mapping.definition.annotation.AssociationInverseSide;
import org.hibernate.search.mapper.pojo.mapping.definition.annotation.GenericField;
import org.hibernate.search.mapper.pojo.mapping.definition.annotation.Indexed;
import org.hibernate.search.mapper.pojo.mapping.definition.annotation.IndexedEmbedded;
import org.hibernate.search.mapper.pojo.mapping.definition.annotation.IndexingDependency;
import org.hibernate.search.mapper.pojo.mapping.definition.annotation.ObjectPath;
import org.hibernate.search.mapper.pojo.mapping.definition.annotation.PropertyValue;
import org.hibernate.search.util.impl.integrationtest.mapper.orm.OrmSetupHelper;
/**
* Test automatic indexing caused by multi-valued association updates
* or by updates of associated (contained) entities,
* with a {@code @OneToMany Collection} association owned by the contained side.
*/
public class AutomaticIndexingOneToManyCollectionBaseIT
extends AbstractAutomaticIndexingMultiValuedAssociationBaseIT<
AutomaticIndexingOneToManyCollectionBaseIT.IndexedEntity,
AutomaticIndexingOneToManyCollectionBaseIT.ContainingEntity,
AutomaticIndexingOneToManyCollectionBaseIT.ContainingEmbeddable,
AutomaticIndexingOneToManyCollectionBaseIT.ContainedEntity,
AutomaticIndexingOneToManyCollectionBaseIT.ContainedEmbeddable,
Collection<AutomaticIndexingOneToManyCollectionBaseIT.ContainedEntity>> {
	public AutomaticIndexingOneToManyCollectionBaseIT() {
		// Wire the entity-specific "primitives" adapters into the generic base test,
		// which drives all scenarios through these adapters.
		super( IndexedEntity.PRIMITIVES, ContainingEntity.PRIMITIVES, ContainingEmbeddable.PRIMITIVES,
				ContainedEntity.PRIMITIVES, ContainedEmbeddable.PRIMITIVES );
	}
	@Override
	protected boolean isAssociationMultiValuedOnContainedSide() {
		// The contained side holds a single @ManyToOne back-reference, not a collection.
		return false;
	}
	@Override
	protected boolean isAssociationOwnedByContainedSide() {
		// The @OneToMany collections use mappedBy, so the contained (@ManyToOne) side
		// owns the foreign key.
		return true;
	}
@Override
protected void preDelete(OrmSetupHelper.SetupContext setupContext) {
// We're simulating a mappedBy with two associations (see comments in annotation mapping),
// so we need to clear one side before we can delete entities.
setupContext.dataClearing( config -> config.manualDatabaseCleanup( session -> {
session.createQuery(
"select c from containing c where size(c.containedElementCollectionAssociationsIndexedEmbedded) > 0 or size(c.containedElementCollectionAssociationsNonIndexedEmbedded) > 0 ",
ContainingEntity.class
).getResultList()
.forEach( containing -> {
containing.getContainedElementCollectionAssociationsIndexedEmbedded().clear();
containing.getContainedElementCollectionAssociationsNonIndexedEmbedded().clear();
} );
} ) );
}
	/**
	 * "Containing" side of the associations under test.
	 * <p>
	 * All {@code @OneToMany} collections here use {@code mappedBy} (or a simulated
	 * inverse side via {@code @AssociationInverseSide}), so the association is owned
	 * by the contained side; updates to these collections and to the contained
	 * entities drive the reindexing scenarios of the base test.
	 */
	@Entity(name = "containing")
	public static class ContainingEntity {
		@Id
		private Integer id;
		// Deliberately not indexed: changes to this field must not trigger reindexing.
		private String nonIndexedField;
		@OneToOne
		private ContainingEntity parent;
		// The include paths below mirror every indexed path reachable through "child",
		// so that changes deep in the graph propagate up to the indexed root.
		@OneToOne(mappedBy = "parent")
		@IndexedEmbedded(includePaths = {
				"containedIndexedEmbedded.indexedField",
				"containedIndexedEmbedded.indexedElementCollectionField",
				"containedIndexedEmbedded.containedDerivedField",
				"containedIndexedEmbeddedShallowReindexOnUpdate.indexedField",
				"containedIndexedEmbeddedShallowReindexOnUpdate.indexedElementCollectionField",
				"containedIndexedEmbeddedShallowReindexOnUpdate.containedDerivedField",
				"containedIndexedEmbeddedNoReindexOnUpdate.indexedField",
				"containedIndexedEmbeddedNoReindexOnUpdate.indexedElementCollectionField",
				"containedIndexedEmbeddedNoReindexOnUpdate.containedDerivedField",
				"containedIndexedEmbeddedWithCast.indexedField",
				"embeddedAssociations.containedIndexedEmbedded.indexedField",
				"embeddedAssociations.containedIndexedEmbedded.indexedElementCollectionField",
				"embeddedAssociations.containedIndexedEmbedded.containedDerivedField",
				"containedElementCollectionAssociationsIndexedEmbedded.indexedField",
				"containedElementCollectionAssociationsIndexedEmbedded.indexedElementCollectionField",
				"containedElementCollectionAssociationsIndexedEmbedded.containedDerivedField",
				"crossEntityDerivedField"
		})
		private ContainingEntity child;
		@OneToMany(mappedBy = "containingAsIndexedEmbedded")
		@OrderBy("id asc") // Make sure the iteration order is predictable
		@IndexedEmbedded(includePaths = { "indexedField", "indexedElementCollectionField", "containedDerivedField" })
		private Collection<ContainedEntity> containedIndexedEmbedded = new ArrayList<>();
		// Not annotated with @IndexedEmbedded: updates must not trigger reindexing.
		@OneToMany(mappedBy = "containingAsNonIndexedEmbedded")
		@OrderBy("id asc") // Make sure the iteration order is predictable
		private Collection<ContainedEntity> containedNonIndexedEmbedded = new ArrayList<>();
		// SHALLOW: reindex when the association itself changes, but not when the
		// contained entities' properties change.
		@OneToMany(mappedBy = "containingAsIndexedEmbeddedShallowReindexOnUpdate")
		@OrderBy("id asc") // Make sure the iteration order is predictable
		@IndexedEmbedded(includePaths = { "indexedField", "indexedElementCollectionField", "containedDerivedField" })
		@IndexingDependency(reindexOnUpdate = ReindexOnUpdate.SHALLOW)
		private Collection<ContainedEntity> containedIndexedEmbeddedShallowReindexOnUpdate = new ArrayList<>();
		// NO: never reindex automatically for this association.
		@OneToMany(mappedBy = "containingAsIndexedEmbeddedNoReindexOnUpdate")
		@OrderBy("id asc") // Make sure the iteration order is predictable
		@IndexedEmbedded(includePaths = { "indexedField", "indexedElementCollectionField", "containedDerivedField" })
		@IndexingDependency(reindexOnUpdate = ReindexOnUpdate.NO)
		private Collection<ContainedEntity> containedIndexedEmbeddedNoReindexOnUpdate = new ArrayList<>();
		// Only used through getCrossEntityDerivedField() below.
		@OneToMany(mappedBy = "containingAsUsedInCrossEntityDerivedProperty")
		@OrderBy("id asc") // Make sure the iteration order is predictable
		private Collection<ContainedEntity> containedUsedInCrossEntityDerivedProperty = new ArrayList<>();
		// Raw-typed on purpose: exercises @IndexedEmbedded(targetType = ...) casting.
		@OneToMany(mappedBy = "containingAsIndexedEmbeddedWithCast", targetEntity = ContainedEntity.class)
		@OrderBy("id asc") // Make sure the iteration order is predictable
		@IndexedEmbedded(includePaths = "indexedField", targetType = ContainedEntity.class)
		private Collection<Object> containedIndexedEmbeddedWithCast = new ArrayList<>();
		@IndexedEmbedded
		@Embedded
		private ContainingEmbeddable embeddedAssociations;
		/*
		 * No mappedBy here. The inverse side of associations within an element collection cannot use mappedBy.
		 * If they do, Hibernate ORM will fail (throw an exception) while attempting to walk down the mappedBy path,
		 * because it assumes the prefix of that path is an embeddable,
		 * and in this case it is a List.
		 * TODO use mappedBy when the above gets fixed in Hibernate ORM
		 */
		@OneToMany
		@OrderColumn(name = "idx")
		@JoinTable(name = "i_containedECAssocIdxEmb",
				joinColumns = @JoinColumn(name = "containing"),
				inverseJoinColumns = @JoinColumn(name = "contained"))
		@AssociationInverseSide(inversePath = @ObjectPath({
				@PropertyValue(propertyName = "elementCollectionAssociations"),
				@PropertyValue(propertyName = "containingAsIndexedEmbedded")
		}))
		@IndexedEmbedded(includePaths = { "indexedField", "indexedElementCollectionField", "containedDerivedField" })
		private List<ContainedEntity> containedElementCollectionAssociationsIndexedEmbedded;
		/*
		 * No mappedBy here. Same reason as just above.
		 * TODO use mappedBy when the above gets fixed in Hibernate ORM
		 */
		@OneToMany
		@OrderColumn(name = "idx")
		@JoinTable(name = "i_containedECAssocNonIdxEmb",
				joinColumns = @JoinColumn(name = "containing"),
				inverseJoinColumns = @JoinColumn(name = "contained"))
		@AssociationInverseSide(inversePath = @ObjectPath({
				@PropertyValue(propertyName = "elementCollectionAssociations"),
				@PropertyValue(propertyName = "containingAsNonIndexedEmbedded")
		}))
		private List<ContainedEntity> containedElementCollectionAssociationsNonIndexedEmbedded;
		public Integer getId() {
			return id;
		}
		public void setId(Integer id) {
			this.id = id;
		}
		public String getNonIndexedField() {
			return nonIndexedField;
		}
		public void setNonIndexedField(String nonIndexedField) {
			this.nonIndexedField = nonIndexedField;
		}
		public ContainingEntity getParent() {
			return parent;
		}
		public void setParent(ContainingEntity parent) {
			this.parent = parent;
		}
		public ContainingEntity getChild() {
			return child;
		}
		public void setChild(ContainingEntity child) {
			this.child = child;
		}
		public Collection<ContainedEntity> getContainedIndexedEmbedded() {
			return containedIndexedEmbedded;
		}
		public void setContainedIndexedEmbedded(Collection<ContainedEntity> containedIndexedEmbedded) {
			this.containedIndexedEmbedded = containedIndexedEmbedded;
		}
		public Collection<ContainedEntity> getContainedNonIndexedEmbedded() {
			return containedNonIndexedEmbedded;
		}
		public void setContainedNonIndexedEmbedded(Collection<ContainedEntity> containedNonIndexedEmbedded) {
			this.containedNonIndexedEmbedded = containedNonIndexedEmbedded;
		}
		public Collection<ContainedEntity> getContainedIndexedEmbeddedShallowReindexOnUpdate() {
			return containedIndexedEmbeddedShallowReindexOnUpdate;
		}
		public void setContainedIndexedEmbeddedShallowReindexOnUpdate(
				Collection<ContainedEntity> containedIndexedEmbeddedShallowReindexOnUpdate) {
			this.containedIndexedEmbeddedShallowReindexOnUpdate = containedIndexedEmbeddedShallowReindexOnUpdate;
		}
		public Collection<ContainedEntity> getContainedIndexedEmbeddedNoReindexOnUpdate() {
			return containedIndexedEmbeddedNoReindexOnUpdate;
		}
		public void setContainedIndexedEmbeddedNoReindexOnUpdate(
				Collection<ContainedEntity> containedIndexedEmbeddedNoReindexOnUpdate) {
			this.containedIndexedEmbeddedNoReindexOnUpdate = containedIndexedEmbeddedNoReindexOnUpdate;
		}
		public Collection<ContainedEntity> getContainedUsedInCrossEntityDerivedProperty() {
			return containedUsedInCrossEntityDerivedProperty;
		}
		public void setContainedUsedInCrossEntityDerivedProperty(
				Collection<ContainedEntity> containedUsedInCrossEntityDerivedProperty) {
			this.containedUsedInCrossEntityDerivedProperty = containedUsedInCrossEntityDerivedProperty;
		}
		public Collection<Object> getContainedIndexedEmbeddedWithCast() {
			return containedIndexedEmbeddedWithCast;
		}
		public ContainingEmbeddable getEmbeddedAssociations() {
			return embeddedAssociations;
		}
		public void setEmbeddedAssociations(ContainingEmbeddable embeddedAssociations) {
			this.embeddedAssociations = embeddedAssociations;
		}
		public List<ContainedEntity> getContainedElementCollectionAssociationsIndexedEmbedded() {
			return containedElementCollectionAssociationsIndexedEmbedded;
		}
		public List<ContainedEntity> getContainedElementCollectionAssociationsNonIndexedEmbedded() {
			return containedElementCollectionAssociationsNonIndexedEmbedded;
		}
		// Derived property: its value depends on fields of *other* entities, declared
		// through @IndexingDependency(derivedFrom) so changes to those fields trigger
		// reindexing of this entity.
		@Transient
		@GenericField
		@IndexingDependency(derivedFrom = {
				@ObjectPath({
						@PropertyValue(propertyName = "containedUsedInCrossEntityDerivedProperty"),
						@PropertyValue(propertyName = "fieldUsedInCrossEntityDerivedField1")
				}),
				@ObjectPath({
						@PropertyValue(propertyName = "containedUsedInCrossEntityDerivedProperty"),
						@PropertyValue(propertyName = "fieldUsedInCrossEntityDerivedField2")
				})
		})
		public Optional<String> getCrossEntityDerivedField() {
			return computeDerived(
					containedUsedInCrossEntityDerivedProperty.stream().flatMap( c -> Stream.of(
							c.getFieldUsedInCrossEntityDerivedField1(),
							c.getFieldUsedInCrossEntityDerivedField2()
					) )
			);
		}
		// Adapter exposing this entity's accessors to the generic base test.
		static final ContainingEntityPrimitives<ContainingEntity,
				ContainingEmbeddable,
				ContainedEntity,
				Collection<ContainedEntity>> PRIMITIVES = new ContainingEntityPrimitives<ContainingEntity,
						ContainingEmbeddable,
						ContainedEntity,
						Collection<ContainedEntity>>() {
					@Override
					public Class<ContainingEntity> entityClass() {
						return ContainingEntity.class;
					}
					@Override
					public ContainingEntity newInstance(int id) {
						ContainingEntity entity = new ContainingEntity();
						entity.setId( id );
						return entity;
					}
					@Override
					public PropertyAccessor<ContainingEntity, ContainingEntity> child() {
						return PropertyAccessor.create( ContainingEntity::setChild );
					}
					@Override
					public PropertyAccessor<ContainingEntity, ContainingEntity> parent() {
						return PropertyAccessor.create( ContainingEntity::setParent );
					}
					@Override
					public Collection<ContainedEntity> newContainedAssociation(Collection<ContainedEntity> original) {
						return new ArrayList<>( original );
					}
					@Override
					public MultiValuedPropertyAccessor<ContainingEntity,
							ContainedEntity,
							Collection<ContainedEntity>> containedIndexedEmbedded() {
						return MultiValuedPropertyAccessor.create( ContainerPrimitives.collection(),
								ContainingEntity::getContainedIndexedEmbedded, ContainingEntity::setContainedIndexedEmbedded );
					}
					@Override
					public MultiValuedPropertyAccessor<ContainingEntity,
							ContainedEntity,
							Collection<ContainedEntity>> containedNonIndexedEmbedded() {
						return MultiValuedPropertyAccessor.create( ContainerPrimitives.collection(),
								ContainingEntity::getContainedNonIndexedEmbedded,
								ContainingEntity::setContainedNonIndexedEmbedded );
					}
					@Override
					public MultiValuedPropertyAccessor<ContainingEntity,
							ContainedEntity,
							Collection<ContainedEntity>> containedIndexedEmbeddedShallowReindexOnUpdate() {
						return MultiValuedPropertyAccessor.create( ContainerPrimitives.collection(),
								ContainingEntity::getContainedIndexedEmbeddedShallowReindexOnUpdate,
								ContainingEntity::setContainedIndexedEmbeddedShallowReindexOnUpdate );
					}
					@Override
					public MultiValuedPropertyAccessor<ContainingEntity,
							ContainedEntity,
							Collection<ContainedEntity>> containedIndexedEmbeddedNoReindexOnUpdate() {
						return MultiValuedPropertyAccessor.create( ContainerPrimitives.collection(),
								ContainingEntity::getContainedIndexedEmbeddedNoReindexOnUpdate,
								ContainingEntity::setContainedIndexedEmbeddedNoReindexOnUpdate );
					}
					@Override
					public MultiValuedPropertyAccessor<ContainingEntity,
							ContainedEntity,
							Collection<ContainedEntity>> containedUsedInCrossEntityDerivedProperty() {
						return MultiValuedPropertyAccessor.create( ContainerPrimitives.collection(),
								ContainingEntity::getContainedUsedInCrossEntityDerivedProperty,
								ContainingEntity::setContainedUsedInCrossEntityDerivedProperty );
					}
					@Override
					@SuppressWarnings({ "unchecked", "rawtypes" })
					public MultiValuedPropertyAccessor<ContainingEntity,
							ContainedEntity,
							Collection<ContainedEntity>> containedIndexedEmbeddedWithCast() {
						return MultiValuedPropertyAccessor.create( ContainerPrimitives.collection(),
								root -> (Collection) root.getContainedIndexedEmbeddedWithCast() );
					}
					@Override
					public PropertyAccessor<ContainingEntity, ContainingEmbeddable> embeddedAssociations() {
						return PropertyAccessor.create( ContainingEntity::setEmbeddedAssociations,
								ContainingEntity::getEmbeddedAssociations );
					}
					@Override
					public PropertyAccessor<ContainingEntity,
							ContainedEntity> containedElementCollectionAssociationsIndexedEmbedded() {
						return MultiValuedPropertyAccessor.create( ContainerPrimitives.collection(),
								ContainingEntity::getContainedElementCollectionAssociationsIndexedEmbedded );
					}
					@Override
					public PropertyAccessor<ContainingEntity,
							ContainedEntity> containedElementCollectionAssociationsNonIndexedEmbedded() {
						return MultiValuedPropertyAccessor.create( ContainerPrimitives.collection(),
								ContainingEntity::getContainedElementCollectionAssociationsNonIndexedEmbedded );
					}
					@Override
					public PropertyAccessor<ContainingEntity, String> nonIndexedField() {
						return PropertyAccessor.create( ContainingEntity::setNonIndexedField );
					}
				};
	}
	/**
	 * Embeddable on the containing side, holding associations nested in an
	 * {@code @Embedded}; the mappedBy paths reach into the contained side's own
	 * embeddable ({@code embeddedAssociations.*}).
	 */
	public static class ContainingEmbeddable {
		@OneToMany(mappedBy = "embeddedAssociations.containingAsIndexedEmbedded")
		@OrderBy("id asc") // Make sure the iteration order is predictable
		@IndexedEmbedded(includePaths = { "indexedField", "indexedElementCollectionField", "containedDerivedField" })
		private Collection<ContainedEntity> containedIndexedEmbedded = new ArrayList<>();
		// Not @IndexedEmbedded: updates must not trigger reindexing.
		@OneToMany(mappedBy = "embeddedAssociations.containingAsNonIndexedEmbedded")
		@OrderBy("id asc") // Make sure the iteration order is predictable
		private Collection<ContainedEntity> containedNonIndexedEmbedded = new ArrayList<>();
		public Collection<ContainedEntity> getContainedIndexedEmbedded() {
			return containedIndexedEmbedded;
		}
		public void setContainedIndexedEmbedded(Collection<ContainedEntity> containedIndexedEmbedded) {
			this.containedIndexedEmbedded = containedIndexedEmbedded;
		}
		public Collection<ContainedEntity> getContainedNonIndexedEmbedded() {
			return containedNonIndexedEmbedded;
		}
		public void setContainedNonIndexedEmbedded(Collection<ContainedEntity> containedNonIndexedEmbedded) {
			this.containedNonIndexedEmbedded = containedNonIndexedEmbedded;
		}
		// Adapter exposing this embeddable's accessors to the generic base test.
		static final ContainingEmbeddablePrimitives<ContainingEmbeddable,
				ContainedEntity,
				Collection<ContainedEntity>> PRIMITIVES = new ContainingEmbeddablePrimitives<ContainingEmbeddable,
						ContainedEntity,
						Collection<ContainedEntity>>() {
					@Override
					public ContainingEmbeddable newInstance() {
						return new ContainingEmbeddable();
					}
					@Override
					public MultiValuedPropertyAccessor<ContainingEmbeddable,
							ContainedEntity,
							Collection<ContainedEntity>> containedIndexedEmbedded() {
						return MultiValuedPropertyAccessor.create( ContainerPrimitives.collection(),
								ContainingEmbeddable::getContainedIndexedEmbedded,
								ContainingEmbeddable::setContainedIndexedEmbedded );
					}
					@Override
					public MultiValuedPropertyAccessor<ContainingEmbeddable,
							ContainedEntity,
							Collection<ContainedEntity>> containedNonIndexedEmbedded() {
						return MultiValuedPropertyAccessor.create( ContainerPrimitives.collection(),
								ContainingEmbeddable::getContainedNonIndexedEmbedded,
								ContainingEmbeddable::setContainedNonIndexedEmbedded );
					}
				};
	}
	/**
	 * The indexed root entity: a {@link ContainingEntity} subtype mapped to its
	 * own index; the base test asserts indexing work against this index.
	 */
	@Entity(name = "indexed")
	@Indexed(index = IndexedEntity.INDEX)
	public static class IndexedEntity extends ContainingEntity {
		static final String INDEX = "IndexedEntity";
		// Adapter used by the generic base test to create instances and locate the index.
		static final IndexedEntityPrimitives<IndexedEntity> PRIMITIVES = new IndexedEntityPrimitives<IndexedEntity>() {
			@Override
			public Class<IndexedEntity> entityClass() {
				return IndexedEntity.class;
			}
			@Override
			public String indexName() {
				return IndexedEntity.INDEX;
			}
			@Override
			public IndexedEntity newInstance(int id) {
				IndexedEntity entity = new IndexedEntity();
				entity.setId( id );
				return entity;
			}
		};
	}
	/**
	 * "Contained" side of the associations under test: owns the foreign keys
	 * (one {@code @ManyToOne} back-reference per association flavor) and carries
	 * the indexed/non-indexed fields whose updates the base test observes.
	 */
	@Entity(name = "contained")
	public static class ContainedEntity {
		@Id
		private Integer id;
		@ManyToOne
		private ContainingEntity containingAsIndexedEmbedded;
		@ManyToOne
		@JoinColumn(name = "containedNonIndexedEmbedded")
		private ContainingEntity containingAsNonIndexedEmbedded;
		@ManyToOne
		@JoinColumn(name = "indexedEmbeddedShallow")
		private ContainingEntity containingAsIndexedEmbeddedShallowReindexOnUpdate;
		@ManyToOne
		@JoinColumn(name = "indexedEmbeddedNoReindex")
		private ContainingEntity containingAsIndexedEmbeddedNoReindexOnUpdate;
		@ManyToOne
		@JoinColumn(name = "containedCrossEntityDP")
		private ContainingEntity containingAsUsedInCrossEntityDerivedProperty;
		// Object-typed on purpose: exercises @ManyToOne(targetEntity = ...) casting.
		@ManyToOne(targetEntity = ContainingEntity.class)
		@JoinColumn(name = "containedIndexedEmbeddedCast")
		private Object containingAsIndexedEmbeddedWithCast;
		@Embedded
		private ContainedEmbeddable embeddedAssociations;
		@ElementCollection
		@Embedded
		@OrderColumn(name = "idx")
		@CollectionTable(name = "c_ECAssoc")
		private List<ContainedEmbeddable> elementCollectionAssociations = new ArrayList<>();
		@Basic
		@GenericField
		private String indexedField;
		@ElementCollection
		@OrderColumn(name = "idx")
		@CollectionTable(name = "contained_IElementCF")
		@GenericField
		private List<String> indexedElementCollectionField = new ArrayList<>();
		@Basic
		@GenericField
		// Keep this annotation, it should be ignored because the field is not included in the @IndexedEmbedded
		private String nonIndexedField;
		@ElementCollection
		@OrderColumn(name = "idx")
		@CollectionTable(name = "nonIndexedECF")
		@Column(name = "nonIndexed")
		@GenericField
		// Keep this annotation, it should be ignored because the field is not included in the @IndexedEmbedded
		private List<String> nonIndexedElementCollectionField = new ArrayList<>();
		@Basic // Do not annotate with @GenericField, this would make the test pointless
		@Column(name = "FUIContainedDF1")
		private String fieldUsedInContainedDerivedField1;
		@Basic // Do not annotate with @GenericField, this would make the test pointless
		@Column(name = "FUIContainedDF2")
		private String fieldUsedInContainedDerivedField2;
		@Basic // Do not annotate with @GenericField, this would make the test pointless
		@Column(name = "FUICrossEntityDF1")
		private String fieldUsedInCrossEntityDerivedField1;
		@Basic // Do not annotate with @GenericField, this would make the test pointless
		@Column(name = "FUICrossEntityDF2")
		private String fieldUsedInCrossEntityDerivedField2;
		public Integer getId() {
			return id;
		}
		public void setId(Integer id) {
			this.id = id;
		}
		public ContainingEntity getContainingAsIndexedEmbedded() {
			return containingAsIndexedEmbedded;
		}
		public void setContainingAsIndexedEmbedded(ContainingEntity containingAsIndexedEmbedded) {
			this.containingAsIndexedEmbedded = containingAsIndexedEmbedded;
		}
		public ContainingEntity getContainingAsNonIndexedEmbedded() {
			return containingAsNonIndexedEmbedded;
		}
		public void setContainingAsNonIndexedEmbedded(ContainingEntity containingAsNonIndexedEmbedded) {
			this.containingAsNonIndexedEmbedded = containingAsNonIndexedEmbedded;
		}
		public ContainingEntity getContainingAsIndexedEmbeddedShallowReindexOnUpdate() {
			return containingAsIndexedEmbeddedShallowReindexOnUpdate;
		}
		public void setContainingAsIndexedEmbeddedShallowReindexOnUpdate(
				ContainingEntity containingAsIndexedEmbeddedShallowReindexOnUpdate) {
			this.containingAsIndexedEmbeddedShallowReindexOnUpdate = containingAsIndexedEmbeddedShallowReindexOnUpdate;
		}
		public ContainingEntity getContainingAsIndexedEmbeddedNoReindexOnUpdate() {
			return containingAsIndexedEmbeddedNoReindexOnUpdate;
		}
		public void setContainingAsIndexedEmbeddedNoReindexOnUpdate(
				ContainingEntity containingAsIndexedEmbeddedNoReindexOnUpdate) {
			this.containingAsIndexedEmbeddedNoReindexOnUpdate = containingAsIndexedEmbeddedNoReindexOnUpdate;
		}
		public ContainingEntity getContainingAsUsedInCrossEntityDerivedProperty() {
			return containingAsUsedInCrossEntityDerivedProperty;
		}
		public void setContainingAsUsedInCrossEntityDerivedProperty(
				ContainingEntity containingAsUsedInCrossEntityDerivedProperty) {
			this.containingAsUsedInCrossEntityDerivedProperty = containingAsUsedInCrossEntityDerivedProperty;
		}
		public Object getContainingAsIndexedEmbeddedWithCast() {
			return containingAsIndexedEmbeddedWithCast;
		}
		public void setContainingAsIndexedEmbeddedWithCast(Object containingAsIndexedEmbeddedWithCast) {
			this.containingAsIndexedEmbeddedWithCast = containingAsIndexedEmbeddedWithCast;
		}
		public ContainedEmbeddable getEmbeddedAssociations() {
			return embeddedAssociations;
		}
		public void setEmbeddedAssociations(ContainedEmbeddable embeddedAssociations) {
			this.embeddedAssociations = embeddedAssociations;
		}
		public List<ContainedEmbeddable> getElementCollectionAssociations() {
			return elementCollectionAssociations;
		}
		public String getIndexedField() {
			return indexedField;
		}
		public void setIndexedField(String indexedField) {
			this.indexedField = indexedField;
		}
		public List<String> getIndexedElementCollectionField() {
			return indexedElementCollectionField;
		}
		public void setIndexedElementCollectionField(List<String> indexedElementCollectionField) {
			this.indexedElementCollectionField = indexedElementCollectionField;
		}
		public String getNonIndexedField() {
			return nonIndexedField;
		}
		public void setNonIndexedField(String nonIndexedField) {
			this.nonIndexedField = nonIndexedField;
		}
		public List<String> getNonIndexedElementCollectionField() {
			return nonIndexedElementCollectionField;
		}
		public void setNonIndexedElementCollectionField(List<String> nonIndexedElementCollectionField) {
			this.nonIndexedElementCollectionField = nonIndexedElementCollectionField;
		}
		public String getFieldUsedInContainedDerivedField1() {
			return fieldUsedInContainedDerivedField1;
		}
		public void setFieldUsedInContainedDerivedField1(String fieldUsedInContainedDerivedField1) {
			this.fieldUsedInContainedDerivedField1 = fieldUsedInContainedDerivedField1;
		}
		public String getFieldUsedInContainedDerivedField2() {
			return fieldUsedInContainedDerivedField2;
		}
		public void setFieldUsedInContainedDerivedField2(String fieldUsedInContainedDerivedField2) {
			this.fieldUsedInContainedDerivedField2 = fieldUsedInContainedDerivedField2;
		}
		public String getFieldUsedInCrossEntityDerivedField1() {
			return fieldUsedInCrossEntityDerivedField1;
		}
		public void setFieldUsedInCrossEntityDerivedField1(String fieldUsedInCrossEntityDerivedField1) {
			this.fieldUsedInCrossEntityDerivedField1 = fieldUsedInCrossEntityDerivedField1;
		}
		public String getFieldUsedInCrossEntityDerivedField2() {
			return fieldUsedInCrossEntityDerivedField2;
		}
		public void setFieldUsedInCrossEntityDerivedField2(String fieldUsedInCrossEntityDerivedField2) {
			this.fieldUsedInCrossEntityDerivedField2 = fieldUsedInCrossEntityDerivedField2;
		}
		// Derived property computed from two non-indexed fields of this same entity;
		// @IndexingDependency(derivedFrom) makes updates to either field trigger reindexing.
		@Transient
		@GenericField
		@IndexingDependency(derivedFrom = {
				@ObjectPath(@PropertyValue(propertyName = "fieldUsedInContainedDerivedField1")),
				@ObjectPath(@PropertyValue(propertyName = "fieldUsedInContainedDerivedField2"))
		})
		public Optional<String> getContainedDerivedField() {
			return computeDerived( Stream.of( fieldUsedInContainedDerivedField1, fieldUsedInContainedDerivedField2 ) );
		}
		// Adapter exposing this entity's accessors to the generic base test.
		static ContainedEntityPrimitives<ContainedEntity, ContainedEmbeddable, ContainingEntity> PRIMITIVES =
				new ContainedEntityPrimitives<ContainedEntity, ContainedEmbeddable, ContainingEntity>() {
					@Override
					public Class<ContainedEntity> entityClass() {
						return ContainedEntity.class;
					}
					@Override
					public ContainedEntity newInstance(int id) {
						ContainedEntity entity = new ContainedEntity();
						entity.setId( id );
						return entity;
					}
					@Override
					public PropertyAccessor<ContainedEntity, ContainingEntity> containingAsIndexedEmbedded() {
						return PropertyAccessor.create( ContainedEntity::setContainingAsIndexedEmbedded,
								ContainedEntity::getContainingAsIndexedEmbedded );
					}
					@Override
					public PropertyAccessor<ContainedEntity, ContainingEntity> containingAsNonIndexedEmbedded() {
						return PropertyAccessor.create( ContainedEntity::setContainingAsNonIndexedEmbedded,
								ContainedEntity::getContainingAsNonIndexedEmbedded );
					}
					@Override
					public PropertyAccessor<ContainedEntity,
							ContainingEntity> containingAsIndexedEmbeddedShallowReindexOnUpdate() {
						return PropertyAccessor.create( ContainedEntity::setContainingAsIndexedEmbeddedShallowReindexOnUpdate,
								ContainedEntity::getContainingAsIndexedEmbeddedShallowReindexOnUpdate );
					}
					@Override
					public PropertyAccessor<ContainedEntity, ContainingEntity> containingAsIndexedEmbeddedNoReindexOnUpdate() {
						return PropertyAccessor.create( ContainedEntity::setContainingAsIndexedEmbeddedNoReindexOnUpdate,
								ContainedEntity::getContainingAsIndexedEmbeddedNoReindexOnUpdate );
					}
					@Override
					public PropertyAccessor<ContainedEntity, ContainingEntity> containingAsUsedInCrossEntityDerivedProperty() {
						return PropertyAccessor.create( ContainedEntity::setContainingAsUsedInCrossEntityDerivedProperty,
								ContainedEntity::getContainingAsUsedInCrossEntityDerivedProperty );
					}
					@Override
					public PropertyAccessor<ContainedEntity, ContainingEntity> containingAsIndexedEmbeddedWithCast() {
						return PropertyAccessor.create( ContainedEntity::setContainingAsIndexedEmbeddedWithCast );
					}
					@Override
					public PropertyAccessor<ContainedEntity, ContainedEmbeddable> embeddedAssociations() {
						return PropertyAccessor.create( ContainedEntity::setEmbeddedAssociations,
								ContainedEntity::getEmbeddedAssociations );
					}
					@Override
					public MultiValuedPropertyAccessor<ContainedEntity,
							ContainedEmbeddable,
							List<ContainedEmbeddable>> elementCollectionAssociations() {
						return MultiValuedPropertyAccessor.create( ContainerPrimitives.collection(),
								ContainedEntity::getElementCollectionAssociations );
					}
					@Override
					public PropertyAccessor<ContainedEntity, String> indexedField() {
						return PropertyAccessor.create( ContainedEntity::setIndexedField );
					}
					@Override
					public PropertyAccessor<ContainedEntity, String> nonIndexedField() {
						return PropertyAccessor.create( ContainedEntity::setNonIndexedField );
					}
					@Override
					public MultiValuedPropertyAccessor<ContainedEntity, String, List<String>> indexedElementCollectionField() {
						return MultiValuedPropertyAccessor.create( ContainerPrimitives.collection(),
								ContainedEntity::getIndexedElementCollectionField,
								ContainedEntity::setIndexedElementCollectionField );
					}
					@Override
					public MultiValuedPropertyAccessor<ContainedEntity,
							String,
							List<String>> nonIndexedElementCollectionField() {
						return MultiValuedPropertyAccessor.create( ContainerPrimitives.collection(),
								ContainedEntity::getNonIndexedElementCollectionField,
								ContainedEntity::setNonIndexedElementCollectionField );
					}
					@Override
					public PropertyAccessor<ContainedEntity, String> fieldUsedInContainedDerivedField1() {
						return PropertyAccessor.create( ContainedEntity::setFieldUsedInContainedDerivedField1 );
					}
					@Override
					public PropertyAccessor<ContainedEntity, String> fieldUsedInContainedDerivedField2() {
						return PropertyAccessor.create( ContainedEntity::setFieldUsedInContainedDerivedField2 );
					}
					@Override
					public PropertyAccessor<ContainedEntity, String> fieldUsedInCrossEntityDerivedField1() {
						return PropertyAccessor.create( ContainedEntity::setFieldUsedInCrossEntityDerivedField1 );
					}
					@Override
					public PropertyAccessor<ContainedEntity, String> fieldUsedInCrossEntityDerivedField2() {
						return PropertyAccessor.create( ContainedEntity::setFieldUsedInCrossEntityDerivedField2 );
					}
				};
	}
	/**
	 * Embeddable on the contained side holding the {@code @ManyToOne}
	 * back-references targeted by {@link ContainingEmbeddable}'s mappedBy paths
	 * ({@code embeddedAssociations.*}).
	 */
	public static class ContainedEmbeddable {
		@ManyToOne
		@JoinColumn(name = "emb_containedIdxEmbedded")
		private ContainingEntity containingAsIndexedEmbedded;
		@ManyToOne
		@JoinColumn(name = "emb_containedNonIdxEmbedded")
		private ContainingEntity containingAsNonIndexedEmbedded;
		public ContainingEntity getContainingAsIndexedEmbedded() {
			return containingAsIndexedEmbedded;
		}
		public void setContainingAsIndexedEmbedded(ContainingEntity containingAsIndexedEmbedded) {
			this.containingAsIndexedEmbedded = containingAsIndexedEmbedded;
		}
		public ContainingEntity getContainingAsNonIndexedEmbedded() {
			return containingAsNonIndexedEmbedded;
		}
		public void setContainingAsNonIndexedEmbedded(ContainingEntity containingAsNonIndexedEmbedded) {
			this.containingAsNonIndexedEmbedded = containingAsNonIndexedEmbedded;
		}
		// Adapter exposing this embeddable's accessors to the generic base test.
		static ContainedEmbeddablePrimitives<ContainedEmbeddable, ContainingEntity> PRIMITIVES =
				new ContainedEmbeddablePrimitives<ContainedEmbeddable, ContainingEntity>() {
					@Override
					public ContainedEmbeddable newInstance() {
						return new ContainedEmbeddable();
					}
					@Override
					public PropertyAccessor<ContainedEmbeddable, ContainingEntity> containingAsIndexedEmbedded() {
						return PropertyAccessor.create( ContainedEmbeddable::setContainingAsIndexedEmbedded,
								ContainedEmbeddable::getContainingAsIndexedEmbedded );
					}
					@Override
					public PropertyAccessor<ContainedEmbeddable, ContainingEntity> containingAsNonIndexedEmbedded() {
						return PropertyAccessor.create( ContainedEmbeddable::setContainingAsNonIndexedEmbedded,
								ContainedEmbeddable::getContainingAsNonIndexedEmbedded );
					}
				};
	}
}
|
openjdk/nashorn | 36,734 | src/org.openjdk.nashorn/share/classes/org/openjdk/nashorn/internal/codegen/types/Type.java | /*
* Copyright (c) 2010, 2016, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package org.openjdk.nashorn.internal.codegen.types;
import static org.objectweb.asm.Opcodes.DALOAD;
import static org.objectweb.asm.Opcodes.DASTORE;
import static org.objectweb.asm.Opcodes.DUP;
import static org.objectweb.asm.Opcodes.DUP2;
import static org.objectweb.asm.Opcodes.DUP2_X1;
import static org.objectweb.asm.Opcodes.DUP2_X2;
import static org.objectweb.asm.Opcodes.DUP_X1;
import static org.objectweb.asm.Opcodes.DUP_X2;
import static org.objectweb.asm.Opcodes.IALOAD;
import static org.objectweb.asm.Opcodes.IASTORE;
import static org.objectweb.asm.Opcodes.INVOKESTATIC;
import static org.objectweb.asm.Opcodes.LALOAD;
import static org.objectweb.asm.Opcodes.LASTORE;
import static org.objectweb.asm.Opcodes.NEWARRAY;
import static org.objectweb.asm.Opcodes.POP;
import static org.objectweb.asm.Opcodes.POP2;
import static org.objectweb.asm.Opcodes.SWAP;
import static org.objectweb.asm.Opcodes.T_DOUBLE;
import static org.objectweb.asm.Opcodes.T_INT;
import static org.objectweb.asm.Opcodes.T_LONG;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.io.Serializable;
import java.util.Collections;
import java.util.Map;
import java.util.TreeMap;
import java.util.WeakHashMap;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import org.objectweb.asm.MethodVisitor;
import org.openjdk.nashorn.internal.codegen.CompilerConstants.Call;
import org.openjdk.nashorn.internal.runtime.Context;
import org.openjdk.nashorn.internal.runtime.ScriptObject;
import org.openjdk.nashorn.internal.runtime.Undefined;
/**
* This is the representation of a JavaScript type, disassociated from java
* Classes, with the basis for conversion weight, mapping to ASM types
* and implementing the ByteCodeOps interface which tells this type
* how to generate code for various operations.
*
* Except for ClassEmitter, this is the only class that has to know
* about the underlying byte code generation system.
*
* The different types know how to generate bytecode for the different
* operations, inherited from BytecodeOps, that they support. This avoids
* if/else chains depending on type in several cases and allows for
* more readable and shorter code
*
* The Type class also contains logic used by the type inference and
* for comparing types against each other, as well as the concepts
* of narrower to wider types. The widest type is an object. Ideally we
* would like as narrow types as possible for code to be efficient, e.g
* INTs rather than OBJECTs
*/
public abstract class Type implements Comparable<Type>, BytecodeOps, Serializable {
private static final long serialVersionUID = 1L;
/** Human readable name for type */
private transient final String name;
/** Descriptor for type */
private transient final String descriptor;
/** The "weight" of the type. Used for picking widest/least specific common type */
private transient final int weight;
/** How many bytecode slots does this type occupy */
private transient final int slots;
/** The class for this type */
private final Class<?> clazz;
/**
* Cache for internal types - this is a query that requires complex stringbuilding inside
* ASM and it saves startup time to cache the type mappings
*/
private static final Map<Class<?>, org.objectweb.asm.Type> INTERNAL_TYPE_CACHE =
Collections.synchronizedMap(new WeakHashMap<Class<?>, org.objectweb.asm.Type>());
/** Internal ASM type for this Type - computed once at construction */
private transient final org.objectweb.asm.Type internalType;
/** Weights are used to decide which types are "wider" than other types */
protected static final int MIN_WEIGHT = -1;
/** Set way below Integer.MAX_VALUE to prevent overflow when adding weights. Objects are still heaviest. */
protected static final int MAX_WEIGHT = 20;
/**
 * Constructor
 *
 * @param name   human readable name for the type, used by {@link #toString()}
 * @param clazz  class for type
 * @param weight weight - higher is more generic
 * @param slots  how many bytecode slots the type takes up (1, or 2 for long/double)
 */
Type(final String name, final Class<?> clazz, final int weight, final int slots) {
    this.name       = name;
    this.clazz      = clazz;
    this.descriptor = org.objectweb.asm.Type.getDescriptor(clazz);
    this.weight     = weight;
    // Weights must stay within [MIN_WEIGHT, MAX_WEIGHT] so that weight sums
    // used during type comparison cannot overflow.
    assert weight >= MIN_WEIGHT && weight <= MAX_WEIGHT : "illegal type weight: " + weight;
    this.slots = slots;
    // Computed once here; the per-class lookup is cached in INTERNAL_TYPE_CACHE.
    this.internalType = getInternalType(clazz);
}
/**
* Get the weight of this type - use this e.g. for sorting method descriptors
* @return the weight
*/
public int getWeight() {
return weight;
}
/**
* Get the Class representing this type
* @return the class for this type
*/
public Class<?> getTypeClass() {
return clazz;
}
/**
* For specialization, return the next, slightly more difficulty, type
* to test.
*
* @return the next Type
*/
public Type nextWider() {
return null;
}
/**
* Get the boxed type for this class
* @return the boxed version of this type or null if N/A
*/
public Class<?> getBoxedType() {
assert !getTypeClass().isPrimitive();
return null;
}
/**
* Returns the character describing the bytecode type for this value on the stack or local variable, identical to
* what would be used as the prefix for a bytecode {@code LOAD} or {@code STORE} instruction, therefore it must be
* one of {@code A, F, D, I, L}. Also, the special value {@code U} is used for local variable slots that haven't
* been initialized yet (it can't appear for a value pushed to the operand stack, those always have known values).
* Note that while we allow all JVM internal types, Nashorn doesn't necessarily use them all - currently we don't
* have floats, only doubles, but that might change in the future.
* @return the character describing the bytecode type for this value on the stack.
*/
public abstract char getBytecodeStackType();
/**
 * Generate a JVM method descriptor string from a Nashorn return type and
 * Nashorn parameter types.
 *
 * @param returnType return type
 * @param types      parameters
 *
 * @return a descriptor string
 */
public static String getMethodDescriptor(final Type returnType, final Type... types) {
    // Translate each Nashorn type to its ASM counterpart before delegating
    // to ASM's descriptor builder.
    final org.objectweb.asm.Type[] asmParamTypes = new org.objectweb.asm.Type[types.length];
    int index = 0;
    for (final Type paramType : types) {
        asmParamTypes[index++] = paramType.getInternalType();
    }
    return org.objectweb.asm.Type.getMethodDescriptor(returnType.getInternalType(), asmParamTypes);
}
/**
 * Generate a JVM method descriptor string from a Java return class and
 * Java parameter classes.
 *
 * @param returnType return type
 * @param types      parameters
 *
 * @return a descriptor string
 */
public static String getMethodDescriptor(final Class<?> returnType, final Class<?>... types) {
    // Map every parameter class to its (cached) ASM type, then let ASM
    // assemble the descriptor.
    final org.objectweb.asm.Type[] asmParamTypes = new org.objectweb.asm.Type[types.length];
    int index = 0;
    for (final Class<?> paramClass : types) {
        asmParamTypes[index++] = getInternalType(paramClass);
    }
    return org.objectweb.asm.Type.getMethodDescriptor(getInternalType(returnType), asmParamTypes);
}
/**
* Return a character representing {@code type} in a method signature.
*
* @param type parameter type
* @return descriptor character
*/
public static char getShortSignatureDescriptor(final Type type) {
// Use 'Z' for boolean parameters as we need to distinguish from int
if (type instanceof BooleanType) {
return 'Z';
}
return type.getBytecodeStackType();
}
/**
 * Return the type for an internal type, package private - do not use
 * outside code gen
 *
 * @param itype internal type
 * @return Nashorn type, or {@code null} for void or an unknown sort
 */
@SuppressWarnings("fallthrough")
private static Type typeFor(final org.objectweb.asm.Type itype) {
    switch (itype.getSort()) {
    case org.objectweb.asm.Type.BOOLEAN:
        return BOOLEAN;
    case org.objectweb.asm.Type.INT:
        return INT;
    case org.objectweb.asm.Type.LONG:
        return LONG;
    case org.objectweb.asm.Type.DOUBLE:
        return NUMBER;
    case org.objectweb.asm.Type.OBJECT:
        // Generated structure classes all behave as ScriptObject; collapse
        // them to the shared SCRIPT_OBJECT singleton instead of loading them.
        if (Context.isStructureClass(itype.getClassName())) {
            return SCRIPT_OBJECT;
        }
        // Resolve by class name, memoized in cacheByName. A missing class
        // here is an internal invariant violation, hence AssertionError.
        return cacheByName.computeIfAbsent(itype.getClassName(), (name) -> {
            try {
                return Type.typeFor(Class.forName(name));
            } catch(final ClassNotFoundException e) {
                throw new AssertionError(e);
            }
        });
    case org.objectweb.asm.Type.VOID:
        return null;
    case org.objectweb.asm.Type.ARRAY:
        switch (itype.getElementType().getSort()) {
        case org.objectweb.asm.Type.DOUBLE:
            return NUMBER_ARRAY;
        case org.objectweb.asm.Type.INT:
            return INT_ARRAY;
        case org.objectweb.asm.Type.LONG:
            return LONG_ARRAY;
        default:
            // Unexpected element sort: assert in debug builds, then fall
            // through to treat it as an object array. This intentional
            // fallthrough is why the method carries
            // @SuppressWarnings("fallthrough").
            assert false;
        case org.objectweb.asm.Type.OBJECT:
            return OBJECT_ARRAY;
        }
    default:
        assert false : "Unknown itype : " + itype + " sort " + itype.getSort();
        break;
    }
    return null;
}
/**
* Get the return type for a method
*
* @param methodDescriptor method descriptor
* @return return type
*/
public static Type getMethodReturnType(final String methodDescriptor) {
return Type.typeFor(org.objectweb.asm.Type.getReturnType(methodDescriptor));
}
/**
* Get type array representing arguments of a method in order
*
* @param methodDescriptor method descriptor
* @return parameter type array
*/
public static Type[] getMethodArguments(final String methodDescriptor) {
final org.objectweb.asm.Type[] itypes = org.objectweb.asm.Type.getArgumentTypes(methodDescriptor);
final Type[] types = new Type[itypes.length];
for (int i = 0; i < itypes.length; i++) {
types[i] = Type.typeFor(itypes[i]);
}
return types;
}
/**
 * Write a map of {@code int} to {@code Type} to an output stream. This is used to store
 * deoptimization state. Only OBJECT ('L'), NUMBER ('D') and LONG ('J') entries are legal.
 *
 * @param typeMap the type map, may be {@code null} (encoded as size 0)
 * @param output  data output
 * @throws IOException if write cannot be completed
 */
public static void writeTypeMap(final Map<Integer, Type> typeMap, final DataOutput output) throws IOException {
    // A null map is serialized as an empty one; readTypeMap turns size 0 back into null.
    if (typeMap == null) {
        output.writeInt(0);
        return;
    }
    output.writeInt(typeMap.size());
    for (final Map.Entry<Integer, Type> entry : typeMap.entrySet()) {
        output.writeInt(entry.getKey());
        final Type type = entry.getValue();
        final byte typeChar;
        if (type == Type.OBJECT) {
            typeChar = 'L';
        } else if (type == Type.NUMBER) {
            typeChar = 'D';
        } else if (type == Type.LONG) {
            typeChar = 'J';
        } else {
            // Any other type in a deoptimization map is a programming error.
            throw new AssertionError();
        }
        output.writeByte(typeChar);
    }
}
/**
 * Read a map of {@code int} to {@code Type} from an input stream. This is used to store
 * deoptimization state.
 *
 * @param input data input
 * @return type map, or {@code null} if the serialized size was zero or negative
 * @throws IOException if read cannot be completed
 */
public static Map<Integer, Type> readTypeMap(final DataInput input) throws IOException {
    final int count = input.readInt();
    if (count <= 0) {
        return null;
    }
    final Map<Integer, Type> result = new TreeMap<>();
    for (int i = 0; i < count; i++) {
        final int programPoint = input.readInt();
        final int typeChar = input.readByte();
        final Type type;
        if (typeChar == 'L') {
            type = Type.OBJECT;
        } else if (typeChar == 'D') {
            type = Type.NUMBER;
        } else if (typeChar == 'J') {
            type = Type.LONG;
        } else {
            // Unrecognized entries are skipped rather than treated as fatal.
            continue;
        }
        result.put(programPoint, type);
    }
    return result;
}
static org.objectweb.asm.Type getInternalType(final String className) {
return org.objectweb.asm.Type.getType(className);
}
private org.objectweb.asm.Type getInternalType() {
return internalType;
}
// Cached lookup of the ASM type for a class. The cache is a synchronized
// WeakHashMap, so the check-then-put below is not atomic: two threads may
// both compute the same value, but since the mapping is deterministic the
// race is benign (last write wins with an identical value).
private static org.objectweb.asm.Type lookupInternalType(final Class<?> type) {
    final Map<Class<?>, org.objectweb.asm.Type> c = INTERNAL_TYPE_CACHE;
    org.objectweb.asm.Type itype = c.get(type);
    if (itype != null) {
        return itype;
    }
    itype = org.objectweb.asm.Type.getType(type);
    c.put(type, itype);
    return itype;
}

// Thin alias kept so call sites read symmetrically with getInternalType(String).
private static org.objectweb.asm.Type getInternalType(final Class<?> type) {
    return lookupInternalType(type);
}
static void invokestatic(final MethodVisitor method, final Call call) {
method.visitMethodInsn(INVOKESTATIC, call.className(), call.name(), call.descriptor(), false);
}
/**
* Get the internal JVM name of a type
* @return the internal name
*/
public String getInternalName() {
return org.objectweb.asm.Type.getInternalName(getTypeClass());
}
/**
* Get the internal JVM name of type type represented by a given Java class
* @param clazz the class
* @return the internal name
*/
public static String getInternalName(final Class<?> clazz) {
return org.objectweb.asm.Type.getInternalName(clazz);
}
/**
* Determines whether a type is the UNKNOWN type, i.e. not set yet
* Used for type inference.
*
* @return true if UNKNOWN, false otherwise
*/
public boolean isUnknown() {
return this.equals(Type.UNKNOWN);
}
/**
* Determines whether this type represents an primitive type according to the ECMAScript specification,
* which includes Boolean, Number, and String.
*
* @return true if a JavaScript primitive type, false otherwise.
*/
public boolean isJSPrimitive() {
return !isObject() || isString();
}
/**
* Determines whether a type is the BOOLEAN type
* @return true if BOOLEAN, false otherwise
*/
public boolean isBoolean() {
return this.equals(Type.BOOLEAN);
}
/**
* Determines whether a type is the INT type
* @return true if INTEGER, false otherwise
*/
public boolean isInteger() {
return this.equals(Type.INT);
}
/**
* Determines whether a type is the LONG type
* @return true if LONG, false otherwise
*/
public boolean isLong() {
return this.equals(Type.LONG);
}
/**
* Determines whether a type is the NUMBER type
* @return true if NUMBER, false otherwise
*/
public boolean isNumber() {
return this.equals(Type.NUMBER);
}
/**
* Determines whether a type is numeric, i.e. NUMBER,
* INT, LONG.
*
* @return true if numeric, false otherwise
*/
public boolean isNumeric() {
return this instanceof NumericType;
}
/**
* Determines whether a type is an array type, i.e.
* OBJECT_ARRAY or NUMBER_ARRAY (for now)
*
* @return true if an array type, false otherwise
*/
public boolean isArray() {
return this instanceof ArrayType;
}
/**
* Determines if a type takes up two bytecode slots or not
*
* @return true if type takes up two bytecode slots rather than one
*/
public boolean isCategory2() {
return getSlots() == 2;
}
/**
* Determines whether a type is an OBJECT type, e.g. OBJECT, STRING,
* NUMBER_ARRAY etc.
*
* @return true if object type, false otherwise
*/
public boolean isObject() {
return this instanceof ObjectType;
}
/**
* Is this a primitive type (e.g int, long, double, boolean)
* @return true if primitive
*/
public boolean isPrimitive() {
return !isObject();
}
/**
* Determines whether a type is a STRING type
*
* @return true if object type, false otherwise
*/
public boolean isString() {
return this.equals(Type.STRING);
}
/**
* Determines whether a type is a CHARSEQUENCE type used internally strings
*
* @return true if CharSequence (internal string) type, false otherwise
*/
public boolean isCharSequence() {
return this.equals(Type.CHARSEQUENCE);
}
/**
* Determine if two types are equivalent, i.e. need no conversion
*
* @param type the second type to check
*
* @return true if types are equivalent, false otherwise
*/
public boolean isEquivalentTo(final Type type) {
return this.weight() == type.weight() || isObject() && type.isObject();
}
/**
* Determine if a type can be assigned to from another
*
* @param type0 the first type to check
* @param type1 the second type to check
*
* @return true if type1 can be written to type2, false otherwise
*/
public static boolean isAssignableFrom(final Type type0, final Type type1) {
if (type0.isObject() && type1.isObject()) {
return type0.weight() >= type1.weight();
}
return type0.weight() == type1.weight();
}
/**
* Determine if this type is assignable from another type
* @param type the type to check against
*
* @return true if "type" can be written to this type, false otherwise
*/
public boolean isAssignableFrom(final Type type) {
return Type.isAssignableFrom(this, type);
}
/**
* Determines is this type is equivalent to another, i.e. needs no conversion
* to be assigned to it.
*
* @param type0 the first type to check
* @param type1 the second type to check
*
* @return true if this type is equivalent to type, false otherwise
*/
public static boolean areEquivalent(final Type type0, final Type type1) {
return type0.isEquivalentTo(type1);
}
/**
* Determine the number of bytecode slots a type takes up
*
* @return the number of slots for this type, 1 or 2.
*/
public int getSlots() {
return slots;
}
/**
* Returns the widest or most common of two types
*
* @param type0 type one
* @param type1 type two
*
* @return the widest type
*/
public static Type widest(final Type type0, final Type type1) {
if (type0.isArray() && type1.isArray()) {
return ((ArrayType)type0).getElementType() == ((ArrayType)type1).getElementType() ? type0 : Type.OBJECT;
} else if (type0.isArray() != type1.isArray()) {
//array and non array is always object, widest(Object[], int) NEVER returns Object[], which has most weight. that does not make sense
return Type.OBJECT;
} else if (type0.isObject() && type1.isObject() && type0.getTypeClass() != type1.getTypeClass()) {
// Object<type=String> and Object<type=ScriptFunction> will produce Object
// TODO: maybe find most specific common superclass?
return Type.OBJECT;
}
return type0.weight() > type1.weight() ? type0 : type1;
}
/**
* Returns the widest or most common of two types, given as classes
*
* @param type0 type one
* @param type1 type two
*
* @return the widest type
*/
public static Class<?> widest(final Class<?> type0, final Class<?> type1) {
return widest(Type.typeFor(type0), Type.typeFor(type1)).getTypeClass();
}
/**
* When doing widening for return types of a function or a ternary operator, it is not valid to widen a boolean to
* anything other than object. Note that this wouldn't be necessary if {@code Type.widest} did not allow
* boolean-to-number widening. Eventually, we should address it there, but it affects too many other parts of the
* system and is sometimes legitimate (e.g. whenever a boolean value would undergo ToNumber conversion anyway).
* @param t1 type 1
* @param t2 type 2
* @return wider of t1 and t2, except if one is boolean and the other is neither boolean nor unknown, in which case
* {@code Type.OBJECT} is returned.
*/
public static Type widestReturnType(final Type t1, final Type t2) {
if (t1.isUnknown()) {
return t2;
} else if (t2.isUnknown()) {
return t1;
} else if(t1.isBoolean() != t2.isBoolean() || t1.isNumeric() != t2.isNumeric()) {
return Type.OBJECT;
}
return Type.widest(t1, t2);
}
/**
* Returns a generic version of the type. Basically, if the type {@link #isObject()}, returns {@link #OBJECT},
* otherwise returns the type unchanged.
* @param type the type to generify
* @return the generified type
*/
public static Type generic(final Type type) {
return type.isObject() ? Type.OBJECT : type;
}
/**
* Returns the narrowest or least common of two types
*
* @param type0 type one
* @param type1 type two
*
* @return the widest type
*/
public static Type narrowest(final Type type0, final Type type1) {
return type0.narrowerThan(type1) ? type0 : type1;
}
/**
* Check whether this type is strictly narrower than another one
* @param type type to check against
* @return true if this type is strictly narrower
*/
public boolean narrowerThan(final Type type) {
return weight() < type.weight();
}
/**
* Check whether this type is strictly wider than another one
* @param type type to check against
* @return true if this type is strictly wider
*/
public boolean widerThan(final Type type) {
return weight() > type.weight();
}
/**
* Returns the widest or most common of two types, but no wider than "limit"
*
* @param type0 type one
* @param type1 type two
* @param limit limiting type
*
* @return the widest type, but no wider than limit
*/
public static Type widest(final Type type0, final Type type1, final Type limit) {
final Type type = Type.widest(type0, type1);
if (type.weight() > limit.weight()) {
return limit;
}
return type;
}
/**
 * Returns the narrowest or least common of two types, but no narrower than "limit"
 *
 * @param type0 type one
 * @param type1 type two
 * @param limit limiting type
 *
 * @return the narrowest type, but no narrower than limit
 */
public static Type narrowest(final Type type0, final Type type1, final Type limit) {
    final Type type = type0.weight() < type1.weight() ? type0 : type1;
    // Clamp from below: never return something narrower than the limit.
    if (type.weight() < limit.weight()) {
        return limit;
    }
    return type;
}
/**
 * Returns the narrowest of this type and another
 *
 * @param other type to compare against
 *
 * @return the narrowest type
 */
public Type narrowest(final Type other) {
    return Type.narrowest(this, other);
}
/**
* Returns the widest of this type and another
*
* @param other type to compare against
*
* @return the widest type
*/
public Type widest(final Type other) {
return Type.widest(this, other);
}
/**
* Returns the weight of a type, used for type comparison
* between wider and narrower types
*
* @return the weight
*/
int weight() {
return weight;
}
/**
* Return the descriptor of a type, used for e.g. signature
* generation
*
* @return the descriptor
*/
public String getDescriptor() {
return descriptor;
}
/**
* Return the descriptor of a type, short version
* Used mainly for debugging purposes
*
* @return the short descriptor
*/
public String getShortDescriptor() {
return descriptor;
}
@Override
public String toString() {
return name;
}
/**
* Return the (possibly cached) Type object for this class
*
* @param clazz the class to check
*
* @return the Type representing this class
*/
public static Type typeFor(final Class<?> clazz) {
return cache.computeIfAbsent(clazz, (keyClass) -> {
assert !keyClass.isPrimitive() || keyClass == void.class;
return keyClass.isArray() ? new ArrayType(keyClass) : new ObjectType(keyClass);
});
}
/**
 * Orders types from widest (heaviest weight) to narrowest.
 *
 * <p>NOTE: this ordering is inconsistent with {@code equals} for distinct
 * types that happen to share a weight.</p>
 *
 * @param o the type to compare against
 * @return negative if this type is wider than {@code o}, zero on equal
 *         weight, positive if narrower
 */
@Override
public int compareTo(final Type o) {
    // Integer.compare instead of subtraction: states the intent directly and
    // is immune to overflow (weights are bounded here, but the subtraction
    // idiom is a known comparator antipattern).
    return Integer.compare(o.weight(), weight());
}
/**
* Common logic for implementing dup for all types
*
* @param method method visitor
* @param depth dup depth
*
* @return the type at the top of the stack afterwards
*/
@Override
public Type dup(final MethodVisitor method, final int depth) {
return Type.dup(method, this, depth);
}
/**
* Common logic for implementing swap for all types
*
* @param method method visitor
* @param other the type to swap with
*
* @return the type at the top of the stack afterwards, i.e. other
*/
@Override
public Type swap(final MethodVisitor method, final Type other) {
Type.swap(method, this, other);
return other;
}
/**
* Common logic for implementing pop for all types
*
* @param method method visitor
*
* @return the type that was popped
*/
@Override
public Type pop(final MethodVisitor method) {
Type.pop(method, this);
return this;
}
@Override
public Type loadEmpty(final MethodVisitor method) {
assert false : "unsupported operation";
return null;
}
/**
 * Superclass logic for pop for all types
 *
 * @param method method emitter
 * @param type   type to pop
 */
protected static void pop(final MethodVisitor method, final Type type) {
    // Category 2 types (long/double) take two stack slots and need POP2.
    method.visitInsn(type.isCategory2() ? POP2 : POP);
}
/**
 * Emit the dup instruction matching the type's slot width and the requested
 * stack depth (0, 1 or 2 values down).
 *
 * @param method method visitor
 * @param type   type being duplicated
 * @param depth  how far down the stack the duplicate should be inserted
 * @return the duplicated type, or {@code null} for an invalid depth
 *         (in which case no instruction is emitted)
 */
private static Type dup(final MethodVisitor method, final Type type, final int depth) {
    final boolean twoSlots = type.isCategory2();
    final int opcode;
    if (depth == 0) {
        opcode = twoSlots ? DUP2 : DUP;
    } else if (depth == 1) {
        opcode = twoSlots ? DUP2_X1 : DUP_X1;
    } else if (depth == 2) {
        opcode = twoSlots ? DUP2_X2 : DUP_X2;
    } else {
        return null; // invalid depth
    }
    method.visitInsn(opcode);
    return type;
}
/**
 * Emit bytecode swapping the two topmost stack values, taking their slot
 * widths into account. A plain SWAP only works for two category-1 values;
 * any category-2 operand requires a dup/pop emulation.
 *
 * @param method method visitor
 * @param above  type on top of the stack
 * @param below  type directly beneath it
 */
private static void swap(final MethodVisitor method, final Type above, final Type below) {
    final boolean aboveWide = above.isCategory2();
    final boolean belowWide = below.isCategory2();

    if (!aboveWide && !belowWide) {
        // Fast path: two single-slot values swap with one instruction.
        method.visitInsn(SWAP);
        return;
    }

    if (aboveWide) {
        // Duplicate the wide top value beneath 'below', then drop the original.
        method.visitInsn(belowWide ? DUP2_X2 : DUP2_X1);
        method.visitInsn(POP2);
    } else {
        // Narrow value on top of a wide one: tuck it under, drop the original.
        method.visitInsn(DUP_X2);
        method.visitInsn(POP);
    }
}
/** Mappings between java classes and their Type singletons */
private static final ConcurrentMap<Class<?>, Type> cache = new ConcurrentHashMap<>();
private static final ConcurrentMap<String, Type> cacheByName = new ConcurrentHashMap<>();
/**
* This is the boolean singleton, used for all boolean types
*/
public static final Type BOOLEAN = putInCache(new BooleanType());
/**
* This is an integer type, i.e INT, INT32.
*/
public static final BitwiseType INT = putInCache(new IntType());
/**
* This is the number singleton, used for all number types
*/
public static final NumericType NUMBER = putInCache(new NumberType());
/**
* This is the long singleton, used for all long types
*/
public static final Type LONG = putInCache(new LongType());
/**
* A string singleton
*/
public static final Type STRING = putInCache(new ObjectType(String.class));
/**
* This is the CharSequence singleton used to represent JS strings internally
* (either a {@code java.lang.String} or {@code org.openjdk.nashorn.internal.runtime.ConsString}.
*/
public static final Type CHARSEQUENCE = putInCache(new ObjectType(CharSequence.class));
/**
* This is the object singleton, used for all object types
*/
public static final Type OBJECT = putInCache(new ObjectType());
/**
* A undefined singleton
*/
public static final Type UNDEFINED = putInCache(new ObjectType(Undefined.class));
/**
* This is the singleton for ScriptObjects
*/
public static final Type SCRIPT_OBJECT = putInCache(new ObjectType(ScriptObject.class));
/**
* This is the singleton for integer arrays
*/
public static final ArrayType INT_ARRAY = putInCache(new ArrayType(int[].class) {
private static final long serialVersionUID = 1L;
@Override
public void astore(final MethodVisitor method) {
method.visitInsn(IASTORE);
}
@Override
public Type aload(final MethodVisitor method) {
method.visitInsn(IALOAD);
return INT;
}
@Override
public Type newarray(final MethodVisitor method) {
method.visitIntInsn(NEWARRAY, T_INT);
return this;
}
@Override
public Type getElementType() {
return INT;
}
});
/**
* This is the singleton for long arrays
*/
public static final ArrayType LONG_ARRAY = putInCache(new ArrayType(long[].class) {
private static final long serialVersionUID = 1L;
@Override
public void astore(final MethodVisitor method) {
method.visitInsn(LASTORE);
}
@Override
public Type aload(final MethodVisitor method) {
method.visitInsn(LALOAD);
return LONG;
}
@Override
public Type newarray(final MethodVisitor method) {
method.visitIntInsn(NEWARRAY, T_LONG);
return this;
}
@Override
public Type getElementType() {
return LONG;
}
});
/**
* This is the singleton for numeric arrays
*/
public static final ArrayType NUMBER_ARRAY = putInCache(new ArrayType(double[].class) {
private static final long serialVersionUID = 1L;
@Override
public void astore(final MethodVisitor method) {
method.visitInsn(DASTORE);
}
@Override
public Type aload(final MethodVisitor method) {
method.visitInsn(DALOAD);
return NUMBER;
}
@Override
public Type newarray(final MethodVisitor method) {
method.visitIntInsn(NEWARRAY, T_DOUBLE);
return this;
}
@Override
public Type getElementType() {
return NUMBER;
}
});
/** This is the singleton for object arrays */
public static final ArrayType OBJECT_ARRAY = putInCache(new ArrayType(Object[].class));
/** This type, always an object type, just a toString override */
public static final Type THIS = new ObjectType() {
private static final long serialVersionUID = 1L;
@Override
public String toString() {
return "this";
}
};
/** Scope type, always an object type, just a toString override */
public static final Type SCOPE = new ObjectType() {
private static final long serialVersionUID = 1L;
@Override
public String toString() {
return "scope";
}
};
private interface Unknown {
// EMPTY - used as a class that is absolutely not compatible with a type to represent "unknown"
}
private abstract static class ValueLessType extends Type {
private static final long serialVersionUID = 1L;
ValueLessType(final String name) {
super(name, Unknown.class, MIN_WEIGHT, 1);
}
@Override
public Type load(final MethodVisitor method, final int slot) {
throw new UnsupportedOperationException("load " + slot);
}
@Override
public void store(final MethodVisitor method, final int slot) {
throw new UnsupportedOperationException("store " + slot);
}
@Override
public Type ldc(final MethodVisitor method, final Object c) {
throw new UnsupportedOperationException("ldc " + c);
}
@Override
public Type loadUndefined(final MethodVisitor method) {
throw new UnsupportedOperationException("load undefined");
}
@Override
public Type loadForcedInitializer(final MethodVisitor method) {
throw new UnsupportedOperationException("load forced initializer");
}
@Override
public Type convert(final MethodVisitor method, final Type to) {
throw new UnsupportedOperationException("convert => " + to);
}
@Override
public void _return(final MethodVisitor method) {
throw new UnsupportedOperationException("return");
}
@Override
public Type add(final MethodVisitor method, final int programPoint) {
throw new UnsupportedOperationException("add");
}
}
/**
* This is the unknown type which is used as initial type for type
* inference. It has the minimum type width
*/
public static final Type UNKNOWN = new ValueLessType("<unknown>") {
private static final long serialVersionUID = 1L;
@Override
public String getDescriptor() {
return "<unknown>";
}
@Override
public char getBytecodeStackType() {
return 'U';
}
};
/**
* This is the unknown type which is used as initial type for type
* inference. It has the minimum type width
*/
public static final Type SLOT_2 = new ValueLessType("<slot_2>") {
private static final long serialVersionUID = 1L;
@Override
public String getDescriptor() {
return "<slot_2>";
}
@Override
public char getBytecodeStackType() {
throw new UnsupportedOperationException("getBytecodeStackType");
}
};
private static <T extends Type> T putInCache(final T type) {
cache.put(type.getTypeClass(), type);
return type;
}
/**
* Read resolve
* @return resolved type
*/
protected final Object readResolve() {
return Type.typeFor(clazz);
}
}
|
apache/incubator-hugegraph | 36,948 | hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/TextSerializer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hugegraph.backend.serializer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import org.apache.commons.lang.NotImplementedException;
import org.apache.hugegraph.HugeException;
import org.apache.hugegraph.HugeGraph;
import org.apache.hugegraph.backend.BackendException;
import org.apache.hugegraph.backend.id.EdgeId;
import org.apache.hugegraph.backend.id.Id;
import org.apache.hugegraph.backend.id.IdGenerator;
import org.apache.hugegraph.backend.id.IdUtil;
import org.apache.hugegraph.backend.id.SplicingIdGenerator;
import org.apache.hugegraph.backend.query.Condition;
import org.apache.hugegraph.backend.query.ConditionQuery;
import org.apache.hugegraph.backend.query.IdPrefixQuery;
import org.apache.hugegraph.backend.query.IdRangeQuery;
import org.apache.hugegraph.backend.query.Query;
import org.apache.hugegraph.backend.store.BackendEntry;
import org.apache.hugegraph.config.HugeConfig;
import org.apache.hugegraph.iterator.CIter;
import org.apache.hugegraph.schema.EdgeLabel;
import org.apache.hugegraph.schema.IndexLabel;
import org.apache.hugegraph.schema.PropertyKey;
import org.apache.hugegraph.schema.SchemaElement;
import org.apache.hugegraph.schema.VertexLabel;
import org.apache.hugegraph.structure.HugeEdge;
import org.apache.hugegraph.structure.HugeEdgeProperty;
import org.apache.hugegraph.structure.HugeElement;
import org.apache.hugegraph.structure.HugeIndex;
import org.apache.hugegraph.structure.HugeIndex.IdWithExpiredTime;
import org.apache.hugegraph.structure.HugeProperty;
import org.apache.hugegraph.structure.HugeVertex;
import org.apache.hugegraph.structure.HugeVertexProperty;
import org.apache.hugegraph.type.HugeType;
import org.apache.hugegraph.type.define.AggregateType;
import org.apache.hugegraph.type.define.Cardinality;
import org.apache.hugegraph.type.define.DataType;
import org.apache.hugegraph.type.define.Directions;
import org.apache.hugegraph.type.define.Frequency;
import org.apache.hugegraph.type.define.HugeKeys;
import org.apache.hugegraph.type.define.IdStrategy;
import org.apache.hugegraph.type.define.IndexType;
import org.apache.hugegraph.type.define.SchemaStatus;
import org.apache.hugegraph.type.define.WriteType;
import org.apache.hugegraph.util.E;
import org.apache.hugegraph.util.JsonUtil;
import org.apache.tinkerpop.gremlin.structure.Edge;
import com.google.common.collect.ImmutableMap;
public class TextSerializer extends AbstractSerializer {
private static final String VALUE_SPLITOR = TextBackendEntry.VALUE_SPLITOR;
private static final String EDGE_NAME_ENDING =
ConditionQuery.INDEX_SYM_ENDING;
private static final String EDGE_OUT_TYPE = writeType(HugeType.EDGE_OUT);
    /**
     * Create a text-based serializer bound to the given backend config.
     */
    public TextSerializer(HugeConfig config) {
        super(config);
    }
    /**
     * Create an empty text entry of the given type with the given id.
     */
    @Override
    public TextBackendEntry newBackendEntry(HugeType type, Id id) {
        return new TextBackendEntry(type, id);
    }
    /**
     * Create an entry for a graph element (vertex/edge), encoding its id
     * with the entry-id (IdUtil) format.
     */
    private TextBackendEntry newBackendEntry(HugeElement elem) {
        Id id = IdGenerator.of(writeEntryId(elem.id()));
        return new TextBackendEntry(elem.type(), id);
    }
    /**
     * Create an entry for a schema element, encoding its id with the
     * JSON-based schema-id format (number or string).
     */
    private TextBackendEntry newBackendEntry(SchemaElement elem) {
        Id id = IdGenerator.of(writeId(elem.id()));
        return new TextBackendEntry(elem.type(), id);
    }
@Override
protected TextBackendEntry convertEntry(BackendEntry backendEntry) {
if (!(backendEntry instanceof TextBackendEntry)) {
throw new HugeException("The entry '%s' is not TextBackendEntry",
backendEntry);
}
return (TextBackendEntry) backendEntry;
}
    /**
     * Prefix a column name with the system-property type marker.
     */
    private String formatSyspropName(String name) {
        return SplicingIdGenerator.concat(writeType(HugeType.SYS_PROPERTY),
                                          name);
    }
    /**
     * Overload: format a HugeKeys constant as a sysprop column name.
     */
    private String formatSyspropName(HugeKeys col) {
        return this.formatSyspropName(col.string());
    }
    /**
     * Prefix a property-key id string with the property type marker.
     */
    private String formatPropertyName(String key) {
        return SplicingIdGenerator.concat(writeType(HugeType.PROPERTY), key);
    }
    /**
     * Overload: column name for a concrete property instance, derived
     * from its property-key id.
     */
    private String formatPropertyName(HugeProperty<?> prop) {
        return this.formatPropertyName(writeId(prop.propertyKey().id()));
    }
    /**
     * Serialize a property value as JSON.
     */
    private String formatPropertyValue(HugeProperty<?> prop) {
        // May be a single value or a list of values
        return JsonUtil.toJson(prop.value());
    }
    /**
     * Column name under which all vertex properties are aggregated.
     */
    private String formatPropertyName() {
        return HugeType.PROPERTY.string();
    }
private String formatPropertyValues(HugeVertex vertex) {
int size = vertex.sizeOfProperties();
StringBuilder sb = new StringBuilder(64 * size);
// Vertex properties
int i = 0;
for (HugeProperty<?> property : vertex.getProperties()) {
sb.append(this.formatPropertyName(property));
sb.append(VALUE_SPLITOR);
sb.append(this.formatPropertyValue(property));
if (++i < size) {
sb.append(VALUE_SPLITOR);
}
}
return sb.toString();
}
    /**
     * Parse one serialized property column into the owner element.
     *
     * @param colName  property column name: type-marker + property-key id
     * @param colValue JSON-encoded value (scalar, or collection for
     *                 non-single cardinality)
     * @param owner    vertex or edge receiving the property
     */
    private void parseProperty(String colName, String colValue,
                               HugeElement owner) {
        String[] colParts = SplicingIdGenerator.split(colName);
        assert colParts.length == 2 : colName;
        // Get PropertyKey by PropertyKey id
        PropertyKey pkey = owner.graph().propertyKey(readId(colParts[1]));
        // Parse value
        Object value = JsonUtil.fromJson(colValue, pkey.implementClazz());
        // Set properties of vertex/edge
        if (pkey.cardinality() == Cardinality.SINGLE) {
            owner.addProperty(pkey, value);
        } else {
            if (!(value instanceof Collection)) {
                throw new BackendException(
                          "Invalid value of non-single property: %s", colValue);
            }
            // Add each collection element, coercing JSON numbers to the
            // declared data type (JSON loses the exact numeric type)
            for (Object v : (Collection<?>) value) {
                v = JsonUtil.castNumber(v, pkey.dataType().clazz());
                owner.addProperty(pkey, v);
            }
        }
    }
private void parseProperties(String colValue, HugeVertex vertex) {
if (colValue == null || colValue.isEmpty()) {
return;
}
String[] valParts = colValue.split(VALUE_SPLITOR);
E.checkState(valParts.length % 2 == 0,
"The property key values length must be even number, " +
"but got %s, length is '%s'",
Arrays.toString(valParts), valParts.length);
// Edge properties
for (int i = 0; i < valParts.length; i += 2) {
assert i + 1 < valParts.length;
this.parseProperty(valParts[i], valParts[i + 1], vertex);
}
}
    /**
     * Column name for an edge: direction-qualified edge id without the
     * owner vertex (the owner is the row key).
     */
    private String formatEdgeName(HugeEdge edge) {
        // Edge name: type + edge-label-name + sortKeys + targetVertex
        return writeEdgeId(edge.idWithDirection(), false);
    }
private String formatEdgeValue(HugeEdge edge) {
StringBuilder sb = new StringBuilder(256 * edge.sizeOfProperties());
// Edge id
sb.append(edge.id().asString());
// Write edge expired time
sb.append(VALUE_SPLITOR);
sb.append(this.formatSyspropName(HugeKeys.EXPIRED_TIME));
sb.append(VALUE_SPLITOR);
sb.append(edge.expiredTime());
// Edge properties
for (HugeProperty<?> property : edge.getProperties()) {
sb.append(VALUE_SPLITOR);
sb.append(this.formatPropertyName(property));
sb.append(VALUE_SPLITOR);
sb.append(this.formatPropertyValue(property));
}
return sb.toString();
}
    /**
     * Parse an edge from a column item.
     *
     * The column name layout matches writeEdgeId(id, false):
     * [direction-type, edge-label-id, sub-label-id, sort-values+ending,
     * other-vertex-id]; the column value layout matches formatEdgeValue():
     * [edge-id, expired-time sysprop name, expired-time, (prop, value)...].
     */
    private void parseEdge(String colName, String colValue,
                           HugeVertex vertex) {
        String[] colParts = EdgeId.split(colName);
        HugeGraph graph = vertex.graph();
        // true => OUT edge, false => IN edge
        boolean direction = colParts[0].equals(EDGE_OUT_TYPE);
        String sortValues = readEdgeName(colParts[3]);
        // NOTE(review): edgeLabel is looked up but never used below; the
        // edge is constructed from subEdgeLabel only — confirm intended
        EdgeLabel edgeLabel = graph.edgeLabelOrNone(readId(colParts[1]));
        EdgeLabel subEdgeLabel = graph.edgeLabelOrNone(readId(colParts[2]));
        Id otherVertexId = readEntryId(colParts[4]);
        // Construct edge
        HugeEdge edge = HugeEdge.constructEdge(vertex, direction, subEdgeLabel,
                                               sortValues, otherVertexId);
        String[] valParts = colValue.split(VALUE_SPLITOR);
        // Parse edge expired time (valParts[0] is the edge id, skipped)
        String name = this.formatSyspropName(HugeKeys.EXPIRED_TIME);
        E.checkState(valParts[1].equals(name),
                     "Invalid system property name '%s'", valParts[1]);
        edge.expiredTime(JsonUtil.fromJson(valParts[2], Long.class));
        // Edge properties: (name, value) pairs starting at index 3
        for (int i = 3; i < valParts.length; i += 2) {
            this.parseProperty(valParts[i], valParts[i + 1], edge);
        }
    }
    /**
     * Dispatch a vertex-row column to the right parser based on its
     * leading type marker: property, edge (out/in), or system property.
     *
     * @throws IllegalStateException for an unknown column-type marker
     */
    private void parseColumn(String colName, String colValue,
                             HugeVertex vertex) {
        // Column name
        String type = SplicingIdGenerator.split(colName)[0];
        // Parse property
        if (type.equals(writeType(HugeType.PROPERTY))) {
            this.parseProperties(colValue, vertex);
        }
        // Parse edge
        else if (type.equals(writeType(HugeType.EDGE_OUT)) ||
                 type.equals(writeType(HugeType.EDGE_IN))) {
            this.parseEdge(colName, colValue, vertex);
        }
        // Parse system property
        else if (type.equals(writeType(HugeType.SYS_PROPERTY))) {
            // pass: sysprops (label, expired time) are read in readVertex()
        }
        // Invalid entry
        else {
            E.checkState(false, "Invalid entry with unknown type(%s): %s",
                         type, colName);
        }
    }
    /**
     * Serialize a vertex: label sysprop (if any), expired-time sysprop,
     * and one aggregated column holding all properties.
     */
    @Override
    public BackendEntry writeVertex(HugeVertex vertex) {
        TextBackendEntry entry = newBackendEntry(vertex);
        // Write label (NOTE: maybe just with edges if label is null)
        if (vertex.schemaLabel() != null) {
            entry.column(this.formatSyspropName(HugeKeys.LABEL),
                         writeId(vertex.schemaLabel().id()));
        }
        // Write expired time
        entry.column(this.formatSyspropName(HugeKeys.EXPIRED_TIME),
                     writeLong(vertex.expiredTime()));
        // Add all properties of a Vertex
        entry.column(this.formatPropertyName(),
                     this.formatPropertyValues(vertex));
        return entry;
    }
    /**
     * OLAP vertices are not supported by the text serializer.
     */
    @Override
    public BackendEntry writeOlapVertex(HugeVertex vertex) {
        throw new NotImplementedException("Unsupported writeOlapVertex()");
    }
    /**
     * Per-property vertex updates are not supported; the whole vertex is
     * rewritten via writeVertex() instead.
     */
    @Override
    public BackendEntry writeVertexProperty(HugeVertexProperty<?> prop) {
        throw new NotImplementedException("Unsupported writeVertexProperty()");
    }
    /**
     * Deserialize a vertex: read label and expired-time sysprops, then
     * dispatch every remaining column (properties/edges) via parseColumn().
     *
     * @return null if the backend entry is null
     */
    @Override
    public HugeVertex readVertex(HugeGraph graph, BackendEntry backendEntry) {
        E.checkNotNull(graph, "serializer graph");
        if (backendEntry == null) {
            return null;
        }
        TextBackendEntry entry = this.convertEntry(backendEntry);
        // Parse label
        String labelId = entry.column(this.formatSyspropName(HugeKeys.LABEL));
        VertexLabel vertexLabel = VertexLabel.NONE;
        if (labelId != null) {
            vertexLabel = graph.vertexLabelOrNone(readId(labelId));
        }
        Id id = IdUtil.readString(entry.id().asString());
        HugeVertex vertex = new HugeVertex(graph, id, vertexLabel);
        String expiredTime = entry.column(this.formatSyspropName(
                             HugeKeys.EXPIRED_TIME));
        // Expired time is null when backend entry is fake vertex with edges
        if (expiredTime != null) {
            vertex.expiredTime(readLong(expiredTime));
        }
        // Parse all properties or edges of a Vertex
        for (String name : entry.columnNames()) {
            this.parseColumn(name, entry.column(name), vertex);
        }
        return vertex;
    }
    /**
     * Serialize an edge as a single (name, value) column keyed by its
     * direction-qualified id.
     */
    @Override
    public BackendEntry writeEdge(HugeEdge edge) {
        Id id = IdGenerator.of(edge.idWithDirection().asString());
        TextBackendEntry entry = newBackendEntry(edge.type(), id);
        entry.column(this.formatEdgeName(edge), this.formatEdgeValue(edge));
        return entry;
    }
    /**
     * Serialize an edge-property update: rewrites the whole edge column
     * and records the property key as the entry's sub-id.
     */
    @Override
    public BackendEntry writeEdgeProperty(HugeEdgeProperty<?> prop) {
        HugeEdge edge = prop.element();
        Id id = IdGenerator.of(edge.idWithDirection().asString());
        TextBackendEntry entry = newBackendEntry(edge.type(), id);
        entry.subId(IdGenerator.of(prop.key()));
        entry.column(this.formatEdgeName(edge), this.formatEdgeValue(edge));
        return entry;
    }
    /**
     * Not implemented: edges are read back through readVertex() columns.
     */
    @Override
    public HugeEdge readEdge(HugeGraph graph, BackendEntry backendEntry) {
        E.checkNotNull(graph, "serializer graph");
        // TODO: implement
        throw new NotImplementedException("Unsupported readEdge()");
    }
    /**
     * Not implemented for the text serializer.
     */
    @Override
    public CIter<Edge> readEdges(HugeGraph graph, BackendEntry bytesEntry) {
        E.checkNotNull(graph, "serializer graph");
        // TODO: implement
        throw new NotImplementedException("Unsupported readEdges()");
    }
@Override
public BackendEntry writeIndex(HugeIndex index) {
TextBackendEntry entry = newBackendEntry(index.type(), index.id());
if (index.fieldValues() == null && index.elementIds().isEmpty()) {
/*
* When field-values is null and elementIds size is 0, it is
* meaningful for deletion of index data in secondary/range index.
*/
entry.column(HugeKeys.INDEX_LABEL_ID,
writeId(index.indexLabelId()));
} else {
// TODO: field-values may be a number (range index)
entry.column(formatSyspropName(HugeKeys.FIELD_VALUES),
JsonUtil.toJson(index.fieldValues()));
entry.column(formatSyspropName(HugeKeys.INDEX_LABEL_ID),
writeId(index.indexLabelId()));
entry.column(formatSyspropName(HugeKeys.ELEMENT_IDS),
writeElementId(index.elementId(), index.expiredTime()));
entry.subId(index.elementId());
}
return entry;
}
    /**
     * Deserialize an index entry written by writeIndex(): field values,
     * index-label id and element ids (with optional expired times).
     *
     * @return null if the backend entry is null
     */
    @Override
    public HugeIndex readIndex(HugeGraph graph, ConditionQuery query,
                               BackendEntry backendEntry) {
        E.checkNotNull(graph, "serializer graph");
        if (backendEntry == null) {
            return null;
        }
        TextBackendEntry entry = this.convertEntry(backendEntry);
        String indexValues = entry.column(
                             formatSyspropName(HugeKeys.FIELD_VALUES));
        String indexLabelId = entry.column(
                              formatSyspropName(HugeKeys.INDEX_LABEL_ID));
        String elemIds = entry.column(
                         formatSyspropName(HugeKeys.ELEMENT_IDS));
        IndexLabel indexLabel = IndexLabel.label(graph, readId(indexLabelId));
        HugeIndex index = new HugeIndex(graph, indexLabel);
        index.fieldValues(JsonUtil.fromJson(indexValues, Object.class));
        for (IdWithExpiredTime elemId : readElementIds(elemIds)) {
            long expiredTime = elemId.expiredTime();
            Id id;
            // Edge-targeted indexes store structured edge ids
            if (indexLabel.queryType().isEdge()) {
                id = EdgeId.parse(elemId.id().asString());
            } else {
                id = elemId.id();
            }
            index.elementIds(id, expiredTime);
        }
        // Memory backend might return empty BackendEntry
        return index;
    }
    /**
     * Build an id-only entry of the given type, serializing the id with
     * the type-appropriate format (see writeQueryId()).
     */
    @Override
    public TextBackendEntry writeId(HugeType type, Id id) {
        id = this.writeQueryId(type, id);
        return newBackendEntry(type, id);
    }
@Override
protected Id writeQueryId(HugeType type, Id id) {
if (type.isEdge()) {
id = IdGenerator.of(writeEdgeId(id, true));
} else if (type.isGraph()) {
id = IdGenerator.of(writeEntryId(id));
} else {
assert type.isSchema();
id = IdGenerator.of(writeId(id));
}
return id;
}
@Override
protected Query writeQueryEdgeCondition(Query query) {
ConditionQuery cq = (ConditionQuery) query;
if (cq.hasRangeCondition()) {
return this.writeQueryEdgeRangeCondition(cq);
} else {
return this.writeQueryEdgePrefixCondition(cq);
}
}
    /**
     * Convert a sort-values range condition on edges into an id-range (or
     * id-prefix, when only a lower bound exists) query. The start/end ids
     * are built from owner vertex + direction + label (+ sort-value bound).
     */
    private Query writeQueryEdgeRangeCondition(ConditionQuery cq) {
        List<Condition> sortValues = cq.syspropConditions(HugeKeys.SORT_VALUES);
        E.checkArgument(sortValues.size() >= 1 && sortValues.size() <= 2,
                        "Edge range query must be with sort-values range");
        // Would ignore target vertex
        Object vertex = cq.condition(HugeKeys.OWNER_VERTEX);
        Object direction = cq.condition(HugeKeys.DIRECTION);
        if (direction == null) {
            direction = Directions.OUT;
        }
        Object label = cq.condition(HugeKeys.LABEL);
        List<String> start = new ArrayList<>(cq.conditionsSize());
        start.add(writeEntryId((Id) vertex));
        start.add(writeType(((Directions) direction).type()));
        start.add(writeId((Id) label));
        // Snapshot the common prefix before appending range bounds
        List<String> end = new ArrayList<>(start);
        Condition.RangeConditions range = new Condition.RangeConditions(sortValues);
        if (range.keyMin() != null) {
            start.add((String) range.keyMin());
        }
        if (range.keyMax() != null) {
            end.add((String) range.keyMax());
        }
        // Sort-value will be empty if there is no start sort-value
        String startId = EdgeId.concat(start.toArray(new String[0]));
        // Set endId as prefix if there is no end sort-value
        String endId = EdgeId.concat(end.toArray(new String[0]));
        if (range.keyMax() == null) {
            return new IdPrefixQuery(cq, IdGenerator.of(startId),
                                     range.keyMinEq(), IdGenerator.of(endId));
        }
        return new IdRangeQuery(cq, IdGenerator.of(startId), range.keyMinEq(),
                                IdGenerator.of(endId), range.keyMaxEq());
    }
    /**
     * Convert leading edge-id conditions (owner vertex, direction, label,
     * sort values, other vertex — in EdgeId.KEYS order, stopping at the
     * first missing key) into an id-prefix query.
     *
     * @return the prefix query, or null if no leading condition is present
     */
    private Query writeQueryEdgePrefixCondition(ConditionQuery cq) {
        // Convert query-by-condition to query-by-id
        List<String> condParts = new ArrayList<>(cq.conditionsSize());
        for (HugeKeys key : EdgeId.KEYS) {
            Object value = cq.condition(key);
            if (value == null) {
                // A gap in the key sequence ends the usable prefix
                break;
            }
            // Serialize condition value
            if (key == HugeKeys.OWNER_VERTEX || key == HugeKeys.OTHER_VERTEX) {
                condParts.add(writeEntryId((Id) value));
            } else if (key == HugeKeys.DIRECTION) {
                condParts.add(writeType(((Directions) value).type()));
            } else if (key == HugeKeys.LABEL) {
                condParts.add(writeId((Id) value));
            } else {
                condParts.add(value.toString());
            }
        }
        if (!condParts.isEmpty()) {
            // Conditions to id
            String id = EdgeId.concat(condParts.toArray(new String[0]));
            return new IdPrefixQuery(cq, IdGenerator.of(id));
        }
        return null;
    }
    /**
     * Serialize every relation of a sysprop-only condition query in place:
     * keys become plain (schema) or sysprop-prefixed (graph) column names,
     * values become id strings or JSON; CONTAINS_KEY values are rewritten
     * as property column names.
     */
    @Override
    protected Query writeQueryCondition(Query query) {
        ConditionQuery result = (ConditionQuery) query;
        // No user-prop when serialize
        assert result.allSysprop();
        for (Condition.Relation r : result.relations()) {
            // Serialize key
            if (query.resultType().isSchema()) {
                r.serialKey(((HugeKeys) r.key()).string());
            } else {
                r.serialKey(formatSyspropName((HugeKeys) r.key()));
            }
            if (r.value() instanceof Id) {
                // Serialize id value
                r.serialValue(writeId((Id) r.value()));
            } else {
                // Serialize other type value
                r.serialValue(JsonUtil.toJson(r.value()));
            }
            if (r.relation() == Condition.RelationType.CONTAINS_KEY) {
                // Serialize has-key
                String key = (String) r.serialValue();
                r.serialValue(formatPropertyName(key));
            }
        }
        return result;
    }
    /**
     * Serialize a vertex label schema element, one column per field.
     */
    @Override
    public BackendEntry writeVertexLabel(VertexLabel vertexLabel) {
        TextBackendEntry entry = newBackendEntry(vertexLabel);
        entry.column(HugeKeys.NAME, JsonUtil.toJson(vertexLabel.name()));
        entry.column(HugeKeys.ID_STRATEGY,
                     JsonUtil.toJson(vertexLabel.idStrategy()));
        entry.column(HugeKeys.PROPERTIES,
                     writeIds(vertexLabel.properties()));
        entry.column(HugeKeys.PRIMARY_KEYS,
                     writeIds(vertexLabel.primaryKeys()));
        entry.column(HugeKeys.NULLABLE_KEYS,
                     writeIds(vertexLabel.nullableKeys()));
        entry.column(HugeKeys.INDEX_LABELS,
                     writeIds(vertexLabel.indexLabels()));
        entry.column(HugeKeys.ENABLE_LABEL_INDEX,
                     JsonUtil.toJson(vertexLabel.enableLabelIndex()));
        writeUserdata(vertexLabel, entry);
        entry.column(HugeKeys.STATUS,
                     JsonUtil.toJson(vertexLabel.status()));
        return entry;
    }
    /**
     * Deserialize a vertex label written by writeVertexLabel().
     *
     * @return null if the backend entry is null
     */
    @Override
    public VertexLabel readVertexLabel(HugeGraph graph,
                                       BackendEntry backendEntry) {
        if (backendEntry == null) {
            return null;
        }
        TextBackendEntry entry = this.convertEntry(backendEntry);
        Id id = readId(entry.id());
        String name = JsonUtil.fromJson(entry.column(HugeKeys.NAME),
                                        String.class);
        String idStrategy = entry.column(HugeKeys.ID_STRATEGY);
        String properties = entry.column(HugeKeys.PROPERTIES);
        String primaryKeys = entry.column(HugeKeys.PRIMARY_KEYS);
        String nullableKeys = entry.column(HugeKeys.NULLABLE_KEYS);
        String indexLabels = entry.column(HugeKeys.INDEX_LABELS);
        String enableLabelIndex = entry.column(HugeKeys.ENABLE_LABEL_INDEX);
        String status = entry.column(HugeKeys.STATUS);
        VertexLabel vertexLabel = new VertexLabel(graph, id, name);
        vertexLabel.idStrategy(JsonUtil.fromJson(idStrategy,
                                                 IdStrategy.class));
        vertexLabel.properties(readIds(properties));
        vertexLabel.primaryKeys(readIds(primaryKeys));
        vertexLabel.nullableKeys(readIds(nullableKeys));
        vertexLabel.addIndexLabels(readIds(indexLabels));
        vertexLabel.enableLabelIndex(JsonUtil.fromJson(enableLabelIndex,
                                                       Boolean.class));
        readUserdata(vertexLabel, entry);
        vertexLabel.status(JsonUtil.fromJson(status, SchemaStatus.class));
        return vertexLabel;
    }
    /**
     * Serialize an edge label schema element, one column per field
     * (including TTL and its start-time property).
     */
    @Override
    public BackendEntry writeEdgeLabel(EdgeLabel edgeLabel) {
        TextBackendEntry entry = newBackendEntry(edgeLabel);
        entry.column(HugeKeys.NAME, JsonUtil.toJson(edgeLabel.name()));
        entry.column(HugeKeys.SOURCE_LABEL, writeId(edgeLabel.sourceLabel()));
        entry.column(HugeKeys.TARGET_LABEL, writeId(edgeLabel.targetLabel()));
        entry.column(HugeKeys.FREQUENCY,
                     JsonUtil.toJson(edgeLabel.frequency()));
        entry.column(HugeKeys.PROPERTIES, writeIds(edgeLabel.properties()));
        entry.column(HugeKeys.SORT_KEYS, writeIds(edgeLabel.sortKeys()));
        entry.column(HugeKeys.NULLABLE_KEYS,
                     writeIds(edgeLabel.nullableKeys()));
        entry.column(HugeKeys.INDEX_LABELS, writeIds(edgeLabel.indexLabels()));
        entry.column(HugeKeys.ENABLE_LABEL_INDEX,
                     JsonUtil.toJson(edgeLabel.enableLabelIndex()));
        writeUserdata(edgeLabel, entry);
        entry.column(HugeKeys.STATUS,
                     JsonUtil.toJson(edgeLabel.status()));
        entry.column(HugeKeys.TTL, JsonUtil.toJson(edgeLabel.ttl()));
        entry.column(HugeKeys.TTL_START_TIME,
                     writeId(edgeLabel.ttlStartTime()));
        return entry;
    }
    /**
     * Deserialize an edge label written by writeEdgeLabel().
     *
     * @return null if the backend entry is null
     */
    @Override
    public EdgeLabel readEdgeLabel(HugeGraph graph,
                                   BackendEntry backendEntry) {
        if (backendEntry == null) {
            return null;
        }
        TextBackendEntry entry = this.convertEntry(backendEntry);
        Id id = readId(entry.id());
        String name = JsonUtil.fromJson(entry.column(HugeKeys.NAME),
                                        String.class);
        String sourceLabel = entry.column(HugeKeys.SOURCE_LABEL);
        String targetLabel = entry.column(HugeKeys.TARGET_LABEL);
        String frequency = entry.column(HugeKeys.FREQUENCY);
        String sortKeys = entry.column(HugeKeys.SORT_KEYS);
        String nullablekeys = entry.column(HugeKeys.NULLABLE_KEYS);
        String properties = entry.column(HugeKeys.PROPERTIES);
        String indexLabels = entry.column(HugeKeys.INDEX_LABELS);
        String enableLabelIndex = entry.column(HugeKeys.ENABLE_LABEL_INDEX);
        String status = entry.column(HugeKeys.STATUS);
        String ttl = entry.column(HugeKeys.TTL);
        String ttlStartTime = entry.column(HugeKeys.TTL_START_TIME);
        EdgeLabel edgeLabel = new EdgeLabel(graph, id, name);
        edgeLabel.sourceLabel(readId(sourceLabel));
        edgeLabel.targetLabel(readId(targetLabel));
        edgeLabel.frequency(JsonUtil.fromJson(frequency, Frequency.class));
        edgeLabel.properties(readIds(properties));
        edgeLabel.sortKeys(readIds(sortKeys));
        edgeLabel.nullableKeys(readIds(nullablekeys));
        edgeLabel.addIndexLabels(readIds(indexLabels));
        edgeLabel.enableLabelIndex(JsonUtil.fromJson(enableLabelIndex,
                                                     Boolean.class));
        readUserdata(edgeLabel, entry);
        edgeLabel.status(JsonUtil.fromJson(status, SchemaStatus.class));
        edgeLabel.ttl(JsonUtil.fromJson(ttl, Long.class));
        edgeLabel.ttlStartTime(readId(ttlStartTime));
        return edgeLabel;
    }
    /**
     * Serialize a property key schema element, one column per field.
     */
    @Override
    public BackendEntry writePropertyKey(PropertyKey propertyKey) {
        TextBackendEntry entry = newBackendEntry(propertyKey);
        entry.column(HugeKeys.NAME, JsonUtil.toJson(propertyKey.name()));
        entry.column(HugeKeys.DATA_TYPE,
                     JsonUtil.toJson(propertyKey.dataType()));
        entry.column(HugeKeys.CARDINALITY,
                     JsonUtil.toJson(propertyKey.cardinality()));
        entry.column(HugeKeys.AGGREGATE_TYPE,
                     JsonUtil.toJson(propertyKey.aggregateType()));
        entry.column(HugeKeys.WRITE_TYPE,
                     JsonUtil.toJson(propertyKey.writeType()));
        entry.column(HugeKeys.PROPERTIES, writeIds(propertyKey.properties()));
        writeUserdata(propertyKey, entry);
        entry.column(HugeKeys.STATUS,
                     JsonUtil.toJson(propertyKey.status()));
        return entry;
    }
    /**
     * Deserialize a property key written by writePropertyKey().
     *
     * @return null if the backend entry is null
     */
    @Override
    public PropertyKey readPropertyKey(HugeGraph graph,
                                       BackendEntry backendEntry) {
        if (backendEntry == null) {
            return null;
        }
        TextBackendEntry entry = this.convertEntry(backendEntry);
        Id id = readId(entry.id());
        String name = JsonUtil.fromJson(entry.column(HugeKeys.NAME),
                                        String.class);
        String dataType = entry.column(HugeKeys.DATA_TYPE);
        String cardinality = entry.column(HugeKeys.CARDINALITY);
        String aggregateType = entry.column(HugeKeys.AGGREGATE_TYPE);
        String writeType = entry.column(HugeKeys.WRITE_TYPE);
        String properties = entry.column(HugeKeys.PROPERTIES);
        String status = entry.column(HugeKeys.STATUS);
        PropertyKey propertyKey = new PropertyKey(graph, id, name);
        propertyKey.dataType(JsonUtil.fromJson(dataType, DataType.class));
        propertyKey.cardinality(JsonUtil.fromJson(cardinality,
                                                  Cardinality.class));
        propertyKey.aggregateType(JsonUtil.fromJson(aggregateType,
                                                    AggregateType.class));
        propertyKey.writeType(JsonUtil.fromJson(writeType,
                                                WriteType.class));
        propertyKey.properties(readIds(properties));
        readUserdata(propertyKey, entry);
        propertyKey.status(JsonUtil.fromJson(status, SchemaStatus.class));
        return propertyKey;
    }
    /**
     * Serialize an index label schema element, one column per field.
     */
    @Override
    public BackendEntry writeIndexLabel(IndexLabel indexLabel) {
        TextBackendEntry entry = newBackendEntry(indexLabel);
        entry.column(HugeKeys.NAME, JsonUtil.toJson(indexLabel.name()));
        entry.column(HugeKeys.BASE_TYPE,
                     JsonUtil.toJson(indexLabel.baseType()));
        entry.column(HugeKeys.BASE_VALUE, writeId(indexLabel.baseValue()));
        entry.column(HugeKeys.INDEX_TYPE,
                     JsonUtil.toJson(indexLabel.indexType()));
        entry.column(HugeKeys.FIELDS, writeIds(indexLabel.indexFields()));
        writeUserdata(indexLabel, entry);
        entry.column(HugeKeys.STATUS,
                     JsonUtil.toJson(indexLabel.status()));
        return entry;
    }
    /**
     * Deserialize an index label written by writeIndexLabel().
     *
     * @return null if the backend entry is null
     */
    @Override
    public IndexLabel readIndexLabel(HugeGraph graph,
                                     BackendEntry backendEntry) {
        if (backendEntry == null) {
            return null;
        }
        TextBackendEntry entry = this.convertEntry(backendEntry);
        Id id = readId(entry.id());
        String name = JsonUtil.fromJson(entry.column(HugeKeys.NAME),
                                        String.class);
        String baseType = entry.column(HugeKeys.BASE_TYPE);
        String baseValue = entry.column(HugeKeys.BASE_VALUE);
        String indexType = entry.column(HugeKeys.INDEX_TYPE);
        String indexFields = entry.column(HugeKeys.FIELDS);
        String status = entry.column(HugeKeys.STATUS);
        IndexLabel indexLabel = new IndexLabel(graph, id, name);
        indexLabel.baseType(JsonUtil.fromJson(baseType, HugeType.class));
        indexLabel.baseValue(readId(baseValue));
        indexLabel.indexType(JsonUtil.fromJson(indexType, IndexType.class));
        indexLabel.indexFields(readIds(indexFields));
        readUserdata(indexLabel, entry);
        indexLabel.status(JsonUtil.fromJson(status, SchemaStatus.class));
        return indexLabel;
    }
    /**
     * Serialize an edge id as concatenated parts:
     * [ownerVertex]? + directionType + edgeLabelId + subLabelId +
     * sortValues(with ending marker) + otherVertexId.
     *
     * @param withOwnerVertex whether to lead with the owner vertex id
     *                        (true for full ids, false for column names)
     */
    private String writeEdgeId(Id id, boolean withOwnerVertex) {
        EdgeId edgeId;
        if (id instanceof EdgeId) {
            edgeId = (EdgeId) id;
        } else {
            edgeId = EdgeId.parse(id.asString());
        }
        List<String> list = new ArrayList<>(5);
        if (withOwnerVertex) {
            list.add(writeEntryId(edgeId.ownerVertexId()));
        }
        // Edge name: type + edge-label-name + sortKeys + targetVertex
        list.add(writeType(edgeId.direction().type()));
        list.add(writeId(edgeId.edgeLabelId()));
        list.add(writeId(edgeId.subLabelId()));
        list.add(writeEdgeName(edgeId.sortValues()));
        list.add(writeEntryId(edgeId.otherVertexId()));
        return EdgeId.concat(list.toArray(new String[0]));
    }
    /**
     * Serialize a HugeType as its short string marker.
     */
    private static String writeType(HugeType type) {
        return type.string();
    }
    /**
     * Serialize a graph-entry id (vertex/edge endpoint) via IdUtil.
     */
    private static String writeEntryId(Id id) {
        return IdUtil.writeString(id);
    }
    /**
     * Inverse of writeEntryId().
     */
    private static Id readEntryId(String id) {
        return IdUtil.readString(id);
    }
    /**
     * Append the ending marker to an edge sort-values string so that
     * prefix scans terminate correctly. Inverse of readEdgeName().
     */
    private static String writeEdgeName(String name) {
        return name + EDGE_NAME_ENDING;
    }
private static String readEdgeName(String name) {
E.checkState(name.endsWith(EDGE_NAME_ENDING),
"Invalid edge name: %s", name);
return name.substring(0, name.length() - 1);
}
private static String writeId(Id id) {
if (id.number()) {
return JsonUtil.toJson(id.asLong());
} else {
return JsonUtil.toJson(id.asString());
}
}
private static Id readId(String id) {
Object value = JsonUtil.fromJson(id, Object.class);
if (value instanceof Number) {
return IdGenerator.of(((Number) value).longValue());
} else {
assert value instanceof String;
return IdGenerator.of(value.toString());
}
}
    /**
     * Overload: parse the JSON-encoded id carried inside an Id wrapper.
     */
    private static Id readId(Id id) {
        return readId(id.asString());
    }
private static String writeIds(Collection<Id> ids) {
Object[] array = new Object[ids.size()];
int i = 0;
for (Id id : ids) {
if (id.number()) {
array[i++] = id.asLong();
} else {
array[i++] = id.asString();
}
}
return JsonUtil.toJson(array);
}
private static Id[] readIds(String str) {
Object[] values = JsonUtil.fromJson(str, Object[].class);
Id[] ids = new Id[values.length];
for (int i = 0; i < values.length; i++) {
Object value = values[i];
if (value instanceof Number) {
ids[i] = IdGenerator.of(((Number) value).longValue());
} else {
assert value instanceof String;
ids[i] = IdGenerator.of(value.toString());
}
}
return ids;
}
private static String writeElementId(Id id, long expiredTime) {
Object[] array = new Object[1];
Object idValue = id.number() ? id.asLong() : id.asString();
if (expiredTime <= 0L) {
array[0] = id;
} else {
array[0] = ImmutableMap.of(HugeKeys.ID.string(), idValue,
HugeKeys.EXPIRED_TIME.string(),
expiredTime);
}
return JsonUtil.toJson(array);
}
    /**
     * Inverse of writeElementId(): parse a JSON array whose elements are
     * either bare id values (no expiry, expiredTime = 0) or
     * {id, expired_time} maps.
     */
    private static IdWithExpiredTime[] readElementIds(String str) {
        Object[] values = JsonUtil.fromJson(str, Object[].class);
        IdWithExpiredTime[] ids = new IdWithExpiredTime[values.length];
        for (int i = 0; i < values.length; i++) {
            Object idValue;
            long expiredTime;
            if (values[i] instanceof Map) {
                @SuppressWarnings("unchecked")
                Map<String, Object> map = (Map<String, Object>) values[i];
                idValue = map.get(HugeKeys.ID.string());
                expiredTime = ((Number) map.get(
                              HugeKeys.EXPIRED_TIME.string())).longValue();
            } else {
                idValue = values[i];
                expiredTime = 0L;
            }
            Id id;
            if (idValue instanceof Number) {
                id = IdGenerator.of(((Number) idValue).longValue());
            } else {
                assert idValue instanceof String;
                id = IdGenerator.of(idValue.toString());
            }
            ids[i] = new IdWithExpiredTime(id, expiredTime);
        }
        return ids;
    }
    /**
     * Serialize a long as its JSON (decimal) representation.
     */
    private static String writeLong(long value) {
        return JsonUtil.toJson(value);
    }
    /**
     * Inverse of writeLong(); parses the decimal string directly.
     */
    private static long readLong(String value) {
        return Long.parseLong(value);
    }
    /**
     * Write a schema element's user data map as one JSON column.
     */
    private static void writeUserdata(SchemaElement schema,
                                      TextBackendEntry entry) {
        entry.column(HugeKeys.USER_DATA, JsonUtil.toJson(schema.userdata()));
    }
    /**
     * Inverse of writeUserdata(): merge the JSON user-data column into
     * the schema element, entry by entry.
     * NOTE(review): assumes the USER_DATA column is always present —
     * JsonUtil.fromJson(null) behavior unverified from here.
     */
    private static void readUserdata(SchemaElement schema,
                                     TextBackendEntry entry) {
        // Parse all user data of a schema element
        String userdataStr = entry.column(HugeKeys.USER_DATA);
        @SuppressWarnings("unchecked")
        Map<String, Object> userdata = JsonUtil.fromJson(userdataStr,
                                                         Map.class);
        for (Map.Entry<String, Object> e : userdata.entrySet()) {
            schema.userdata(e.getKey(), e.getValue());
        }
    }
}
|
googleapis/google-cloud-java | 36,719 | java-channel/proto-google-cloud-channel-v1/src/main/java/com/google/cloud/channel/v1/ListPurchasableSkusResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/channel/v1/service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.channel.v1;
/**
*
*
* <pre>
* Response message for ListPurchasableSkus.
* </pre>
*
* Protobuf type {@code google.cloud.channel.v1.ListPurchasableSkusResponse}
*/
public final class ListPurchasableSkusResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.channel.v1.ListPurchasableSkusResponse)
ListPurchasableSkusResponseOrBuilder {
private static final long serialVersionUID = 0L;
  // Use ListPurchasableSkusResponse.newBuilder() to construct.
  // Delegates to GeneratedMessageV3's builder-based constructor.
  private ListPurchasableSkusResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Default instance constructor: initializes repeated/string fields to
  // their empty defaults.
  private ListPurchasableSkusResponse() {
    purchasableSkus_ = java.util.Collections.emptyList();
    nextPageToken_ = "";
  }
  // Protobuf runtime hook for creating fresh instances via reflection.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListPurchasableSkusResponse();
  }
  // Message descriptor from the generated ServiceProto file descriptor.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.channel.v1.ServiceProto
        .internal_static_google_cloud_channel_v1_ListPurchasableSkusResponse_descriptor;
  }
  // Binds the generated field accessor table to this message class and
  // its Builder for reflective field access.
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.channel.v1.ServiceProto
        .internal_static_google_cloud_channel_v1_ListPurchasableSkusResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.channel.v1.ListPurchasableSkusResponse.class,
            com.google.cloud.channel.v1.ListPurchasableSkusResponse.Builder.class);
  }
public static final int PURCHASABLE_SKUS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.channel.v1.PurchasableSku> purchasableSkus_;
  /**
   *
   *
   * <pre>
   * The list of SKUs requested.
   * </pre>
   *
   * <code>repeated .google.cloud.channel.v1.PurchasableSku purchasable_skus = 1;</code>
   *
   * @return the backing (unmodifiable from callers' view) list of SKUs
   */
  @java.lang.Override
  public java.util.List<com.google.cloud.channel.v1.PurchasableSku> getPurchasableSkusList() {
    return purchasableSkus_;
  }
  /**
   *
   *
   * <pre>
   * The list of SKUs requested.
   * </pre>
   *
   * <code>repeated .google.cloud.channel.v1.PurchasableSku purchasable_skus = 1;</code>
   *
   * @return the same backing list, widened to the OrBuilder view
   */
  @java.lang.Override
  public java.util.List<? extends com.google.cloud.channel.v1.PurchasableSkuOrBuilder>
      getPurchasableSkusOrBuilderList() {
    return purchasableSkus_;
  }
  /**
   *
   *
   * <pre>
   * The list of SKUs requested.
   * </pre>
   *
   * <code>repeated .google.cloud.channel.v1.PurchasableSku purchasable_skus = 1;</code>
   *
   * @return number of SKU elements in the repeated field
   */
  @java.lang.Override
  public int getPurchasableSkusCount() {
    return purchasableSkus_.size();
  }
  /**
   *
   *
   * <pre>
   * The list of SKUs requested.
   * </pre>
   *
   * <code>repeated .google.cloud.channel.v1.PurchasableSku purchasable_skus = 1;</code>
   *
   * @param index zero-based element index into the repeated field
   */
  @java.lang.Override
  public com.google.cloud.channel.v1.PurchasableSku getPurchasableSkus(int index) {
    return purchasableSkus_.get(index);
  }
  /**
   *
   *
   * <pre>
   * The list of SKUs requested.
   * </pre>
   *
   * <code>repeated .google.cloud.channel.v1.PurchasableSku purchasable_skus = 1;</code>
   *
   * @param index zero-based element index into the repeated field
   */
  @java.lang.Override
  public com.google.cloud.channel.v1.PurchasableSkuOrBuilder getPurchasableSkusOrBuilder(
      int index) {
    return purchasableSkus_.get(index);
  }
// --- string next_page_token = 2 ---
// Stored as Object holding either a String or a ByteString. Each accessor
// lazily converts to the form it needs and writes the result back as a cache;
// the conversion is deterministic, so a concurrent write-back is harmless.
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token to retrieve the next page of results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
// Field still holds the wire-format ByteString: decode once and cache.
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* A token to retrieve the next page of results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
// Field holds a decoded String: re-encode once and cache the bytes.
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// Memoized initialization state: -1 = not computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
// This message has no required fields, so it is always initialized.
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
// Serializes fields in ascending field-number order; an empty next_page_token
// is omitted entirely, matching proto3 default-value semantics.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < purchasableSkus_.size(); i++) {
output.writeMessage(1, purchasableSkus_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
getUnknownFields().writeTo(output);
}
// Computes the serialized byte size once and memoizes it in memoizedSize
// (-1 means "not yet computed"); must mirror writeTo field-for-field.
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < purchasableSkus_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, purchasableSkus_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
// Field-by-field equality, including unknown fields; non-matching types are
// delegated to super.equals (which handles null and reflection-based cases).
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.channel.v1.ListPurchasableSkusResponse)) {
return super.equals(obj);
}
com.google.cloud.channel.v1.ListPurchasableSkusResponse other =
(com.google.cloud.channel.v1.ListPurchasableSkusResponse) obj;
if (!getPurchasableSkusList().equals(other.getPurchasableSkusList())) return false;
if (!getNextPageToken().equals(other.getNextPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
// Hash is memoized (0 means "not yet computed"); the repeated field is only
// mixed in when non-empty so empty messages of the same type hash equally.
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getPurchasableSkusCount() > 0) {
hash = (37 * hash) + PURCHASABLE_SKUS_FIELD_NUMBER;
hash = (53 * hash) + getPurchasableSkusList().hashCode();
}
hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getNextPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Standard generated parse entry points: in-memory sources (ByteBuffer,
// ByteString, byte[]) throw InvalidProtocolBufferException; stream sources
// additionally surface IOException. parseDelimitedFrom expects a varint
// length prefix before the message bytes.
public static com.google.cloud.channel.v1.ListPurchasableSkusResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.channel.v1.ListPurchasableSkusResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.channel.v1.ListPurchasableSkusResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.channel.v1.ListPurchasableSkusResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.channel.v1.ListPurchasableSkusResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.channel.v1.ListPurchasableSkusResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.channel.v1.ListPurchasableSkusResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.channel.v1.ListPurchasableSkusResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.channel.v1.ListPurchasableSkusResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.channel.v1.ListPurchasableSkusResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.channel.v1.ListPurchasableSkusResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.channel.v1.ListPurchasableSkusResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
// Builder factories. newBuilder() goes through DEFAULT_INSTANCE.toBuilder(),
// and toBuilder() special-cases the default instance to avoid a needless
// mergeFrom of all-default values.
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.channel.v1.ListPurchasableSkusResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Response message for ListPurchasableSkus.
* </pre>
*
* Protobuf type {@code google.cloud.channel.v1.ListPurchasableSkusResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.channel.v1.ListPurchasableSkusResponse)
com.google.cloud.channel.v1.ListPurchasableSkusResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.channel.v1.ServiceProto
.internal_static_google_cloud_channel_v1_ListPurchasableSkusResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.channel.v1.ServiceProto
.internal_static_google_cloud_channel_v1_ListPurchasableSkusResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.channel.v1.ListPurchasableSkusResponse.class,
com.google.cloud.channel.v1.ListPurchasableSkusResponse.Builder.class);
}
// Construct using com.google.cloud.channel.v1.ListPurchasableSkusResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
// Resets all fields to defaults; the repeated field is reset either directly
// or through its RepeatedFieldBuilderV3, whichever currently owns the data.
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (purchasableSkusBuilder_ == null) {
purchasableSkus_ = java.util.Collections.emptyList();
} else {
purchasableSkus_ = null;
purchasableSkusBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.channel.v1.ServiceProto
.internal_static_google_cloud_channel_v1_ListPurchasableSkusResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.channel.v1.ListPurchasableSkusResponse getDefaultInstanceForType() {
return com.google.cloud.channel.v1.ListPurchasableSkusResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.channel.v1.ListPurchasableSkusResponse build() {
com.google.cloud.channel.v1.ListPurchasableSkusResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.channel.v1.ListPurchasableSkusResponse buildPartial() {
com.google.cloud.channel.v1.ListPurchasableSkusResponse result =
new com.google.cloud.channel.v1.ListPurchasableSkusResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
// Transfers ownership of the repeated field into the message: the builder's
// mutable list is frozen (unmodifiable) and its "owned" bit cleared, so a
// later mutation on this builder forces a fresh copy (copy-on-write).
private void buildPartialRepeatedFields(
com.google.cloud.channel.v1.ListPurchasableSkusResponse result) {
if (purchasableSkusBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
purchasableSkus_ = java.util.Collections.unmodifiableList(purchasableSkus_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.purchasableSkus_ = purchasableSkus_;
} else {
result.purchasableSkus_ = purchasableSkusBuilder_.build();
}
}
// Copies scalar fields into the message, gated on their has-bits
// (bit 0x00000002 tracks next_page_token).
private void buildPartial0(com.google.cloud.channel.v1.ListPurchasableSkusResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.channel.v1.ListPurchasableSkusResponse) {
return mergeFrom((com.google.cloud.channel.v1.ListPurchasableSkusResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
// Typed merge. For the repeated field there are two paths: when no field
// builder exists yet, the other message's (immutable) list may be adopted
// by reference if ours is empty; when a field builder exists and is empty,
// it is discarded and the list adopted, then the builder lazily recreated
// only if alwaysUseFieldBuilders is set.
public Builder mergeFrom(com.google.cloud.channel.v1.ListPurchasableSkusResponse other) {
if (other == com.google.cloud.channel.v1.ListPurchasableSkusResponse.getDefaultInstance())
return this;
if (purchasableSkusBuilder_ == null) {
if (!other.purchasableSkus_.isEmpty()) {
if (purchasableSkus_.isEmpty()) {
purchasableSkus_ = other.purchasableSkus_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensurePurchasableSkusIsMutable();
purchasableSkus_.addAll(other.purchasableSkus_);
}
onChanged();
}
} else {
if (!other.purchasableSkus_.isEmpty()) {
if (purchasableSkusBuilder_.isEmpty()) {
purchasableSkusBuilder_.dispose();
purchasableSkusBuilder_ = null;
purchasableSkus_ = other.purchasableSkus_;
bitField0_ = (bitField0_ & ~0x00000001);
purchasableSkusBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getPurchasableSkusFieldBuilder()
: null;
} else {
purchasableSkusBuilder_.addAllMessages(other.purchasableSkus_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
// Streaming parse loop: tag 10 = field 1 (length-delimited PurchasableSku),
// tag 18 = field 2 (next_page_token string); unrecognized tags go to the
// unknown-field set, and an end-group tag terminates the loop.
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.cloud.channel.v1.PurchasableSku m =
input.readMessage(
com.google.cloud.channel.v1.PurchasableSku.parser(), extensionRegistry);
if (purchasableSkusBuilder_ == null) {
ensurePurchasableSkusIsMutable();
purchasableSkus_.add(m);
} else {
purchasableSkusBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.cloud.channel.v1.PurchasableSku> purchasableSkus_ =
java.util.Collections.emptyList();
// Copy-on-write guard: bit 0x00000001 means this builder owns a mutable
// list; otherwise the current list may be shared/immutable and is copied.
private void ensurePurchasableSkusIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
purchasableSkus_ =
new java.util.ArrayList<com.google.cloud.channel.v1.PurchasableSku>(purchasableSkus_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.channel.v1.PurchasableSku,
com.google.cloud.channel.v1.PurchasableSku.Builder,
com.google.cloud.channel.v1.PurchasableSkuOrBuilder>
purchasableSkusBuilder_;
/**
*
*
* <pre>
* The list of SKUs requested.
* </pre>
*
* <code>repeated .google.cloud.channel.v1.PurchasableSku purchasable_skus = 1;</code>
*/
public java.util.List<com.google.cloud.channel.v1.PurchasableSku> getPurchasableSkusList() {
if (purchasableSkusBuilder_ == null) {
return java.util.Collections.unmodifiableList(purchasableSkus_);
} else {
return purchasableSkusBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* The list of SKUs requested.
* </pre>
*
* <code>repeated .google.cloud.channel.v1.PurchasableSku purchasable_skus = 1;</code>
*/
public int getPurchasableSkusCount() {
if (purchasableSkusBuilder_ == null) {
return purchasableSkus_.size();
} else {
return purchasableSkusBuilder_.getCount();
}
}
/**
*
*
* <pre>
* The list of SKUs requested.
* </pre>
*
* <code>repeated .google.cloud.channel.v1.PurchasableSku purchasable_skus = 1;</code>
*/
public com.google.cloud.channel.v1.PurchasableSku getPurchasableSkus(int index) {
if (purchasableSkusBuilder_ == null) {
return purchasableSkus_.get(index);
} else {
return purchasableSkusBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* The list of SKUs requested.
* </pre>
*
* <code>repeated .google.cloud.channel.v1.PurchasableSku purchasable_skus = 1;</code>
*/
public Builder setPurchasableSkus(int index, com.google.cloud.channel.v1.PurchasableSku value) {
if (purchasableSkusBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensurePurchasableSkusIsMutable();
purchasableSkus_.set(index, value);
onChanged();
} else {
purchasableSkusBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The list of SKUs requested.
* </pre>
*
* <code>repeated .google.cloud.channel.v1.PurchasableSku purchasable_skus = 1;</code>
*/
public Builder setPurchasableSkus(
int index, com.google.cloud.channel.v1.PurchasableSku.Builder builderForValue) {
if (purchasableSkusBuilder_ == null) {
ensurePurchasableSkusIsMutable();
purchasableSkus_.set(index, builderForValue.build());
onChanged();
} else {
purchasableSkusBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of SKUs requested.
* </pre>
*
* <code>repeated .google.cloud.channel.v1.PurchasableSku purchasable_skus = 1;</code>
*/
public Builder addPurchasableSkus(com.google.cloud.channel.v1.PurchasableSku value) {
if (purchasableSkusBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensurePurchasableSkusIsMutable();
purchasableSkus_.add(value);
onChanged();
} else {
purchasableSkusBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* The list of SKUs requested.
* </pre>
*
* <code>repeated .google.cloud.channel.v1.PurchasableSku purchasable_skus = 1;</code>
*/
public Builder addPurchasableSkus(int index, com.google.cloud.channel.v1.PurchasableSku value) {
if (purchasableSkusBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensurePurchasableSkusIsMutable();
purchasableSkus_.add(index, value);
onChanged();
} else {
purchasableSkusBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The list of SKUs requested.
* </pre>
*
* <code>repeated .google.cloud.channel.v1.PurchasableSku purchasable_skus = 1;</code>
*/
public Builder addPurchasableSkus(
com.google.cloud.channel.v1.PurchasableSku.Builder builderForValue) {
if (purchasableSkusBuilder_ == null) {
ensurePurchasableSkusIsMutable();
purchasableSkus_.add(builderForValue.build());
onChanged();
} else {
purchasableSkusBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of SKUs requested.
* </pre>
*
* <code>repeated .google.cloud.channel.v1.PurchasableSku purchasable_skus = 1;</code>
*/
public Builder addPurchasableSkus(
int index, com.google.cloud.channel.v1.PurchasableSku.Builder builderForValue) {
if (purchasableSkusBuilder_ == null) {
ensurePurchasableSkusIsMutable();
purchasableSkus_.add(index, builderForValue.build());
onChanged();
} else {
purchasableSkusBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of SKUs requested.
* </pre>
*
* <code>repeated .google.cloud.channel.v1.PurchasableSku purchasable_skus = 1;</code>
*/
public Builder addAllPurchasableSkus(
java.lang.Iterable<? extends com.google.cloud.channel.v1.PurchasableSku> values) {
if (purchasableSkusBuilder_ == null) {
ensurePurchasableSkusIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, purchasableSkus_);
onChanged();
} else {
purchasableSkusBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* The list of SKUs requested.
* </pre>
*
* <code>repeated .google.cloud.channel.v1.PurchasableSku purchasable_skus = 1;</code>
*/
public Builder clearPurchasableSkus() {
if (purchasableSkusBuilder_ == null) {
purchasableSkus_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
purchasableSkusBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* The list of SKUs requested.
* </pre>
*
* <code>repeated .google.cloud.channel.v1.PurchasableSku purchasable_skus = 1;</code>
*/
public Builder removePurchasableSkus(int index) {
if (purchasableSkusBuilder_ == null) {
ensurePurchasableSkusIsMutable();
purchasableSkus_.remove(index);
onChanged();
} else {
purchasableSkusBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* The list of SKUs requested.
* </pre>
*
* <code>repeated .google.cloud.channel.v1.PurchasableSku purchasable_skus = 1;</code>
*/
public com.google.cloud.channel.v1.PurchasableSku.Builder getPurchasableSkusBuilder(int index) {
return getPurchasableSkusFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* The list of SKUs requested.
* </pre>
*
* <code>repeated .google.cloud.channel.v1.PurchasableSku purchasable_skus = 1;</code>
*/
public com.google.cloud.channel.v1.PurchasableSkuOrBuilder getPurchasableSkusOrBuilder(
int index) {
if (purchasableSkusBuilder_ == null) {
return purchasableSkus_.get(index);
} else {
return purchasableSkusBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* The list of SKUs requested.
* </pre>
*
* <code>repeated .google.cloud.channel.v1.PurchasableSku purchasable_skus = 1;</code>
*/
public java.util.List<? extends com.google.cloud.channel.v1.PurchasableSkuOrBuilder>
getPurchasableSkusOrBuilderList() {
if (purchasableSkusBuilder_ != null) {
return purchasableSkusBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(purchasableSkus_);
}
}
/**
*
*
* <pre>
* The list of SKUs requested.
* </pre>
*
* <code>repeated .google.cloud.channel.v1.PurchasableSku purchasable_skus = 1;</code>
*/
public com.google.cloud.channel.v1.PurchasableSku.Builder addPurchasableSkusBuilder() {
return getPurchasableSkusFieldBuilder()
.addBuilder(com.google.cloud.channel.v1.PurchasableSku.getDefaultInstance());
}
/**
*
*
* <pre>
* The list of SKUs requested.
* </pre>
*
* <code>repeated .google.cloud.channel.v1.PurchasableSku purchasable_skus = 1;</code>
*/
public com.google.cloud.channel.v1.PurchasableSku.Builder addPurchasableSkusBuilder(int index) {
return getPurchasableSkusFieldBuilder()
.addBuilder(index, com.google.cloud.channel.v1.PurchasableSku.getDefaultInstance());
}
/**
*
*
* <pre>
* The list of SKUs requested.
* </pre>
*
* <code>repeated .google.cloud.channel.v1.PurchasableSku purchasable_skus = 1;</code>
*/
public java.util.List<com.google.cloud.channel.v1.PurchasableSku.Builder>
getPurchasableSkusBuilderList() {
return getPurchasableSkusFieldBuilder().getBuilderList();
}
// Lazily creates the RepeatedFieldBuilderV3 on first builder-style access;
// ownership of the current list transfers to it, and purchasableSkus_ is
// nulled so all further access goes through the field builder.
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.channel.v1.PurchasableSku,
com.google.cloud.channel.v1.PurchasableSku.Builder,
com.google.cloud.channel.v1.PurchasableSkuOrBuilder>
getPurchasableSkusFieldBuilder() {
if (purchasableSkusBuilder_ == null) {
purchasableSkusBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.channel.v1.PurchasableSku,
com.google.cloud.channel.v1.PurchasableSku.Builder,
com.google.cloud.channel.v1.PurchasableSkuOrBuilder>(
purchasableSkus_,
((bitField0_ & 0x00000001) != 0),
getParentForChildren(),
isClean());
purchasableSkus_ = null;
}
return purchasableSkusBuilder_;
}
// Like the message-side field, holds either a String or a ByteString with
// deterministic lazy conversion and write-back caching.
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token to retrieve the next page of results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* A token to retrieve the next page of results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* A token to retrieve the next page of results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* A token to retrieve the next page of results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* A token to retrieve the next page of results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.channel.v1.ListPurchasableSkusResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.channel.v1.ListPurchasableSkusResponse)
// Singleton all-defaults instance; also serves as the factory for builders.
private static final com.google.cloud.channel.v1.ListPurchasableSkusResponse DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.channel.v1.ListPurchasableSkusResponse();
}
public static com.google.cloud.channel.v1.ListPurchasableSkusResponse getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Shared parser instance backing every parseFrom overload; parse errors
// attach the partially-built message for diagnostics before rethrowing.
private static final com.google.protobuf.Parser<ListPurchasableSkusResponse> PARSER =
new com.google.protobuf.AbstractParser<ListPurchasableSkusResponse>() {
@java.lang.Override
public ListPurchasableSkusResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ListPurchasableSkusResponse> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListPurchasableSkusResponse> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.channel.v1.ListPurchasableSkusResponse getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 36,887 | java-dialogflow-cx/google-cloud-dialogflow-cx/src/main/java/com/google/cloud/dialogflow/cx/v3/DeploymentsClient.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.dialogflow.cx.v3;
import com.google.api.core.ApiFuture;
import com.google.api.core.ApiFutures;
import com.google.api.gax.core.BackgroundResource;
import com.google.api.gax.paging.AbstractFixedSizeCollection;
import com.google.api.gax.paging.AbstractPage;
import com.google.api.gax.paging.AbstractPagedListResponse;
import com.google.api.gax.rpc.PageContext;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.cloud.dialogflow.cx.v3.stub.DeploymentsStub;
import com.google.cloud.dialogflow.cx.v3.stub.DeploymentsStubSettings;
import com.google.cloud.location.GetLocationRequest;
import com.google.cloud.location.ListLocationsRequest;
import com.google.cloud.location.ListLocationsResponse;
import com.google.cloud.location.Location;
import com.google.common.util.concurrent.MoreExecutors;
import java.io.IOException;
import java.util.List;
import java.util.concurrent.TimeUnit;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
* Service Description: Service for managing
* [Deployments][google.cloud.dialogflow.cx.v3.Deployment].
*
* <p>This class provides the ability to make remote calls to the backing service through method
* calls that map to API methods. Sample code to get started:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (DeploymentsClient deploymentsClient = DeploymentsClient.create()) {
* DeploymentName name =
* DeploymentName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[ENVIRONMENT]", "[DEPLOYMENT]");
* Deployment response = deploymentsClient.getDeployment(name);
* }
* }</pre>
*
* <p>Note: close() needs to be called on the DeploymentsClient object to clean up resources such as
* threads. In the example above, try-with-resources is used, which automatically calls close().
*
* <table>
* <caption>Methods</caption>
* <tr>
* <th>Method</th>
* <th>Description</th>
* <th>Method Variants</th>
* </tr>
* <tr>
* <td><p> ListDeployments</td>
* <td><p> Returns the list of all deployments in the specified [Environment][google.cloud.dialogflow.cx.v3.Environment].</td>
* <td>
* <p>Request object method variants only take one parameter, a request object, which must be constructed before the call.</p>
* <ul>
* <li><p> listDeployments(ListDeploymentsRequest request)
* </ul>
* <p>"Flattened" method variants have converted the fields of the request object into function parameters to enable multiple ways to call the same method.</p>
* <ul>
* <li><p> listDeployments(EnvironmentName parent)
* <li><p> listDeployments(String parent)
* </ul>
* <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p>
* <ul>
* <li><p> listDeploymentsPagedCallable()
* <li><p> listDeploymentsCallable()
* </ul>
* </td>
* </tr>
* <tr>
* <td><p> GetDeployment</td>
* <td><p> Retrieves the specified [Deployment][google.cloud.dialogflow.cx.v3.Deployment].</td>
* <td>
* <p>Request object method variants only take one parameter, a request object, which must be constructed before the call.</p>
* <ul>
* <li><p> getDeployment(GetDeploymentRequest request)
* </ul>
* <p>"Flattened" method variants have converted the fields of the request object into function parameters to enable multiple ways to call the same method.</p>
* <ul>
* <li><p> getDeployment(DeploymentName name)
* <li><p> getDeployment(String name)
* </ul>
* <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p>
* <ul>
* <li><p> getDeploymentCallable()
* </ul>
* </td>
* </tr>
* <tr>
* <td><p> ListLocations</td>
* <td><p> Lists information about the supported locations for this service.</td>
* <td>
* <p>Request object method variants only take one parameter, a request object, which must be constructed before the call.</p>
* <ul>
* <li><p> listLocations(ListLocationsRequest request)
* </ul>
* <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p>
* <ul>
* <li><p> listLocationsPagedCallable()
* <li><p> listLocationsCallable()
* </ul>
* </td>
* </tr>
* <tr>
* <td><p> GetLocation</td>
* <td><p> Gets information about a location.</td>
* <td>
* <p>Request object method variants only take one parameter, a request object, which must be constructed before the call.</p>
* <ul>
* <li><p> getLocation(GetLocationRequest request)
* </ul>
* <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p>
* <ul>
* <li><p> getLocationCallable()
* </ul>
* </td>
* </tr>
* </table>
*
* <p>See the individual methods for example code.
*
* <p>Many parameters require resource names to be formatted in a particular way. To assist with
* these names, this class includes a format method for each type of name, and additionally a parse
* method to extract the individual identifiers contained within names that are returned.
*
* <p>This class can be customized by passing in a custom instance of DeploymentsSettings to
* create(). For example:
*
* <p>To customize credentials:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* DeploymentsSettings deploymentsSettings =
* DeploymentsSettings.newBuilder()
* .setCredentialsProvider(FixedCredentialsProvider.create(myCredentials))
* .build();
* DeploymentsClient deploymentsClient = DeploymentsClient.create(deploymentsSettings);
* }</pre>
*
* <p>To customize the endpoint:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* DeploymentsSettings deploymentsSettings =
* DeploymentsSettings.newBuilder().setEndpoint(myEndpoint).build();
* DeploymentsClient deploymentsClient = DeploymentsClient.create(deploymentsSettings);
* }</pre>
*
* <p>To use REST (HTTP1.1/JSON) transport (instead of gRPC) for sending and receiving requests over
* the wire:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* DeploymentsSettings deploymentsSettings = DeploymentsSettings.newHttpJsonBuilder().build();
* DeploymentsClient deploymentsClient = DeploymentsClient.create(deploymentsSettings);
* }</pre>
*
* <p>Please refer to the GitHub repository's samples for more quickstart code snippets.
*/
@Generated("by gapic-generator-java")
public class DeploymentsClient implements BackgroundResource {
  private final DeploymentsSettings settings;
  private final DeploymentsStub stub;
  /** Constructs an instance of DeploymentsClient with default settings. */
  public static final DeploymentsClient create() throws IOException {
    return create(DeploymentsSettings.newBuilder().build());
  }
  /**
   * Constructs an instance of DeploymentsClient, using the given settings. The channels are created
   * based on the settings passed in, or defaults for any settings that are not set.
   */
  public static final DeploymentsClient create(DeploymentsSettings settings) throws IOException {
    return new DeploymentsClient(settings);
  }
  /**
   * Constructs an instance of DeploymentsClient, using the given stub for making calls. This is for
   * advanced usage - prefer using create(DeploymentsSettings).
   */
  public static final DeploymentsClient create(DeploymentsStub stub) {
    return new DeploymentsClient(stub);
  }
  /**
   * Constructs an instance of DeploymentsClient, using the given settings. This is protected so
   * that it is easy to make a subclass, but otherwise, the static factory methods should be
   * preferred.
   */
  protected DeploymentsClient(DeploymentsSettings settings) throws IOException {
    this.settings = settings;
    this.stub = ((DeploymentsStubSettings) settings.getStubSettings()).createStub();
  }
  protected DeploymentsClient(DeploymentsStub stub) {
    // When constructed directly from a stub there are no client-level settings to expose;
    // getSettings() will return null for instances created this way.
    this.settings = null;
    this.stub = stub;
  }
  /**
   * Returns the settings this client was created with, or {@code null} if the client was
   * constructed directly from a stub via {@link #create(DeploymentsStub)}.
   */
  public final DeploymentsSettings getSettings() {
    return settings;
  }
  /** Returns the underlying stub that this client delegates all RPC and lifecycle calls to. */
  public DeploymentsStub getStub() {
    return stub;
  }
  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Returns the list of all deployments in the specified
   * [Environment][google.cloud.dialogflow.cx.v3.Environment].
   *
   * <p>Sample code:
   *
   * <pre>{@code
   * // This snippet has been automatically generated and should be regarded as a code template only.
   * // It will require modifications to work:
   * // - It may require correct/in-range values for request initialization.
   * // - It may require specifying regional endpoints when creating the service client as shown in
   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
   * try (DeploymentsClient deploymentsClient = DeploymentsClient.create()) {
   *   EnvironmentName parent =
   *       EnvironmentName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[ENVIRONMENT]");
   *   for (Deployment element : deploymentsClient.listDeployments(parent).iterateAll()) {
   *     // doThingsWith(element);
   *   }
   * }
   * }</pre>
   *
   * @param parent Required. The [Environment][google.cloud.dialogflow.cx.v3.Environment] to list
   *     all deployments for. Format:
   *     `projects/&lt;ProjectID&gt;/locations/&lt;LocationID&gt;/agents/&lt;AgentID&gt;/environments/&lt;EnvironmentID&gt;`.
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
  public final ListDeploymentsPagedResponse listDeployments(EnvironmentName parent) {
    ListDeploymentsRequest request =
        ListDeploymentsRequest.newBuilder()
            .setParent(parent == null ? null : parent.toString())
            .build();
    return listDeployments(request);
  }
  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Returns the list of all deployments in the specified
   * [Environment][google.cloud.dialogflow.cx.v3.Environment].
   *
   * <p>Sample code:
   *
   * <pre>{@code
   * // This snippet has been automatically generated and should be regarded as a code template only.
   * // It will require modifications to work:
   * // - It may require correct/in-range values for request initialization.
   * // - It may require specifying regional endpoints when creating the service client as shown in
   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
   * try (DeploymentsClient deploymentsClient = DeploymentsClient.create()) {
   *   String parent =
   *       EnvironmentName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[ENVIRONMENT]").toString();
   *   for (Deployment element : deploymentsClient.listDeployments(parent).iterateAll()) {
   *     // doThingsWith(element);
   *   }
   * }
   * }</pre>
   *
   * @param parent Required. The [Environment][google.cloud.dialogflow.cx.v3.Environment] to list
   *     all deployments for. Format:
   *     `projects/&lt;ProjectID&gt;/locations/&lt;LocationID&gt;/agents/&lt;AgentID&gt;/environments/&lt;EnvironmentID&gt;`.
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
  public final ListDeploymentsPagedResponse listDeployments(String parent) {
    ListDeploymentsRequest request = ListDeploymentsRequest.newBuilder().setParent(parent).build();
    return listDeployments(request);
  }
  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Returns the list of all deployments in the specified
   * [Environment][google.cloud.dialogflow.cx.v3.Environment].
   *
   * <p>Sample code:
   *
   * <pre>{@code
   * // This snippet has been automatically generated and should be regarded as a code template only.
   * // It will require modifications to work:
   * // - It may require correct/in-range values for request initialization.
   * // - It may require specifying regional endpoints when creating the service client as shown in
   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
   * try (DeploymentsClient deploymentsClient = DeploymentsClient.create()) {
   *   ListDeploymentsRequest request =
   *       ListDeploymentsRequest.newBuilder()
   *           .setParent(
   *               EnvironmentName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[ENVIRONMENT]")
   *                   .toString())
   *           .setPageSize(883849137)
   *           .setPageToken("pageToken873572522")
   *           .build();
   *   for (Deployment element : deploymentsClient.listDeployments(request).iterateAll()) {
   *     // doThingsWith(element);
   *   }
   * }
   * }</pre>
   *
   * @param request The request object containing all of the parameters for the API call.
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
  public final ListDeploymentsPagedResponse listDeployments(ListDeploymentsRequest request) {
    return listDeploymentsPagedCallable().call(request);
  }
  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Returns the list of all deployments in the specified
   * [Environment][google.cloud.dialogflow.cx.v3.Environment].
   *
   * <p>Sample code:
   *
   * <pre>{@code
   * // This snippet has been automatically generated and should be regarded as a code template only.
   * // It will require modifications to work:
   * // - It may require correct/in-range values for request initialization.
   * // - It may require specifying regional endpoints when creating the service client as shown in
   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
   * try (DeploymentsClient deploymentsClient = DeploymentsClient.create()) {
   *   ListDeploymentsRequest request =
   *       ListDeploymentsRequest.newBuilder()
   *           .setParent(
   *               EnvironmentName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[ENVIRONMENT]")
   *                   .toString())
   *           .setPageSize(883849137)
   *           .setPageToken("pageToken873572522")
   *           .build();
   *   ApiFuture<Deployment> future =
   *       deploymentsClient.listDeploymentsPagedCallable().futureCall(request);
   *   // Do something.
   *   for (Deployment element : future.get().iterateAll()) {
   *     // doThingsWith(element);
   *   }
   * }
   * }</pre>
   */
  public final UnaryCallable<ListDeploymentsRequest, ListDeploymentsPagedResponse>
      listDeploymentsPagedCallable() {
    return stub.listDeploymentsPagedCallable();
  }
  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Returns the list of all deployments in the specified
   * [Environment][google.cloud.dialogflow.cx.v3.Environment].
   *
   * <p>Sample code:
   *
   * <pre>{@code
   * // This snippet has been automatically generated and should be regarded as a code template only.
   * // It will require modifications to work:
   * // - It may require correct/in-range values for request initialization.
   * // - It may require specifying regional endpoints when creating the service client as shown in
   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
   * try (DeploymentsClient deploymentsClient = DeploymentsClient.create()) {
   *   ListDeploymentsRequest request =
   *       ListDeploymentsRequest.newBuilder()
   *           .setParent(
   *               EnvironmentName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[ENVIRONMENT]")
   *                   .toString())
   *           .setPageSize(883849137)
   *           .setPageToken("pageToken873572522")
   *           .build();
   *   while (true) {
   *     ListDeploymentsResponse response =
   *         deploymentsClient.listDeploymentsCallable().call(request);
   *     for (Deployment element : response.getDeploymentsList()) {
   *       // doThingsWith(element);
   *     }
   *     String nextPageToken = response.getNextPageToken();
   *     if (!Strings.isNullOrEmpty(nextPageToken)) {
   *       request = request.toBuilder().setPageToken(nextPageToken).build();
   *     } else {
   *       break;
   *     }
   *   }
   * }
   * }</pre>
   */
  public final UnaryCallable<ListDeploymentsRequest, ListDeploymentsResponse>
      listDeploymentsCallable() {
    return stub.listDeploymentsCallable();
  }
  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Retrieves the specified [Deployment][google.cloud.dialogflow.cx.v3.Deployment].
   *
   * <p>Sample code:
   *
   * <pre>{@code
   * // This snippet has been automatically generated and should be regarded as a code template only.
   * // It will require modifications to work:
   * // - It may require correct/in-range values for request initialization.
   * // - It may require specifying regional endpoints when creating the service client as shown in
   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
   * try (DeploymentsClient deploymentsClient = DeploymentsClient.create()) {
   *   DeploymentName name =
   *       DeploymentName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[ENVIRONMENT]", "[DEPLOYMENT]");
   *   Deployment response = deploymentsClient.getDeployment(name);
   * }
   * }</pre>
   *
   * @param name Required. The name of the [Deployment][google.cloud.dialogflow.cx.v3.Deployment].
   *     Format:
   *     `projects/&lt;ProjectID&gt;/locations/&lt;LocationID&gt;/agents/&lt;AgentID&gt;/environments/&lt;EnvironmentID&gt;/deployments/&lt;DeploymentID&gt;`.
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
  public final Deployment getDeployment(DeploymentName name) {
    GetDeploymentRequest request =
        GetDeploymentRequest.newBuilder().setName(name == null ? null : name.toString()).build();
    return getDeployment(request);
  }
  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Retrieves the specified [Deployment][google.cloud.dialogflow.cx.v3.Deployment].
   *
   * <p>Sample code:
   *
   * <pre>{@code
   * // This snippet has been automatically generated and should be regarded as a code template only.
   * // It will require modifications to work:
   * // - It may require correct/in-range values for request initialization.
   * // - It may require specifying regional endpoints when creating the service client as shown in
   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
   * try (DeploymentsClient deploymentsClient = DeploymentsClient.create()) {
   *   String name =
   *       DeploymentName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[ENVIRONMENT]", "[DEPLOYMENT]")
   *           .toString();
   *   Deployment response = deploymentsClient.getDeployment(name);
   * }
   * }</pre>
   *
   * @param name Required. The name of the [Deployment][google.cloud.dialogflow.cx.v3.Deployment].
   *     Format:
   *     `projects/&lt;ProjectID&gt;/locations/&lt;LocationID&gt;/agents/&lt;AgentID&gt;/environments/&lt;EnvironmentID&gt;/deployments/&lt;DeploymentID&gt;`.
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
  public final Deployment getDeployment(String name) {
    GetDeploymentRequest request = GetDeploymentRequest.newBuilder().setName(name).build();
    return getDeployment(request);
  }
  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Retrieves the specified [Deployment][google.cloud.dialogflow.cx.v3.Deployment].
   *
   * <p>Sample code:
   *
   * <pre>{@code
   * // This snippet has been automatically generated and should be regarded as a code template only.
   * // It will require modifications to work:
   * // - It may require correct/in-range values for request initialization.
   * // - It may require specifying regional endpoints when creating the service client as shown in
   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
   * try (DeploymentsClient deploymentsClient = DeploymentsClient.create()) {
   *   GetDeploymentRequest request =
   *       GetDeploymentRequest.newBuilder()
   *           .setName(
   *               DeploymentName.of(
   *                       "[PROJECT]", "[LOCATION]", "[AGENT]", "[ENVIRONMENT]", "[DEPLOYMENT]")
   *                   .toString())
   *           .build();
   *   Deployment response = deploymentsClient.getDeployment(request);
   * }
   * }</pre>
   *
   * @param request The request object containing all of the parameters for the API call.
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
  public final Deployment getDeployment(GetDeploymentRequest request) {
    return getDeploymentCallable().call(request);
  }
  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Retrieves the specified [Deployment][google.cloud.dialogflow.cx.v3.Deployment].
   *
   * <p>Sample code:
   *
   * <pre>{@code
   * // This snippet has been automatically generated and should be regarded as a code template only.
   * // It will require modifications to work:
   * // - It may require correct/in-range values for request initialization.
   * // - It may require specifying regional endpoints when creating the service client as shown in
   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
   * try (DeploymentsClient deploymentsClient = DeploymentsClient.create()) {
   *   GetDeploymentRequest request =
   *       GetDeploymentRequest.newBuilder()
   *           .setName(
   *               DeploymentName.of(
   *                       "[PROJECT]", "[LOCATION]", "[AGENT]", "[ENVIRONMENT]", "[DEPLOYMENT]")
   *                   .toString())
   *           .build();
   *   ApiFuture<Deployment> future = deploymentsClient.getDeploymentCallable().futureCall(request);
   *   // Do something.
   *   Deployment response = future.get();
   * }
   * }</pre>
   */
  public final UnaryCallable<GetDeploymentRequest, Deployment> getDeploymentCallable() {
    return stub.getDeploymentCallable();
  }
  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Lists information about the supported locations for this service.
   *
   * <p>Sample code:
   *
   * <pre>{@code
   * // This snippet has been automatically generated and should be regarded as a code template only.
   * // It will require modifications to work:
   * // - It may require correct/in-range values for request initialization.
   * // - It may require specifying regional endpoints when creating the service client as shown in
   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
   * try (DeploymentsClient deploymentsClient = DeploymentsClient.create()) {
   *   ListLocationsRequest request =
   *       ListLocationsRequest.newBuilder()
   *           .setName("name3373707")
   *           .setFilter("filter-1274492040")
   *           .setPageSize(883849137)
   *           .setPageToken("pageToken873572522")
   *           .build();
   *   for (Location element : deploymentsClient.listLocations(request).iterateAll()) {
   *     // doThingsWith(element);
   *   }
   * }
   * }</pre>
   *
   * @param request The request object containing all of the parameters for the API call.
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
  public final ListLocationsPagedResponse listLocations(ListLocationsRequest request) {
    return listLocationsPagedCallable().call(request);
  }
  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Lists information about the supported locations for this service.
   *
   * <p>Sample code:
   *
   * <pre>{@code
   * // This snippet has been automatically generated and should be regarded as a code template only.
   * // It will require modifications to work:
   * // - It may require correct/in-range values for request initialization.
   * // - It may require specifying regional endpoints when creating the service client as shown in
   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
   * try (DeploymentsClient deploymentsClient = DeploymentsClient.create()) {
   *   ListLocationsRequest request =
   *       ListLocationsRequest.newBuilder()
   *           .setName("name3373707")
   *           .setFilter("filter-1274492040")
   *           .setPageSize(883849137)
   *           .setPageToken("pageToken873572522")
   *           .build();
   *   ApiFuture<Location> future =
   *       deploymentsClient.listLocationsPagedCallable().futureCall(request);
   *   // Do something.
   *   for (Location element : future.get().iterateAll()) {
   *     // doThingsWith(element);
   *   }
   * }
   * }</pre>
   */
  public final UnaryCallable<ListLocationsRequest, ListLocationsPagedResponse>
      listLocationsPagedCallable() {
    return stub.listLocationsPagedCallable();
  }
  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Lists information about the supported locations for this service.
   *
   * <p>Sample code:
   *
   * <pre>{@code
   * // This snippet has been automatically generated and should be regarded as a code template only.
   * // It will require modifications to work:
   * // - It may require correct/in-range values for request initialization.
   * // - It may require specifying regional endpoints when creating the service client as shown in
   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
   * try (DeploymentsClient deploymentsClient = DeploymentsClient.create()) {
   *   ListLocationsRequest request =
   *       ListLocationsRequest.newBuilder()
   *           .setName("name3373707")
   *           .setFilter("filter-1274492040")
   *           .setPageSize(883849137)
   *           .setPageToken("pageToken873572522")
   *           .build();
   *   while (true) {
   *     ListLocationsResponse response = deploymentsClient.listLocationsCallable().call(request);
   *     for (Location element : response.getLocationsList()) {
   *       // doThingsWith(element);
   *     }
   *     String nextPageToken = response.getNextPageToken();
   *     if (!Strings.isNullOrEmpty(nextPageToken)) {
   *       request = request.toBuilder().setPageToken(nextPageToken).build();
   *     } else {
   *       break;
   *     }
   *   }
   * }
   * }</pre>
   */
  public final UnaryCallable<ListLocationsRequest, ListLocationsResponse> listLocationsCallable() {
    return stub.listLocationsCallable();
  }
  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Gets information about a location.
   *
   * <p>Sample code:
   *
   * <pre>{@code
   * // This snippet has been automatically generated and should be regarded as a code template only.
   * // It will require modifications to work:
   * // - It may require correct/in-range values for request initialization.
   * // - It may require specifying regional endpoints when creating the service client as shown in
   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
   * try (DeploymentsClient deploymentsClient = DeploymentsClient.create()) {
   *   GetLocationRequest request = GetLocationRequest.newBuilder().setName("name3373707").build();
   *   Location response = deploymentsClient.getLocation(request);
   * }
   * }</pre>
   *
   * @param request The request object containing all of the parameters for the API call.
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
  public final Location getLocation(GetLocationRequest request) {
    return getLocationCallable().call(request);
  }
  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Gets information about a location.
   *
   * <p>Sample code:
   *
   * <pre>{@code
   * // This snippet has been automatically generated and should be regarded as a code template only.
   * // It will require modifications to work:
   * // - It may require correct/in-range values for request initialization.
   * // - It may require specifying regional endpoints when creating the service client as shown in
   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
   * try (DeploymentsClient deploymentsClient = DeploymentsClient.create()) {
   *   GetLocationRequest request = GetLocationRequest.newBuilder().setName("name3373707").build();
   *   ApiFuture<Location> future = deploymentsClient.getLocationCallable().futureCall(request);
   *   // Do something.
   *   Location response = future.get();
   * }
   * }</pre>
   */
  public final UnaryCallable<GetLocationRequest, Location> getLocationCallable() {
    return stub.getLocationCallable();
  }
  // BackgroundResource lifecycle methods: all delegate directly to the underlying stub.
  @Override
  public final void close() {
    stub.close();
  }
  @Override
  public void shutdown() {
    stub.shutdown();
  }
  @Override
  public boolean isShutdown() {
    return stub.isShutdown();
  }
  @Override
  public boolean isTerminated() {
    return stub.isTerminated();
  }
  @Override
  public void shutdownNow() {
    stub.shutdownNow();
  }
  @Override
  public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException {
    return stub.awaitTermination(duration, unit);
  }
  // Paging helper types for the ListDeployments RPC (generated per GAX paging conventions).
  public static class ListDeploymentsPagedResponse
      extends AbstractPagedListResponse<
          ListDeploymentsRequest,
          ListDeploymentsResponse,
          Deployment,
          ListDeploymentsPage,
          ListDeploymentsFixedSizeCollection> {
    public static ApiFuture<ListDeploymentsPagedResponse> createAsync(
        PageContext<ListDeploymentsRequest, ListDeploymentsResponse, Deployment> context,
        ApiFuture<ListDeploymentsResponse> futureResponse) {
      ApiFuture<ListDeploymentsPage> futurePage =
          ListDeploymentsPage.createEmptyPage().createPageAsync(context, futureResponse);
      return ApiFutures.transform(
          futurePage,
          input -> new ListDeploymentsPagedResponse(input),
          MoreExecutors.directExecutor());
    }
    private ListDeploymentsPagedResponse(ListDeploymentsPage page) {
      super(page, ListDeploymentsFixedSizeCollection.createEmptyCollection());
    }
  }
  public static class ListDeploymentsPage
      extends AbstractPage<
          ListDeploymentsRequest, ListDeploymentsResponse, Deployment, ListDeploymentsPage> {
    private ListDeploymentsPage(
        PageContext<ListDeploymentsRequest, ListDeploymentsResponse, Deployment> context,
        ListDeploymentsResponse response) {
      super(context, response);
    }
    private static ListDeploymentsPage createEmptyPage() {
      return new ListDeploymentsPage(null, null);
    }
    @Override
    protected ListDeploymentsPage createPage(
        PageContext<ListDeploymentsRequest, ListDeploymentsResponse, Deployment> context,
        ListDeploymentsResponse response) {
      return new ListDeploymentsPage(context, response);
    }
    @Override
    public ApiFuture<ListDeploymentsPage> createPageAsync(
        PageContext<ListDeploymentsRequest, ListDeploymentsResponse, Deployment> context,
        ApiFuture<ListDeploymentsResponse> futureResponse) {
      return super.createPageAsync(context, futureResponse);
    }
  }
  public static class ListDeploymentsFixedSizeCollection
      extends AbstractFixedSizeCollection<
          ListDeploymentsRequest,
          ListDeploymentsResponse,
          Deployment,
          ListDeploymentsPage,
          ListDeploymentsFixedSizeCollection> {
    private ListDeploymentsFixedSizeCollection(
        List<ListDeploymentsPage> pages, int collectionSize) {
      super(pages, collectionSize);
    }
    private static ListDeploymentsFixedSizeCollection createEmptyCollection() {
      return new ListDeploymentsFixedSizeCollection(null, 0);
    }
    @Override
    protected ListDeploymentsFixedSizeCollection createCollection(
        List<ListDeploymentsPage> pages, int collectionSize) {
      return new ListDeploymentsFixedSizeCollection(pages, collectionSize);
    }
  }
  // Paging helper types for the ListLocations mixin RPC (generated per GAX paging conventions).
  public static class ListLocationsPagedResponse
      extends AbstractPagedListResponse<
          ListLocationsRequest,
          ListLocationsResponse,
          Location,
          ListLocationsPage,
          ListLocationsFixedSizeCollection> {
    public static ApiFuture<ListLocationsPagedResponse> createAsync(
        PageContext<ListLocationsRequest, ListLocationsResponse, Location> context,
        ApiFuture<ListLocationsResponse> futureResponse) {
      ApiFuture<ListLocationsPage> futurePage =
          ListLocationsPage.createEmptyPage().createPageAsync(context, futureResponse);
      return ApiFutures.transform(
          futurePage,
          input -> new ListLocationsPagedResponse(input),
          MoreExecutors.directExecutor());
    }
    private ListLocationsPagedResponse(ListLocationsPage page) {
      super(page, ListLocationsFixedSizeCollection.createEmptyCollection());
    }
  }
  public static class ListLocationsPage
      extends AbstractPage<
          ListLocationsRequest, ListLocationsResponse, Location, ListLocationsPage> {
    private ListLocationsPage(
        PageContext<ListLocationsRequest, ListLocationsResponse, Location> context,
        ListLocationsResponse response) {
      super(context, response);
    }
    private static ListLocationsPage createEmptyPage() {
      return new ListLocationsPage(null, null);
    }
    @Override
    protected ListLocationsPage createPage(
        PageContext<ListLocationsRequest, ListLocationsResponse, Location> context,
        ListLocationsResponse response) {
      return new ListLocationsPage(context, response);
    }
    @Override
    public ApiFuture<ListLocationsPage> createPageAsync(
        PageContext<ListLocationsRequest, ListLocationsResponse, Location> context,
        ApiFuture<ListLocationsResponse> futureResponse) {
      return super.createPageAsync(context, futureResponse);
    }
  }
  public static class ListLocationsFixedSizeCollection
      extends AbstractFixedSizeCollection<
          ListLocationsRequest,
          ListLocationsResponse,
          Location,
          ListLocationsPage,
          ListLocationsFixedSizeCollection> {
    private ListLocationsFixedSizeCollection(List<ListLocationsPage> pages, int collectionSize) {
      super(pages, collectionSize);
    }
    private static ListLocationsFixedSizeCollection createEmptyCollection() {
      return new ListLocationsFixedSizeCollection(null, 0);
    }
    @Override
    protected ListLocationsFixedSizeCollection createCollection(
        List<ListLocationsPage> pages, int collectionSize) {
      return new ListLocationsFixedSizeCollection(pages, collectionSize);
    }
  }
}
|
oracle/graal | 37,106 | substratevm/src/com.oracle.svm.hosted/src/com/oracle/svm/hosted/reflect/serialize/SerializationFeature.java | /*
* Copyright (c) 2020, 2022, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2020, 2020, Alibaba Group Holding Limited. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package com.oracle.svm.hosted.reflect.serialize;
import static com.oracle.svm.hosted.lambda.LambdaParser.createMethodGraph;
import static com.oracle.svm.hosted.lambda.LambdaParser.getLambdaClassFromConstantNode;
import java.io.Externalizable;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.ObjectStreamClass;
import java.io.ObjectStreamField;
import java.io.Serializable;
import java.lang.invoke.SerializedLambda;
import java.lang.reflect.Constructor;
import java.lang.reflect.Executable;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import org.graalvm.nativeimage.ImageSingletons;
import org.graalvm.nativeimage.dynamicaccess.AccessCondition;
import org.graalvm.nativeimage.hosted.Feature;
import org.graalvm.nativeimage.hosted.RuntimeReflection;
import org.graalvm.nativeimage.impl.RuntimeReflectionSupport;
import org.graalvm.nativeimage.impl.RuntimeSerializationSupport;
import com.oracle.graal.pointsto.util.GraalAccess;
import com.oracle.svm.configure.ConfigurationFile;
import com.oracle.svm.configure.ConfigurationParserOption;
import com.oracle.svm.configure.SerializationConfigurationParser;
import com.oracle.svm.configure.config.conditional.AccessConditionResolver;
import com.oracle.svm.core.configure.ConfigurationFiles;
import com.oracle.svm.core.feature.AutomaticallyRegisteredFeature;
import com.oracle.svm.core.feature.InternalFeature;
import com.oracle.svm.core.hub.DynamicHub;
import com.oracle.svm.core.reflect.SubstrateConstructorAccessor;
import com.oracle.svm.core.reflect.serialize.SerializationSupport;
import com.oracle.svm.core.reflect.target.ReflectionSubstitutionSupport;
import com.oracle.svm.core.util.BasedOnJDKFile;
import com.oracle.svm.core.util.VMError;
import com.oracle.svm.hosted.ConditionalConfigurationRegistry;
import com.oracle.svm.hosted.ConfigurationTypeResolver;
import com.oracle.svm.hosted.FallbackFeature;
import com.oracle.svm.hosted.FeatureImpl;
import com.oracle.svm.hosted.ImageClassLoader;
import com.oracle.svm.hosted.classinitialization.ClassInitializationSupport;
import com.oracle.svm.hosted.config.ConfigurationParserUtils;
import com.oracle.svm.hosted.lambda.LambdaParser;
import com.oracle.svm.hosted.reflect.NativeImageConditionResolver;
import com.oracle.svm.hosted.reflect.RecordUtils;
import com.oracle.svm.hosted.reflect.ReflectionFeature;
import com.oracle.svm.hosted.reflect.proxy.DynamicProxyFeature;
import com.oracle.svm.hosted.reflect.proxy.ProxyRegistry;
import com.oracle.svm.util.LogUtils;
import com.oracle.svm.util.ReflectionUtil;
import jdk.graal.compiler.graph.iterators.NodeIterable;
import jdk.graal.compiler.nodes.ConstantNode;
import jdk.graal.compiler.nodes.StructuredGraph;
import jdk.graal.compiler.options.OptionValues;
import jdk.internal.access.JavaLangReflectAccess;
import jdk.internal.reflect.ConstructorAccessor;
import jdk.internal.reflect.ReflectionFactory;
import jdk.vm.ci.meta.JavaKind;
import jdk.vm.ci.meta.MetaAccessProvider;
import jdk.vm.ci.meta.ResolvedJavaMethod;
/**
 * Native-image feature that enables Java object serialization/deserialization support.
 * It parses the user-provided serialization configuration files, registers the reflection
 * metadata serialization needs, and scans the configured lambda-capturing classes for
 * serializable lambdas during analysis.
 */
@AutomaticallyRegisteredFeature
public class SerializationFeature implements InternalFeature {
    /*
     * Classes registered via "lambdaCapturingClass" configuration. Filled by
     * SerializationBuilder.registerLambdaCapturingClass and drained in duringAnalysis.
     */
    final Set<Class<?>> capturingClasses = ConcurrentHashMap.newKeySet();
    private SerializationBuilder serializationBuilder;
    private SerializationDenyRegistry serializationDenyRegistry;
    /* Number of parsed configuration sources; used in beforeCompilation to decide on fallback. */
    private int loadedConfigurations;

    @Override
    public List<Class<? extends Feature>> getRequiredFeatures() {
        // Must run after reflection and dynamic-proxy support are set up.
        return List.of(ReflectionFeature.class, DynamicProxyFeature.class);
    }

    /** Creates the deny registry and builder, and publishes the builder as an ImageSingleton. */
    @Override
    public void afterRegistration(AfterRegistrationAccess a) {
        FeatureImpl.AfterRegistrationAccessImpl access = (FeatureImpl.AfterRegistrationAccessImpl) a;
        ImageClassLoader imageClassLoader = access.getImageClassLoader();
        ConfigurationTypeResolver typeResolver = new ConfigurationTypeResolver("serialization configuration", imageClassLoader);
        serializationDenyRegistry = new SerializationDenyRegistry(typeResolver);
        serializationBuilder = new SerializationBuilder(serializationDenyRegistry, access, typeResolver, ImageSingletons.lookup(ProxyRegistry.class));
        /*
         * The serialization builder registration has to happen after registration so the
         * ReflectionFeature can access it when creating parsers during setup.
         */
        ImageSingletons.add(RuntimeSerializationSupport.class, serializationBuilder);
    }

    /**
     * Parses serialization configuration: the combined reachability-metadata file, the deny-list
     * files, and the legacy standalone serialization-config files, registering each entry with
     * the appropriate registry.
     */
    @Override
    public void duringSetup(DuringSetupAccess a) {
        FeatureImpl.DuringSetupAccessImpl access = (FeatureImpl.DuringSetupAccessImpl) a;
        ImageClassLoader imageClassLoader = access.getImageClassLoader();
        AccessConditionResolver<AccessCondition> conditionResolver = new NativeImageConditionResolver(imageClassLoader, ClassInitializationSupport.singleton());
        EnumSet<ConfigurationParserOption> parserOptions = ConfigurationFiles.Options.getConfigurationParserOptions();
        SerializationConfigurationParser<AccessCondition> parser = SerializationConfigurationParser.create(true, conditionResolver, serializationBuilder, parserOptions);
        loadedConfigurations = ConfigurationParserUtils.parseAndRegisterConfigurationsFromCombinedFile(parser, imageClassLoader, "serialization");
        SerializationConfigurationParser<AccessCondition> denyCollectorParser = SerializationConfigurationParser.create(false, conditionResolver, serializationDenyRegistry, parserOptions);
        ConfigurationParserUtils.parseAndRegisterConfigurations(denyCollectorParser, imageClassLoader, "serialization",
                        ConfigurationFiles.Options.SerializationDenyConfigurationFiles, ConfigurationFiles.Options.SerializationDenyConfigurationResources,
                        ConfigurationFile.SERIALIZATION_DENY.getFileName());
        SerializationConfigurationParser<AccessCondition> legacyParser = SerializationConfigurationParser.create(false, conditionResolver, serializationBuilder, parserOptions);
        loadedConfigurations += ConfigurationParserUtils.parseAndRegisterConfigurations(legacyParser, imageClassLoader, "serialization",
                        ConfigurationFiles.Options.SerializationConfigurationFiles, ConfigurationFiles.Options.SerializationConfigurationResources,
                        ConfigurationFile.SERIALIZATION.getFileName());
    }

    /**
     * Registers every serializable lambda class found among the constants of {@code graph}:
     * its {@code writeReplace} method for reflection, its UID elements, and the class itself
     * as a serialization target.
     */
    private static void registerLambdasFromConstantNodesInGraph(StructuredGraph graph, SerializationBuilder serializationBuilder) {
        NodeIterable<ConstantNode> constantNodes = ConstantNode.getConstantNodes(graph);
        for (ConstantNode cNode : constantNodes) {
            Class<?> lambdaClass = getLambdaClassFromConstantNode(cNode);
            if (lambdaClass != null && Serializable.class.isAssignableFrom(lambdaClass)) {
                RuntimeReflection.register(ReflectionUtil.lookupMethod(lambdaClass, "writeReplace"));
                SerializationBuilder.registerSerializationUIDElements(lambdaClass, false);
                serializationBuilder.serializationSupport.registerSerializationTargetClass(AccessCondition.unconditional(), serializationBuilder.getHostVM().dynamicHub(lambdaClass));
            }
        }
    }

    /** Parses {@code method} into a graph and registers any serializable lambdas it creates. */
    private static void registerLambdasFromMethod(ResolvedJavaMethod method, SerializationBuilder serializationBuilder, OptionValues options) {
        StructuredGraph graph = createMethodGraph(method, options);
        registerLambdasFromConstantNodesInGraph(graph, serializationBuilder);
    }

    @Override
    public void beforeAnalysis(BeforeAnalysisAccess access) {
        serializationBuilder.beforeAnalysis(access);
    }

    @Override
    public void duringAnalysis(DuringAnalysisAccess access) {
        FeatureImpl.DuringAnalysisAccessImpl impl = (FeatureImpl.DuringAnalysisAccessImpl) access;
        OptionValues options = impl.getBigBang().getOptions();
        /*
         * In order to serialize lambda classes we need to register proper methods for reflection.
         * We register all the lambdas from capturing classes written in the serialization
         * configuration file for serialization. In order to find all the lambdas from a class, we
         * parse all the methods of the given class and find all the lambdas in them.
         */
        MetaAccessProvider metaAccess = GraalAccess.getOriginalProviders().getMetaAccess();
        capturingClasses.parallelStream()
                        .map(metaAccess::lookupJavaType)
                        .flatMap(LambdaParser::allExecutablesDeclaredInClass)
                        .filter(m -> m.getCode() != null)
                        .forEach(m -> registerLambdasFromMethod(m, serializationBuilder, options));
        // Drained so a later analysis iteration does not re-scan the same classes.
        capturingClasses.clear();
    }

    @Override
    public void afterAnalysis(AfterAnalysisAccess access) {
        serializationBuilder.afterAnalysis();
    }

    /**
     * If a fallback image was requested because serialization support was missing and no
     * configuration was loaded, abort compilation by rethrowing the stored fallback request.
     */
    @Override
    public void beforeCompilation(BeforeCompilationAccess access) {
        if (ImageSingletons.contains(FallbackFeature.class)) {
            FallbackFeature.FallbackImageRequest serializationFallback = ImageSingletons.lookup(FallbackFeature.class).serializationFallback;
            if (serializationFallback != null && loadedConfigurations == 0) {
                throw serializationFallback;
            }
        }
        serializationBuilder.serializationSupport.replaceHubKeyWithTypeID();
    }

    /** Exposes the JDK constructor accessor of {@code constructor}; see SerializationBuilder. */
    public static Object getConstructorAccessor(Constructor<?> constructor) {
        return SerializationBuilder.getConstructorAccessor(constructor);
    }
}
/**
 * Collects the classes named in the serialization deny-list configuration. A denied class is
 * still looked up normally, but {@link #isAllowed(Class)} reports it as blocked and logs a
 * warning the first time it is queried.
 */
final class SerializationDenyRegistry implements RuntimeSerializationSupport<AccessCondition> {

    /* Value true = denied but not yet warned about; false = denied and already warned. */
    private final Map<Class<?>, Boolean> deniedClasses = new HashMap<>();
    private final ConfigurationTypeResolver typeResolver;

    SerializationDenyRegistry(ConfigurationTypeResolver typeResolver) {
        this.typeResolver = typeResolver;
    }

    /**
     * No need to deny all associated classes, only the specified class itself is registered as
     * denied.
     */
    @Override
    public void registerIncludingAssociatedClasses(AccessCondition condition, Class<?> clazz) {
        register(condition, clazz);
    }

    @Override
    public void register(AccessCondition condition, Class<?> clazz) {
        if (clazz == null) {
            return;
        }
        deniedClasses.put(clazz, true);
    }

    @Override
    public void register(AccessCondition condition, String className) {
        register(condition, typeResolver.resolveType(className));
    }

    @Override
    public void registerLambdaCapturingClass(AccessCondition condition, String lambdaCapturingClassName) {
        Class<?> resolved = typeResolver.resolveType(lambdaCapturingClassName);
        if (resolved != null) {
            deniedClasses.put(resolved, true);
        }
    }

    /** Deny-listing proxies is not supported; intentionally a no-op. */
    @Override
    public void registerProxyClass(AccessCondition condition, List<String> implementedInterfaces) {
    }

    /**
     * Returns true when {@code clazz} is not on the deny list. The first query for a denied
     * class emits a warning; subsequent queries stay silent.
     */
    public boolean isAllowed(Class<?> clazz) {
        Boolean warnPending = deniedClasses.get(clazz);
        if (warnPending == null) {
            return true;
        }
        if (warnPending) {
            deniedClasses.put(clazz, false); /* Warn only once */
            LogUtils.warning("Serialization deny list contains %s. Image will not support serialization/deserialization of this class.", clazz.getName());
        }
        return false;
    }
}
/**
 * Implements the serialization registration logic: for every registered serialization target it
 * sets up the reflection metadata and the synthetic serialization constructors that
 * {@code ObjectInputStream}/{@code ObjectOutputStream} need at image run time. Relies heavily on
 * JDK-internal reflection ({@code ReflectionFactory}, {@code ObjectStreamClass}) looked up via
 * {@link ReflectionUtil}.
 */
final class SerializationBuilder extends ConditionalConfigurationRegistry implements RuntimeSerializationSupport<AccessCondition> {

    /* JDK-internal accessors resolved once; not public API, hence the reflective lookup. */
    private static final Method getConstructorAccessorMethod = ReflectionUtil.lookupMethod(Constructor.class, "getConstructorAccessor");
    private static final Method getExternalizableConstructorMethod = ReflectionUtil.lookupMethod(ObjectStreamClass.class, "getExternalizableConstructor", Class.class);

    /* Constructor used for abstract serialization targets; created in beforeAnalysis. */
    private Constructor<?> stubConstructor;
    private final Field descField;
    private final Method getDataLayoutMethod;

    final SerializationSupport serializationSupport;
    private final SerializationDenyRegistry denyRegistry;
    private final ConfigurationTypeResolver typeResolver;
    private final FeatureImpl.AfterRegistrationAccessImpl access;
    /* May be null on JDKs without ReflectionFactory.disableSerialConstructorChecks (looked up optionally). */
    private final Method disableSerialConstructorChecks;
    private final Method superHasAccessibleConstructor;
    private final Method packageEquals;
    private final ProxyRegistry proxyRegistry;
    /*
     * Registrations arriving before beforeAnalysis are queued here because constructor accessors
     * cannot be created yet; set to null once the queue has been drained (see beforeAnalysis).
     */
    private List<Runnable> pendingConstructorRegistrations;

    SerializationBuilder(SerializationDenyRegistry serializationDenyRegistry, FeatureImpl.AfterRegistrationAccessImpl access, ConfigurationTypeResolver typeResolver, ProxyRegistry proxyRegistry) {
        this.access = access;
        Class<?> classDataSlotClazz = access.findClassByName("java.io.ObjectStreamClass$ClassDataSlot");
        this.descField = ReflectionUtil.lookupField(classDataSlotClazz, "desc");
        this.getDataLayoutMethod = ReflectionUtil.lookupMethod(ObjectStreamClass.class, "getClassDataLayout");
        this.disableSerialConstructorChecks = ReflectionUtil.lookupMethod(true, ReflectionFactory.class, "disableSerialConstructorChecks");
        this.superHasAccessibleConstructor = ReflectionUtil.lookupMethod(ReflectionFactory.class, "superHasAccessibleConstructor", Class.class);
        this.packageEquals = ReflectionUtil.lookupMethod(ReflectionFactory.class, "packageEquals", Class.class, Class.class);
        this.pendingConstructorRegistrations = new ArrayList<>();
        this.denyRegistry = serializationDenyRegistry;
        this.typeResolver = typeResolver;
        this.proxyRegistry = proxyRegistry;
        this.serializationSupport = new SerializationSupport();
        ImageSingletons.add(SerializationSupport.class, serializationSupport);
    }

    /** Registers {@code clazz} and, transitively, all classes its serialized form references. */
    @Override
    public void registerIncludingAssociatedClasses(AccessCondition condition, Class<?> clazz) {
        abortIfSealed();
        Objects.requireNonNull(clazz, () -> nullErrorMessage("class", "serialization"));
        registerIncludingAssociatedClasses(condition, clazz, new HashSet<>());
    }

    /**
     * Recursive worker for {@link #registerIncludingAssociatedClasses(AccessCondition, Class)}.
     * {@code alreadyVisited} breaks cycles in the reference graph. Registration is abandoned with
     * a warning when the closure cannot be determined statically (subclasses exist, or the class
     * defines its own writeObject).
     */
    private void registerIncludingAssociatedClasses(AccessCondition condition, Class<?> clazz, Set<Class<?>> alreadyVisited) {
        if (alreadyVisited.contains(clazz)) {
            return;
        }
        alreadyVisited.add(clazz);
        String targetClassName = clazz.getName();
        // If the serialization target is primitive, it needs to get boxed, because the target is
        // always an Object.
        if (clazz.isPrimitive()) {
            Class<?> boxedType = JavaKind.fromJavaClass(clazz).toBoxedJavaClass();
            registerIncludingAssociatedClasses(condition, boxedType, alreadyVisited);
            return;
        } else if (!Serializable.class.isAssignableFrom(clazz)) {
            return;
        } else if (access.findSubclasses(clazz).size() > 1) {
            // The classes returned from access.findSubclasses API including the base class itself
            LogUtils.warning("Class %s has subclasses. No classes were registered for object serialization.", targetClassName);
            return;
        }
        try {
            clazz.getDeclaredMethod("writeObject", ObjectOutputStream.class);
            LogUtils.warning("Class %s implements its own writeObject method for object serialization. Any serialization types it uses need to be explicitly registered.", targetClassName);
            return;
        } catch (NoSuchMethodException e) {
            // Expected case. Do nothing
        }
        register(condition, clazz);
        if (clazz.isArray()) {
            registerIncludingAssociatedClasses(condition, clazz.getComponentType(), alreadyVisited);
            return;
        }
        ObjectStreamClass osc = ObjectStreamClass.lookup(clazz);
        try {
            // Walk the serialized class-data layout: recurse into every superclass descriptor.
            for (Object o : (Object[]) getDataLayoutMethod.invoke(osc)) {
                ObjectStreamClass desc = (ObjectStreamClass) descField.get(o);
                if (!desc.equals(osc)) {
                    registerIncludingAssociatedClasses(condition, desc.forClass(), alreadyVisited);
                }
            }
        } catch (ReflectiveOperationException e) {
            throw VMError.shouldNotReachHere("Cannot register serialization classes due to", e);
        }
        // Recurse into the types of all serializable fields.
        for (ObjectStreamField field : osc.getFields()) {
            registerIncludingAssociatedClasses(condition, field.getType(), alreadyVisited);
        }
    }

    /**
     * Registers a class whose methods capture serializable lambdas. The class is remembered in
     * {@link SerializationFeature#capturingClasses} and scanned for lambdas during analysis.
     */
    @Override
    public void registerLambdaCapturingClass(AccessCondition condition, String lambdaCapturingClassName) {
        abortIfSealed();
        Objects.requireNonNull(lambdaCapturingClassName, () -> nullErrorMessage("lambda capturing class", "serialization"));
        Class<?> lambdaCapturingClass = typeResolver.resolveType(lambdaCapturingClassName);
        if (lambdaCapturingClass == null || lambdaCapturingClass.isPrimitive() || lambdaCapturingClass.isArray()) {
            return;
        }
        // $deserializeLambda$ is the compiler-generated hook required for lambda deserialization.
        if (ReflectionUtil.lookupMethod(true, lambdaCapturingClass, "$deserializeLambda$", SerializedLambda.class) == null) {
            LogUtils.warning("Could not register %s for lambda serialization as it does not capture any serializable lambda.", lambdaCapturingClass);
            return;
        }
        registerConditionalConfiguration(condition, (cnd) -> {
            ImageSingletons.lookup(SerializationFeature.class).capturingClasses.add(lambdaCapturingClass);
            RuntimeReflection.register(lambdaCapturingClass);
            RuntimeReflection.register(ReflectionUtil.lookupMethod(lambdaCapturingClass, "$deserializeLambda$", SerializedLambda.class));
            SerializationSupport.currentLayer().registerLambdaCapturingClass(cnd, lambdaCapturingClassName);
        });
    }

    /** Creates (or reuses) the dynamic proxy class for the given interfaces and registers it. */
    @Override
    public void registerProxyClass(AccessCondition condition, List<String> implementedInterfaces) {
        abortIfSealed();
        registerConditionalConfiguration(condition, (cnd) -> {
            try {
                Class<?> proxyClass = proxyRegistry.createProxyClassForSerialization(implementedInterfaces);
                register(cnd, proxyClass);
            } catch (IllegalArgumentException t) {
                /* ignore: can't serialize class that can't be instantiated */
            }
        });
    }

    @Override
    public void register(AccessCondition condition, String targetClassName) {
        abortIfSealed();
        Class<?> serializationTargetClass = typeResolver.resolveType(targetClassName);
        /* With invalid streams we have to register the class for lookup */
        ImageSingletons.lookup(RuntimeReflectionSupport.class).registerClassLookup(condition, targetClassName);
        if (serializationTargetClass == null) {
            return;
        }
        register(condition, serializationTargetClass);
    }

    /**
     * Main registration entry point: queues (or performs) constructor-accessor creation and
     * registers all reflection metadata needed to serialize and deserialize the class, unless
     * it is on the deny list.
     */
    @Override
    public void register(AccessCondition condition, Class<?> serializationTargetClass) {
        abortIfSealed();
        Objects.requireNonNull(serializationTargetClass, () -> nullErrorMessage("class", "serialization"));
        registerConditionalConfiguration(condition, (cnd) -> {
            /*
             * Register class for reflection as it is needed when the class-value itself is
             * serialized.
             */
            // NOTE(review): uses the outer `condition` rather than the resolved `cnd` passed to
            // this lambda, unlike the calls below — confirm this asymmetry is intentional.
            ImageSingletons.lookup(RuntimeReflectionSupport.class).register(condition, serializationTargetClass);
            if (!Serializable.class.isAssignableFrom(serializationTargetClass)) {
                return;
            }
            /*
             * Making this class reachable as it will end up in the image heap without the analysis
             * knowing.
             */
            RuntimeReflection.register(java.io.ObjectOutputStream.class);
            if (denyRegistry.isAllowed(serializationTargetClass)) {
                addOrQueueConstructorAccessors(cnd, serializationTargetClass, getHostVM().dynamicHub(serializationTargetClass));
                Class<?> superclass = serializationTargetClass.getSuperclass();
                if (superclass != null) {
                    ImageSingletons.lookup(RuntimeReflectionSupport.class).registerAllDeclaredConstructorsQuery(AccessCondition.unconditional(), true, superclass);
                    ImageSingletons.lookup(RuntimeReflectionSupport.class).registerMethodLookup(AccessCondition.unconditional(), superclass, "writeReplace");
                    ImageSingletons.lookup(RuntimeReflectionSupport.class).registerMethodLookup(AccessCondition.unconditional(), superclass, "readResolve");
                }
                registerForSerialization(cnd, serializationTargetClass);
                registerForDeserialization(cnd, serializationTargetClass);
            }
        });
    }

    /** Runs the accessor registration now, or defers it until beforeAnalysis if too early. */
    private void addOrQueueConstructorAccessors(AccessCondition cnd, Class<?> serializationTargetClass, DynamicHub hub) {
        if (pendingConstructorRegistrations != null) {
            // cannot yet create constructor accessor -> add to pending
            pendingConstructorRegistrations.add(() -> registerConstructorAccessors(cnd, serializationTargetClass, hub));
        } else {
            // can already run the registration
            registerConstructorAccessors(cnd, serializationTargetClass, hub);
        }
    }

    /**
     * Registers the default serialization constructor plus one custom-target accessor per class
     * in the superclass chain (used by ReflectionFactory-created custom constructors).
     */
    private void registerConstructorAccessors(AccessCondition cnd, Class<?> serializationTargetClass, DynamicHub hub) {
        serializationSupport.registerSerializationTargetClass(cnd, hub);
        registerConstructorAccessor(cnd, serializationTargetClass, null);
        for (Class<?> superclass = serializationTargetClass; superclass != null; superclass = superclass.getSuperclass()) {
            registerConstructorAccessor(cnd, serializationTargetClass, superclass);
        }
    }

    /** Creates the accessor and, if one was produced, registers its constructor for reflection. */
    private void registerConstructorAccessor(AccessCondition cnd, Class<?> serializationTargetClass, Class<?> targetConstructorClass) {
        Optional.ofNullable(addConstructorAccessor(serializationTargetClass, targetConstructorClass))
                        .map(ReflectionUtil::lookupConstructor)
                        .ifPresent(methods -> ImageSingletons.lookup(RuntimeReflectionSupport.class).register(cnd, false, methods));
    }

    /**
     * Drains the pending registrations now that accessors can be created; from this point on,
     * registrations run immediately (pendingConstructorRegistrations becomes null).
     */
    void beforeAnalysis(Feature.BeforeAnalysisAccess beforeAnalysisAccess) {
        setAnalysisAccess(beforeAnalysisAccess);
        stubConstructor = newConstructorForSerialization(SerializationSupport.StubForAbstractClass.class, null);
        pendingConstructorRegistrations.forEach(Runnable::run);
        pendingConstructorRegistrations = null;
        serializationSupport.setStubConstructor(stubConstructor);
    }

    /**
     * Registers lookup metadata for {@code methodName} along the superclass chain and registers
     * the first declaration found (mirrors virtual-method resolution for inheritable methods
     * such as writeReplace/readResolve).
     */
    private static void registerQueriesForInheritableMethod(Class<?> clazz, String methodName, Class<?>... args) {
        Class<?> iter = clazz;
        while (iter != null) {
            RuntimeReflection.registerMethodLookup(iter, methodName, args);
            Method method = ReflectionUtil.lookupMethod(true, clazz, methodName, args);
            if (method != null) {
                RuntimeReflection.register(method);
                break;
            }
            iter = iter.getSuperclass();
        }
    }

    /** Registers the method if declared on {@code clazz}, else just its (negative) lookup. */
    private static void registerMethod(AccessCondition cnd, Class<?> clazz, String methodName, Class<?>... args) {
        Method method = ReflectionUtil.lookupMethod(true, clazz, methodName, args);
        if (method != null) {
            ImageSingletons.lookup(RuntimeReflectionSupport.class).register(cnd, false, method);
        } else {
            RuntimeReflection.registerMethodLookup(clazz, methodName, args);
        }
    }

    /**
     * Registers everything writing an object requires: UID computation elements, the constructor
     * chain used by ReflectionFactory.newConstructorForSerialization, all declared fields (and
     * their types) up the hierarchy, and the serialization hook methods.
     */
    private void registerForSerialization(AccessCondition cnd, Class<?> serializationTargetClass) {
        if (Serializable.class.isAssignableFrom(serializationTargetClass)) {
            /*
             * ObjectStreamClass.computeDefaultSUID is always called at runtime to verify
             * serialization class consistency, so need to register all constructors, methods and
             * fields.
             */
            registerSerializationUIDElements(serializationTargetClass, true); // if MRE
            /*
             * Required by jdk.internal.reflect.ReflectionFactory.newConstructorForSerialization
             */
            Class<?> initCl = serializationTargetClass;
            boolean initClValid = true;
            while (Serializable.class.isAssignableFrom(initCl)) {
                Class<?> prev = initCl;
                RuntimeReflection.registerAllDeclaredConstructors(initCl);
                if ((initCl = initCl.getSuperclass()) == null || (!disableSerialConstructorChecks() &&
                                !prev.isArray() && !superHasAccessibleConstructor(prev))) {
                    initClValid = false;
                    break;
                }
            }
            if (initClValid) {
                // initCl is now the first non-serializable superclass; its constructors are needed too.
                RuntimeReflection.registerAllDeclaredConstructors(initCl);
            }
            Class<?> iter = serializationTargetClass;
            while (iter != null) {
                RuntimeReflection.registerAllDeclaredFields(iter);
                try {
                    Arrays.stream(iter.getDeclaredFields())
                                    .map(Field::getType).forEach(type -> {
                                        RuntimeReflection.registerAllDeclaredMethods(type);
                                        RuntimeReflection.registerAllDeclaredFields(type);
                                        RuntimeReflection.registerAllDeclaredConstructors(type);
                                    });
                } catch (LinkageError l) {
                    /* Handled with registration above */
                }
                iter = iter.getSuperclass();
            }
        }
        registerQueriesForInheritableMethod(serializationTargetClass, "writeReplace");
        registerQueriesForInheritableMethod(serializationTargetClass, "readResolve");
        registerMethod(cnd, serializationTargetClass, "writeObject", ObjectOutputStream.class);
        registerMethod(cnd, serializationTargetClass, "readObjectNoData");
        registerMethod(cnd, serializationTargetClass, "readObject", ObjectInputStream.class);
    }

    /**
     * Registers the members ObjectStreamClass.computeDefaultSUID inspects. With
     * {@code fullyRegister} the members are also eagerly resolved for legacy compatibility.
     */
    @SuppressWarnings("unused")
    static void registerSerializationUIDElements(Class<?> serializationTargetClass, boolean fullyRegister) {
        RuntimeReflection.registerAllDeclaredConstructors(serializationTargetClass);
        RuntimeReflection.registerAllDeclaredMethods(serializationTargetClass);
        RuntimeReflection.registerAllDeclaredFields(serializationTargetClass);
        if (fullyRegister) {
            try {
                /* This is here a legacy that we can't remove as it is a breaking change */
                RuntimeReflection.register(serializationTargetClass.getDeclaredConstructors());
                RuntimeReflection.register(serializationTargetClass.getDeclaredMethods());
                RuntimeReflection.register(serializationTargetClass.getDeclaredFields());
            } catch (LinkageError e) {
                /* Handled by registrations above */
            }
        }
        RuntimeReflection.registerFieldLookup(serializationTargetClass, "serialPersistentFields");
    }

    /** Seals the registry; registrations after analysis are programming errors. */
    public void afterAnalysis() {
        sealed();
    }

    /**
     * Registers what reading an object requires: the class itself, record components and the
     * canonical constructor for records, the no-arg constructor lookup for Externalizable, and
     * the readObject/readResolve hooks.
     */
    private static void registerForDeserialization(AccessCondition cnd, Class<?> serializationTargetClass) {
        ImageSingletons.lookup(RuntimeReflectionSupport.class).register(cnd, serializationTargetClass);
        if (serializationTargetClass.isRecord()) {
            /*
             * Serialization for records invokes Class.getRecordComponents(). Registering all record
             * component accessor methods for reflection ensures that the record components are
             * available at run time.
             */
            ImageSingletons.lookup(RuntimeReflectionSupport.class).registerAllRecordComponentsQuery(cnd, serializationTargetClass);
            try {
                /* Serialization for records uses the canonical record constructor directly. */
                Executable[] methods = new Executable[]{RecordUtils.getCanonicalRecordConstructor(serializationTargetClass)};
                ImageSingletons.lookup(RuntimeReflectionSupport.class).register(cnd, false, methods);
                Executable[] methods1 = RecordUtils.getRecordComponentAccessorMethods(serializationTargetClass);
                ImageSingletons.lookup(RuntimeReflectionSupport.class).register(cnd, false, methods1);
            } catch (LinkageError le) {
                /*
                 * Handled by the record component registration above.
                 */
            }
        } else if (Externalizable.class.isAssignableFrom(serializationTargetClass)) {
            RuntimeReflection.registerConstructorLookup(serializationTargetClass);
        }
        registerMethod(cnd, serializationTargetClass, "readObject", ObjectInputStream.class);
        registerMethod(cnd, serializationTargetClass, "readResolve");
    }

    /**
     * Builds the synthetic constructor used to deserialize {@code serializationTargetClass}:
     * the constructor-to-call (custom, or the one the serialization spec selects) wrapped with a
     * native-image ConstructorAccessor. Returns null when no suitable constructor exists.
     */
    private Constructor<?> newConstructorForSerialization(Class<?> serializationTargetClass, Constructor<?> customConstructorToCall) {
        Constructor<?> constructorToCall;
        if (customConstructorToCall == null) {
            constructorToCall = getConstructorForSerialization(serializationTargetClass);
        } else {
            constructorToCall = customConstructorToCall;
        }
        if (constructorToCall == null) {
            return null;
        }
        ConstructorAccessor acc = getConstructorAccessor(serializationTargetClass, constructorToCall);
        // Re-wrap as a java.lang.reflect.Constructor via JDK-internal JavaLangReflectAccess.
        JavaLangReflectAccess langReflectAccess = ReflectionUtil.readField(ReflectionFactory.class, "langReflectAccess", ReflectionFactory.getReflectionFactory());
        Method newConstructorWithAccessor = ReflectionUtil.lookupMethod(JavaLangReflectAccess.class, "newConstructorWithAccessor", Constructor.class, ConstructorAccessor.class);
        return ReflectionUtil.invokeMethod(newConstructorWithAccessor, langReflectAccess, constructorToCall, acc);
    }

    private static ConstructorAccessor getConstructorAccessor(Class<?> serializationTargetClass, Constructor<?> constructorToCall) {
        return (SubstrateConstructorAccessor) ReflectionSubstitutionSupport.singleton().getOrCreateConstructorAccessor(serializationTargetClass, constructorToCall);
    }

    /**
     * Returns a constructor that allocates an instance of cl and that then initializes the instance
     * by calling the no-arg constructor of its first non-serializable superclass. This is specified
     * in the Serialization Specification, section 3.1, in step 11 of the deserialization process.
     * If cl is not serializable, returns cl's no-arg constructor. If no accessible constructor is
     * found, or if the class hierarchy is somehow malformed (e.g., a serializable class has no
     * superclass), null is returned.
     *
     * @param cl the class for which a constructor is to be found
     * @return the generated constructor, or null if none is available
     */
    @BasedOnJDKFile("https://github.com/openjdk/jdk/blob/jdk-24+22/src/java.base/share/classes/jdk/internal/reflect/ReflectionFactory.java#L311-L332")
    private Constructor<?> getConstructorForSerialization(Class<?> cl) {
        Class<?> initCl = cl;
        while (Serializable.class.isAssignableFrom(initCl)) {
            Class<?> prev = initCl;
            if ((initCl = initCl.getSuperclass()) == null || (!disableSerialConstructorChecks() &&
                            !superHasAccessibleConstructor(prev))) {
                return null;
            }
        }
        Constructor<?> constructorToCall;
        try {
            constructorToCall = initCl.getDeclaredConstructor();
            int mods = constructorToCall.getModifiers();
            // Same accessibility rules as the JDK: private never; package-private only same package.
            if ((mods & Modifier.PRIVATE) != 0 ||
                            ((mods & (Modifier.PUBLIC | Modifier.PROTECTED)) == 0 &&
                                            !packageEquals(cl, initCl))) {
                return null;
            }
        } catch (NoSuchMethodException ex) {
            return null;
        }
        return constructorToCall;
    }

    /** Delegates to ReflectionFactory.superHasAccessibleConstructor; false on linkage failure. */
    private boolean superHasAccessibleConstructor(Class<?> prev) {
        try {
            return ReflectionUtil.invokeMethod(superHasAccessibleConstructor, ReflectionFactory.getReflectionFactory(), prev);
        } catch (LinkageError le) {
            return false;
        }
    }

    /** False when the JDK does not expose disableSerialConstructorChecks (method lookup failed). */
    private boolean disableSerialConstructorChecks() {
        if (disableSerialConstructorChecks == null) {
            return false;
        }
        return ReflectionUtil.invokeMethod(disableSerialConstructorChecks, null);
    }

    private boolean packageEquals(Class<?> cl1, Class<?> cl2) {
        return ReflectionUtil.invokeMethod(packageEquals, null, cl1, cl2);
    }

    static Object getConstructorAccessor(Constructor<?> constructor) {
        return ReflectionUtil.invokeMethod(getConstructorAccessorMethod, constructor);
    }

    private static Constructor<?> getExternalizableConstructor(Class<?> serializationTargetClass) {
        return ReflectionUtil.invokeMethod(getExternalizableConstructorMethod, null, serializationTargetClass);
    }

    /**
     * Determines (and records in SerializationSupport) which class's constructor deserialization
     * of {@code serializationTargetClass} must invoke, creating the accessor as a side effect.
     * Returns the declaring class of that constructor, or null if none could be registered.
     */
    private Class<?> addConstructorAccessor(Class<?> serializationTargetClass, Class<?> customTargetConstructorClass) {
        // Don't generate SerializationConstructorAccessor class for Externalizable case
        if (Externalizable.class.isAssignableFrom(serializationTargetClass)) {
            try {
                Constructor<?> externalizableConstructor = getExternalizableConstructor(serializationTargetClass);
                if (externalizableConstructor == null) {
                    externalizableConstructor = getExternalizableConstructor(Object.class);
                }
                return externalizableConstructor.getDeclaringClass();
            } catch (Exception e) {
                throw VMError.shouldNotReachHere(e);
            }
        }
        Constructor<?> targetConstructor;
        if (Modifier.isAbstract(serializationTargetClass.getModifiers())) {
            // Abstract classes cannot be instantiated; use the shared stub-class constructor.
            VMError.guarantee(stubConstructor != null, "stubConstructor is null, calling this too early");
            targetConstructor = stubConstructor;
        } else {
            Constructor<?> customConstructorToCall = null;
            if (customTargetConstructorClass != null) {
                customConstructorToCall = ReflectionUtil.lookupConstructor(true, customTargetConstructorClass);
                if (customConstructorToCall == null) {
                    /* No suitable constructor, no need to register */
                    return null;
                }
                if (customTargetConstructorClass == serializationTargetClass) {
                    /* No custom constructor needed. Simply use existing no-arg constructor. */
                    return customTargetConstructorClass;
                }
            }
            targetConstructor = newConstructorForSerialization(serializationTargetClass, customConstructorToCall);
            if (targetConstructor == null) {
                // Fall back to Object's constructor when the spec-selected one is unavailable.
                targetConstructor = newConstructorForSerialization(Object.class, customConstructorToCall);
            }
        }
        Class<?> targetConstructorClass = targetConstructor.getDeclaringClass();
        serializationSupport.addConstructorAccessor(serializationTargetClass, targetConstructorClass, getConstructorAccessor(targetConstructor));
        return targetConstructorClass;
    }
}
|
googleads/google-ads-java | 36,858 | google-ads-stubs-v19/src/main/java/com/google/ads/googleads/v19/services/SuggestBrandsRequest.java | // Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v19/services/brand_suggestion_service.proto
// Protobuf Java Version: 3.25.7
package com.google.ads.googleads.v19.services;
/**
* <pre>
* Request message for
* [BrandSuggestionService.SuggestBrands][google.ads.googleads.v19.services.BrandSuggestionService.SuggestBrands].
* </pre>
*
* Protobuf type {@code google.ads.googleads.v19.services.SuggestBrandsRequest}
*/
// NOTE: Machine-generated protobuf stub (protoc, Protobuf Java 3.25.7). Do not edit by
// hand — change brand_suggestion_service.proto and regenerate instead.
public final class SuggestBrandsRequest extends
    com.google.protobuf.GeneratedMessageV3 implements
    // @@protoc_insertion_point(message_implements:google.ads.googleads.v19.services.SuggestBrandsRequest)
    SuggestBrandsRequestOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use SuggestBrandsRequest.newBuilder() to construct.
  private SuggestBrandsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private SuggestBrandsRequest() {
    customerId_ = "";
    brandPrefix_ = "";
    selectedBrands_ =
        com.google.protobuf.LazyStringArrayList.emptyList();
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(
      UnusedPrivateParameter unused) {
    return new SuggestBrandsRequest();
  }
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return com.google.ads.googleads.v19.services.BrandSuggestionServiceProto.internal_static_google_ads_googleads_v19_services_SuggestBrandsRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.ads.googleads.v19.services.BrandSuggestionServiceProto.internal_static_google_ads_googleads_v19_services_SuggestBrandsRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.ads.googleads.v19.services.SuggestBrandsRequest.class, com.google.ads.googleads.v19.services.SuggestBrandsRequest.Builder.class);
  }

  // Presence bits for explicit-presence fields; bit 0x00000001 marks brand_prefix as set.
  private int bitField0_;
  public static final int CUSTOMER_ID_FIELD_NUMBER = 1;
  @SuppressWarnings("serial")
  private volatile java.lang.Object customerId_ = "";
  /**
   * <pre>
   * Required. The ID of the customer onto which to apply the brand suggestion
   * operation.
   * </pre>
   *
   * <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
   * @return The customerId.
   */
  @java.lang.Override
  public java.lang.String getCustomerId() {
    java.lang.Object ref = customerId_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Lazily decode the UTF-8 ByteString and cache the String back into the field.
      com.google.protobuf.ByteString bs =
          (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      customerId_ = s;
      return s;
    }
  }
  /**
   * <pre>
   * Required. The ID of the customer onto which to apply the brand suggestion
   * operation.
   * </pre>
   *
   * <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
   * @return The bytes for customerId.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString
      getCustomerIdBytes() {
    java.lang.Object ref = customerId_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8(
              (java.lang.String) ref);
      customerId_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int BRAND_PREFIX_FIELD_NUMBER = 2;
  @SuppressWarnings("serial")
  private volatile java.lang.Object brandPrefix_ = "";
  /**
   * <pre>
   * Required. The prefix of a brand name.
   * </pre>
   *
   * <code>optional string brand_prefix = 2 [(.google.api.field_behavior) = REQUIRED];</code>
   * @return Whether the brandPrefix field is set.
   */
  @java.lang.Override
  public boolean hasBrandPrefix() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   * <pre>
   * Required. The prefix of a brand name.
   * </pre>
   *
   * <code>optional string brand_prefix = 2 [(.google.api.field_behavior) = REQUIRED];</code>
   * @return The brandPrefix.
   */
  @java.lang.Override
  public java.lang.String getBrandPrefix() {
    java.lang.Object ref = brandPrefix_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs =
          (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      brandPrefix_ = s;
      return s;
    }
  }
  /**
   * <pre>
   * Required. The prefix of a brand name.
   * </pre>
   *
   * <code>optional string brand_prefix = 2 [(.google.api.field_behavior) = REQUIRED];</code>
   * @return The bytes for brandPrefix.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString
      getBrandPrefixBytes() {
    java.lang.Object ref = brandPrefix_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8(
              (java.lang.String) ref);
      brandPrefix_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int SELECTED_BRANDS_FIELD_NUMBER = 3;
  @SuppressWarnings("serial")
  private com.google.protobuf.LazyStringArrayList selectedBrands_ =
      com.google.protobuf.LazyStringArrayList.emptyList();
  /**
   * <pre>
   * Optional. Ids of the brands already selected by advertisers. They will be
   * excluded in response. These are expected to be brand ids not brand names.
   * </pre>
   *
   * <code>repeated string selected_brands = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
   * @return A list containing the selectedBrands.
   */
  public com.google.protobuf.ProtocolStringList
      getSelectedBrandsList() {
    return selectedBrands_;
  }
  /**
   * <pre>
   * Optional. Ids of the brands already selected by advertisers. They will be
   * excluded in response. These are expected to be brand ids not brand names.
   * </pre>
   *
   * <code>repeated string selected_brands = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
   * @return The count of selectedBrands.
   */
  public int getSelectedBrandsCount() {
    return selectedBrands_.size();
  }
  /**
   * <pre>
   * Optional. Ids of the brands already selected by advertisers. They will be
   * excluded in response. These are expected to be brand ids not brand names.
   * </pre>
   *
   * <code>repeated string selected_brands = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
   * @param index The index of the element to return.
   * @return The selectedBrands at the given index.
   */
  public java.lang.String getSelectedBrands(int index) {
    return selectedBrands_.get(index);
  }
  /**
   * <pre>
   * Optional. Ids of the brands already selected by advertisers. They will be
   * excluded in response. These are expected to be brand ids not brand names.
   * </pre>
   *
   * <code>repeated string selected_brands = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
   * @param index The index of the value to return.
   * @return The bytes of the selectedBrands at the given index.
   */
  public com.google.protobuf.ByteString
      getSelectedBrandsBytes(int index) {
    return selectedBrands_.getByteString(index);
  }

  // Cached isInitialized() result: -1 = not computed yet, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(customerId_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, customerId_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, brandPrefix_);
    }
    for (int i = 0; i < selectedBrands_.size(); i++) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, selectedBrands_.getRaw(i));
    }
    getUnknownFields().writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(customerId_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, customerId_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, brandPrefix_);
    }
    {
      int dataSize = 0;
      for (int i = 0; i < selectedBrands_.size(); i++) {
        dataSize += computeStringSizeNoTag(selectedBrands_.getRaw(i));
      }
      size += dataSize;
      // One byte of tag overhead per element (field 3 fits in a single-byte tag).
      size += 1 * getSelectedBrandsList().size();
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
     return true;
    }
    if (!(obj instanceof com.google.ads.googleads.v19.services.SuggestBrandsRequest)) {
      return super.equals(obj);
    }
    com.google.ads.googleads.v19.services.SuggestBrandsRequest other = (com.google.ads.googleads.v19.services.SuggestBrandsRequest) obj;

    if (!getCustomerId()
        .equals(other.getCustomerId())) return false;
    if (hasBrandPrefix() != other.hasBrandPrefix()) return false;
    if (hasBrandPrefix()) {
      if (!getBrandPrefix()
          .equals(other.getBrandPrefix())) return false;
    }
    if (!getSelectedBrandsList()
        .equals(other.getSelectedBrandsList())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + CUSTOMER_ID_FIELD_NUMBER;
    hash = (53 * hash) + getCustomerId().hashCode();
    if (hasBrandPrefix()) {
      hash = (37 * hash) + BRAND_PREFIX_FIELD_NUMBER;
      hash = (53 * hash) + getBrandPrefix().hashCode();
    }
    if (getSelectedBrandsCount() > 0) {
      hash = (37 * hash) + SELECTED_BRANDS_FIELD_NUMBER;
      hash = (53 * hash) + getSelectedBrandsList().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static com.google.ads.googleads.v19.services.SuggestBrandsRequest parseFrom(
      java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v19.services.SuggestBrandsRequest parseFrom(
      java.nio.ByteBuffer data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v19.services.SuggestBrandsRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v19.services.SuggestBrandsRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v19.services.SuggestBrandsRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v19.services.SuggestBrandsRequest parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v19.services.SuggestBrandsRequest parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v19.services.SuggestBrandsRequest parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v19.services.SuggestBrandsRequest parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v19.services.SuggestBrandsRequest parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v19.services.SuggestBrandsRequest parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v19.services.SuggestBrandsRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.ads.googleads.v19.services.SuggestBrandsRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * <pre>
   * Request message for
   * [BrandSuggestionService.SuggestBrands][google.ads.googleads.v19.services.BrandSuggestionService.SuggestBrands].
   * </pre>
   *
   * Protobuf type {@code google.ads.googleads.v19.services.SuggestBrandsRequest}
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:google.ads.googleads.v19.services.SuggestBrandsRequest)
      com.google.ads.googleads.v19.services.SuggestBrandsRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return com.google.ads.googleads.v19.services.BrandSuggestionServiceProto.internal_static_google_ads_googleads_v19_services_SuggestBrandsRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.ads.googleads.v19.services.BrandSuggestionServiceProto.internal_static_google_ads_googleads_v19_services_SuggestBrandsRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.ads.googleads.v19.services.SuggestBrandsRequest.class, com.google.ads.googleads.v19.services.SuggestBrandsRequest.Builder.class);
    }

    // Construct using com.google.ads.googleads.v19.services.SuggestBrandsRequest.newBuilder()
    private Builder() {

    }

    private Builder(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);

    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      customerId_ = "";
      brandPrefix_ = "";
      selectedBrands_ =
          com.google.protobuf.LazyStringArrayList.emptyList();
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return com.google.ads.googleads.v19.services.BrandSuggestionServiceProto.internal_static_google_ads_googleads_v19_services_SuggestBrandsRequest_descriptor;
    }

    @java.lang.Override
    public com.google.ads.googleads.v19.services.SuggestBrandsRequest getDefaultInstanceForType() {
      return com.google.ads.googleads.v19.services.SuggestBrandsRequest.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.ads.googleads.v19.services.SuggestBrandsRequest build() {
      com.google.ads.googleads.v19.services.SuggestBrandsRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.ads.googleads.v19.services.SuggestBrandsRequest buildPartial() {
      com.google.ads.googleads.v19.services.SuggestBrandsRequest result = new com.google.ads.googleads.v19.services.SuggestBrandsRequest(this);
      if (bitField0_ != 0) { buildPartial0(result); }
      onBuilt();
      return result;
    }

    private void buildPartial0(com.google.ads.googleads.v19.services.SuggestBrandsRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.customerId_ = customerId_;
      }
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.brandPrefix_ = brandPrefix_;
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        selectedBrands_.makeImmutable();
        result.selectedBrands_ = selectedBrands_;
      }
      result.bitField0_ |= to_bitField0_;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(
        com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(
        com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.ads.googleads.v19.services.SuggestBrandsRequest) {
        return mergeFrom((com.google.ads.googleads.v19.services.SuggestBrandsRequest)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.ads.googleads.v19.services.SuggestBrandsRequest other) {
      if (other == com.google.ads.googleads.v19.services.SuggestBrandsRequest.getDefaultInstance()) return this;
      if (!other.getCustomerId().isEmpty()) {
        customerId_ = other.customerId_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (other.hasBrandPrefix()) {
        brandPrefix_ = other.brandPrefix_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (!other.selectedBrands_.isEmpty()) {
        if (selectedBrands_.isEmpty()) {
          // Share the other message's immutable list instead of copying it.
          selectedBrands_ = other.selectedBrands_;
          bitField0_ |= 0x00000004;
        } else {
          ensureSelectedBrandsIsMutable();
          selectedBrands_.addAll(other.selectedBrands_);
        }
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10: {
              customerId_ = input.readStringRequireUtf8();
              bitField0_ |= 0x00000001;
              break;
            } // case 10
            case 18: {
              brandPrefix_ = input.readStringRequireUtf8();
              bitField0_ |= 0x00000002;
              break;
            } // case 18
            case 26: {
              java.lang.String s = input.readStringRequireUtf8();
              ensureSelectedBrandsIsMutable();
              selectedBrands_.add(s);
              break;
            } // case 26
            default: {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    private int bitField0_;

    private java.lang.Object customerId_ = "";
    /**
     * <pre>
     * Required. The ID of the customer onto which to apply the brand suggestion
     * operation.
     * </pre>
     *
     * <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     * @return The customerId.
     */
    public java.lang.String getCustomerId() {
      java.lang.Object ref = customerId_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        customerId_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     * <pre>
     * Required. The ID of the customer onto which to apply the brand suggestion
     * operation.
     * </pre>
     *
     * <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     * @return The bytes for customerId.
     */
    public com.google.protobuf.ByteString
        getCustomerIdBytes() {
      java.lang.Object ref = customerId_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        customerId_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     * <pre>
     * Required. The ID of the customer onto which to apply the brand suggestion
     * operation.
     * </pre>
     *
     * <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     * @param value The customerId to set.
     * @return This builder for chaining.
     */
    public Builder setCustomerId(
        java.lang.String value) {
      if (value == null) { throw new NullPointerException(); }
      customerId_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Required. The ID of the customer onto which to apply the brand suggestion
     * operation.
     * </pre>
     *
     * <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     * @return This builder for chaining.
     */
    public Builder clearCustomerId() {
      customerId_ = getDefaultInstance().getCustomerId();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Required. The ID of the customer onto which to apply the brand suggestion
     * operation.
     * </pre>
     *
     * <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     * @param value The bytes for customerId to set.
     * @return This builder for chaining.
     */
    public Builder setCustomerIdBytes(
        com.google.protobuf.ByteString value) {
      if (value == null) { throw new NullPointerException(); }
      checkByteStringIsUtf8(value);
      customerId_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    private java.lang.Object brandPrefix_ = "";
    /**
     * <pre>
     * Required. The prefix of a brand name.
     * </pre>
     *
     * <code>optional string brand_prefix = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     * @return Whether the brandPrefix field is set.
     */
    public boolean hasBrandPrefix() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <pre>
     * Required. The prefix of a brand name.
     * </pre>
     *
     * <code>optional string brand_prefix = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     * @return The brandPrefix.
     */
    public java.lang.String getBrandPrefix() {
      java.lang.Object ref = brandPrefix_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        brandPrefix_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     * <pre>
     * Required. The prefix of a brand name.
     * </pre>
     *
     * <code>optional string brand_prefix = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     * @return The bytes for brandPrefix.
     */
    public com.google.protobuf.ByteString
        getBrandPrefixBytes() {
      java.lang.Object ref = brandPrefix_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        brandPrefix_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     * <pre>
     * Required. The prefix of a brand name.
     * </pre>
     *
     * <code>optional string brand_prefix = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     * @param value The brandPrefix to set.
     * @return This builder for chaining.
     */
    public Builder setBrandPrefix(
        java.lang.String value) {
      if (value == null) { throw new NullPointerException(); }
      brandPrefix_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Required. The prefix of a brand name.
     * </pre>
     *
     * <code>optional string brand_prefix = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     * @return This builder for chaining.
     */
    public Builder clearBrandPrefix() {
      brandPrefix_ = getDefaultInstance().getBrandPrefix();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Required. The prefix of a brand name.
     * </pre>
     *
     * <code>optional string brand_prefix = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     * @param value The bytes for brandPrefix to set.
     * @return This builder for chaining.
     */
    public Builder setBrandPrefixBytes(
        com.google.protobuf.ByteString value) {
      if (value == null) { throw new NullPointerException(); }
      checkByteStringIsUtf8(value);
      brandPrefix_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    private com.google.protobuf.LazyStringArrayList selectedBrands_ =
        com.google.protobuf.LazyStringArrayList.emptyList();
    // Copy-on-write: replaces a shared/immutable backing list with a mutable copy
    // before the first mutation, and records presence in bitField0_.
    private void ensureSelectedBrandsIsMutable() {
      if (!selectedBrands_.isModifiable()) {
        selectedBrands_ = new com.google.protobuf.LazyStringArrayList(selectedBrands_);
      }
      bitField0_ |= 0x00000004;
    }
    /**
     * <pre>
     * Optional. Ids of the brands already selected by advertisers. They will be
     * excluded in response. These are expected to be brand ids not brand names.
     * </pre>
     *
     * <code>repeated string selected_brands = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     * @return A list containing the selectedBrands.
     */
    public com.google.protobuf.ProtocolStringList
        getSelectedBrandsList() {
      selectedBrands_.makeImmutable();
      return selectedBrands_;
    }
    /**
     * <pre>
     * Optional. Ids of the brands already selected by advertisers. They will be
     * excluded in response. These are expected to be brand ids not brand names.
     * </pre>
     *
     * <code>repeated string selected_brands = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     * @return The count of selectedBrands.
     */
    public int getSelectedBrandsCount() {
      return selectedBrands_.size();
    }
    /**
     * <pre>
     * Optional. Ids of the brands already selected by advertisers. They will be
     * excluded in response. These are expected to be brand ids not brand names.
     * </pre>
     *
     * <code>repeated string selected_brands = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     * @param index The index of the element to return.
     * @return The selectedBrands at the given index.
     */
    public java.lang.String getSelectedBrands(int index) {
      return selectedBrands_.get(index);
    }
    /**
     * <pre>
     * Optional. Ids of the brands already selected by advertisers. They will be
     * excluded in response. These are expected to be brand ids not brand names.
     * </pre>
     *
     * <code>repeated string selected_brands = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     * @param index The index of the value to return.
     * @return The bytes of the selectedBrands at the given index.
     */
    public com.google.protobuf.ByteString
        getSelectedBrandsBytes(int index) {
      return selectedBrands_.getByteString(index);
    }
    /**
     * <pre>
     * Optional. Ids of the brands already selected by advertisers. They will be
     * excluded in response. These are expected to be brand ids not brand names.
     * </pre>
     *
     * <code>repeated string selected_brands = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     * @param index The index to set the value at.
     * @param value The selectedBrands to set.
     * @return This builder for chaining.
     */
    public Builder setSelectedBrands(
        int index, java.lang.String value) {
      if (value == null) { throw new NullPointerException(); }
      ensureSelectedBrandsIsMutable();
      selectedBrands_.set(index, value);
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Optional. Ids of the brands already selected by advertisers. They will be
     * excluded in response. These are expected to be brand ids not brand names.
     * </pre>
     *
     * <code>repeated string selected_brands = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     * @param value The selectedBrands to add.
     * @return This builder for chaining.
     */
    public Builder addSelectedBrands(
        java.lang.String value) {
      if (value == null) { throw new NullPointerException(); }
      ensureSelectedBrandsIsMutable();
      selectedBrands_.add(value);
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Optional. Ids of the brands already selected by advertisers. They will be
     * excluded in response. These are expected to be brand ids not brand names.
     * </pre>
     *
     * <code>repeated string selected_brands = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     * @param values The selectedBrands to add.
     * @return This builder for chaining.
     */
    public Builder addAllSelectedBrands(
        java.lang.Iterable<java.lang.String> values) {
      ensureSelectedBrandsIsMutable();
      com.google.protobuf.AbstractMessageLite.Builder.addAll(
          values, selectedBrands_);
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Optional. Ids of the brands already selected by advertisers. They will be
     * excluded in response. These are expected to be brand ids not brand names.
     * </pre>
     *
     * <code>repeated string selected_brands = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     * @return This builder for chaining.
     */
    public Builder clearSelectedBrands() {
      selectedBrands_ =
        com.google.protobuf.LazyStringArrayList.emptyList();
      // (The doubled ';' below is a harmless empty statement emitted by the generator.)
      bitField0_ = (bitField0_ & ~0x00000004);;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Optional. Ids of the brands already selected by advertisers. They will be
     * excluded in response. These are expected to be brand ids not brand names.
     * </pre>
     *
     * <code>repeated string selected_brands = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     * @param value The bytes of the selectedBrands to add.
     * @return This builder for chaining.
     */
    public Builder addSelectedBrandsBytes(
        com.google.protobuf.ByteString value) {
      if (value == null) { throw new NullPointerException(); }
      checkByteStringIsUtf8(value);
      ensureSelectedBrandsIsMutable();
      selectedBrands_.add(value);
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }


    // @@protoc_insertion_point(builder_scope:google.ads.googleads.v19.services.SuggestBrandsRequest)
  }

  // @@protoc_insertion_point(class_scope:google.ads.googleads.v19.services.SuggestBrandsRequest)
  private static final com.google.ads.googleads.v19.services.SuggestBrandsRequest DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.ads.googleads.v19.services.SuggestBrandsRequest();
  }

  public static com.google.ads.googleads.v19.services.SuggestBrandsRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<SuggestBrandsRequest>
      PARSER = new com.google.protobuf.AbstractParser<SuggestBrandsRequest>() {
    @java.lang.Override
    public SuggestBrandsRequest parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      Builder builder = newBuilder();
      try {
        builder.mergeFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(builder.buildPartial());
      } catch (com.google.protobuf.UninitializedMessageException e) {
        throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(e)
            .setUnfinishedMessage(builder.buildPartial());
      }
      return builder.buildPartial();
    }
  };

  public static com.google.protobuf.Parser<SuggestBrandsRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<SuggestBrandsRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.ads.googleads.v19.services.SuggestBrandsRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }

}
|
googleads/google-ads-java | 36,858 | google-ads-stubs-v20/src/main/java/com/google/ads/googleads/v20/services/SuggestBrandsRequest.java | // Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v20/services/brand_suggestion_service.proto
// Protobuf Java Version: 3.25.7
package com.google.ads.googleads.v20.services;
/**
* <pre>
* Request message for
* [BrandSuggestionService.SuggestBrands][google.ads.googleads.v20.services.BrandSuggestionService.SuggestBrands].
* </pre>
*
* Protobuf type {@code google.ads.googleads.v20.services.SuggestBrandsRequest}
*/
public final class SuggestBrandsRequest extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v20.services.SuggestBrandsRequest)
SuggestBrandsRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use SuggestBrandsRequest.newBuilder() to construct.
private SuggestBrandsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private SuggestBrandsRequest() {
customerId_ = "";
brandPrefix_ = "";
selectedBrands_ =
com.google.protobuf.LazyStringArrayList.emptyList();
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new SuggestBrandsRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v20.services.BrandSuggestionServiceProto.internal_static_google_ads_googleads_v20_services_SuggestBrandsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v20.services.BrandSuggestionServiceProto.internal_static_google_ads_googleads_v20_services_SuggestBrandsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v20.services.SuggestBrandsRequest.class, com.google.ads.googleads.v20.services.SuggestBrandsRequest.Builder.class);
}
private int bitField0_;
public static final int CUSTOMER_ID_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object customerId_ = "";
/**
* <pre>
* Required. The ID of the customer onto which to apply the brand suggestion
* operation.
* </pre>
*
* <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The customerId.
*/
@java.lang.Override
public java.lang.String getCustomerId() {
java.lang.Object ref = customerId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
customerId_ = s;
return s;
}
}
/**
* <pre>
* Required. The ID of the customer onto which to apply the brand suggestion
* operation.
* </pre>
*
* <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The bytes for customerId.
*/
@java.lang.Override
public com.google.protobuf.ByteString
getCustomerIdBytes() {
java.lang.Object ref = customerId_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
customerId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int BRAND_PREFIX_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object brandPrefix_ = "";
/**
* <pre>
* Required. The prefix of a brand name.
* </pre>
*
* <code>optional string brand_prefix = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @return Whether the brandPrefix field is set.
*/
@java.lang.Override
public boolean hasBrandPrefix() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* <pre>
* Required. The prefix of a brand name.
* </pre>
*
* <code>optional string brand_prefix = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The brandPrefix.
*/
@java.lang.Override
public java.lang.String getBrandPrefix() {
java.lang.Object ref = brandPrefix_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
brandPrefix_ = s;
return s;
}
}
/**
* <pre>
* Required. The prefix of a brand name.
* </pre>
*
* <code>optional string brand_prefix = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The bytes for brandPrefix.
*/
@java.lang.Override
public com.google.protobuf.ByteString
getBrandPrefixBytes() {
java.lang.Object ref = brandPrefix_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
brandPrefix_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int SELECTED_BRANDS_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private com.google.protobuf.LazyStringArrayList selectedBrands_ =
com.google.protobuf.LazyStringArrayList.emptyList();
/**
* <pre>
* Optional. Ids of the brands already selected by advertisers. They will be
* excluded in response. These are expected to be brand ids not brand names.
* </pre>
*
* <code>repeated string selected_brands = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
* @return A list containing the selectedBrands.
*/
public com.google.protobuf.ProtocolStringList
getSelectedBrandsList() {
return selectedBrands_;
}
/**
* <pre>
* Optional. Ids of the brands already selected by advertisers. They will be
* excluded in response. These are expected to be brand ids not brand names.
* </pre>
*
* <code>repeated string selected_brands = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
* @return The count of selectedBrands.
*/
public int getSelectedBrandsCount() {
return selectedBrands_.size();
}
/**
* <pre>
* Optional. Ids of the brands already selected by advertisers. They will be
* excluded in response. These are expected to be brand ids not brand names.
* </pre>
*
* <code>repeated string selected_brands = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
* @param index The index of the element to return.
* @return The selectedBrands at the given index.
*/
public java.lang.String getSelectedBrands(int index) {
return selectedBrands_.get(index);
}
/**
* <pre>
* Optional. Ids of the brands already selected by advertisers. They will be
* excluded in response. These are expected to be brand ids not brand names.
* </pre>
*
* <code>repeated string selected_brands = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
* @param index The index of the value to return.
* @return The bytes of the selectedBrands at the given index.
*/
public com.google.protobuf.ByteString
getSelectedBrandsBytes(int index) {
return selectedBrands_.getByteString(index);
}
  // Memoized isInitialized result: -1 = not computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No proto2 "required" fields in this message, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes set fields in field-number order: customer_id (1) only when
  // non-empty (proto3 implicit presence), brand_prefix (2) only when its
  // presence bit is set (explicit "optional"), then each selected_brands
  // element (3), then any unknown fields preserved from parsing.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(customerId_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, customerId_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, brandPrefix_);
    }
    for (int i = 0; i < selectedBrands_.size(); i++) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, selectedBrands_.getRaw(i));
    }
    getUnknownFields().writeTo(output);
  }
  // Computes (and memoizes) the serialized byte size; must mirror the field
  // conditions in writeTo exactly so the declared size matches the bytes written.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(customerId_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, customerId_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, brandPrefix_);
    }
    {
      int dataSize = 0;
      for (int i = 0; i < selectedBrands_.size(); i++) {
        dataSize += computeStringSizeNoTag(selectedBrands_.getRaw(i));
      }
      size += dataSize;
      // One 1-byte tag (field 3, wire type 2) per repeated element.
      size += 1 * getSelectedBrandsList().size();
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Value equality over all fields plus unknown fields; for the optional
  // brand_prefix, presence must match before values are compared.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
     return true;
    }
    if (!(obj instanceof com.google.ads.googleads.v20.services.SuggestBrandsRequest)) {
      return super.equals(obj);
    }
    com.google.ads.googleads.v20.services.SuggestBrandsRequest other = (com.google.ads.googleads.v20.services.SuggestBrandsRequest) obj;
    if (!getCustomerId()
        .equals(other.getCustomerId())) return false;
    if (hasBrandPrefix() != other.hasBrandPrefix()) return false;
    if (hasBrandPrefix()) {
      if (!getBrandPrefix()
          .equals(other.getBrandPrefix())) return false;
    }
    if (!getSelectedBrandsList()
        .equals(other.getSelectedBrandsList())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Hash is memoized (0 means "not yet computed"); fields are mixed in only
  // when set/non-empty, consistent with equals above.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + CUSTOMER_ID_FIELD_NUMBER;
    hash = (53 * hash) + getCustomerId().hashCode();
    if (hasBrandPrefix()) {
      hash = (37 * hash) + BRAND_PREFIX_FIELD_NUMBER;
      hash = (53 * hash) + getBrandPrefix().hashCode();
    }
    if (getSelectedBrandsCount() > 0) {
      hash = (37 * hash) + SELECTED_BRANDS_FIELD_NUMBER;
      hash = (53 * hash) + getSelectedBrandsList().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
public static com.google.ads.googleads.v20.services.SuggestBrandsRequest parseFrom(
java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v20.services.SuggestBrandsRequest parseFrom(
java.nio.ByteBuffer data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v20.services.SuggestBrandsRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v20.services.SuggestBrandsRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v20.services.SuggestBrandsRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v20.services.SuggestBrandsRequest parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v20.services.SuggestBrandsRequest parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v20.services.SuggestBrandsRequest parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v20.services.SuggestBrandsRequest parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v20.services.SuggestBrandsRequest parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v20.services.SuggestBrandsRequest parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v20.services.SuggestBrandsRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.ads.googleads.v20.services.SuggestBrandsRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* <pre>
* Request message for
* [BrandSuggestionService.SuggestBrands][google.ads.googleads.v20.services.BrandSuggestionService.SuggestBrands].
* </pre>
*
* Protobuf type {@code google.ads.googleads.v20.services.SuggestBrandsRequest}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.ads.googleads.v20.services.SuggestBrandsRequest)
com.google.ads.googleads.v20.services.SuggestBrandsRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v20.services.BrandSuggestionServiceProto.internal_static_google_ads_googleads_v20_services_SuggestBrandsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v20.services.BrandSuggestionServiceProto.internal_static_google_ads_googleads_v20_services_SuggestBrandsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v20.services.SuggestBrandsRequest.class, com.google.ads.googleads.v20.services.SuggestBrandsRequest.Builder.class);
}
// Construct using com.google.ads.googleads.v20.services.SuggestBrandsRequest.newBuilder()
private Builder() {
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
customerId_ = "";
brandPrefix_ = "";
selectedBrands_ =
com.google.protobuf.LazyStringArrayList.emptyList();
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return com.google.ads.googleads.v20.services.BrandSuggestionServiceProto.internal_static_google_ads_googleads_v20_services_SuggestBrandsRequest_descriptor;
}
@java.lang.Override
public com.google.ads.googleads.v20.services.SuggestBrandsRequest getDefaultInstanceForType() {
return com.google.ads.googleads.v20.services.SuggestBrandsRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.ads.googleads.v20.services.SuggestBrandsRequest build() {
com.google.ads.googleads.v20.services.SuggestBrandsRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
    // Builds the message without enforcing required-field checks (none exist
    // here); copies builder state into the result only for fields whose
    // builder presence bit is set.
    @java.lang.Override
    public com.google.ads.googleads.v20.services.SuggestBrandsRequest buildPartial() {
      com.google.ads.googleads.v20.services.SuggestBrandsRequest result = new com.google.ads.googleads.v20.services.SuggestBrandsRequest(this);
      if (bitField0_ != 0) { buildPartial0(result); }
      onBuilt();
      return result;
    }
    private void buildPartial0(com.google.ads.googleads.v20.services.SuggestBrandsRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.customerId_ = customerId_;
      }
      // Note the bit remapping: builder bit 0x2 (brand_prefix) becomes message
      // bit 0x1, since the message tracks presence only for the optional field.
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.brandPrefix_ = brandPrefix_;
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        // Freeze the list so the built message shares it safely without copying.
        selectedBrands_.makeImmutable();
        result.selectedBrands_ = selectedBrands_;
      }
      result.bitField0_ |= to_bitField0_;
    }
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.ads.googleads.v20.services.SuggestBrandsRequest) {
return mergeFrom((com.google.ads.googleads.v20.services.SuggestBrandsRequest)other);
} else {
super.mergeFrom(other);
return this;
}
}
    // Merges another SuggestBrandsRequest into this builder: non-empty
    // customer_id overwrites, brand_prefix overwrites only if set in `other`,
    // and selected_brands elements are appended (or the other list is adopted
    // wholesale when this builder's list is still empty).
    public Builder mergeFrom(com.google.ads.googleads.v20.services.SuggestBrandsRequest other) {
      if (other == com.google.ads.googleads.v20.services.SuggestBrandsRequest.getDefaultInstance()) return this;
      if (!other.getCustomerId().isEmpty()) {
        customerId_ = other.customerId_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (other.hasBrandPrefix()) {
        brandPrefix_ = other.brandPrefix_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (!other.selectedBrands_.isEmpty()) {
        if (selectedBrands_.isEmpty()) {
          // Adopt the other (immutable) list directly; copied lazily on first mutation.
          selectedBrands_ = other.selectedBrands_;
          bitField0_ |= 0x00000004;
        } else {
          ensureSelectedBrandsIsMutable();
          selectedBrands_.addAll(other.selectedBrands_);
        }
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      // No required fields, so a builder is always buildable.
      return true;
    }
    // Wire-format parse loop: dispatches on the tag (field number << 3 | wire
    // type) — 10/18/26 are the length-delimited tags for fields 1/2/3. Unknown
    // tags are preserved via parseUnknownField; tag 0 (or an end-group tag)
    // terminates the loop.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10: {
              customerId_ = input.readStringRequireUtf8();
              bitField0_ |= 0x00000001;
              break;
            } // case 10
            case 18: {
              brandPrefix_ = input.readStringRequireUtf8();
              bitField0_ |= 0x00000002;
              break;
            } // case 18
            case 26: {
              java.lang.String s = input.readStringRequireUtf8();
              ensureSelectedBrandsIsMutable();
              selectedBrands_.add(s);
              break;
            } // case 26
            default: {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        // Notify parent builders even on failure so partial state is observed.
        onChanged();
      } // finally
      return this;
    }
private int bitField0_;
private java.lang.Object customerId_ = "";
/**
* <pre>
* Required. The ID of the customer onto which to apply the brand suggestion
* operation.
* </pre>
*
* <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The customerId.
*/
public java.lang.String getCustomerId() {
java.lang.Object ref = customerId_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
customerId_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <pre>
* Required. The ID of the customer onto which to apply the brand suggestion
* operation.
* </pre>
*
* <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The bytes for customerId.
*/
public com.google.protobuf.ByteString
getCustomerIdBytes() {
java.lang.Object ref = customerId_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
customerId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <pre>
* Required. The ID of the customer onto which to apply the brand suggestion
* operation.
* </pre>
*
* <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
* @param value The customerId to set.
* @return This builder for chaining.
*/
public Builder setCustomerId(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
customerId_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* <pre>
* Required. The ID of the customer onto which to apply the brand suggestion
* operation.
* </pre>
*
* <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
* @return This builder for chaining.
*/
public Builder clearCustomerId() {
customerId_ = getDefaultInstance().getCustomerId();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
* <pre>
* Required. The ID of the customer onto which to apply the brand suggestion
* operation.
* </pre>
*
* <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
* @param value The bytes for customerId to set.
* @return This builder for chaining.
*/
public Builder setCustomerIdBytes(
com.google.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
checkByteStringIsUtf8(value);
customerId_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private java.lang.Object brandPrefix_ = "";
/**
* <pre>
* Required. The prefix of a brand name.
* </pre>
*
* <code>optional string brand_prefix = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @return Whether the brandPrefix field is set.
*/
public boolean hasBrandPrefix() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* <pre>
* Required. The prefix of a brand name.
* </pre>
*
* <code>optional string brand_prefix = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The brandPrefix.
*/
public java.lang.String getBrandPrefix() {
java.lang.Object ref = brandPrefix_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
brandPrefix_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <pre>
* Required. The prefix of a brand name.
* </pre>
*
* <code>optional string brand_prefix = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The bytes for brandPrefix.
*/
public com.google.protobuf.ByteString
getBrandPrefixBytes() {
java.lang.Object ref = brandPrefix_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
brandPrefix_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <pre>
* Required. The prefix of a brand name.
* </pre>
*
* <code>optional string brand_prefix = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @param value The brandPrefix to set.
* @return This builder for chaining.
*/
public Builder setBrandPrefix(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
brandPrefix_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
* <pre>
* Required. The prefix of a brand name.
* </pre>
*
* <code>optional string brand_prefix = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @return This builder for chaining.
*/
public Builder clearBrandPrefix() {
brandPrefix_ = getDefaultInstance().getBrandPrefix();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
* <pre>
* Required. The prefix of a brand name.
* </pre>
*
* <code>optional string brand_prefix = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @param value The bytes for brandPrefix to set.
* @return This builder for chaining.
*/
public Builder setBrandPrefixBytes(
com.google.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
checkByteStringIsUtf8(value);
brandPrefix_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
    // Backing list for selected_brands; starts as the shared immutable empty list.
    private com.google.protobuf.LazyStringArrayList selectedBrands_ =
        com.google.protobuf.LazyStringArrayList.emptyList();
    // Copy-on-write guard: replaces an immutable/shared list with a private
    // mutable copy before any mutation, and marks the field's presence bit.
    private void ensureSelectedBrandsIsMutable() {
      if (!selectedBrands_.isModifiable()) {
        selectedBrands_ = new com.google.protobuf.LazyStringArrayList(selectedBrands_);
      }
      bitField0_ |= 0x00000004;
    }
/**
* <pre>
* Optional. Ids of the brands already selected by advertisers. They will be
* excluded in response. These are expected to be brand ids not brand names.
* </pre>
*
* <code>repeated string selected_brands = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
* @return A list containing the selectedBrands.
*/
public com.google.protobuf.ProtocolStringList
getSelectedBrandsList() {
selectedBrands_.makeImmutable();
return selectedBrands_;
}
/**
* <pre>
* Optional. Ids of the brands already selected by advertisers. They will be
* excluded in response. These are expected to be brand ids not brand names.
* </pre>
*
* <code>repeated string selected_brands = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
* @return The count of selectedBrands.
*/
public int getSelectedBrandsCount() {
return selectedBrands_.size();
}
/**
* <pre>
* Optional. Ids of the brands already selected by advertisers. They will be
* excluded in response. These are expected to be brand ids not brand names.
* </pre>
*
* <code>repeated string selected_brands = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
* @param index The index of the element to return.
* @return The selectedBrands at the given index.
*/
public java.lang.String getSelectedBrands(int index) {
return selectedBrands_.get(index);
}
/**
* <pre>
* Optional. Ids of the brands already selected by advertisers. They will be
* excluded in response. These are expected to be brand ids not brand names.
* </pre>
*
* <code>repeated string selected_brands = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
* @param index The index of the value to return.
* @return The bytes of the selectedBrands at the given index.
*/
public com.google.protobuf.ByteString
getSelectedBrandsBytes(int index) {
return selectedBrands_.getByteString(index);
}
/**
* <pre>
* Optional. Ids of the brands already selected by advertisers. They will be
* excluded in response. These are expected to be brand ids not brand names.
* </pre>
*
* <code>repeated string selected_brands = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
* @param index The index to set the value at.
* @param value The selectedBrands to set.
* @return This builder for chaining.
*/
public Builder setSelectedBrands(
int index, java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
ensureSelectedBrandsIsMutable();
selectedBrands_.set(index, value);
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
* <pre>
* Optional. Ids of the brands already selected by advertisers. They will be
* excluded in response. These are expected to be brand ids not brand names.
* </pre>
*
* <code>repeated string selected_brands = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
* @param value The selectedBrands to add.
* @return This builder for chaining.
*/
public Builder addSelectedBrands(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
ensureSelectedBrandsIsMutable();
selectedBrands_.add(value);
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
* <pre>
* Optional. Ids of the brands already selected by advertisers. They will be
* excluded in response. These are expected to be brand ids not brand names.
* </pre>
*
* <code>repeated string selected_brands = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
* @param values The selectedBrands to add.
* @return This builder for chaining.
*/
public Builder addAllSelectedBrands(
java.lang.Iterable<java.lang.String> values) {
ensureSelectedBrandsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(
values, selectedBrands_);
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
* <pre>
* Optional. Ids of the brands already selected by advertisers. They will be
* excluded in response. These are expected to be brand ids not brand names.
* </pre>
*
* <code>repeated string selected_brands = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
* @return This builder for chaining.
*/
public Builder clearSelectedBrands() {
selectedBrands_ =
com.google.protobuf.LazyStringArrayList.emptyList();
bitField0_ = (bitField0_ & ~0x00000004);;
onChanged();
return this;
}
/**
* <pre>
* Optional. Ids of the brands already selected by advertisers. They will be
* excluded in response. These are expected to be brand ids not brand names.
* </pre>
*
* <code>repeated string selected_brands = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
* @param value The bytes of the selectedBrands to add.
* @return This builder for chaining.
*/
public Builder addSelectedBrandsBytes(
com.google.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
checkByteStringIsUtf8(value);
ensureSelectedBrandsIsMutable();
selectedBrands_.add(value);
bitField0_ |= 0x00000004;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.ads.googleads.v20.services.SuggestBrandsRequest)
}
// @@protoc_insertion_point(class_scope:google.ads.googleads.v20.services.SuggestBrandsRequest)
private static final com.google.ads.googleads.v20.services.SuggestBrandsRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.ads.googleads.v20.services.SuggestBrandsRequest();
}
public static com.google.ads.googleads.v20.services.SuggestBrandsRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<SuggestBrandsRequest>
PARSER = new com.google.protobuf.AbstractParser<SuggestBrandsRequest>() {
@java.lang.Override
public SuggestBrandsRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<SuggestBrandsRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<SuggestBrandsRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.ads.googleads.v20.services.SuggestBrandsRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleads/google-ads-java | 36,858 | google-ads-stubs-v21/src/main/java/com/google/ads/googleads/v21/services/SuggestBrandsRequest.java | // Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v21/services/brand_suggestion_service.proto
// Protobuf Java Version: 3.25.7
package com.google.ads.googleads.v21.services;
/**
* <pre>
* Request message for
* [BrandSuggestionService.SuggestBrands][google.ads.googleads.v21.services.BrandSuggestionService.SuggestBrands].
* </pre>
*
* Protobuf type {@code google.ads.googleads.v21.services.SuggestBrandsRequest}
*/
public final class SuggestBrandsRequest extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v21.services.SuggestBrandsRequest)
SuggestBrandsRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use SuggestBrandsRequest.newBuilder() to construct.
private SuggestBrandsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private SuggestBrandsRequest() {
customerId_ = "";
brandPrefix_ = "";
selectedBrands_ =
com.google.protobuf.LazyStringArrayList.emptyList();
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new SuggestBrandsRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v21.services.BrandSuggestionServiceProto.internal_static_google_ads_googleads_v21_services_SuggestBrandsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v21.services.BrandSuggestionServiceProto.internal_static_google_ads_googleads_v21_services_SuggestBrandsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v21.services.SuggestBrandsRequest.class, com.google.ads.googleads.v21.services.SuggestBrandsRequest.Builder.class);
}
private int bitField0_;
public static final int CUSTOMER_ID_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object customerId_ = "";
/**
* <pre>
* Required. The ID of the customer onto which to apply the brand suggestion
* operation.
* </pre>
*
* <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The customerId.
*/
@java.lang.Override
public java.lang.String getCustomerId() {
java.lang.Object ref = customerId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
customerId_ = s;
return s;
}
}
/**
* <pre>
* Required. The ID of the customer onto which to apply the brand suggestion
* operation.
* </pre>
*
* <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The bytes for customerId.
*/
@java.lang.Override
public com.google.protobuf.ByteString
getCustomerIdBytes() {
java.lang.Object ref = customerId_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
customerId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int BRAND_PREFIX_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object brandPrefix_ = "";
/**
* <pre>
* Required. The prefix of a brand name.
* </pre>
*
* <code>optional string brand_prefix = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @return Whether the brandPrefix field is set.
*/
@java.lang.Override
public boolean hasBrandPrefix() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* <pre>
* Required. The prefix of a brand name.
* </pre>
*
* <code>optional string brand_prefix = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The brandPrefix.
*/
@java.lang.Override
public java.lang.String getBrandPrefix() {
java.lang.Object ref = brandPrefix_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
brandPrefix_ = s;
return s;
}
}
/**
* <pre>
* Required. The prefix of a brand name.
* </pre>
*
* <code>optional string brand_prefix = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The bytes for brandPrefix.
*/
@java.lang.Override
public com.google.protobuf.ByteString
getBrandPrefixBytes() {
java.lang.Object ref = brandPrefix_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
brandPrefix_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int SELECTED_BRANDS_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private com.google.protobuf.LazyStringArrayList selectedBrands_ =
com.google.protobuf.LazyStringArrayList.emptyList();
/**
* <pre>
* Optional. Ids of the brands already selected by advertisers. They will be
* excluded in response. These are expected to be brand ids not brand names.
* </pre>
*
* <code>repeated string selected_brands = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
* @return A list containing the selectedBrands.
*/
public com.google.protobuf.ProtocolStringList
getSelectedBrandsList() {
return selectedBrands_;
}
/**
* <pre>
* Optional. Ids of the brands already selected by advertisers. They will be
* excluded in response. These are expected to be brand ids not brand names.
* </pre>
*
* <code>repeated string selected_brands = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
* @return The count of selectedBrands.
*/
public int getSelectedBrandsCount() {
return selectedBrands_.size();
}
/**
* <pre>
* Optional. Ids of the brands already selected by advertisers. They will be
* excluded in response. These are expected to be brand ids not brand names.
* </pre>
*
* <code>repeated string selected_brands = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
* @param index The index of the element to return.
* @return The selectedBrands at the given index.
*/
public java.lang.String getSelectedBrands(int index) {
return selectedBrands_.get(index);
}
/**
* <pre>
* Optional. Ids of the brands already selected by advertisers. They will be
* excluded in response. These are expected to be brand ids not brand names.
* </pre>
*
* <code>repeated string selected_brands = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
* @param index The index of the value to return.
* @return The bytes of the selectedBrands at the given index.
*/
public com.google.protobuf.ByteString
getSelectedBrandsBytes(int index) {
return selectedBrands_.getByteString(index);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(customerId_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, customerId_);
}
if (((bitField0_ & 0x00000001) != 0)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, brandPrefix_);
}
for (int i = 0; i < selectedBrands_.size(); i++) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 3, selectedBrands_.getRaw(i));
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(customerId_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, customerId_);
}
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, brandPrefix_);
}
{
int dataSize = 0;
for (int i = 0; i < selectedBrands_.size(); i++) {
dataSize += computeStringSizeNoTag(selectedBrands_.getRaw(i));
}
size += dataSize;
size += 1 * getSelectedBrandsList().size();
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.ads.googleads.v21.services.SuggestBrandsRequest)) {
return super.equals(obj);
}
com.google.ads.googleads.v21.services.SuggestBrandsRequest other = (com.google.ads.googleads.v21.services.SuggestBrandsRequest) obj;
if (!getCustomerId()
.equals(other.getCustomerId())) return false;
if (hasBrandPrefix() != other.hasBrandPrefix()) return false;
if (hasBrandPrefix()) {
if (!getBrandPrefix()
.equals(other.getBrandPrefix())) return false;
}
if (!getSelectedBrandsList()
.equals(other.getSelectedBrandsList())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + CUSTOMER_ID_FIELD_NUMBER;
hash = (53 * hash) + getCustomerId().hashCode();
if (hasBrandPrefix()) {
hash = (37 * hash) + BRAND_PREFIX_FIELD_NUMBER;
hash = (53 * hash) + getBrandPrefix().hashCode();
}
if (getSelectedBrandsCount() > 0) {
hash = (37 * hash) + SELECTED_BRANDS_FIELD_NUMBER;
hash = (53 * hash) + getSelectedBrandsList().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.ads.googleads.v21.services.SuggestBrandsRequest parseFrom(
java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v21.services.SuggestBrandsRequest parseFrom(
java.nio.ByteBuffer data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v21.services.SuggestBrandsRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v21.services.SuggestBrandsRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v21.services.SuggestBrandsRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v21.services.SuggestBrandsRequest parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v21.services.SuggestBrandsRequest parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v21.services.SuggestBrandsRequest parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v21.services.SuggestBrandsRequest parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v21.services.SuggestBrandsRequest parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v21.services.SuggestBrandsRequest parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v21.services.SuggestBrandsRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.ads.googleads.v21.services.SuggestBrandsRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* <pre>
* Request message for
* [BrandSuggestionService.SuggestBrands][google.ads.googleads.v21.services.BrandSuggestionService.SuggestBrands].
* </pre>
*
* Protobuf type {@code google.ads.googleads.v21.services.SuggestBrandsRequest}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.ads.googleads.v21.services.SuggestBrandsRequest)
com.google.ads.googleads.v21.services.SuggestBrandsRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v21.services.BrandSuggestionServiceProto.internal_static_google_ads_googleads_v21_services_SuggestBrandsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v21.services.BrandSuggestionServiceProto.internal_static_google_ads_googleads_v21_services_SuggestBrandsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v21.services.SuggestBrandsRequest.class, com.google.ads.googleads.v21.services.SuggestBrandsRequest.Builder.class);
}
// Construct using com.google.ads.googleads.v21.services.SuggestBrandsRequest.newBuilder()
private Builder() {
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
customerId_ = "";
brandPrefix_ = "";
selectedBrands_ =
com.google.protobuf.LazyStringArrayList.emptyList();
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return com.google.ads.googleads.v21.services.BrandSuggestionServiceProto.internal_static_google_ads_googleads_v21_services_SuggestBrandsRequest_descriptor;
}
@java.lang.Override
public com.google.ads.googleads.v21.services.SuggestBrandsRequest getDefaultInstanceForType() {
return com.google.ads.googleads.v21.services.SuggestBrandsRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.ads.googleads.v21.services.SuggestBrandsRequest build() {
com.google.ads.googleads.v21.services.SuggestBrandsRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.ads.googleads.v21.services.SuggestBrandsRequest buildPartial() {
com.google.ads.googleads.v21.services.SuggestBrandsRequest result = new com.google.ads.googleads.v21.services.SuggestBrandsRequest(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(com.google.ads.googleads.v21.services.SuggestBrandsRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.customerId_ = customerId_;
}
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.brandPrefix_ = brandPrefix_;
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
selectedBrands_.makeImmutable();
result.selectedBrands_ = selectedBrands_;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.ads.googleads.v21.services.SuggestBrandsRequest) {
return mergeFrom((com.google.ads.googleads.v21.services.SuggestBrandsRequest)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.ads.googleads.v21.services.SuggestBrandsRequest other) {
if (other == com.google.ads.googleads.v21.services.SuggestBrandsRequest.getDefaultInstance()) return this;
if (!other.getCustomerId().isEmpty()) {
customerId_ = other.customerId_;
bitField0_ |= 0x00000001;
onChanged();
}
if (other.hasBrandPrefix()) {
brandPrefix_ = other.brandPrefix_;
bitField0_ |= 0x00000002;
onChanged();
}
if (!other.selectedBrands_.isEmpty()) {
if (selectedBrands_.isEmpty()) {
selectedBrands_ = other.selectedBrands_;
bitField0_ |= 0x00000004;
} else {
ensureSelectedBrandsIsMutable();
selectedBrands_.addAll(other.selectedBrands_);
}
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
customerId_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18: {
brandPrefix_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
case 26: {
java.lang.String s = input.readStringRequireUtf8();
ensureSelectedBrandsIsMutable();
selectedBrands_.add(s);
break;
} // case 26
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object customerId_ = "";
/**
* <pre>
* Required. The ID of the customer onto which to apply the brand suggestion
* operation.
* </pre>
*
* <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The customerId.
*/
public java.lang.String getCustomerId() {
java.lang.Object ref = customerId_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
customerId_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <pre>
* Required. The ID of the customer onto which to apply the brand suggestion
* operation.
* </pre>
*
* <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The bytes for customerId.
*/
public com.google.protobuf.ByteString
getCustomerIdBytes() {
java.lang.Object ref = customerId_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
customerId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <pre>
* Required. The ID of the customer onto which to apply the brand suggestion
* operation.
* </pre>
*
* <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
* @param value The customerId to set.
* @return This builder for chaining.
*/
public Builder setCustomerId(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
customerId_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* <pre>
* Required. The ID of the customer onto which to apply the brand suggestion
* operation.
* </pre>
*
* <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
* @return This builder for chaining.
*/
public Builder clearCustomerId() {
customerId_ = getDefaultInstance().getCustomerId();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
* <pre>
* Required. The ID of the customer onto which to apply the brand suggestion
* operation.
* </pre>
*
* <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
* @param value The bytes for customerId to set.
* @return This builder for chaining.
*/
public Builder setCustomerIdBytes(
com.google.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
checkByteStringIsUtf8(value);
customerId_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private java.lang.Object brandPrefix_ = "";
/**
* <pre>
* Required. The prefix of a brand name.
* </pre>
*
* <code>optional string brand_prefix = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @return Whether the brandPrefix field is set.
*/
public boolean hasBrandPrefix() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* <pre>
* Required. The prefix of a brand name.
* </pre>
*
* <code>optional string brand_prefix = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The brandPrefix.
*/
public java.lang.String getBrandPrefix() {
java.lang.Object ref = brandPrefix_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
brandPrefix_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <pre>
* Required. The prefix of a brand name.
* </pre>
*
* <code>optional string brand_prefix = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The bytes for brandPrefix.
*/
public com.google.protobuf.ByteString
getBrandPrefixBytes() {
java.lang.Object ref = brandPrefix_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
brandPrefix_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <pre>
* Required. The prefix of a brand name.
* </pre>
*
* <code>optional string brand_prefix = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @param value The brandPrefix to set.
* @return This builder for chaining.
*/
public Builder setBrandPrefix(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
brandPrefix_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
* <pre>
* Required. The prefix of a brand name.
* </pre>
*
* <code>optional string brand_prefix = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @return This builder for chaining.
*/
public Builder clearBrandPrefix() {
brandPrefix_ = getDefaultInstance().getBrandPrefix();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
* <pre>
* Required. The prefix of a brand name.
* </pre>
*
* <code>optional string brand_prefix = 2 [(.google.api.field_behavior) = REQUIRED];</code>
* @param value The bytes for brandPrefix to set.
* @return This builder for chaining.
*/
public Builder setBrandPrefixBytes(
com.google.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
checkByteStringIsUtf8(value);
brandPrefix_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private com.google.protobuf.LazyStringArrayList selectedBrands_ =
com.google.protobuf.LazyStringArrayList.emptyList();
private void ensureSelectedBrandsIsMutable() {
if (!selectedBrands_.isModifiable()) {
selectedBrands_ = new com.google.protobuf.LazyStringArrayList(selectedBrands_);
}
bitField0_ |= 0x00000004;
}
/**
* <pre>
* Optional. Ids of the brands already selected by advertisers. They will be
* excluded in response. These are expected to be brand ids not brand names.
* </pre>
*
* <code>repeated string selected_brands = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
* @return A list containing the selectedBrands.
*/
public com.google.protobuf.ProtocolStringList
getSelectedBrandsList() {
selectedBrands_.makeImmutable();
return selectedBrands_;
}
/**
* <pre>
* Optional. Ids of the brands already selected by advertisers. They will be
* excluded in response. These are expected to be brand ids not brand names.
* </pre>
*
* <code>repeated string selected_brands = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
* @return The count of selectedBrands.
*/
public int getSelectedBrandsCount() {
return selectedBrands_.size();
}
/**
* <pre>
* Optional. Ids of the brands already selected by advertisers. They will be
* excluded in response. These are expected to be brand ids not brand names.
* </pre>
*
* <code>repeated string selected_brands = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
* @param index The index of the element to return.
* @return The selectedBrands at the given index.
*/
public java.lang.String getSelectedBrands(int index) {
return selectedBrands_.get(index);
}
/**
* <pre>
* Optional. Ids of the brands already selected by advertisers. They will be
* excluded in response. These are expected to be brand ids not brand names.
* </pre>
*
* <code>repeated string selected_brands = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
* @param index The index of the value to return.
* @return The bytes of the selectedBrands at the given index.
*/
public com.google.protobuf.ByteString
getSelectedBrandsBytes(int index) {
return selectedBrands_.getByteString(index);
}
/**
* <pre>
* Optional. Ids of the brands already selected by advertisers. They will be
* excluded in response. These are expected to be brand ids not brand names.
* </pre>
*
* <code>repeated string selected_brands = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
* @param index The index to set the value at.
* @param value The selectedBrands to set.
* @return This builder for chaining.
*/
public Builder setSelectedBrands(
int index, java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
ensureSelectedBrandsIsMutable();
selectedBrands_.set(index, value);
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
* <pre>
* Optional. Ids of the brands already selected by advertisers. They will be
* excluded in response. These are expected to be brand ids not brand names.
* </pre>
*
* <code>repeated string selected_brands = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
* @param value The selectedBrands to add.
* @return This builder for chaining.
*/
public Builder addSelectedBrands(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
ensureSelectedBrandsIsMutable();
selectedBrands_.add(value);
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
* <pre>
* Optional. Ids of the brands already selected by advertisers. They will be
* excluded in response. These are expected to be brand ids not brand names.
* </pre>
*
* <code>repeated string selected_brands = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
* @param values The selectedBrands to add.
* @return This builder for chaining.
*/
public Builder addAllSelectedBrands(
java.lang.Iterable<java.lang.String> values) {
ensureSelectedBrandsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(
values, selectedBrands_);
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
* <pre>
* Optional. Ids of the brands already selected by advertisers. They will be
* excluded in response. These are expected to be brand ids not brand names.
* </pre>
*
* <code>repeated string selected_brands = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
* @return This builder for chaining.
*/
public Builder clearSelectedBrands() {
selectedBrands_ =
com.google.protobuf.LazyStringArrayList.emptyList();
bitField0_ = (bitField0_ & ~0x00000004);;
onChanged();
return this;
}
/**
* <pre>
* Optional. Ids of the brands already selected by advertisers. They will be
* excluded in response. These are expected to be brand ids not brand names.
* </pre>
*
* <code>repeated string selected_brands = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
* @param value The bytes of the selectedBrands to add.
* @return This builder for chaining.
*/
public Builder addSelectedBrandsBytes(
com.google.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
checkByteStringIsUtf8(value);
ensureSelectedBrandsIsMutable();
selectedBrands_.add(value);
bitField0_ |= 0x00000004;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.ads.googleads.v21.services.SuggestBrandsRequest)
}
// @@protoc_insertion_point(class_scope:google.ads.googleads.v21.services.SuggestBrandsRequest)
private static final com.google.ads.googleads.v21.services.SuggestBrandsRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.ads.googleads.v21.services.SuggestBrandsRequest();
}
public static com.google.ads.googleads.v21.services.SuggestBrandsRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<SuggestBrandsRequest>
PARSER = new com.google.protobuf.AbstractParser<SuggestBrandsRequest>() {
@java.lang.Override
public SuggestBrandsRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<SuggestBrandsRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<SuggestBrandsRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.ads.googleads.v21.services.SuggestBrandsRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 36,716 | java-retail/proto-google-cloud-retail-v2/src/main/java/com/google/cloud/retail/v2/ListCatalogsResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/retail/v2/catalog_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.retail.v2;
/**
*
*
* <pre>
* Response for
* [CatalogService.ListCatalogs][google.cloud.retail.v2.CatalogService.ListCatalogs]
* method.
* </pre>
*
* Protobuf type {@code google.cloud.retail.v2.ListCatalogsResponse}
*/
public final class ListCatalogsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.retail.v2.ListCatalogsResponse)
ListCatalogsResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListCatalogsResponse.newBuilder() to construct.
private ListCatalogsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListCatalogsResponse() {
catalogs_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListCatalogsResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.retail.v2.CatalogServiceProto
.internal_static_google_cloud_retail_v2_ListCatalogsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.retail.v2.CatalogServiceProto
.internal_static_google_cloud_retail_v2_ListCatalogsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.retail.v2.ListCatalogsResponse.class,
com.google.cloud.retail.v2.ListCatalogsResponse.Builder.class);
}
public static final int CATALOGS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.retail.v2.Catalog> catalogs_;
/**
*
*
* <pre>
* All the customer's [Catalog][google.cloud.retail.v2.Catalog]s.
* </pre>
*
* <code>repeated .google.cloud.retail.v2.Catalog catalogs = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.retail.v2.Catalog> getCatalogsList() {
return catalogs_;
}
/**
*
*
* <pre>
* All the customer's [Catalog][google.cloud.retail.v2.Catalog]s.
* </pre>
*
* <code>repeated .google.cloud.retail.v2.Catalog catalogs = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.retail.v2.CatalogOrBuilder>
getCatalogsOrBuilderList() {
return catalogs_;
}
/**
*
*
* <pre>
* All the customer's [Catalog][google.cloud.retail.v2.Catalog]s.
* </pre>
*
* <code>repeated .google.cloud.retail.v2.Catalog catalogs = 1;</code>
*/
@java.lang.Override
public int getCatalogsCount() {
return catalogs_.size();
}
/**
*
*
* <pre>
* All the customer's [Catalog][google.cloud.retail.v2.Catalog]s.
* </pre>
*
* <code>repeated .google.cloud.retail.v2.Catalog catalogs = 1;</code>
*/
@java.lang.Override
public com.google.cloud.retail.v2.Catalog getCatalogs(int index) {
return catalogs_.get(index);
}
/**
*
*
* <pre>
* All the customer's [Catalog][google.cloud.retail.v2.Catalog]s.
* </pre>
*
* <code>repeated .google.cloud.retail.v2.Catalog catalogs = 1;</code>
*/
@java.lang.Override
public com.google.cloud.retail.v2.CatalogOrBuilder getCatalogsOrBuilder(int index) {
return catalogs_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token that can be sent as
* [ListCatalogsRequest.page_token][google.cloud.retail.v2.ListCatalogsRequest.page_token]
* to retrieve the next page. If this field is omitted, there are no
* subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* A token that can be sent as
* [ListCatalogsRequest.page_token][google.cloud.retail.v2.ListCatalogsRequest.page_token]
* to retrieve the next page. If this field is omitted, there are no
* subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < catalogs_.size(); i++) {
output.writeMessage(1, catalogs_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < catalogs_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, catalogs_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.retail.v2.ListCatalogsResponse)) {
return super.equals(obj);
}
com.google.cloud.retail.v2.ListCatalogsResponse other =
(com.google.cloud.retail.v2.ListCatalogsResponse) obj;
if (!getCatalogsList().equals(other.getCatalogsList())) return false;
if (!getNextPageToken().equals(other.getNextPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getCatalogsCount() > 0) {
hash = (37 * hash) + CATALOGS_FIELD_NUMBER;
hash = (53 * hash) + getCatalogsList().hashCode();
}
hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getNextPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.retail.v2.ListCatalogsResponse parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.retail.v2.ListCatalogsResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.retail.v2.ListCatalogsResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.retail.v2.ListCatalogsResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.retail.v2.ListCatalogsResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.retail.v2.ListCatalogsResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.retail.v2.ListCatalogsResponse parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.retail.v2.ListCatalogsResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.retail.v2.ListCatalogsResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.retail.v2.ListCatalogsResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.retail.v2.ListCatalogsResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.retail.v2.ListCatalogsResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.retail.v2.ListCatalogsResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Response for
* [CatalogService.ListCatalogs][google.cloud.retail.v2.CatalogService.ListCatalogs]
* method.
* </pre>
*
* Protobuf type {@code google.cloud.retail.v2.ListCatalogsResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.retail.v2.ListCatalogsResponse)
com.google.cloud.retail.v2.ListCatalogsResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.retail.v2.CatalogServiceProto
.internal_static_google_cloud_retail_v2_ListCatalogsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.retail.v2.CatalogServiceProto
.internal_static_google_cloud_retail_v2_ListCatalogsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.retail.v2.ListCatalogsResponse.class,
com.google.cloud.retail.v2.ListCatalogsResponse.Builder.class);
}
// Construct using com.google.cloud.retail.v2.ListCatalogsResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (catalogsBuilder_ == null) {
catalogs_ = java.util.Collections.emptyList();
} else {
catalogs_ = null;
catalogsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.retail.v2.CatalogServiceProto
.internal_static_google_cloud_retail_v2_ListCatalogsResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.retail.v2.ListCatalogsResponse getDefaultInstanceForType() {
return com.google.cloud.retail.v2.ListCatalogsResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.retail.v2.ListCatalogsResponse build() {
com.google.cloud.retail.v2.ListCatalogsResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.retail.v2.ListCatalogsResponse buildPartial() {
com.google.cloud.retail.v2.ListCatalogsResponse result =
new com.google.cloud.retail.v2.ListCatalogsResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.cloud.retail.v2.ListCatalogsResponse result) {
if (catalogsBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
catalogs_ = java.util.Collections.unmodifiableList(catalogs_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.catalogs_ = catalogs_;
} else {
result.catalogs_ = catalogsBuilder_.build();
}
}
private void buildPartial0(com.google.cloud.retail.v2.ListCatalogsResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.retail.v2.ListCatalogsResponse) {
return mergeFrom((com.google.cloud.retail.v2.ListCatalogsResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.retail.v2.ListCatalogsResponse other) {
if (other == com.google.cloud.retail.v2.ListCatalogsResponse.getDefaultInstance())
return this;
if (catalogsBuilder_ == null) {
if (!other.catalogs_.isEmpty()) {
if (catalogs_.isEmpty()) {
catalogs_ = other.catalogs_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureCatalogsIsMutable();
catalogs_.addAll(other.catalogs_);
}
onChanged();
}
} else {
if (!other.catalogs_.isEmpty()) {
if (catalogsBuilder_.isEmpty()) {
catalogsBuilder_.dispose();
catalogsBuilder_ = null;
catalogs_ = other.catalogs_;
bitField0_ = (bitField0_ & ~0x00000001);
catalogsBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getCatalogsFieldBuilder()
: null;
} else {
catalogsBuilder_.addAllMessages(other.catalogs_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.cloud.retail.v2.Catalog m =
input.readMessage(
com.google.cloud.retail.v2.Catalog.parser(), extensionRegistry);
if (catalogsBuilder_ == null) {
ensureCatalogsIsMutable();
catalogs_.add(m);
} else {
catalogsBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.cloud.retail.v2.Catalog> catalogs_ =
java.util.Collections.emptyList();
private void ensureCatalogsIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
catalogs_ = new java.util.ArrayList<com.google.cloud.retail.v2.Catalog>(catalogs_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.retail.v2.Catalog,
com.google.cloud.retail.v2.Catalog.Builder,
com.google.cloud.retail.v2.CatalogOrBuilder>
catalogsBuilder_;
/**
*
*
* <pre>
* All the customer's [Catalog][google.cloud.retail.v2.Catalog]s.
* </pre>
*
* <code>repeated .google.cloud.retail.v2.Catalog catalogs = 1;</code>
*/
public java.util.List<com.google.cloud.retail.v2.Catalog> getCatalogsList() {
if (catalogsBuilder_ == null) {
return java.util.Collections.unmodifiableList(catalogs_);
} else {
return catalogsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* All the customer's [Catalog][google.cloud.retail.v2.Catalog]s.
* </pre>
*
* <code>repeated .google.cloud.retail.v2.Catalog catalogs = 1;</code>
*/
public int getCatalogsCount() {
if (catalogsBuilder_ == null) {
return catalogs_.size();
} else {
return catalogsBuilder_.getCount();
}
}
/**
*
*
* <pre>
* All the customer's [Catalog][google.cloud.retail.v2.Catalog]s.
* </pre>
*
* <code>repeated .google.cloud.retail.v2.Catalog catalogs = 1;</code>
*/
public com.google.cloud.retail.v2.Catalog getCatalogs(int index) {
if (catalogsBuilder_ == null) {
return catalogs_.get(index);
} else {
return catalogsBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* All the customer's [Catalog][google.cloud.retail.v2.Catalog]s.
* </pre>
*
* <code>repeated .google.cloud.retail.v2.Catalog catalogs = 1;</code>
*/
public Builder setCatalogs(int index, com.google.cloud.retail.v2.Catalog value) {
if (catalogsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureCatalogsIsMutable();
catalogs_.set(index, value);
onChanged();
} else {
catalogsBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* All the customer's [Catalog][google.cloud.retail.v2.Catalog]s.
* </pre>
*
* <code>repeated .google.cloud.retail.v2.Catalog catalogs = 1;</code>
*/
public Builder setCatalogs(
int index, com.google.cloud.retail.v2.Catalog.Builder builderForValue) {
if (catalogsBuilder_ == null) {
ensureCatalogsIsMutable();
catalogs_.set(index, builderForValue.build());
onChanged();
} else {
catalogsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* All the customer's [Catalog][google.cloud.retail.v2.Catalog]s.
* </pre>
*
* <code>repeated .google.cloud.retail.v2.Catalog catalogs = 1;</code>
*/
public Builder addCatalogs(com.google.cloud.retail.v2.Catalog value) {
if (catalogsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureCatalogsIsMutable();
catalogs_.add(value);
onChanged();
} else {
catalogsBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* All the customer's [Catalog][google.cloud.retail.v2.Catalog]s.
* </pre>
*
* <code>repeated .google.cloud.retail.v2.Catalog catalogs = 1;</code>
*/
public Builder addCatalogs(int index, com.google.cloud.retail.v2.Catalog value) {
if (catalogsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureCatalogsIsMutable();
catalogs_.add(index, value);
onChanged();
} else {
catalogsBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* All the customer's [Catalog][google.cloud.retail.v2.Catalog]s.
* </pre>
*
* <code>repeated .google.cloud.retail.v2.Catalog catalogs = 1;</code>
*/
public Builder addCatalogs(com.google.cloud.retail.v2.Catalog.Builder builderForValue) {
if (catalogsBuilder_ == null) {
ensureCatalogsIsMutable();
catalogs_.add(builderForValue.build());
onChanged();
} else {
catalogsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* All the customer's [Catalog][google.cloud.retail.v2.Catalog]s.
* </pre>
*
* <code>repeated .google.cloud.retail.v2.Catalog catalogs = 1;</code>
*/
public Builder addCatalogs(
int index, com.google.cloud.retail.v2.Catalog.Builder builderForValue) {
if (catalogsBuilder_ == null) {
ensureCatalogsIsMutable();
catalogs_.add(index, builderForValue.build());
onChanged();
} else {
catalogsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* All the customer's [Catalog][google.cloud.retail.v2.Catalog]s.
* </pre>
*
* <code>repeated .google.cloud.retail.v2.Catalog catalogs = 1;</code>
*/
public Builder addAllCatalogs(
java.lang.Iterable<? extends com.google.cloud.retail.v2.Catalog> values) {
if (catalogsBuilder_ == null) {
ensureCatalogsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, catalogs_);
onChanged();
} else {
catalogsBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* All the customer's [Catalog][google.cloud.retail.v2.Catalog]s.
* </pre>
*
* <code>repeated .google.cloud.retail.v2.Catalog catalogs = 1;</code>
*/
public Builder clearCatalogs() {
if (catalogsBuilder_ == null) {
catalogs_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
catalogsBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* All the customer's [Catalog][google.cloud.retail.v2.Catalog]s.
* </pre>
*
* <code>repeated .google.cloud.retail.v2.Catalog catalogs = 1;</code>
*/
public Builder removeCatalogs(int index) {
if (catalogsBuilder_ == null) {
ensureCatalogsIsMutable();
catalogs_.remove(index);
onChanged();
} else {
catalogsBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* All the customer's [Catalog][google.cloud.retail.v2.Catalog]s.
* </pre>
*
* <code>repeated .google.cloud.retail.v2.Catalog catalogs = 1;</code>
*/
public com.google.cloud.retail.v2.Catalog.Builder getCatalogsBuilder(int index) {
return getCatalogsFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* All the customer's [Catalog][google.cloud.retail.v2.Catalog]s.
* </pre>
*
* <code>repeated .google.cloud.retail.v2.Catalog catalogs = 1;</code>
*/
public com.google.cloud.retail.v2.CatalogOrBuilder getCatalogsOrBuilder(int index) {
if (catalogsBuilder_ == null) {
return catalogs_.get(index);
} else {
return catalogsBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* All the customer's [Catalog][google.cloud.retail.v2.Catalog]s.
* </pre>
*
* <code>repeated .google.cloud.retail.v2.Catalog catalogs = 1;</code>
*/
public java.util.List<? extends com.google.cloud.retail.v2.CatalogOrBuilder>
getCatalogsOrBuilderList() {
if (catalogsBuilder_ != null) {
return catalogsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(catalogs_);
}
}
/**
*
*
* <pre>
* All the customer's [Catalog][google.cloud.retail.v2.Catalog]s.
* </pre>
*
* <code>repeated .google.cloud.retail.v2.Catalog catalogs = 1;</code>
*/
public com.google.cloud.retail.v2.Catalog.Builder addCatalogsBuilder() {
return getCatalogsFieldBuilder()
.addBuilder(com.google.cloud.retail.v2.Catalog.getDefaultInstance());
}
/**
*
*
* <pre>
* All the customer's [Catalog][google.cloud.retail.v2.Catalog]s.
* </pre>
*
* <code>repeated .google.cloud.retail.v2.Catalog catalogs = 1;</code>
*/
public com.google.cloud.retail.v2.Catalog.Builder addCatalogsBuilder(int index) {
return getCatalogsFieldBuilder()
.addBuilder(index, com.google.cloud.retail.v2.Catalog.getDefaultInstance());
}
/**
*
*
* <pre>
* All the customer's [Catalog][google.cloud.retail.v2.Catalog]s.
* </pre>
*
* <code>repeated .google.cloud.retail.v2.Catalog catalogs = 1;</code>
*/
public java.util.List<com.google.cloud.retail.v2.Catalog.Builder> getCatalogsBuilderList() {
return getCatalogsFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.retail.v2.Catalog,
com.google.cloud.retail.v2.Catalog.Builder,
com.google.cloud.retail.v2.CatalogOrBuilder>
getCatalogsFieldBuilder() {
if (catalogsBuilder_ == null) {
catalogsBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.retail.v2.Catalog,
com.google.cloud.retail.v2.Catalog.Builder,
com.google.cloud.retail.v2.CatalogOrBuilder>(
catalogs_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
catalogs_ = null;
}
return catalogsBuilder_;
}
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token that can be sent as
* [ListCatalogsRequest.page_token][google.cloud.retail.v2.ListCatalogsRequest.page_token]
* to retrieve the next page. If this field is omitted, there are no
* subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* A token that can be sent as
* [ListCatalogsRequest.page_token][google.cloud.retail.v2.ListCatalogsRequest.page_token]
* to retrieve the next page. If this field is omitted, there are no
* subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* A token that can be sent as
* [ListCatalogsRequest.page_token][google.cloud.retail.v2.ListCatalogsRequest.page_token]
* to retrieve the next page. If this field is omitted, there are no
* subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* A token that can be sent as
* [ListCatalogsRequest.page_token][google.cloud.retail.v2.ListCatalogsRequest.page_token]
* to retrieve the next page. If this field is omitted, there are no
* subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* A token that can be sent as
* [ListCatalogsRequest.page_token][google.cloud.retail.v2.ListCatalogsRequest.page_token]
* to retrieve the next page. If this field is omitted, there are no
* subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.retail.v2.ListCatalogsResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.retail.v2.ListCatalogsResponse)
private static final com.google.cloud.retail.v2.ListCatalogsResponse DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.retail.v2.ListCatalogsResponse();
}
public static com.google.cloud.retail.v2.ListCatalogsResponse getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ListCatalogsResponse> PARSER =
new com.google.protobuf.AbstractParser<ListCatalogsResponse>() {
@java.lang.Override
public ListCatalogsResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ListCatalogsResponse> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListCatalogsResponse> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.retail.v2.ListCatalogsResponse getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 36,798 | java-dialogflow-cx/proto-google-cloud-dialogflow-cx-v3/src/main/java/com/google/cloud/dialogflow/cx/v3/OutputAudioConfig.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/dialogflow/cx/v3/audio_config.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.dialogflow.cx.v3;
/**
*
*
* <pre>
* Instructs the speech synthesizer how to generate the output audio content.
* </pre>
*
* Protobuf type {@code google.cloud.dialogflow.cx.v3.OutputAudioConfig}
*/
public final class OutputAudioConfig extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.dialogflow.cx.v3.OutputAudioConfig)
OutputAudioConfigOrBuilder {
private static final long serialVersionUID = 0L;
// Use OutputAudioConfig.newBuilder() to construct.
private OutputAudioConfig(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private OutputAudioConfig() {
audioEncoding_ = 0;
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new OutputAudioConfig();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.dialogflow.cx.v3.AudioConfigProto
.internal_static_google_cloud_dialogflow_cx_v3_OutputAudioConfig_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.dialogflow.cx.v3.AudioConfigProto
.internal_static_google_cloud_dialogflow_cx_v3_OutputAudioConfig_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.dialogflow.cx.v3.OutputAudioConfig.class,
com.google.cloud.dialogflow.cx.v3.OutputAudioConfig.Builder.class);
}
private int bitField0_;
public static final int AUDIO_ENCODING_FIELD_NUMBER = 1;
private int audioEncoding_ = 0;
/**
*
*
* <pre>
* Required. Audio encoding of the synthesized audio content.
* </pre>
*
* <code>
* .google.cloud.dialogflow.cx.v3.OutputAudioEncoding audio_encoding = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The enum numeric value on the wire for audioEncoding.
*/
@java.lang.Override
public int getAudioEncodingValue() {
return audioEncoding_;
}
/**
*
*
* <pre>
* Required. Audio encoding of the synthesized audio content.
* </pre>
*
* <code>
* .google.cloud.dialogflow.cx.v3.OutputAudioEncoding audio_encoding = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The audioEncoding.
*/
@java.lang.Override
public com.google.cloud.dialogflow.cx.v3.OutputAudioEncoding getAudioEncoding() {
com.google.cloud.dialogflow.cx.v3.OutputAudioEncoding result =
com.google.cloud.dialogflow.cx.v3.OutputAudioEncoding.forNumber(audioEncoding_);
return result == null
? com.google.cloud.dialogflow.cx.v3.OutputAudioEncoding.UNRECOGNIZED
: result;
}
public static final int SAMPLE_RATE_HERTZ_FIELD_NUMBER = 2;
private int sampleRateHertz_ = 0;
/**
*
*
* <pre>
* Optional. The synthesis sample rate (in hertz) for this audio. If not
* provided, then the synthesizer will use the default sample rate based on
* the audio encoding. If this is different from the voice's natural sample
* rate, then the synthesizer will honor this request by converting to the
* desired sample rate (which might result in worse audio quality).
* </pre>
*
* <code>int32 sample_rate_hertz = 2;</code>
*
* @return The sampleRateHertz.
*/
@java.lang.Override
public int getSampleRateHertz() {
return sampleRateHertz_;
}
public static final int SYNTHESIZE_SPEECH_CONFIG_FIELD_NUMBER = 3;
private com.google.cloud.dialogflow.cx.v3.SynthesizeSpeechConfig synthesizeSpeechConfig_;
/**
*
*
* <pre>
* Optional. Configuration of how speech should be synthesized.
* If not specified,
* [Agent.text_to_speech_settings][google.cloud.dialogflow.cx.v3.Agent.text_to_speech_settings]
* is applied.
* </pre>
*
* <code>.google.cloud.dialogflow.cx.v3.SynthesizeSpeechConfig synthesize_speech_config = 3;
* </code>
*
* @return Whether the synthesizeSpeechConfig field is set.
*/
@java.lang.Override
public boolean hasSynthesizeSpeechConfig() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Optional. Configuration of how speech should be synthesized.
* If not specified,
* [Agent.text_to_speech_settings][google.cloud.dialogflow.cx.v3.Agent.text_to_speech_settings]
* is applied.
* </pre>
*
* <code>.google.cloud.dialogflow.cx.v3.SynthesizeSpeechConfig synthesize_speech_config = 3;
* </code>
*
* @return The synthesizeSpeechConfig.
*/
@java.lang.Override
public com.google.cloud.dialogflow.cx.v3.SynthesizeSpeechConfig getSynthesizeSpeechConfig() {
return synthesizeSpeechConfig_ == null
? com.google.cloud.dialogflow.cx.v3.SynthesizeSpeechConfig.getDefaultInstance()
: synthesizeSpeechConfig_;
}
/**
*
*
* <pre>
* Optional. Configuration of how speech should be synthesized.
* If not specified,
* [Agent.text_to_speech_settings][google.cloud.dialogflow.cx.v3.Agent.text_to_speech_settings]
* is applied.
* </pre>
*
* <code>.google.cloud.dialogflow.cx.v3.SynthesizeSpeechConfig synthesize_speech_config = 3;
* </code>
*/
@java.lang.Override
public com.google.cloud.dialogflow.cx.v3.SynthesizeSpeechConfigOrBuilder
getSynthesizeSpeechConfigOrBuilder() {
return synthesizeSpeechConfig_ == null
? com.google.cloud.dialogflow.cx.v3.SynthesizeSpeechConfig.getDefaultInstance()
: synthesizeSpeechConfig_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (audioEncoding_
!= com.google.cloud.dialogflow.cx.v3.OutputAudioEncoding.OUTPUT_AUDIO_ENCODING_UNSPECIFIED
.getNumber()) {
output.writeEnum(1, audioEncoding_);
}
if (sampleRateHertz_ != 0) {
output.writeInt32(2, sampleRateHertz_);
}
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(3, getSynthesizeSpeechConfig());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (audioEncoding_
!= com.google.cloud.dialogflow.cx.v3.OutputAudioEncoding.OUTPUT_AUDIO_ENCODING_UNSPECIFIED
.getNumber()) {
size += com.google.protobuf.CodedOutputStream.computeEnumSize(1, audioEncoding_);
}
if (sampleRateHertz_ != 0) {
size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, sampleRateHertz_);
}
if (((bitField0_ & 0x00000001) != 0)) {
size +=
com.google.protobuf.CodedOutputStream.computeMessageSize(3, getSynthesizeSpeechConfig());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.dialogflow.cx.v3.OutputAudioConfig)) {
return super.equals(obj);
}
com.google.cloud.dialogflow.cx.v3.OutputAudioConfig other =
(com.google.cloud.dialogflow.cx.v3.OutputAudioConfig) obj;
if (audioEncoding_ != other.audioEncoding_) return false;
if (getSampleRateHertz() != other.getSampleRateHertz()) return false;
if (hasSynthesizeSpeechConfig() != other.hasSynthesizeSpeechConfig()) return false;
if (hasSynthesizeSpeechConfig()) {
if (!getSynthesizeSpeechConfig().equals(other.getSynthesizeSpeechConfig())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + AUDIO_ENCODING_FIELD_NUMBER;
hash = (53 * hash) + audioEncoding_;
hash = (37 * hash) + SAMPLE_RATE_HERTZ_FIELD_NUMBER;
hash = (53 * hash) + getSampleRateHertz();
if (hasSynthesizeSpeechConfig()) {
hash = (37 * hash) + SYNTHESIZE_SPEECH_CONFIG_FIELD_NUMBER;
hash = (53 * hash) + getSynthesizeSpeechConfig().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.dialogflow.cx.v3.OutputAudioConfig parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.cx.v3.OutputAudioConfig parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.cx.v3.OutputAudioConfig parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.cx.v3.OutputAudioConfig parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.cx.v3.OutputAudioConfig parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.cx.v3.OutputAudioConfig parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.cx.v3.OutputAudioConfig parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.cx.v3.OutputAudioConfig parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.dialogflow.cx.v3.OutputAudioConfig parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.cx.v3.OutputAudioConfig parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.dialogflow.cx.v3.OutputAudioConfig parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.cx.v3.OutputAudioConfig parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.dialogflow.cx.v3.OutputAudioConfig prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Instructs the speech synthesizer how to generate the output audio content.
* </pre>
*
* Protobuf type {@code google.cloud.dialogflow.cx.v3.OutputAudioConfig}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.dialogflow.cx.v3.OutputAudioConfig)
com.google.cloud.dialogflow.cx.v3.OutputAudioConfigOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.dialogflow.cx.v3.AudioConfigProto
.internal_static_google_cloud_dialogflow_cx_v3_OutputAudioConfig_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.dialogflow.cx.v3.AudioConfigProto
.internal_static_google_cloud_dialogflow_cx_v3_OutputAudioConfig_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.dialogflow.cx.v3.OutputAudioConfig.class,
com.google.cloud.dialogflow.cx.v3.OutputAudioConfig.Builder.class);
}
// Construct using com.google.cloud.dialogflow.cx.v3.OutputAudioConfig.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getSynthesizeSpeechConfigFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
audioEncoding_ = 0;
sampleRateHertz_ = 0;
synthesizeSpeechConfig_ = null;
if (synthesizeSpeechConfigBuilder_ != null) {
synthesizeSpeechConfigBuilder_.dispose();
synthesizeSpeechConfigBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.dialogflow.cx.v3.AudioConfigProto
.internal_static_google_cloud_dialogflow_cx_v3_OutputAudioConfig_descriptor;
}
@java.lang.Override
public com.google.cloud.dialogflow.cx.v3.OutputAudioConfig getDefaultInstanceForType() {
return com.google.cloud.dialogflow.cx.v3.OutputAudioConfig.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.dialogflow.cx.v3.OutputAudioConfig build() {
com.google.cloud.dialogflow.cx.v3.OutputAudioConfig result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.dialogflow.cx.v3.OutputAudioConfig buildPartial() {
com.google.cloud.dialogflow.cx.v3.OutputAudioConfig result =
new com.google.cloud.dialogflow.cx.v3.OutputAudioConfig(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.dialogflow.cx.v3.OutputAudioConfig result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.audioEncoding_ = audioEncoding_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.sampleRateHertz_ = sampleRateHertz_;
}
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000004) != 0)) {
result.synthesizeSpeechConfig_ =
synthesizeSpeechConfigBuilder_ == null
? synthesizeSpeechConfig_
: synthesizeSpeechConfigBuilder_.build();
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.dialogflow.cx.v3.OutputAudioConfig) {
return mergeFrom((com.google.cloud.dialogflow.cx.v3.OutputAudioConfig) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.dialogflow.cx.v3.OutputAudioConfig other) {
if (other == com.google.cloud.dialogflow.cx.v3.OutputAudioConfig.getDefaultInstance())
return this;
if (other.audioEncoding_ != 0) {
setAudioEncodingValue(other.getAudioEncodingValue());
}
if (other.getSampleRateHertz() != 0) {
setSampleRateHertz(other.getSampleRateHertz());
}
if (other.hasSynthesizeSpeechConfig()) {
mergeSynthesizeSpeechConfig(other.getSynthesizeSpeechConfig());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 8:
{
audioEncoding_ = input.readEnum();
bitField0_ |= 0x00000001;
break;
} // case 8
case 16:
{
sampleRateHertz_ = input.readInt32();
bitField0_ |= 0x00000002;
break;
} // case 16
case 26:
{
input.readMessage(
getSynthesizeSpeechConfigFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000004;
break;
} // case 26
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private int audioEncoding_ = 0;
/**
*
*
* <pre>
* Required. Audio encoding of the synthesized audio content.
* </pre>
*
* <code>
* .google.cloud.dialogflow.cx.v3.OutputAudioEncoding audio_encoding = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The enum numeric value on the wire for audioEncoding.
*/
@java.lang.Override
public int getAudioEncodingValue() {
return audioEncoding_;
}
/**
*
*
* <pre>
* Required. Audio encoding of the synthesized audio content.
* </pre>
*
* <code>
* .google.cloud.dialogflow.cx.v3.OutputAudioEncoding audio_encoding = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @param value The enum numeric value on the wire for audioEncoding to set.
* @return This builder for chaining.
*/
public Builder setAudioEncodingValue(int value) {
audioEncoding_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Audio encoding of the synthesized audio content.
* </pre>
*
* <code>
* .google.cloud.dialogflow.cx.v3.OutputAudioEncoding audio_encoding = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The audioEncoding.
*/
@java.lang.Override
public com.google.cloud.dialogflow.cx.v3.OutputAudioEncoding getAudioEncoding() {
com.google.cloud.dialogflow.cx.v3.OutputAudioEncoding result =
com.google.cloud.dialogflow.cx.v3.OutputAudioEncoding.forNumber(audioEncoding_);
return result == null
? com.google.cloud.dialogflow.cx.v3.OutputAudioEncoding.UNRECOGNIZED
: result;
}
/**
*
*
* <pre>
* Required. Audio encoding of the synthesized audio content.
* </pre>
*
* <code>
* .google.cloud.dialogflow.cx.v3.OutputAudioEncoding audio_encoding = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @param value The audioEncoding to set.
* @return This builder for chaining.
*/
public Builder setAudioEncoding(com.google.cloud.dialogflow.cx.v3.OutputAudioEncoding value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
audioEncoding_ = value.getNumber();
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Audio encoding of the synthesized audio content.
* </pre>
*
* <code>
* .google.cloud.dialogflow.cx.v3.OutputAudioEncoding audio_encoding = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return This builder for chaining.
*/
public Builder clearAudioEncoding() {
bitField0_ = (bitField0_ & ~0x00000001);
audioEncoding_ = 0;
onChanged();
return this;
}
private int sampleRateHertz_;
/**
*
*
* <pre>
* Optional. The synthesis sample rate (in hertz) for this audio. If not
* provided, then the synthesizer will use the default sample rate based on
* the audio encoding. If this is different from the voice's natural sample
* rate, then the synthesizer will honor this request by converting to the
* desired sample rate (which might result in worse audio quality).
* </pre>
*
* <code>int32 sample_rate_hertz = 2;</code>
*
* @return The sampleRateHertz.
*/
@java.lang.Override
public int getSampleRateHertz() {
return sampleRateHertz_;
}
/**
*
*
* <pre>
* Optional. The synthesis sample rate (in hertz) for this audio. If not
* provided, then the synthesizer will use the default sample rate based on
* the audio encoding. If this is different from the voice's natural sample
* rate, then the synthesizer will honor this request by converting to the
* desired sample rate (which might result in worse audio quality).
* </pre>
*
* <code>int32 sample_rate_hertz = 2;</code>
*
* @param value The sampleRateHertz to set.
* @return This builder for chaining.
*/
public Builder setSampleRateHertz(int value) {
sampleRateHertz_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. The synthesis sample rate (in hertz) for this audio. If not
* provided, then the synthesizer will use the default sample rate based on
* the audio encoding. If this is different from the voice's natural sample
* rate, then the synthesizer will honor this request by converting to the
* desired sample rate (which might result in worse audio quality).
* </pre>
*
* <code>int32 sample_rate_hertz = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearSampleRateHertz() {
bitField0_ = (bitField0_ & ~0x00000002);
sampleRateHertz_ = 0;
onChanged();
return this;
}
private com.google.cloud.dialogflow.cx.v3.SynthesizeSpeechConfig synthesizeSpeechConfig_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.dialogflow.cx.v3.SynthesizeSpeechConfig,
com.google.cloud.dialogflow.cx.v3.SynthesizeSpeechConfig.Builder,
com.google.cloud.dialogflow.cx.v3.SynthesizeSpeechConfigOrBuilder>
synthesizeSpeechConfigBuilder_;
/**
*
*
* <pre>
* Optional. Configuration of how speech should be synthesized.
* If not specified,
* [Agent.text_to_speech_settings][google.cloud.dialogflow.cx.v3.Agent.text_to_speech_settings]
* is applied.
* </pre>
*
* <code>.google.cloud.dialogflow.cx.v3.SynthesizeSpeechConfig synthesize_speech_config = 3;
* </code>
*
* @return Whether the synthesizeSpeechConfig field is set.
*/
public boolean hasSynthesizeSpeechConfig() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
*
*
* <pre>
* Optional. Configuration of how speech should be synthesized.
* If not specified,
* [Agent.text_to_speech_settings][google.cloud.dialogflow.cx.v3.Agent.text_to_speech_settings]
* is applied.
* </pre>
*
* <code>.google.cloud.dialogflow.cx.v3.SynthesizeSpeechConfig synthesize_speech_config = 3;
* </code>
*
* @return The synthesizeSpeechConfig.
*/
public com.google.cloud.dialogflow.cx.v3.SynthesizeSpeechConfig getSynthesizeSpeechConfig() {
if (synthesizeSpeechConfigBuilder_ == null) {
return synthesizeSpeechConfig_ == null
? com.google.cloud.dialogflow.cx.v3.SynthesizeSpeechConfig.getDefaultInstance()
: synthesizeSpeechConfig_;
} else {
return synthesizeSpeechConfigBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Optional. Configuration of how speech should be synthesized.
* If not specified,
* [Agent.text_to_speech_settings][google.cloud.dialogflow.cx.v3.Agent.text_to_speech_settings]
* is applied.
* </pre>
*
* <code>.google.cloud.dialogflow.cx.v3.SynthesizeSpeechConfig synthesize_speech_config = 3;
* </code>
*/
public Builder setSynthesizeSpeechConfig(
com.google.cloud.dialogflow.cx.v3.SynthesizeSpeechConfig value) {
if (synthesizeSpeechConfigBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
synthesizeSpeechConfig_ = value;
} else {
synthesizeSpeechConfigBuilder_.setMessage(value);
}
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. Configuration of how speech should be synthesized.
* If not specified,
* [Agent.text_to_speech_settings][google.cloud.dialogflow.cx.v3.Agent.text_to_speech_settings]
* is applied.
* </pre>
*
* <code>.google.cloud.dialogflow.cx.v3.SynthesizeSpeechConfig synthesize_speech_config = 3;
* </code>
*/
public Builder setSynthesizeSpeechConfig(
com.google.cloud.dialogflow.cx.v3.SynthesizeSpeechConfig.Builder builderForValue) {
if (synthesizeSpeechConfigBuilder_ == null) {
synthesizeSpeechConfig_ = builderForValue.build();
} else {
synthesizeSpeechConfigBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. Configuration of how speech should be synthesized.
* If not specified,
* [Agent.text_to_speech_settings][google.cloud.dialogflow.cx.v3.Agent.text_to_speech_settings]
* is applied.
* </pre>
*
* <code>.google.cloud.dialogflow.cx.v3.SynthesizeSpeechConfig synthesize_speech_config = 3;
* </code>
*/
public Builder mergeSynthesizeSpeechConfig(
com.google.cloud.dialogflow.cx.v3.SynthesizeSpeechConfig value) {
if (synthesizeSpeechConfigBuilder_ == null) {
if (((bitField0_ & 0x00000004) != 0)
&& synthesizeSpeechConfig_ != null
&& synthesizeSpeechConfig_
!= com.google.cloud.dialogflow.cx.v3.SynthesizeSpeechConfig.getDefaultInstance()) {
getSynthesizeSpeechConfigBuilder().mergeFrom(value);
} else {
synthesizeSpeechConfig_ = value;
}
} else {
synthesizeSpeechConfigBuilder_.mergeFrom(value);
}
if (synthesizeSpeechConfig_ != null) {
bitField0_ |= 0x00000004;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Optional. Configuration of how speech should be synthesized.
* If not specified,
* [Agent.text_to_speech_settings][google.cloud.dialogflow.cx.v3.Agent.text_to_speech_settings]
* is applied.
* </pre>
*
* <code>.google.cloud.dialogflow.cx.v3.SynthesizeSpeechConfig synthesize_speech_config = 3;
* </code>
*/
public Builder clearSynthesizeSpeechConfig() {
bitField0_ = (bitField0_ & ~0x00000004);
synthesizeSpeechConfig_ = null;
if (synthesizeSpeechConfigBuilder_ != null) {
synthesizeSpeechConfigBuilder_.dispose();
synthesizeSpeechConfigBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. Configuration of how speech should be synthesized.
* If not specified,
* [Agent.text_to_speech_settings][google.cloud.dialogflow.cx.v3.Agent.text_to_speech_settings]
* is applied.
* </pre>
*
* <code>.google.cloud.dialogflow.cx.v3.SynthesizeSpeechConfig synthesize_speech_config = 3;
* </code>
*/
public com.google.cloud.dialogflow.cx.v3.SynthesizeSpeechConfig.Builder
getSynthesizeSpeechConfigBuilder() {
bitField0_ |= 0x00000004;
onChanged();
return getSynthesizeSpeechConfigFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Optional. Configuration of how speech should be synthesized.
* If not specified,
* [Agent.text_to_speech_settings][google.cloud.dialogflow.cx.v3.Agent.text_to_speech_settings]
* is applied.
* </pre>
*
* <code>.google.cloud.dialogflow.cx.v3.SynthesizeSpeechConfig synthesize_speech_config = 3;
* </code>
*/
public com.google.cloud.dialogflow.cx.v3.SynthesizeSpeechConfigOrBuilder
getSynthesizeSpeechConfigOrBuilder() {
if (synthesizeSpeechConfigBuilder_ != null) {
return synthesizeSpeechConfigBuilder_.getMessageOrBuilder();
} else {
return synthesizeSpeechConfig_ == null
? com.google.cloud.dialogflow.cx.v3.SynthesizeSpeechConfig.getDefaultInstance()
: synthesizeSpeechConfig_;
}
}
/**
*
*
* <pre>
* Optional. Configuration of how speech should be synthesized.
* If not specified,
* [Agent.text_to_speech_settings][google.cloud.dialogflow.cx.v3.Agent.text_to_speech_settings]
* is applied.
* </pre>
*
* <code>.google.cloud.dialogflow.cx.v3.SynthesizeSpeechConfig synthesize_speech_config = 3;
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.dialogflow.cx.v3.SynthesizeSpeechConfig,
com.google.cloud.dialogflow.cx.v3.SynthesizeSpeechConfig.Builder,
com.google.cloud.dialogflow.cx.v3.SynthesizeSpeechConfigOrBuilder>
getSynthesizeSpeechConfigFieldBuilder() {
if (synthesizeSpeechConfigBuilder_ == null) {
synthesizeSpeechConfigBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.dialogflow.cx.v3.SynthesizeSpeechConfig,
com.google.cloud.dialogflow.cx.v3.SynthesizeSpeechConfig.Builder,
com.google.cloud.dialogflow.cx.v3.SynthesizeSpeechConfigOrBuilder>(
getSynthesizeSpeechConfig(), getParentForChildren(), isClean());
synthesizeSpeechConfig_ = null;
}
return synthesizeSpeechConfigBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.dialogflow.cx.v3.OutputAudioConfig)
}
// @@protoc_insertion_point(class_scope:google.cloud.dialogflow.cx.v3.OutputAudioConfig)
private static final com.google.cloud.dialogflow.cx.v3.OutputAudioConfig DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.dialogflow.cx.v3.OutputAudioConfig();
}
public static com.google.cloud.dialogflow.cx.v3.OutputAudioConfig getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<OutputAudioConfig> PARSER =
new com.google.protobuf.AbstractParser<OutputAudioConfig>() {
@java.lang.Override
public OutputAudioConfig parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<OutputAudioConfig> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<OutputAudioConfig> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.dialogflow.cx.v3.OutputAudioConfig getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 36,709 | java-netapp/proto-google-cloud-netapp-v1/src/main/java/com/google/cloud/netapp/v1/CreateSnapshotRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/netapp/v1/snapshot.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.netapp.v1;
/**
*
*
* <pre>
* CreateSnapshotRequest creates a snapshot.
* </pre>
*
* Protobuf type {@code google.cloud.netapp.v1.CreateSnapshotRequest}
*/
public final class CreateSnapshotRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.netapp.v1.CreateSnapshotRequest)
CreateSnapshotRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use CreateSnapshotRequest.newBuilder() to construct.
private CreateSnapshotRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private CreateSnapshotRequest() {
parent_ = "";
snapshotId_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new CreateSnapshotRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.netapp.v1.SnapshotProto
.internal_static_google_cloud_netapp_v1_CreateSnapshotRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.netapp.v1.SnapshotProto
.internal_static_google_cloud_netapp_v1_CreateSnapshotRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.netapp.v1.CreateSnapshotRequest.class,
com.google.cloud.netapp.v1.CreateSnapshotRequest.Builder.class);
}
private int bitField0_;
public static final int PARENT_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The NetApp volume to create the snapshots of, in the format
* `projects/{project_id}/locations/{location}/volumes/{volume_id}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
@java.lang.Override
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. The NetApp volume to create the snapshots of, in the format
* `projects/{project_id}/locations/{location}/volumes/{volume_id}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int SNAPSHOT_FIELD_NUMBER = 2;
private com.google.cloud.netapp.v1.Snapshot snapshot_;
/**
*
*
* <pre>
* Required. A snapshot resource
* </pre>
*
* <code>.google.cloud.netapp.v1.Snapshot snapshot = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the snapshot field is set.
*/
@java.lang.Override
public boolean hasSnapshot() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. A snapshot resource
* </pre>
*
* <code>.google.cloud.netapp.v1.Snapshot snapshot = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The snapshot.
*/
@java.lang.Override
public com.google.cloud.netapp.v1.Snapshot getSnapshot() {
return snapshot_ == null ? com.google.cloud.netapp.v1.Snapshot.getDefaultInstance() : snapshot_;
}
/**
*
*
* <pre>
* Required. A snapshot resource
* </pre>
*
* <code>.google.cloud.netapp.v1.Snapshot snapshot = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.netapp.v1.SnapshotOrBuilder getSnapshotOrBuilder() {
return snapshot_ == null ? com.google.cloud.netapp.v1.Snapshot.getDefaultInstance() : snapshot_;
}
public static final int SNAPSHOT_ID_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object snapshotId_ = "";
/**
*
*
* <pre>
* Required. ID of the snapshot to create. Must be unique within the parent
* resource. Must contain only letters, numbers and hyphen, with the first
* character a letter, the last a letter or a
* number, and a 63 character maximum.
* </pre>
*
* <code>string snapshot_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The snapshotId.
*/
@java.lang.Override
public java.lang.String getSnapshotId() {
java.lang.Object ref = snapshotId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
snapshotId_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. ID of the snapshot to create. Must be unique within the parent
* resource. Must contain only letters, numbers and hyphen, with the first
* character a letter, the last a letter or a
* number, and a 63 character maximum.
* </pre>
*
* <code>string snapshot_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for snapshotId.
*/
@java.lang.Override
public com.google.protobuf.ByteString getSnapshotIdBytes() {
java.lang.Object ref = snapshotId_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
snapshotId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
}
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(2, getSnapshot());
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(snapshotId_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 3, snapshotId_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
}
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getSnapshot());
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(snapshotId_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, snapshotId_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.netapp.v1.CreateSnapshotRequest)) {
return super.equals(obj);
}
com.google.cloud.netapp.v1.CreateSnapshotRequest other =
(com.google.cloud.netapp.v1.CreateSnapshotRequest) obj;
if (!getParent().equals(other.getParent())) return false;
if (hasSnapshot() != other.hasSnapshot()) return false;
if (hasSnapshot()) {
if (!getSnapshot().equals(other.getSnapshot())) return false;
}
if (!getSnapshotId().equals(other.getSnapshotId())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + PARENT_FIELD_NUMBER;
hash = (53 * hash) + getParent().hashCode();
if (hasSnapshot()) {
hash = (37 * hash) + SNAPSHOT_FIELD_NUMBER;
hash = (53 * hash) + getSnapshot().hashCode();
}
hash = (37 * hash) + SNAPSHOT_ID_FIELD_NUMBER;
hash = (53 * hash) + getSnapshotId().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.netapp.v1.CreateSnapshotRequest parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.netapp.v1.CreateSnapshotRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.netapp.v1.CreateSnapshotRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.netapp.v1.CreateSnapshotRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.netapp.v1.CreateSnapshotRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.netapp.v1.CreateSnapshotRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.netapp.v1.CreateSnapshotRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.netapp.v1.CreateSnapshotRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.netapp.v1.CreateSnapshotRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.netapp.v1.CreateSnapshotRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.netapp.v1.CreateSnapshotRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.netapp.v1.CreateSnapshotRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.netapp.v1.CreateSnapshotRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* CreateSnapshotRequest creates a snapshot.
* </pre>
*
* Protobuf type {@code google.cloud.netapp.v1.CreateSnapshotRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.netapp.v1.CreateSnapshotRequest)
com.google.cloud.netapp.v1.CreateSnapshotRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.netapp.v1.SnapshotProto
.internal_static_google_cloud_netapp_v1_CreateSnapshotRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.netapp.v1.SnapshotProto
.internal_static_google_cloud_netapp_v1_CreateSnapshotRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.netapp.v1.CreateSnapshotRequest.class,
com.google.cloud.netapp.v1.CreateSnapshotRequest.Builder.class);
}
// Construct using com.google.cloud.netapp.v1.CreateSnapshotRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getSnapshotFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
parent_ = "";
snapshot_ = null;
if (snapshotBuilder_ != null) {
snapshotBuilder_.dispose();
snapshotBuilder_ = null;
}
snapshotId_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.netapp.v1.SnapshotProto
.internal_static_google_cloud_netapp_v1_CreateSnapshotRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.netapp.v1.CreateSnapshotRequest getDefaultInstanceForType() {
return com.google.cloud.netapp.v1.CreateSnapshotRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.netapp.v1.CreateSnapshotRequest build() {
com.google.cloud.netapp.v1.CreateSnapshotRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.netapp.v1.CreateSnapshotRequest buildPartial() {
com.google.cloud.netapp.v1.CreateSnapshotRequest result =
new com.google.cloud.netapp.v1.CreateSnapshotRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.netapp.v1.CreateSnapshotRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.parent_ = parent_;
}
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.snapshot_ = snapshotBuilder_ == null ? snapshot_ : snapshotBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.snapshotId_ = snapshotId_;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.netapp.v1.CreateSnapshotRequest) {
return mergeFrom((com.google.cloud.netapp.v1.CreateSnapshotRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.netapp.v1.CreateSnapshotRequest other) {
if (other == com.google.cloud.netapp.v1.CreateSnapshotRequest.getDefaultInstance())
return this;
if (!other.getParent().isEmpty()) {
parent_ = other.parent_;
bitField0_ |= 0x00000001;
onChanged();
}
if (other.hasSnapshot()) {
mergeSnapshot(other.getSnapshot());
}
if (!other.getSnapshotId().isEmpty()) {
snapshotId_ = other.snapshotId_;
bitField0_ |= 0x00000004;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
parent_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
input.readMessage(getSnapshotFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
case 26:
{
snapshotId_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000004;
break;
} // case 26
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The NetApp volume to create the snapshots of, in the format
* `projects/{project_id}/locations/{location}/volumes/{volume_id}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The NetApp volume to create the snapshots of, in the format
* `projects/{project_id}/locations/{location}/volumes/{volume_id}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The NetApp volume to create the snapshots of, in the format
* `projects/{project_id}/locations/{location}/volumes/{volume_id}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
public Builder setParent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The NetApp volume to create the snapshots of, in the format
* `projects/{project_id}/locations/{location}/volumes/{volume_id}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearParent() {
parent_ = getDefaultInstance().getParent();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The NetApp volume to create the snapshots of, in the format
* `projects/{project_id}/locations/{location}/volumes/{volume_id}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
*/
public Builder setParentBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private com.google.cloud.netapp.v1.Snapshot snapshot_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.netapp.v1.Snapshot,
com.google.cloud.netapp.v1.Snapshot.Builder,
com.google.cloud.netapp.v1.SnapshotOrBuilder>
snapshotBuilder_;
/**
*
*
* <pre>
* Required. A snapshot resource
* </pre>
*
* <code>
* .google.cloud.netapp.v1.Snapshot snapshot = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the snapshot field is set.
*/
public boolean hasSnapshot() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. A snapshot resource
* </pre>
*
* <code>
* .google.cloud.netapp.v1.Snapshot snapshot = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The snapshot.
*/
public com.google.cloud.netapp.v1.Snapshot getSnapshot() {
if (snapshotBuilder_ == null) {
return snapshot_ == null
? com.google.cloud.netapp.v1.Snapshot.getDefaultInstance()
: snapshot_;
} else {
return snapshotBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. A snapshot resource
* </pre>
*
* <code>
* .google.cloud.netapp.v1.Snapshot snapshot = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setSnapshot(com.google.cloud.netapp.v1.Snapshot value) {
if (snapshotBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
snapshot_ = value;
} else {
snapshotBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. A snapshot resource
* </pre>
*
* <code>
* .google.cloud.netapp.v1.Snapshot snapshot = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setSnapshot(com.google.cloud.netapp.v1.Snapshot.Builder builderForValue) {
if (snapshotBuilder_ == null) {
snapshot_ = builderForValue.build();
} else {
snapshotBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. A snapshot resource
* </pre>
*
* <code>
* .google.cloud.netapp.v1.Snapshot snapshot = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeSnapshot(com.google.cloud.netapp.v1.Snapshot value) {
if (snapshotBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)
&& snapshot_ != null
&& snapshot_ != com.google.cloud.netapp.v1.Snapshot.getDefaultInstance()) {
getSnapshotBuilder().mergeFrom(value);
} else {
snapshot_ = value;
}
} else {
snapshotBuilder_.mergeFrom(value);
}
if (snapshot_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. A snapshot resource
* </pre>
*
* <code>
* .google.cloud.netapp.v1.Snapshot snapshot = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearSnapshot() {
bitField0_ = (bitField0_ & ~0x00000002);
snapshot_ = null;
if (snapshotBuilder_ != null) {
snapshotBuilder_.dispose();
snapshotBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. A snapshot resource
* </pre>
*
* <code>
* .google.cloud.netapp.v1.Snapshot snapshot = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.netapp.v1.Snapshot.Builder getSnapshotBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getSnapshotFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. A snapshot resource
* </pre>
*
* <code>
* .google.cloud.netapp.v1.Snapshot snapshot = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.netapp.v1.SnapshotOrBuilder getSnapshotOrBuilder() {
if (snapshotBuilder_ != null) {
return snapshotBuilder_.getMessageOrBuilder();
} else {
return snapshot_ == null
? com.google.cloud.netapp.v1.Snapshot.getDefaultInstance()
: snapshot_;
}
}
/**
*
*
* <pre>
* Required. A snapshot resource
* </pre>
*
* <code>
* .google.cloud.netapp.v1.Snapshot snapshot = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.netapp.v1.Snapshot,
com.google.cloud.netapp.v1.Snapshot.Builder,
com.google.cloud.netapp.v1.SnapshotOrBuilder>
getSnapshotFieldBuilder() {
if (snapshotBuilder_ == null) {
snapshotBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.netapp.v1.Snapshot,
com.google.cloud.netapp.v1.Snapshot.Builder,
com.google.cloud.netapp.v1.SnapshotOrBuilder>(
getSnapshot(), getParentForChildren(), isClean());
snapshot_ = null;
}
return snapshotBuilder_;
}
private java.lang.Object snapshotId_ = "";
/**
*
*
* <pre>
* Required. ID of the snapshot to create. Must be unique within the parent
* resource. Must contain only letters, numbers and hyphen, with the first
* character a letter, the last a letter or a
* number, and a 63 character maximum.
* </pre>
*
* <code>string snapshot_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The snapshotId.
*/
public java.lang.String getSnapshotId() {
java.lang.Object ref = snapshotId_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
snapshotId_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. ID of the snapshot to create. Must be unique within the parent
* resource. Must contain only letters, numbers and hyphen, with the first
* character a letter, the last a letter or a
* number, and a 63 character maximum.
* </pre>
*
* <code>string snapshot_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for snapshotId.
*/
public com.google.protobuf.ByteString getSnapshotIdBytes() {
java.lang.Object ref = snapshotId_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
snapshotId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. ID of the snapshot to create. Must be unique within the parent
* resource. Must contain only letters, numbers and hyphen, with the first
* character a letter, the last a letter or a
* number, and a 63 character maximum.
* </pre>
*
* <code>string snapshot_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The snapshotId to set.
* @return This builder for chaining.
*/
public Builder setSnapshotId(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
snapshotId_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. ID of the snapshot to create. Must be unique within the parent
* resource. Must contain only letters, numbers and hyphen, with the first
* character a letter, the last a letter or a
* number, and a 63 character maximum.
* </pre>
*
* <code>string snapshot_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return This builder for chaining.
*/
public Builder clearSnapshotId() {
snapshotId_ = getDefaultInstance().getSnapshotId();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. ID of the snapshot to create. Must be unique within the parent
* resource. Must contain only letters, numbers and hyphen, with the first
* character a letter, the last a letter or a
* number, and a 63 character maximum.
* </pre>
*
* <code>string snapshot_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The bytes for snapshotId to set.
* @return This builder for chaining.
*/
public Builder setSnapshotIdBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
snapshotId_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.netapp.v1.CreateSnapshotRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.netapp.v1.CreateSnapshotRequest)
private static final com.google.cloud.netapp.v1.CreateSnapshotRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.netapp.v1.CreateSnapshotRequest();
}
public static com.google.cloud.netapp.v1.CreateSnapshotRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<CreateSnapshotRequest> PARSER =
new com.google.protobuf.AbstractParser<CreateSnapshotRequest>() {
@java.lang.Override
public CreateSnapshotRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<CreateSnapshotRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<CreateSnapshotRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.netapp.v1.CreateSnapshotRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 36,680 | java-container/proto-google-cloud-container-v1beta1/src/main/java/com/google/container/v1beta1/ListUsableSubnetworksRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/container/v1beta1/cluster_service.proto
// Protobuf Java Version: 3.25.8
package com.google.container.v1beta1;
/**
*
*
* <pre>
* ListUsableSubnetworksRequest requests the list of usable subnetworks.
* available to a user for creating clusters.
* </pre>
*
* Protobuf type {@code google.container.v1beta1.ListUsableSubnetworksRequest}
*/
public final class ListUsableSubnetworksRequest extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.container.v1beta1.ListUsableSubnetworksRequest)
    ListUsableSubnetworksRequestOrBuilder {
  // NOTE(review): protoc-generated message class — change the .proto definition and
  // regenerate rather than editing this file by hand.
  private static final long serialVersionUID = 0L;
  // Use ListUsableSubnetworksRequest.newBuilder() to construct.
  private ListUsableSubnetworksRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private ListUsableSubnetworksRequest() {
    parent_ = "";
    filter_ = "";
    pageToken_ = "";
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListUsableSubnetworksRequest();
  }
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.container.v1beta1.ClusterServiceProto
        .internal_static_google_container_v1beta1_ListUsableSubnetworksRequest_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.container.v1beta1.ClusterServiceProto
        .internal_static_google_container_v1beta1_ListUsableSubnetworksRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.container.v1beta1.ListUsableSubnetworksRequest.class,
            com.google.container.v1beta1.ListUsableSubnetworksRequest.Builder.class);
  }
  public static final int PARENT_FIELD_NUMBER = 1;
  // Holds either a java.lang.String or a com.google.protobuf.ByteString; decoded
  // lazily and cached on first access (see getParent()/getParentBytes()). Safe
  // without locking because both representations are immutable and the field is
  // volatile.
  @SuppressWarnings("serial")
  private volatile java.lang.Object parent_ = "";
  /**
   *
   *
   * <pre>
   * Required. The parent project where subnetworks are usable.
   * Specified in the format `projects/*`.
   * </pre>
   *
   * <code>string parent = 1 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The parent.
   */
  @java.lang.Override
  public java.lang.String getParent() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      // Cache the decoded form so subsequent calls skip the UTF-8 conversion.
      parent_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Required. The parent project where subnetworks are usable.
   * Specified in the format `projects/*`.
   * </pre>
   *
   * <code>string parent = 1 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The bytes for parent.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getParentBytes() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      // Cache the encoded form for future serialization.
      parent_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int FILTER_FIELD_NUMBER = 2;
  // Same String/ByteString lazy-caching scheme as parent_.
  @SuppressWarnings("serial")
  private volatile java.lang.Object filter_ = "";
  /**
   *
   *
   * <pre>
   * Filtering currently only supports equality on the networkProjectId and must
   * be in the form: "networkProjectId=[PROJECTID]", where `networkProjectId`
   * is the project which owns the listed subnetworks. This defaults to the
   * parent project ID.
   * </pre>
   *
   * <code>string filter = 2;</code>
   *
   * @return The filter.
   */
  @java.lang.Override
  public java.lang.String getFilter() {
    java.lang.Object ref = filter_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      filter_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Filtering currently only supports equality on the networkProjectId and must
   * be in the form: "networkProjectId=[PROJECTID]", where `networkProjectId`
   * is the project which owns the listed subnetworks. This defaults to the
   * parent project ID.
   * </pre>
   *
   * <code>string filter = 2;</code>
   *
   * @return The bytes for filter.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getFilterBytes() {
    java.lang.Object ref = filter_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      filter_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int PAGE_SIZE_FIELD_NUMBER = 3;
  private int pageSize_ = 0;
  /**
   *
   *
   * <pre>
   * The max number of results per page that should be returned. If the number
   * of available results is larger than `page_size`, a `next_page_token` is
   * returned which can be used to get the next page of results in subsequent
   * requests. Acceptable values are 0 to 500, inclusive. (Default: 500)
   * </pre>
   *
   * <code>int32 page_size = 3;</code>
   *
   * @return The pageSize.
   */
  @java.lang.Override
  public int getPageSize() {
    return pageSize_;
  }
  public static final int PAGE_TOKEN_FIELD_NUMBER = 4;
  // Same String/ByteString lazy-caching scheme as parent_.
  @SuppressWarnings("serial")
  private volatile java.lang.Object pageToken_ = "";
  /**
   *
   *
   * <pre>
   * Specifies a page token to use. Set this to the nextPageToken returned by
   * previous list requests to get the next page of results.
   * </pre>
   *
   * <code>string page_token = 4;</code>
   *
   * @return The pageToken.
   */
  @java.lang.Override
  public java.lang.String getPageToken() {
    java.lang.Object ref = pageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      pageToken_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Specifies a page token to use. Set this to the nextPageToken returned by
   * previous list requests to get the next page of results.
   * </pre>
   *
   * <code>string page_token = 4;</code>
   *
   * @return The bytes for pageToken.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getPageTokenBytes() {
    java.lang.Object ref = pageToken_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      pageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Memoized isInitialized() result: -1 = not yet computed, 0 = false, 1 = true.
  // This message has no required proto2-style fields, so it is always initialized.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes only non-default fields, in field-number order, per proto3 rules.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, filter_);
    }
    if (pageSize_ != 0) {
      output.writeInt32(3, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 4, pageToken_);
    }
    getUnknownFields().writeTo(output);
  }
  // Computes (and memoizes in memoizedSize) the serialized byte size; mirrors writeTo.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, filter_);
    }
    if (pageSize_ != 0) {
      size += com.google.protobuf.CodedOutputStream.computeInt32Size(3, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, pageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Field-by-field equality, including unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.container.v1beta1.ListUsableSubnetworksRequest)) {
      return super.equals(obj);
    }
    com.google.container.v1beta1.ListUsableSubnetworksRequest other =
        (com.google.container.v1beta1.ListUsableSubnetworksRequest) obj;
    if (!getParent().equals(other.getParent())) return false;
    if (!getFilter().equals(other.getFilter())) return false;
    if (getPageSize() != other.getPageSize()) return false;
    if (!getPageToken().equals(other.getPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Hash mixes the descriptor, each field number/value pair, and unknown fields;
  // memoized in memoizedHashCode (0 means "not yet computed").
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    hash = (37 * hash) + FILTER_FIELD_NUMBER;
    hash = (53 * hash) + getFilter().hashCode();
    hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
    hash = (53 * hash) + getPageSize();
    hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parseFrom overloads; all delegate to PARSER or the
  // GeneratedMessageV3 IO helpers.
  public static com.google.container.v1beta1.ListUsableSubnetworksRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.container.v1beta1.ListUsableSubnetworksRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.container.v1beta1.ListUsableSubnetworksRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.container.v1beta1.ListUsableSubnetworksRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.container.v1beta1.ListUsableSubnetworksRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.container.v1beta1.ListUsableSubnetworksRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.container.v1beta1.ListUsableSubnetworksRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.container.v1beta1.ListUsableSubnetworksRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.container.v1beta1.ListUsableSubnetworksRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.container.v1beta1.ListUsableSubnetworksRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.container.v1beta1.ListUsableSubnetworksRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.container.v1beta1.ListUsableSubnetworksRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(
      com.google.container.v1beta1.ListUsableSubnetworksRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // The default instance yields a fresh empty Builder; anything else is copied in.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * ListUsableSubnetworksRequest requests the list of usable subnetworks.
   * available to a user for creating clusters.
   * </pre>
   *
   * Protobuf type {@code google.container.v1beta1.ListUsableSubnetworksRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.container.v1beta1.ListUsableSubnetworksRequest)
      com.google.container.v1beta1.ListUsableSubnetworksRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.container.v1beta1.ClusterServiceProto
          .internal_static_google_container_v1beta1_ListUsableSubnetworksRequest_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.container.v1beta1.ClusterServiceProto
          .internal_static_google_container_v1beta1_ListUsableSubnetworksRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.container.v1beta1.ListUsableSubnetworksRequest.class,
              com.google.container.v1beta1.ListUsableSubnetworksRequest.Builder.class);
    }
    // Construct using com.google.container.v1beta1.ListUsableSubnetworksRequest.newBuilder()
    private Builder() {}
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      parent_ = "";
      filter_ = "";
      pageSize_ = 0;
      pageToken_ = "";
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.container.v1beta1.ClusterServiceProto
          .internal_static_google_container_v1beta1_ListUsableSubnetworksRequest_descriptor;
    }
    @java.lang.Override
    public com.google.container.v1beta1.ListUsableSubnetworksRequest getDefaultInstanceForType() {
      return com.google.container.v1beta1.ListUsableSubnetworksRequest.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.container.v1beta1.ListUsableSubnetworksRequest build() {
      com.google.container.v1beta1.ListUsableSubnetworksRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.container.v1beta1.ListUsableSubnetworksRequest buildPartial() {
      com.google.container.v1beta1.ListUsableSubnetworksRequest result =
          new com.google.container.v1beta1.ListUsableSubnetworksRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Copies only the fields whose presence bits are set in bitField0_ into result.
    private void buildPartial0(com.google.container.v1beta1.ListUsableSubnetworksRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.parent_ = parent_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.filter_ = filter_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.pageSize_ = pageSize_;
      }
      if (((from_bitField0_ & 0x00000008) != 0)) {
        result.pageToken_ = pageToken_;
      }
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.container.v1beta1.ListUsableSubnetworksRequest) {
        return mergeFrom((com.google.container.v1beta1.ListUsableSubnetworksRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Proto3 merge semantics: only non-default fields of `other` overwrite this builder.
    public Builder mergeFrom(com.google.container.v1beta1.ListUsableSubnetworksRequest other) {
      if (other == com.google.container.v1beta1.ListUsableSubnetworksRequest.getDefaultInstance())
        return this;
      if (!other.getParent().isEmpty()) {
        parent_ = other.parent_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (!other.getFilter().isEmpty()) {
        filter_ = other.filter_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (other.getPageSize() != 0) {
        setPageSize(other.getPageSize());
      }
      if (!other.getPageToken().isEmpty()) {
        pageToken_ = other.pageToken_;
        bitField0_ |= 0x00000008;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Wire-format parse loop: dispatches on the tag (field number << 3 | wire type);
    // tag 0 or an end-group marker terminates the loop, anything unrecognized goes
    // to the unknown-field set.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                parent_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                filter_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 24:
              {
                pageSize_ = input.readInt32();
                bitField0_ |= 0x00000004;
                break;
              } // case 24
            case 34:
              {
                pageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000008;
                break;
              } // case 34
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Presence bits for fields set on this builder:
    // bit 0 = parent, bit 1 = filter, bit 2 = pageSize, bit 3 = pageToken.
    private int bitField0_;
    private java.lang.Object parent_ = "";
    /**
     *
     *
     * <pre>
     * Required. The parent project where subnetworks are usable.
     * Specified in the format `projects/*`.
     * </pre>
     *
     * <code>string parent = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return The parent.
     */
    public java.lang.String getParent() {
      java.lang.Object ref = parent_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        parent_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The parent project where subnetworks are usable.
     * Specified in the format `projects/*`.
     * </pre>
     *
     * <code>string parent = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return The bytes for parent.
     */
    public com.google.protobuf.ByteString getParentBytes() {
      java.lang.Object ref = parent_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        parent_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The parent project where subnetworks are usable.
     * Specified in the format `projects/*`.
     * </pre>
     *
     * <code>string parent = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @param value The parent to set.
     * @return This builder for chaining.
     */
    public Builder setParent(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The parent project where subnetworks are usable.
     * Specified in the format `projects/*`.
     * </pre>
     *
     * <code>string parent = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearParent() {
      parent_ = getDefaultInstance().getParent();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The parent project where subnetworks are usable.
     * Specified in the format `projects/*`.
     * </pre>
     *
     * <code>string parent = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @param value The bytes for parent to set.
     * @return This builder for chaining.
     */
    public Builder setParentBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    private java.lang.Object filter_ = "";
    /**
     *
     *
     * <pre>
     * Filtering currently only supports equality on the networkProjectId and must
     * be in the form: "networkProjectId=[PROJECTID]", where `networkProjectId`
     * is the project which owns the listed subnetworks. This defaults to the
     * parent project ID.
     * </pre>
     *
     * <code>string filter = 2;</code>
     *
     * @return The filter.
     */
    public java.lang.String getFilter() {
      java.lang.Object ref = filter_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        filter_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Filtering currently only supports equality on the networkProjectId and must
     * be in the form: "networkProjectId=[PROJECTID]", where `networkProjectId`
     * is the project which owns the listed subnetworks. This defaults to the
     * parent project ID.
     * </pre>
     *
     * <code>string filter = 2;</code>
     *
     * @return The bytes for filter.
     */
    public com.google.protobuf.ByteString getFilterBytes() {
      java.lang.Object ref = filter_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        filter_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Filtering currently only supports equality on the networkProjectId and must
     * be in the form: "networkProjectId=[PROJECTID]", where `networkProjectId`
     * is the project which owns the listed subnetworks. This defaults to the
     * parent project ID.
     * </pre>
     *
     * <code>string filter = 2;</code>
     *
     * @param value The filter to set.
     * @return This builder for chaining.
     */
    public Builder setFilter(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      filter_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Filtering currently only supports equality on the networkProjectId and must
     * be in the form: "networkProjectId=[PROJECTID]", where `networkProjectId`
     * is the project which owns the listed subnetworks. This defaults to the
     * parent project ID.
     * </pre>
     *
     * <code>string filter = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearFilter() {
      filter_ = getDefaultInstance().getFilter();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Filtering currently only supports equality on the networkProjectId and must
     * be in the form: "networkProjectId=[PROJECTID]", where `networkProjectId`
     * is the project which owns the listed subnetworks. This defaults to the
     * parent project ID.
     * </pre>
     *
     * <code>string filter = 2;</code>
     *
     * @param value The bytes for filter to set.
     * @return This builder for chaining.
     */
    public Builder setFilterBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      filter_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    private int pageSize_;
    /**
     *
     *
     * <pre>
     * The max number of results per page that should be returned. If the number
     * of available results is larger than `page_size`, a `next_page_token` is
     * returned which can be used to get the next page of results in subsequent
     * requests. Acceptable values are 0 to 500, inclusive. (Default: 500)
     * </pre>
     *
     * <code>int32 page_size = 3;</code>
     *
     * @return The pageSize.
     */
    @java.lang.Override
    public int getPageSize() {
      return pageSize_;
    }
    /**
     *
     *
     * <pre>
     * The max number of results per page that should be returned. If the number
     * of available results is larger than `page_size`, a `next_page_token` is
     * returned which can be used to get the next page of results in subsequent
     * requests. Acceptable values are 0 to 500, inclusive. (Default: 500)
     * </pre>
     *
     * <code>int32 page_size = 3;</code>
     *
     * @param value The pageSize to set.
     * @return This builder for chaining.
     */
    public Builder setPageSize(int value) {
      pageSize_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The max number of results per page that should be returned. If the number
     * of available results is larger than `page_size`, a `next_page_token` is
     * returned which can be used to get the next page of results in subsequent
     * requests. Acceptable values are 0 to 500, inclusive. (Default: 500)
     * </pre>
     *
     * <code>int32 page_size = 3;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearPageSize() {
      bitField0_ = (bitField0_ & ~0x00000004);
      pageSize_ = 0;
      onChanged();
      return this;
    }
    private java.lang.Object pageToken_ = "";
    /**
     *
     *
     * <pre>
     * Specifies a page token to use. Set this to the nextPageToken returned by
     * previous list requests to get the next page of results.
     * </pre>
     *
     * <code>string page_token = 4;</code>
     *
     * @return The pageToken.
     */
    public java.lang.String getPageToken() {
      java.lang.Object ref = pageToken_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        pageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Specifies a page token to use. Set this to the nextPageToken returned by
     * previous list requests to get the next page of results.
     * </pre>
     *
     * <code>string page_token = 4;</code>
     *
     * @return The bytes for pageToken.
     */
    public com.google.protobuf.ByteString getPageTokenBytes() {
      java.lang.Object ref = pageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        pageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Specifies a page token to use. Set this to the nextPageToken returned by
     * previous list requests to get the next page of results.
     * </pre>
     *
     * <code>string page_token = 4;</code>
     *
     * @param value The pageToken to set.
     * @return This builder for chaining.
     */
    public Builder setPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      pageToken_ = value;
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Specifies a page token to use. Set this to the nextPageToken returned by
     * previous list requests to get the next page of results.
     * </pre>
     *
     * <code>string page_token = 4;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearPageToken() {
      pageToken_ = getDefaultInstance().getPageToken();
      bitField0_ = (bitField0_ & ~0x00000008);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Specifies a page token to use. Set this to the nextPageToken returned by
     * previous list requests to get the next page of results.
     * </pre>
     *
     * <code>string page_token = 4;</code>
     *
     * @param value The bytes for pageToken to set.
     * @return This builder for chaining.
     */
    public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      pageToken_ = value;
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.container.v1beta1.ListUsableSubnetworksRequest)
  }
  // @@protoc_insertion_point(class_scope:google.container.v1beta1.ListUsableSubnetworksRequest)
  private static final com.google.container.v1beta1.ListUsableSubnetworksRequest DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.container.v1beta1.ListUsableSubnetworksRequest();
  }
  public static com.google.container.v1beta1.ListUsableSubnetworksRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Stream parser used by all parseFrom overloads; delegates to Builder.mergeFrom
  // and attaches the partially built message to any parse exception.
  private static final com.google.protobuf.Parser<ListUsableSubnetworksRequest> PARSER =
      new com.google.protobuf.AbstractParser<ListUsableSubnetworksRequest>() {
        @java.lang.Override
        public ListUsableSubnetworksRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<ListUsableSubnetworksRequest> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<ListUsableSubnetworksRequest> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.container.v1beta1.ListUsableSubnetworksRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 36,811 | java-aiplatform/proto-google-cloud-aiplatform-v1/src/main/java/com/google/cloud/aiplatform/v1/BatchReadTensorboardTimeSeriesDataRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/aiplatform/v1/tensorboard_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.aiplatform.v1;
/**
*
*
* <pre>
* Request message for
* [TensorboardService.BatchReadTensorboardTimeSeriesData][google.cloud.aiplatform.v1.TensorboardService.BatchReadTensorboardTimeSeriesData].
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1.BatchReadTensorboardTimeSeriesDataRequest}
*/
public final class BatchReadTensorboardTimeSeriesDataRequest
extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1.BatchReadTensorboardTimeSeriesDataRequest)
BatchReadTensorboardTimeSeriesDataRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use BatchReadTensorboardTimeSeriesDataRequest.newBuilder() to construct.
private BatchReadTensorboardTimeSeriesDataRequest(
com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private BatchReadTensorboardTimeSeriesDataRequest() {
tensorboard_ = "";
timeSeries_ = com.google.protobuf.LazyStringArrayList.emptyList();
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new BatchReadTensorboardTimeSeriesDataRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.aiplatform.v1.TensorboardServiceProto
.internal_static_google_cloud_aiplatform_v1_BatchReadTensorboardTimeSeriesDataRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.aiplatform.v1.TensorboardServiceProto
.internal_static_google_cloud_aiplatform_v1_BatchReadTensorboardTimeSeriesDataRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.aiplatform.v1.BatchReadTensorboardTimeSeriesDataRequest.class,
com.google.cloud.aiplatform.v1.BatchReadTensorboardTimeSeriesDataRequest.Builder.class);
}
  public static final int TENSORBOARD_FIELD_NUMBER = 1;
  // Holds either a java.lang.String or a ByteString; UTF-8 decoding is done lazily
  // and the decoded String is cached back into the field (hence volatile).
  @SuppressWarnings("serial")
  private volatile java.lang.Object tensorboard_ = "";
  /**
   *
   *
   * <pre>
   * Required. The resource name of the Tensorboard containing
   * TensorboardTimeSeries to read data from. Format:
   * `projects/{project}/locations/{location}/tensorboards/{tensorboard}`.
   * The TensorboardTimeSeries referenced by
   * [time_series][google.cloud.aiplatform.v1.BatchReadTensorboardTimeSeriesDataRequest.time_series]
   * must be sub resources of this Tensorboard.
   * </pre>
   *
   * <code>
   * string tensorboard = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The tensorboard.
   */
  @java.lang.Override
  public java.lang.String getTensorboard() {
    java.lang.Object ref = tensorboard_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      // Cache the decoded String so subsequent calls skip UTF-8 decoding.
      tensorboard_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Required. The resource name of the Tensorboard containing
   * TensorboardTimeSeries to read data from. Format:
   * `projects/{project}/locations/{location}/tensorboards/{tensorboard}`.
   * The TensorboardTimeSeries referenced by
   * [time_series][google.cloud.aiplatform.v1.BatchReadTensorboardTimeSeriesDataRequest.time_series]
   * must be sub resources of this Tensorboard.
   * </pre>
   *
   * <code>
   * string tensorboard = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The bytes for tensorboard.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getTensorboardBytes() {
    java.lang.Object ref = tensorboard_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      // Cache the ByteString form (mirror of the lazy caching in getTensorboard()).
      tensorboard_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int TIME_SERIES_FIELD_NUMBER = 2;
  // Repeated string field; stored as a LazyStringArrayList (strings decoded on demand).
  @SuppressWarnings("serial")
  private com.google.protobuf.LazyStringArrayList timeSeries_ =
      com.google.protobuf.LazyStringArrayList.emptyList();
  /**
   *
   *
   * <pre>
   * Required. The resource names of the TensorboardTimeSeries to read data
   * from. Format:
   * `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}/runs/{run}/timeSeries/{time_series}`
   * </pre>
   *
   * <code>
   * repeated string time_series = 2 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return A list containing the timeSeries.
   */
  public com.google.protobuf.ProtocolStringList getTimeSeriesList() {
    return timeSeries_;
  }
  /**
   *
   *
   * <pre>
   * Required. The resource names of the TensorboardTimeSeries to read data
   * from. Format:
   * `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}/runs/{run}/timeSeries/{time_series}`
   * </pre>
   *
   * <code>
   * repeated string time_series = 2 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The count of timeSeries.
   */
  public int getTimeSeriesCount() {
    return timeSeries_.size();
  }
  /**
   *
   *
   * <pre>
   * Required. The resource names of the TensorboardTimeSeries to read data
   * from. Format:
   * `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}/runs/{run}/timeSeries/{time_series}`
   * </pre>
   *
   * <code>
   * repeated string time_series = 2 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @param index The index of the element to return.
   * @return The timeSeries at the given index.
   */
  public java.lang.String getTimeSeries(int index) {
    return timeSeries_.get(index);
  }
  /**
   *
   *
   * <pre>
   * Required. The resource names of the TensorboardTimeSeries to read data
   * from. Format:
   * `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}/runs/{run}/timeSeries/{time_series}`
   * </pre>
   *
   * <code>
   * repeated string time_series = 2 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @param index The index of the value to return.
   * @return The bytes of the timeSeries at the given index.
   */
  public com.google.protobuf.ByteString getTimeSeriesBytes(int index) {
    return timeSeries_.getByteString(index);
  }
  // Tri-state memo: -1 = not computed yet, 0 = not initialized, 1 = initialized.
  private byte memoizedIsInitialized = -1;
  // Always initialized: this message declares no fields that could be uninitialized,
  // so the memo is simply set to 1 on first call.
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes the message in field-number order: field 1 (tensorboard, only if
  // non-empty, per proto3 default-elision), field 2 (each time_series entry),
  // then any unknown fields preserved from parsing.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(tensorboard_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, tensorboard_);
    }
    for (int i = 0; i < timeSeries_.size(); i++) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, timeSeries_.getRaw(i));
    }
    getUnknownFields().writeTo(output);
  }
  // Computes (and memoizes in memoizedSize) the serialized byte size; must mirror
  // the exact layout produced by writeTo().
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(tensorboard_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, tensorboard_);
    }
    {
      int dataSize = 0;
      for (int i = 0; i < timeSeries_.size(); i++) {
        dataSize += computeStringSizeNoTag(timeSeries_.getRaw(i));
      }
      size += dataSize;
      // One tag byte per repeated-string element (field number 2 fits in a 1-byte tag).
      size += 1 * getTimeSeriesList().size();
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Structural equality over all declared fields plus unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj
        instanceof com.google.cloud.aiplatform.v1.BatchReadTensorboardTimeSeriesDataRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.aiplatform.v1.BatchReadTensorboardTimeSeriesDataRequest other =
        (com.google.cloud.aiplatform.v1.BatchReadTensorboardTimeSeriesDataRequest) obj;
    if (!getTensorboard().equals(other.getTensorboard())) return false;
    if (!getTimeSeriesList().equals(other.getTimeSeriesList())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Memoized hash consistent with equals(); mixes descriptor, field numbers,
  // field values and unknown fields using the standard generated 19/37/53/29 scheme.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + TENSORBOARD_FIELD_NUMBER;
    hash = (53 * hash) + getTensorboard().hashCode();
    // Repeated field only contributes when non-empty, keeping the hash stable
    // between "unset" and "empty list" states.
    if (getTimeSeriesCount() > 0) {
      hash = (37 * hash) + TIME_SERIES_FIELD_NUMBER;
      hash = (53 * hash) + getTimeSeriesList().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points: one overload per input source
  // (ByteBuffer, ByteString, byte[], InputStream, CodedInputStream), each with and
  // without an ExtensionRegistryLite. All delegate to the shared PARSER below.
  public static com.google.cloud.aiplatform.v1.BatchReadTensorboardTimeSeriesDataRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.aiplatform.v1.BatchReadTensorboardTimeSeriesDataRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.aiplatform.v1.BatchReadTensorboardTimeSeriesDataRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.aiplatform.v1.BatchReadTensorboardTimeSeriesDataRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.aiplatform.v1.BatchReadTensorboardTimeSeriesDataRequest parseFrom(
      byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.aiplatform.v1.BatchReadTensorboardTimeSeriesDataRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.aiplatform.v1.BatchReadTensorboardTimeSeriesDataRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.aiplatform.v1.BatchReadTensorboardTimeSeriesDataRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Delimited variants read a varint length prefix before the message payload.
  public static com.google.cloud.aiplatform.v1.BatchReadTensorboardTimeSeriesDataRequest
      parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.aiplatform.v1.BatchReadTensorboardTimeSeriesDataRequest
      parseDelimitedFrom(
          java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.aiplatform.v1.BatchReadTensorboardTimeSeriesDataRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.aiplatform.v1.BatchReadTensorboardTimeSeriesDataRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  // Fresh builder with all fields at their defaults.
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  // Builder pre-populated from an existing message.
  public static Builder newBuilder(
      com.google.cloud.aiplatform.v1.BatchReadTensorboardTimeSeriesDataRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // Avoid a redundant mergeFrom when converting the (all-defaults) default instance.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Request message for
* [TensorboardService.BatchReadTensorboardTimeSeriesData][google.cloud.aiplatform.v1.TensorboardService.BatchReadTensorboardTimeSeriesData].
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1.BatchReadTensorboardTimeSeriesDataRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1.BatchReadTensorboardTimeSeriesDataRequest)
com.google.cloud.aiplatform.v1.BatchReadTensorboardTimeSeriesDataRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.aiplatform.v1.TensorboardServiceProto
.internal_static_google_cloud_aiplatform_v1_BatchReadTensorboardTimeSeriesDataRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.aiplatform.v1.TensorboardServiceProto
.internal_static_google_cloud_aiplatform_v1_BatchReadTensorboardTimeSeriesDataRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.aiplatform.v1.BatchReadTensorboardTimeSeriesDataRequest.class,
com.google.cloud.aiplatform.v1.BatchReadTensorboardTimeSeriesDataRequest.Builder
.class);
}
// Construct using
// com.google.cloud.aiplatform.v1.BatchReadTensorboardTimeSeriesDataRequest.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
tensorboard_ = "";
timeSeries_ = com.google.protobuf.LazyStringArrayList.emptyList();
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.aiplatform.v1.TensorboardServiceProto
.internal_static_google_cloud_aiplatform_v1_BatchReadTensorboardTimeSeriesDataRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1.BatchReadTensorboardTimeSeriesDataRequest
getDefaultInstanceForType() {
return com.google.cloud.aiplatform.v1.BatchReadTensorboardTimeSeriesDataRequest
.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.aiplatform.v1.BatchReadTensorboardTimeSeriesDataRequest build() {
com.google.cloud.aiplatform.v1.BatchReadTensorboardTimeSeriesDataRequest result =
buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1.BatchReadTensorboardTimeSeriesDataRequest buildPartial() {
com.google.cloud.aiplatform.v1.BatchReadTensorboardTimeSeriesDataRequest result =
new com.google.cloud.aiplatform.v1.BatchReadTensorboardTimeSeriesDataRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(
com.google.cloud.aiplatform.v1.BatchReadTensorboardTimeSeriesDataRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.tensorboard_ = tensorboard_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
timeSeries_.makeImmutable();
result.timeSeries_ = timeSeries_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other
instanceof com.google.cloud.aiplatform.v1.BatchReadTensorboardTimeSeriesDataRequest) {
return mergeFrom(
(com.google.cloud.aiplatform.v1.BatchReadTensorboardTimeSeriesDataRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(
com.google.cloud.aiplatform.v1.BatchReadTensorboardTimeSeriesDataRequest other) {
if (other
== com.google.cloud.aiplatform.v1.BatchReadTensorboardTimeSeriesDataRequest
.getDefaultInstance()) return this;
if (!other.getTensorboard().isEmpty()) {
tensorboard_ = other.tensorboard_;
bitField0_ |= 0x00000001;
onChanged();
}
if (!other.timeSeries_.isEmpty()) {
if (timeSeries_.isEmpty()) {
timeSeries_ = other.timeSeries_;
bitField0_ |= 0x00000002;
} else {
ensureTimeSeriesIsMutable();
timeSeries_.addAll(other.timeSeries_);
}
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
tensorboard_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
java.lang.String s = input.readStringRequireUtf8();
ensureTimeSeriesIsMutable();
timeSeries_.add(s);
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object tensorboard_ = "";
/**
*
*
* <pre>
* Required. The resource name of the Tensorboard containing
* TensorboardTimeSeries to read data from. Format:
* `projects/{project}/locations/{location}/tensorboards/{tensorboard}`.
* The TensorboardTimeSeries referenced by
* [time_series][google.cloud.aiplatform.v1.BatchReadTensorboardTimeSeriesDataRequest.time_series]
* must be sub resources of this Tensorboard.
* </pre>
*
* <code>
* string tensorboard = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The tensorboard.
*/
public java.lang.String getTensorboard() {
java.lang.Object ref = tensorboard_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
tensorboard_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The resource name of the Tensorboard containing
* TensorboardTimeSeries to read data from. Format:
* `projects/{project}/locations/{location}/tensorboards/{tensorboard}`.
* The TensorboardTimeSeries referenced by
* [time_series][google.cloud.aiplatform.v1.BatchReadTensorboardTimeSeriesDataRequest.time_series]
* must be sub resources of this Tensorboard.
* </pre>
*
* <code>
* string tensorboard = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for tensorboard.
*/
public com.google.protobuf.ByteString getTensorboardBytes() {
java.lang.Object ref = tensorboard_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
tensorboard_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The resource name of the Tensorboard containing
* TensorboardTimeSeries to read data from. Format:
* `projects/{project}/locations/{location}/tensorboards/{tensorboard}`.
* The TensorboardTimeSeries referenced by
* [time_series][google.cloud.aiplatform.v1.BatchReadTensorboardTimeSeriesDataRequest.time_series]
* must be sub resources of this Tensorboard.
* </pre>
*
* <code>
* string tensorboard = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The tensorboard to set.
* @return This builder for chaining.
*/
public Builder setTensorboard(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
tensorboard_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The resource name of the Tensorboard containing
* TensorboardTimeSeries to read data from. Format:
* `projects/{project}/locations/{location}/tensorboards/{tensorboard}`.
* The TensorboardTimeSeries referenced by
* [time_series][google.cloud.aiplatform.v1.BatchReadTensorboardTimeSeriesDataRequest.time_series]
* must be sub resources of this Tensorboard.
* </pre>
*
* <code>
* string tensorboard = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearTensorboard() {
tensorboard_ = getDefaultInstance().getTensorboard();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The resource name of the Tensorboard containing
* TensorboardTimeSeries to read data from. Format:
* `projects/{project}/locations/{location}/tensorboards/{tensorboard}`.
* The TensorboardTimeSeries referenced by
* [time_series][google.cloud.aiplatform.v1.BatchReadTensorboardTimeSeriesDataRequest.time_series]
* must be sub resources of this Tensorboard.
* </pre>
*
* <code>
* string tensorboard = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for tensorboard to set.
* @return This builder for chaining.
*/
public Builder setTensorboardBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
tensorboard_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private com.google.protobuf.LazyStringArrayList timeSeries_ =
com.google.protobuf.LazyStringArrayList.emptyList();
private void ensureTimeSeriesIsMutable() {
if (!timeSeries_.isModifiable()) {
timeSeries_ = new com.google.protobuf.LazyStringArrayList(timeSeries_);
}
bitField0_ |= 0x00000002;
}
/**
*
*
* <pre>
* Required. The resource names of the TensorboardTimeSeries to read data
* from. Format:
* `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}/runs/{run}/timeSeries/{time_series}`
* </pre>
*
* <code>
* repeated string time_series = 2 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return A list containing the timeSeries.
*/
public com.google.protobuf.ProtocolStringList getTimeSeriesList() {
timeSeries_.makeImmutable();
return timeSeries_;
}
/**
*
*
* <pre>
* Required. The resource names of the TensorboardTimeSeries to read data
* from. Format:
* `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}/runs/{run}/timeSeries/{time_series}`
* </pre>
*
* <code>
* repeated string time_series = 2 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The count of timeSeries.
*/
public int getTimeSeriesCount() {
return timeSeries_.size();
}
/**
*
*
* <pre>
* Required. The resource names of the TensorboardTimeSeries to read data
* from. Format:
* `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}/runs/{run}/timeSeries/{time_series}`
* </pre>
*
* <code>
* repeated string time_series = 2 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param index The index of the element to return.
* @return The timeSeries at the given index.
*/
public java.lang.String getTimeSeries(int index) {
return timeSeries_.get(index);
}
/**
*
*
* <pre>
* Required. The resource names of the TensorboardTimeSeries to read data
* from. Format:
* `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}/runs/{run}/timeSeries/{time_series}`
* </pre>
*
* <code>
* repeated string time_series = 2 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param index The index of the value to return.
* @return The bytes of the timeSeries at the given index.
*/
public com.google.protobuf.ByteString getTimeSeriesBytes(int index) {
return timeSeries_.getByteString(index);
}
/**
*
*
* <pre>
* Required. The resource names of the TensorboardTimeSeries to read data
* from. Format:
* `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}/runs/{run}/timeSeries/{time_series}`
* </pre>
*
* <code>
* repeated string time_series = 2 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param index The index to set the value at.
* @param value The timeSeries to set.
* @return This builder for chaining.
*/
public Builder setTimeSeries(int index, java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
ensureTimeSeriesIsMutable();
timeSeries_.set(index, value);
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The resource names of the TensorboardTimeSeries to read data
* from. Format:
* `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}/runs/{run}/timeSeries/{time_series}`
* </pre>
*
* <code>
* repeated string time_series = 2 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The timeSeries to add.
* @return This builder for chaining.
*/
public Builder addTimeSeries(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
ensureTimeSeriesIsMutable();
timeSeries_.add(value);
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The resource names of the TensorboardTimeSeries to read data
* from. Format:
* `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}/runs/{run}/timeSeries/{time_series}`
* </pre>
*
* <code>
* repeated string time_series = 2 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param values The timeSeries to add.
* @return This builder for chaining.
*/
public Builder addAllTimeSeries(java.lang.Iterable<java.lang.String> values) {
ensureTimeSeriesIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, timeSeries_);
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The resource names of the TensorboardTimeSeries to read data
* from. Format:
* `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}/runs/{run}/timeSeries/{time_series}`
* </pre>
*
* <code>
* repeated string time_series = 2 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearTimeSeries() {
timeSeries_ = com.google.protobuf.LazyStringArrayList.emptyList();
bitField0_ = (bitField0_ & ~0x00000002);
;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The resource names of the TensorboardTimeSeries to read data
* from. Format:
* `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}/runs/{run}/timeSeries/{time_series}`
* </pre>
*
* <code>
* repeated string time_series = 2 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes of the timeSeries to add.
* @return This builder for chaining.
*/
public Builder addTimeSeriesBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
ensureTimeSeriesIsMutable();
timeSeries_.add(value);
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1.BatchReadTensorboardTimeSeriesDataRequest)
}
  // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1.BatchReadTensorboardTimeSeriesDataRequest)
  // Singleton default instance with all fields at their proto3 defaults; created
  // eagerly in the static initializer.
  private static final com.google.cloud.aiplatform.v1.BatchReadTensorboardTimeSeriesDataRequest
      DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE =
        new com.google.cloud.aiplatform.v1.BatchReadTensorboardTimeSeriesDataRequest();
  }
  public static com.google.cloud.aiplatform.v1.BatchReadTensorboardTimeSeriesDataRequest
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Shared parser used by all parseFrom(...) overloads. parsePartialFrom returns a
  // possibly-uninitialized message; exceptions carry the partial message via
  // setUnfinishedMessage so callers can inspect what was parsed before the failure.
  private static final com.google.protobuf.Parser<BatchReadTensorboardTimeSeriesDataRequest>
      PARSER =
          new com.google.protobuf.AbstractParser<BatchReadTensorboardTimeSeriesDataRequest>() {
            @java.lang.Override
            public BatchReadTensorboardTimeSeriesDataRequest parsePartialFrom(
                com.google.protobuf.CodedInputStream input,
                com.google.protobuf.ExtensionRegistryLite extensionRegistry)
                throws com.google.protobuf.InvalidProtocolBufferException {
              Builder builder = newBuilder();
              try {
                builder.mergeFrom(input, extensionRegistry);
              } catch (com.google.protobuf.InvalidProtocolBufferException e) {
                throw e.setUnfinishedMessage(builder.buildPartial());
              } catch (com.google.protobuf.UninitializedMessageException e) {
                throw e.asInvalidProtocolBufferException()
                    .setUnfinishedMessage(builder.buildPartial());
              } catch (java.io.IOException e) {
                // Wrap plain I/O failures so callers see a protobuf-typed exception.
                throw new com.google.protobuf.InvalidProtocolBufferException(e)
                    .setUnfinishedMessage(builder.buildPartial());
              }
              return builder.buildPartial();
            }
          };
  // Static and instance accessors for the shared PARSER, plus the default-instance hook
  // required by the Message interface.
  public static com.google.protobuf.Parser<BatchReadTensorboardTimeSeriesDataRequest> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<BatchReadTensorboardTimeSeriesDataRequest> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.aiplatform.v1.BatchReadTensorboardTimeSeriesDataRequest
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/kylin | 36,933 | src/kylin-it/src/test/java/org/apache/kylin/auto/AutoBasicTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kylin.auto;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import org.apache.commons.io.FileUtils;
import org.apache.kylin.common.KylinConfig;
import org.apache.kylin.common.persistence.transaction.UnitOfWork;
import org.apache.kylin.common.util.Pair;
import org.apache.kylin.common.util.TestUtils;
import org.apache.kylin.guava30.shaded.common.base.Throwables;
import org.apache.kylin.guava30.shaded.common.collect.Lists;
import org.apache.kylin.metadata.cube.model.IndexEntity;
import org.apache.kylin.metadata.cube.model.IndexPlan;
import org.apache.kylin.metadata.cube.model.NIndexPlanManager;
import org.apache.kylin.metadata.model.NDataModel;
import org.apache.kylin.metadata.model.NDataModelManager;
import org.apache.kylin.metadata.recommendation.entity.MeasureRecItemV2;
import org.apache.kylin.query.engine.data.QueryResult;
import org.apache.kylin.rec.AbstractContext;
import org.apache.kylin.rec.ModelOptProposer;
import org.apache.kylin.rec.ModelSelectProposer;
import org.apache.kylin.rec.SmartContext;
import org.apache.kylin.rec.SmartMaster;
import org.apache.kylin.rec.common.AccelerateInfo;
import org.apache.kylin.rec.util.AccelerationUtil;
import org.apache.kylin.util.ExecAndComp;
import org.apache.kylin.util.ExecAndCompExt;
import org.apache.kylin.util.MetadataTestUtils;
import org.apache.kylin.util.QueryResultComparator;
import org.apache.kylin.util.SuggestTestBase;
import org.apache.spark.sql.KapFunctions;
import org.apache.spark.sql.SparderEnv;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.udf.UdfManager;
import org.hamcrest.CoreMatchers;
import org.hamcrest.MatcherAssert;
import org.junit.Assert;
import org.junit.Test;
import lombok.val;
/**
 * Integration tests for Kylin's auto-modeling ("smart master") pipeline: proposing models and
 * index plans from SQL, reusing or creating models in semi-auto mode, and verifying that the
 * resulting indexes can serve the originating queries via Spark-based result comparison.
 *
 * <p>Relies heavily on infrastructure from {@link SuggestTestBase} (e.g. {@code fetchQueries},
 * {@code buildAllModels}, {@code populateSSWithCSVData}, {@code kylinConfig}, {@code ss}).
 * S2699 ("tests should include assertions") is suppressed because several tests assert only
 * through {@code ExecAndCompExt.execAndCompare} / {@code TestScenario.execute()}.
 */
@SuppressWarnings("squid:S2699")
public class AutoBasicTest extends SuggestTestBase {
    /**
     * End-to-end single-model scenario: proposes a model from one query, then verifies that a
     * follow-up left-join query on the same fact table updates the SAME model, that bad SQL
     * yields no model context, and that an inner-join query creates a DIFFERENT model.
     * Finally re-runs all queries and compares results against Spark.
     */
    @Test
    public void testAutoSingleModel() throws Exception {
        // 1. Create simple model with one fact table
        String targetModelId;
        {
            List<Pair<String, String>> queries = fetchQueries("sql_for_automodeling/sql", 0, 1);
            AbstractContext context = proposeWithSmartMaster(queries);
            buildAllModels(kylinConfig, getProject());
            List<AbstractContext.ModelContext> modelContexts = context.getModelContexts();
            Assert.assertEquals(1, modelContexts.size());
            AbstractContext.ModelContext modelContext = modelContexts.get(0);
            NDataModel dataModel = modelContext.getTargetModel();
            Assert.assertNotNull(dataModel);
            // remember the proposed model's id so later phases can check reuse vs. creation
            targetModelId = dataModel.getUuid();
            Assert.assertEquals(1, dataModel.getAllTables().size());
            IndexPlan indexPlan = modelContext.getTargetIndexPlan();
            Assert.assertNotNull(indexPlan);
        }
        // 2. Feed query with left join using same fact table, should update same model
        {
            List<Pair<String, String>> queries = fetchQueries("sql_for_automodeling/sql", 1, 2);
            AbstractContext context = proposeWithSmartMaster(queries);
            buildAllModels(kylinConfig, getProject());
            List<AbstractContext.ModelContext> modelContexts = context.getModelContexts();
            Assert.assertEquals(1, modelContexts.size());
            AbstractContext.ModelContext modelContext = modelContexts.get(0);
            NDataModel dataModel = modelContext.getTargetModel();
            Assert.assertNotNull(dataModel);
            // model identity is only stable when builds actually run — skip-build mode may differ
            if (!TestUtils.isSkipBuild()) {
                Assert.assertEquals(targetModelId, dataModel.getUuid());
            }
            Assert.assertEquals(2, dataModel.getAllTables().size());
            IndexPlan indexPlan = modelContext.getTargetIndexPlan();
            Assert.assertNotNull(indexPlan);
        }
        // 3. Auto suggested model is able to serve related query
        {
            List<Pair<String, String>> queries = fetchQueries("sql_for_automodeling/sql", 0, 3);
            populateSSWithCSVData(kylinConfig, getProject(), SparderEnv.getSparkSession());
            ExecAndCompExt.execAndCompare(queries, getProject(), ExecAndCompExt.CompareLevel.SAME, "default");
        }
        // 4. Feed bad queries
        {
            List<Pair<String, String>> queries = fetchQueries("sql_for_automodeling/sql_bad", 0, 0);
            AbstractContext context = proposeWithSmartMaster(queries);
            buildAllModels(kylinConfig, getProject());
            List<AbstractContext.ModelContext> modelContexts = context.getModelContexts();
            // bad SQL must not produce any model context
            Assert.assertEquals(0, modelContexts.size());
        }
        // 5. Feed query with inner join using same fact table, should create another model
        {
            List<Pair<String, String>> queries = fetchQueries("sql_for_automodeling/sql", 3, 4);
            AbstractContext context = proposeWithSmartMaster(queries);
            buildAllModels(kylinConfig, getProject());
            List<AbstractContext.ModelContext> modelContexts = context.getModelContexts();
            Assert.assertEquals(1, modelContexts.size());
            AbstractContext.ModelContext modelContext = modelContexts.get(0);
            NDataModel dataModel = modelContext.getTargetModel();
            Assert.assertNotNull(dataModel);
            if (!TestUtils.isSkipBuild()) {
                // inner join is incompatible with the earlier left-join model, so a new one is made
                Assert.assertNotEquals(targetModelId, dataModel.getUuid());
            }
            Assert.assertEquals(2, dataModel.getAllTables().size());
            IndexPlan indexPlan = modelContext.getTargetIndexPlan();
            Assert.assertNotNull(indexPlan);
        }
        // 6. Finally, run all queries
        {
            List<Pair<String, String>> queries = fetchQueries("sql_for_automodeling/sql", 0, 4);
            populateSSWithCSVData(kylinConfig, getProject(), SparderEnv.getSparkSession());
            ExecAndCompExt.execAndCompare(queries, getProject(), ExecAndCompExt.CompareLevel.SAME, "default");
        }
        // clean up the embedded metastore created by the build/query steps
        FileUtils.deleteQuietly(new File("../kylin-it/metastore_db"));
    }
    /**
     * Verifies that columns/measures marked TOMB (including a proposed computed column) are
     * treated as deleted, and that re-accelerating in semi-auto mode re-proposes the computed
     * column and the removed measure as recommendations.
     */
    @Test
    public void testUsedColumnsIsTomb() {
        String[] sqls = new String[] { "select lstg_format_name from test_kylin_fact group by lstg_format_name",
                "select sum(price * item_count) from test_kylin_fact" };
        val context = AccelerationUtil.runWithSmartContext(kylinConfig, getProject(), sqls, true);
        Assert.assertFalse(context.getAccelerateInfoMap().get(sqls[0]).isFailed());
        Assert.assertFalse(context.getAccelerateInfoMap().get(sqls[1]).isFailed());
        NDataModel dataModel = context.getModelContexts().get(0).getTargetModel();
        // sum(price * item_count) should have been accelerated via one computed column
        Assert.assertEquals(1, dataModel.getComputedColumnDescs().size());
        UnitOfWork.doInTransactionWithRetry(() -> {
            KylinConfig config = KylinConfig.getInstanceFromEnv();
            NDataModelManager modelManager = NDataModelManager.getInstance(config, getProject());
            modelManager.updateDataModel(dataModel.getUuid(), cp -> {
                // delete computed column add a existing column
                cp.getAllNamedColumns().forEach(column -> {
                    if (column.getAliasDotColumn().equalsIgnoreCase("test_kylin_fact.lstg_format_name")) {
                        column.setStatus(NDataModel.ColumnStatus.TOMB);
                    }
                    if (column.getAliasDotColumn().contains("CC_AUTO_")) {
                        column.setName("modified_cc_column");
                        column.setStatus(NDataModel.ColumnStatus.TOMB);
                    }
                });
                cp.getAllNamedColumns().get(cp.getAllNamedColumns().size() - 1).setStatus(NDataModel.ColumnStatus.TOMB);
                cp.getComputedColumnDescs().clear();
                cp.getComputedColumnUuids().clear();
                cp.getAllMeasures().forEach(measure -> {
                    if (measure.getId() == 100001) {
                        measure.setTomb(true);
                    }
                });
                cp.setMvcc(cp.getMvcc() + 1);
            });
            return true;
        }, getProject());
        // verify update success
        NDataModel updatedModel = NDataModelManager.getInstance(kylinConfig, getProject())
                .getDataModelDesc(dataModel.getUuid());
        Assert.assertTrue(updatedModel.getComputedColumnDescs().isEmpty());
        List<NDataModel.NamedColumn> targetColumns = updatedModel.getAllNamedColumns().stream()
                .filter(column -> column.getAliasDotColumn().equalsIgnoreCase("test_kylin_fact.lstg_format_name")
                        || column.getAliasDotColumn().contains("CC_AUTO_"))
                .collect(Collectors.toList());
        Assert.assertEquals(2, targetColumns.size());
        targetColumns.forEach(column -> {
            // TOMB status means the column no longer "exists" on the model
            Assert.assertFalse(column.isExist());
            if (column.getAliasDotColumn().contains("CC_AUTO_")) {
                Assert.assertEquals("modified_cc_column", column.getName());
            }
        });
        Assert.assertTrue(updatedModel.getAllMeasures().get(1).isTomb());
        // update model to semi-auto-mode
        MetadataTestUtils.toSemiAutoMode(getProject());
        val context3 = AccelerationUtil.genOptRec(kylinConfig, getProject(), sqls);
        val accelerateInfoMap = context3.getAccelerateInfoMap();
        Assert.assertFalse(accelerateInfoMap.get(sqls[0]).isNotSucceed());
        Assert.assertFalse(accelerateInfoMap.get(sqls[1]).isNotSucceed());
        List<AbstractContext.ModelContext> modelContexts = context3.getModelContexts();
        Assert.assertEquals(1, modelContexts.size());
        AbstractContext.ModelContext modelContext = modelContexts.get(0);
        NDataModel model = modelContext.getTargetModel();
        List<NDataModel.Measure> allMeasures = model.getAllMeasures();
        // the computed column and the tombed measure are proposed again as recommendations
        Assert.assertEquals(1, model.getComputedColumnDescs().size());
        Assert.assertEquals(2, allMeasures.size());
        Map<String, MeasureRecItemV2> measureRecItemMap = modelContext.getMeasureRecItemMap();
        Assert.assertEquals(1, measureRecItemMap.size());
        NDataModel.NamedColumn namedColumn = model.getAllNamedColumns().stream()
                .filter(column -> column.getAliasDotColumn().contains("CC_AUTO_")) //
                .findFirst().orElse(null);
        Assert.assertNotNull(namedColumn);
        Assert.assertEquals(NDataModel.ColumnStatus.EXIST, namedColumn.getStatus());
    }
    /**
     * Checks that proposing from queries in two partial batches yields the same set of index
     * plans (same cuboids, dimensions and measures) as proposing from all queries at once.
     */
    @Test
    public void testAutoMultipleModel() throws Exception {
        Map<String, IndexPlan> indexPlanOfParts = new HashMap<>();
        Map<String, IndexPlan> indexPlanOfAll = new HashMap<>();
        // 1. Feed queries part1
        {
            List<Pair<String, String>> queries = fetchQueries("sql_for_automodeling/sql", 0, 2);
            AbstractContext context = proposeWithSmartMaster(queries);
            List<AbstractContext.ModelContext> modelContexts = context.getModelContexts();
            for (AbstractContext.ModelContext modelContext : modelContexts) {
                IndexPlan indexPlan = modelContext.getTargetIndexPlan();
                indexPlanOfParts.put(indexPlan.getId(), indexPlan);
            }
        }
        // 2. Feed queries part2
        {
            List<Pair<String, String>> queries = fetchQueries("sql_for_automodeling/sql", 2, 4);
            AbstractContext context = proposeWithSmartMaster(queries);
            List<AbstractContext.ModelContext> modelContexts = context.getModelContexts();
            for (AbstractContext.ModelContext modelContext : modelContexts) {
                IndexPlan indexPlan = modelContext.getTargetIndexPlan();
                indexPlanOfParts.put(indexPlan.getId(), indexPlan);
            }
        }
        // 3. Retry all queries
        {
            List<Pair<String, String>> queries = fetchQueries("sql_for_automodeling/sql", 0, 4);
            AbstractContext context = proposeWithSmartMaster(queries);
            List<AbstractContext.ModelContext> modelContexts = context.getModelContexts();
            for (AbstractContext.ModelContext modelContext : modelContexts) {
                IndexPlan indexPlan = modelContext.getTargetIndexPlan();
                indexPlanOfAll.put(indexPlan.getId(), indexPlan);
            }
        }
        // 4. Suggested cuboids should be consistent no matter modeling with partial or full queries
        {
            Assert.assertEquals(indexPlanOfParts.size(), indexPlanOfAll.size());
            for (IndexPlan actual : indexPlanOfAll.values()) {
                IndexPlan expected = indexPlanOfParts.get(actual.getId());
                Assert.assertNotNull(expected);
                // compare cuboids
                Assert.assertEquals(expected.getAllIndexes().size(), actual.getAllIndexes().size());
                Assert.assertEquals(expected.getAllLayouts().size(), actual.getAllLayouts().size());
                for (IndexEntity actualCuboid : actual.getAllIndexes()) {
                    IndexEntity expectedCuboid = expected.getIndexEntity(actualCuboid.getId());
                    MatcherAssert.assertThat(expectedCuboid.getDimensions(),
                            CoreMatchers.is(actualCuboid.getDimensions()));
                    MatcherAssert.assertThat(expectedCuboid.getMeasures(), CoreMatchers.is(actualCuboid.getMeasures()));
                }
            }
        }
        // clean up the embedded metastore created by the proposal steps
        FileUtils.deleteQuietly(new File("../kylin-it/metastore_db"));
    }
    /**
     * Test a query only with count(*), can build and query from IndexPlan,
     * don't move it.
     */
    @Test
    public void testCountStar() throws Exception {
        new TestScenario(ExecAndCompExt.CompareLevel.SAME, "sql_for_automodeling/sql_count_star").execute();
    }
    /** Round-trips timestamp-selecting queries through the auto-modeling scenario runner. */
    @Test
    public void testSelectTimestamp() throws Exception {
        new TestScenario(ExecAndCompExt.CompareLevel.SAME, "sql_for_automodeling/sql_timestamp").execute();
    }
    /**
     * Runs the full "query/sql" suite (minus whitelisted patterns) with limits enabled,
     * comparing Kylin results against Spark.
     */
    @Test
    public void testLimitCorrectness() throws Exception {
        excludedSqlPatterns.addAll(loadWhiteListPatterns());
        new TestScenario(ExecAndCompExt.CompareLevel.SAME, true, "query/sql").execute();
    }
    /**
     * (auto-modeling) one sql generates many OlapContexts, but it failed to accelerate.
     * The second OlapContext failed to propose cc when proposing target model.
     */
    @Test
    public void testPartialFailedWhenProposingWhenOneSqlAccelerating() {
        KylinConfig kylinConfig = getTestConfig();
        final String project = "newten";
        // a mixed left-join / inner-join query that splits into two OLAP contexts
        String sql = "select l.cal_dt, sum(left_join_gvm) as left_join_sum, sum(inner_join_gvm) as inner_join_sum\n" //
                + "from (\n" //
                + "  select test_kylin_fact.cal_dt, sum(price) as left_join_gvm\n" //
                + "  from test_kylin_fact " //
                + "  left JOIN edw.test_cal_dt as test_cal_dt ON test_kylin_fact.cal_dt = test_cal_dt.cal_dt\n" //
                + "  left JOIN test_category_groupings ON test_kylin_fact.leaf_categ_id = test_category_groupings.leaf_categ_id " //
                + "    AND test_kylin_fact.lstg_site_id = test_category_groupings.site_id\n" //
                + "  group by test_kylin_fact.cal_dt\n" //
                + "  ) l inner join (\n" //
                + "  select t2.cal_dt, SUM(PRICE_TOTAL + 1) as inner_join_gvm\n" //
                + "  from (select price*item_count as price_total, cal_dt, leaf_categ_id, lstg_site_id from test_kylin_fact) t2 \n" //
                + "  inner JOIN edw.test_cal_dt as test_cal_dt ON t2.cal_dt = test_cal_dt.cal_dt\n" //
                + "  inner JOIN test_category_groupings ON t2.leaf_categ_id = test_category_groupings.leaf_categ_id " //
                + "    AND t2.lstg_site_id = test_category_groupings.site_id\n" //
                + "  group by t2.cal_dt\n" //
                + "  ) i on l.cal_dt = i.cal_dt\n" //
                + "group by l.cal_dt";
        val context = new SmartContext(kylinConfig, project, new String[] { sql });
        SmartMaster smartMaster = new SmartMaster(context);
        // drive the proposer pipeline stage by stage instead of running it end-to-end
        smartMaster.getProposer("SQLAnalysisProposer").execute();
        smartMaster.getProposer("ModelSelectProposer").execute();
        // assert everything is ok after select model
        val accelerateInfoMap = smartMaster.getContext().getAccelerateInfoMap();
        Assert.assertFalse(accelerateInfoMap.get(sql).isNotSucceed());
        Assert.assertTrue(accelerateInfoMap.get(sql).getRelatedLayouts().isEmpty());
        smartMaster.getProposer("ModelOptProposer").execute();
        // assert it failed in the step of optimize model
        final List<AbstractContext.ModelContext> modelContexts = smartMaster.getContext().getModelContexts();
        val accelerateInfoMapAfterOpt = smartMaster.getContext().getAccelerateInfoMap();
        Assert.assertEquals(2, modelContexts.size());
        Assert.assertFalse(accelerateInfoMapAfterOpt.get(sql).isNotSucceed());
    }
    /**
     * After a model exists from smart mode, switching the project to semi-auto mode and
     * accelerating a more complex query should reuse that model and add new layouts to it.
     */
    @Test
    public void testSemiAutoWillCreateNewLayouts() {
        KylinConfig kylinConfig = getTestConfig();
        final String project = "newten";
        String sql = "select test_kylin_fact.cal_dt, sum(price) as left_join_gvm\n" //
                + "  from test_kylin_fact "
                + "  left JOIN edw.test_cal_dt as test_cal_dt ON test_kylin_fact.cal_dt = test_cal_dt.cal_dt\n" //
                + "  left JOIN test_category_groupings ON test_kylin_fact.leaf_categ_id = test_category_groupings.leaf_categ_id "
                + "    AND test_kylin_fact.lstg_site_id = test_category_groupings.site_id\n"
                + "  group by test_kylin_fact.cal_dt";
        val context = AccelerationUtil.runWithSmartContext(kylinConfig, project, new String[] { sql }, true);
        // confirm auto-modeling is ok
        val accelerateInfoMap = context.getAccelerateInfoMap();
        val modelContexts = context.getModelContexts();
        Assert.assertFalse(accelerateInfoMap.get(sql).isNotSucceed());
        Assert.assertEquals(1, modelContexts.size());
        IndexPlan targetIndexPlan = modelContexts.get(0).getTargetIndexPlan();
        Assert.assertEquals(1, targetIndexPlan.getAllLayouts().size());
        //set maintain model type to manual
        MetadataTestUtils.toSemiAutoMode(project);
        // propose model under the scene of manual maintain type
        sql = "select l.cal_dt, sum(left_join_gvm) as left_join_sum, sum(inner_join_gvm) as inner_join_sum\n"
                + "from (\n" //
                + "  select test_kylin_fact.cal_dt, sum(price) as left_join_gvm\n" //
                + "  from test_kylin_fact "
                + "  left JOIN edw.test_cal_dt as test_cal_dt ON test_kylin_fact.cal_dt = test_cal_dt.cal_dt\n" //
                + "  left JOIN test_category_groupings ON test_kylin_fact.leaf_categ_id = test_category_groupings.leaf_categ_id "
                + "    AND test_kylin_fact.lstg_site_id = test_category_groupings.site_id\n"
                + "  group by test_kylin_fact.cal_dt\n" //
                + "  ) l inner join (\n" //
                + "  select test_kylin_fact.cal_dt, sum(price+1) as inner_join_gvm\n" //
                + "  from test_kylin_fact\n" //
                + "  left JOIN edw.test_cal_dt as test_cal_dt ON test_kylin_fact.cal_dt = test_cal_dt.cal_dt\n"
                + "  left JOIN test_category_groupings ON test_kylin_fact.leaf_categ_id = test_category_groupings.leaf_categ_id "
                + "    AND test_kylin_fact.lstg_site_id = test_category_groupings.site_id\n"
                + "  group by test_kylin_fact.cal_dt\n" //
                + "  ) i on l.cal_dt = i.cal_dt\n" //
                + "group by l.cal_dt";
        val context2 = AccelerationUtil.runModelReuseContext(kylinConfig, project, new String[] { sql });
        // assert everything is ok after optimize model
        val modelContextsOfSemi = context2.getModelContexts();
        Assert.assertEquals(1, modelContextsOfSemi.size());
        IndexPlan indexPlanOfSemi = modelContextsOfSemi.get(0).getTargetIndexPlan();
        // one new layout was added on top of the existing one
        Assert.assertEquals(2, indexPlanOfSemi.getAllLayouts().size());
        val accelerationMapOfSemiMode = context2.getAccelerateInfoMap();
        Assert.assertFalse(accelerationMapOfSemiMode.get(sql).isNotSucceed());
        Assert.assertEquals(2, accelerationMapOfSemiMode.get(sql).getRelatedLayouts().size());
    }
    /**
     * In model-reuse (semi-auto) mode, a query whose join tree matches no existing model must
     * fail with the "no model match" pending message and "no compatible model" root cause.
     */
    @Test
    public void testNoCompatibleModelToReuse() {
        String[] sqls = { "select cal_dt from test_kylin_fact",
                "select lstg_format_name from test_kylin_fact inner join edw.test_cal_dt on test_kylin_fact.cal_dt = test_cal_dt.cal_dt" };
        val context = AccelerationUtil.runWithSmartContext(getTestConfig(), "newten", new String[] { sqls[0] }, true);
        val modelContexts = context.getModelContexts();
        Assert.assertEquals(1, modelContexts.size());
        Assert.assertFalse(context.getAccelerateInfoMap().get(sqls[0]).isNotSucceed());
        val context2 = AccelerationUtil.runModelReuseContext(getTestConfig(), "newten", sqls);
        val modelContexts2 = context2.getModelContexts();
        Assert.assertEquals(2, modelContexts2.size());
        Assert.assertFalse(context2.getAccelerateInfoMap().get(sqls[0]).isNotSucceed());
        AccelerateInfo accelerateInfo = context2.getAccelerateInfoMap().get(sqls[1]);
        Assert.assertTrue(accelerateInfo.isNotSucceed());
        Assert.assertEquals(ModelSelectProposer.NO_MODEL_MATCH_PENDING_MSG, accelerateInfo.getPendingMsg());
        Assert.assertEquals(ModelOptProposer.NO_COMPATIBLE_MODEL_MSG,
                Throwables.getRootCause(accelerateInfo.getFailedCause()).getMessage());
    }
    /**
     * In model-reuse mode with model creation allowed, compatible queries extend the existing
     * model via recommendations while an incompatible (inner-join) query creates a second model.
     */
    @Test
    public void testReuseAndCreateNewModel() {
        String[] sqls = { "select cal_dt from test_kylin_fact",
                "select cal_dt, lstg_format_name, sum(price * 0.8) from test_kylin_fact group by cal_dt, lstg_format_name",
                "select lstg_format_name, price from test_kylin_fact inner join edw.test_cal_dt on test_kylin_fact.cal_dt = test_cal_dt.cal_dt" };
        val context = AccelerationUtil.runWithSmartContext(getTestConfig(), "newten", new String[] { sqls[0] }, true);
        val modelContexts = context.getModelContexts();
        Assert.assertEquals(1, modelContexts.size());
        NDataModel targetModel = modelContexts.get(0).getTargetModel();
        Assert.assertNotNull(targetModel);
        Assert.assertFalse(context.getAccelerateInfoMap().get(sqls[0]).isNotSucceed());
        val context2 = AccelerationUtil.runModelReuseContext(getTestConfig(), "newten", sqls, true);
        val modelContexts2 = context2.getModelContexts();
        Assert.assertEquals(2, modelContexts2.size());
        Assert.assertFalse(context2.getAccelerateInfoMap().get(sqls[0]).isNotSucceed());
        Assert.assertFalse(context2.getAccelerateInfoMap().get(sqls[1]).isNotSucceed());
        Assert.assertFalse(context2.getAccelerateInfoMap().get(sqls[2]).isNotSucceed());
        // reused model: gains one cc (price * 0.8), one dimension, one measure, one index
        AbstractContext.ModelContext modelContext1 = modelContexts2.get(0);
        Assert.assertEquals("AUTO_MODEL_TEST_KYLIN_FACT_1", modelContext1.getTargetModel().getAlias());
        Assert.assertEquals(1, modelContext1.getCcRecItemMap().size());
        Assert.assertEquals(1, modelContext1.getDimensionRecItemMap().size());
        Assert.assertEquals(1, modelContext1.getMeasureRecItemMap().size());
        Assert.assertEquals(1, modelContext1.getIndexRexItemMap().size());
        // newly created model for the inner-join query
        AbstractContext.ModelContext modelContext2 = modelContexts2.get(1);
        Assert.assertEquals("AUTO_MODEL_TEST_KYLIN_FACT_2", modelContext2.getTargetModel().getAlias());
        Assert.assertEquals(0, modelContext2.getCcRecItemMap().size());
        Assert.assertEquals(2, modelContext2.getDimensionRecItemMap().size());
        Assert.assertEquals(0, modelContext2.getMeasureRecItemMap().size());
        Assert.assertEquals(1, modelContext2.getIndexRexItemMap().size());
    }
    /**
     * Two queries whose aggregates fit one layout must be mapped to the SAME layout id, both
     * in smart mode and again in semi-auto mode after the index plan has been emptied.
     */
    @Test
    public void testIndexReducer() {
        // use smart-model to prepare a model
        KylinConfig kylinConfig = getTestConfig();
        String project = getProject();
        String[] sqls = {
                "select LSTG_FORMAT_NAME,slr_segment_cd ,sum(price) as GMV from test_kylin_fact\n"
                        + " group by LSTG_FORMAT_NAME ,slr_segment_cd",
                "select LSTG_FORMAT_NAME,slr_segment_cd ,sum(price) as GMV, min(price) as MMV from test_kylin_fact\n"
                        + " group by LSTG_FORMAT_NAME ,slr_segment_cd" };
        val context = AccelerationUtil.runWithSmartContext(kylinConfig, project, sqls, true);
        Map<String, AccelerateInfo> accelerationInfoMap = context.getAccelerateInfoMap();
        val relatedLayoutsForSql0 = accelerationInfoMap.get(sqls[0]).getRelatedLayouts();
        val relatedLayoutsForSql1 = accelerationInfoMap.get(sqls[1]).getRelatedLayouts();
        long layoutForSql0 = relatedLayoutsForSql0.iterator().next().getLayoutId();
        long layoutForSql1 = relatedLayoutsForSql1.iterator().next().getLayoutId();
        Assert.assertEquals(layoutForSql0, layoutForSql1);
        // set to semi-auto to check tailoring layouts
        MetadataTestUtils.toSemiAutoMode(project);
        AbstractContext.ModelContext modelContext = context.getModelContexts().get(0);
        NDataModel targetModel = modelContext.getTargetModel();
        // wipe all indexes so the semi-auto run must re-propose them from scratch
        NIndexPlanManager.getInstance(kylinConfig, project).updateIndexPlan(targetModel.getUuid(),
                copyForWrite -> copyForWrite.setIndexes(Lists.newArrayList()));
        val context2 = AccelerationUtil.genOptRec(kylinConfig, project, sqls);
        accelerationInfoMap = context2.getAccelerateInfoMap();
        val relatedLayoutsSemiForSql0 = accelerationInfoMap.get(sqls[0]).getRelatedLayouts();
        val relatedLayoutsSemiForSql1 = accelerationInfoMap.get(sqls[1]).getRelatedLayouts();
        long layoutSemiForSql0 = relatedLayoutsSemiForSql0.iterator().next().getLayoutId();
        long layoutSemiForSql1 = relatedLayoutsSemiForSql1.iterator().next().getLayoutId();
        Assert.assertEquals(layoutSemiForSql0, layoutSemiForSql1);
    }
    /**
     * Exercises percentile_approx across execution paths (pushdown, UDF-registered Spark,
     * pre-aggregate and post-aggregate on the built model), checking that the
     * "kylin.query.percentile-approx-algorithm" setting switches between the t-digest and
     * quantile-summary results.
     */
    @Test
    public void testPercentileApprox() throws IOException, InterruptedException {
        List<Pair<String, String>> queries = fetchQueries("query/sql_percentile_2", 0, 0);
        String sql01 = queries.get(0).getSecond();
        String sql02 = queries.get(1).getSecond();
        // expected percentile values for the two algorithm variants
        double actualValue = 2467.5;
        double tDigestValue = 2467.5;
        double quantileSummaryValue = 2273;
        // check ss
        SparkSession originSS = SparderEnv.getSparkSession();
        getTestConfig().setProperty("kylin.query.percentile-approx-algorithm", "t-digest");
        SparderEnv.doInitSpark();
        Assert.assertTrue(SparderEnv.getSparkSession().sessionState().functionRegistry()
                .functionExists(KapFunctions.percentileFunction().name()));
        // restore the original session and config so later steps run with defaults
        SparderEnv.setSparkSession(originSS);
        getTestConfig().setProperty("kylin.query.percentile-approx-algorithm", "");
        // PushDown
        populateSSWithCSVData(kylinConfig, getProject(), SparderEnv.getSparkSession());
        QueryResult sparkResult = ExecAndComp.queryWithSpark(getProject(), sql01, "default", queries.get(0).getFirst(),
                false);
        List<Double> values = sparkResult.getRowsIterable().iterator().next().stream().map(Double::parseDouble)
                .collect(Collectors.toList());
        Assert.assertEquals(Arrays.asList(actualValue, quantileSummaryValue), values);
        sparkResult = ExecAndComp.queryWithSpark(getProject(), sql02, "default", queries.get(0).getFirst(), false);
        values = sparkResult.getRowsIterable().iterator().next().stream().map(Double::parseDouble)
                .collect(Collectors.toList());
        Assert.assertEquals(Arrays.asList(actualValue, quantileSummaryValue), values);
        try {
            // with the KAP percentile UDF registered, Spark yields the t-digest result
            UdfManager.register(ss, KapFunctions.percentileFunction());
            sparkResult = ExecAndComp.queryWithSpark(getProject(), sql01, "default", queries.get(0).getFirst(), false);
            values = sparkResult.getRowsIterable().iterator().next().stream().map(Double::parseDouble)
                    .collect(Collectors.toList());
            Assert.assertEquals(Arrays.asList(actualValue, tDigestValue), values);
            sparkResult = ExecAndComp.queryWithSpark(getProject(), sql02, "default", queries.get(0).getFirst(), false);
            values = sparkResult.getRowsIterable().iterator().next().stream().map(Double::parseDouble)
                    .collect(Collectors.toList());
            Assert.assertEquals(Arrays.asList(actualValue, tDigestValue), values);
        } finally {
            // always deregister the UDF so other tests see a clean function registry
            ss.sessionState().functionRegistry().dropFunction(KapFunctions.percentileFunction().name());
        }
        // Build model
        proposeWithSmartMaster(queries);
        buildAllModels(kylinConfig, getProject());
        // PreAggregate
        ExecAndComp.EnhancedQueryResult result01 = ExecAndCompExt.queryModelWithOlapContext(getProject(), "default",
                sql01);
        values = result01.getRowsIterable().iterator().next().stream().map(Double::parseDouble)
                .collect(Collectors.toList());
        Assert.assertEquals(Arrays.asList(tDigestValue, tDigestValue), values);
        // PostAggregate
        ExecAndComp.EnhancedQueryResult result02 = ExecAndCompExt.queryModelWithOlapContext(getProject(), "default",
                sql02);
        values = result02.getRowsIterable().iterator().next().stream().map(Double::parseDouble)
                .collect(Collectors.toList());
        Assert.assertEquals(Arrays.asList(quantileSummaryValue, quantileSummaryValue), values);
        getTestConfig().setProperty("kylin.query.percentile-approx-algorithm", "t-digest");
        result02 = ExecAndCompExt.queryModelWithOlapContext(getProject(), "default", sql02);
        values = result02.getRowsIterable().iterator().next().stream().map(Double::parseDouble)
                .collect(Collectors.toList());
        Assert.assertEquals(Arrays.asList(tDigestValue, tDigestValue), values);
    }
    /**
     * String '+' semantics with MSSQL-plus compatibility DISABLED: '+' on strings behaves as
     * numeric addition where parseable, null otherwise.
     */
    @Test
    public void testStringPlus() throws IOException, InterruptedException {
        Assert.assertFalse(KylinConfig.getInstanceFromEnv().isCalciteCompatibleWithMsSqlPlusEnabled());
        List<Pair<String, String>> queries = fetchQueries("query/sql_string_plus", 0, 1);
        String sql = queries.get(0).getSecond();
        proposeWithSmartMaster(queries);
        buildAllModels(kylinConfig, getProject());
        ExecAndComp.EnhancedQueryResult result = ExecAndCompExt.queryModelWithOlapContext(getProject(), "left", sql);
        QueryResult expect = new QueryResult(
                Arrays.asList(Arrays.asList("3.3", "1.21", "-0.7211000000000001", null, null, null, "3"),
                        Arrays.asList("712.0", "712.0", "712.0", null, null, null, null)),
                6, result.getColumns());
        Assert.assertTrue(
                QueryResultComparator.compareResults(expect, result.getQueryResult(), ExecAndComp.CompareLevel.SAME));
    }
    /**
     * Same query as {@link #testStringPlus()} but with "calcite.compatible-with-mssql-plus"
     * enabled: '+' on strings concatenates like MSSQL, so the expected rows differ.
     */
    @Test
    public void testStringPlus2() throws InterruptedException, IOException {
        Assert.assertFalse(KylinConfig.getInstanceFromEnv().isCalciteCompatibleWithMsSqlPlusEnabled());
        List<Pair<String, String>> queries = fetchQueries("query/sql_string_plus", 0, 1);
        String sql = queries.get(0).getSecond();
        overwriteSystemProp("calcite.compatible-with-mssql-plus", "true");
        proposeWithSmartMaster(queries);
        buildAllModels(kylinConfig, getProject());
        ExecAndComp.EnhancedQueryResult result = ExecAndCompExt.queryModelWithOlapContext(getProject(), "left", sql);
        QueryResult expected = new QueryResult(
                Arrays.asList(Arrays.asList("3.3", "1.21", "1.3-2.0211", null, "a1.33", "ab", "12"),
                        Arrays.asList("712.0", "712.0", "356356", null, "Auction356", "AuctionAuction", "1Auction")),
                6, result.getColumns());
        Assert.assertTrue(
                QueryResultComparator.compareResults(expected, result.getQueryResult(), ExecAndComp.CompareLevel.SAME));
        FileUtils.deleteQuietly(new File("../kylin-it/metastore_db"));
    }
    /** Numeric '+' queries must return the same result regardless of the MSSQL-plus setting. */
    @Test
    public void testNumberPlus() throws IOException {
        Assert.assertFalse(KylinConfig.getInstanceFromEnv().isCalciteCompatibleWithMsSqlPlusEnabled());
        List<Pair<String, String>> queries = fetchQueries("query/sql_number_plus", 0, 2);
        String sql = queries.get(0).getSecond();
        ExecAndComp.EnhancedQueryResult result = ExecAndCompExt.queryModelWithOlapContext(getProject(), "left", sql);
        QueryResult expect = new QueryResult(Collections.singletonList(Collections.singletonList("50")), 1,
                result.getColumns());
        Assert.assertTrue(
                QueryResultComparator.compareResults(expect, result.getQueryResult(), ExecAndComp.CompareLevel.SAME));
        sql = queries.get(1).getSecond();
        result = ExecAndCompExt.queryModelWithOlapContext(getProject(), "left", sql);
        expect = new QueryResult(Collections.singletonList(Collections.singletonList("50")), 1, result.getColumns());
        Assert.assertTrue(
                QueryResultComparator.compareResults(expect, result.getQueryResult(), ExecAndComp.CompareLevel.SAME));
    }
    /**
     * Constant-expression string '+' results, first with default semantics (non-numeric
     * combinations yield null) and then with MSSQL-plus enabled (concatenation).
     */
    @Test
    public void testConstantStringPlus() throws IOException {
        Assert.assertFalse(KylinConfig.getInstanceFromEnv().isCalciteCompatibleWithMsSqlPlusEnabled());
        List<Pair<String, String>> queries = fetchQueries("query/sql_string_plus_constant", 0, 1);
        String sql = queries.get(0).getSecond();
        List<String> expect = new ArrayList<>(Arrays.asList("3", "3.0", null, "3.0", "3.0", null, null, null, null,
                "3.0", "3.0", null, "3.0", "3.0"));
        // pad the expectation out to all 27 selected expressions; the rest are null by default
        for (int i = expect.size(); i < 27; i++)
            expect.add(null);
        ExecAndComp.EnhancedQueryResult result = ExecAndCompExt.queryModelWithOlapContext(getProject(), "default", sql);
        List<String> values = result.getQueryResult().getRowsIterable().iterator().next();
        Assert.assertEquals(expect, values);
        overwriteSystemProp("calcite.compatible-with-mssql-plus", "true");
        expect = new ArrayList<>(Arrays.asList("3", "3.0", null, "3.0", "3.0", null, null, null, null, //
                "3.0", "3.0", null, "12.0", "111", "11a", null, "1a1", "1aa", //
                null, null, null, null, "a11", "a1a", null, "aa1", "aaa"));
        result = ExecAndCompExt.queryModelWithOlapContext(getProject(), "default", sql);
        values = result.getQueryResult().getRowsIterable().iterator().next();
        Assert.assertEquals(expect, values);
    }
    /**
     * Builds models from the first two queries, then checks the third (which uses a duplicated
     * join key) splits into two OLAP contexts and still returns the expected count.
     */
    @Test
    public void testQueryWithDuplicatedJoinKey() throws IOException, InterruptedException {
        List<Pair<String, String>> queries = fetchQueries("query/sql_duplicated_join_key", 0, 0);
        String sql = queries.get(2).getSecond();
        proposeWithSmartMaster(queries.subList(0, 2));
        buildAllModels(kylinConfig, getProject());
        ExecAndComp.EnhancedQueryResult result = ExecAndCompExt.queryModelWithOlapContext(getProject(), "default", sql);
        List<String> values = result.getQueryResult().getRowsIterable().iterator().next();
        Assert.assertEquals(2, result.getOlapContexts().size());
        Assert.assertEquals(Collections.singletonList("14"), values);
        FileUtils.deleteQuietly(new File("../kylin-it/metastore_db"));
    }
    /**
     * Runs smart-mode acceleration over the SQL half of each (name, sql) pair and returns the
     * resulting proposal context.
     *
     * @param queries (name, sql) pairs as returned by {@code fetchQueries}
     * @return the smart context produced by the acceleration run
     */
    private AbstractContext proposeWithSmartMaster(List<Pair<String, String>> queries) {
        String[] sqls = queries.stream().map(Pair::getSecond).toArray(String[]::new);
        return AccelerationUtil.runWithSmartContext(kylinConfig, getProject(), sqls, true);
    }
}
|
oracle/graal | 37,040 | compiler/src/jdk.graal.compiler/src/jdk/graal/compiler/lir/amd64/AMD64MathSinOp.java | /*
* Copyright (c) 2018, 2025, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2016, Intel Corporation. All rights reserved.
* Intel Math Library (LIBM) Source Code
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package jdk.graal.compiler.lir.amd64;
import static jdk.graal.compiler.lir.amd64.AMD64LIRHelper.pointerConstant;
import static jdk.graal.compiler.lir.amd64.AMD64LIRHelper.recordExternalAddress;
import static jdk.vm.ci.amd64.AMD64.r10;
import static jdk.vm.ci.amd64.AMD64.r11;
import static jdk.vm.ci.amd64.AMD64.r8;
import static jdk.vm.ci.amd64.AMD64.r9;
import static jdk.vm.ci.amd64.AMD64.rax;
import static jdk.vm.ci.amd64.AMD64.rbx;
import static jdk.vm.ci.amd64.AMD64.rcx;
import static jdk.vm.ci.amd64.AMD64.rdi;
import static jdk.vm.ci.amd64.AMD64.rdx;
import static jdk.vm.ci.amd64.AMD64.rsi;
import static jdk.vm.ci.amd64.AMD64.rsp;
import static jdk.vm.ci.amd64.AMD64.xmm0;
import static jdk.vm.ci.amd64.AMD64.xmm1;
import static jdk.vm.ci.amd64.AMD64.xmm2;
import static jdk.vm.ci.amd64.AMD64.xmm3;
import static jdk.vm.ci.amd64.AMD64.xmm4;
import static jdk.vm.ci.amd64.AMD64.xmm5;
import static jdk.vm.ci.amd64.AMD64.xmm6;
import static jdk.vm.ci.amd64.AMD64.xmm7;
import jdk.graal.compiler.asm.Label;
import jdk.graal.compiler.asm.amd64.AMD64Address;
import jdk.graal.compiler.asm.amd64.AMD64Assembler.ConditionFlag;
import jdk.graal.compiler.asm.amd64.AMD64MacroAssembler;
import jdk.graal.compiler.lir.LIRInstructionClass;
import jdk.graal.compiler.lir.SyncPort;
import jdk.graal.compiler.lir.asm.ArrayDataPointerConstant;
import jdk.graal.compiler.lir.asm.CompilationResultBuilder;
import jdk.vm.ci.amd64.AMD64;
/**
* <pre>
* ALGORITHM DESCRIPTION - SIN()
* ---------------------
*
* 1. RANGE REDUCTION
*
* We perform an initial range reduction from X to r with
*
* X =~= N * pi/32 + r
*
* so that |r| <= pi/64 + epsilon. We restrict inputs to those
* where |N| <= 932560. Beyond this, the range reduction is
* insufficiently accurate. For extremely small inputs,
* denormalization can occur internally, impacting performance.
* This means that the main path is actually only taken for
* 2^-252 <= |X| < 90112.
*
* To avoid branches, we perform the range reduction to full
* accuracy each time.
*
* X - N * (P_1 + P_2 + P_3)
*
* where P_1 and P_2 are 32-bit numbers (so multiplication by N
* is exact) and P_3 is a 53-bit number. Together, these
* approximate pi well enough for all cases in the restricted
* range.
*
* The main reduction sequence is:
*
* y = 32/pi * x
* N = integer(y)
* (computed by adding and subtracting off SHIFTER)
*
* m_1 = N * P_1
* m_2 = N * P_2
* r_1 = x - m_1
* r = r_1 - m_2
* (this r can be used for most of the calculation)
*
* c_1 = r_1 - r
* m_3 = N * P_3
* c_2 = c_1 - m_2
* c = c_2 - m_3
*
* 2. MAIN ALGORITHM
*
* The algorithm uses a table lookup based on B = M * pi / 32
* where M = N mod 64. The stored values are:
* sigma closest power of 2 to cos(B)
* C_hl 53-bit cos(B) - sigma
* S_hi + S_lo 2 * 53-bit sin(B)
*
* The computation is organized as follows:
*
* sin(B + r + c) = [sin(B) + sigma * r] +
* r * (cos(B) - sigma) +
* sin(B) * [cos(r + c) - 1] +
* cos(B) * [sin(r + c) - r]
*
* which is approximately:
*
* [S_hi + sigma * r] +
* C_hl * r +
* S_lo + S_hi * [(cos(r) - 1) - r * c] +
* (C_hl + sigma) * [(sin(r) - r) + c]
*
* and this is what is actually computed. We separate this sum
* into four parts:
*
* hi + med + pols + corr
*
* where
*
* hi = S_hi + sigma r
* med = C_hl * r
* pols = S_hi * (cos(r) - 1) + (C_hl + sigma) * (sin(r) - r)
* corr = S_lo + c * ((C_hl + sigma) - S_hi * r)
*
* 3. POLYNOMIAL
*
* The polynomial S_hi * (cos(r) - 1) + (C_hl + sigma) *
* (sin(r) - r) can be rearranged freely, since it is quite
* small, so we exploit parallelism to the fullest.
*
* psc4 = SC_4 * r_1
* msc4 = psc4 * r
* r2 = r * r
* msc2 = SC_2 * r2
* r4 = r2 * r2
* psc3 = SC_3 + msc4
* psc1 = SC_1 + msc2
* msc3 = r4 * psc3
* sincospols = psc1 + msc3
* pols = sincospols *
* <S_hi * r^2 | (C_hl + sigma) * r^3>
*
* 4. CORRECTION TERM
*
* This is where the "c" component of the range reduction is
* taken into account; recall that just "r" is used for most of
* the calculation.
*
* -c = m_3 - c_2
* -d = S_hi * r - (C_hl + sigma)
* corr = -c * -d + S_lo
*
* 5. COMPENSATED SUMMATIONS
*
* The two successive compensated summations add up the high
* and medium parts, leaving just the low parts to add up at
* the end.
*
* rs = sigma * r
* res_int = S_hi + rs
* k_0 = S_hi - res_int
* k_2 = k_0 + rs
* med = C_hl * r
* res_hi = res_int + med
* k_1 = res_int - res_hi
* k_3 = k_1 + med
*
* 6. FINAL SUMMATION
*
* We now add up all the small parts:
*
* res_lo = pols(hi) + pols(lo) + corr + k_1 + k_3
*
* Now the overall result is just:
*
* res_hi + res_lo
*
* 7. SMALL ARGUMENTS
*
* If |x| < SNN (SNN meaning the smallest normal number), we
* simply perform 0.1111111...1111 * x. For SNN <= |x|, we
* do 2^-55 * (2^55 * x - x).
*
* Special cases:
* sin(NaN) = quiet NaN, and raise invalid exception
* sin(INF) = NaN and raise invalid exception
* sin(+/-0) = +/-0
* </pre>
*/
// @formatter:off
@SyncPort(from = "https://github.com/openjdk/jdk/blob/4994bd594299e91e804438692e068b1c5dd5cc02/src/hotspot/cpu/x86/stubGenerator_x86_64_sin.cpp#L30-L649",
sha1 = "e7388f19ab70c245d50ca43bf2f1df3e6e956685")
// @formatter:on
public final class AMD64MathSinOp extends AMD64MathIntrinsicUnaryOp {
public static final LIRInstructionClass<AMD64MathSinOp> TYPE = LIRInstructionClass.create(AMD64MathSinOp.class);
public AMD64MathSinOp() {
super(TYPE, /* GPR */ rax, rcx, rdx, rbx, rsi, rdi, r8, r9, r10, r11,
/* XMM */ xmm1, xmm2, xmm3, xmm4, xmm5, xmm6, xmm7);
}
private static ArrayDataPointerConstant onehalf = pointerConstant(16, new int[]{
// @formatter:off
0x00000000, 0x3fe00000, 0x00000000, 0x3fe00000
// @formatter:on
});
private static ArrayDataPointerConstant p2 = pointerConstant(16, new int[]{
// @formatter:off
0x1a600000, 0x3d90b461, 0x1a600000, 0x3d90b461
// @formatter:on
});
private static ArrayDataPointerConstant sc4 = pointerConstant(16, new int[]{
// @formatter:off
0xa556c734, 0x3ec71de3, 0x1a01a01a, 0x3efa01a0
// @formatter:on
});
private static ArrayDataPointerConstant ctable = pointerConstant(16, new int[]{
// @formatter:off
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x3ff00000, 0x176d6d31, 0xbf73b92e,
0xbc29b42c, 0x3fb917a6, 0xe0000000, 0xbc3e2718, 0x00000000,
0x3ff00000, 0x011469fb, 0xbf93ad06, 0x3c69a60b, 0x3fc8f8b8,
0xc0000000, 0xbc626d19, 0x00000000, 0x3ff00000, 0x939d225a,
0xbfa60bea, 0x2ed59f06, 0x3fd29406, 0xa0000000, 0xbc75d28d,
0x00000000, 0x3ff00000, 0x866b95cf, 0xbfb37ca1, 0xa6aea963,
0x3fd87de2, 0xe0000000, 0xbc672ced, 0x00000000, 0x3ff00000,
0x73fa1279, 0xbfbe3a68, 0x3806f63b, 0x3fde2b5d, 0x20000000,
0x3c5e0d89, 0x00000000, 0x3ff00000, 0x5bc57974, 0xbfc59267,
0x39ae68c8, 0x3fe1c73b, 0x20000000, 0x3c8b25dd, 0x00000000,
0x3ff00000, 0x53aba2fd, 0xbfcd0dfe, 0x25091dd6, 0x3fe44cf3,
0x20000000, 0x3c68076a, 0x00000000, 0x3ff00000, 0x99fcef32,
0x3fca8279, 0x667f3bcd, 0x3fe6a09e, 0x20000000, 0xbc8bdd34,
0x00000000, 0x3fe00000, 0x94247758, 0x3fc133cc, 0x6b151741,
0x3fe8bc80, 0x20000000, 0xbc82c5e1, 0x00000000, 0x3fe00000,
0x9ae68c87, 0x3fac73b3, 0x290ea1a3, 0x3fea9b66, 0xe0000000,
0x3c39f630, 0x00000000, 0x3fe00000, 0x7f909c4e, 0xbf9d4a2c,
0xf180bdb1, 0x3fec38b2, 0x80000000, 0xbc76e0b1, 0x00000000,
0x3fe00000, 0x65455a75, 0xbfbe0875, 0xcf328d46, 0x3fed906b,
0x20000000, 0x3c7457e6, 0x00000000, 0x3fe00000, 0x76acf82d,
0x3fa4a031, 0x56c62dda, 0x3fee9f41, 0xe0000000, 0x3c8760b1,
0x00000000, 0x3fd00000, 0x0e5967d5, 0xbfac1d1f, 0xcff75cb0,
0x3fef6297, 0x20000000, 0x3c756217, 0x00000000, 0x3fd00000,
0x0f592f50, 0xbf9ba165, 0xa3d12526, 0x3fefd88d, 0x40000000,
0xbc887df6, 0x00000000, 0x3fc00000, 0x00000000, 0x00000000,
0x00000000, 0x3ff00000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x0f592f50, 0x3f9ba165, 0xa3d12526, 0x3fefd88d,
0x40000000, 0xbc887df6, 0x00000000, 0xbfc00000, 0x0e5967d5,
0x3fac1d1f, 0xcff75cb0, 0x3fef6297, 0x20000000, 0x3c756217,
0x00000000, 0xbfd00000, 0x76acf82d, 0xbfa4a031, 0x56c62dda,
0x3fee9f41, 0xe0000000, 0x3c8760b1, 0x00000000, 0xbfd00000,
0x65455a75, 0x3fbe0875, 0xcf328d46, 0x3fed906b, 0x20000000,
0x3c7457e6, 0x00000000, 0xbfe00000, 0x7f909c4e, 0x3f9d4a2c,
0xf180bdb1, 0x3fec38b2, 0x80000000, 0xbc76e0b1, 0x00000000,
0xbfe00000, 0x9ae68c87, 0xbfac73b3, 0x290ea1a3, 0x3fea9b66,
0xe0000000, 0x3c39f630, 0x00000000, 0xbfe00000, 0x94247758,
0xbfc133cc, 0x6b151741, 0x3fe8bc80, 0x20000000, 0xbc82c5e1,
0x00000000, 0xbfe00000, 0x99fcef32, 0xbfca8279, 0x667f3bcd,
0x3fe6a09e, 0x20000000, 0xbc8bdd34, 0x00000000, 0xbfe00000,
0x53aba2fd, 0x3fcd0dfe, 0x25091dd6, 0x3fe44cf3, 0x20000000,
0x3c68076a, 0x00000000, 0xbff00000, 0x5bc57974, 0x3fc59267,
0x39ae68c8, 0x3fe1c73b, 0x20000000, 0x3c8b25dd, 0x00000000,
0xbff00000, 0x73fa1279, 0x3fbe3a68, 0x3806f63b, 0x3fde2b5d,
0x20000000, 0x3c5e0d89, 0x00000000, 0xbff00000, 0x866b95cf,
0x3fb37ca1, 0xa6aea963, 0x3fd87de2, 0xe0000000, 0xbc672ced,
0x00000000, 0xbff00000, 0x939d225a, 0x3fa60bea, 0x2ed59f06,
0x3fd29406, 0xa0000000, 0xbc75d28d, 0x00000000, 0xbff00000,
0x011469fb, 0x3f93ad06, 0x3c69a60b, 0x3fc8f8b8, 0xc0000000,
0xbc626d19, 0x00000000, 0xbff00000, 0x176d6d31, 0x3f73b92e,
0xbc29b42c, 0x3fb917a6, 0xe0000000, 0xbc3e2718, 0x00000000,
0xbff00000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0xbff00000, 0x176d6d31,
0x3f73b92e, 0xbc29b42c, 0xbfb917a6, 0xe0000000, 0x3c3e2718,
0x00000000, 0xbff00000, 0x011469fb, 0x3f93ad06, 0x3c69a60b,
0xbfc8f8b8, 0xc0000000, 0x3c626d19, 0x00000000, 0xbff00000,
0x939d225a, 0x3fa60bea, 0x2ed59f06, 0xbfd29406, 0xa0000000,
0x3c75d28d, 0x00000000, 0xbff00000, 0x866b95cf, 0x3fb37ca1,
0xa6aea963, 0xbfd87de2, 0xe0000000, 0x3c672ced, 0x00000000,
0xbff00000, 0x73fa1279, 0x3fbe3a68, 0x3806f63b, 0xbfde2b5d,
0x20000000, 0xbc5e0d89, 0x00000000, 0xbff00000, 0x5bc57974,
0x3fc59267, 0x39ae68c8, 0xbfe1c73b, 0x20000000, 0xbc8b25dd,
0x00000000, 0xbff00000, 0x53aba2fd, 0x3fcd0dfe, 0x25091dd6,
0xbfe44cf3, 0x20000000, 0xbc68076a, 0x00000000, 0xbff00000,
0x99fcef32, 0xbfca8279, 0x667f3bcd, 0xbfe6a09e, 0x20000000,
0x3c8bdd34, 0x00000000, 0xbfe00000, 0x94247758, 0xbfc133cc,
0x6b151741, 0xbfe8bc80, 0x20000000, 0x3c82c5e1, 0x00000000,
0xbfe00000, 0x9ae68c87, 0xbfac73b3, 0x290ea1a3, 0xbfea9b66,
0xe0000000, 0xbc39f630, 0x00000000, 0xbfe00000, 0x7f909c4e,
0x3f9d4a2c, 0xf180bdb1, 0xbfec38b2, 0x80000000, 0x3c76e0b1,
0x00000000, 0xbfe00000, 0x65455a75, 0x3fbe0875, 0xcf328d46,
0xbfed906b, 0x20000000, 0xbc7457e6, 0x00000000, 0xbfe00000,
0x76acf82d, 0xbfa4a031, 0x56c62dda, 0xbfee9f41, 0xe0000000,
0xbc8760b1, 0x00000000, 0xbfd00000, 0x0e5967d5, 0x3fac1d1f,
0xcff75cb0, 0xbfef6297, 0x20000000, 0xbc756217, 0x00000000,
0xbfd00000, 0x0f592f50, 0x3f9ba165, 0xa3d12526, 0xbfefd88d,
0x40000000, 0x3c887df6, 0x00000000, 0xbfc00000, 0x00000000,
0x00000000, 0x00000000, 0xbff00000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x0f592f50, 0xbf9ba165, 0xa3d12526,
0xbfefd88d, 0x40000000, 0x3c887df6, 0x00000000, 0x3fc00000,
0x0e5967d5, 0xbfac1d1f, 0xcff75cb0, 0xbfef6297, 0x20000000,
0xbc756217, 0x00000000, 0x3fd00000, 0x76acf82d, 0x3fa4a031,
0x56c62dda, 0xbfee9f41, 0xe0000000, 0xbc8760b1, 0x00000000,
0x3fd00000, 0x65455a75, 0xbfbe0875, 0xcf328d46, 0xbfed906b,
0x20000000, 0xbc7457e6, 0x00000000, 0x3fe00000, 0x7f909c4e,
0xbf9d4a2c, 0xf180bdb1, 0xbfec38b2, 0x80000000, 0x3c76e0b1,
0x00000000, 0x3fe00000, 0x9ae68c87, 0x3fac73b3, 0x290ea1a3,
0xbfea9b66, 0xe0000000, 0xbc39f630, 0x00000000, 0x3fe00000,
0x94247758, 0x3fc133cc, 0x6b151741, 0xbfe8bc80, 0x20000000,
0x3c82c5e1, 0x00000000, 0x3fe00000, 0x99fcef32, 0x3fca8279,
0x667f3bcd, 0xbfe6a09e, 0x20000000, 0x3c8bdd34, 0x00000000,
0x3fe00000, 0x53aba2fd, 0xbfcd0dfe, 0x25091dd6, 0xbfe44cf3,
0x20000000, 0xbc68076a, 0x00000000, 0x3ff00000, 0x5bc57974,
0xbfc59267, 0x39ae68c8, 0xbfe1c73b, 0x20000000, 0xbc8b25dd,
0x00000000, 0x3ff00000, 0x73fa1279, 0xbfbe3a68, 0x3806f63b,
0xbfde2b5d, 0x20000000, 0xbc5e0d89, 0x00000000, 0x3ff00000,
0x866b95cf, 0xbfb37ca1, 0xa6aea963, 0xbfd87de2, 0xe0000000,
0x3c672ced, 0x00000000, 0x3ff00000, 0x939d225a, 0xbfa60bea,
0x2ed59f06, 0xbfd29406, 0xa0000000, 0x3c75d28d, 0x00000000,
0x3ff00000, 0x011469fb, 0xbf93ad06, 0x3c69a60b, 0xbfc8f8b8,
0xc0000000, 0x3c626d19, 0x00000000, 0x3ff00000, 0x176d6d31,
0xbf73b92e, 0xbc29b42c, 0xbfb917a6, 0xe0000000, 0x3c3e2718,
0x00000000, 0x3ff00000
// @formatter:on
});
private static ArrayDataPointerConstant sc2 = pointerConstant(16, new int[]{
// @formatter:off
0x11111111, 0x3f811111, 0x55555555, 0x3fa55555
// @formatter:on
});
private static ArrayDataPointerConstant sc3 = pointerConstant(16, new int[]{
// @formatter:off
0x1a01a01a, 0xbf2a01a0, 0x16c16c17, 0xbf56c16c
// @formatter:on
});
private static ArrayDataPointerConstant sc1 = pointerConstant(16, new int[]{
// @formatter:off
0x55555555, 0xbfc55555, 0x00000000, 0xbfe00000
// @formatter:on
});
private static ArrayDataPointerConstant piInvTable = pointerConstant(16, new int[]{
// @formatter:off
0x00000000, 0x00000000, 0xa2f9836e, 0x4e441529, 0xfc2757d1,
0xf534ddc0, 0xdb629599, 0x3c439041, 0xfe5163ab, 0xdebbc561,
0xb7246e3a, 0x424dd2e0, 0x06492eea, 0x09d1921c, 0xfe1deb1c,
0xb129a73e, 0xe88235f5, 0x2ebb4484, 0xe99c7026, 0xb45f7e41,
0x3991d639, 0x835339f4, 0x9c845f8b, 0xbdf9283b, 0x1ff897ff,
0xde05980f, 0xef2f118b, 0x5a0a6d1f, 0x6d367ecf, 0x27cb09b7,
0x4f463f66, 0x9e5fea2d, 0x7527bac7, 0xebe5f17b, 0x3d0739f7,
0x8a5292ea, 0x6bfb5fb1, 0x1f8d5d08, 0x56033046, 0xfc7b6bab,
0xf0cfbc21
// @formatter:on
});
private static ArrayDataPointerConstant pi4 = pointerConstant(8, new int[]{
// @formatter:off
0x40000000, 0x3fe921fb,
});
private static ArrayDataPointerConstant pi48 = pointerConstant(8, new int[]{
0x18469899, 0x3e64442d
// @formatter:on
});
private static ArrayDataPointerConstant pi32Inv = pointerConstant(8, new int[]{
// @formatter:off
0x6dc9c883, 0x40245f30
// @formatter:on
});
private static ArrayDataPointerConstant shifter = pointerConstant(8, new int[]{
// @formatter:off
0x00000000, 0x43380000
// @formatter:on
});
private static ArrayDataPointerConstant signMask = pointerConstant(8, new int[]{
// @formatter:off
0x00000000, 0x80000000
// @formatter:on
});
private static ArrayDataPointerConstant p3 = pointerConstant(8, new int[]{
// @formatter:off
0x2e037073, 0x3b63198a
// @formatter:on
});
private static ArrayDataPointerConstant allOnes = pointerConstant(8, new int[]{
// @formatter:off
0xffffffff, 0x3fefffff
// @formatter:on
});
private static ArrayDataPointerConstant twoPow55 = pointerConstant(8, new int[]{
// @formatter:off
0x00000000, 0x43600000
// @formatter:on
});
private static ArrayDataPointerConstant twoPowM55 = pointerConstant(8, new int[]{
// @formatter:off
0x00000000, 0x3c800000
// @formatter:on
});
private static ArrayDataPointerConstant p1 = pointerConstant(8, new int[]{
// @formatter:off
0x54400000, 0x3fb921fb
// @formatter:on
});
private static ArrayDataPointerConstant negZero = pointerConstant(8, new int[]{
// @formatter:off
0x00000000, 0x80000000
// @formatter:on
});
@Override
public void emitCode(CompilationResultBuilder crb, AMD64MacroAssembler masm) {
/*
* This code relies on recordExternalAddress providing the same address when called
* repeatedly. Especially for piInvTable.
*/
Label block0 = new Label();
Label block1 = new Label();
Label block2 = new Label();
Label block3 = new Label();
Label block4 = new Label();
Label block5 = new Label();
Label block6 = new Label();
Label block7 = new Label();
Label block8 = new Label();
Label block9 = new Label();
Label block10 = new Label();
Label block11 = new Label();
Label block12 = new Label();
Label block13 = new Label();
Label block14 = new Label();
masm.push(AMD64.rbx);
masm.subq(rsp, 16);
masm.movsd(new AMD64Address(rsp, 8), xmm0);
masm.movl(rax, new AMD64Address(rsp, 12));
masm.movq(xmm1, recordExternalAddress(crb, pi32Inv)); // 0x6dc9c883, 0x40245f30
masm.movq(xmm2, recordExternalAddress(crb, shifter)); // 0x00000000, 0x43380000
masm.andl(rax, 2147418112);
masm.subl(rax, 808452096);
masm.cmplAndJcc(rax, 281346048, ConditionFlag.Above, block0, false);
masm.mulsd(xmm1, xmm0);
masm.movdqu(xmm5, recordExternalAddress(crb, onehalf)); // 0x00000000, 0x3fe00000,
// 0x00000000, 0x3fe00000
masm.movq(xmm4, recordExternalAddress(crb, signMask)); // 0x00000000, 0x80000000
masm.pand(xmm4, xmm0);
masm.por(xmm5, xmm4);
masm.addpd(xmm1, xmm5);
masm.cvttsd2sil(rdx, xmm1);
masm.cvtsi2sdl(xmm1, rdx);
masm.movdqu(xmm6, recordExternalAddress(crb, p2)); // 0x1a600000, 0x3d90b461,
// 0x1a600000, 0x3d90b461
masm.movq(r8, 0x3fb921fb54400000L);
masm.movdq(xmm3, r8);
masm.movdqu(xmm5, recordExternalAddress(crb, sc4)); // 0xa556c734, 0x3ec71de3,
// 0x1a01a01a, 0x3efa01a0
masm.pshufd(xmm4, xmm0, 68);
masm.mulsd(xmm3, xmm1);
if (masm.supports(AMD64.CPUFeature.SSE3)) {
masm.movddup(xmm1, xmm1);
} else {
masm.movlhps(xmm1, xmm1);
}
masm.andl(rdx, 63);
masm.shll(rdx, 5);
masm.leaq(AMD64.rax, recordExternalAddress(crb, ctable));
masm.addq(AMD64.rax, AMD64.rdx);
masm.mulpd(xmm6, xmm1);
masm.mulsd(xmm1, recordExternalAddress(crb, p3)); // 0x2e037073, 0x3b63198a
masm.subsd(xmm4, xmm3);
masm.movq(xmm7, new AMD64Address(AMD64.rax, 8));
masm.subsd(xmm0, xmm3);
if (masm.supports(AMD64.CPUFeature.SSE3)) {
masm.movddup(xmm3, xmm4);
} else {
masm.movdqu(xmm3, xmm4);
masm.movlhps(xmm3, xmm3);
}
masm.subsd(xmm4, xmm6);
masm.pshufd(xmm0, xmm0, 68);
masm.movdqu(xmm2, new AMD64Address(AMD64.rax, 0));
masm.mulpd(xmm5, xmm0);
masm.subpd(xmm0, xmm6);
masm.mulsd(xmm7, xmm4);
masm.subsd(xmm3, xmm4);
masm.mulpd(xmm5, xmm0);
masm.mulpd(xmm0, xmm0);
masm.subsd(xmm3, xmm6);
masm.movdqu(xmm6, recordExternalAddress(crb, sc2)); // 0x11111111, 0x3f811111,
// 0x55555555, 0x3fa55555
masm.subsd(xmm1, xmm3);
masm.movq(xmm3, new AMD64Address(AMD64.rax, 24));
masm.addsd(xmm2, xmm3);
masm.subsd(xmm7, xmm2);
masm.mulsd(xmm2, xmm4);
masm.mulpd(xmm6, xmm0);
masm.mulsd(xmm3, xmm4);
masm.mulpd(xmm2, xmm0);
masm.mulpd(xmm0, xmm0);
masm.addpd(xmm5, recordExternalAddress(crb, sc3)); // 0x1a01a01a, 0xbf2a01a0,
// 0x16c16c17, 0xbf56c16c
masm.mulsd(xmm4, new AMD64Address(AMD64.rax, 0));
masm.addpd(xmm6, recordExternalAddress(crb, sc1)); // 0x55555555, 0xbfc55555,
// 0x00000000, 0xbfe00000
masm.mulpd(xmm5, xmm0);
masm.movdqu(xmm0, xmm3);
masm.addsd(xmm3, new AMD64Address(AMD64.rax, 8));
masm.mulpd(xmm1, xmm7);
masm.movdqu(xmm7, xmm4);
masm.addsd(xmm4, xmm3);
masm.addpd(xmm6, xmm5);
masm.movq(xmm5, new AMD64Address(AMD64.rax, 8));
masm.subsd(xmm5, xmm3);
masm.subsd(xmm3, xmm4);
masm.addsd(xmm1, new AMD64Address(AMD64.rax, 16));
masm.mulpd(xmm6, xmm2);
masm.addsd(xmm5, xmm0);
masm.addsd(xmm3, xmm7);
masm.addsd(xmm1, xmm5);
masm.addsd(xmm1, xmm3);
masm.addsd(xmm1, xmm6);
masm.unpckhpd(xmm6, xmm6);
masm.movdqu(xmm0, xmm4);
masm.addsd(xmm1, xmm6);
masm.addsd(xmm0, xmm1);
masm.jmp(block14);
masm.bind(block0);
masm.jcc(ConditionFlag.Greater, block1);
masm.shrl(rax, 20);
masm.cmplAndJcc(rax, 3325, ConditionFlag.NotEqual, block2, false);
masm.mulsd(xmm0, recordExternalAddress(crb, allOnes)); // 0xffffffff, 0x3fefffff
masm.jmp(block14);
masm.bind(block2);
masm.movq(xmm3, recordExternalAddress(crb, twoPow55)); // 0x00000000, 0x43600000
masm.mulsd(xmm3, xmm0);
masm.subsd(xmm3, xmm0);
masm.mulsd(xmm3, recordExternalAddress(crb, twoPowM55)); // 0x00000000, 0x3c800000
masm.jmp(block14);
masm.bind(block1);
masm.pextrw(rax, xmm0, 3);
masm.andl(rax, 32752);
masm.cmplAndJcc(rax, 32752, ConditionFlag.Equal, block3, false);
masm.pextrw(rcx, xmm0, 3);
masm.andl(rcx, 32752);
masm.subl(rcx, 16224);
masm.shrl(rcx, 7);
masm.andl(rcx, 65532);
masm.leaq(r11, recordExternalAddress(crb, piInvTable));
masm.addq(AMD64.rcx, r11);
masm.movdq(AMD64.rax, xmm0);
masm.movl(r10, new AMD64Address(AMD64.rcx, 20));
masm.movl(r8, new AMD64Address(AMD64.rcx, 24));
masm.movl(rdx, rax);
masm.shrq(AMD64.rax, 21);
masm.orl(rax, Integer.MIN_VALUE);
masm.shrl(rax, 11);
masm.movl(r9, r10);
masm.imulq(r10, AMD64.rdx);
masm.imulq(r9, AMD64.rax);
masm.imulq(r8, AMD64.rax);
masm.movl(rsi, new AMD64Address(AMD64.rcx, 16));
masm.movl(rdi, new AMD64Address(AMD64.rcx, 12));
masm.movl(r11, r10);
masm.shrq(r10, 32);
masm.addq(r9, r10);
masm.addq(r11, r8);
masm.movl(r8, r11);
masm.shrq(r11, 32);
masm.addq(r9, r11);
masm.movl(r10, rsi);
masm.imulq(rsi, AMD64.rdx);
masm.imulq(r10, AMD64.rax);
masm.movl(r11, rdi);
masm.imulq(rdi, AMD64.rdx);
masm.movl(rbx, rsi);
masm.shrq(rsi, 32);
masm.addq(r9, AMD64.rbx);
masm.movl(rbx, r9);
masm.shrq(r9, 32);
masm.addq(r10, rsi);
masm.addq(r10, r9);
masm.shlq(AMD64.rbx, 32);
masm.orq(r8, AMD64.rbx);
masm.imulq(r11, AMD64.rax);
masm.movl(r9, new AMD64Address(AMD64.rcx, 8));
masm.movl(rsi, new AMD64Address(AMD64.rcx, 4));
masm.movl(rbx, rdi);
masm.shrq(rdi, 32);
masm.addq(r10, AMD64.rbx);
masm.movl(rbx, r10);
masm.shrq(r10, 32);
masm.addq(r11, rdi);
masm.addq(r11, r10);
masm.movq(rdi, r9);
masm.imulq(r9, AMD64.rdx);
masm.imulq(rdi, AMD64.rax);
masm.movl(r10, r9);
masm.shrq(r9, 32);
masm.addq(r11, r10);
masm.movl(r10, r11);
masm.shrq(r11, 32);
masm.addq(rdi, r9);
masm.addq(rdi, r11);
masm.movq(r9, rsi);
masm.imulq(rsi, AMD64.rdx);
masm.imulq(r9, AMD64.rax);
masm.shlq(r10, 32);
masm.orq(r10, AMD64.rbx);
masm.movl(rax, new AMD64Address(AMD64.rcx, 0));
masm.movl(r11, rsi);
masm.shrq(rsi, 32);
masm.addq(rdi, r11);
masm.movl(r11, rdi);
masm.shrq(rdi, 32);
masm.addq(r9, rsi);
masm.addq(r9, rdi);
masm.imulq(AMD64.rdx, AMD64.rax);
masm.pextrw(rbx, xmm0, 3);
masm.leaq(rdi, recordExternalAddress(crb, piInvTable));
masm.subq(AMD64.rcx, rdi);
masm.addl(rcx, rcx);
masm.addl(rcx, rcx);
masm.addl(rcx, rcx);
masm.addl(rcx, 19);
masm.movl(rsi, 32768);
masm.andl(rsi, rbx);
masm.shrl(rbx, 4);
masm.andl(rbx, 2047);
masm.subl(rbx, 1023);
masm.subl(rcx, rbx);
masm.addq(r9, AMD64.rdx);
masm.movl(rdx, rcx);
masm.addl(rdx, 32);
masm.cmplAndJcc(rcx, 1, ConditionFlag.Less, block4, false);
masm.negl(rcx);
masm.addl(rcx, 29);
masm.shll(r9);
masm.movl(rdi, r9);
masm.andl(r9, 536870911);
masm.testlAndJcc(r9, 268435456, ConditionFlag.NotEqual, block5, false);
masm.shrl(r9);
masm.movl(rbx, 0);
masm.shlq(r9, 32);
masm.orq(r9, r11);
masm.bind(block6);
masm.bind(block7);
masm.cmpqAndJcc(r9, 0, ConditionFlag.Equal, block8, false);
masm.bind(block9);
masm.bsrq(r11, r9);
masm.movl(rcx, 29);
masm.sublAndJcc(rcx, r11, ConditionFlag.LessEqual, block10, false);
masm.shlq(r9);
masm.movq(AMD64.rax, r10);
masm.shlq(r10);
masm.addl(rdx, rcx);
masm.negl(rcx);
masm.addl(rcx, 64);
masm.shrq(AMD64.rax);
masm.shrq(r8);
masm.orq(r9, AMD64.rax);
masm.orq(r10, r8);
masm.bind(block11);
masm.cvtsi2sdq(xmm0, r9);
masm.shrq(r10, 1);
masm.cvtsi2sdq(xmm3, r10);
masm.xorpd(xmm4, xmm4);
masm.shll(rdx, 4);
masm.negl(rdx);
masm.addl(rdx, 16368);
masm.orl(rdx, rsi);
masm.xorl(rdx, rbx);
masm.pinsrw(xmm4, rdx, 3);
masm.movq(xmm2, recordExternalAddress(crb, pi4)); // 0x40000000, 0x3fe921fb,
// 0x18469899, 0x3e64442d
masm.movq(xmm6, recordExternalAddress(crb, pi48)); // 0x3fe921fb, 0x18469899,
// 0x3e64442d
masm.xorpd(xmm5, xmm5);
masm.subl(rdx, 1008);
masm.pinsrw(xmm5, rdx, 3);
masm.mulsd(xmm0, xmm4);
masm.shll(rsi, 16);
masm.sarl(rsi, 31);
masm.mulsd(xmm3, xmm5);
masm.movdqu(xmm1, xmm0);
masm.mulsd(xmm0, xmm2);
masm.shrl(rdi, 29);
masm.addsd(xmm1, xmm3);
masm.mulsd(xmm3, xmm2);
masm.addl(rdi, rsi);
masm.xorl(rdi, rsi);
masm.mulsd(xmm6, xmm1);
masm.movl(rax, rdi);
masm.addsd(xmm6, xmm3);
masm.movdqu(xmm2, xmm0);
masm.addsd(xmm0, xmm6);
masm.subsd(xmm2, xmm0);
masm.addsd(xmm6, xmm2);
masm.bind(block12);
masm.movq(xmm1, recordExternalAddress(crb, pi32Inv)); // 0x6dc9c883, 0x40245f30
masm.mulsd(xmm1, xmm0);
masm.movq(xmm5, recordExternalAddress(crb, onehalf)); // 0x00000000, 0x3fe00000,
// 0x00000000, 0x3fe00000
masm.movq(xmm4, recordExternalAddress(crb, signMask)); // 0x00000000, 0x80000000
masm.pand(xmm4, xmm0);
masm.por(xmm5, xmm4);
masm.addpd(xmm1, xmm5);
masm.cvttsd2sil(rdx, xmm1);
masm.cvtsi2sdl(xmm1, rdx);
masm.movq(xmm3, recordExternalAddress(crb, p1)); // 0x54400000, 0x3fb921fb
masm.movdqu(xmm2, recordExternalAddress(crb, p2)); // 0x1a600000, 0x3d90b461,
// 0x1a600000, 0x3d90b461
masm.mulsd(xmm3, xmm1);
masm.unpcklpd(xmm1, xmm1);
masm.shll(rax, 3);
masm.addl(rdx, 1865216);
masm.movdqu(xmm4, xmm0);
masm.addl(rdx, rax);
masm.andl(rdx, 63);
masm.movdqu(xmm5, recordExternalAddress(crb, sc4)); // 0x54400000, 0x3fb921fb
masm.leaq(AMD64.rax, recordExternalAddress(crb, ctable));
masm.shll(rdx, 5);
masm.addq(AMD64.rax, AMD64.rdx);
masm.mulpd(xmm2, xmm1);
masm.subsd(xmm0, xmm3);
masm.mulsd(xmm1, recordExternalAddress(crb, p3)); // 0x2e037073, 0x3b63198a
masm.subsd(xmm4, xmm3);
masm.movq(xmm7, new AMD64Address(AMD64.rax, 8));
masm.unpcklpd(xmm0, xmm0);
masm.movdqu(xmm3, xmm4);
masm.subsd(xmm4, xmm2);
masm.mulpd(xmm5, xmm0);
masm.subpd(xmm0, xmm2);
masm.mulsd(xmm7, xmm4);
masm.subsd(xmm3, xmm4);
masm.mulpd(xmm5, xmm0);
masm.mulpd(xmm0, xmm0);
masm.subsd(xmm3, xmm2);
masm.movdqu(xmm2, new AMD64Address(AMD64.rax, 0));
masm.subsd(xmm1, xmm3);
masm.movq(xmm3, new AMD64Address(AMD64.rax, 24));
masm.addsd(xmm2, xmm3);
masm.subsd(xmm7, xmm2);
masm.subsd(xmm1, xmm6);
masm.movdqu(xmm6, recordExternalAddress(crb, sc2)); // 0x11111111, 0x3f811111,
// 0x55555555, 0x3fa55555
masm.mulsd(xmm2, xmm4);
masm.mulpd(xmm6, xmm0);
masm.mulsd(xmm3, xmm4);
masm.mulpd(xmm2, xmm0);
masm.mulpd(xmm0, xmm0);
masm.addpd(xmm5, recordExternalAddress(crb, sc3)); // 0x1a01a01a, 0xbf2a01a0,
// 0x16c16c17, 0xbf56c16c
masm.mulsd(xmm4, new AMD64Address(AMD64.rax, 0));
masm.addpd(xmm6, recordExternalAddress(crb, sc1)); // 0x55555555, 0xbfc55555,
// 0x00000000, 0xbfe00000
masm.mulpd(xmm5, xmm0);
masm.movdqu(xmm0, xmm3);
masm.addsd(xmm3, new AMD64Address(AMD64.rax, 8));
masm.mulpd(xmm1, xmm7);
masm.movdqu(xmm7, xmm4);
masm.addsd(xmm4, xmm3);
masm.addpd(xmm6, xmm5);
masm.movq(xmm5, new AMD64Address(AMD64.rax, 8));
masm.subsd(xmm5, xmm3);
masm.subsd(xmm3, xmm4);
masm.addsd(xmm1, new AMD64Address(AMD64.rax, 16));
masm.mulpd(xmm6, xmm2);
masm.addsd(xmm5, xmm0);
masm.addsd(xmm3, xmm7);
masm.addsd(xmm1, xmm5);
masm.addsd(xmm1, xmm3);
masm.addsd(xmm1, xmm6);
masm.unpckhpd(xmm6, xmm6);
masm.movdqu(xmm0, xmm4);
masm.addsd(xmm1, xmm6);
masm.addsd(xmm0, xmm1);
masm.jmp(block14);
masm.bind(block8);
masm.addl(rdx, 64);
masm.movq(r9, r10);
masm.movq(r10, r8);
masm.movl(r8, 0);
masm.cmpqAndJcc(r9, 0, ConditionFlag.NotEqual, block9, false);
masm.addl(rdx, 64);
masm.movq(r9, r10);
masm.movq(r10, r8);
masm.cmpqAndJcc(r9, 0, ConditionFlag.NotEqual, block9, false);
masm.xorpd(xmm0, xmm0);
masm.xorpd(xmm6, xmm6);
masm.jmp(block12);
masm.bind(block10);
masm.jcc(ConditionFlag.Equal, block11);
masm.negl(rcx);
masm.shrq(r10);
masm.movq(AMD64.rax, r9);
masm.shrq(r9);
masm.subl(rdx, rcx);
masm.negl(rcx);
masm.addl(rcx, 64);
masm.shlq(AMD64.rax);
masm.orq(r10, AMD64.rax);
masm.jmp(block11);
masm.bind(block4);
masm.negl(rcx);
masm.shlq(r9, 32);
masm.orq(r9, r11);
masm.shlq(r9);
masm.movq(rdi, r9);
masm.testlAndJcc(r9, Integer.MIN_VALUE, ConditionFlag.NotEqual, block13, false);
masm.shrl(r9);
masm.movl(rbx, 0);
masm.shrq(rdi, 3);
masm.jmp(block7);
masm.bind(block5);
masm.shrl(r9);
masm.movl(rbx, 536870912);
masm.shrl(rbx);
masm.shlq(r9, 32);
masm.orq(r9, r11);
masm.shlq(AMD64.rbx, 32);
masm.addl(rdi, 536870912);
masm.movl(AMD64.rcx, 0);
masm.movl(r11, 0);
masm.subq(AMD64.rcx, r8);
masm.sbbq(r11, r10);
masm.sbbq(AMD64.rbx, r9);
masm.movq(r8, AMD64.rcx);
masm.movq(r10, r11);
masm.movq(r9, AMD64.rbx);
masm.movl(rbx, 32768);
masm.jmp(block6);
masm.bind(block13);
masm.shrl(r9);
masm.movq(AMD64.rbx, 0x100000000L);
masm.shrq(AMD64.rbx);
masm.movl(AMD64.rcx, 0);
masm.movl(r11, 0);
masm.subq(AMD64.rcx, r8);
masm.sbbq(r11, r10);
masm.sbbq(AMD64.rbx, r9);
masm.movq(r8, AMD64.rcx);
masm.movq(r10, r11);
masm.movq(r9, AMD64.rbx);
masm.movl(rbx, 32768);
masm.shrq(rdi, 3);
masm.addl(rdi, 536870912);
masm.jmp(block7);
masm.bind(block3);
masm.movq(xmm0, new AMD64Address(rsp, 8));
masm.mulsd(xmm0, recordExternalAddress(crb, negZero)); // 0x00000000, 0x80000000
masm.movq(new AMD64Address(rsp, 0), xmm0);
masm.bind(block14);
masm.addq(rsp, 16);
masm.pop(AMD64.rbx);
}
}
|
googleads/googleads-java-lib | 37,064 | modules/dfp_axis/src/main/java/com/google/api/ads/admanager/axis/v202411/Dimension.java | // Copyright 2024 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/**
* Dimension.java
*
* This file was auto-generated from WSDL
* by the Apache Axis 1.4.1-SNAPSHOT Mar 20, 2024 (11:59:10 PDT) WSDL2Java emitter.
*/
package com.google.api.ads.admanager.axis.v202411;
public class Dimension implements java.io.Serializable {
private java.lang.String _value_;
private static java.util.HashMap _table_ = new java.util.HashMap();
// Constructor
protected Dimension(java.lang.String value) {
_value_ = value;
_table_.put(_value_,this);
}
public static final java.lang.String _MONTH_AND_YEAR = "MONTH_AND_YEAR";
public static final java.lang.String _WEEK = "WEEK";
public static final java.lang.String _DATE = "DATE";
public static final java.lang.String _DAY = "DAY";
public static final java.lang.String _HOUR = "HOUR";
public static final java.lang.String _DATE_PT = "DATE_PT";
public static final java.lang.String _WEEK_PT = "WEEK_PT";
public static final java.lang.String _MONTH_YEAR_PT = "MONTH_YEAR_PT";
public static final java.lang.String _DAY_OF_WEEK_PT = "DAY_OF_WEEK_PT";
public static final java.lang.String _LINE_ITEM_ID = "LINE_ITEM_ID";
public static final java.lang.String _LINE_ITEM_NAME = "LINE_ITEM_NAME";
public static final java.lang.String _LINE_ITEM_TYPE = "LINE_ITEM_TYPE";
public static final java.lang.String _ORDER_ID = "ORDER_ID";
public static final java.lang.String _ORDER_NAME = "ORDER_NAME";
public static final java.lang.String _ORDER_DELIVERY_STATUS = "ORDER_DELIVERY_STATUS";
public static final java.lang.String _ADVERTISER_ID = "ADVERTISER_ID";
public static final java.lang.String _ADVERTISER_NAME = "ADVERTISER_NAME";
public static final java.lang.String _AD_NETWORK_ID = "AD_NETWORK_ID";
public static final java.lang.String _AD_NETWORK_NAME = "AD_NETWORK_NAME";
public static final java.lang.String _SALESPERSON_ID = "SALESPERSON_ID";
public static final java.lang.String _SALESPERSON_NAME = "SALESPERSON_NAME";
public static final java.lang.String _CREATIVE_ID = "CREATIVE_ID";
public static final java.lang.String _CREATIVE_NAME = "CREATIVE_NAME";
public static final java.lang.String _CREATIVE_TYPE = "CREATIVE_TYPE";
public static final java.lang.String _CREATIVE_BILLING_TYPE = "CREATIVE_BILLING_TYPE";
public static final java.lang.String _CUSTOM_EVENT_ID = "CUSTOM_EVENT_ID";
public static final java.lang.String _CUSTOM_EVENT_NAME = "CUSTOM_EVENT_NAME";
public static final java.lang.String _CUSTOM_EVENT_TYPE = "CUSTOM_EVENT_TYPE";
public static final java.lang.String _CREATIVE_SIZE = "CREATIVE_SIZE";
public static final java.lang.String _AD_UNIT_ID = "AD_UNIT_ID";
public static final java.lang.String _AD_UNIT_NAME = "AD_UNIT_NAME";
public static final java.lang.String _PARENT_AD_UNIT_ID = "PARENT_AD_UNIT_ID";
public static final java.lang.String _PARENT_AD_UNIT_NAME = "PARENT_AD_UNIT_NAME";
public static final java.lang.String _PLACEMENT_ID = "PLACEMENT_ID";
public static final java.lang.String _PLACEMENT_NAME = "PLACEMENT_NAME";
public static final java.lang.String _PLACEMENT_STATUS = "PLACEMENT_STATUS";
    // XML string values for each report dimension. Each constant below backs
    // the public Dimension singleton of the same (underscore-less) name
    // declared further down, and is the key under which that singleton is
    // registered for fromValue(java.lang.String) lookup.
    public static final java.lang.String _TARGETING = "TARGETING";
    public static final java.lang.String _BROWSER_NAME = "BROWSER_NAME";
    public static final java.lang.String _DEVICE_CATEGORY_ID = "DEVICE_CATEGORY_ID";
    public static final java.lang.String _DEVICE_CATEGORY_NAME = "DEVICE_CATEGORY_NAME";
    public static final java.lang.String _COUNTRY_CRITERIA_ID = "COUNTRY_CRITERIA_ID";
    public static final java.lang.String _COUNTRY_CODE = "COUNTRY_CODE";
    public static final java.lang.String _COUNTRY_NAME = "COUNTRY_NAME";
    public static final java.lang.String _REGION_CRITERIA_ID = "REGION_CRITERIA_ID";
    public static final java.lang.String _REGION_NAME = "REGION_NAME";
    public static final java.lang.String _CITY_CRITERIA_ID = "CITY_CRITERIA_ID";
    public static final java.lang.String _CITY_NAME = "CITY_NAME";
    public static final java.lang.String _METRO_CRITERIA_ID = "METRO_CRITERIA_ID";
    public static final java.lang.String _METRO_NAME = "METRO_NAME";
    public static final java.lang.String _POSTAL_CODE_CRITERIA_ID = "POSTAL_CODE_CRITERIA_ID";
    public static final java.lang.String _POSTAL_CODE = "POSTAL_CODE";
    public static final java.lang.String _CUSTOM_TARGETING_VALUE_ID = "CUSTOM_TARGETING_VALUE_ID";
    public static final java.lang.String _CUSTOM_CRITERIA = "CUSTOM_CRITERIA";
    public static final java.lang.String _CONTENT_ID = "CONTENT_ID";
    public static final java.lang.String _CONTENT_NAME = "CONTENT_NAME";
    public static final java.lang.String _CONTENT_BUNDLE_ID = "CONTENT_BUNDLE_ID";
    public static final java.lang.String _CONTENT_BUNDLE_NAME = "CONTENT_BUNDLE_NAME";
    public static final java.lang.String _CMS_METADATA = "CMS_METADATA";
    public static final java.lang.String _VIDEO_FALLBACK_POSITION = "VIDEO_FALLBACK_POSITION";
    public static final java.lang.String _POSITION_OF_POD = "POSITION_OF_POD";
    public static final java.lang.String _POSITION_IN_POD = "POSITION_IN_POD";
    public static final java.lang.String _CUSTOM_SPOT_ID = "CUSTOM_SPOT_ID";
    public static final java.lang.String _CUSTOM_SPOT_NAME = "CUSTOM_SPOT_NAME";
    public static final java.lang.String _VIDEO_REDIRECT_THIRD_PARTY = "VIDEO_REDIRECT_THIRD_PARTY";
    public static final java.lang.String _VIDEO_BREAK_TYPE = "VIDEO_BREAK_TYPE";
    public static final java.lang.String _VIDEO_BREAK_TYPE_NAME = "VIDEO_BREAK_TYPE_NAME";
    public static final java.lang.String _VIDEO_VAST_VERSION = "VIDEO_VAST_VERSION";
    public static final java.lang.String _VIDEO_AD_REQUEST_DURATION_ID = "VIDEO_AD_REQUEST_DURATION_ID";
    public static final java.lang.String _VIDEO_AD_REQUEST_DURATION = "VIDEO_AD_REQUEST_DURATION";
    public static final java.lang.String _VIDEO_PLCMT_ID = "VIDEO_PLCMT_ID";
    public static final java.lang.String _VIDEO_PLCMT_NAME = "VIDEO_PLCMT_NAME";
    public static final java.lang.String _INVENTORY_FORMAT = "INVENTORY_FORMAT";
    public static final java.lang.String _INVENTORY_FORMAT_NAME = "INVENTORY_FORMAT_NAME";
    public static final java.lang.String _PARTNER_MANAGEMENT_PARTNER_ID = "PARTNER_MANAGEMENT_PARTNER_ID";
    public static final java.lang.String _PARTNER_MANAGEMENT_PARTNER_NAME = "PARTNER_MANAGEMENT_PARTNER_NAME";
    public static final java.lang.String _PARTNER_MANAGEMENT_PARTNER_LABEL_ID = "PARTNER_MANAGEMENT_PARTNER_LABEL_ID";
    public static final java.lang.String _PARTNER_MANAGEMENT_PARTNER_LABEL_NAME = "PARTNER_MANAGEMENT_PARTNER_LABEL_NAME";
    public static final java.lang.String _PARTNER_MANAGEMENT_ASSIGNMENT_ID = "PARTNER_MANAGEMENT_ASSIGNMENT_ID";
    public static final java.lang.String _PARTNER_MANAGEMENT_ASSIGNMENT_NAME = "PARTNER_MANAGEMENT_ASSIGNMENT_NAME";
    public static final java.lang.String _INVENTORY_SHARE_ASSIGNMENT_ID = "INVENTORY_SHARE_ASSIGNMENT_ID";
    public static final java.lang.String _INVENTORY_SHARE_ASSIGNMENT_NAME = "INVENTORY_SHARE_ASSIGNMENT_NAME";
    public static final java.lang.String _INVENTORY_SHARE_OUTCOME = "INVENTORY_SHARE_OUTCOME";
    public static final java.lang.String _GRP_DEMOGRAPHICS = "GRP_DEMOGRAPHICS";
    public static final java.lang.String _AD_REQUEST_AD_UNIT_SIZES = "AD_REQUEST_AD_UNIT_SIZES";
    public static final java.lang.String _AD_REQUEST_CUSTOM_CRITERIA = "AD_REQUEST_CUSTOM_CRITERIA";
    public static final java.lang.String _IS_FIRST_LOOK_DEAL = "IS_FIRST_LOOK_DEAL";
    public static final java.lang.String _IS_ADX_DIRECT = "IS_ADX_DIRECT";
    public static final java.lang.String _YIELD_GROUP_ID = "YIELD_GROUP_ID";
    public static final java.lang.String _YIELD_GROUP_NAME = "YIELD_GROUP_NAME";
    public static final java.lang.String _YIELD_PARTNER = "YIELD_PARTNER";
    public static final java.lang.String _YIELD_PARTNER_TAG = "YIELD_PARTNER_TAG";
    public static final java.lang.String _EXCHANGE_BIDDING_DEAL_ID = "EXCHANGE_BIDDING_DEAL_ID";
    public static final java.lang.String _EXCHANGE_BIDDING_DEAL_TYPE = "EXCHANGE_BIDDING_DEAL_TYPE";
    public static final java.lang.String _CLASSIFIED_ADVERTISER_ID = "CLASSIFIED_ADVERTISER_ID";
    public static final java.lang.String _CLASSIFIED_ADVERTISER_NAME = "CLASSIFIED_ADVERTISER_NAME";
    public static final java.lang.String _CLASSIFIED_BRAND_ID = "CLASSIFIED_BRAND_ID";
    public static final java.lang.String _CLASSIFIED_BRAND_NAME = "CLASSIFIED_BRAND_NAME";
    public static final java.lang.String _MEDIATION_TYPE = "MEDIATION_TYPE";
    public static final java.lang.String _NATIVE_TEMPLATE_ID = "NATIVE_TEMPLATE_ID";
    public static final java.lang.String _NATIVE_TEMPLATE_NAME = "NATIVE_TEMPLATE_NAME";
    public static final java.lang.String _NATIVE_STYLE_ID = "NATIVE_STYLE_ID";
    public static final java.lang.String _NATIVE_STYLE_NAME = "NATIVE_STYLE_NAME";
    public static final java.lang.String _CHILD_NETWORK_CODE = "CHILD_NETWORK_CODE";
    public static final java.lang.String _MOBILE_APP_RESOLVED_ID = "MOBILE_APP_RESOLVED_ID";
    public static final java.lang.String _MOBILE_APP_NAME = "MOBILE_APP_NAME";
    public static final java.lang.String _MOBILE_DEVICE_NAME = "MOBILE_DEVICE_NAME";
    public static final java.lang.String _MOBILE_INVENTORY_TYPE = "MOBILE_INVENTORY_TYPE";
    public static final java.lang.String _OPERATING_SYSTEM_VERSION_ID = "OPERATING_SYSTEM_VERSION_ID";
    public static final java.lang.String _OPERATING_SYSTEM_VERSION_NAME = "OPERATING_SYSTEM_VERSION_NAME";
    public static final java.lang.String _REQUEST_TYPE = "REQUEST_TYPE";
    public static final java.lang.String _AD_UNIT_STATUS = "AD_UNIT_STATUS";
    public static final java.lang.String _MASTER_COMPANION_CREATIVE_ID = "MASTER_COMPANION_CREATIVE_ID";
    public static final java.lang.String _MASTER_COMPANION_CREATIVE_NAME = "MASTER_COMPANION_CREATIVE_NAME";
    public static final java.lang.String _AUDIENCE_SEGMENT_ID = "AUDIENCE_SEGMENT_ID";
    public static final java.lang.String _AUDIENCE_SEGMENT_NAME = "AUDIENCE_SEGMENT_NAME";
    public static final java.lang.String _AUDIENCE_SEGMENT_DATA_PROVIDER_NAME = "AUDIENCE_SEGMENT_DATA_PROVIDER_NAME";
    public static final java.lang.String _WEB_PROPERTY_CODE = "WEB_PROPERTY_CODE";
    public static final java.lang.String _BUYING_AGENCY_NAME = "BUYING_AGENCY_NAME";
    public static final java.lang.String _BUYER_NETWORK_ID = "BUYER_NETWORK_ID";
    public static final java.lang.String _BUYER_NETWORK_NAME = "BUYER_NETWORK_NAME";
    public static final java.lang.String _BIDDER_ID = "BIDDER_ID";
    public static final java.lang.String _BIDDER_NAME = "BIDDER_NAME";
    public static final java.lang.String _ADVERTISER_DOMAIN_NAME = "ADVERTISER_DOMAIN_NAME";
    public static final java.lang.String _AD_EXCHANGE_OPTIMIZATION_TYPE = "AD_EXCHANGE_OPTIMIZATION_TYPE";
    public static final java.lang.String _ADVERTISER_VERTICAL_NAME = "ADVERTISER_VERTICAL_NAME";
    public static final java.lang.String _NIELSEN_SEGMENT = "NIELSEN_SEGMENT";
    public static final java.lang.String _NIELSEN_DEMOGRAPHICS = "NIELSEN_DEMOGRAPHICS";
    public static final java.lang.String _NIELSEN_RESTATEMENT_DATE = "NIELSEN_RESTATEMENT_DATE";
    public static final java.lang.String _NIELSEN_DEVICE_ID = "NIELSEN_DEVICE_ID";
    public static final java.lang.String _NIELSEN_DEVICE_NAME = "NIELSEN_DEVICE_NAME";
    public static final java.lang.String _PROGRAMMATIC_BUYER_ID = "PROGRAMMATIC_BUYER_ID";
    public static final java.lang.String _PROGRAMMATIC_BUYER_NAME = "PROGRAMMATIC_BUYER_NAME";
    public static final java.lang.String _REQUESTED_AD_SIZES = "REQUESTED_AD_SIZES";
    public static final java.lang.String _CREATIVE_SIZE_DELIVERED = "CREATIVE_SIZE_DELIVERED";
    public static final java.lang.String _PROGRAMMATIC_CHANNEL_ID = "PROGRAMMATIC_CHANNEL_ID";
    public static final java.lang.String _PROGRAMMATIC_CHANNEL_NAME = "PROGRAMMATIC_CHANNEL_NAME";
    public static final java.lang.String _CLASSIFIED_YIELD_PARTNER_NAME = "CLASSIFIED_YIELD_PARTNER_NAME";
    public static final java.lang.String _DP_DATE = "DP_DATE";
    public static final java.lang.String _DP_WEEK = "DP_WEEK";
    public static final java.lang.String _DP_MONTH_YEAR = "DP_MONTH_YEAR";
    public static final java.lang.String _DP_COUNTRY_CRITERIA_ID = "DP_COUNTRY_CRITERIA_ID";
    public static final java.lang.String _DP_COUNTRY_NAME = "DP_COUNTRY_NAME";
    public static final java.lang.String _DP_INVENTORY_TYPE = "DP_INVENTORY_TYPE";
    public static final java.lang.String _DP_CREATIVE_SIZE = "DP_CREATIVE_SIZE";
    public static final java.lang.String _DP_BRAND_NAME = "DP_BRAND_NAME";
    public static final java.lang.String _DP_ADVERTISER_NAME = "DP_ADVERTISER_NAME";
    public static final java.lang.String _DP_ADX_BUYER_NETWORK_NAME = "DP_ADX_BUYER_NETWORK_NAME";
    public static final java.lang.String _DP_MOBILE_DEVICE_NAME = "DP_MOBILE_DEVICE_NAME";
    public static final java.lang.String _DP_DEVICE_CATEGORY_NAME = "DP_DEVICE_CATEGORY_NAME";
    public static final java.lang.String _DP_TAG_ID = "DP_TAG_ID";
    public static final java.lang.String _DP_DEAL_ID = "DP_DEAL_ID";
    public static final java.lang.String _DP_APP_ID = "DP_APP_ID";
    public static final java.lang.String _CUSTOM_DIMENSION = "CUSTOM_DIMENSION";
    public static final java.lang.String _DEMAND_CHANNEL_ID = "DEMAND_CHANNEL_ID";
    public static final java.lang.String _DEMAND_CHANNEL_NAME = "DEMAND_CHANNEL_NAME";
    public static final java.lang.String _DOMAIN = "DOMAIN";
    public static final java.lang.String _SERVING_RESTRICTION_ID = "SERVING_RESTRICTION_ID";
    public static final java.lang.String _SERVING_RESTRICTION_NAME = "SERVING_RESTRICTION_NAME";
    public static final java.lang.String _UNIFIED_PRICING_RULE_ID = "UNIFIED_PRICING_RULE_ID";
    public static final java.lang.String _UNIFIED_PRICING_RULE_NAME = "UNIFIED_PRICING_RULE_NAME";
    public static final java.lang.String _FIRST_LOOK_PRICING_RULE_ID = "FIRST_LOOK_PRICING_RULE_ID";
    public static final java.lang.String _FIRST_LOOK_PRICING_RULE_NAME = "FIRST_LOOK_PRICING_RULE_NAME";
    public static final java.lang.String _BID_RANGE = "BID_RANGE";
    public static final java.lang.String _BID_REJECTION_REASON = "BID_REJECTION_REASON";
    public static final java.lang.String _BID_REJECTION_REASON_NAME = "BID_REJECTION_REASON_NAME";
    public static final java.lang.String _AD_TECHNOLOGY_PROVIDER_DOMAIN = "AD_TECHNOLOGY_PROVIDER_DOMAIN";
    public static final java.lang.String _PROGRAMMATIC_DEAL_ID = "PROGRAMMATIC_DEAL_ID";
    public static final java.lang.String _PROGRAMMATIC_DEAL_NAME = "PROGRAMMATIC_DEAL_NAME";
    public static final java.lang.String _AD_TECHNOLOGY_PROVIDER_ID = "AD_TECHNOLOGY_PROVIDER_ID";
    public static final java.lang.String _AD_TECHNOLOGY_PROVIDER_NAME = "AD_TECHNOLOGY_PROVIDER_NAME";
    public static final java.lang.String _TCF_VENDOR_ID = "TCF_VENDOR_ID";
    public static final java.lang.String _TCF_VENDOR_NAME = "TCF_VENDOR_NAME";
    public static final java.lang.String _SITE_NAME = "SITE_NAME";
    public static final java.lang.String _CHANNEL_NAME = "CHANNEL_NAME";
    public static final java.lang.String _URL_ID = "URL_ID";
    public static final java.lang.String _URL_NAME = "URL_NAME";
    public static final java.lang.String _VIDEO_AD_DURATION = "VIDEO_AD_DURATION";
    public static final java.lang.String _VIDEO_AD_TYPE_ID = "VIDEO_AD_TYPE_ID";
    public static final java.lang.String _VIDEO_AD_TYPE_NAME = "VIDEO_AD_TYPE_NAME";
    public static final java.lang.String _AD_EXCHANGE_PRODUCT_CODE = "AD_EXCHANGE_PRODUCT_CODE";
    public static final java.lang.String _AD_EXCHANGE_PRODUCT_NAME = "AD_EXCHANGE_PRODUCT_NAME";
    public static final java.lang.String _DYNAMIC_ALLOCATION_ID = "DYNAMIC_ALLOCATION_ID";
    public static final java.lang.String _DYNAMIC_ALLOCATION_NAME = "DYNAMIC_ALLOCATION_NAME";
    public static final java.lang.String _AD_TYPE_ID = "AD_TYPE_ID";
    public static final java.lang.String _AD_TYPE_NAME = "AD_TYPE_NAME";
    public static final java.lang.String _AD_LOCATION_ID = "AD_LOCATION_ID";
    public static final java.lang.String _AD_LOCATION_NAME = "AD_LOCATION_NAME";
    public static final java.lang.String _TARGETING_TYPE_CODE = "TARGETING_TYPE_CODE";
    public static final java.lang.String _TARGETING_TYPE_NAME = "TARGETING_TYPE_NAME";
    public static final java.lang.String _BRANDING_TYPE_CODE = "BRANDING_TYPE_CODE";
    public static final java.lang.String _BRANDING_TYPE_NAME = "BRANDING_TYPE_NAME";
    public static final java.lang.String _BANDWIDTH_ID = "BANDWIDTH_ID";
    public static final java.lang.String _BANDWIDTH_NAME = "BANDWIDTH_NAME";
    public static final java.lang.String _CARRIER_ID = "CARRIER_ID";
    public static final java.lang.String _CARRIER_NAME = "CARRIER_NAME";
    // Type-safe enumeration singletons, one per XML string value above.
    // Constructing each instance registers it in the internal lookup table,
    // so every constant here is resolvable via fromValue(java.lang.String).
    public static final Dimension MONTH_AND_YEAR = new Dimension(_MONTH_AND_YEAR);
    public static final Dimension WEEK = new Dimension(_WEEK);
    public static final Dimension DATE = new Dimension(_DATE);
    public static final Dimension DAY = new Dimension(_DAY);
    public static final Dimension HOUR = new Dimension(_HOUR);
    public static final Dimension DATE_PT = new Dimension(_DATE_PT);
    public static final Dimension WEEK_PT = new Dimension(_WEEK_PT);
    public static final Dimension MONTH_YEAR_PT = new Dimension(_MONTH_YEAR_PT);
    public static final Dimension DAY_OF_WEEK_PT = new Dimension(_DAY_OF_WEEK_PT);
    public static final Dimension LINE_ITEM_ID = new Dimension(_LINE_ITEM_ID);
    public static final Dimension LINE_ITEM_NAME = new Dimension(_LINE_ITEM_NAME);
    public static final Dimension LINE_ITEM_TYPE = new Dimension(_LINE_ITEM_TYPE);
    public static final Dimension ORDER_ID = new Dimension(_ORDER_ID);
    public static final Dimension ORDER_NAME = new Dimension(_ORDER_NAME);
    public static final Dimension ORDER_DELIVERY_STATUS = new Dimension(_ORDER_DELIVERY_STATUS);
    public static final Dimension ADVERTISER_ID = new Dimension(_ADVERTISER_ID);
    public static final Dimension ADVERTISER_NAME = new Dimension(_ADVERTISER_NAME);
    public static final Dimension AD_NETWORK_ID = new Dimension(_AD_NETWORK_ID);
    public static final Dimension AD_NETWORK_NAME = new Dimension(_AD_NETWORK_NAME);
    public static final Dimension SALESPERSON_ID = new Dimension(_SALESPERSON_ID);
    public static final Dimension SALESPERSON_NAME = new Dimension(_SALESPERSON_NAME);
    public static final Dimension CREATIVE_ID = new Dimension(_CREATIVE_ID);
    public static final Dimension CREATIVE_NAME = new Dimension(_CREATIVE_NAME);
    public static final Dimension CREATIVE_TYPE = new Dimension(_CREATIVE_TYPE);
    public static final Dimension CREATIVE_BILLING_TYPE = new Dimension(_CREATIVE_BILLING_TYPE);
    public static final Dimension CUSTOM_EVENT_ID = new Dimension(_CUSTOM_EVENT_ID);
    public static final Dimension CUSTOM_EVENT_NAME = new Dimension(_CUSTOM_EVENT_NAME);
    public static final Dimension CUSTOM_EVENT_TYPE = new Dimension(_CUSTOM_EVENT_TYPE);
    public static final Dimension CREATIVE_SIZE = new Dimension(_CREATIVE_SIZE);
    public static final Dimension AD_UNIT_ID = new Dimension(_AD_UNIT_ID);
    public static final Dimension AD_UNIT_NAME = new Dimension(_AD_UNIT_NAME);
    public static final Dimension PARENT_AD_UNIT_ID = new Dimension(_PARENT_AD_UNIT_ID);
    public static final Dimension PARENT_AD_UNIT_NAME = new Dimension(_PARENT_AD_UNIT_NAME);
    public static final Dimension PLACEMENT_ID = new Dimension(_PLACEMENT_ID);
    public static final Dimension PLACEMENT_NAME = new Dimension(_PLACEMENT_NAME);
    public static final Dimension PLACEMENT_STATUS = new Dimension(_PLACEMENT_STATUS);
    public static final Dimension TARGETING = new Dimension(_TARGETING);
    public static final Dimension BROWSER_NAME = new Dimension(_BROWSER_NAME);
    public static final Dimension DEVICE_CATEGORY_ID = new Dimension(_DEVICE_CATEGORY_ID);
    public static final Dimension DEVICE_CATEGORY_NAME = new Dimension(_DEVICE_CATEGORY_NAME);
    public static final Dimension COUNTRY_CRITERIA_ID = new Dimension(_COUNTRY_CRITERIA_ID);
    public static final Dimension COUNTRY_CODE = new Dimension(_COUNTRY_CODE);
    public static final Dimension COUNTRY_NAME = new Dimension(_COUNTRY_NAME);
    public static final Dimension REGION_CRITERIA_ID = new Dimension(_REGION_CRITERIA_ID);
    public static final Dimension REGION_NAME = new Dimension(_REGION_NAME);
    public static final Dimension CITY_CRITERIA_ID = new Dimension(_CITY_CRITERIA_ID);
    public static final Dimension CITY_NAME = new Dimension(_CITY_NAME);
    public static final Dimension METRO_CRITERIA_ID = new Dimension(_METRO_CRITERIA_ID);
    public static final Dimension METRO_NAME = new Dimension(_METRO_NAME);
    public static final Dimension POSTAL_CODE_CRITERIA_ID = new Dimension(_POSTAL_CODE_CRITERIA_ID);
    public static final Dimension POSTAL_CODE = new Dimension(_POSTAL_CODE);
    public static final Dimension CUSTOM_TARGETING_VALUE_ID = new Dimension(_CUSTOM_TARGETING_VALUE_ID);
    public static final Dimension CUSTOM_CRITERIA = new Dimension(_CUSTOM_CRITERIA);
    public static final Dimension CONTENT_ID = new Dimension(_CONTENT_ID);
    public static final Dimension CONTENT_NAME = new Dimension(_CONTENT_NAME);
    public static final Dimension CONTENT_BUNDLE_ID = new Dimension(_CONTENT_BUNDLE_ID);
    public static final Dimension CONTENT_BUNDLE_NAME = new Dimension(_CONTENT_BUNDLE_NAME);
    public static final Dimension CMS_METADATA = new Dimension(_CMS_METADATA);
    public static final Dimension VIDEO_FALLBACK_POSITION = new Dimension(_VIDEO_FALLBACK_POSITION);
    public static final Dimension POSITION_OF_POD = new Dimension(_POSITION_OF_POD);
    public static final Dimension POSITION_IN_POD = new Dimension(_POSITION_IN_POD);
    public static final Dimension CUSTOM_SPOT_ID = new Dimension(_CUSTOM_SPOT_ID);
    public static final Dimension CUSTOM_SPOT_NAME = new Dimension(_CUSTOM_SPOT_NAME);
    public static final Dimension VIDEO_REDIRECT_THIRD_PARTY = new Dimension(_VIDEO_REDIRECT_THIRD_PARTY);
    public static final Dimension VIDEO_BREAK_TYPE = new Dimension(_VIDEO_BREAK_TYPE);
    public static final Dimension VIDEO_BREAK_TYPE_NAME = new Dimension(_VIDEO_BREAK_TYPE_NAME);
    public static final Dimension VIDEO_VAST_VERSION = new Dimension(_VIDEO_VAST_VERSION);
    public static final Dimension VIDEO_AD_REQUEST_DURATION_ID = new Dimension(_VIDEO_AD_REQUEST_DURATION_ID);
    public static final Dimension VIDEO_AD_REQUEST_DURATION = new Dimension(_VIDEO_AD_REQUEST_DURATION);
    public static final Dimension VIDEO_PLCMT_ID = new Dimension(_VIDEO_PLCMT_ID);
    public static final Dimension VIDEO_PLCMT_NAME = new Dimension(_VIDEO_PLCMT_NAME);
    public static final Dimension INVENTORY_FORMAT = new Dimension(_INVENTORY_FORMAT);
    public static final Dimension INVENTORY_FORMAT_NAME = new Dimension(_INVENTORY_FORMAT_NAME);
    public static final Dimension PARTNER_MANAGEMENT_PARTNER_ID = new Dimension(_PARTNER_MANAGEMENT_PARTNER_ID);
    public static final Dimension PARTNER_MANAGEMENT_PARTNER_NAME = new Dimension(_PARTNER_MANAGEMENT_PARTNER_NAME);
    public static final Dimension PARTNER_MANAGEMENT_PARTNER_LABEL_ID = new Dimension(_PARTNER_MANAGEMENT_PARTNER_LABEL_ID);
    public static final Dimension PARTNER_MANAGEMENT_PARTNER_LABEL_NAME = new Dimension(_PARTNER_MANAGEMENT_PARTNER_LABEL_NAME);
    public static final Dimension PARTNER_MANAGEMENT_ASSIGNMENT_ID = new Dimension(_PARTNER_MANAGEMENT_ASSIGNMENT_ID);
    public static final Dimension PARTNER_MANAGEMENT_ASSIGNMENT_NAME = new Dimension(_PARTNER_MANAGEMENT_ASSIGNMENT_NAME);
    public static final Dimension INVENTORY_SHARE_ASSIGNMENT_ID = new Dimension(_INVENTORY_SHARE_ASSIGNMENT_ID);
    public static final Dimension INVENTORY_SHARE_ASSIGNMENT_NAME = new Dimension(_INVENTORY_SHARE_ASSIGNMENT_NAME);
    public static final Dimension INVENTORY_SHARE_OUTCOME = new Dimension(_INVENTORY_SHARE_OUTCOME);
    public static final Dimension GRP_DEMOGRAPHICS = new Dimension(_GRP_DEMOGRAPHICS);
    public static final Dimension AD_REQUEST_AD_UNIT_SIZES = new Dimension(_AD_REQUEST_AD_UNIT_SIZES);
    public static final Dimension AD_REQUEST_CUSTOM_CRITERIA = new Dimension(_AD_REQUEST_CUSTOM_CRITERIA);
    public static final Dimension IS_FIRST_LOOK_DEAL = new Dimension(_IS_FIRST_LOOK_DEAL);
    public static final Dimension IS_ADX_DIRECT = new Dimension(_IS_ADX_DIRECT);
    public static final Dimension YIELD_GROUP_ID = new Dimension(_YIELD_GROUP_ID);
    public static final Dimension YIELD_GROUP_NAME = new Dimension(_YIELD_GROUP_NAME);
    public static final Dimension YIELD_PARTNER = new Dimension(_YIELD_PARTNER);
    public static final Dimension YIELD_PARTNER_TAG = new Dimension(_YIELD_PARTNER_TAG);
    public static final Dimension EXCHANGE_BIDDING_DEAL_ID = new Dimension(_EXCHANGE_BIDDING_DEAL_ID);
    public static final Dimension EXCHANGE_BIDDING_DEAL_TYPE = new Dimension(_EXCHANGE_BIDDING_DEAL_TYPE);
    public static final Dimension CLASSIFIED_ADVERTISER_ID = new Dimension(_CLASSIFIED_ADVERTISER_ID);
    public static final Dimension CLASSIFIED_ADVERTISER_NAME = new Dimension(_CLASSIFIED_ADVERTISER_NAME);
    public static final Dimension CLASSIFIED_BRAND_ID = new Dimension(_CLASSIFIED_BRAND_ID);
    public static final Dimension CLASSIFIED_BRAND_NAME = new Dimension(_CLASSIFIED_BRAND_NAME);
    public static final Dimension MEDIATION_TYPE = new Dimension(_MEDIATION_TYPE);
    public static final Dimension NATIVE_TEMPLATE_ID = new Dimension(_NATIVE_TEMPLATE_ID);
    public static final Dimension NATIVE_TEMPLATE_NAME = new Dimension(_NATIVE_TEMPLATE_NAME);
    public static final Dimension NATIVE_STYLE_ID = new Dimension(_NATIVE_STYLE_ID);
    public static final Dimension NATIVE_STYLE_NAME = new Dimension(_NATIVE_STYLE_NAME);
    public static final Dimension CHILD_NETWORK_CODE = new Dimension(_CHILD_NETWORK_CODE);
    public static final Dimension MOBILE_APP_RESOLVED_ID = new Dimension(_MOBILE_APP_RESOLVED_ID);
    public static final Dimension MOBILE_APP_NAME = new Dimension(_MOBILE_APP_NAME);
    public static final Dimension MOBILE_DEVICE_NAME = new Dimension(_MOBILE_DEVICE_NAME);
    public static final Dimension MOBILE_INVENTORY_TYPE = new Dimension(_MOBILE_INVENTORY_TYPE);
    public static final Dimension OPERATING_SYSTEM_VERSION_ID = new Dimension(_OPERATING_SYSTEM_VERSION_ID);
    public static final Dimension OPERATING_SYSTEM_VERSION_NAME = new Dimension(_OPERATING_SYSTEM_VERSION_NAME);
    public static final Dimension REQUEST_TYPE = new Dimension(_REQUEST_TYPE);
    public static final Dimension AD_UNIT_STATUS = new Dimension(_AD_UNIT_STATUS);
    public static final Dimension MASTER_COMPANION_CREATIVE_ID = new Dimension(_MASTER_COMPANION_CREATIVE_ID);
    public static final Dimension MASTER_COMPANION_CREATIVE_NAME = new Dimension(_MASTER_COMPANION_CREATIVE_NAME);
    public static final Dimension AUDIENCE_SEGMENT_ID = new Dimension(_AUDIENCE_SEGMENT_ID);
    public static final Dimension AUDIENCE_SEGMENT_NAME = new Dimension(_AUDIENCE_SEGMENT_NAME);
    public static final Dimension AUDIENCE_SEGMENT_DATA_PROVIDER_NAME = new Dimension(_AUDIENCE_SEGMENT_DATA_PROVIDER_NAME);
    public static final Dimension WEB_PROPERTY_CODE = new Dimension(_WEB_PROPERTY_CODE);
    public static final Dimension BUYING_AGENCY_NAME = new Dimension(_BUYING_AGENCY_NAME);
    public static final Dimension BUYER_NETWORK_ID = new Dimension(_BUYER_NETWORK_ID);
    public static final Dimension BUYER_NETWORK_NAME = new Dimension(_BUYER_NETWORK_NAME);
    public static final Dimension BIDDER_ID = new Dimension(_BIDDER_ID);
    public static final Dimension BIDDER_NAME = new Dimension(_BIDDER_NAME);
    public static final Dimension ADVERTISER_DOMAIN_NAME = new Dimension(_ADVERTISER_DOMAIN_NAME);
    public static final Dimension AD_EXCHANGE_OPTIMIZATION_TYPE = new Dimension(_AD_EXCHANGE_OPTIMIZATION_TYPE);
    public static final Dimension ADVERTISER_VERTICAL_NAME = new Dimension(_ADVERTISER_VERTICAL_NAME);
    public static final Dimension NIELSEN_SEGMENT = new Dimension(_NIELSEN_SEGMENT);
    public static final Dimension NIELSEN_DEMOGRAPHICS = new Dimension(_NIELSEN_DEMOGRAPHICS);
    public static final Dimension NIELSEN_RESTATEMENT_DATE = new Dimension(_NIELSEN_RESTATEMENT_DATE);
    public static final Dimension NIELSEN_DEVICE_ID = new Dimension(_NIELSEN_DEVICE_ID);
    public static final Dimension NIELSEN_DEVICE_NAME = new Dimension(_NIELSEN_DEVICE_NAME);
    public static final Dimension PROGRAMMATIC_BUYER_ID = new Dimension(_PROGRAMMATIC_BUYER_ID);
    public static final Dimension PROGRAMMATIC_BUYER_NAME = new Dimension(_PROGRAMMATIC_BUYER_NAME);
    public static final Dimension REQUESTED_AD_SIZES = new Dimension(_REQUESTED_AD_SIZES);
    public static final Dimension CREATIVE_SIZE_DELIVERED = new Dimension(_CREATIVE_SIZE_DELIVERED);
    public static final Dimension PROGRAMMATIC_CHANNEL_ID = new Dimension(_PROGRAMMATIC_CHANNEL_ID);
    public static final Dimension PROGRAMMATIC_CHANNEL_NAME = new Dimension(_PROGRAMMATIC_CHANNEL_NAME);
    public static final Dimension CLASSIFIED_YIELD_PARTNER_NAME = new Dimension(_CLASSIFIED_YIELD_PARTNER_NAME);
    public static final Dimension DP_DATE = new Dimension(_DP_DATE);
    public static final Dimension DP_WEEK = new Dimension(_DP_WEEK);
    public static final Dimension DP_MONTH_YEAR = new Dimension(_DP_MONTH_YEAR);
    public static final Dimension DP_COUNTRY_CRITERIA_ID = new Dimension(_DP_COUNTRY_CRITERIA_ID);
    public static final Dimension DP_COUNTRY_NAME = new Dimension(_DP_COUNTRY_NAME);
    public static final Dimension DP_INVENTORY_TYPE = new Dimension(_DP_INVENTORY_TYPE);
    public static final Dimension DP_CREATIVE_SIZE = new Dimension(_DP_CREATIVE_SIZE);
    public static final Dimension DP_BRAND_NAME = new Dimension(_DP_BRAND_NAME);
    public static final Dimension DP_ADVERTISER_NAME = new Dimension(_DP_ADVERTISER_NAME);
    public static final Dimension DP_ADX_BUYER_NETWORK_NAME = new Dimension(_DP_ADX_BUYER_NETWORK_NAME);
    public static final Dimension DP_MOBILE_DEVICE_NAME = new Dimension(_DP_MOBILE_DEVICE_NAME);
    public static final Dimension DP_DEVICE_CATEGORY_NAME = new Dimension(_DP_DEVICE_CATEGORY_NAME);
    public static final Dimension DP_TAG_ID = new Dimension(_DP_TAG_ID);
    public static final Dimension DP_DEAL_ID = new Dimension(_DP_DEAL_ID);
    public static final Dimension DP_APP_ID = new Dimension(_DP_APP_ID);
    public static final Dimension CUSTOM_DIMENSION = new Dimension(_CUSTOM_DIMENSION);
    public static final Dimension DEMAND_CHANNEL_ID = new Dimension(_DEMAND_CHANNEL_ID);
    public static final Dimension DEMAND_CHANNEL_NAME = new Dimension(_DEMAND_CHANNEL_NAME);
    public static final Dimension DOMAIN = new Dimension(_DOMAIN);
    public static final Dimension SERVING_RESTRICTION_ID = new Dimension(_SERVING_RESTRICTION_ID);
    public static final Dimension SERVING_RESTRICTION_NAME = new Dimension(_SERVING_RESTRICTION_NAME);
    public static final Dimension UNIFIED_PRICING_RULE_ID = new Dimension(_UNIFIED_PRICING_RULE_ID);
    public static final Dimension UNIFIED_PRICING_RULE_NAME = new Dimension(_UNIFIED_PRICING_RULE_NAME);
    public static final Dimension FIRST_LOOK_PRICING_RULE_ID = new Dimension(_FIRST_LOOK_PRICING_RULE_ID);
    public static final Dimension FIRST_LOOK_PRICING_RULE_NAME = new Dimension(_FIRST_LOOK_PRICING_RULE_NAME);
    public static final Dimension BID_RANGE = new Dimension(_BID_RANGE);
    public static final Dimension BID_REJECTION_REASON = new Dimension(_BID_REJECTION_REASON);
    public static final Dimension BID_REJECTION_REASON_NAME = new Dimension(_BID_REJECTION_REASON_NAME);
    public static final Dimension AD_TECHNOLOGY_PROVIDER_DOMAIN = new Dimension(_AD_TECHNOLOGY_PROVIDER_DOMAIN);
    public static final Dimension PROGRAMMATIC_DEAL_ID = new Dimension(_PROGRAMMATIC_DEAL_ID);
    public static final Dimension PROGRAMMATIC_DEAL_NAME = new Dimension(_PROGRAMMATIC_DEAL_NAME);
    public static final Dimension AD_TECHNOLOGY_PROVIDER_ID = new Dimension(_AD_TECHNOLOGY_PROVIDER_ID);
    public static final Dimension AD_TECHNOLOGY_PROVIDER_NAME = new Dimension(_AD_TECHNOLOGY_PROVIDER_NAME);
    public static final Dimension TCF_VENDOR_ID = new Dimension(_TCF_VENDOR_ID);
    public static final Dimension TCF_VENDOR_NAME = new Dimension(_TCF_VENDOR_NAME);
    public static final Dimension SITE_NAME = new Dimension(_SITE_NAME);
    public static final Dimension CHANNEL_NAME = new Dimension(_CHANNEL_NAME);
    public static final Dimension URL_ID = new Dimension(_URL_ID);
    public static final Dimension URL_NAME = new Dimension(_URL_NAME);
    public static final Dimension VIDEO_AD_DURATION = new Dimension(_VIDEO_AD_DURATION);
    public static final Dimension VIDEO_AD_TYPE_ID = new Dimension(_VIDEO_AD_TYPE_ID);
    public static final Dimension VIDEO_AD_TYPE_NAME = new Dimension(_VIDEO_AD_TYPE_NAME);
    public static final Dimension AD_EXCHANGE_PRODUCT_CODE = new Dimension(_AD_EXCHANGE_PRODUCT_CODE);
    public static final Dimension AD_EXCHANGE_PRODUCT_NAME = new Dimension(_AD_EXCHANGE_PRODUCT_NAME);
    public static final Dimension DYNAMIC_ALLOCATION_ID = new Dimension(_DYNAMIC_ALLOCATION_ID);
    public static final Dimension DYNAMIC_ALLOCATION_NAME = new Dimension(_DYNAMIC_ALLOCATION_NAME);
    public static final Dimension AD_TYPE_ID = new Dimension(_AD_TYPE_ID);
    public static final Dimension AD_TYPE_NAME = new Dimension(_AD_TYPE_NAME);
    public static final Dimension AD_LOCATION_ID = new Dimension(_AD_LOCATION_ID);
    public static final Dimension AD_LOCATION_NAME = new Dimension(_AD_LOCATION_NAME);
    public static final Dimension TARGETING_TYPE_CODE = new Dimension(_TARGETING_TYPE_CODE);
    public static final Dimension TARGETING_TYPE_NAME = new Dimension(_TARGETING_TYPE_NAME);
    public static final Dimension BRANDING_TYPE_CODE = new Dimension(_BRANDING_TYPE_CODE);
    public static final Dimension BRANDING_TYPE_NAME = new Dimension(_BRANDING_TYPE_NAME);
    public static final Dimension BANDWIDTH_ID = new Dimension(_BANDWIDTH_ID);
    public static final Dimension BANDWIDTH_NAME = new Dimension(_BANDWIDTH_NAME);
    public static final Dimension CARRIER_ID = new Dimension(_CARRIER_ID);
    public static final Dimension CARRIER_NAME = new Dimension(_CARRIER_NAME);
    /** Returns the XML string value represented by this enumeration instance. */
    public java.lang.String getValue() { return _value_;}
public static Dimension fromValue(java.lang.String value)
throws java.lang.IllegalArgumentException {
Dimension enumeration = (Dimension)
_table_.get(value);
if (enumeration==null) throw new java.lang.IllegalArgumentException();
return enumeration;
}
public static Dimension fromString(java.lang.String value)
throws java.lang.IllegalArgumentException {
return fromValue(value);
}
    /**
     * Identity comparison. Each value is canonicalized to a single registered
     * instance (see {@code readResolve()} below), so reference equality is
     * the intended semantics for this generated type-safe enum.
     */
    public boolean equals(java.lang.Object obj) {return (obj == this);}
    // Consistent with equals(): equal objects are the same instance and thus
    // share the same value string (and hash).
    public int hashCode() { return toString().hashCode();}
    // The XML string form of this enumeration value.
    public java.lang.String toString() { return _value_;}
    /**
     * Preserves the singleton property across Java serialization: a
     * deserialized copy is replaced by the canonical registered instance
     * looked up via fromValue(_value_).
     */
    public java.lang.Object readResolve() throws java.io.ObjectStreamException { return fromValue(_value_);}
public static org.apache.axis.encoding.Serializer getSerializer(
java.lang.String mechType,
java.lang.Class _javaType,
javax.xml.namespace.QName _xmlType) {
return
new org.apache.axis.encoding.ser.EnumSerializer(
_javaType, _xmlType);
}
public static org.apache.axis.encoding.Deserializer getDeserializer(
java.lang.String mechType,
java.lang.Class _javaType,
javax.xml.namespace.QName _xmlType) {
return
new org.apache.axis.encoding.ser.EnumDeserializer(
_javaType, _xmlType);
}
    // Axis type metadata: describes this class to the Axis (de)serialization
    // machinery, binding it to the "Dimension" schema type in the v202411
    // Ad Manager namespace.
    private static org.apache.axis.description.TypeDesc typeDesc =
        new org.apache.axis.description.TypeDesc(Dimension.class);
    static {
        typeDesc.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202411", "Dimension"));
    }
    /**
     * Returns the Axis type-metadata descriptor for this class, used by the
     * framework to map between the Java type and its XML schema type.
     */
    public static org.apache.axis.description.TypeDesc getTypeDesc() {
        return typeDesc;
    }

}
|
googleads/googleads-java-lib | 37,064 | modules/dfp_axis/src/main/java/com/google/api/ads/admanager/axis/v202502/Dimension.java | // Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/**
* Dimension.java
*
* This file was auto-generated from WSDL
* by the Apache Axis 1.4.1-SNAPSHOT Mar 20, 2024 (11:59:10 PDT) WSDL2Java emitter.
*/
package com.google.api.ads.admanager.axis.v202502;
/**
 * Type-safe enumeration of Ad Manager report dimensions, auto-generated from
 * WSDL by the Apache Axis WSDL2Java emitter (see the file header above).
 */
public class Dimension implements java.io.Serializable {
    // The XML string value this instance represents (e.g. "LINE_ITEM_ID").
    private java.lang.String _value_;
    // Registry of every constructed instance, keyed by its XML value.
    // NOTE(review): plain HashMap mutated in the constructor; presumably all
    // instances are created by the constant initializers during class
    // initialization — confirm before constructing instances from multiple
    // threads at runtime.
    private static java.util.HashMap _table_ = new java.util.HashMap();
    // Constructor: records the value and registers this instance in _table_.
    protected Dimension(java.lang.String value) {
        _value_ = value;
        _table_.put(_value_,this);
    }
    // XML string values for each report dimension; each constant is the key
    // under which the corresponding Dimension singleton is registered.
    public static final java.lang.String _MONTH_AND_YEAR = "MONTH_AND_YEAR";
    public static final java.lang.String _WEEK = "WEEK";
    public static final java.lang.String _DATE = "DATE";
    public static final java.lang.String _DAY = "DAY";
    public static final java.lang.String _HOUR = "HOUR";
    public static final java.lang.String _DATE_PT = "DATE_PT";
    public static final java.lang.String _WEEK_PT = "WEEK_PT";
    public static final java.lang.String _MONTH_YEAR_PT = "MONTH_YEAR_PT";
    public static final java.lang.String _DAY_OF_WEEK_PT = "DAY_OF_WEEK_PT";
    public static final java.lang.String _LINE_ITEM_ID = "LINE_ITEM_ID";
    public static final java.lang.String _LINE_ITEM_NAME = "LINE_ITEM_NAME";
    public static final java.lang.String _LINE_ITEM_TYPE = "LINE_ITEM_TYPE";
    public static final java.lang.String _ORDER_ID = "ORDER_ID";
    public static final java.lang.String _ORDER_NAME = "ORDER_NAME";
    public static final java.lang.String _ORDER_DELIVERY_STATUS = "ORDER_DELIVERY_STATUS";
    public static final java.lang.String _ADVERTISER_ID = "ADVERTISER_ID";
    public static final java.lang.String _ADVERTISER_NAME = "ADVERTISER_NAME";
    public static final java.lang.String _AD_NETWORK_ID = "AD_NETWORK_ID";
    public static final java.lang.String _AD_NETWORK_NAME = "AD_NETWORK_NAME";
    public static final java.lang.String _SALESPERSON_ID = "SALESPERSON_ID";
    public static final java.lang.String _SALESPERSON_NAME = "SALESPERSON_NAME";
    public static final java.lang.String _CREATIVE_ID = "CREATIVE_ID";
    public static final java.lang.String _CREATIVE_NAME = "CREATIVE_NAME";
    public static final java.lang.String _CREATIVE_TYPE = "CREATIVE_TYPE";
    public static final java.lang.String _CREATIVE_BILLING_TYPE = "CREATIVE_BILLING_TYPE";
    public static final java.lang.String _CUSTOM_EVENT_ID = "CUSTOM_EVENT_ID";
    public static final java.lang.String _CUSTOM_EVENT_NAME = "CUSTOM_EVENT_NAME";
    public static final java.lang.String _CUSTOM_EVENT_TYPE = "CUSTOM_EVENT_TYPE";
    public static final java.lang.String _CREATIVE_SIZE = "CREATIVE_SIZE";
    public static final java.lang.String _AD_UNIT_ID = "AD_UNIT_ID";
    public static final java.lang.String _AD_UNIT_NAME = "AD_UNIT_NAME";
    public static final java.lang.String _PARENT_AD_UNIT_ID = "PARENT_AD_UNIT_ID";
    public static final java.lang.String _PARENT_AD_UNIT_NAME = "PARENT_AD_UNIT_NAME";
    public static final java.lang.String _PLACEMENT_ID = "PLACEMENT_ID";
    public static final java.lang.String _PLACEMENT_NAME = "PLACEMENT_NAME";
    public static final java.lang.String _PLACEMENT_STATUS = "PLACEMENT_STATUS";
    public static final java.lang.String _TARGETING = "TARGETING";
    public static final java.lang.String _BROWSER_NAME = "BROWSER_NAME";
    public static final java.lang.String _DEVICE_CATEGORY_ID = "DEVICE_CATEGORY_ID";
    public static final java.lang.String _DEVICE_CATEGORY_NAME = "DEVICE_CATEGORY_NAME";
    public static final java.lang.String _COUNTRY_CRITERIA_ID = "COUNTRY_CRITERIA_ID";
    public static final java.lang.String _COUNTRY_CODE = "COUNTRY_CODE";
    public static final java.lang.String _COUNTRY_NAME = "COUNTRY_NAME";
    public static final java.lang.String _REGION_CRITERIA_ID = "REGION_CRITERIA_ID";
    public static final java.lang.String _REGION_NAME = "REGION_NAME";
    public static final java.lang.String _CITY_CRITERIA_ID = "CITY_CRITERIA_ID";
    public static final java.lang.String _CITY_NAME = "CITY_NAME";
    public static final java.lang.String _METRO_CRITERIA_ID = "METRO_CRITERIA_ID";
public static final java.lang.String _METRO_NAME = "METRO_NAME";
public static final java.lang.String _POSTAL_CODE_CRITERIA_ID = "POSTAL_CODE_CRITERIA_ID";
public static final java.lang.String _POSTAL_CODE = "POSTAL_CODE";
public static final java.lang.String _CUSTOM_TARGETING_VALUE_ID = "CUSTOM_TARGETING_VALUE_ID";
public static final java.lang.String _CUSTOM_CRITERIA = "CUSTOM_CRITERIA";
public static final java.lang.String _CONTENT_ID = "CONTENT_ID";
public static final java.lang.String _CONTENT_NAME = "CONTENT_NAME";
public static final java.lang.String _CONTENT_BUNDLE_ID = "CONTENT_BUNDLE_ID";
public static final java.lang.String _CONTENT_BUNDLE_NAME = "CONTENT_BUNDLE_NAME";
public static final java.lang.String _CMS_METADATA = "CMS_METADATA";
public static final java.lang.String _VIDEO_FALLBACK_POSITION = "VIDEO_FALLBACK_POSITION";
public static final java.lang.String _POSITION_OF_POD = "POSITION_OF_POD";
public static final java.lang.String _POSITION_IN_POD = "POSITION_IN_POD";
public static final java.lang.String _CUSTOM_SPOT_ID = "CUSTOM_SPOT_ID";
public static final java.lang.String _CUSTOM_SPOT_NAME = "CUSTOM_SPOT_NAME";
public static final java.lang.String _VIDEO_REDIRECT_THIRD_PARTY = "VIDEO_REDIRECT_THIRD_PARTY";
public static final java.lang.String _VIDEO_BREAK_TYPE = "VIDEO_BREAK_TYPE";
public static final java.lang.String _VIDEO_BREAK_TYPE_NAME = "VIDEO_BREAK_TYPE_NAME";
public static final java.lang.String _VIDEO_VAST_VERSION = "VIDEO_VAST_VERSION";
public static final java.lang.String _VIDEO_AD_REQUEST_DURATION_ID = "VIDEO_AD_REQUEST_DURATION_ID";
public static final java.lang.String _VIDEO_AD_REQUEST_DURATION = "VIDEO_AD_REQUEST_DURATION";
public static final java.lang.String _VIDEO_PLCMT_ID = "VIDEO_PLCMT_ID";
public static final java.lang.String _VIDEO_PLCMT_NAME = "VIDEO_PLCMT_NAME";
public static final java.lang.String _INVENTORY_FORMAT = "INVENTORY_FORMAT";
public static final java.lang.String _INVENTORY_FORMAT_NAME = "INVENTORY_FORMAT_NAME";
public static final java.lang.String _PARTNER_MANAGEMENT_PARTNER_ID = "PARTNER_MANAGEMENT_PARTNER_ID";
public static final java.lang.String _PARTNER_MANAGEMENT_PARTNER_NAME = "PARTNER_MANAGEMENT_PARTNER_NAME";
public static final java.lang.String _PARTNER_MANAGEMENT_PARTNER_LABEL_ID = "PARTNER_MANAGEMENT_PARTNER_LABEL_ID";
public static final java.lang.String _PARTNER_MANAGEMENT_PARTNER_LABEL_NAME = "PARTNER_MANAGEMENT_PARTNER_LABEL_NAME";
public static final java.lang.String _PARTNER_MANAGEMENT_ASSIGNMENT_ID = "PARTNER_MANAGEMENT_ASSIGNMENT_ID";
public static final java.lang.String _PARTNER_MANAGEMENT_ASSIGNMENT_NAME = "PARTNER_MANAGEMENT_ASSIGNMENT_NAME";
public static final java.lang.String _INVENTORY_SHARE_ASSIGNMENT_ID = "INVENTORY_SHARE_ASSIGNMENT_ID";
public static final java.lang.String _INVENTORY_SHARE_ASSIGNMENT_NAME = "INVENTORY_SHARE_ASSIGNMENT_NAME";
public static final java.lang.String _INVENTORY_SHARE_OUTCOME = "INVENTORY_SHARE_OUTCOME";
public static final java.lang.String _GRP_DEMOGRAPHICS = "GRP_DEMOGRAPHICS";
public static final java.lang.String _AD_REQUEST_AD_UNIT_SIZES = "AD_REQUEST_AD_UNIT_SIZES";
public static final java.lang.String _AD_REQUEST_CUSTOM_CRITERIA = "AD_REQUEST_CUSTOM_CRITERIA";
public static final java.lang.String _IS_FIRST_LOOK_DEAL = "IS_FIRST_LOOK_DEAL";
public static final java.lang.String _IS_ADX_DIRECT = "IS_ADX_DIRECT";
public static final java.lang.String _YIELD_GROUP_ID = "YIELD_GROUP_ID";
public static final java.lang.String _YIELD_GROUP_NAME = "YIELD_GROUP_NAME";
public static final java.lang.String _YIELD_PARTNER = "YIELD_PARTNER";
public static final java.lang.String _YIELD_PARTNER_TAG = "YIELD_PARTNER_TAG";
public static final java.lang.String _EXCHANGE_BIDDING_DEAL_ID = "EXCHANGE_BIDDING_DEAL_ID";
public static final java.lang.String _EXCHANGE_BIDDING_DEAL_TYPE = "EXCHANGE_BIDDING_DEAL_TYPE";
public static final java.lang.String _CLASSIFIED_ADVERTISER_ID = "CLASSIFIED_ADVERTISER_ID";
public static final java.lang.String _CLASSIFIED_ADVERTISER_NAME = "CLASSIFIED_ADVERTISER_NAME";
public static final java.lang.String _CLASSIFIED_BRAND_ID = "CLASSIFIED_BRAND_ID";
public static final java.lang.String _CLASSIFIED_BRAND_NAME = "CLASSIFIED_BRAND_NAME";
public static final java.lang.String _MEDIATION_TYPE = "MEDIATION_TYPE";
public static final java.lang.String _NATIVE_TEMPLATE_ID = "NATIVE_TEMPLATE_ID";
public static final java.lang.String _NATIVE_TEMPLATE_NAME = "NATIVE_TEMPLATE_NAME";
public static final java.lang.String _NATIVE_STYLE_ID = "NATIVE_STYLE_ID";
public static final java.lang.String _NATIVE_STYLE_NAME = "NATIVE_STYLE_NAME";
public static final java.lang.String _CHILD_NETWORK_CODE = "CHILD_NETWORK_CODE";
public static final java.lang.String _MOBILE_APP_RESOLVED_ID = "MOBILE_APP_RESOLVED_ID";
public static final java.lang.String _MOBILE_APP_NAME = "MOBILE_APP_NAME";
public static final java.lang.String _MOBILE_DEVICE_NAME = "MOBILE_DEVICE_NAME";
public static final java.lang.String _MOBILE_INVENTORY_TYPE = "MOBILE_INVENTORY_TYPE";
public static final java.lang.String _OPERATING_SYSTEM_VERSION_ID = "OPERATING_SYSTEM_VERSION_ID";
public static final java.lang.String _OPERATING_SYSTEM_VERSION_NAME = "OPERATING_SYSTEM_VERSION_NAME";
public static final java.lang.String _REQUEST_TYPE = "REQUEST_TYPE";
public static final java.lang.String _AD_UNIT_STATUS = "AD_UNIT_STATUS";
public static final java.lang.String _MASTER_COMPANION_CREATIVE_ID = "MASTER_COMPANION_CREATIVE_ID";
public static final java.lang.String _MASTER_COMPANION_CREATIVE_NAME = "MASTER_COMPANION_CREATIVE_NAME";
public static final java.lang.String _AUDIENCE_SEGMENT_ID = "AUDIENCE_SEGMENT_ID";
public static final java.lang.String _AUDIENCE_SEGMENT_NAME = "AUDIENCE_SEGMENT_NAME";
public static final java.lang.String _AUDIENCE_SEGMENT_DATA_PROVIDER_NAME = "AUDIENCE_SEGMENT_DATA_PROVIDER_NAME";
public static final java.lang.String _WEB_PROPERTY_CODE = "WEB_PROPERTY_CODE";
public static final java.lang.String _BUYING_AGENCY_NAME = "BUYING_AGENCY_NAME";
public static final java.lang.String _BUYER_NETWORK_ID = "BUYER_NETWORK_ID";
public static final java.lang.String _BUYER_NETWORK_NAME = "BUYER_NETWORK_NAME";
public static final java.lang.String _BIDDER_ID = "BIDDER_ID";
public static final java.lang.String _BIDDER_NAME = "BIDDER_NAME";
public static final java.lang.String _ADVERTISER_DOMAIN_NAME = "ADVERTISER_DOMAIN_NAME";
public static final java.lang.String _AD_EXCHANGE_OPTIMIZATION_TYPE = "AD_EXCHANGE_OPTIMIZATION_TYPE";
public static final java.lang.String _ADVERTISER_VERTICAL_NAME = "ADVERTISER_VERTICAL_NAME";
public static final java.lang.String _NIELSEN_SEGMENT = "NIELSEN_SEGMENT";
public static final java.lang.String _NIELSEN_DEMOGRAPHICS = "NIELSEN_DEMOGRAPHICS";
public static final java.lang.String _NIELSEN_RESTATEMENT_DATE = "NIELSEN_RESTATEMENT_DATE";
public static final java.lang.String _NIELSEN_DEVICE_ID = "NIELSEN_DEVICE_ID";
public static final java.lang.String _NIELSEN_DEVICE_NAME = "NIELSEN_DEVICE_NAME";
public static final java.lang.String _PROGRAMMATIC_BUYER_ID = "PROGRAMMATIC_BUYER_ID";
public static final java.lang.String _PROGRAMMATIC_BUYER_NAME = "PROGRAMMATIC_BUYER_NAME";
public static final java.lang.String _REQUESTED_AD_SIZES = "REQUESTED_AD_SIZES";
public static final java.lang.String _CREATIVE_SIZE_DELIVERED = "CREATIVE_SIZE_DELIVERED";
public static final java.lang.String _PROGRAMMATIC_CHANNEL_ID = "PROGRAMMATIC_CHANNEL_ID";
public static final java.lang.String _PROGRAMMATIC_CHANNEL_NAME = "PROGRAMMATIC_CHANNEL_NAME";
public static final java.lang.String _CLASSIFIED_YIELD_PARTNER_NAME = "CLASSIFIED_YIELD_PARTNER_NAME";
public static final java.lang.String _DP_DATE = "DP_DATE";
public static final java.lang.String _DP_WEEK = "DP_WEEK";
public static final java.lang.String _DP_MONTH_YEAR = "DP_MONTH_YEAR";
public static final java.lang.String _DP_COUNTRY_CRITERIA_ID = "DP_COUNTRY_CRITERIA_ID";
public static final java.lang.String _DP_COUNTRY_NAME = "DP_COUNTRY_NAME";
public static final java.lang.String _DP_INVENTORY_TYPE = "DP_INVENTORY_TYPE";
public static final java.lang.String _DP_CREATIVE_SIZE = "DP_CREATIVE_SIZE";
public static final java.lang.String _DP_BRAND_NAME = "DP_BRAND_NAME";
public static final java.lang.String _DP_ADVERTISER_NAME = "DP_ADVERTISER_NAME";
public static final java.lang.String _DP_ADX_BUYER_NETWORK_NAME = "DP_ADX_BUYER_NETWORK_NAME";
public static final java.lang.String _DP_MOBILE_DEVICE_NAME = "DP_MOBILE_DEVICE_NAME";
public static final java.lang.String _DP_DEVICE_CATEGORY_NAME = "DP_DEVICE_CATEGORY_NAME";
public static final java.lang.String _DP_TAG_ID = "DP_TAG_ID";
public static final java.lang.String _DP_DEAL_ID = "DP_DEAL_ID";
public static final java.lang.String _DP_APP_ID = "DP_APP_ID";
public static final java.lang.String _CUSTOM_DIMENSION = "CUSTOM_DIMENSION";
public static final java.lang.String _DEMAND_CHANNEL_ID = "DEMAND_CHANNEL_ID";
public static final java.lang.String _DEMAND_CHANNEL_NAME = "DEMAND_CHANNEL_NAME";
public static final java.lang.String _DOMAIN = "DOMAIN";
public static final java.lang.String _SERVING_RESTRICTION_ID = "SERVING_RESTRICTION_ID";
public static final java.lang.String _SERVING_RESTRICTION_NAME = "SERVING_RESTRICTION_NAME";
public static final java.lang.String _UNIFIED_PRICING_RULE_ID = "UNIFIED_PRICING_RULE_ID";
public static final java.lang.String _UNIFIED_PRICING_RULE_NAME = "UNIFIED_PRICING_RULE_NAME";
public static final java.lang.String _FIRST_LOOK_PRICING_RULE_ID = "FIRST_LOOK_PRICING_RULE_ID";
public static final java.lang.String _FIRST_LOOK_PRICING_RULE_NAME = "FIRST_LOOK_PRICING_RULE_NAME";
public static final java.lang.String _BID_RANGE = "BID_RANGE";
public static final java.lang.String _BID_REJECTION_REASON = "BID_REJECTION_REASON";
public static final java.lang.String _BID_REJECTION_REASON_NAME = "BID_REJECTION_REASON_NAME";
public static final java.lang.String _AD_TECHNOLOGY_PROVIDER_DOMAIN = "AD_TECHNOLOGY_PROVIDER_DOMAIN";
public static final java.lang.String _PROGRAMMATIC_DEAL_ID = "PROGRAMMATIC_DEAL_ID";
public static final java.lang.String _PROGRAMMATIC_DEAL_NAME = "PROGRAMMATIC_DEAL_NAME";
public static final java.lang.String _AD_TECHNOLOGY_PROVIDER_ID = "AD_TECHNOLOGY_PROVIDER_ID";
public static final java.lang.String _AD_TECHNOLOGY_PROVIDER_NAME = "AD_TECHNOLOGY_PROVIDER_NAME";
public static final java.lang.String _TCF_VENDOR_ID = "TCF_VENDOR_ID";
public static final java.lang.String _TCF_VENDOR_NAME = "TCF_VENDOR_NAME";
public static final java.lang.String _SITE_NAME = "SITE_NAME";
public static final java.lang.String _CHANNEL_NAME = "CHANNEL_NAME";
public static final java.lang.String _URL_ID = "URL_ID";
public static final java.lang.String _URL_NAME = "URL_NAME";
public static final java.lang.String _VIDEO_AD_DURATION = "VIDEO_AD_DURATION";
public static final java.lang.String _VIDEO_AD_TYPE_ID = "VIDEO_AD_TYPE_ID";
public static final java.lang.String _VIDEO_AD_TYPE_NAME = "VIDEO_AD_TYPE_NAME";
public static final java.lang.String _AD_EXCHANGE_PRODUCT_CODE = "AD_EXCHANGE_PRODUCT_CODE";
public static final java.lang.String _AD_EXCHANGE_PRODUCT_NAME = "AD_EXCHANGE_PRODUCT_NAME";
public static final java.lang.String _DYNAMIC_ALLOCATION_ID = "DYNAMIC_ALLOCATION_ID";
public static final java.lang.String _DYNAMIC_ALLOCATION_NAME = "DYNAMIC_ALLOCATION_NAME";
public static final java.lang.String _AD_TYPE_ID = "AD_TYPE_ID";
public static final java.lang.String _AD_TYPE_NAME = "AD_TYPE_NAME";
public static final java.lang.String _AD_LOCATION_ID = "AD_LOCATION_ID";
public static final java.lang.String _AD_LOCATION_NAME = "AD_LOCATION_NAME";
public static final java.lang.String _TARGETING_TYPE_CODE = "TARGETING_TYPE_CODE";
public static final java.lang.String _TARGETING_TYPE_NAME = "TARGETING_TYPE_NAME";
public static final java.lang.String _BRANDING_TYPE_CODE = "BRANDING_TYPE_CODE";
public static final java.lang.String _BRANDING_TYPE_NAME = "BRANDING_TYPE_NAME";
public static final java.lang.String _BANDWIDTH_ID = "BANDWIDTH_ID";
public static final java.lang.String _BANDWIDTH_NAME = "BANDWIDTH_NAME";
public static final java.lang.String _CARRIER_ID = "CARRIER_ID";
public static final java.lang.String _CARRIER_NAME = "CARRIER_NAME";
public static final Dimension MONTH_AND_YEAR = new Dimension(_MONTH_AND_YEAR);
public static final Dimension WEEK = new Dimension(_WEEK);
public static final Dimension DATE = new Dimension(_DATE);
public static final Dimension DAY = new Dimension(_DAY);
public static final Dimension HOUR = new Dimension(_HOUR);
public static final Dimension DATE_PT = new Dimension(_DATE_PT);
public static final Dimension WEEK_PT = new Dimension(_WEEK_PT);
public static final Dimension MONTH_YEAR_PT = new Dimension(_MONTH_YEAR_PT);
public static final Dimension DAY_OF_WEEK_PT = new Dimension(_DAY_OF_WEEK_PT);
public static final Dimension LINE_ITEM_ID = new Dimension(_LINE_ITEM_ID);
public static final Dimension LINE_ITEM_NAME = new Dimension(_LINE_ITEM_NAME);
public static final Dimension LINE_ITEM_TYPE = new Dimension(_LINE_ITEM_TYPE);
public static final Dimension ORDER_ID = new Dimension(_ORDER_ID);
public static final Dimension ORDER_NAME = new Dimension(_ORDER_NAME);
public static final Dimension ORDER_DELIVERY_STATUS = new Dimension(_ORDER_DELIVERY_STATUS);
public static final Dimension ADVERTISER_ID = new Dimension(_ADVERTISER_ID);
public static final Dimension ADVERTISER_NAME = new Dimension(_ADVERTISER_NAME);
public static final Dimension AD_NETWORK_ID = new Dimension(_AD_NETWORK_ID);
public static final Dimension AD_NETWORK_NAME = new Dimension(_AD_NETWORK_NAME);
public static final Dimension SALESPERSON_ID = new Dimension(_SALESPERSON_ID);
public static final Dimension SALESPERSON_NAME = new Dimension(_SALESPERSON_NAME);
public static final Dimension CREATIVE_ID = new Dimension(_CREATIVE_ID);
public static final Dimension CREATIVE_NAME = new Dimension(_CREATIVE_NAME);
public static final Dimension CREATIVE_TYPE = new Dimension(_CREATIVE_TYPE);
public static final Dimension CREATIVE_BILLING_TYPE = new Dimension(_CREATIVE_BILLING_TYPE);
public static final Dimension CUSTOM_EVENT_ID = new Dimension(_CUSTOM_EVENT_ID);
public static final Dimension CUSTOM_EVENT_NAME = new Dimension(_CUSTOM_EVENT_NAME);
public static final Dimension CUSTOM_EVENT_TYPE = new Dimension(_CUSTOM_EVENT_TYPE);
public static final Dimension CREATIVE_SIZE = new Dimension(_CREATIVE_SIZE);
public static final Dimension AD_UNIT_ID = new Dimension(_AD_UNIT_ID);
public static final Dimension AD_UNIT_NAME = new Dimension(_AD_UNIT_NAME);
public static final Dimension PARENT_AD_UNIT_ID = new Dimension(_PARENT_AD_UNIT_ID);
public static final Dimension PARENT_AD_UNIT_NAME = new Dimension(_PARENT_AD_UNIT_NAME);
public static final Dimension PLACEMENT_ID = new Dimension(_PLACEMENT_ID);
public static final Dimension PLACEMENT_NAME = new Dimension(_PLACEMENT_NAME);
public static final Dimension PLACEMENT_STATUS = new Dimension(_PLACEMENT_STATUS);
public static final Dimension TARGETING = new Dimension(_TARGETING);
public static final Dimension BROWSER_NAME = new Dimension(_BROWSER_NAME);
public static final Dimension DEVICE_CATEGORY_ID = new Dimension(_DEVICE_CATEGORY_ID);
public static final Dimension DEVICE_CATEGORY_NAME = new Dimension(_DEVICE_CATEGORY_NAME);
public static final Dimension COUNTRY_CRITERIA_ID = new Dimension(_COUNTRY_CRITERIA_ID);
public static final Dimension COUNTRY_CODE = new Dimension(_COUNTRY_CODE);
public static final Dimension COUNTRY_NAME = new Dimension(_COUNTRY_NAME);
public static final Dimension REGION_CRITERIA_ID = new Dimension(_REGION_CRITERIA_ID);
public static final Dimension REGION_NAME = new Dimension(_REGION_NAME);
public static final Dimension CITY_CRITERIA_ID = new Dimension(_CITY_CRITERIA_ID);
public static final Dimension CITY_NAME = new Dimension(_CITY_NAME);
public static final Dimension METRO_CRITERIA_ID = new Dimension(_METRO_CRITERIA_ID);
public static final Dimension METRO_NAME = new Dimension(_METRO_NAME);
public static final Dimension POSTAL_CODE_CRITERIA_ID = new Dimension(_POSTAL_CODE_CRITERIA_ID);
public static final Dimension POSTAL_CODE = new Dimension(_POSTAL_CODE);
public static final Dimension CUSTOM_TARGETING_VALUE_ID = new Dimension(_CUSTOM_TARGETING_VALUE_ID);
public static final Dimension CUSTOM_CRITERIA = new Dimension(_CUSTOM_CRITERIA);
public static final Dimension CONTENT_ID = new Dimension(_CONTENT_ID);
public static final Dimension CONTENT_NAME = new Dimension(_CONTENT_NAME);
public static final Dimension CONTENT_BUNDLE_ID = new Dimension(_CONTENT_BUNDLE_ID);
public static final Dimension CONTENT_BUNDLE_NAME = new Dimension(_CONTENT_BUNDLE_NAME);
public static final Dimension CMS_METADATA = new Dimension(_CMS_METADATA);
public static final Dimension VIDEO_FALLBACK_POSITION = new Dimension(_VIDEO_FALLBACK_POSITION);
public static final Dimension POSITION_OF_POD = new Dimension(_POSITION_OF_POD);
public static final Dimension POSITION_IN_POD = new Dimension(_POSITION_IN_POD);
public static final Dimension CUSTOM_SPOT_ID = new Dimension(_CUSTOM_SPOT_ID);
public static final Dimension CUSTOM_SPOT_NAME = new Dimension(_CUSTOM_SPOT_NAME);
public static final Dimension VIDEO_REDIRECT_THIRD_PARTY = new Dimension(_VIDEO_REDIRECT_THIRD_PARTY);
public static final Dimension VIDEO_BREAK_TYPE = new Dimension(_VIDEO_BREAK_TYPE);
public static final Dimension VIDEO_BREAK_TYPE_NAME = new Dimension(_VIDEO_BREAK_TYPE_NAME);
public static final Dimension VIDEO_VAST_VERSION = new Dimension(_VIDEO_VAST_VERSION);
public static final Dimension VIDEO_AD_REQUEST_DURATION_ID = new Dimension(_VIDEO_AD_REQUEST_DURATION_ID);
public static final Dimension VIDEO_AD_REQUEST_DURATION = new Dimension(_VIDEO_AD_REQUEST_DURATION);
public static final Dimension VIDEO_PLCMT_ID = new Dimension(_VIDEO_PLCMT_ID);
public static final Dimension VIDEO_PLCMT_NAME = new Dimension(_VIDEO_PLCMT_NAME);
public static final Dimension INVENTORY_FORMAT = new Dimension(_INVENTORY_FORMAT);
public static final Dimension INVENTORY_FORMAT_NAME = new Dimension(_INVENTORY_FORMAT_NAME);
public static final Dimension PARTNER_MANAGEMENT_PARTNER_ID = new Dimension(_PARTNER_MANAGEMENT_PARTNER_ID);
public static final Dimension PARTNER_MANAGEMENT_PARTNER_NAME = new Dimension(_PARTNER_MANAGEMENT_PARTNER_NAME);
public static final Dimension PARTNER_MANAGEMENT_PARTNER_LABEL_ID = new Dimension(_PARTNER_MANAGEMENT_PARTNER_LABEL_ID);
public static final Dimension PARTNER_MANAGEMENT_PARTNER_LABEL_NAME = new Dimension(_PARTNER_MANAGEMENT_PARTNER_LABEL_NAME);
public static final Dimension PARTNER_MANAGEMENT_ASSIGNMENT_ID = new Dimension(_PARTNER_MANAGEMENT_ASSIGNMENT_ID);
public static final Dimension PARTNER_MANAGEMENT_ASSIGNMENT_NAME = new Dimension(_PARTNER_MANAGEMENT_ASSIGNMENT_NAME);
public static final Dimension INVENTORY_SHARE_ASSIGNMENT_ID = new Dimension(_INVENTORY_SHARE_ASSIGNMENT_ID);
public static final Dimension INVENTORY_SHARE_ASSIGNMENT_NAME = new Dimension(_INVENTORY_SHARE_ASSIGNMENT_NAME);
public static final Dimension INVENTORY_SHARE_OUTCOME = new Dimension(_INVENTORY_SHARE_OUTCOME);
public static final Dimension GRP_DEMOGRAPHICS = new Dimension(_GRP_DEMOGRAPHICS);
public static final Dimension AD_REQUEST_AD_UNIT_SIZES = new Dimension(_AD_REQUEST_AD_UNIT_SIZES);
public static final Dimension AD_REQUEST_CUSTOM_CRITERIA = new Dimension(_AD_REQUEST_CUSTOM_CRITERIA);
public static final Dimension IS_FIRST_LOOK_DEAL = new Dimension(_IS_FIRST_LOOK_DEAL);
public static final Dimension IS_ADX_DIRECT = new Dimension(_IS_ADX_DIRECT);
public static final Dimension YIELD_GROUP_ID = new Dimension(_YIELD_GROUP_ID);
public static final Dimension YIELD_GROUP_NAME = new Dimension(_YIELD_GROUP_NAME);
public static final Dimension YIELD_PARTNER = new Dimension(_YIELD_PARTNER);
public static final Dimension YIELD_PARTNER_TAG = new Dimension(_YIELD_PARTNER_TAG);
public static final Dimension EXCHANGE_BIDDING_DEAL_ID = new Dimension(_EXCHANGE_BIDDING_DEAL_ID);
public static final Dimension EXCHANGE_BIDDING_DEAL_TYPE = new Dimension(_EXCHANGE_BIDDING_DEAL_TYPE);
public static final Dimension CLASSIFIED_ADVERTISER_ID = new Dimension(_CLASSIFIED_ADVERTISER_ID);
public static final Dimension CLASSIFIED_ADVERTISER_NAME = new Dimension(_CLASSIFIED_ADVERTISER_NAME);
public static final Dimension CLASSIFIED_BRAND_ID = new Dimension(_CLASSIFIED_BRAND_ID);
public static final Dimension CLASSIFIED_BRAND_NAME = new Dimension(_CLASSIFIED_BRAND_NAME);
public static final Dimension MEDIATION_TYPE = new Dimension(_MEDIATION_TYPE);
public static final Dimension NATIVE_TEMPLATE_ID = new Dimension(_NATIVE_TEMPLATE_ID);
public static final Dimension NATIVE_TEMPLATE_NAME = new Dimension(_NATIVE_TEMPLATE_NAME);
public static final Dimension NATIVE_STYLE_ID = new Dimension(_NATIVE_STYLE_ID);
public static final Dimension NATIVE_STYLE_NAME = new Dimension(_NATIVE_STYLE_NAME);
public static final Dimension CHILD_NETWORK_CODE = new Dimension(_CHILD_NETWORK_CODE);
public static final Dimension MOBILE_APP_RESOLVED_ID = new Dimension(_MOBILE_APP_RESOLVED_ID);
public static final Dimension MOBILE_APP_NAME = new Dimension(_MOBILE_APP_NAME);
public static final Dimension MOBILE_DEVICE_NAME = new Dimension(_MOBILE_DEVICE_NAME);
public static final Dimension MOBILE_INVENTORY_TYPE = new Dimension(_MOBILE_INVENTORY_TYPE);
public static final Dimension OPERATING_SYSTEM_VERSION_ID = new Dimension(_OPERATING_SYSTEM_VERSION_ID);
public static final Dimension OPERATING_SYSTEM_VERSION_NAME = new Dimension(_OPERATING_SYSTEM_VERSION_NAME);
public static final Dimension REQUEST_TYPE = new Dimension(_REQUEST_TYPE);
public static final Dimension AD_UNIT_STATUS = new Dimension(_AD_UNIT_STATUS);
public static final Dimension MASTER_COMPANION_CREATIVE_ID = new Dimension(_MASTER_COMPANION_CREATIVE_ID);
public static final Dimension MASTER_COMPANION_CREATIVE_NAME = new Dimension(_MASTER_COMPANION_CREATIVE_NAME);
public static final Dimension AUDIENCE_SEGMENT_ID = new Dimension(_AUDIENCE_SEGMENT_ID);
public static final Dimension AUDIENCE_SEGMENT_NAME = new Dimension(_AUDIENCE_SEGMENT_NAME);
public static final Dimension AUDIENCE_SEGMENT_DATA_PROVIDER_NAME = new Dimension(_AUDIENCE_SEGMENT_DATA_PROVIDER_NAME);
public static final Dimension WEB_PROPERTY_CODE = new Dimension(_WEB_PROPERTY_CODE);
public static final Dimension BUYING_AGENCY_NAME = new Dimension(_BUYING_AGENCY_NAME);
public static final Dimension BUYER_NETWORK_ID = new Dimension(_BUYER_NETWORK_ID);
public static final Dimension BUYER_NETWORK_NAME = new Dimension(_BUYER_NETWORK_NAME);
public static final Dimension BIDDER_ID = new Dimension(_BIDDER_ID);
public static final Dimension BIDDER_NAME = new Dimension(_BIDDER_NAME);
public static final Dimension ADVERTISER_DOMAIN_NAME = new Dimension(_ADVERTISER_DOMAIN_NAME);
public static final Dimension AD_EXCHANGE_OPTIMIZATION_TYPE = new Dimension(_AD_EXCHANGE_OPTIMIZATION_TYPE);
public static final Dimension ADVERTISER_VERTICAL_NAME = new Dimension(_ADVERTISER_VERTICAL_NAME);
public static final Dimension NIELSEN_SEGMENT = new Dimension(_NIELSEN_SEGMENT);
public static final Dimension NIELSEN_DEMOGRAPHICS = new Dimension(_NIELSEN_DEMOGRAPHICS);
public static final Dimension NIELSEN_RESTATEMENT_DATE = new Dimension(_NIELSEN_RESTATEMENT_DATE);
public static final Dimension NIELSEN_DEVICE_ID = new Dimension(_NIELSEN_DEVICE_ID);
public static final Dimension NIELSEN_DEVICE_NAME = new Dimension(_NIELSEN_DEVICE_NAME);
public static final Dimension PROGRAMMATIC_BUYER_ID = new Dimension(_PROGRAMMATIC_BUYER_ID);
public static final Dimension PROGRAMMATIC_BUYER_NAME = new Dimension(_PROGRAMMATIC_BUYER_NAME);
public static final Dimension REQUESTED_AD_SIZES = new Dimension(_REQUESTED_AD_SIZES);
public static final Dimension CREATIVE_SIZE_DELIVERED = new Dimension(_CREATIVE_SIZE_DELIVERED);
public static final Dimension PROGRAMMATIC_CHANNEL_ID = new Dimension(_PROGRAMMATIC_CHANNEL_ID);
public static final Dimension PROGRAMMATIC_CHANNEL_NAME = new Dimension(_PROGRAMMATIC_CHANNEL_NAME);
public static final Dimension CLASSIFIED_YIELD_PARTNER_NAME = new Dimension(_CLASSIFIED_YIELD_PARTNER_NAME);
public static final Dimension DP_DATE = new Dimension(_DP_DATE);
public static final Dimension DP_WEEK = new Dimension(_DP_WEEK);
public static final Dimension DP_MONTH_YEAR = new Dimension(_DP_MONTH_YEAR);
public static final Dimension DP_COUNTRY_CRITERIA_ID = new Dimension(_DP_COUNTRY_CRITERIA_ID);
public static final Dimension DP_COUNTRY_NAME = new Dimension(_DP_COUNTRY_NAME);
public static final Dimension DP_INVENTORY_TYPE = new Dimension(_DP_INVENTORY_TYPE);
public static final Dimension DP_CREATIVE_SIZE = new Dimension(_DP_CREATIVE_SIZE);
public static final Dimension DP_BRAND_NAME = new Dimension(_DP_BRAND_NAME);
public static final Dimension DP_ADVERTISER_NAME = new Dimension(_DP_ADVERTISER_NAME);
public static final Dimension DP_ADX_BUYER_NETWORK_NAME = new Dimension(_DP_ADX_BUYER_NETWORK_NAME);
public static final Dimension DP_MOBILE_DEVICE_NAME = new Dimension(_DP_MOBILE_DEVICE_NAME);
public static final Dimension DP_DEVICE_CATEGORY_NAME = new Dimension(_DP_DEVICE_CATEGORY_NAME);
public static final Dimension DP_TAG_ID = new Dimension(_DP_TAG_ID);
public static final Dimension DP_DEAL_ID = new Dimension(_DP_DEAL_ID);
public static final Dimension DP_APP_ID = new Dimension(_DP_APP_ID);
public static final Dimension CUSTOM_DIMENSION = new Dimension(_CUSTOM_DIMENSION);
public static final Dimension DEMAND_CHANNEL_ID = new Dimension(_DEMAND_CHANNEL_ID);
public static final Dimension DEMAND_CHANNEL_NAME = new Dimension(_DEMAND_CHANNEL_NAME);
public static final Dimension DOMAIN = new Dimension(_DOMAIN);
public static final Dimension SERVING_RESTRICTION_ID = new Dimension(_SERVING_RESTRICTION_ID);
public static final Dimension SERVING_RESTRICTION_NAME = new Dimension(_SERVING_RESTRICTION_NAME);
public static final Dimension UNIFIED_PRICING_RULE_ID = new Dimension(_UNIFIED_PRICING_RULE_ID);
public static final Dimension UNIFIED_PRICING_RULE_NAME = new Dimension(_UNIFIED_PRICING_RULE_NAME);
public static final Dimension FIRST_LOOK_PRICING_RULE_ID = new Dimension(_FIRST_LOOK_PRICING_RULE_ID);
public static final Dimension FIRST_LOOK_PRICING_RULE_NAME = new Dimension(_FIRST_LOOK_PRICING_RULE_NAME);
public static final Dimension BID_RANGE = new Dimension(_BID_RANGE);
public static final Dimension BID_REJECTION_REASON = new Dimension(_BID_REJECTION_REASON);
public static final Dimension BID_REJECTION_REASON_NAME = new Dimension(_BID_REJECTION_REASON_NAME);
public static final Dimension AD_TECHNOLOGY_PROVIDER_DOMAIN = new Dimension(_AD_TECHNOLOGY_PROVIDER_DOMAIN);
public static final Dimension PROGRAMMATIC_DEAL_ID = new Dimension(_PROGRAMMATIC_DEAL_ID);
public static final Dimension PROGRAMMATIC_DEAL_NAME = new Dimension(_PROGRAMMATIC_DEAL_NAME);
public static final Dimension AD_TECHNOLOGY_PROVIDER_ID = new Dimension(_AD_TECHNOLOGY_PROVIDER_ID);
public static final Dimension AD_TECHNOLOGY_PROVIDER_NAME = new Dimension(_AD_TECHNOLOGY_PROVIDER_NAME);
public static final Dimension TCF_VENDOR_ID = new Dimension(_TCF_VENDOR_ID);
public static final Dimension TCF_VENDOR_NAME = new Dimension(_TCF_VENDOR_NAME);
public static final Dimension SITE_NAME = new Dimension(_SITE_NAME);
public static final Dimension CHANNEL_NAME = new Dimension(_CHANNEL_NAME);
public static final Dimension URL_ID = new Dimension(_URL_ID);
public static final Dimension URL_NAME = new Dimension(_URL_NAME);
public static final Dimension VIDEO_AD_DURATION = new Dimension(_VIDEO_AD_DURATION);
public static final Dimension VIDEO_AD_TYPE_ID = new Dimension(_VIDEO_AD_TYPE_ID);
public static final Dimension VIDEO_AD_TYPE_NAME = new Dimension(_VIDEO_AD_TYPE_NAME);
public static final Dimension AD_EXCHANGE_PRODUCT_CODE = new Dimension(_AD_EXCHANGE_PRODUCT_CODE);
public static final Dimension AD_EXCHANGE_PRODUCT_NAME = new Dimension(_AD_EXCHANGE_PRODUCT_NAME);
public static final Dimension DYNAMIC_ALLOCATION_ID = new Dimension(_DYNAMIC_ALLOCATION_ID);
public static final Dimension DYNAMIC_ALLOCATION_NAME = new Dimension(_DYNAMIC_ALLOCATION_NAME);
public static final Dimension AD_TYPE_ID = new Dimension(_AD_TYPE_ID);
public static final Dimension AD_TYPE_NAME = new Dimension(_AD_TYPE_NAME);
public static final Dimension AD_LOCATION_ID = new Dimension(_AD_LOCATION_ID);
public static final Dimension AD_LOCATION_NAME = new Dimension(_AD_LOCATION_NAME);
public static final Dimension TARGETING_TYPE_CODE = new Dimension(_TARGETING_TYPE_CODE);
public static final Dimension TARGETING_TYPE_NAME = new Dimension(_TARGETING_TYPE_NAME);
public static final Dimension BRANDING_TYPE_CODE = new Dimension(_BRANDING_TYPE_CODE);
public static final Dimension BRANDING_TYPE_NAME = new Dimension(_BRANDING_TYPE_NAME);
public static final Dimension BANDWIDTH_ID = new Dimension(_BANDWIDTH_ID);
public static final Dimension BANDWIDTH_NAME = new Dimension(_BANDWIDTH_NAME);
public static final Dimension CARRIER_ID = new Dimension(_CARRIER_ID);
public static final Dimension CARRIER_NAME = new Dimension(_CARRIER_NAME);
public java.lang.String getValue() { return _value_;}
public static Dimension fromValue(java.lang.String value)
throws java.lang.IllegalArgumentException {
Dimension enumeration = (Dimension)
_table_.get(value);
if (enumeration==null) throw new java.lang.IllegalArgumentException();
return enumeration;
}
public static Dimension fromString(java.lang.String value)
throws java.lang.IllegalArgumentException {
return fromValue(value);
}
public boolean equals(java.lang.Object obj) {return (obj == this);}
public int hashCode() { return toString().hashCode();}
public java.lang.String toString() { return _value_;}
public java.lang.Object readResolve() throws java.io.ObjectStreamException { return fromValue(_value_);}
/**
 * Factory for the Axis serializer that writes this enum type to XML.
 * {@code mechType} is unused here; it is part of the reflective factory
 * signature Axis invokes.
 */
public static org.apache.axis.encoding.Serializer getSerializer(
        java.lang.String mechType,
        java.lang.Class _javaType,
        javax.xml.namespace.QName _xmlType) {
    return new org.apache.axis.encoding.ser.EnumSerializer(_javaType, _xmlType);
}
/**
 * Factory for the Axis deserializer that reads this enum type from XML.
 * {@code mechType} is unused here; it is part of the reflective factory
 * signature Axis invokes.
 */
public static org.apache.axis.encoding.Deserializer getDeserializer(
        java.lang.String mechType,
        java.lang.Class _javaType,
        javax.xml.namespace.QName _xmlType) {
    return new org.apache.axis.encoding.ser.EnumDeserializer(_javaType, _xmlType);
}
// Type metadata
// Axis type-mapping descriptor for this class; exposed via getTypeDesc() and
// used by the (de)serializers above.  Initialized once at class-load time.
private static org.apache.axis.description.TypeDesc typeDesc =
new org.apache.axis.description.TypeDesc(Dimension.class);
static {
// Bind this Java type to its XML Schema qname in the v202502 namespace.
typeDesc.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202502", "Dimension"));
}
/**
 * Returns the Axis type-metadata descriptor bound to this class.
 */
public static org.apache.axis.description.TypeDesc getTypeDesc() {
    return typeDesc;
}
}
|
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/**
* Dimension.java
*
* This file was auto-generated from WSDL
* by the Apache Axis 1.4.1-SNAPSHOT Mar 20, 2024 (11:59:10 PDT) WSDL2Java emitter.
*/
package com.google.api.ads.admanager.axis.v202505;
public class Dimension implements java.io.Serializable {
// Wire (XML) value of this constant, e.g. "DATE".
private java.lang.String _value_;
// Registry mapping wire value -> interned instance; populated by the
// constructor as the public constants below are created at class load.
// NOTE(review): raw HashMap with no synchronization — presumably written
// only during static initialization and read-only afterwards; confirm no
// constants are created concurrently at runtime.
private static java.util.HashMap _table_ = new java.util.HashMap();
// Constructor
// Stores the wire value and registers this instance in _table_ so that
// fromValue(String) can later resolve the interned constant (Axis
// typesafe-enum pattern).  Protected: only the generated constants below
// (and potential subclasses) create instances.
protected Dimension(java.lang.String value) {
_value_ = value;
_table_.put(_value_,this);
}
// Generated wire (XML) values, one per report dimension.  Each _X constant
// holds the literal string sent over the wire; the matching interned
// Dimension instance is declared further below.
public static final java.lang.String _MONTH_AND_YEAR = "MONTH_AND_YEAR";
public static final java.lang.String _WEEK = "WEEK";
public static final java.lang.String _DATE = "DATE";
public static final java.lang.String _DAY = "DAY";
public static final java.lang.String _HOUR = "HOUR";
public static final java.lang.String _DATE_PT = "DATE_PT";
public static final java.lang.String _WEEK_PT = "WEEK_PT";
public static final java.lang.String _MONTH_YEAR_PT = "MONTH_YEAR_PT";
public static final java.lang.String _DAY_OF_WEEK_PT = "DAY_OF_WEEK_PT";
public static final java.lang.String _LINE_ITEM_ID = "LINE_ITEM_ID";
public static final java.lang.String _LINE_ITEM_NAME = "LINE_ITEM_NAME";
public static final java.lang.String _LINE_ITEM_TYPE = "LINE_ITEM_TYPE";
public static final java.lang.String _ORDER_ID = "ORDER_ID";
public static final java.lang.String _ORDER_NAME = "ORDER_NAME";
public static final java.lang.String _ORDER_DELIVERY_STATUS = "ORDER_DELIVERY_STATUS";
public static final java.lang.String _ADVERTISER_ID = "ADVERTISER_ID";
public static final java.lang.String _ADVERTISER_NAME = "ADVERTISER_NAME";
public static final java.lang.String _AD_NETWORK_ID = "AD_NETWORK_ID";
public static final java.lang.String _AD_NETWORK_NAME = "AD_NETWORK_NAME";
public static final java.lang.String _SALESPERSON_ID = "SALESPERSON_ID";
public static final java.lang.String _SALESPERSON_NAME = "SALESPERSON_NAME";
public static final java.lang.String _CREATIVE_ID = "CREATIVE_ID";
public static final java.lang.String _CREATIVE_NAME = "CREATIVE_NAME";
public static final java.lang.String _CREATIVE_TYPE = "CREATIVE_TYPE";
public static final java.lang.String _CREATIVE_BILLING_TYPE = "CREATIVE_BILLING_TYPE";
public static final java.lang.String _CUSTOM_EVENT_ID = "CUSTOM_EVENT_ID";
public static final java.lang.String _CUSTOM_EVENT_NAME = "CUSTOM_EVENT_NAME";
public static final java.lang.String _CUSTOM_EVENT_TYPE = "CUSTOM_EVENT_TYPE";
public static final java.lang.String _CREATIVE_SIZE = "CREATIVE_SIZE";
public static final java.lang.String _AD_UNIT_ID = "AD_UNIT_ID";
public static final java.lang.String _AD_UNIT_NAME = "AD_UNIT_NAME";
public static final java.lang.String _PARENT_AD_UNIT_ID = "PARENT_AD_UNIT_ID";
public static final java.lang.String _PARENT_AD_UNIT_NAME = "PARENT_AD_UNIT_NAME";
public static final java.lang.String _PLACEMENT_ID = "PLACEMENT_ID";
public static final java.lang.String _PLACEMENT_NAME = "PLACEMENT_NAME";
public static final java.lang.String _PLACEMENT_STATUS = "PLACEMENT_STATUS";
public static final java.lang.String _TARGETING = "TARGETING";
public static final java.lang.String _BROWSER_NAME = "BROWSER_NAME";
public static final java.lang.String _DEVICE_CATEGORY_ID = "DEVICE_CATEGORY_ID";
public static final java.lang.String _DEVICE_CATEGORY_NAME = "DEVICE_CATEGORY_NAME";
public static final java.lang.String _COUNTRY_CRITERIA_ID = "COUNTRY_CRITERIA_ID";
public static final java.lang.String _COUNTRY_CODE = "COUNTRY_CODE";
public static final java.lang.String _COUNTRY_NAME = "COUNTRY_NAME";
public static final java.lang.String _REGION_CRITERIA_ID = "REGION_CRITERIA_ID";
public static final java.lang.String _REGION_NAME = "REGION_NAME";
public static final java.lang.String _CITY_CRITERIA_ID = "CITY_CRITERIA_ID";
public static final java.lang.String _CITY_NAME = "CITY_NAME";
public static final java.lang.String _METRO_CRITERIA_ID = "METRO_CRITERIA_ID";
public static final java.lang.String _METRO_NAME = "METRO_NAME";
public static final java.lang.String _POSTAL_CODE_CRITERIA_ID = "POSTAL_CODE_CRITERIA_ID";
public static final java.lang.String _POSTAL_CODE = "POSTAL_CODE";
public static final java.lang.String _CUSTOM_TARGETING_VALUE_ID = "CUSTOM_TARGETING_VALUE_ID";
public static final java.lang.String _CUSTOM_CRITERIA = "CUSTOM_CRITERIA";
public static final java.lang.String _CONTENT_ID = "CONTENT_ID";
public static final java.lang.String _CONTENT_NAME = "CONTENT_NAME";
public static final java.lang.String _CONTENT_BUNDLE_ID = "CONTENT_BUNDLE_ID";
public static final java.lang.String _CONTENT_BUNDLE_NAME = "CONTENT_BUNDLE_NAME";
public static final java.lang.String _CMS_METADATA = "CMS_METADATA";
public static final java.lang.String _VIDEO_FALLBACK_POSITION = "VIDEO_FALLBACK_POSITION";
public static final java.lang.String _POSITION_OF_POD = "POSITION_OF_POD";
public static final java.lang.String _POSITION_IN_POD = "POSITION_IN_POD";
public static final java.lang.String _CUSTOM_SPOT_ID = "CUSTOM_SPOT_ID";
public static final java.lang.String _CUSTOM_SPOT_NAME = "CUSTOM_SPOT_NAME";
public static final java.lang.String _VIDEO_REDIRECT_THIRD_PARTY = "VIDEO_REDIRECT_THIRD_PARTY";
public static final java.lang.String _VIDEO_BREAK_TYPE = "VIDEO_BREAK_TYPE";
public static final java.lang.String _VIDEO_BREAK_TYPE_NAME = "VIDEO_BREAK_TYPE_NAME";
public static final java.lang.String _VIDEO_VAST_VERSION = "VIDEO_VAST_VERSION";
public static final java.lang.String _VIDEO_AD_REQUEST_DURATION_ID = "VIDEO_AD_REQUEST_DURATION_ID";
public static final java.lang.String _VIDEO_AD_REQUEST_DURATION = "VIDEO_AD_REQUEST_DURATION";
public static final java.lang.String _VIDEO_PLCMT_ID = "VIDEO_PLCMT_ID";
public static final java.lang.String _VIDEO_PLCMT_NAME = "VIDEO_PLCMT_NAME";
public static final java.lang.String _INVENTORY_FORMAT = "INVENTORY_FORMAT";
public static final java.lang.String _INVENTORY_FORMAT_NAME = "INVENTORY_FORMAT_NAME";
public static final java.lang.String _PARTNER_MANAGEMENT_PARTNER_ID = "PARTNER_MANAGEMENT_PARTNER_ID";
public static final java.lang.String _PARTNER_MANAGEMENT_PARTNER_NAME = "PARTNER_MANAGEMENT_PARTNER_NAME";
public static final java.lang.String _PARTNER_MANAGEMENT_PARTNER_LABEL_ID = "PARTNER_MANAGEMENT_PARTNER_LABEL_ID";
public static final java.lang.String _PARTNER_MANAGEMENT_PARTNER_LABEL_NAME = "PARTNER_MANAGEMENT_PARTNER_LABEL_NAME";
public static final java.lang.String _PARTNER_MANAGEMENT_ASSIGNMENT_ID = "PARTNER_MANAGEMENT_ASSIGNMENT_ID";
public static final java.lang.String _PARTNER_MANAGEMENT_ASSIGNMENT_NAME = "PARTNER_MANAGEMENT_ASSIGNMENT_NAME";
public static final java.lang.String _INVENTORY_SHARE_ASSIGNMENT_ID = "INVENTORY_SHARE_ASSIGNMENT_ID";
public static final java.lang.String _INVENTORY_SHARE_ASSIGNMENT_NAME = "INVENTORY_SHARE_ASSIGNMENT_NAME";
public static final java.lang.String _INVENTORY_SHARE_OUTCOME = "INVENTORY_SHARE_OUTCOME";
public static final java.lang.String _GRP_DEMOGRAPHICS = "GRP_DEMOGRAPHICS";
public static final java.lang.String _AD_REQUEST_AD_UNIT_SIZES = "AD_REQUEST_AD_UNIT_SIZES";
public static final java.lang.String _AD_REQUEST_CUSTOM_CRITERIA = "AD_REQUEST_CUSTOM_CRITERIA";
public static final java.lang.String _IS_FIRST_LOOK_DEAL = "IS_FIRST_LOOK_DEAL";
public static final java.lang.String _IS_ADX_DIRECT = "IS_ADX_DIRECT";
public static final java.lang.String _YIELD_GROUP_ID = "YIELD_GROUP_ID";
public static final java.lang.String _YIELD_GROUP_NAME = "YIELD_GROUP_NAME";
public static final java.lang.String _YIELD_PARTNER = "YIELD_PARTNER";
public static final java.lang.String _YIELD_PARTNER_TAG = "YIELD_PARTNER_TAG";
public static final java.lang.String _EXCHANGE_BIDDING_DEAL_ID = "EXCHANGE_BIDDING_DEAL_ID";
public static final java.lang.String _EXCHANGE_BIDDING_DEAL_TYPE = "EXCHANGE_BIDDING_DEAL_TYPE";
public static final java.lang.String _CLASSIFIED_ADVERTISER_ID = "CLASSIFIED_ADVERTISER_ID";
public static final java.lang.String _CLASSIFIED_ADVERTISER_NAME = "CLASSIFIED_ADVERTISER_NAME";
public static final java.lang.String _CLASSIFIED_BRAND_ID = "CLASSIFIED_BRAND_ID";
public static final java.lang.String _CLASSIFIED_BRAND_NAME = "CLASSIFIED_BRAND_NAME";
public static final java.lang.String _MEDIATION_TYPE = "MEDIATION_TYPE";
public static final java.lang.String _NATIVE_TEMPLATE_ID = "NATIVE_TEMPLATE_ID";
public static final java.lang.String _NATIVE_TEMPLATE_NAME = "NATIVE_TEMPLATE_NAME";
public static final java.lang.String _NATIVE_STYLE_ID = "NATIVE_STYLE_ID";
public static final java.lang.String _NATIVE_STYLE_NAME = "NATIVE_STYLE_NAME";
public static final java.lang.String _CHILD_NETWORK_CODE = "CHILD_NETWORK_CODE";
public static final java.lang.String _MOBILE_APP_RESOLVED_ID = "MOBILE_APP_RESOLVED_ID";
public static final java.lang.String _MOBILE_APP_NAME = "MOBILE_APP_NAME";
public static final java.lang.String _MOBILE_DEVICE_NAME = "MOBILE_DEVICE_NAME";
public static final java.lang.String _MOBILE_INVENTORY_TYPE = "MOBILE_INVENTORY_TYPE";
public static final java.lang.String _OPERATING_SYSTEM_VERSION_ID = "OPERATING_SYSTEM_VERSION_ID";
public static final java.lang.String _OPERATING_SYSTEM_VERSION_NAME = "OPERATING_SYSTEM_VERSION_NAME";
public static final java.lang.String _REQUEST_TYPE = "REQUEST_TYPE";
public static final java.lang.String _AD_UNIT_STATUS = "AD_UNIT_STATUS";
public static final java.lang.String _MASTER_COMPANION_CREATIVE_ID = "MASTER_COMPANION_CREATIVE_ID";
public static final java.lang.String _MASTER_COMPANION_CREATIVE_NAME = "MASTER_COMPANION_CREATIVE_NAME";
public static final java.lang.String _AUDIENCE_SEGMENT_ID = "AUDIENCE_SEGMENT_ID";
public static final java.lang.String _AUDIENCE_SEGMENT_NAME = "AUDIENCE_SEGMENT_NAME";
public static final java.lang.String _AUDIENCE_SEGMENT_DATA_PROVIDER_NAME = "AUDIENCE_SEGMENT_DATA_PROVIDER_NAME";
public static final java.lang.String _WEB_PROPERTY_CODE = "WEB_PROPERTY_CODE";
public static final java.lang.String _BUYING_AGENCY_NAME = "BUYING_AGENCY_NAME";
public static final java.lang.String _BUYER_NETWORK_ID = "BUYER_NETWORK_ID";
public static final java.lang.String _BUYER_NETWORK_NAME = "BUYER_NETWORK_NAME";
public static final java.lang.String _BIDDER_ID = "BIDDER_ID";
public static final java.lang.String _BIDDER_NAME = "BIDDER_NAME";
public static final java.lang.String _ADVERTISER_DOMAIN_NAME = "ADVERTISER_DOMAIN_NAME";
public static final java.lang.String _AD_EXCHANGE_OPTIMIZATION_TYPE = "AD_EXCHANGE_OPTIMIZATION_TYPE";
public static final java.lang.String _ADVERTISER_VERTICAL_NAME = "ADVERTISER_VERTICAL_NAME";
public static final java.lang.String _NIELSEN_SEGMENT = "NIELSEN_SEGMENT";
public static final java.lang.String _NIELSEN_DEMOGRAPHICS = "NIELSEN_DEMOGRAPHICS";
public static final java.lang.String _NIELSEN_RESTATEMENT_DATE = "NIELSEN_RESTATEMENT_DATE";
public static final java.lang.String _NIELSEN_DEVICE_ID = "NIELSEN_DEVICE_ID";
public static final java.lang.String _NIELSEN_DEVICE_NAME = "NIELSEN_DEVICE_NAME";
public static final java.lang.String _PROGRAMMATIC_BUYER_ID = "PROGRAMMATIC_BUYER_ID";
public static final java.lang.String _PROGRAMMATIC_BUYER_NAME = "PROGRAMMATIC_BUYER_NAME";
public static final java.lang.String _REQUESTED_AD_SIZES = "REQUESTED_AD_SIZES";
public static final java.lang.String _CREATIVE_SIZE_DELIVERED = "CREATIVE_SIZE_DELIVERED";
public static final java.lang.String _PROGRAMMATIC_CHANNEL_ID = "PROGRAMMATIC_CHANNEL_ID";
public static final java.lang.String _PROGRAMMATIC_CHANNEL_NAME = "PROGRAMMATIC_CHANNEL_NAME";
public static final java.lang.String _CLASSIFIED_YIELD_PARTNER_NAME = "CLASSIFIED_YIELD_PARTNER_NAME";
public static final java.lang.String _DP_DATE = "DP_DATE";
public static final java.lang.String _DP_WEEK = "DP_WEEK";
public static final java.lang.String _DP_MONTH_YEAR = "DP_MONTH_YEAR";
public static final java.lang.String _DP_COUNTRY_CRITERIA_ID = "DP_COUNTRY_CRITERIA_ID";
public static final java.lang.String _DP_COUNTRY_NAME = "DP_COUNTRY_NAME";
public static final java.lang.String _DP_INVENTORY_TYPE = "DP_INVENTORY_TYPE";
public static final java.lang.String _DP_CREATIVE_SIZE = "DP_CREATIVE_SIZE";
public static final java.lang.String _DP_BRAND_NAME = "DP_BRAND_NAME";
public static final java.lang.String _DP_ADVERTISER_NAME = "DP_ADVERTISER_NAME";
public static final java.lang.String _DP_ADX_BUYER_NETWORK_NAME = "DP_ADX_BUYER_NETWORK_NAME";
public static final java.lang.String _DP_MOBILE_DEVICE_NAME = "DP_MOBILE_DEVICE_NAME";
public static final java.lang.String _DP_DEVICE_CATEGORY_NAME = "DP_DEVICE_CATEGORY_NAME";
public static final java.lang.String _DP_TAG_ID = "DP_TAG_ID";
public static final java.lang.String _DP_DEAL_ID = "DP_DEAL_ID";
public static final java.lang.String _DP_APP_ID = "DP_APP_ID";
public static final java.lang.String _CUSTOM_DIMENSION = "CUSTOM_DIMENSION";
public static final java.lang.String _DEMAND_CHANNEL_ID = "DEMAND_CHANNEL_ID";
public static final java.lang.String _DEMAND_CHANNEL_NAME = "DEMAND_CHANNEL_NAME";
public static final java.lang.String _DOMAIN = "DOMAIN";
public static final java.lang.String _SERVING_RESTRICTION_ID = "SERVING_RESTRICTION_ID";
public static final java.lang.String _SERVING_RESTRICTION_NAME = "SERVING_RESTRICTION_NAME";
public static final java.lang.String _UNIFIED_PRICING_RULE_ID = "UNIFIED_PRICING_RULE_ID";
public static final java.lang.String _UNIFIED_PRICING_RULE_NAME = "UNIFIED_PRICING_RULE_NAME";
public static final java.lang.String _FIRST_LOOK_PRICING_RULE_ID = "FIRST_LOOK_PRICING_RULE_ID";
public static final java.lang.String _FIRST_LOOK_PRICING_RULE_NAME = "FIRST_LOOK_PRICING_RULE_NAME";
public static final java.lang.String _BID_RANGE = "BID_RANGE";
public static final java.lang.String _BID_REJECTION_REASON = "BID_REJECTION_REASON";
public static final java.lang.String _BID_REJECTION_REASON_NAME = "BID_REJECTION_REASON_NAME";
public static final java.lang.String _AD_TECHNOLOGY_PROVIDER_DOMAIN = "AD_TECHNOLOGY_PROVIDER_DOMAIN";
public static final java.lang.String _PROGRAMMATIC_DEAL_ID = "PROGRAMMATIC_DEAL_ID";
public static final java.lang.String _PROGRAMMATIC_DEAL_NAME = "PROGRAMMATIC_DEAL_NAME";
public static final java.lang.String _AD_TECHNOLOGY_PROVIDER_ID = "AD_TECHNOLOGY_PROVIDER_ID";
public static final java.lang.String _AD_TECHNOLOGY_PROVIDER_NAME = "AD_TECHNOLOGY_PROVIDER_NAME";
public static final java.lang.String _TCF_VENDOR_ID = "TCF_VENDOR_ID";
public static final java.lang.String _TCF_VENDOR_NAME = "TCF_VENDOR_NAME";
public static final java.lang.String _SITE_NAME = "SITE_NAME";
public static final java.lang.String _CHANNEL_NAME = "CHANNEL_NAME";
public static final java.lang.String _URL_ID = "URL_ID";
public static final java.lang.String _URL_NAME = "URL_NAME";
public static final java.lang.String _VIDEO_AD_DURATION = "VIDEO_AD_DURATION";
public static final java.lang.String _VIDEO_AD_TYPE_ID = "VIDEO_AD_TYPE_ID";
public static final java.lang.String _VIDEO_AD_TYPE_NAME = "VIDEO_AD_TYPE_NAME";
public static final java.lang.String _AD_EXCHANGE_PRODUCT_CODE = "AD_EXCHANGE_PRODUCT_CODE";
public static final java.lang.String _AD_EXCHANGE_PRODUCT_NAME = "AD_EXCHANGE_PRODUCT_NAME";
public static final java.lang.String _DYNAMIC_ALLOCATION_ID = "DYNAMIC_ALLOCATION_ID";
public static final java.lang.String _DYNAMIC_ALLOCATION_NAME = "DYNAMIC_ALLOCATION_NAME";
public static final java.lang.String _AD_TYPE_ID = "AD_TYPE_ID";
public static final java.lang.String _AD_TYPE_NAME = "AD_TYPE_NAME";
public static final java.lang.String _AD_LOCATION_ID = "AD_LOCATION_ID";
public static final java.lang.String _AD_LOCATION_NAME = "AD_LOCATION_NAME";
public static final java.lang.String _TARGETING_TYPE_CODE = "TARGETING_TYPE_CODE";
public static final java.lang.String _TARGETING_TYPE_NAME = "TARGETING_TYPE_NAME";
public static final java.lang.String _BRANDING_TYPE_CODE = "BRANDING_TYPE_CODE";
public static final java.lang.String _BRANDING_TYPE_NAME = "BRANDING_TYPE_NAME";
public static final java.lang.String _BANDWIDTH_ID = "BANDWIDTH_ID";
public static final java.lang.String _BANDWIDTH_NAME = "BANDWIDTH_NAME";
public static final java.lang.String _CARRIER_ID = "CARRIER_ID";
public static final java.lang.String _CARRIER_NAME = "CARRIER_NAME";
// Interned Dimension constants, one per wire value above.  Each constructor
// call registers the instance in _table_, which is what makes fromValue()
// and the identity-based equals() work.  Declaration order must keep the
// _X string constants initialized before their matching instance.
public static final Dimension MONTH_AND_YEAR = new Dimension(_MONTH_AND_YEAR);
public static final Dimension WEEK = new Dimension(_WEEK);
public static final Dimension DATE = new Dimension(_DATE);
public static final Dimension DAY = new Dimension(_DAY);
public static final Dimension HOUR = new Dimension(_HOUR);
public static final Dimension DATE_PT = new Dimension(_DATE_PT);
public static final Dimension WEEK_PT = new Dimension(_WEEK_PT);
public static final Dimension MONTH_YEAR_PT = new Dimension(_MONTH_YEAR_PT);
public static final Dimension DAY_OF_WEEK_PT = new Dimension(_DAY_OF_WEEK_PT);
public static final Dimension LINE_ITEM_ID = new Dimension(_LINE_ITEM_ID);
public static final Dimension LINE_ITEM_NAME = new Dimension(_LINE_ITEM_NAME);
public static final Dimension LINE_ITEM_TYPE = new Dimension(_LINE_ITEM_TYPE);
public static final Dimension ORDER_ID = new Dimension(_ORDER_ID);
public static final Dimension ORDER_NAME = new Dimension(_ORDER_NAME);
public static final Dimension ORDER_DELIVERY_STATUS = new Dimension(_ORDER_DELIVERY_STATUS);
public static final Dimension ADVERTISER_ID = new Dimension(_ADVERTISER_ID);
public static final Dimension ADVERTISER_NAME = new Dimension(_ADVERTISER_NAME);
public static final Dimension AD_NETWORK_ID = new Dimension(_AD_NETWORK_ID);
public static final Dimension AD_NETWORK_NAME = new Dimension(_AD_NETWORK_NAME);
public static final Dimension SALESPERSON_ID = new Dimension(_SALESPERSON_ID);
public static final Dimension SALESPERSON_NAME = new Dimension(_SALESPERSON_NAME);
public static final Dimension CREATIVE_ID = new Dimension(_CREATIVE_ID);
public static final Dimension CREATIVE_NAME = new Dimension(_CREATIVE_NAME);
public static final Dimension CREATIVE_TYPE = new Dimension(_CREATIVE_TYPE);
public static final Dimension CREATIVE_BILLING_TYPE = new Dimension(_CREATIVE_BILLING_TYPE);
public static final Dimension CUSTOM_EVENT_ID = new Dimension(_CUSTOM_EVENT_ID);
public static final Dimension CUSTOM_EVENT_NAME = new Dimension(_CUSTOM_EVENT_NAME);
public static final Dimension CUSTOM_EVENT_TYPE = new Dimension(_CUSTOM_EVENT_TYPE);
public static final Dimension CREATIVE_SIZE = new Dimension(_CREATIVE_SIZE);
public static final Dimension AD_UNIT_ID = new Dimension(_AD_UNIT_ID);
public static final Dimension AD_UNIT_NAME = new Dimension(_AD_UNIT_NAME);
public static final Dimension PARENT_AD_UNIT_ID = new Dimension(_PARENT_AD_UNIT_ID);
public static final Dimension PARENT_AD_UNIT_NAME = new Dimension(_PARENT_AD_UNIT_NAME);
public static final Dimension PLACEMENT_ID = new Dimension(_PLACEMENT_ID);
public static final Dimension PLACEMENT_NAME = new Dimension(_PLACEMENT_NAME);
public static final Dimension PLACEMENT_STATUS = new Dimension(_PLACEMENT_STATUS);
public static final Dimension TARGETING = new Dimension(_TARGETING);
public static final Dimension BROWSER_NAME = new Dimension(_BROWSER_NAME);
public static final Dimension DEVICE_CATEGORY_ID = new Dimension(_DEVICE_CATEGORY_ID);
public static final Dimension DEVICE_CATEGORY_NAME = new Dimension(_DEVICE_CATEGORY_NAME);
public static final Dimension COUNTRY_CRITERIA_ID = new Dimension(_COUNTRY_CRITERIA_ID);
public static final Dimension COUNTRY_CODE = new Dimension(_COUNTRY_CODE);
public static final Dimension COUNTRY_NAME = new Dimension(_COUNTRY_NAME);
public static final Dimension REGION_CRITERIA_ID = new Dimension(_REGION_CRITERIA_ID);
public static final Dimension REGION_NAME = new Dimension(_REGION_NAME);
public static final Dimension CITY_CRITERIA_ID = new Dimension(_CITY_CRITERIA_ID);
public static final Dimension CITY_NAME = new Dimension(_CITY_NAME);
public static final Dimension METRO_CRITERIA_ID = new Dimension(_METRO_CRITERIA_ID);
public static final Dimension METRO_NAME = new Dimension(_METRO_NAME);
public static final Dimension POSTAL_CODE_CRITERIA_ID = new Dimension(_POSTAL_CODE_CRITERIA_ID);
public static final Dimension POSTAL_CODE = new Dimension(_POSTAL_CODE);
public static final Dimension CUSTOM_TARGETING_VALUE_ID = new Dimension(_CUSTOM_TARGETING_VALUE_ID);
public static final Dimension CUSTOM_CRITERIA = new Dimension(_CUSTOM_CRITERIA);
public static final Dimension CONTENT_ID = new Dimension(_CONTENT_ID);
public static final Dimension CONTENT_NAME = new Dimension(_CONTENT_NAME);
public static final Dimension CONTENT_BUNDLE_ID = new Dimension(_CONTENT_BUNDLE_ID);
public static final Dimension CONTENT_BUNDLE_NAME = new Dimension(_CONTENT_BUNDLE_NAME);
public static final Dimension CMS_METADATA = new Dimension(_CMS_METADATA);
public static final Dimension VIDEO_FALLBACK_POSITION = new Dimension(_VIDEO_FALLBACK_POSITION);
public static final Dimension POSITION_OF_POD = new Dimension(_POSITION_OF_POD);
public static final Dimension POSITION_IN_POD = new Dimension(_POSITION_IN_POD);
public static final Dimension CUSTOM_SPOT_ID = new Dimension(_CUSTOM_SPOT_ID);
public static final Dimension CUSTOM_SPOT_NAME = new Dimension(_CUSTOM_SPOT_NAME);
public static final Dimension VIDEO_REDIRECT_THIRD_PARTY = new Dimension(_VIDEO_REDIRECT_THIRD_PARTY);
public static final Dimension VIDEO_BREAK_TYPE = new Dimension(_VIDEO_BREAK_TYPE);
public static final Dimension VIDEO_BREAK_TYPE_NAME = new Dimension(_VIDEO_BREAK_TYPE_NAME);
public static final Dimension VIDEO_VAST_VERSION = new Dimension(_VIDEO_VAST_VERSION);
public static final Dimension VIDEO_AD_REQUEST_DURATION_ID = new Dimension(_VIDEO_AD_REQUEST_DURATION_ID);
public static final Dimension VIDEO_AD_REQUEST_DURATION = new Dimension(_VIDEO_AD_REQUEST_DURATION);
public static final Dimension VIDEO_PLCMT_ID = new Dimension(_VIDEO_PLCMT_ID);
public static final Dimension VIDEO_PLCMT_NAME = new Dimension(_VIDEO_PLCMT_NAME);
public static final Dimension INVENTORY_FORMAT = new Dimension(_INVENTORY_FORMAT);
public static final Dimension INVENTORY_FORMAT_NAME = new Dimension(_INVENTORY_FORMAT_NAME);
public static final Dimension PARTNER_MANAGEMENT_PARTNER_ID = new Dimension(_PARTNER_MANAGEMENT_PARTNER_ID);
public static final Dimension PARTNER_MANAGEMENT_PARTNER_NAME = new Dimension(_PARTNER_MANAGEMENT_PARTNER_NAME);
public static final Dimension PARTNER_MANAGEMENT_PARTNER_LABEL_ID = new Dimension(_PARTNER_MANAGEMENT_PARTNER_LABEL_ID);
public static final Dimension PARTNER_MANAGEMENT_PARTNER_LABEL_NAME = new Dimension(_PARTNER_MANAGEMENT_PARTNER_LABEL_NAME);
public static final Dimension PARTNER_MANAGEMENT_ASSIGNMENT_ID = new Dimension(_PARTNER_MANAGEMENT_ASSIGNMENT_ID);
public static final Dimension PARTNER_MANAGEMENT_ASSIGNMENT_NAME = new Dimension(_PARTNER_MANAGEMENT_ASSIGNMENT_NAME);
public static final Dimension INVENTORY_SHARE_ASSIGNMENT_ID = new Dimension(_INVENTORY_SHARE_ASSIGNMENT_ID);
public static final Dimension INVENTORY_SHARE_ASSIGNMENT_NAME = new Dimension(_INVENTORY_SHARE_ASSIGNMENT_NAME);
public static final Dimension INVENTORY_SHARE_OUTCOME = new Dimension(_INVENTORY_SHARE_OUTCOME);
public static final Dimension GRP_DEMOGRAPHICS = new Dimension(_GRP_DEMOGRAPHICS);
public static final Dimension AD_REQUEST_AD_UNIT_SIZES = new Dimension(_AD_REQUEST_AD_UNIT_SIZES);
public static final Dimension AD_REQUEST_CUSTOM_CRITERIA = new Dimension(_AD_REQUEST_CUSTOM_CRITERIA);
public static final Dimension IS_FIRST_LOOK_DEAL = new Dimension(_IS_FIRST_LOOK_DEAL);
public static final Dimension IS_ADX_DIRECT = new Dimension(_IS_ADX_DIRECT);
public static final Dimension YIELD_GROUP_ID = new Dimension(_YIELD_GROUP_ID);
public static final Dimension YIELD_GROUP_NAME = new Dimension(_YIELD_GROUP_NAME);
public static final Dimension YIELD_PARTNER = new Dimension(_YIELD_PARTNER);
public static final Dimension YIELD_PARTNER_TAG = new Dimension(_YIELD_PARTNER_TAG);
public static final Dimension EXCHANGE_BIDDING_DEAL_ID = new Dimension(_EXCHANGE_BIDDING_DEAL_ID);
public static final Dimension EXCHANGE_BIDDING_DEAL_TYPE = new Dimension(_EXCHANGE_BIDDING_DEAL_TYPE);
public static final Dimension CLASSIFIED_ADVERTISER_ID = new Dimension(_CLASSIFIED_ADVERTISER_ID);
public static final Dimension CLASSIFIED_ADVERTISER_NAME = new Dimension(_CLASSIFIED_ADVERTISER_NAME);
public static final Dimension CLASSIFIED_BRAND_ID = new Dimension(_CLASSIFIED_BRAND_ID);
public static final Dimension CLASSIFIED_BRAND_NAME = new Dimension(_CLASSIFIED_BRAND_NAME);
public static final Dimension MEDIATION_TYPE = new Dimension(_MEDIATION_TYPE);
public static final Dimension NATIVE_TEMPLATE_ID = new Dimension(_NATIVE_TEMPLATE_ID);
public static final Dimension NATIVE_TEMPLATE_NAME = new Dimension(_NATIVE_TEMPLATE_NAME);
public static final Dimension NATIVE_STYLE_ID = new Dimension(_NATIVE_STYLE_ID);
public static final Dimension NATIVE_STYLE_NAME = new Dimension(_NATIVE_STYLE_NAME);
public static final Dimension CHILD_NETWORK_CODE = new Dimension(_CHILD_NETWORK_CODE);
public static final Dimension MOBILE_APP_RESOLVED_ID = new Dimension(_MOBILE_APP_RESOLVED_ID);
public static final Dimension MOBILE_APP_NAME = new Dimension(_MOBILE_APP_NAME);
public static final Dimension MOBILE_DEVICE_NAME = new Dimension(_MOBILE_DEVICE_NAME);
public static final Dimension MOBILE_INVENTORY_TYPE = new Dimension(_MOBILE_INVENTORY_TYPE);
public static final Dimension OPERATING_SYSTEM_VERSION_ID = new Dimension(_OPERATING_SYSTEM_VERSION_ID);
public static final Dimension OPERATING_SYSTEM_VERSION_NAME = new Dimension(_OPERATING_SYSTEM_VERSION_NAME);
public static final Dimension REQUEST_TYPE = new Dimension(_REQUEST_TYPE);
public static final Dimension AD_UNIT_STATUS = new Dimension(_AD_UNIT_STATUS);
public static final Dimension MASTER_COMPANION_CREATIVE_ID = new Dimension(_MASTER_COMPANION_CREATIVE_ID);
public static final Dimension MASTER_COMPANION_CREATIVE_NAME = new Dimension(_MASTER_COMPANION_CREATIVE_NAME);
public static final Dimension AUDIENCE_SEGMENT_ID = new Dimension(_AUDIENCE_SEGMENT_ID);
public static final Dimension AUDIENCE_SEGMENT_NAME = new Dimension(_AUDIENCE_SEGMENT_NAME);
public static final Dimension AUDIENCE_SEGMENT_DATA_PROVIDER_NAME = new Dimension(_AUDIENCE_SEGMENT_DATA_PROVIDER_NAME);
public static final Dimension WEB_PROPERTY_CODE = new Dimension(_WEB_PROPERTY_CODE);
public static final Dimension BUYING_AGENCY_NAME = new Dimension(_BUYING_AGENCY_NAME);
public static final Dimension BUYER_NETWORK_ID = new Dimension(_BUYER_NETWORK_ID);
public static final Dimension BUYER_NETWORK_NAME = new Dimension(_BUYER_NETWORK_NAME);
public static final Dimension BIDDER_ID = new Dimension(_BIDDER_ID);
public static final Dimension BIDDER_NAME = new Dimension(_BIDDER_NAME);
public static final Dimension ADVERTISER_DOMAIN_NAME = new Dimension(_ADVERTISER_DOMAIN_NAME);
public static final Dimension AD_EXCHANGE_OPTIMIZATION_TYPE = new Dimension(_AD_EXCHANGE_OPTIMIZATION_TYPE);
public static final Dimension ADVERTISER_VERTICAL_NAME = new Dimension(_ADVERTISER_VERTICAL_NAME);
public static final Dimension NIELSEN_SEGMENT = new Dimension(_NIELSEN_SEGMENT);
public static final Dimension NIELSEN_DEMOGRAPHICS = new Dimension(_NIELSEN_DEMOGRAPHICS);
public static final Dimension NIELSEN_RESTATEMENT_DATE = new Dimension(_NIELSEN_RESTATEMENT_DATE);
public static final Dimension NIELSEN_DEVICE_ID = new Dimension(_NIELSEN_DEVICE_ID);
public static final Dimension NIELSEN_DEVICE_NAME = new Dimension(_NIELSEN_DEVICE_NAME);
public static final Dimension PROGRAMMATIC_BUYER_ID = new Dimension(_PROGRAMMATIC_BUYER_ID);
public static final Dimension PROGRAMMATIC_BUYER_NAME = new Dimension(_PROGRAMMATIC_BUYER_NAME);
public static final Dimension REQUESTED_AD_SIZES = new Dimension(_REQUESTED_AD_SIZES);
public static final Dimension CREATIVE_SIZE_DELIVERED = new Dimension(_CREATIVE_SIZE_DELIVERED);
public static final Dimension PROGRAMMATIC_CHANNEL_ID = new Dimension(_PROGRAMMATIC_CHANNEL_ID);
public static final Dimension PROGRAMMATIC_CHANNEL_NAME = new Dimension(_PROGRAMMATIC_CHANNEL_NAME);
public static final Dimension CLASSIFIED_YIELD_PARTNER_NAME = new Dimension(_CLASSIFIED_YIELD_PARTNER_NAME);
public static final Dimension DP_DATE = new Dimension(_DP_DATE);
public static final Dimension DP_WEEK = new Dimension(_DP_WEEK);
public static final Dimension DP_MONTH_YEAR = new Dimension(_DP_MONTH_YEAR);
public static final Dimension DP_COUNTRY_CRITERIA_ID = new Dimension(_DP_COUNTRY_CRITERIA_ID);
public static final Dimension DP_COUNTRY_NAME = new Dimension(_DP_COUNTRY_NAME);
public static final Dimension DP_INVENTORY_TYPE = new Dimension(_DP_INVENTORY_TYPE);
public static final Dimension DP_CREATIVE_SIZE = new Dimension(_DP_CREATIVE_SIZE);
public static final Dimension DP_BRAND_NAME = new Dimension(_DP_BRAND_NAME);
public static final Dimension DP_ADVERTISER_NAME = new Dimension(_DP_ADVERTISER_NAME);
public static final Dimension DP_ADX_BUYER_NETWORK_NAME = new Dimension(_DP_ADX_BUYER_NETWORK_NAME);
public static final Dimension DP_MOBILE_DEVICE_NAME = new Dimension(_DP_MOBILE_DEVICE_NAME);
public static final Dimension DP_DEVICE_CATEGORY_NAME = new Dimension(_DP_DEVICE_CATEGORY_NAME);
public static final Dimension DP_TAG_ID = new Dimension(_DP_TAG_ID);
public static final Dimension DP_DEAL_ID = new Dimension(_DP_DEAL_ID);
public static final Dimension DP_APP_ID = new Dimension(_DP_APP_ID);
public static final Dimension CUSTOM_DIMENSION = new Dimension(_CUSTOM_DIMENSION);
public static final Dimension DEMAND_CHANNEL_ID = new Dimension(_DEMAND_CHANNEL_ID);
public static final Dimension DEMAND_CHANNEL_NAME = new Dimension(_DEMAND_CHANNEL_NAME);
public static final Dimension DOMAIN = new Dimension(_DOMAIN);
public static final Dimension SERVING_RESTRICTION_ID = new Dimension(_SERVING_RESTRICTION_ID);
public static final Dimension SERVING_RESTRICTION_NAME = new Dimension(_SERVING_RESTRICTION_NAME);
public static final Dimension UNIFIED_PRICING_RULE_ID = new Dimension(_UNIFIED_PRICING_RULE_ID);
public static final Dimension UNIFIED_PRICING_RULE_NAME = new Dimension(_UNIFIED_PRICING_RULE_NAME);
public static final Dimension FIRST_LOOK_PRICING_RULE_ID = new Dimension(_FIRST_LOOK_PRICING_RULE_ID);
public static final Dimension FIRST_LOOK_PRICING_RULE_NAME = new Dimension(_FIRST_LOOK_PRICING_RULE_NAME);
public static final Dimension BID_RANGE = new Dimension(_BID_RANGE);
public static final Dimension BID_REJECTION_REASON = new Dimension(_BID_REJECTION_REASON);
public static final Dimension BID_REJECTION_REASON_NAME = new Dimension(_BID_REJECTION_REASON_NAME);
public static final Dimension AD_TECHNOLOGY_PROVIDER_DOMAIN = new Dimension(_AD_TECHNOLOGY_PROVIDER_DOMAIN);
public static final Dimension PROGRAMMATIC_DEAL_ID = new Dimension(_PROGRAMMATIC_DEAL_ID);
public static final Dimension PROGRAMMATIC_DEAL_NAME = new Dimension(_PROGRAMMATIC_DEAL_NAME);
public static final Dimension AD_TECHNOLOGY_PROVIDER_ID = new Dimension(_AD_TECHNOLOGY_PROVIDER_ID);
public static final Dimension AD_TECHNOLOGY_PROVIDER_NAME = new Dimension(_AD_TECHNOLOGY_PROVIDER_NAME);
public static final Dimension TCF_VENDOR_ID = new Dimension(_TCF_VENDOR_ID);
public static final Dimension TCF_VENDOR_NAME = new Dimension(_TCF_VENDOR_NAME);
public static final Dimension SITE_NAME = new Dimension(_SITE_NAME);
public static final Dimension CHANNEL_NAME = new Dimension(_CHANNEL_NAME);
public static final Dimension URL_ID = new Dimension(_URL_ID);
public static final Dimension URL_NAME = new Dimension(_URL_NAME);
public static final Dimension VIDEO_AD_DURATION = new Dimension(_VIDEO_AD_DURATION);
public static final Dimension VIDEO_AD_TYPE_ID = new Dimension(_VIDEO_AD_TYPE_ID);
public static final Dimension VIDEO_AD_TYPE_NAME = new Dimension(_VIDEO_AD_TYPE_NAME);
public static final Dimension AD_EXCHANGE_PRODUCT_CODE = new Dimension(_AD_EXCHANGE_PRODUCT_CODE);
public static final Dimension AD_EXCHANGE_PRODUCT_NAME = new Dimension(_AD_EXCHANGE_PRODUCT_NAME);
public static final Dimension DYNAMIC_ALLOCATION_ID = new Dimension(_DYNAMIC_ALLOCATION_ID);
public static final Dimension DYNAMIC_ALLOCATION_NAME = new Dimension(_DYNAMIC_ALLOCATION_NAME);
public static final Dimension AD_TYPE_ID = new Dimension(_AD_TYPE_ID);
public static final Dimension AD_TYPE_NAME = new Dimension(_AD_TYPE_NAME);
public static final Dimension AD_LOCATION_ID = new Dimension(_AD_LOCATION_ID);
public static final Dimension AD_LOCATION_NAME = new Dimension(_AD_LOCATION_NAME);
public static final Dimension TARGETING_TYPE_CODE = new Dimension(_TARGETING_TYPE_CODE);
public static final Dimension TARGETING_TYPE_NAME = new Dimension(_TARGETING_TYPE_NAME);
public static final Dimension BRANDING_TYPE_CODE = new Dimension(_BRANDING_TYPE_CODE);
public static final Dimension BRANDING_TYPE_NAME = new Dimension(_BRANDING_TYPE_NAME);
public static final Dimension BANDWIDTH_ID = new Dimension(_BANDWIDTH_ID);
public static final Dimension BANDWIDTH_NAME = new Dimension(_BANDWIDTH_NAME);
public static final Dimension CARRIER_ID = new Dimension(_CARRIER_ID);
public static final Dimension CARRIER_NAME = new Dimension(_CARRIER_NAME);
public java.lang.String getValue() { return _value_;}
public static Dimension fromValue(java.lang.String value)
throws java.lang.IllegalArgumentException {
Dimension enumeration = (Dimension)
_table_.get(value);
if (enumeration==null) throw new java.lang.IllegalArgumentException();
return enumeration;
}
public static Dimension fromString(java.lang.String value)
throws java.lang.IllegalArgumentException {
return fromValue(value);
}
public boolean equals(java.lang.Object obj) {return (obj == this);}
public int hashCode() { return toString().hashCode();}
public java.lang.String toString() { return _value_;}
public java.lang.Object readResolve() throws java.io.ObjectStreamException { return fromValue(_value_);}
public static org.apache.axis.encoding.Serializer getSerializer(
java.lang.String mechType,
java.lang.Class _javaType,
javax.xml.namespace.QName _xmlType) {
return
new org.apache.axis.encoding.ser.EnumSerializer(
_javaType, _xmlType);
}
public static org.apache.axis.encoding.Deserializer getDeserializer(
java.lang.String mechType,
java.lang.Class _javaType,
javax.xml.namespace.QName _xmlType) {
return
new org.apache.axis.encoding.ser.EnumDeserializer(
_javaType, _xmlType);
}
    // Axis type metadata: binds this class to its XML schema type in the
    // v202505 publisher namespace. Built once at class-initialization time.
    private static org.apache.axis.description.TypeDesc typeDesc =
        new org.apache.axis.description.TypeDesc(Dimension.class);
    static {
        typeDesc.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202505", "Dimension"));
    }
/**
* Return type metadata object
*/
public static org.apache.axis.description.TypeDesc getTypeDesc() {
return typeDesc;
}
}
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/**
* Dimension.java
*
* This file was auto-generated from WSDL
* by the Apache Axis 1.4.1-SNAPSHOT Mar 20, 2024 (11:59:10 PDT) WSDL2Java emitter.
*/
package com.google.api.ads.admanager.axis.v202508;
/**
 * Auto-generated (Axis WSDL2Java) enumeration of report dimensions for the
 * Ad Manager v202508 API. Each value is a canonical singleton registered in
 * an internal lookup table keyed by its string form.
 */
public class Dimension implements java.io.Serializable {
    // The wire/string value this instance represents.
    private java.lang.String _value_;
    // Registry of all instances, keyed by their string value; consulted by
    // fromValue(...). NOTE(review): plain HashMap with no synchronization —
    // safe if all instances are created during class initialization, but the
    // constructor is protected, so a subclass could mutate this later from
    // another thread; confirm before relying on it.
    private static java.util.HashMap _table_ = new java.util.HashMap();
    // Constructor: stores the value and self-registers in the lookup table.
    protected Dimension(java.lang.String value) {
        _value_ = value;
        _table_.put(_value_,this);
    }
    // Wire-value string constants, one per dimension. The matching singleton
    // Dimension instances are declared below, each constructed from one of
    // these strings.
    public static final java.lang.String _MONTH_AND_YEAR = "MONTH_AND_YEAR";
    public static final java.lang.String _WEEK = "WEEK";
    public static final java.lang.String _DATE = "DATE";
    public static final java.lang.String _DAY = "DAY";
    public static final java.lang.String _HOUR = "HOUR";
    public static final java.lang.String _DATE_PT = "DATE_PT";
    public static final java.lang.String _WEEK_PT = "WEEK_PT";
    public static final java.lang.String _MONTH_YEAR_PT = "MONTH_YEAR_PT";
    public static final java.lang.String _DAY_OF_WEEK_PT = "DAY_OF_WEEK_PT";
    public static final java.lang.String _LINE_ITEM_ID = "LINE_ITEM_ID";
    public static final java.lang.String _LINE_ITEM_NAME = "LINE_ITEM_NAME";
    public static final java.lang.String _LINE_ITEM_TYPE = "LINE_ITEM_TYPE";
    public static final java.lang.String _ORDER_ID = "ORDER_ID";
    public static final java.lang.String _ORDER_NAME = "ORDER_NAME";
    public static final java.lang.String _ORDER_DELIVERY_STATUS = "ORDER_DELIVERY_STATUS";
    public static final java.lang.String _ADVERTISER_ID = "ADVERTISER_ID";
    public static final java.lang.String _ADVERTISER_NAME = "ADVERTISER_NAME";
    public static final java.lang.String _AD_NETWORK_ID = "AD_NETWORK_ID";
    public static final java.lang.String _AD_NETWORK_NAME = "AD_NETWORK_NAME";
    public static final java.lang.String _SALESPERSON_ID = "SALESPERSON_ID";
    public static final java.lang.String _SALESPERSON_NAME = "SALESPERSON_NAME";
    public static final java.lang.String _CREATIVE_ID = "CREATIVE_ID";
    public static final java.lang.String _CREATIVE_NAME = "CREATIVE_NAME";
    public static final java.lang.String _CREATIVE_TYPE = "CREATIVE_TYPE";
    public static final java.lang.String _CREATIVE_BILLING_TYPE = "CREATIVE_BILLING_TYPE";
    public static final java.lang.String _CUSTOM_EVENT_ID = "CUSTOM_EVENT_ID";
    public static final java.lang.String _CUSTOM_EVENT_NAME = "CUSTOM_EVENT_NAME";
    public static final java.lang.String _CUSTOM_EVENT_TYPE = "CUSTOM_EVENT_TYPE";
    public static final java.lang.String _CREATIVE_SIZE = "CREATIVE_SIZE";
    public static final java.lang.String _AD_UNIT_ID = "AD_UNIT_ID";
    public static final java.lang.String _AD_UNIT_NAME = "AD_UNIT_NAME";
    public static final java.lang.String _PARENT_AD_UNIT_ID = "PARENT_AD_UNIT_ID";
    public static final java.lang.String _PARENT_AD_UNIT_NAME = "PARENT_AD_UNIT_NAME";
    public static final java.lang.String _PLACEMENT_ID = "PLACEMENT_ID";
    public static final java.lang.String _PLACEMENT_NAME = "PLACEMENT_NAME";
    public static final java.lang.String _PLACEMENT_STATUS = "PLACEMENT_STATUS";
    public static final java.lang.String _TARGETING = "TARGETING";
    public static final java.lang.String _BROWSER_NAME = "BROWSER_NAME";
    public static final java.lang.String _DEVICE_CATEGORY_ID = "DEVICE_CATEGORY_ID";
    public static final java.lang.String _DEVICE_CATEGORY_NAME = "DEVICE_CATEGORY_NAME";
    public static final java.lang.String _COUNTRY_CRITERIA_ID = "COUNTRY_CRITERIA_ID";
    public static final java.lang.String _COUNTRY_CODE = "COUNTRY_CODE";
    public static final java.lang.String _COUNTRY_NAME = "COUNTRY_NAME";
    public static final java.lang.String _REGION_CRITERIA_ID = "REGION_CRITERIA_ID";
    public static final java.lang.String _REGION_NAME = "REGION_NAME";
    public static final java.lang.String _CITY_CRITERIA_ID = "CITY_CRITERIA_ID";
    public static final java.lang.String _CITY_NAME = "CITY_NAME";
    public static final java.lang.String _METRO_CRITERIA_ID = "METRO_CRITERIA_ID";
    public static final java.lang.String _METRO_NAME = "METRO_NAME";
    public static final java.lang.String _POSTAL_CODE_CRITERIA_ID = "POSTAL_CODE_CRITERIA_ID";
    public static final java.lang.String _POSTAL_CODE = "POSTAL_CODE";
    public static final java.lang.String _CUSTOM_TARGETING_VALUE_ID = "CUSTOM_TARGETING_VALUE_ID";
    public static final java.lang.String _CUSTOM_CRITERIA = "CUSTOM_CRITERIA";
    public static final java.lang.String _CONTENT_ID = "CONTENT_ID";
    public static final java.lang.String _CONTENT_NAME = "CONTENT_NAME";
    public static final java.lang.String _CONTENT_BUNDLE_ID = "CONTENT_BUNDLE_ID";
    public static final java.lang.String _CONTENT_BUNDLE_NAME = "CONTENT_BUNDLE_NAME";
    public static final java.lang.String _CMS_METADATA = "CMS_METADATA";
    public static final java.lang.String _VIDEO_FALLBACK_POSITION = "VIDEO_FALLBACK_POSITION";
    public static final java.lang.String _POSITION_OF_POD = "POSITION_OF_POD";
    public static final java.lang.String _POSITION_IN_POD = "POSITION_IN_POD";
    public static final java.lang.String _CUSTOM_SPOT_ID = "CUSTOM_SPOT_ID";
    public static final java.lang.String _CUSTOM_SPOT_NAME = "CUSTOM_SPOT_NAME";
    public static final java.lang.String _VIDEO_REDIRECT_THIRD_PARTY = "VIDEO_REDIRECT_THIRD_PARTY";
    public static final java.lang.String _VIDEO_BREAK_TYPE = "VIDEO_BREAK_TYPE";
    public static final java.lang.String _VIDEO_BREAK_TYPE_NAME = "VIDEO_BREAK_TYPE_NAME";
    public static final java.lang.String _VIDEO_VAST_VERSION = "VIDEO_VAST_VERSION";
    public static final java.lang.String _VIDEO_AD_REQUEST_DURATION_ID = "VIDEO_AD_REQUEST_DURATION_ID";
    public static final java.lang.String _VIDEO_AD_REQUEST_DURATION = "VIDEO_AD_REQUEST_DURATION";
    public static final java.lang.String _VIDEO_PLCMT_ID = "VIDEO_PLCMT_ID";
    public static final java.lang.String _VIDEO_PLCMT_NAME = "VIDEO_PLCMT_NAME";
    public static final java.lang.String _INVENTORY_FORMAT = "INVENTORY_FORMAT";
    public static final java.lang.String _INVENTORY_FORMAT_NAME = "INVENTORY_FORMAT_NAME";
    public static final java.lang.String _PARTNER_MANAGEMENT_PARTNER_ID = "PARTNER_MANAGEMENT_PARTNER_ID";
    public static final java.lang.String _PARTNER_MANAGEMENT_PARTNER_NAME = "PARTNER_MANAGEMENT_PARTNER_NAME";
    public static final java.lang.String _PARTNER_MANAGEMENT_PARTNER_LABEL_ID = "PARTNER_MANAGEMENT_PARTNER_LABEL_ID";
    public static final java.lang.String _PARTNER_MANAGEMENT_PARTNER_LABEL_NAME = "PARTNER_MANAGEMENT_PARTNER_LABEL_NAME";
    public static final java.lang.String _PARTNER_MANAGEMENT_ASSIGNMENT_ID = "PARTNER_MANAGEMENT_ASSIGNMENT_ID";
    public static final java.lang.String _PARTNER_MANAGEMENT_ASSIGNMENT_NAME = "PARTNER_MANAGEMENT_ASSIGNMENT_NAME";
    public static final java.lang.String _INVENTORY_SHARE_ASSIGNMENT_ID = "INVENTORY_SHARE_ASSIGNMENT_ID";
    public static final java.lang.String _INVENTORY_SHARE_ASSIGNMENT_NAME = "INVENTORY_SHARE_ASSIGNMENT_NAME";
    public static final java.lang.String _INVENTORY_SHARE_OUTCOME = "INVENTORY_SHARE_OUTCOME";
    public static final java.lang.String _GRP_DEMOGRAPHICS = "GRP_DEMOGRAPHICS";
    public static final java.lang.String _AD_REQUEST_AD_UNIT_SIZES = "AD_REQUEST_AD_UNIT_SIZES";
    public static final java.lang.String _AD_REQUEST_CUSTOM_CRITERIA = "AD_REQUEST_CUSTOM_CRITERIA";
    public static final java.lang.String _IS_FIRST_LOOK_DEAL = "IS_FIRST_LOOK_DEAL";
    public static final java.lang.String _IS_ADX_DIRECT = "IS_ADX_DIRECT";
    public static final java.lang.String _YIELD_GROUP_ID = "YIELD_GROUP_ID";
    public static final java.lang.String _YIELD_GROUP_NAME = "YIELD_GROUP_NAME";
    public static final java.lang.String _YIELD_PARTNER = "YIELD_PARTNER";
    public static final java.lang.String _YIELD_PARTNER_TAG = "YIELD_PARTNER_TAG";
    public static final java.lang.String _EXCHANGE_BIDDING_DEAL_ID = "EXCHANGE_BIDDING_DEAL_ID";
    public static final java.lang.String _EXCHANGE_BIDDING_DEAL_TYPE = "EXCHANGE_BIDDING_DEAL_TYPE";
    public static final java.lang.String _CLASSIFIED_ADVERTISER_ID = "CLASSIFIED_ADVERTISER_ID";
    public static final java.lang.String _CLASSIFIED_ADVERTISER_NAME = "CLASSIFIED_ADVERTISER_NAME";
    public static final java.lang.String _CLASSIFIED_BRAND_ID = "CLASSIFIED_BRAND_ID";
    public static final java.lang.String _CLASSIFIED_BRAND_NAME = "CLASSIFIED_BRAND_NAME";
    public static final java.lang.String _MEDIATION_TYPE = "MEDIATION_TYPE";
    public static final java.lang.String _NATIVE_TEMPLATE_ID = "NATIVE_TEMPLATE_ID";
    public static final java.lang.String _NATIVE_TEMPLATE_NAME = "NATIVE_TEMPLATE_NAME";
    public static final java.lang.String _NATIVE_STYLE_ID = "NATIVE_STYLE_ID";
    public static final java.lang.String _NATIVE_STYLE_NAME = "NATIVE_STYLE_NAME";
    public static final java.lang.String _CHILD_NETWORK_CODE = "CHILD_NETWORK_CODE";
    public static final java.lang.String _MOBILE_APP_RESOLVED_ID = "MOBILE_APP_RESOLVED_ID";
    public static final java.lang.String _MOBILE_APP_NAME = "MOBILE_APP_NAME";
    public static final java.lang.String _MOBILE_DEVICE_NAME = "MOBILE_DEVICE_NAME";
    public static final java.lang.String _MOBILE_INVENTORY_TYPE = "MOBILE_INVENTORY_TYPE";
    public static final java.lang.String _OPERATING_SYSTEM_VERSION_ID = "OPERATING_SYSTEM_VERSION_ID";
    public static final java.lang.String _OPERATING_SYSTEM_VERSION_NAME = "OPERATING_SYSTEM_VERSION_NAME";
    public static final java.lang.String _REQUEST_TYPE = "REQUEST_TYPE";
    public static final java.lang.String _AD_UNIT_STATUS = "AD_UNIT_STATUS";
    public static final java.lang.String _MASTER_COMPANION_CREATIVE_ID = "MASTER_COMPANION_CREATIVE_ID";
    public static final java.lang.String _MASTER_COMPANION_CREATIVE_NAME = "MASTER_COMPANION_CREATIVE_NAME";
    public static final java.lang.String _AUDIENCE_SEGMENT_ID = "AUDIENCE_SEGMENT_ID";
    public static final java.lang.String _AUDIENCE_SEGMENT_NAME = "AUDIENCE_SEGMENT_NAME";
    public static final java.lang.String _AUDIENCE_SEGMENT_DATA_PROVIDER_NAME = "AUDIENCE_SEGMENT_DATA_PROVIDER_NAME";
    public static final java.lang.String _WEB_PROPERTY_CODE = "WEB_PROPERTY_CODE";
    public static final java.lang.String _BUYING_AGENCY_NAME = "BUYING_AGENCY_NAME";
    public static final java.lang.String _BUYER_NETWORK_ID = "BUYER_NETWORK_ID";
    public static final java.lang.String _BUYER_NETWORK_NAME = "BUYER_NETWORK_NAME";
    public static final java.lang.String _BIDDER_ID = "BIDDER_ID";
    public static final java.lang.String _BIDDER_NAME = "BIDDER_NAME";
    public static final java.lang.String _ADVERTISER_DOMAIN_NAME = "ADVERTISER_DOMAIN_NAME";
    public static final java.lang.String _AD_EXCHANGE_OPTIMIZATION_TYPE = "AD_EXCHANGE_OPTIMIZATION_TYPE";
    public static final java.lang.String _ADVERTISER_VERTICAL_NAME = "ADVERTISER_VERTICAL_NAME";
    public static final java.lang.String _NIELSEN_SEGMENT = "NIELSEN_SEGMENT";
    public static final java.lang.String _NIELSEN_DEMOGRAPHICS = "NIELSEN_DEMOGRAPHICS";
    public static final java.lang.String _NIELSEN_RESTATEMENT_DATE = "NIELSEN_RESTATEMENT_DATE";
    public static final java.lang.String _NIELSEN_DEVICE_ID = "NIELSEN_DEVICE_ID";
    public static final java.lang.String _NIELSEN_DEVICE_NAME = "NIELSEN_DEVICE_NAME";
    public static final java.lang.String _PROGRAMMATIC_BUYER_ID = "PROGRAMMATIC_BUYER_ID";
    public static final java.lang.String _PROGRAMMATIC_BUYER_NAME = "PROGRAMMATIC_BUYER_NAME";
    public static final java.lang.String _REQUESTED_AD_SIZES = "REQUESTED_AD_SIZES";
    public static final java.lang.String _CREATIVE_SIZE_DELIVERED = "CREATIVE_SIZE_DELIVERED";
    public static final java.lang.String _PROGRAMMATIC_CHANNEL_ID = "PROGRAMMATIC_CHANNEL_ID";
    public static final java.lang.String _PROGRAMMATIC_CHANNEL_NAME = "PROGRAMMATIC_CHANNEL_NAME";
    public static final java.lang.String _CLASSIFIED_YIELD_PARTNER_NAME = "CLASSIFIED_YIELD_PARTNER_NAME";
    public static final java.lang.String _DP_DATE = "DP_DATE";
    public static final java.lang.String _DP_WEEK = "DP_WEEK";
    public static final java.lang.String _DP_MONTH_YEAR = "DP_MONTH_YEAR";
    public static final java.lang.String _DP_COUNTRY_CRITERIA_ID = "DP_COUNTRY_CRITERIA_ID";
    public static final java.lang.String _DP_COUNTRY_NAME = "DP_COUNTRY_NAME";
    public static final java.lang.String _DP_INVENTORY_TYPE = "DP_INVENTORY_TYPE";
    public static final java.lang.String _DP_CREATIVE_SIZE = "DP_CREATIVE_SIZE";
    public static final java.lang.String _DP_BRAND_NAME = "DP_BRAND_NAME";
    public static final java.lang.String _DP_ADVERTISER_NAME = "DP_ADVERTISER_NAME";
    public static final java.lang.String _DP_ADX_BUYER_NETWORK_NAME = "DP_ADX_BUYER_NETWORK_NAME";
    public static final java.lang.String _DP_MOBILE_DEVICE_NAME = "DP_MOBILE_DEVICE_NAME";
    public static final java.lang.String _DP_DEVICE_CATEGORY_NAME = "DP_DEVICE_CATEGORY_NAME";
    public static final java.lang.String _DP_TAG_ID = "DP_TAG_ID";
    public static final java.lang.String _DP_DEAL_ID = "DP_DEAL_ID";
    public static final java.lang.String _DP_APP_ID = "DP_APP_ID";
    public static final java.lang.String _CUSTOM_DIMENSION = "CUSTOM_DIMENSION";
    public static final java.lang.String _DEMAND_CHANNEL_ID = "DEMAND_CHANNEL_ID";
    public static final java.lang.String _DEMAND_CHANNEL_NAME = "DEMAND_CHANNEL_NAME";
    public static final java.lang.String _DOMAIN = "DOMAIN";
    public static final java.lang.String _SERVING_RESTRICTION_ID = "SERVING_RESTRICTION_ID";
    public static final java.lang.String _SERVING_RESTRICTION_NAME = "SERVING_RESTRICTION_NAME";
    public static final java.lang.String _UNIFIED_PRICING_RULE_ID = "UNIFIED_PRICING_RULE_ID";
    public static final java.lang.String _UNIFIED_PRICING_RULE_NAME = "UNIFIED_PRICING_RULE_NAME";
    public static final java.lang.String _FIRST_LOOK_PRICING_RULE_ID = "FIRST_LOOK_PRICING_RULE_ID";
    public static final java.lang.String _FIRST_LOOK_PRICING_RULE_NAME = "FIRST_LOOK_PRICING_RULE_NAME";
    public static final java.lang.String _BID_RANGE = "BID_RANGE";
    public static final java.lang.String _BID_REJECTION_REASON = "BID_REJECTION_REASON";
    public static final java.lang.String _BID_REJECTION_REASON_NAME = "BID_REJECTION_REASON_NAME";
    public static final java.lang.String _AD_TECHNOLOGY_PROVIDER_DOMAIN = "AD_TECHNOLOGY_PROVIDER_DOMAIN";
    public static final java.lang.String _PROGRAMMATIC_DEAL_ID = "PROGRAMMATIC_DEAL_ID";
    public static final java.lang.String _PROGRAMMATIC_DEAL_NAME = "PROGRAMMATIC_DEAL_NAME";
    public static final java.lang.String _AD_TECHNOLOGY_PROVIDER_ID = "AD_TECHNOLOGY_PROVIDER_ID";
    public static final java.lang.String _AD_TECHNOLOGY_PROVIDER_NAME = "AD_TECHNOLOGY_PROVIDER_NAME";
    public static final java.lang.String _TCF_VENDOR_ID = "TCF_VENDOR_ID";
    public static final java.lang.String _TCF_VENDOR_NAME = "TCF_VENDOR_NAME";
    public static final java.lang.String _SITE_NAME = "SITE_NAME";
    public static final java.lang.String _CHANNEL_NAME = "CHANNEL_NAME";
    public static final java.lang.String _URL_ID = "URL_ID";
    public static final java.lang.String _URL_NAME = "URL_NAME";
    public static final java.lang.String _VIDEO_AD_DURATION = "VIDEO_AD_DURATION";
    public static final java.lang.String _VIDEO_AD_TYPE_ID = "VIDEO_AD_TYPE_ID";
    public static final java.lang.String _VIDEO_AD_TYPE_NAME = "VIDEO_AD_TYPE_NAME";
    public static final java.lang.String _AD_EXCHANGE_PRODUCT_CODE = "AD_EXCHANGE_PRODUCT_CODE";
    public static final java.lang.String _AD_EXCHANGE_PRODUCT_NAME = "AD_EXCHANGE_PRODUCT_NAME";
    public static final java.lang.String _DYNAMIC_ALLOCATION_ID = "DYNAMIC_ALLOCATION_ID";
    public static final java.lang.String _DYNAMIC_ALLOCATION_NAME = "DYNAMIC_ALLOCATION_NAME";
    public static final java.lang.String _AD_TYPE_ID = "AD_TYPE_ID";
    public static final java.lang.String _AD_TYPE_NAME = "AD_TYPE_NAME";
    public static final java.lang.String _AD_LOCATION_ID = "AD_LOCATION_ID";
    public static final java.lang.String _AD_LOCATION_NAME = "AD_LOCATION_NAME";
    public static final java.lang.String _TARGETING_TYPE_CODE = "TARGETING_TYPE_CODE";
    public static final java.lang.String _TARGETING_TYPE_NAME = "TARGETING_TYPE_NAME";
    public static final java.lang.String _BRANDING_TYPE_CODE = "BRANDING_TYPE_CODE";
    public static final java.lang.String _BRANDING_TYPE_NAME = "BRANDING_TYPE_NAME";
    public static final java.lang.String _BANDWIDTH_ID = "BANDWIDTH_ID";
    public static final java.lang.String _BANDWIDTH_NAME = "BANDWIDTH_NAME";
    public static final java.lang.String _CARRIER_ID = "CARRIER_ID";
    public static final java.lang.String _CARRIER_NAME = "CARRIER_NAME";
    // Canonical singleton instances, one per wire-value constant above. Each
    // constructor call also self-registers the instance in _table_ for
    // string-based lookup via fromValue(...).
    public static final Dimension MONTH_AND_YEAR = new Dimension(_MONTH_AND_YEAR);
    public static final Dimension WEEK = new Dimension(_WEEK);
    public static final Dimension DATE = new Dimension(_DATE);
    public static final Dimension DAY = new Dimension(_DAY);
    public static final Dimension HOUR = new Dimension(_HOUR);
    public static final Dimension DATE_PT = new Dimension(_DATE_PT);
    public static final Dimension WEEK_PT = new Dimension(_WEEK_PT);
    public static final Dimension MONTH_YEAR_PT = new Dimension(_MONTH_YEAR_PT);
    public static final Dimension DAY_OF_WEEK_PT = new Dimension(_DAY_OF_WEEK_PT);
    public static final Dimension LINE_ITEM_ID = new Dimension(_LINE_ITEM_ID);
    public static final Dimension LINE_ITEM_NAME = new Dimension(_LINE_ITEM_NAME);
    public static final Dimension LINE_ITEM_TYPE = new Dimension(_LINE_ITEM_TYPE);
    public static final Dimension ORDER_ID = new Dimension(_ORDER_ID);
    public static final Dimension ORDER_NAME = new Dimension(_ORDER_NAME);
    public static final Dimension ORDER_DELIVERY_STATUS = new Dimension(_ORDER_DELIVERY_STATUS);
    public static final Dimension ADVERTISER_ID = new Dimension(_ADVERTISER_ID);
    public static final Dimension ADVERTISER_NAME = new Dimension(_ADVERTISER_NAME);
    public static final Dimension AD_NETWORK_ID = new Dimension(_AD_NETWORK_ID);
    public static final Dimension AD_NETWORK_NAME = new Dimension(_AD_NETWORK_NAME);
    public static final Dimension SALESPERSON_ID = new Dimension(_SALESPERSON_ID);
    public static final Dimension SALESPERSON_NAME = new Dimension(_SALESPERSON_NAME);
    public static final Dimension CREATIVE_ID = new Dimension(_CREATIVE_ID);
    public static final Dimension CREATIVE_NAME = new Dimension(_CREATIVE_NAME);
    public static final Dimension CREATIVE_TYPE = new Dimension(_CREATIVE_TYPE);
    public static final Dimension CREATIVE_BILLING_TYPE = new Dimension(_CREATIVE_BILLING_TYPE);
    public static final Dimension CUSTOM_EVENT_ID = new Dimension(_CUSTOM_EVENT_ID);
    public static final Dimension CUSTOM_EVENT_NAME = new Dimension(_CUSTOM_EVENT_NAME);
    public static final Dimension CUSTOM_EVENT_TYPE = new Dimension(_CUSTOM_EVENT_TYPE);
    public static final Dimension CREATIVE_SIZE = new Dimension(_CREATIVE_SIZE);
    public static final Dimension AD_UNIT_ID = new Dimension(_AD_UNIT_ID);
    public static final Dimension AD_UNIT_NAME = new Dimension(_AD_UNIT_NAME);
    public static final Dimension PARENT_AD_UNIT_ID = new Dimension(_PARENT_AD_UNIT_ID);
    public static final Dimension PARENT_AD_UNIT_NAME = new Dimension(_PARENT_AD_UNIT_NAME);
    public static final Dimension PLACEMENT_ID = new Dimension(_PLACEMENT_ID);
    public static final Dimension PLACEMENT_NAME = new Dimension(_PLACEMENT_NAME);
    public static final Dimension PLACEMENT_STATUS = new Dimension(_PLACEMENT_STATUS);
    public static final Dimension TARGETING = new Dimension(_TARGETING);
    public static final Dimension BROWSER_NAME = new Dimension(_BROWSER_NAME);
    public static final Dimension DEVICE_CATEGORY_ID = new Dimension(_DEVICE_CATEGORY_ID);
    public static final Dimension DEVICE_CATEGORY_NAME = new Dimension(_DEVICE_CATEGORY_NAME);
    public static final Dimension COUNTRY_CRITERIA_ID = new Dimension(_COUNTRY_CRITERIA_ID);
    public static final Dimension COUNTRY_CODE = new Dimension(_COUNTRY_CODE);
    public static final Dimension COUNTRY_NAME = new Dimension(_COUNTRY_NAME);
    public static final Dimension REGION_CRITERIA_ID = new Dimension(_REGION_CRITERIA_ID);
    public static final Dimension REGION_NAME = new Dimension(_REGION_NAME);
    public static final Dimension CITY_CRITERIA_ID = new Dimension(_CITY_CRITERIA_ID);
    public static final Dimension CITY_NAME = new Dimension(_CITY_NAME);
    public static final Dimension METRO_CRITERIA_ID = new Dimension(_METRO_CRITERIA_ID);
    public static final Dimension METRO_NAME = new Dimension(_METRO_NAME);
    public static final Dimension POSTAL_CODE_CRITERIA_ID = new Dimension(_POSTAL_CODE_CRITERIA_ID);
    public static final Dimension POSTAL_CODE = new Dimension(_POSTAL_CODE);
    public static final Dimension CUSTOM_TARGETING_VALUE_ID = new Dimension(_CUSTOM_TARGETING_VALUE_ID);
    public static final Dimension CUSTOM_CRITERIA = new Dimension(_CUSTOM_CRITERIA);
    public static final Dimension CONTENT_ID = new Dimension(_CONTENT_ID);
    public static final Dimension CONTENT_NAME = new Dimension(_CONTENT_NAME);
    public static final Dimension CONTENT_BUNDLE_ID = new Dimension(_CONTENT_BUNDLE_ID);
    public static final Dimension CONTENT_BUNDLE_NAME = new Dimension(_CONTENT_BUNDLE_NAME);
    public static final Dimension CMS_METADATA = new Dimension(_CMS_METADATA);
    public static final Dimension VIDEO_FALLBACK_POSITION = new Dimension(_VIDEO_FALLBACK_POSITION);
    public static final Dimension POSITION_OF_POD = new Dimension(_POSITION_OF_POD);
    public static final Dimension POSITION_IN_POD = new Dimension(_POSITION_IN_POD);
    public static final Dimension CUSTOM_SPOT_ID = new Dimension(_CUSTOM_SPOT_ID);
    public static final Dimension CUSTOM_SPOT_NAME = new Dimension(_CUSTOM_SPOT_NAME);
    public static final Dimension VIDEO_REDIRECT_THIRD_PARTY = new Dimension(_VIDEO_REDIRECT_THIRD_PARTY);
    public static final Dimension VIDEO_BREAK_TYPE = new Dimension(_VIDEO_BREAK_TYPE);
    public static final Dimension VIDEO_BREAK_TYPE_NAME = new Dimension(_VIDEO_BREAK_TYPE_NAME);
    public static final Dimension VIDEO_VAST_VERSION = new Dimension(_VIDEO_VAST_VERSION);
    public static final Dimension VIDEO_AD_REQUEST_DURATION_ID = new Dimension(_VIDEO_AD_REQUEST_DURATION_ID);
    public static final Dimension VIDEO_AD_REQUEST_DURATION = new Dimension(_VIDEO_AD_REQUEST_DURATION);
    public static final Dimension VIDEO_PLCMT_ID = new Dimension(_VIDEO_PLCMT_ID);
    public static final Dimension VIDEO_PLCMT_NAME = new Dimension(_VIDEO_PLCMT_NAME);
    public static final Dimension INVENTORY_FORMAT = new Dimension(_INVENTORY_FORMAT);
    public static final Dimension INVENTORY_FORMAT_NAME = new Dimension(_INVENTORY_FORMAT_NAME);
    public static final Dimension PARTNER_MANAGEMENT_PARTNER_ID = new Dimension(_PARTNER_MANAGEMENT_PARTNER_ID);
    public static final Dimension PARTNER_MANAGEMENT_PARTNER_NAME = new Dimension(_PARTNER_MANAGEMENT_PARTNER_NAME);
    public static final Dimension PARTNER_MANAGEMENT_PARTNER_LABEL_ID = new Dimension(_PARTNER_MANAGEMENT_PARTNER_LABEL_ID);
    public static final Dimension PARTNER_MANAGEMENT_PARTNER_LABEL_NAME = new Dimension(_PARTNER_MANAGEMENT_PARTNER_LABEL_NAME);
    public static final Dimension PARTNER_MANAGEMENT_ASSIGNMENT_ID = new Dimension(_PARTNER_MANAGEMENT_ASSIGNMENT_ID);
    public static final Dimension PARTNER_MANAGEMENT_ASSIGNMENT_NAME = new Dimension(_PARTNER_MANAGEMENT_ASSIGNMENT_NAME);
    public static final Dimension INVENTORY_SHARE_ASSIGNMENT_ID = new Dimension(_INVENTORY_SHARE_ASSIGNMENT_ID);
    public static final Dimension INVENTORY_SHARE_ASSIGNMENT_NAME = new Dimension(_INVENTORY_SHARE_ASSIGNMENT_NAME);
    public static final Dimension INVENTORY_SHARE_OUTCOME = new Dimension(_INVENTORY_SHARE_OUTCOME);
    public static final Dimension GRP_DEMOGRAPHICS = new Dimension(_GRP_DEMOGRAPHICS);
    public static final Dimension AD_REQUEST_AD_UNIT_SIZES = new Dimension(_AD_REQUEST_AD_UNIT_SIZES);
    public static final Dimension AD_REQUEST_CUSTOM_CRITERIA = new Dimension(_AD_REQUEST_CUSTOM_CRITERIA);
    public static final Dimension IS_FIRST_LOOK_DEAL = new Dimension(_IS_FIRST_LOOK_DEAL);
    public static final Dimension IS_ADX_DIRECT = new Dimension(_IS_ADX_DIRECT);
    public static final Dimension YIELD_GROUP_ID = new Dimension(_YIELD_GROUP_ID);
    public static final Dimension YIELD_GROUP_NAME = new Dimension(_YIELD_GROUP_NAME);
    public static final Dimension YIELD_PARTNER = new Dimension(_YIELD_PARTNER);
    public static final Dimension YIELD_PARTNER_TAG = new Dimension(_YIELD_PARTNER_TAG);
    public static final Dimension EXCHANGE_BIDDING_DEAL_ID = new Dimension(_EXCHANGE_BIDDING_DEAL_ID);
    public static final Dimension EXCHANGE_BIDDING_DEAL_TYPE = new Dimension(_EXCHANGE_BIDDING_DEAL_TYPE);
    public static final Dimension CLASSIFIED_ADVERTISER_ID = new Dimension(_CLASSIFIED_ADVERTISER_ID);
    public static final Dimension CLASSIFIED_ADVERTISER_NAME = new Dimension(_CLASSIFIED_ADVERTISER_NAME);
    public static final Dimension CLASSIFIED_BRAND_ID = new Dimension(_CLASSIFIED_BRAND_ID);
    public static final Dimension CLASSIFIED_BRAND_NAME = new Dimension(_CLASSIFIED_BRAND_NAME);
    public static final Dimension MEDIATION_TYPE = new Dimension(_MEDIATION_TYPE);
    public static final Dimension NATIVE_TEMPLATE_ID = new Dimension(_NATIVE_TEMPLATE_ID);
    public static final Dimension NATIVE_TEMPLATE_NAME = new Dimension(_NATIVE_TEMPLATE_NAME);
    public static final Dimension NATIVE_STYLE_ID = new Dimension(_NATIVE_STYLE_ID);
    public static final Dimension NATIVE_STYLE_NAME = new Dimension(_NATIVE_STYLE_NAME);
    public static final Dimension CHILD_NETWORK_CODE = new Dimension(_CHILD_NETWORK_CODE);
    public static final Dimension MOBILE_APP_RESOLVED_ID = new Dimension(_MOBILE_APP_RESOLVED_ID);
    public static final Dimension MOBILE_APP_NAME = new Dimension(_MOBILE_APP_NAME);
    public static final Dimension MOBILE_DEVICE_NAME = new Dimension(_MOBILE_DEVICE_NAME);
    public static final Dimension MOBILE_INVENTORY_TYPE = new Dimension(_MOBILE_INVENTORY_TYPE);
    public static final Dimension OPERATING_SYSTEM_VERSION_ID = new Dimension(_OPERATING_SYSTEM_VERSION_ID);
    public static final Dimension OPERATING_SYSTEM_VERSION_NAME = new Dimension(_OPERATING_SYSTEM_VERSION_NAME);
    public static final Dimension REQUEST_TYPE = new Dimension(_REQUEST_TYPE);
    public static final Dimension AD_UNIT_STATUS = new Dimension(_AD_UNIT_STATUS);
    public static final Dimension MASTER_COMPANION_CREATIVE_ID = new Dimension(_MASTER_COMPANION_CREATIVE_ID);
    public static final Dimension MASTER_COMPANION_CREATIVE_NAME = new Dimension(_MASTER_COMPANION_CREATIVE_NAME);
    public static final Dimension AUDIENCE_SEGMENT_ID = new Dimension(_AUDIENCE_SEGMENT_ID);
    public static final Dimension AUDIENCE_SEGMENT_NAME = new Dimension(_AUDIENCE_SEGMENT_NAME);
    public static final Dimension AUDIENCE_SEGMENT_DATA_PROVIDER_NAME = new Dimension(_AUDIENCE_SEGMENT_DATA_PROVIDER_NAME);
    public static final Dimension WEB_PROPERTY_CODE = new Dimension(_WEB_PROPERTY_CODE);
    public static final Dimension BUYING_AGENCY_NAME = new Dimension(_BUYING_AGENCY_NAME);
    public static final Dimension BUYER_NETWORK_ID = new Dimension(_BUYER_NETWORK_ID);
    public static final Dimension BUYER_NETWORK_NAME = new Dimension(_BUYER_NETWORK_NAME);
    public static final Dimension BIDDER_ID = new Dimension(_BIDDER_ID);
    public static final Dimension BIDDER_NAME = new Dimension(_BIDDER_NAME);
    public static final Dimension ADVERTISER_DOMAIN_NAME = new Dimension(_ADVERTISER_DOMAIN_NAME);
    public static final Dimension AD_EXCHANGE_OPTIMIZATION_TYPE = new Dimension(_AD_EXCHANGE_OPTIMIZATION_TYPE);
    public static final Dimension ADVERTISER_VERTICAL_NAME = new Dimension(_ADVERTISER_VERTICAL_NAME);
    public static final Dimension NIELSEN_SEGMENT = new Dimension(_NIELSEN_SEGMENT);
    public static final Dimension NIELSEN_DEMOGRAPHICS = new Dimension(_NIELSEN_DEMOGRAPHICS);
    public static final Dimension NIELSEN_RESTATEMENT_DATE = new Dimension(_NIELSEN_RESTATEMENT_DATE);
    public static final Dimension NIELSEN_DEVICE_ID = new Dimension(_NIELSEN_DEVICE_ID);
    public static final Dimension NIELSEN_DEVICE_NAME = new Dimension(_NIELSEN_DEVICE_NAME);
    public static final Dimension PROGRAMMATIC_BUYER_ID = new Dimension(_PROGRAMMATIC_BUYER_ID);
    public static final Dimension PROGRAMMATIC_BUYER_NAME = new Dimension(_PROGRAMMATIC_BUYER_NAME);
    public static final Dimension REQUESTED_AD_SIZES = new Dimension(_REQUESTED_AD_SIZES);
    public static final Dimension CREATIVE_SIZE_DELIVERED = new Dimension(_CREATIVE_SIZE_DELIVERED);
    public static final Dimension PROGRAMMATIC_CHANNEL_ID = new Dimension(_PROGRAMMATIC_CHANNEL_ID);
    public static final Dimension PROGRAMMATIC_CHANNEL_NAME = new Dimension(_PROGRAMMATIC_CHANNEL_NAME);
    public static final Dimension CLASSIFIED_YIELD_PARTNER_NAME = new Dimension(_CLASSIFIED_YIELD_PARTNER_NAME);
    public static final Dimension DP_DATE = new Dimension(_DP_DATE);
    public static final Dimension DP_WEEK = new Dimension(_DP_WEEK);
    public static final Dimension DP_MONTH_YEAR = new Dimension(_DP_MONTH_YEAR);
    public static final Dimension DP_COUNTRY_CRITERIA_ID = new Dimension(_DP_COUNTRY_CRITERIA_ID);
    public static final Dimension DP_COUNTRY_NAME = new Dimension(_DP_COUNTRY_NAME);
    public static final Dimension DP_INVENTORY_TYPE = new Dimension(_DP_INVENTORY_TYPE);
    public static final Dimension DP_CREATIVE_SIZE = new Dimension(_DP_CREATIVE_SIZE);
    public static final Dimension DP_BRAND_NAME = new Dimension(_DP_BRAND_NAME);
    public static final Dimension DP_ADVERTISER_NAME = new Dimension(_DP_ADVERTISER_NAME);
    public static final Dimension DP_ADX_BUYER_NETWORK_NAME = new Dimension(_DP_ADX_BUYER_NETWORK_NAME);
    public static final Dimension DP_MOBILE_DEVICE_NAME = new Dimension(_DP_MOBILE_DEVICE_NAME);
    public static final Dimension DP_DEVICE_CATEGORY_NAME = new Dimension(_DP_DEVICE_CATEGORY_NAME);
    public static final Dimension DP_TAG_ID = new Dimension(_DP_TAG_ID);
    public static final Dimension DP_DEAL_ID = new Dimension(_DP_DEAL_ID);
public static final Dimension DP_APP_ID = new Dimension(_DP_APP_ID);
public static final Dimension CUSTOM_DIMENSION = new Dimension(_CUSTOM_DIMENSION);
public static final Dimension DEMAND_CHANNEL_ID = new Dimension(_DEMAND_CHANNEL_ID);
public static final Dimension DEMAND_CHANNEL_NAME = new Dimension(_DEMAND_CHANNEL_NAME);
public static final Dimension DOMAIN = new Dimension(_DOMAIN);
public static final Dimension SERVING_RESTRICTION_ID = new Dimension(_SERVING_RESTRICTION_ID);
public static final Dimension SERVING_RESTRICTION_NAME = new Dimension(_SERVING_RESTRICTION_NAME);
public static final Dimension UNIFIED_PRICING_RULE_ID = new Dimension(_UNIFIED_PRICING_RULE_ID);
public static final Dimension UNIFIED_PRICING_RULE_NAME = new Dimension(_UNIFIED_PRICING_RULE_NAME);
public static final Dimension FIRST_LOOK_PRICING_RULE_ID = new Dimension(_FIRST_LOOK_PRICING_RULE_ID);
public static final Dimension FIRST_LOOK_PRICING_RULE_NAME = new Dimension(_FIRST_LOOK_PRICING_RULE_NAME);
public static final Dimension BID_RANGE = new Dimension(_BID_RANGE);
public static final Dimension BID_REJECTION_REASON = new Dimension(_BID_REJECTION_REASON);
public static final Dimension BID_REJECTION_REASON_NAME = new Dimension(_BID_REJECTION_REASON_NAME);
public static final Dimension AD_TECHNOLOGY_PROVIDER_DOMAIN = new Dimension(_AD_TECHNOLOGY_PROVIDER_DOMAIN);
public static final Dimension PROGRAMMATIC_DEAL_ID = new Dimension(_PROGRAMMATIC_DEAL_ID);
public static final Dimension PROGRAMMATIC_DEAL_NAME = new Dimension(_PROGRAMMATIC_DEAL_NAME);
public static final Dimension AD_TECHNOLOGY_PROVIDER_ID = new Dimension(_AD_TECHNOLOGY_PROVIDER_ID);
public static final Dimension AD_TECHNOLOGY_PROVIDER_NAME = new Dimension(_AD_TECHNOLOGY_PROVIDER_NAME);
public static final Dimension TCF_VENDOR_ID = new Dimension(_TCF_VENDOR_ID);
public static final Dimension TCF_VENDOR_NAME = new Dimension(_TCF_VENDOR_NAME);
public static final Dimension SITE_NAME = new Dimension(_SITE_NAME);
public static final Dimension CHANNEL_NAME = new Dimension(_CHANNEL_NAME);
public static final Dimension URL_ID = new Dimension(_URL_ID);
public static final Dimension URL_NAME = new Dimension(_URL_NAME);
public static final Dimension VIDEO_AD_DURATION = new Dimension(_VIDEO_AD_DURATION);
public static final Dimension VIDEO_AD_TYPE_ID = new Dimension(_VIDEO_AD_TYPE_ID);
public static final Dimension VIDEO_AD_TYPE_NAME = new Dimension(_VIDEO_AD_TYPE_NAME);
public static final Dimension AD_EXCHANGE_PRODUCT_CODE = new Dimension(_AD_EXCHANGE_PRODUCT_CODE);
public static final Dimension AD_EXCHANGE_PRODUCT_NAME = new Dimension(_AD_EXCHANGE_PRODUCT_NAME);
public static final Dimension DYNAMIC_ALLOCATION_ID = new Dimension(_DYNAMIC_ALLOCATION_ID);
public static final Dimension DYNAMIC_ALLOCATION_NAME = new Dimension(_DYNAMIC_ALLOCATION_NAME);
public static final Dimension AD_TYPE_ID = new Dimension(_AD_TYPE_ID);
public static final Dimension AD_TYPE_NAME = new Dimension(_AD_TYPE_NAME);
public static final Dimension AD_LOCATION_ID = new Dimension(_AD_LOCATION_ID);
public static final Dimension AD_LOCATION_NAME = new Dimension(_AD_LOCATION_NAME);
public static final Dimension TARGETING_TYPE_CODE = new Dimension(_TARGETING_TYPE_CODE);
public static final Dimension TARGETING_TYPE_NAME = new Dimension(_TARGETING_TYPE_NAME);
public static final Dimension BRANDING_TYPE_CODE = new Dimension(_BRANDING_TYPE_CODE);
public static final Dimension BRANDING_TYPE_NAME = new Dimension(_BRANDING_TYPE_NAME);
public static final Dimension BANDWIDTH_ID = new Dimension(_BANDWIDTH_ID);
public static final Dimension BANDWIDTH_NAME = new Dimension(_BANDWIDTH_NAME);
public static final Dimension CARRIER_ID = new Dimension(_CARRIER_ID);
public static final Dimension CARRIER_NAME = new Dimension(_CARRIER_NAME);
    /** Returns the wire/string value backing this enumeration constant. */
    public java.lang.String getValue() { return _value_;}
public static Dimension fromValue(java.lang.String value)
throws java.lang.IllegalArgumentException {
Dimension enumeration = (Dimension)
_table_.get(value);
if (enumeration==null) throw new java.lang.IllegalArgumentException();
return enumeration;
}
    /**
     * Alias of {@link #fromValue(java.lang.String)}; part of the Axis
     * enum-deserialization contract.
     */
    public static Dimension fromString(java.lang.String value)
          throws java.lang.IllegalArgumentException {
        return fromValue(value);
    }
    // Identity comparison suffices: fromValue always hands out the _table_
    // singleton, and readResolve below canonicalizes deserialized copies.
    public boolean equals(java.lang.Object obj) {return (obj == this);}
    // Hash by the string value; identical instances (the only "equal" ones
    // under the identity-based equals above) share the same _value_.
    public int hashCode() { return toString().hashCode();}
    /** Returns the wire/string value of this constant. */
    public java.lang.String toString() { return _value_;}
    // Java serialization hook: replace a freshly deserialized instance with
    // the registered singleton so identity-based equals keeps working.
    public java.lang.Object readResolve() throws java.io.ObjectStreamException { return fromValue(_value_);}
    /**
     * Factory used by the Axis runtime to serialize this enum type.
     * {@code mechType} is part of the framework signature but unused here.
     */
    public static org.apache.axis.encoding.Serializer getSerializer(
           java.lang.String mechType,
           java.lang.Class _javaType,
           javax.xml.namespace.QName _xmlType) {
        return
          new org.apache.axis.encoding.ser.EnumSerializer(
            _javaType, _xmlType);
    }
    /**
     * Factory used by the Axis runtime to deserialize this enum type.
     * {@code mechType} is part of the framework signature but unused here.
     */
    public static org.apache.axis.encoding.Deserializer getDeserializer(
           java.lang.String mechType,
           java.lang.Class _javaType,
           javax.xml.namespace.QName _xmlType) {
        return
          new org.apache.axis.encoding.ser.EnumDeserializer(
            _javaType, _xmlType);
    }
    // Type metadata
    private static org.apache.axis.description.TypeDesc typeDesc =
        new org.apache.axis.description.TypeDesc(Dimension.class);
    static {
        // Bind this enum to its XML Schema type in the v202508 namespace.
        typeDesc.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202508", "Dimension"));
    }
    /**
     * Return type metadata object
     */
    public static org.apache.axis.description.TypeDesc getTypeDesc() {
        return typeDesc;
    }
}
|
googleapis/google-cloud-java | 36,674 | java-apigee-registry/proto-google-cloud-apigee-registry-v1/src/main/java/com/google/cloud/apigeeregistry/v1/ListApiSpecsRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/apigeeregistry/v1/registry_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.apigeeregistry.v1;
/**
*
*
* <pre>
* Request message for ListApiSpecs.
* </pre>
*
* Protobuf type {@code google.cloud.apigeeregistry.v1.ListApiSpecsRequest}
*/
public final class ListApiSpecsRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.apigeeregistry.v1.ListApiSpecsRequest)
ListApiSpecsRequestOrBuilder {
private static final long serialVersionUID = 0L;
  // Use ListApiSpecsRequest.newBuilder() to construct.
  private ListApiSpecsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Initializes every string field to the proto3 default ("").
  private ListApiSpecsRequest() {
    parent_ = "";
    pageToken_ = "";
    filter_ = "";
  }
  // Invoked reflectively by the protobuf runtime to create new instances.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListApiSpecsRequest();
  }
  /** Returns the protobuf descriptor for this message type. */
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.apigeeregistry.v1.RegistryServiceProto
        .internal_static_google_cloud_apigeeregistry_v1_ListApiSpecsRequest_descriptor;
  }
  // Wires the generated field accessors to the descriptor for reflection.
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.apigeeregistry.v1.RegistryServiceProto
        .internal_static_google_cloud_apigeeregistry_v1_ListApiSpecsRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.apigeeregistry.v1.ListApiSpecsRequest.class,
            com.google.cloud.apigeeregistry.v1.ListApiSpecsRequest.Builder.class);
  }
public static final int PARENT_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The parent, which owns this collection of specs.
* Format: `projects/*/locations/*/apis/*/versions/*`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
  @java.lang.Override
  public java.lang.String getParent() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // First access after parsing: decode the UTF-8 ByteString once and
      // cache the resulting String for subsequent calls.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      parent_ = s;
      return s;
    }
  }
/**
*
*
* <pre>
* Required. The parent, which owns this collection of specs.
* Format: `projects/*/locations/*/apis/*/versions/*`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
  @java.lang.Override
  public com.google.protobuf.ByteString getParentBytes() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      // Cache the UTF-8 encoding the first time bytes are requested.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      parent_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
public static final int PAGE_SIZE_FIELD_NUMBER = 2;
private int pageSize_ = 0;
/**
*
*
* <pre>
* The maximum number of specs to return.
* The service may return fewer than this value.
* If unspecified, at most 50 values will be returned.
* The maximum is 1000; values above 1000 will be coerced to 1000.
* </pre>
*
* <code>int32 page_size = 2;</code>
*
* @return The pageSize.
*/
  @java.lang.Override
  public int getPageSize() {
    // proto3 scalar: 0 when unset (the service then applies its own default).
    return pageSize_;
  }
public static final int PAGE_TOKEN_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* A page token, received from a previous `ListApiSpecs` call.
* Provide this to retrieve the subsequent page.
*
* When paginating, all other parameters provided to `ListApiSpecs` must match
* the call that provided the page token.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return The pageToken.
*/
  @java.lang.Override
  public java.lang.String getPageToken() {
    java.lang.Object ref = pageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Lazily decode and cache the String form of the parsed ByteString.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      pageToken_ = s;
      return s;
    }
  }
/**
*
*
* <pre>
* A page token, received from a previous `ListApiSpecs` call.
* Provide this to retrieve the subsequent page.
*
* When paginating, all other parameters provided to `ListApiSpecs` must match
* the call that provided the page token.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return The bytes for pageToken.
*/
  @java.lang.Override
  public com.google.protobuf.ByteString getPageTokenBytes() {
    java.lang.Object ref = pageToken_;
    if (ref instanceof java.lang.String) {
      // Cache the UTF-8 encoding the first time bytes are requested.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      pageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
public static final int FILTER_FIELD_NUMBER = 4;
@SuppressWarnings("serial")
private volatile java.lang.Object filter_ = "";
/**
*
*
* <pre>
* An expression that can be used to filter the list. Filters use the Common
* Expression Language and can refer to all message fields except contents.
* </pre>
*
* <code>string filter = 4;</code>
*
* @return The filter.
*/
  @java.lang.Override
  public java.lang.String getFilter() {
    java.lang.Object ref = filter_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Lazily decode and cache the String form of the parsed ByteString.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      filter_ = s;
      return s;
    }
  }
/**
*
*
* <pre>
* An expression that can be used to filter the list. Filters use the Common
* Expression Language and can refer to all message fields except contents.
* </pre>
*
* <code>string filter = 4;</code>
*
* @return The bytes for filter.
*/
  @java.lang.Override
  public com.google.protobuf.ByteString getFilterBytes() {
    java.lang.Object ref = filter_;
    if (ref instanceof java.lang.String) {
      // Cache the UTF-8 encoding the first time bytes are requested.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      filter_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Memo: -1 = not yet checked, 0 = not initialized, 1 = initialized.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // Nothing to validate: the generated check unconditionally succeeds.
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Proto3 semantics: fields at their default value ("" / 0) are omitted.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    if (pageSize_ != 0) {
      output.writeInt32(2, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 4, filter_);
    }
    // Round-trip any fields this binary did not recognize when parsing.
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // memoizedSize == -1 means "not yet computed"; the result is cached
    // because the message is immutable once built.
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (pageSize_ != 0) {
      size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, filter_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Value-based equality over all four declared fields plus unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.apigeeregistry.v1.ListApiSpecsRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.apigeeregistry.v1.ListApiSpecsRequest other =
        (com.google.cloud.apigeeregistry.v1.ListApiSpecsRequest) obj;
    if (!getParent().equals(other.getParent())) return false;
    if (getPageSize() != other.getPageSize()) return false;
    if (!getPageToken().equals(other.getPageToken())) return false;
    if (!getFilter().equals(other.getFilter())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // memoizedHashCode == 0 doubles as the "not yet computed" sentinel; a
    // genuine hash of 0 would simply be recomputed on each call (benign).
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
    hash = (53 * hash) + getPageSize();
    hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getPageToken().hashCode();
    hash = (37 * hash) + FILTER_FIELD_NUMBER;
    hash = (53 * hash) + getFilter().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // -------------------------------------------------------------------------
  // Standard generated parse entry points. Byte-based sources delegate to
  // PARSER; stream-based sources go through the GeneratedMessageV3 helpers,
  // which translate protobuf errors into the appropriate IO exceptions.
  // -------------------------------------------------------------------------
  public static com.google.cloud.apigeeregistry.v1.ListApiSpecsRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.apigeeregistry.v1.ListApiSpecsRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.apigeeregistry.v1.ListApiSpecsRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.apigeeregistry.v1.ListApiSpecsRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.apigeeregistry.v1.ListApiSpecsRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.apigeeregistry.v1.ListApiSpecsRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.apigeeregistry.v1.ListApiSpecsRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.apigeeregistry.v1.ListApiSpecsRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Delimited variants read a leading varint length prefix before the message.
  public static com.google.cloud.apigeeregistry.v1.ListApiSpecsRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.apigeeregistry.v1.ListApiSpecsRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.apigeeregistry.v1.ListApiSpecsRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.apigeeregistry.v1.ListApiSpecsRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  /** Creates a builder with all fields at their default values. */
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  /** Creates a builder pre-populated with {@code prototype}'s fields. */
  public static Builder newBuilder(
      com.google.cloud.apigeeregistry.v1.ListApiSpecsRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // Skip the mergeFrom when converting the shared default instance.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Request message for ListApiSpecs.
* </pre>
*
* Protobuf type {@code google.cloud.apigeeregistry.v1.ListApiSpecsRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.apigeeregistry.v1.ListApiSpecsRequest)
com.google.cloud.apigeeregistry.v1.ListApiSpecsRequestOrBuilder {
    /** Returns the descriptor shared with the enclosing message type. */
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.apigeeregistry.v1.RegistryServiceProto
          .internal_static_google_cloud_apigeeregistry_v1_ListApiSpecsRequest_descriptor;
    }
    // Wires the generated field accessors to the descriptor for reflection.
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.apigeeregistry.v1.RegistryServiceProto
          .internal_static_google_cloud_apigeeregistry_v1_ListApiSpecsRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.apigeeregistry.v1.ListApiSpecsRequest.class,
              com.google.cloud.apigeeregistry.v1.ListApiSpecsRequest.Builder.class);
    }
    // Construct using com.google.cloud.apigeeregistry.v1.ListApiSpecsRequest.newBuilder()
    private Builder() {}
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      // Reset the has-bits and every field to its proto3 default.
      bitField0_ = 0;
      parent_ = "";
      pageSize_ = 0;
      pageToken_ = "";
      filter_ = "";
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.apigeeregistry.v1.RegistryServiceProto
          .internal_static_google_cloud_apigeeregistry_v1_ListApiSpecsRequest_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.apigeeregistry.v1.ListApiSpecsRequest getDefaultInstanceForType() {
      return com.google.cloud.apigeeregistry.v1.ListApiSpecsRequest.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.apigeeregistry.v1.ListApiSpecsRequest build() {
      com.google.cloud.apigeeregistry.v1.ListApiSpecsRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.apigeeregistry.v1.ListApiSpecsRequest buildPartial() {
      com.google.cloud.apigeeregistry.v1.ListApiSpecsRequest result =
          new com.google.cloud.apigeeregistry.v1.ListApiSpecsRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Copies only the fields whose has-bit is set in bitField0_ into result.
    private void buildPartial0(com.google.cloud.apigeeregistry.v1.ListApiSpecsRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.parent_ = parent_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.pageSize_ = pageSize_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.pageToken_ = pageToken_;
      }
      if (((from_bitField0_ & 0x00000008) != 0)) {
        result.filter_ = filter_;
      }
    }
    // The following overrides exist only to narrow the return type to Builder.
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.apigeeregistry.v1.ListApiSpecsRequest) {
        return mergeFrom((com.google.cloud.apigeeregistry.v1.ListApiSpecsRequest) other);
      } else {
        // Different message type: fall back to reflective field-by-field merge.
        super.mergeFrom(other);
        return this;
      }
    }
    // Field-wise merge: non-default values in `other` overwrite this builder.
    public Builder mergeFrom(com.google.cloud.apigeeregistry.v1.ListApiSpecsRequest other) {
      if (other == com.google.cloud.apigeeregistry.v1.ListApiSpecsRequest.getDefaultInstance())
        return this;
      if (!other.getParent().isEmpty()) {
        parent_ = other.parent_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (other.getPageSize() != 0) {
        setPageSize(other.getPageSize());
      }
      if (!other.getPageToken().isEmpty()) {
        pageToken_ = other.pageToken_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      if (!other.getFilter().isEmpty()) {
        filter_ = other.filter_;
        bitField0_ |= 0x00000008;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      // No validation needed; builders of this message are always buildable.
      return true;
    }
    // Wire-format merge: reads tag/value pairs until EOF (tag 0) or an
    // end-group tag. Tag = (field number << 3) | wire type, so e.g. case 10
    // is field 1 with wire type 2 (length-delimited).
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                parent_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 16:
              {
                pageSize_ = input.readInt32();
                bitField0_ |= 0x00000002;
                break;
              } // case 16
            case 26:
              {
                pageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            case 34:
              {
                filter_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000008;
                break;
              } // case 34
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
private int bitField0_;
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The parent, which owns this collection of specs.
* Format: `projects/*/locations/*/apis/*/versions/*`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
    public java.lang.String getParent() {
      java.lang.Object ref = parent_;
      if (!(ref instanceof java.lang.String)) {
        // Decode the parsed ByteString lazily and cache the String form.
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        parent_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
/**
*
*
* <pre>
* Required. The parent, which owns this collection of specs.
* Format: `projects/*/locations/*/apis/*/versions/*`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
    public com.google.protobuf.ByteString getParentBytes() {
      java.lang.Object ref = parent_;
      if (ref instanceof String) {
        // Cache the UTF-8 encoding the first time bytes are requested.
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        parent_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
/**
*
*
* <pre>
* Required. The parent, which owns this collection of specs.
* Format: `projects/*/locations/*/apis/*/versions/*`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
    public Builder setParent(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      parent_ = value;
      // Record the has-bit so buildPartial0 copies this field.
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* Required. The parent, which owns this collection of specs.
* Format: `projects/*/locations/*/apis/*/versions/*`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
    public Builder clearParent() {
      // Restore the default value and drop the has-bit.
      parent_ = getDefaultInstance().getParent();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* Required. The parent, which owns this collection of specs.
* Format: `projects/*/locations/*/apis/*/versions/*`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
*/
    public Builder setParentBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      // Rejects byte sequences that are not valid UTF-8.
      checkByteStringIsUtf8(value);
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
private int pageSize_;
/**
*
*
* <pre>
* The maximum number of specs to return.
* The service may return fewer than this value.
* If unspecified, at most 50 values will be returned.
* The maximum is 1000; values above 1000 will be coerced to 1000.
* </pre>
*
* <code>int32 page_size = 2;</code>
*
* @return The pageSize.
*/
    @java.lang.Override
    public int getPageSize() {
      return pageSize_;  // 0 until setPageSize() is called
    }
/**
*
*
* <pre>
* The maximum number of specs to return.
* The service may return fewer than this value.
* If unspecified, at most 50 values will be returned.
* The maximum is 1000; values above 1000 will be coerced to 1000.
* </pre>
*
* <code>int32 page_size = 2;</code>
*
* @param value The pageSize to set.
* @return This builder for chaining.
*/
    public Builder setPageSize(int value) {
      pageSize_ = value;
      // Record the has-bit so buildPartial0 copies this field.
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* The maximum number of specs to return.
* The service may return fewer than this value.
* If unspecified, at most 50 values will be returned.
* The maximum is 1000; values above 1000 will be coerced to 1000.
* </pre>
*
* <code>int32 page_size = 2;</code>
*
* @return This builder for chaining.
*/
    public Builder clearPageSize() {
      // Drop the has-bit and restore the proto3 default (0).
      bitField0_ = (bitField0_ & ~0x00000002);
      pageSize_ = 0;
      onChanged();
      return this;
    }
private java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* A page token, received from a previous `ListApiSpecs` call.
* Provide this to retrieve the subsequent page.
*
* When paginating, all other parameters provided to `ListApiSpecs` must match
* the call that provided the page token.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return The pageToken.
*/
    public java.lang.String getPageToken() {
      java.lang.Object ref = pageToken_;
      if (!(ref instanceof java.lang.String)) {
        // Decode the parsed ByteString lazily and cache the String form.
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        pageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
/**
 * Returns the page token as UTF-8 bytes, lazily encoding the cached String
 * form on first read.
 *
 * <pre>
 * A page token, received from a previous `ListApiSpecs` call.
 * Provide this to retrieve the subsequent page.
 *
 * When paginating, all other parameters provided to `ListApiSpecs` must match
 * the call that provided the page token.
 * </pre>
 *
 * <code>string page_token = 3;</code>
 *
 * @return The bytes for pageToken.
 */
public com.google.protobuf.ByteString getPageTokenBytes() {
    java.lang.Object ref = pageToken_;
    if (!(ref instanceof java.lang.String)) {
        // Already stored as bytes.
        return (com.google.protobuf.ByteString) ref;
    }
    // Encode once and cache the ByteString form.
    com.google.protobuf.ByteString encoded =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    pageToken_ = encoded;
    return encoded;
}
/**
 * Sets the page token for continuing a prior `ListApiSpecs` call.
 *
 * <pre>
 * A page token, received from a previous `ListApiSpecs` call.
 * Provide this to retrieve the subsequent page.
 *
 * When paginating, all other parameters provided to `ListApiSpecs` must match
 * the call that provided the page token.
 * </pre>
 *
 * <code>string page_token = 3;</code>
 *
 * @param value The pageToken to set.
 * @return This builder for chaining.
 */
public Builder setPageToken(java.lang.String value) {
    // Same NullPointerException contract as an explicit null check.
    java.util.Objects.requireNonNull(value);
    pageToken_ = value;
    bitField0_ |= 0x00000004;
    onChanged();
    return this;
}
/**
 * Resets the page token to the message default ("") and clears its presence bit.
 *
 * <pre>
 * A page token, received from a previous `ListApiSpecs` call.
 * Provide this to retrieve the subsequent page.
 *
 * When paginating, all other parameters provided to `ListApiSpecs` must match
 * the call that provided the page token.
 * </pre>
 *
 * <code>string page_token = 3;</code>
 *
 * @return This builder for chaining.
 */
public Builder clearPageToken() {
    // Drop the has-bit first, then restore the default value; the two
    // statements are independent of each other.
    bitField0_ = (bitField0_ & ~0x00000004);
    pageToken_ = getDefaultInstance().getPageToken();
    onChanged();
    return this;
}
/**
 * Sets the page token from a UTF-8 encoded byte string.
 *
 * <pre>
 * A page token, received from a previous `ListApiSpecs` call.
 * Provide this to retrieve the subsequent page.
 *
 * When paginating, all other parameters provided to `ListApiSpecs` must match
 * the call that provided the page token.
 * </pre>
 *
 * <code>string page_token = 3;</code>
 *
 * @param value The bytes for pageToken to set.
 * @return This builder for chaining.
 */
public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
    // Same NullPointerException contract as an explicit null check.
    java.util.Objects.requireNonNull(value);
    // Reject byte sequences that are not valid UTF-8.
    checkByteStringIsUtf8(value);
    pageToken_ = value;
    bitField0_ |= 0x00000004;
    onChanged();
    return this;
}
private java.lang.Object filter_ = "";
/**
 * Returns the CEL filter expression, lazily decoding the cached bytes form on
 * first read.
 *
 * <pre>
 * An expression that can be used to filter the list. Filters use the Common
 * Expression Language and can refer to all message fields except contents.
 * </pre>
 *
 * <code>string filter = 4;</code>
 *
 * @return The filter.
 */
public java.lang.String getFilter() {
    java.lang.Object ref = filter_;
    if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
    }
    // Field currently holds a ByteString: decode once and cache the String.
    com.google.protobuf.ByteString bytes = (com.google.protobuf.ByteString) ref;
    java.lang.String decoded = bytes.toStringUtf8();
    filter_ = decoded;
    return decoded;
}
/**
 * Returns the filter expression as UTF-8 bytes, lazily encoding the cached
 * String form on first read.
 *
 * <pre>
 * An expression that can be used to filter the list. Filters use the Common
 * Expression Language and can refer to all message fields except contents.
 * </pre>
 *
 * <code>string filter = 4;</code>
 *
 * @return The bytes for filter.
 */
public com.google.protobuf.ByteString getFilterBytes() {
    java.lang.Object ref = filter_;
    if (!(ref instanceof java.lang.String)) {
        // Already stored as bytes.
        return (com.google.protobuf.ByteString) ref;
    }
    // Encode once and cache the ByteString form.
    com.google.protobuf.ByteString encoded =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    filter_ = encoded;
    return encoded;
}
/**
 * Sets the CEL filter expression for the list request.
 *
 * <pre>
 * An expression that can be used to filter the list. Filters use the Common
 * Expression Language and can refer to all message fields except contents.
 * </pre>
 *
 * <code>string filter = 4;</code>
 *
 * @param value The filter to set.
 * @return This builder for chaining.
 */
public Builder setFilter(java.lang.String value) {
    // Same NullPointerException contract as an explicit null check.
    java.util.Objects.requireNonNull(value);
    filter_ = value;
    bitField0_ |= 0x00000008;
    onChanged();
    return this;
}
/**
 * Resets the filter to the message default ("") and clears its presence bit.
 *
 * <pre>
 * An expression that can be used to filter the list. Filters use the Common
 * Expression Language and can refer to all message fields except contents.
 * </pre>
 *
 * <code>string filter = 4;</code>
 *
 * @return This builder for chaining.
 */
public Builder clearFilter() {
    // Drop the has-bit first, then restore the default value; the two
    // statements are independent of each other.
    bitField0_ = (bitField0_ & ~0x00000008);
    filter_ = getDefaultInstance().getFilter();
    onChanged();
    return this;
}
/**
 * Sets the filter expression from a UTF-8 encoded byte string.
 *
 * <pre>
 * An expression that can be used to filter the list. Filters use the Common
 * Expression Language and can refer to all message fields except contents.
 * </pre>
 *
 * <code>string filter = 4;</code>
 *
 * @param value The bytes for filter to set.
 * @return This builder for chaining.
 */
public Builder setFilterBytes(com.google.protobuf.ByteString value) {
    // Same NullPointerException contract as an explicit null check.
    java.util.Objects.requireNonNull(value);
    // Reject byte sequences that are not valid UTF-8.
    checkByteStringIsUtf8(value);
    filter_ = value;
    bitField0_ |= 0x00000008;
    onChanged();
    return this;
}
// Final pass-through overrides: unknown-field handling is delegated entirely
// to the generated message base class.
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.apigeeregistry.v1.ListApiSpecsRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.apigeeregistry.v1.ListApiSpecsRequest)
// Singleton default (all-fields-unset) instance shared by all callers.
private static final com.google.cloud.apigeeregistry.v1.ListApiSpecsRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.apigeeregistry.v1.ListApiSpecsRequest();
}
// Returns the shared default instance of this message.
public static com.google.cloud.apigeeregistry.v1.ListApiSpecsRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Stateless wire-format parser. Parsing is delegated to Builder.mergeFrom;
// every failure mode is converted to InvalidProtocolBufferException carrying
// the partially built message so callers can inspect what was read.
private static final com.google.protobuf.Parser<ListApiSpecsRequest> PARSER =
new com.google.protobuf.AbstractParser<ListApiSpecsRequest>() {
@java.lang.Override
public ListApiSpecsRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
// Attach the partial message before propagating the original exception.
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
// Wrap plain I/O failures in the protobuf-specific exception type.
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
// Static accessor for the shared parser instance.
public static com.google.protobuf.Parser<ListApiSpecsRequest> parser() {
return PARSER;
}
// Instance-level accessor required by the MessageLite contract; returns the
// same shared parser.
@java.lang.Override
public com.google.protobuf.Parser<ListApiSpecsRequest> getParserForType() {
return PARSER;
}
// Instance-level accessor for the shared default instance.
@java.lang.Override
public com.google.cloud.apigeeregistry.v1.ListApiSpecsRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
oracle/graal | 36,447 | substratevm/src/com.oracle.svm.core/src/com/oracle/svm/core/jni/headers/JNINativeInterface.java | /*
* Copyright (c) 2017, 2020, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package com.oracle.svm.core.jni.headers;
import org.graalvm.nativeimage.c.CContext;
import org.graalvm.nativeimage.c.function.CFunctionPointer;
import org.graalvm.nativeimage.c.struct.CField;
import org.graalvm.nativeimage.c.struct.CStruct;
import org.graalvm.nativeimage.c.type.WordPointer;
import org.graalvm.word.PointerBase;
import com.oracle.svm.core.jni.headers.JNIFunctionPointerTypes.AllocObjectFunctionPointer;
import com.oracle.svm.core.jni.headers.JNIFunctionPointerTypes.CallBooleanMethodAFunctionPointer;
import com.oracle.svm.core.jni.headers.JNIFunctionPointerTypes.CallIntMethodAFunctionPointer;
import com.oracle.svm.core.jni.headers.JNIFunctionPointerTypes.CallLongMethodAFunctionPointer;
import com.oracle.svm.core.jni.headers.JNIFunctionPointerTypes.CallObjectMethodAFunctionPointer;
import com.oracle.svm.core.jni.headers.JNIFunctionPointerTypes.CallStaticLongMethodAFunctionPointer;
import com.oracle.svm.core.jni.headers.JNIFunctionPointerTypes.CallStaticObjectMethodAFunctionPointer;
import com.oracle.svm.core.jni.headers.JNIFunctionPointerTypes.CallVoidMethodAFunctionPointer;
import com.oracle.svm.core.jni.headers.JNIFunctionPointerTypes.DefineClassFunctionPointer;
import com.oracle.svm.core.jni.headers.JNIFunctionPointerTypes.DeleteGlobalRefFunctionPointer;
import com.oracle.svm.core.jni.headers.JNIFunctionPointerTypes.ExceptionCheckFunctionPointer;
import com.oracle.svm.core.jni.headers.JNIFunctionPointerTypes.ExceptionOccurredFunctionPointer;
import com.oracle.svm.core.jni.headers.JNIFunctionPointerTypes.ExceptionVoidFunctionPointer;
import com.oracle.svm.core.jni.headers.JNIFunctionPointerTypes.FindClassFunctionPointer;
import com.oracle.svm.core.jni.headers.JNIFunctionPointerTypes.FromReflectedFieldFunctionPointer;
import com.oracle.svm.core.jni.headers.JNIFunctionPointerTypes.FromReflectedMethodFunctionPointer;
import com.oracle.svm.core.jni.headers.JNIFunctionPointerTypes.GetArrayLengthFunctionPointer;
import com.oracle.svm.core.jni.headers.JNIFunctionPointerTypes.GetBooleanFieldFunctionPointer;
import com.oracle.svm.core.jni.headers.JNIFunctionPointerTypes.GetByteArrayElementsFunctionPointer;
import com.oracle.svm.core.jni.headers.JNIFunctionPointerTypes.GetFieldIDFunctionPointer;
import com.oracle.svm.core.jni.headers.JNIFunctionPointerTypes.GetIntFieldFunctionPointer;
import com.oracle.svm.core.jni.headers.JNIFunctionPointerTypes.GetMethodIDFunctionPointer;
import com.oracle.svm.core.jni.headers.JNIFunctionPointerTypes.GetObjectArrayElementFunctionPointer;
import com.oracle.svm.core.jni.headers.JNIFunctionPointerTypes.GetObjectClassFunctionPointer;
import com.oracle.svm.core.jni.headers.JNIFunctionPointerTypes.GetObjectFieldFunctionPointer;
import com.oracle.svm.core.jni.headers.JNIFunctionPointerTypes.GetStaticIntFieldFunctionPointer;
import com.oracle.svm.core.jni.headers.JNIFunctionPointerTypes.GetStaticObjectFieldFunctionPointer;
import com.oracle.svm.core.jni.headers.JNIFunctionPointerTypes.GetStringUTFCharsFunctionPointer;
import com.oracle.svm.core.jni.headers.JNIFunctionPointerTypes.GetSuperclassFunctionPointer;
import com.oracle.svm.core.jni.headers.JNIFunctionPointerTypes.IsAssignableFromFunctionPointer;
import com.oracle.svm.core.jni.headers.JNIFunctionPointerTypes.IsInstanceOfFunctionPointer;
import com.oracle.svm.core.jni.headers.JNIFunctionPointerTypes.IsSameObjectFunctionPointer;
import com.oracle.svm.core.jni.headers.JNIFunctionPointerTypes.MonitorEnterExitFunctionPointer;
import com.oracle.svm.core.jni.headers.JNIFunctionPointerTypes.NewByteArrayFunctionPointer;
import com.oracle.svm.core.jni.headers.JNIFunctionPointerTypes.NewGlobalRefFunctionPointer;
import com.oracle.svm.core.jni.headers.JNIFunctionPointerTypes.NewObjectAFunctionPointer;
import com.oracle.svm.core.jni.headers.JNIFunctionPointerTypes.NewObjectArrayFunctionPointer;
import com.oracle.svm.core.jni.headers.JNIFunctionPointerTypes.NewStringUTFFunctionPointer;
import com.oracle.svm.core.jni.headers.JNIFunctionPointerTypes.RegisterNativesFunctionPointer;
import com.oracle.svm.core.jni.headers.JNIFunctionPointerTypes.ReleaseByteArrayElementsFunctionPointer;
import com.oracle.svm.core.jni.headers.JNIFunctionPointerTypes.ReleaseStringUTFCharsFunctionPointer;
import com.oracle.svm.core.jni.headers.JNIFunctionPointerTypes.SetObjectArrayElementFunctionPointer;
import com.oracle.svm.core.jni.headers.JNIFunctionPointerTypes.ThrowFunctionPointer;
import com.oracle.svm.core.jni.headers.JNIFunctionPointerTypes.ThrowNewFunctionPointer;
import com.oracle.svm.core.jni.headers.JNIFunctionPointerTypes.ToReflectedFieldFunctionPointer;
import com.oracle.svm.core.jni.headers.JNIFunctionPointerTypes.ToReflectedMethodFunctionPointer;
@CContext(JNIHeaderDirectives.class)
@CStruct(value = "JNINativeInterface_", addStructKeyword = true)
public interface JNINativeInterface extends PointerBase {
@CField
WordPointer reserved0();
@CField
WordPointer reserved1();
@CField
WordPointer reserved2();
@CField
WordPointer reserved3();
@CField
CFunctionPointer getGetVersion();
@CField
void setGetVersion(CFunctionPointer p);
@CField
DefineClassFunctionPointer getDefineClass();
@CField
void setDefineClass(DefineClassFunctionPointer p);
@CField
FindClassFunctionPointer getFindClass();
@CField
void setFindClass(FindClassFunctionPointer p);
@CField
FromReflectedMethodFunctionPointer getFromReflectedMethod();
@CField
void setFromReflectedMethod(FromReflectedMethodFunctionPointer p);
@CField
FromReflectedFieldFunctionPointer getFromReflectedField();
@CField
void setFromReflectedField(FromReflectedFieldFunctionPointer p);
@CField
ToReflectedMethodFunctionPointer getToReflectedMethod();
@CField
void setToReflectedMethod(ToReflectedMethodFunctionPointer p);
@CField
GetSuperclassFunctionPointer getGetSuperclass();
@CField
void setGetSuperclass(GetSuperclassFunctionPointer p);
@CField
IsAssignableFromFunctionPointer getIsAssignableFrom();
@CField
void setIsAssignableFrom(IsAssignableFromFunctionPointer p);
@CField
ToReflectedFieldFunctionPointer getToReflectedField();
@CField
void setToReflectedField(ToReflectedFieldFunctionPointer p);
@CField
ThrowFunctionPointer getThrow();
@CField
void setThrow(ThrowFunctionPointer p);
@CField
ThrowNewFunctionPointer getThrowNew();
@CField
void setThrowNew(ThrowNewFunctionPointer p);
@CField
ExceptionOccurredFunctionPointer getExceptionOccurred();
@CField
void setExceptionOccurred(ExceptionOccurredFunctionPointer p);
@CField
ExceptionVoidFunctionPointer getExceptionDescribe();
@CField
void setExceptionDescribe(ExceptionVoidFunctionPointer p);
@CField
ExceptionVoidFunctionPointer getExceptionClear();
@CField
void setExceptionClear(ExceptionVoidFunctionPointer p);
@CField
CFunctionPointer getFatalError();
@CField
void setFatalError(CFunctionPointer p);
@CField
CFunctionPointer getPushLocalFrame();
@CField
void setPushLocalFrame(CFunctionPointer p);
@CField
CFunctionPointer getPopLocalFrame();
@CField
void setPopLocalFrame(CFunctionPointer p);
@CField
NewGlobalRefFunctionPointer getNewGlobalRef();
@CField
void setNewGlobalRef(NewGlobalRefFunctionPointer p);
@CField
DeleteGlobalRefFunctionPointer getDeleteGlobalRef();
@CField
void setDeleteGlobalRef(DeleteGlobalRefFunctionPointer p);
@CField
CFunctionPointer getDeleteLocalRef();
@CField
void setDeleteLocalRef(CFunctionPointer p);
@CField
IsSameObjectFunctionPointer getIsSameObject();
@CField
void setIsSameObject(IsSameObjectFunctionPointer p);
@CField
CFunctionPointer getNewLocalRef();
@CField
void setNewLocalRef(CFunctionPointer p);
@CField
CFunctionPointer getEnsureLocalCapacity();
@CField
void setEnsureLocalCapacity(CFunctionPointer p);
@CField
AllocObjectFunctionPointer getAllocObject();
@CField
void setAllocObject(CFunctionPointer p);
@CField
CFunctionPointer getNewObject();
@CField
void setNewObject(CFunctionPointer p);
@CField
CFunctionPointer getNewObjectV();
@CField
void setNewObjectV(CFunctionPointer p);
@CField
NewObjectAFunctionPointer getNewObjectA();
@CField
void setNewObjectA(NewObjectAFunctionPointer p);
@CField
GetObjectClassFunctionPointer getGetObjectClass();
@CField
void setGetObjectClass(GetObjectClassFunctionPointer p);
@CField
IsInstanceOfFunctionPointer getIsInstanceOf();
@CField
void setIsInstanceOf(CFunctionPointer p);
@CField
GetMethodIDFunctionPointer getGetMethodID();
@CField
void setGetMethodID(GetMethodIDFunctionPointer p);
@CField
CFunctionPointer getCallObjectMethod();
@CField
void setCallObjectMethod(CFunctionPointer p);
@CField
CFunctionPointer getCallObjectMethodV();
@CField
void setCallObjectMethodV(CFunctionPointer p);
@CField
CallObjectMethodAFunctionPointer getCallObjectMethodA();
@CField
void setCallObjectMethodA(CallObjectMethodAFunctionPointer p);
@CField
CFunctionPointer getCallBooleanMethod();
@CField
void setCallBooleanMethod(CFunctionPointer p);
@CField
CFunctionPointer getCallBooleanMethodV();
@CField
void setCallBooleanMethodV(CFunctionPointer p);
@CField
CallBooleanMethodAFunctionPointer getCallBooleanMethodA();
@CField
void setCallBooleanMethodA(CallBooleanMethodAFunctionPointer p);
@CField
CFunctionPointer getCallByteMethod();
@CField
void setCallByteMethod(CFunctionPointer p);
@CField
CFunctionPointer getCallByteMethodV();
@CField
void setCallByteMethodV(CFunctionPointer p);
@CField
CFunctionPointer getCallByteMethodA();
@CField
void setCallByteMethodA(CFunctionPointer p);
@CField
CFunctionPointer getCallCharMethod();
@CField
void setCallCharMethod(CFunctionPointer p);
@CField
CFunctionPointer getCallCharMethodV();
@CField
void setCallCharMethodV(CFunctionPointer p);
@CField
CFunctionPointer getCallCharMethodA();
@CField
void setCallCharMethodA(CFunctionPointer p);
@CField
CFunctionPointer getCallShortMethod();
@CField
void setCallShortMethod(CFunctionPointer p);
@CField
CFunctionPointer getCallShortMethodV();
@CField
void setCallShortMethodV(CFunctionPointer p);
@CField
CFunctionPointer getCallShortMethodA();
@CField
void setCallShortMethodA(CFunctionPointer p);
@CField
CFunctionPointer getCallIntMethod();
@CField
void setCallIntMethod(CFunctionPointer p);
@CField
CFunctionPointer getCallIntMethodV();
@CField
void setCallIntMethodV(CFunctionPointer p);
@CField
CallIntMethodAFunctionPointer getCallIntMethodA();
@CField
void setCallIntMethodA(CallIntMethodAFunctionPointer p);
@CField
CFunctionPointer getCallLongMethod();
@CField
void setCallLongMethod(CFunctionPointer p);
@CField
CFunctionPointer getCallLongMethodV();
@CField
void setCallLongMethodV(CFunctionPointer p);
@CField
CallLongMethodAFunctionPointer getCallLongMethodA();
@CField
void setCallLongMethodA(CallLongMethodAFunctionPointer p);
@CField
CFunctionPointer getCallFloatMethod();
@CField
void setCallFloatMethod(CFunctionPointer p);
@CField
CFunctionPointer getCallFloatMethodV();
@CField
void setCallFloatMethodV(CFunctionPointer p);
@CField
CFunctionPointer getCallFloatMethodA();
@CField
void setCallFloatMethodA(CFunctionPointer p);
@CField
CFunctionPointer getCallDoubleMethod();
@CField
void setCallDoubleMethod(CFunctionPointer p);
@CField
CFunctionPointer getCallDoubleMethodV();
@CField
void setCallDoubleMethodV(CFunctionPointer p);
@CField
CFunctionPointer getCallDoubleMethodA();
@CField
void setCallDoubleMethodA(CFunctionPointer p);
@CField
CFunctionPointer getCallVoidMethod();
@CField
void setCallVoidMethod(CFunctionPointer p);
@CField
CFunctionPointer getCallVoidMethodV();
@CField
void setCallVoidMethodV(CFunctionPointer p);
@CField
CallVoidMethodAFunctionPointer getCallVoidMethodA();
@CField
void setCallVoidMethodA(CallVoidMethodAFunctionPointer p);
@CField
CFunctionPointer getCallNonvirtualObjectMethod();
@CField
void setCallNonvirtualObjectMethod(CFunctionPointer p);
@CField
CFunctionPointer getCallNonvirtualObjectMethodV();
@CField
void setCallNonvirtualObjectMethodV(CFunctionPointer p);
@CField
CFunctionPointer getCallNonvirtualObjectMethodA();
@CField
void setCallNonvirtualObjectMethodA(CFunctionPointer p);
@CField
CFunctionPointer getCallNonvirtualBooleanMethod();
@CField
void setCallNonvirtualBooleanMethod(CFunctionPointer p);
@CField
CFunctionPointer getCallNonvirtualBooleanMethodV();
@CField
void setCallNonvirtualBooleanMethodV(CFunctionPointer p);
@CField
CFunctionPointer getCallNonvirtualBooleanMethodA();
@CField
void setCallNonvirtualBooleanMethodA(CFunctionPointer p);
@CField
CFunctionPointer getCallNonvirtualByteMethod();
@CField
void setCallNonvirtualByteMethod(CFunctionPointer p);
@CField
CFunctionPointer getCallNonvirtualByteMethodV();
@CField
void setCallNonvirtualByteMethodV(CFunctionPointer p);
@CField
CFunctionPointer getCallNonvirtualByteMethodA();
@CField
void setCallNonvirtualByteMethodA(CFunctionPointer p);
@CField
CFunctionPointer getCallNonvirtualCharMethod();
@CField
void setCallNonvirtualCharMethod(CFunctionPointer p);
@CField
CFunctionPointer getCallNonvirtualCharMethodV();
@CField
void setCallNonvirtualCharMethodV(CFunctionPointer p);
@CField
CFunctionPointer getCallNonvirtualCharMethodA();
@CField
void setCallNonvirtualCharMethodA(CFunctionPointer p);
@CField
CFunctionPointer getCallNonvirtualShortMethod();
@CField
void setCallNonvirtualShortMethod(CFunctionPointer p);
@CField
CFunctionPointer getCallNonvirtualShortMethodV();
@CField
void setCallNonvirtualShortMethodV(CFunctionPointer p);
@CField
CFunctionPointer getCallNonvirtualShortMethodA();
@CField
void setCallNonvirtualShortMethodA(CFunctionPointer p);
@CField
CFunctionPointer getCallNonvirtualIntMethod();
@CField
void setCallNonvirtualIntMethod(CFunctionPointer p);
@CField
CFunctionPointer getCallNonvirtualIntMethodV();
@CField
void setCallNonvirtualIntMethodV(CFunctionPointer p);
@CField
CFunctionPointer getCallNonvirtualIntMethodA();
@CField
void setCallNonvirtualIntMethodA(CFunctionPointer p);
@CField
CFunctionPointer getCallNonvirtualLongMethod();
@CField
void setCallNonvirtualLongMethod(CFunctionPointer p);
@CField
CFunctionPointer getCallNonvirtualLongMethodV();
@CField
void setCallNonvirtualLongMethodV(CFunctionPointer p);
@CField
CFunctionPointer getCallNonvirtualLongMethodA();
@CField
void setCallNonvirtualLongMethodA(CFunctionPointer p);
@CField
CFunctionPointer getCallNonvirtualFloatMethod();
@CField
void setCallNonvirtualFloatMethod(CFunctionPointer p);
@CField
CFunctionPointer getCallNonvirtualFloatMethodV();
@CField
void setCallNonvirtualFloatMethodV(CFunctionPointer p);
@CField
CFunctionPointer getCallNonvirtualFloatMethodA();
@CField
void setCallNonvirtualFloatMethodA(CFunctionPointer p);
@CField
CFunctionPointer getCallNonvirtualDoubleMethod();
@CField
void setCallNonvirtualDoubleMethod(CFunctionPointer p);
@CField
CFunctionPointer getCallNonvirtualDoubleMethodV();
@CField
void setCallNonvirtualDoubleMethodV(CFunctionPointer p);
@CField
CFunctionPointer getCallNonvirtualDoubleMethodA();
@CField
void setCallNonvirtualDoubleMethodA(CFunctionPointer p);
@CField
CFunctionPointer getCallNonvirtualVoidMethod();
@CField
void setCallNonvirtualVoidMethod(CFunctionPointer p);
@CField
CFunctionPointer getCallNonvirtualVoidMethodV();
@CField
void setCallNonvirtualVoidMethodV(CFunctionPointer p);
@CField
CFunctionPointer getCallNonvirtualVoidMethodA();
@CField
void setCallNonvirtualVoidMethodA(CFunctionPointer p);
@CField
GetFieldIDFunctionPointer getGetFieldID();
@CField
void setGetFieldID(GetFieldIDFunctionPointer p);
@CField
GetObjectFieldFunctionPointer getGetObjectField();
@CField
void setGetObjectField(GetObjectFieldFunctionPointer p);
@CField
GetBooleanFieldFunctionPointer getGetBooleanField();
@CField
void setGetBooleanField(GetBooleanFieldFunctionPointer p);
@CField
CFunctionPointer getGetByteField();
@CField
void setGetByteField(CFunctionPointer p);
@CField
CFunctionPointer getGetCharField();
@CField
void setGetCharField(CFunctionPointer p);
@CField
CFunctionPointer getGetShortField();
@CField
void setGetShortField(CFunctionPointer p);
@CField
GetIntFieldFunctionPointer getGetIntField();
@CField
void setGetIntField(GetIntFieldFunctionPointer p);
@CField
CFunctionPointer getGetLongField();
@CField
void setGetLongField(CFunctionPointer p);
@CField
CFunctionPointer getGetFloatField();
@CField
void setGetFloatField(CFunctionPointer p);
@CField
CFunctionPointer getGetDoubleField();
@CField
void setGetDoubleField(CFunctionPointer p);
@CField
CFunctionPointer getSetObjectField();
@CField
void setSetObjectField(CFunctionPointer p);
@CField
CFunctionPointer getSetBooleanField();
@CField
void setSetBooleanField(CFunctionPointer p);
@CField
CFunctionPointer getSetByteField();
@CField
void setSetByteField(CFunctionPointer p);
@CField
CFunctionPointer getSetCharField();
@CField
void setSetCharField(CFunctionPointer p);
@CField
CFunctionPointer getSetShortField();
@CField
void setSetShortField(CFunctionPointer p);
@CField
CFunctionPointer getSetIntField();
@CField
void setSetIntField(CFunctionPointer p);
@CField
CFunctionPointer getSetLongField();
@CField
void setSetLongField(CFunctionPointer p);
@CField
CFunctionPointer getSetFloatField();
@CField
void setSetFloatField(CFunctionPointer p);
@CField
CFunctionPointer getSetDoubleField();
@CField
void setSetDoubleField(CFunctionPointer p);
@CField
GetMethodIDFunctionPointer getGetStaticMethodID();
@CField
void setGetStaticMethodID(GetMethodIDFunctionPointer p);
@CField
CFunctionPointer getCallStaticObjectMethod();
@CField
void setCallStaticObjectMethod(CFunctionPointer p);
@CField
CFunctionPointer getCallStaticObjectMethodV();
@CField
void setCallStaticObjectMethodV(CFunctionPointer p);
@CField
CallStaticObjectMethodAFunctionPointer getCallStaticObjectMethodA();
@CField
void setCallStaticObjectMethodA(CallStaticObjectMethodAFunctionPointer p);
@CField
CFunctionPointer getCallStaticBooleanMethod();
@CField
void setCallStaticBooleanMethod(CFunctionPointer p);
@CField
CFunctionPointer getCallStaticBooleanMethodV();
@CField
void setCallStaticBooleanMethodV(CFunctionPointer p);
@CField
CallBooleanMethodAFunctionPointer getCallStaticBooleanMethodA();
@CField
void setCallStaticBooleanMethodA(CallBooleanMethodAFunctionPointer p);
@CField
CFunctionPointer getCallStaticByteMethod();
@CField
void setCallStaticByteMethod(CFunctionPointer p);
@CField
CFunctionPointer getCallStaticByteMethodV();
@CField
void setCallStaticByteMethodV(CFunctionPointer p);
@CField
CFunctionPointer getCallStaticByteMethodA();
@CField
void setCallStaticByteMethodA(CFunctionPointer p);
@CField
CFunctionPointer getCallStaticCharMethod();
@CField
void setCallStaticCharMethod(CFunctionPointer p);
@CField
CFunctionPointer getCallStaticCharMethodV();
@CField
void setCallStaticCharMethodV(CFunctionPointer p);
@CField
CFunctionPointer getCallStaticCharMethodA();
@CField
void setCallStaticCharMethodA(CFunctionPointer p);
@CField
CFunctionPointer getCallStaticShortMethod();
@CField
void setCallStaticShortMethod(CFunctionPointer p);
@CField
CFunctionPointer getCallStaticShortMethodV();
@CField
void setCallStaticShortMethodV(CFunctionPointer p);
@CField
CFunctionPointer getCallStaticShortMethodA();
@CField
void setCallStaticShortMethodA(CFunctionPointer p);
@CField
CFunctionPointer getCallStaticIntMethod();
@CField
void setCallStaticIntMethod(CFunctionPointer p);
@CField
CFunctionPointer getCallStaticIntMethodV();
@CField
void setCallStaticIntMethodV(CFunctionPointer p);
@CField
CFunctionPointer getCallStaticIntMethodA();
@CField
void setCallStaticIntMethodA(CFunctionPointer p);
@CField
CFunctionPointer getCallStaticLongMethod();
@CField
void setCallStaticLongMethod(CFunctionPointer p);
@CField
CFunctionPointer getCallStaticLongMethodV();
@CField
void setCallStaticLongMethodV(CFunctionPointer p);
@CField
CallStaticLongMethodAFunctionPointer getCallStaticLongMethodA();
@CField
void setCallStaticLongMethodA(CallStaticLongMethodAFunctionPointer p);
@CField
CFunctionPointer getCallStaticFloatMethod();
@CField
void setCallStaticFloatMethod(CFunctionPointer p);
@CField
CFunctionPointer getCallStaticFloatMethodV();
@CField
void setCallStaticFloatMethodV(CFunctionPointer p);
@CField
CFunctionPointer getCallStaticFloatMethodA();
@CField
void setCallStaticFloatMethodA(CFunctionPointer p);
@CField
CFunctionPointer getCallStaticDoubleMethod();
@CField
void setCallStaticDoubleMethod(CFunctionPointer p);
@CField
CFunctionPointer getCallStaticDoubleMethodV();
@CField
void setCallStaticDoubleMethodV(CFunctionPointer p);
@CField
CFunctionPointer getCallStaticDoubleMethodA();
@CField
void setCallStaticDoubleMethodA(CFunctionPointer p);
@CField
CFunctionPointer getCallStaticVoidMethod();
@CField
void setCallStaticVoidMethod(CFunctionPointer p);
@CField
CFunctionPointer getCallStaticVoidMethodV();
@CField
void setCallStaticVoidMethodV(CFunctionPointer p);
@CField
CallVoidMethodAFunctionPointer getCallStaticVoidMethodA();
@CField
void setCallStaticVoidMethodA(CallVoidMethodAFunctionPointer p);
@CField
GetFieldIDFunctionPointer getGetStaticFieldID();
@CField
void setGetStaticFieldID(GetFieldIDFunctionPointer p);
@CField
GetStaticObjectFieldFunctionPointer getGetStaticObjectField();
@CField
void setGetStaticObjectField(CFunctionPointer p);
@CField
CFunctionPointer getGetStaticBooleanField();
@CField
void setGetStaticBooleanField(CFunctionPointer p);
@CField
CFunctionPointer getGetStaticByteField();
@CField
void setGetStaticByteField(CFunctionPointer p);
@CField
CFunctionPointer getGetStaticCharField();
@CField
void setGetStaticCharField(CFunctionPointer p);
@CField
CFunctionPointer getGetStaticShortField();
@CField
void setGetStaticShortField(CFunctionPointer p);
@CField
GetStaticIntFieldFunctionPointer getGetStaticIntField();
@CField
void setGetStaticIntField(GetStaticIntFieldFunctionPointer p);
@CField
CFunctionPointer getGetStaticLongField();
@CField
void setGetStaticLongField(CFunctionPointer p);
@CField
CFunctionPointer getGetStaticFloatField();
@CField
void setGetStaticFloatField(CFunctionPointer p);
@CField
CFunctionPointer getGetStaticDoubleField();
@CField
void setGetStaticDoubleField(CFunctionPointer p);
@CField
CFunctionPointer getSetStaticObjectField();
@CField
void setSetStaticObjectField(CFunctionPointer p);
@CField
CFunctionPointer getSetStaticBooleanField();
@CField
void setSetStaticBooleanField(CFunctionPointer p);
@CField
CFunctionPointer getSetStaticByteField();
@CField
void setSetStaticByteField(CFunctionPointer p);
@CField
CFunctionPointer getSetStaticCharField();
@CField
void setSetStaticCharField(CFunctionPointer p);
@CField
CFunctionPointer getSetStaticShortField();
@CField
void setSetStaticShortField(CFunctionPointer p);
@CField
CFunctionPointer getSetStaticIntField();
@CField
void setSetStaticIntField(CFunctionPointer p);
@CField
CFunctionPointer getSetStaticLongField();
@CField
void setSetStaticLongField(CFunctionPointer p);
@CField
CFunctionPointer getSetStaticFloatField();
@CField
void setSetStaticFloatField(CFunctionPointer p);
@CField
CFunctionPointer getSetStaticDoubleField();
@CField
void setSetStaticDoubleField(CFunctionPointer p);
@CField
CFunctionPointer getNewString();
@CField
void setNewString(CFunctionPointer p);
@CField
CFunctionPointer getGetStringLength();
@CField
void setGetStringLength(CFunctionPointer p);
@CField
CFunctionPointer getGetStringChars();
@CField
void setGetStringChars(CFunctionPointer p);
@CField
CFunctionPointer getReleaseStringChars();
@CField
void setReleaseStringChars(CFunctionPointer p);
@CField
NewStringUTFFunctionPointer getNewStringUTF();
@CField
void setNewStringUTF(NewStringUTFFunctionPointer p);
@CField
CFunctionPointer getGetStringUTFLength();
@CField
void setGetStringUTFLength(CFunctionPointer p);
@CField
GetStringUTFCharsFunctionPointer getGetStringUTFChars();
@CField
void setGetStringUTFChars(GetStringUTFCharsFunctionPointer p);
@CField
ReleaseStringUTFCharsFunctionPointer getReleaseStringUTFChars();
@CField
void setReleaseStringUTFChars(ReleaseStringUTFCharsFunctionPointer p);
// Array functions: length, creation, and element-buffer access.
// Field names mirror the JNI JNIEnv function table entries of the same name.
@CField
GetArrayLengthFunctionPointer getGetArrayLength();
@CField
void setGetArrayLength(GetArrayLengthFunctionPointer p);
@CField
NewObjectArrayFunctionPointer getNewObjectArray();
@CField
void setNewObjectArray(NewObjectArrayFunctionPointer p);
@CField
GetObjectArrayElementFunctionPointer getGetObjectArrayElement();
@CField
void setGetObjectArrayElement(GetObjectArrayElementFunctionPointer p);
@CField
SetObjectArrayElementFunctionPointer getSetObjectArrayElement();
@CField
void setSetObjectArrayElement(SetObjectArrayElementFunctionPointer p);

// New<PrimitiveType>Array slots.
@CField
CFunctionPointer getNewBooleanArray();
@CField
void setNewBooleanArray(CFunctionPointer p);
@CField
NewByteArrayFunctionPointer getNewByteArray();
@CField
void setNewByteArray(NewByteArrayFunctionPointer p);
@CField
CFunctionPointer getNewCharArray();
@CField
void setNewCharArray(CFunctionPointer p);
@CField
CFunctionPointer getNewShortArray();
@CField
void setNewShortArray(CFunctionPointer p);
@CField
CFunctionPointer getNewIntArray();
@CField
void setNewIntArray(CFunctionPointer p);
@CField
CFunctionPointer getNewLongArray();
@CField
void setNewLongArray(CFunctionPointer p);
@CField
CFunctionPointer getNewFloatArray();
@CField
void setNewFloatArray(CFunctionPointer p);
@CField
CFunctionPointer getNewDoubleArray();
@CField
void setNewDoubleArray(CFunctionPointer p);

// Get<PrimitiveType>ArrayElements slots.
@CField
CFunctionPointer getGetBooleanArrayElements();
@CField
void setGetBooleanArrayElements(CFunctionPointer p);
@CField
GetByteArrayElementsFunctionPointer getGetByteArrayElements();
@CField
void setGetByteArrayElements(GetByteArrayElementsFunctionPointer p);
@CField
CFunctionPointer getGetCharArrayElements();
@CField
void setGetCharArrayElements(CFunctionPointer p);
@CField
CFunctionPointer getGetShortArrayElements();
@CField
void setGetShortArrayElements(CFunctionPointer p);
@CField
CFunctionPointer getGetIntArrayElements();
@CField
void setGetIntArrayElements(CFunctionPointer p);
@CField
CFunctionPointer getGetLongArrayElements();
@CField
void setGetLongArrayElements(CFunctionPointer p);
@CField
CFunctionPointer getGetFloatArrayElements();
@CField
void setGetFloatArrayElements(CFunctionPointer p);
@CField
CFunctionPointer getGetDoubleArrayElements();
@CField
void setGetDoubleArrayElements(CFunctionPointer p);

// Release<PrimitiveType>ArrayElements slots.
@CField
CFunctionPointer getReleaseBooleanArrayElements();
@CField
void setReleaseBooleanArrayElements(CFunctionPointer p);
@CField
ReleaseByteArrayElementsFunctionPointer getReleaseByteArrayElements();
@CField
void setReleaseByteArrayElements(ReleaseByteArrayElementsFunctionPointer p);
@CField
CFunctionPointer getReleaseCharArrayElements();
@CField
void setReleaseCharArrayElements(CFunctionPointer p);
@CField
CFunctionPointer getReleaseShortArrayElements();
@CField
void setReleaseShortArrayElements(CFunctionPointer p);
@CField
CFunctionPointer getReleaseIntArrayElements();
@CField
void setReleaseIntArrayElements(CFunctionPointer p);
@CField
CFunctionPointer getReleaseLongArrayElements();
@CField
void setReleaseLongArrayElements(CFunctionPointer p);
@CField
CFunctionPointer getReleaseFloatArrayElements();
@CField
void setReleaseFloatArrayElements(CFunctionPointer p);
@CField
CFunctionPointer getReleaseDoubleArrayElements();
@CField
void setReleaseDoubleArrayElements(CFunctionPointer p);
// Get<PrimitiveType>ArrayRegion / Set<PrimitiveType>ArrayRegion slots
// (bulk copies between Java arrays and native buffers).
@CField
CFunctionPointer getGetBooleanArrayRegion();
@CField
void setGetBooleanArrayRegion(CFunctionPointer p);
@CField
CFunctionPointer getGetByteArrayRegion();
@CField
void setGetByteArrayRegion(CFunctionPointer p);
@CField
CFunctionPointer getGetCharArrayRegion();
@CField
void setGetCharArrayRegion(CFunctionPointer p);
@CField
CFunctionPointer getGetShortArrayRegion();
@CField
void setGetShortArrayRegion(CFunctionPointer p);
@CField
CFunctionPointer getGetIntArrayRegion();
@CField
void setGetIntArrayRegion(CFunctionPointer p);
@CField
CFunctionPointer getGetLongArrayRegion();
@CField
void setGetLongArrayRegion(CFunctionPointer p);
@CField
CFunctionPointer getGetFloatArrayRegion();
@CField
void setGetFloatArrayRegion(CFunctionPointer p);
@CField
CFunctionPointer getGetDoubleArrayRegion();
@CField
void setGetDoubleArrayRegion(CFunctionPointer p);
@CField
CFunctionPointer getSetBooleanArrayRegion();
@CField
void setSetBooleanArrayRegion(CFunctionPointer p);
@CField
CFunctionPointer getSetByteArrayRegion();
@CField
void setSetByteArrayRegion(CFunctionPointer p);
@CField
CFunctionPointer getSetCharArrayRegion();
@CField
void setSetCharArrayRegion(CFunctionPointer p);
@CField
CFunctionPointer getSetShortArrayRegion();
@CField
void setSetShortArrayRegion(CFunctionPointer p);
@CField
CFunctionPointer getSetIntArrayRegion();
@CField
void setSetIntArrayRegion(CFunctionPointer p);
@CField
CFunctionPointer getSetLongArrayRegion();
@CField
void setSetLongArrayRegion(CFunctionPointer p);
@CField
CFunctionPointer getSetFloatArrayRegion();
@CField
void setSetFloatArrayRegion(CFunctionPointer p);
@CField
CFunctionPointer getSetDoubleArrayRegion();
@CField
void setSetDoubleArrayRegion(CFunctionPointer p);
// Native-method registration, monitors, VM handle, region/critical accessors,
// weak globals, exception check, direct buffers, and post-1.1 additions.
@CField
RegisterNativesFunctionPointer getRegisterNatives();
@CField
void setRegisterNatives(RegisterNativesFunctionPointer p);
@CField
CFunctionPointer getUnregisterNatives();
@CField
void setUnregisterNatives(CFunctionPointer p);
@CField
MonitorEnterExitFunctionPointer getMonitorEnter();
@CField
void setMonitorEnter(MonitorEnterExitFunctionPointer p);
@CField
MonitorEnterExitFunctionPointer getMonitorExit();
@CField
void setMonitorExit(MonitorEnterExitFunctionPointer p);
@CField
CFunctionPointer getGetJavaVM();
@CField
void setGetJavaVM(CFunctionPointer p);
@CField
CFunctionPointer getGetStringRegion();
@CField
void setGetStringRegion(CFunctionPointer p);
@CField
CFunctionPointer getGetStringUTFRegion();
@CField
void setGetStringUTFRegion(CFunctionPointer p);
@CField
CFunctionPointer getGetPrimitiveArrayCritical();
@CField
void setGetPrimitiveArrayCritical(CFunctionPointer p);
@CField
CFunctionPointer getReleasePrimitiveArrayCritical();
@CField
void setReleasePrimitiveArrayCritical(CFunctionPointer p);
@CField
CFunctionPointer getGetStringCritical();
@CField
void setGetStringCritical(CFunctionPointer p);
@CField
CFunctionPointer getReleaseStringCritical();
@CField
void setReleaseStringCritical(CFunctionPointer p);
@CField
CFunctionPointer getNewWeakGlobalRef();
@CField
void setNewWeakGlobalRef(CFunctionPointer p);
@CField
CFunctionPointer getDeleteWeakGlobalRef();
@CField
void setDeleteWeakGlobalRef(CFunctionPointer p);
@CField
ExceptionCheckFunctionPointer getExceptionCheck();
@CField
void setExceptionCheck(ExceptionCheckFunctionPointer p);
@CField
CFunctionPointer getNewDirectByteBuffer();
@CField
void setNewDirectByteBuffer(CFunctionPointer p);
@CField
CFunctionPointer getGetDirectBufferAddress();
@CField
void setGetDirectBufferAddress(CFunctionPointer p);
@CField
CFunctionPointer getGetDirectBufferCapacity();
@CField
void setGetDirectBufferCapacity(CFunctionPointer p);
@CField
CFunctionPointer getGetObjectRefType();
// JNI 1.6
@CField
void setGetObjectRefType(CFunctionPointer p);
// 9: Module features
// NOTE(review): only a setter is declared for GetModule (no getGetModule()),
// unlike most other slots — presumably intentional (slot is written, never
// read from Java); confirm before adding a getter.
@CField
void setGetModule(CFunctionPointer p);
// Virtual threads
// NOTE(review): setter-only as well, see note above.
@CField
void setIsVirtualThread(CFunctionPointer p);
// JNI_VERSION_24
@CField
CFunctionPointer getGetStringUTFLengthAsLong();
@CField
void setGetStringUTFLengthAsLong(CFunctionPointer p);
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.johnzon.jsonlogic;
import org.apache.johnzon.jsonlogic.spi.Operator;
import jakarta.json.JsonArray;
import jakarta.json.JsonArrayBuilder;
import jakarta.json.JsonBuilderFactory;
import jakarta.json.JsonException;
import jakarta.json.JsonMergePatch;
import jakarta.json.JsonNumber;
import jakarta.json.JsonObject;
import jakarta.json.JsonPatch;
import jakarta.json.JsonPointer;
import jakarta.json.JsonString;
import jakarta.json.JsonStructure;
import jakarta.json.JsonValue;
import jakarta.json.spi.JsonProvider;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionStage;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.BiPredicate;
import java.util.stream.Collector;
import java.util.stream.DoubleStream;
import java.util.stream.Stream;
import static java.util.Collections.emptyMap;
import static java.util.concurrent.CompletableFuture.completedFuture;
import static java.util.stream.Collectors.joining;
public class JohnzonJsonLogic {
// JSON-P provider used to create values, patches and diffs.
private final JsonProvider provider;
// Operator name -> implementation; populated via the register* methods.
private final Map<String, Operator> operators = new HashMap<>();
// Caches; each is only consulted when the matching cacheXxx() toggle below was enabled.
private final Map<String, JsonPointer> pointers = new ConcurrentHashMap<>();
private final Map<JsonArray, JsonPatch> jsonPatches = new ConcurrentHashMap<>();
private final Map<JsonValue, JsonMergePatch> jsonMergePatches = new ConcurrentHashMap<>();
// Factory for object/array builders used by operators such as "reduce" and "merge".
private final JsonBuilderFactory builderFactory;
// Cache toggles, all disabled by default.
private boolean cachePointers;
private boolean cacheJsonPatches;
private boolean cacheJsonMergePatches;
/**
 * Creates an engine on the default JSON-P provider with the built-in
 * ({@link #registerDefaultOperators()}) and extension
 * ({@link #registerExtensionsOperators()}) operators pre-registered.
 */
public JohnzonJsonLogic() {
this(JsonProvider.provider());
registerDefaultOperators();
registerExtensionsOperators();
}
/**
 * Creates an engine on the given provider.
 * NOTE(review): unlike the no-arg constructor, this one registers NO operator;
 * callers must invoke the register* methods themselves — confirm this
 * asymmetry is intentional.
 *
 * @param provider the JSON-P provider used for values, builders and patches.
 */
public JohnzonJsonLogic(final JsonProvider provider) {
this.provider = provider;
this.builderFactory = provider.createBuilderFactory(emptyMap());
}
/** Enables caching of compiled {@code JsonPointer}s. @return this, for chaining. */
public JohnzonJsonLogic cachePointers() {
this.cachePointers = true;
return this;
}
/** Enables caching of compiled {@code JsonPatch}es. @return this, for chaining. */
public JohnzonJsonLogic cacheJsonPatches() {
this.cacheJsonPatches = true;
return this;
}
/** Enables caching of compiled {@code JsonMergePatch}es. @return this, for chaining. */
public JohnzonJsonLogic cacheJsonMergePatches() {
this.cacheJsonMergePatches = true;
return this;
}
/**
 * Registers (or replaces) an operator implementation.
 *
 * @param name operator key as it appears in logic objects, e.g. "var".
 * @param impl the implementation invoked when the key is encountered.
 * @return this, for chaining.
 */
public JohnzonJsonLogic registerOperator(final String name, final Operator impl) {
operators.put(name, impl);
return this;
}
/**
 * Evaluates a JsonLogic expression against the provided arguments.
 *
 * <p>Non-object values and objects with more than one key are literals and are
 * returned untouched; a single-key object is dispatched to the operator
 * registered under that key.</p>
 *
 * @param logic the expression to evaluate.
 * @param args  the data the expression is evaluated against.
 * @return the evaluation result.
 * @throws IllegalArgumentException when the object has no key or the operator is unknown.
 */
public JsonValue apply(final JsonValue logic, final JsonValue args) {
    if (logic.getValueType() != JsonValue.ValueType.OBJECT) {
        return logic;
    }
    final JsonObject expression = logic.asJsonObject();
    if (expression.size() > 1) {
        return expression;
    }
    final Set<String> names = expression.keySet();
    if (names.size() != 1) {
        throw invalidArgument(names);
    }
    final String name = names.iterator().next();
    final Operator implementation = operators.get(name);
    if (implementation == null) {
        throw missingOperator(name);
    }
    return implementation.apply(this, expression.get(name), args);
}
/**
 * Asynchronous variant of {@link #apply(JsonValue, JsonValue)}: same dispatch
 * rules, but validation failures surface as exceptionally completed stages
 * instead of thrown exceptions, and the operator's async entry point is used.
 *
 * @param logic the expression to evaluate.
 * @param args  the data the expression is evaluated against.
 * @return a stage completing with the evaluation result.
 */
public CompletionStage<JsonValue> applyStage(final JsonValue logic, final JsonValue args) {
if (logic.getValueType() != JsonValue.ValueType.OBJECT) {
return completedFuture(logic);
}
final JsonObject object = logic.asJsonObject();
if (object.size() > 1) {
return completedFuture(object);
}
final Set<String> keys = object.keySet();
if (keys.size() != 1) {
// only reachable for an empty object (size > 1 already returned above)
final CompletableFuture<JsonValue> promise = new CompletableFuture<>();
promise.completeExceptionally(invalidArgument(keys));
return promise;
}
final String operator = keys.iterator().next();
final Operator impl = operators.get(operator);
if (impl == null) {
final CompletableFuture<JsonValue> promise = new CompletableFuture<>();
promise.completeExceptionally(missingOperator(operator));
return promise;
}
return impl.applyStage(this, object.get(operator), args);
}
/** @return {@code true} when the value is truthy under JsonLogic rules (negation of {@link #isFalsy}). */
public boolean isTruthy(final JsonValue value) {
    return !isFalsy(value);
}

/**
 * JsonLogic "falsy" test: {@code false}, {@code null}, numeric zero, the empty
 * string and the empty array are falsy; everything else (including objects) is truthy.
 */
public boolean isFalsy(final JsonValue value) {
    final JsonValue.ValueType type = value.getValueType();
    if (type == JsonValue.ValueType.FALSE || type == JsonValue.ValueType.NULL) {
        return true;
    }
    if (type == JsonValue.ValueType.NUMBER) {
        return JsonNumber.class.cast(value).intValue() == 0;
    }
    if (type == JsonValue.ValueType.STRING) {
        return JsonString.class.cast(value).getString().isEmpty();
    }
    return type == JsonValue.ValueType.ARRAY && value.asJsonArray().isEmpty();
}
/**
 * JsonLogic "==" semantics: strict equality for same-typed values, with
 * type coercion across types (string/number parsed as double, booleans
 * compared through truthiness).
 *
 * @param a left operand (may be null).
 * @param b right operand (may be null).
 * @return true when the operands are equal after coercion.
 */
public boolean areEqualsWithCoercion(final JsonValue a, final JsonValue b) {
if (a == b) {
return true;
}
if (a == null) {
return false;
}
if (b == null) {
return false;
}
if (a.getValueType() == b.getValueType()) {
// same type: defer to plain structural equality
return a.equals(b);
}
switch (a.getValueType()) {
case STRING:
switch (b.getValueType()) {
case NUMBER:
// coerce the string to a double; unparseable strings are never equal
try {
return Double.parseDouble(JsonString.class.cast(a).getString()) == JsonNumber.class.cast(b).doubleValue();
} catch (final NumberFormatException nfe) {
return false;
}
case TRUE:
case FALSE:
// boolean comparison through truthiness
return isFalsy(a) == isFalsy(b);
default:
return false;
}
case NUMBER:
switch (b.getValueType()) {
case STRING:
try {
return Double.parseDouble(JsonString.class.cast(b).getString()) == JsonNumber.class.cast(a).doubleValue();
} catch (final NumberFormatException nfe) {
return false;
}
case TRUE:
case FALSE:
// NOTE(review): unlike the STRING branch above, this default also falls
// through to the truthiness comparison (e.g. NUMBER vs ARRAY) — confirm
// the asymmetry is intentional before "fixing" it.
default:
return isFalsy(a) == isFalsy(b);
}
case TRUE:
case FALSE:
return isFalsy(a) == isFalsy(b);
default:
return false;
}
}
/**
 * Registers the non-standard JSON-P backed operators: "jsonpatch" (RFC 6902),
 * "jsonmergepatch" (RFC 7386) and "jsonmergediff" (diff of two documents).
 *
 * @return this instance, for fluent chaining.
 */
public JohnzonJsonLogic registerExtensionsOperators() {
registerOperator("jsonpatch", (logic, config, params) -> getJsonPatch(config)
.apply(JsonStructure.class.cast(params)));
registerOperator("jsonmergepatch", (logic, config, params) -> getJsonMergePatch(config)
.apply(params));
registerOperator("jsonmergediff", (logic, config, params) -> {
// params must be [source, target]; the diff of config->source is applied to target
final JsonArray array = params.asJsonArray();
if (array.size() != 2) {
throw new IllegalArgumentException("jsonmergediff should have 2 parameters (in an array): " + array);
}
return provider.createMergeDiff(config, array.get(0)).apply(array.get(1));
});
return this;
}
/** Builds (or reuses, when {@link #cacheJsonPatches()} was enabled) the JsonPatch for the configuration. */
private JsonPatch getJsonPatch(final JsonValue config) {
    final JsonArray definition = config.asJsonArray();
    return cacheJsonPatches
            ? jsonPatches.computeIfAbsent(definition, provider::createPatch)
            : provider.createPatch(definition);
}
/**
 * Builds (or reuses, when {@link #cacheJsonMergePatches()} was enabled) the
 * JsonMergePatch for the given configuration.
 *
 * @param config the merge patch definition.
 * @return the (possibly cached) merge patch.
 */
private JsonMergePatch getJsonMergePatch(final JsonValue config) {
    // Bug fix: this previously tested cacheJsonPatches (copy/paste from
    // getJsonPatch), so enabling cacheJsonMergePatches() had no effect.
    if (!cacheJsonMergePatches) {
        return provider.createMergePatch(config);
    }
    return jsonMergePatches.computeIfAbsent(config, provider::createMergePatch);
}
// to not depend on a logger we don't register "log" operation but it is trivial to do:
/**
 * Registers the standard JsonLogic operator set: data access (var/missing/
 * missing_some), control flow (if, and, or), comparisons, arithmetic,
 * array operators (map/filter/reduce/all/some/none/merge/in) and string
 * operators (cat/substr). "log" is registered as a throwing placeholder that
 * explains how to plug a real logger.
 *
 * @return this instance, for fluent chaining.
 */
public JohnzonJsonLogic registerDefaultOperators() {
registerOperator("log", (logic, config, params) -> {
throw new UnsupportedOperationException("Log is not supported by default, register the following operator with your preferred logger:\n\n" +
"jsonLogic.registerOperator(\"log\", (l, c, p) -> log.info(String.valueOf(l.apply(c, p)));\n");
});
registerOperator("var", (logic, config, params) -> varImpl(config, params));
registerOperator("missing", this::missingImpl);
registerOperator("missing_some", this::missingSomeImpl);
registerOperator("if", this::ifImpl);
// numeric comparisons (2-arg, or 3-arg "between" form)
registerOperator("<", (logic, config, params) -> numericComparison((a, b) -> a < b, config, logic, params));
registerOperator(">", (logic, config, params) -> numericComparison((a, b) -> a > b, config, logic, params));
registerOperator("<=", (logic, config, params) -> numericComparison((a, b) -> a <= b, config, logic, params));
registerOperator(">=", (logic, config, params) -> numericComparison((a, b) -> a >= b, config, logic, params));
// "==" / "!=" coerce types, "===" / "!==" are strict
registerOperator("==", (logic, config, params) -> comparison(this::areEqualsWithCoercion, config, logic, params));
registerOperator("!=", (logic, config, params) -> comparison((a, b) -> !areEqualsWithCoercion(a, b), config, logic, params));
registerOperator("===", (logic, config, params) -> comparison(Objects::equals, config, logic, params));
registerOperator("!==", (logic, config, params) -> comparison((a, b) -> !Objects.equals(a, b), config, logic, params));
registerOperator("!", this::notImpl);
registerOperator("!!", this::toBooleanImpl);
registerOperator("or", this::orImpl);
registerOperator("and", this::andImpl);
registerOperator("min", this::minImpl);
registerOperator("max", this::maxImpl);
registerOperator("+", this::plusImpl);
registerOperator("*", this::multiplyImpl);
registerOperator("-", this::minusImpl);
registerOperator("/", this::divideImpl);
registerOperator("%", this::moduloImpl);
registerOperator("map", this::mapImpl);
registerOperator("filter", this::filterImpl);
registerOperator("reduce", this::reduceImpl);
registerOperator("all", (logic, config, params) ->
arrayTest(logic, config, params, (subConf, stream) -> stream.allMatch(it -> isTruthy(logic.apply(subConf, it)))));
registerOperator("some", (logic, config, params) ->
arrayTest(logic, config, params, (subConf, stream) -> stream.anyMatch(it -> isTruthy(logic.apply(subConf, it)))));
registerOperator("none", (logic, config, params) ->
arrayTest(logic, config, params, (subConf, stream) -> stream.noneMatch(it -> isTruthy(logic.apply(subConf, it)))));
registerOperator("merge", (logic, config, params) -> mergeImpl(config));
registerOperator("in", this::inImpl);
registerOperator("cat", this::catImpl);
registerOperator("substr", this::substrImpl);
return this;
}
/** Error for a logic object that does not hold exactly one operator key. */
private IllegalArgumentException invalidArgument(final Set<String> keys) {
    final String message = "Invalid argument, multiple keys found: " + keys;
    return new IllegalArgumentException(message);
}

/** Error for an operator name nothing was registered under. */
private IllegalArgumentException missingOperator(final String operator) {
    final String message = "Missing operator '" + operator + "'";
    return new IllegalArgumentException(message);
}
/** "min": evaluates each array element and returns the smallest numeric result (0 when none). */
private JsonValue minImpl(final JohnzonJsonLogic logic, final JsonValue config, final JsonValue params) {
    if (config.getValueType() != JsonValue.ValueType.ARRAY) {
        throw new IllegalArgumentException("min only supports arrays: '" + config + "'");
    }
    final double smallest = mapToDouble(logic, config, params).min().orElse(0);
    return provider.createValue(smallest);
}

/** "max": evaluates each array element and returns the largest numeric result (0 when none). */
private JsonValue maxImpl(final JohnzonJsonLogic logic, final JsonValue config, final JsonValue params) {
    if (config.getValueType() != JsonValue.ValueType.ARRAY) {
        throw new IllegalArgumentException("max only supports arrays: '" + config + "'");
    }
    final double largest = mapToDouble(logic, config, params).max().orElse(0);
    return provider.createValue(largest);
}
/** "+": sums the evaluated numeric elements of an array; unary form coerces a single value to a number. */
private JsonValue plusImpl(final JohnzonJsonLogic logic, final JsonValue config, final JsonValue params) {
    if (config.getValueType() != JsonValue.ValueType.ARRAY) {
        // unary "+" acts as a number cast
        return castToNumber(logic.apply(config, params));
    }
    if (config.asJsonArray().isEmpty()) {
        return provider.createValue(0);
    }
    final double total = mapToDouble(logic, config, params).sum();
    return provider.createValue(total);
}
/** "*": multiplies the evaluated numeric elements of the configuration array (0 for an empty array). */
private JsonValue multiplyImpl(final JohnzonJsonLogic logic, final JsonValue config, final JsonValue params) {
    if (config.getValueType() != JsonValue.ValueType.ARRAY) {
        throw new IllegalArgumentException("* only supports arrays: '" + config + "'");
    }
    final JsonArray operands = config.asJsonArray();
    if (operands.isEmpty()) {
        return provider.createValue(0);
    }
    final double product = mapToDouble(logic, config, params).reduce(1, (left, right) -> left * right);
    return provider.createValue(product);
}
/** "-": binary subtraction for a 2-element array, unary negation for a single numeric value. */
private JsonValue minusImpl(final JohnzonJsonLogic logic, final JsonValue config, final JsonValue params) {
    if (config.getValueType() == JsonValue.ValueType.ARRAY) {
        final JsonArray operands = config.asJsonArray();
        if (operands.size() != 2) {
            throw new IllegalArgumentException("- only supports arrays with 2 elements: '" + config + "'");
        }
        final double left = JsonNumber.class.cast(logic.apply(operands.get(0), params)).doubleValue();
        final double right = JsonNumber.class.cast(logic.apply(operands.get(1), params)).doubleValue();
        return provider.createValue(left - right);
    }
    final JsonValue evaluated = logic.apply(config, params);
    if (evaluated.getValueType() == JsonValue.ValueType.NUMBER) {
        return provider.createValue(-1 * JsonNumber.class.cast(evaluated).doubleValue());
    }
    throw new IllegalArgumentException("Unsupported - operation: '" + config + "'");
}

/** "/": divides the two evaluated numeric operands of the configuration array. */
private JsonValue divideImpl(final JohnzonJsonLogic logic, final JsonValue config, final JsonValue params) {
    if (config.getValueType() != JsonValue.ValueType.ARRAY) {
        throw new IllegalArgumentException("Unsupported / operation: '" + config + "'");
    }
    final JsonArray operands = config.asJsonArray();
    if (operands.size() != 2) {
        throw new IllegalArgumentException("/ only supports arrays with 2 elements: '" + config + "'");
    }
    final double left = JsonNumber.class.cast(logic.apply(operands.get(0), params)).doubleValue();
    final double right = JsonNumber.class.cast(logic.apply(operands.get(1), params)).doubleValue();
    return provider.createValue(left / right);
}

/** "%": remainder of the two evaluated numeric operands of the configuration array. */
private JsonValue moduloImpl(final JohnzonJsonLogic logic, final JsonValue config, final JsonValue params) {
    if (config.getValueType() != JsonValue.ValueType.ARRAY) {
        throw new IllegalArgumentException("Unsupported % operation: '" + config + "'");
    }
    final JsonArray operands = config.asJsonArray();
    if (operands.size() != 2) {
        throw new IllegalArgumentException("% only supports arrays with 2 elements: '" + config + "'");
    }
    final double left = JsonNumber.class.cast(logic.apply(operands.get(0), params)).doubleValue();
    final double right = JsonNumber.class.cast(logic.apply(operands.get(1), params)).doubleValue();
    return provider.createValue(left % right);
}
/** "map": config is [items, subLogic]; evaluates items to an array and applies subLogic to each element. */
private JsonValue mapImpl(final JohnzonJsonLogic logic, final JsonValue config, final JsonValue params) {
    if (config.getValueType() != JsonValue.ValueType.ARRAY) {
        throw new IllegalArgumentException("Unsupported map operation: '" + config + "'");
    }
    final JsonArray array = config.asJsonArray();
    if (array.size() != 2) {
        throw new IllegalArgumentException("map only supports arrays with 2 elements: '" + config + "'");
    }
    final JsonValue items = logic.apply(array.get(0), params);
    if (items.getValueType() != JsonValue.ValueType.ARRAY) {
        throw new IllegalArgumentException("Expected '" + array.get(0) + "' to be an array, got " + items.getValueType());
    }
    final JsonValue itemLogic = array.get(1);
    return items.asJsonArray().stream()
            .map(item -> logic.apply(itemLogic, item))
            .collect(toArray());
}

/** "filter": config is [items, predicate]; keeps the elements for which the predicate is truthy. */
private JsonValue filterImpl(final JohnzonJsonLogic logic, final JsonValue config, final JsonValue params) {
    if (config.getValueType() != JsonValue.ValueType.ARRAY) {
        throw new IllegalArgumentException("Unsupported filter operation: '" + config + "'");
    }
    final JsonArray array = config.asJsonArray();
    if (array.size() != 2) {
        throw new IllegalArgumentException("filter only supports arrays with 2 elements: '" + config + "'");
    }
    final JsonValue items = logic.apply(array.get(0), params);
    if (items.getValueType() != JsonValue.ValueType.ARRAY) {
        throw new IllegalArgumentException("Expected '" + array.get(0) + "' to be an array, got " + items.getValueType());
    }
    final JsonValue predicate = array.get(1);
    return items.asJsonArray().stream()
            .filter(item -> isTruthy(logic.apply(predicate, item)))
            .collect(toArray());
}
/** "merge": flattens one level — array elements are spliced in, scalars are appended as-is. */
private JsonValue mergeImpl(final JsonValue config) {
    if (config.getValueType() != JsonValue.ValueType.ARRAY) {
        throw new IllegalArgumentException("merge only support an array as configuration, got '" + config + "'");
    }
    final Stream<JsonValue> flattened = config.asJsonArray().stream()
            .flatMap(it -> it.getValueType() == JsonValue.ValueType.ARRAY
                    ? it.asJsonArray().stream()
                    : Stream.of(it));
    return flattened.collect(toArray());
}
/**
 * "substr" operator: config is [string, start, length?].
 * A negative start counts back from the end of the string; a negative length
 * stops that many characters before the end (JsonLogic semantics).
 *
 * @throws IllegalArgumentException when the config shape or operand types are wrong.
 */
private JsonValue substrImpl(final JohnzonJsonLogic logic, final JsonValue config, final JsonValue params) {
if (config.getValueType() != JsonValue.ValueType.ARRAY || config.asJsonArray().size() < 2) {
throw new IllegalArgumentException("substr only support an array as configuration, got '" + config + "'");
}
final JsonArray array = config.asJsonArray();
final JsonValue value = logic.apply(array.get(0), params);
if (value.getValueType() != JsonValue.ValueType.STRING) {
throw new IllegalArgumentException("expected a string for substr, got '" + value + "'");
}
final String valueStr = JsonString.class.cast(value).getString();
final JsonValue from = logic.apply(array.get(1), params);
if (from.getValueType() != JsonValue.ValueType.NUMBER) {
throw new IllegalArgumentException("expected a number for substr, got '" + from + "'");
}
final int fromIdx = JsonNumber.class.cast(from).intValue();
final int start;
if (fromIdx < 0) {
// negative start: offset from the end of the string
start = valueStr.length() + fromIdx;
} else {
start = fromIdx;
}
final int end;
if (array.size() == 3) {
final JsonValue to = logic.apply(array.get(2), params);
if (to.getValueType() != JsonValue.ValueType.NUMBER) {
throw new IllegalArgumentException("expected a number for substr, got '" + to + "'");
}
final int length = JsonNumber.class.cast(to).intValue();
// negative length stops short of the end; positive length is relative to start
end = length < 0 ? valueStr.length() + length : start + length;
} else {
end = valueStr.length();
}
// NOTE(review): out-of-range indices propagate as StringIndexOutOfBoundsException — confirm acceptable.
return provider.createValue(valueStr.substring(start, end));
}
/** "cat": evaluates each element and concatenates the string results (non-strings are skipped). */
private JsonValue catImpl(final JohnzonJsonLogic logic, final JsonValue config, final JsonValue params) {
    if (config.getValueType() != JsonValue.ValueType.ARRAY) {
        throw new IllegalArgumentException("cat only support an array of string elements as configuration, got '" + config + "'");
    }
    final String concatenated = config.asJsonArray().stream()
            .map(element -> logic.apply(element, params))
            .filter(evaluated -> evaluated.getValueType() == JsonValue.ValueType.STRING)
            .map(JsonString.class::cast)
            .map(JsonString::getString)
            .collect(joining());
    return provider.createValue(concatenated);
}
/**
 * "in" operator: config is [needle, haystack]. For a string haystack this is a
 * substring test; for an array haystack a membership test; any other haystack
 * type yields false.
 */
private JsonValue inImpl(final JohnzonJsonLogic logic, final JsonValue config, final JsonValue params) {
    if (config.getValueType() != JsonValue.ValueType.ARRAY || config.asJsonArray().size() != 2) {
        throw new IllegalArgumentException("in only support an array of 2 elements as configuration, got '" + config + "'");
    }
    final JsonArray array = config.asJsonArray();
    final JsonValue expected = logic.apply(array.get(0), params);
    final JsonValue value = logic.apply(array.get(1), params);
    switch (value.getValueType()) {
        case STRING:
            return expected.getValueType() == JsonValue.ValueType.STRING && JsonString.class.cast(value).getString()
                    .contains(JsonString.class.cast(expected).getString()) ? JsonValue.TRUE : JsonValue.FALSE;
        case ARRAY:
            // cleanup: dropped a redundant re-check of value's type (always true in this case)
            return value.asJsonArray().stream()
                    .anyMatch(it -> Objects.equals(it, expected)) ? JsonValue.TRUE : JsonValue.FALSE;
        default:
            return JsonValue.FALSE;
    }
}
/**
 * "reduce" operator: config is [items, subLogic, initial?]. Each step evaluates
 * subLogic against {"accumulator": acc, "current": item}; without an explicit
 * initial value the accumulator starts at JSON null.
 */
private JsonValue reduceImpl(final JohnzonJsonLogic logic, final JsonValue config, final JsonValue params) {
    if (config.getValueType() == JsonValue.ValueType.ARRAY) {
        final JsonArray array = config.asJsonArray();
        if (array.size() < 2 || array.size() > 3) {
            // Bug fix: this message used to say "filter" (copy/paste from filterImpl).
            throw new IllegalArgumentException("reduce only supports arrays with 2 or 3 elements: '" + config + "'");
        }
        final JsonValue items = logic.apply(array.get(0), params);
        if (items.getValueType() != JsonValue.ValueType.ARRAY) {
            throw new IllegalArgumentException("Expected '" + array.get(0) + "' to be an array, got " + items.getValueType());
        }
        final JsonValue subLogic = array.get(1);
        return items.asJsonArray().stream()
                .reduce(
                        array.size() == 3 ? array.get(2) : JsonValue.NULL,
                        (accumulator, current) -> logic.apply(subLogic, builderFactory.createObjectBuilder()
                                .add("accumulator", accumulator)
                                .add("current", current)
                                .build()));
    }
    throw new IllegalArgumentException("Unsupported reduce operation: '" + config + "'");
}
/**
 * "and" operator: returns the first falsy evaluated operand, otherwise the
 * last operand's evaluated value (false for an empty array).
 */
private JsonValue andImpl(final JohnzonJsonLogic logic, final JsonValue config, final JsonValue params) {
    if (config.getValueType() != JsonValue.ValueType.ARRAY) {
        throw new IllegalArgumentException("and only supports arrays: '" + config + "'");
    }
    final JsonArray array = config.asJsonArray();
    return array.stream()
            .map(it -> logic.apply(it, params))
            .filter(this::isFalsy)
            .findFirst()
            // Bug fix: the fallback used to return the last operand UN-evaluated,
            // so nested logic (e.g. {"var": ...}) leaked through as-is while all
            // operands inspected in the stream above were evaluated.
            .orElseGet(() -> array.isEmpty() ? JsonValue.FALSE : logic.apply(array.get(array.size() - 1), params));
}
/**
 * "or" operator: returns the first truthy evaluated operand, otherwise the
 * last operand's evaluated value (false for an empty array).
 */
private JsonValue orImpl(final JohnzonJsonLogic logic, final JsonValue config, final JsonValue params) {
    if (config.getValueType() != JsonValue.ValueType.ARRAY) {
        throw new IllegalArgumentException("or only supports arrays: '" + config + "'");
    }
    final JsonArray array = config.asJsonArray();
    return array.stream()
            .map(it -> logic.apply(it, params))
            .filter(this::isTruthy)
            .findFirst()
            // Bug fix: the fallback used to return the last operand UN-evaluated
            // (see andImpl for the same defect).
            .orElseGet(() -> array.isEmpty() ? JsonValue.FALSE : logic.apply(array.get(array.size() - 1), params));
}
/** Extracts the single operand of a unary operator, unwrapping a 1-element array wrapper. */
private JsonValue unaryOperand(final String name, final JsonValue config) {
    if (config.getValueType() != JsonValue.ValueType.ARRAY) {
        return config;
    }
    final JsonArray array = config.asJsonArray();
    if (array.size() != 1) {
        throw new IllegalArgumentException(name + " takes only one parameter '" + config + "'");
    }
    return array.get(0);
}

/** "!!": coerces its single evaluated operand to a JSON boolean using truthiness rules. */
private JsonValue toBooleanImpl(final JohnzonJsonLogic logic, final JsonValue config, final JsonValue params) {
    return isTruthy(logic.apply(unaryOperand("!!", config), params)) ? JsonValue.TRUE : JsonValue.FALSE;
}

/** "!": logical negation of its single evaluated operand's truthiness. */
private JsonValue notImpl(final JohnzonJsonLogic logic, final JsonValue config, final JsonValue params) {
    return isFalsy(logic.apply(unaryOperand("!", config), params)) ? JsonValue.TRUE : JsonValue.FALSE;
}
/**
 * "if" operator: config is [cond1, value1, cond2, value2, ..., elseValue?].
 * Conditions are evaluated in order; the value paired with the first truthy
 * condition is evaluated and returned. With an odd element count the trailing
 * element is the "else" branch; otherwise false is returned.
 */
private JsonValue ifImpl(final JohnzonJsonLogic logic, final JsonValue config, final JsonValue params) {
    if (config.getValueType() != JsonValue.ValueType.ARRAY) {
        throw new IllegalArgumentException("if config must be an array");
    }
    final JsonArray configArray = config.asJsonArray();
    if (configArray.size() < 2) {
        throw new IllegalArgumentException("if config must be an array >= 2 elements");
    }
    for (int i = 0; i < configArray.size() - 1; i += 2) {
        if (isTruthy(logic.apply(configArray.get(i), params))) {
            return logic.apply(configArray.get(i + 1), params);
        }
    }
    if (configArray.size() % 2 == 1) {
        // Bug fix: the "else" element used to be returned without evaluation,
        // so a nested expression such as {"var":"x"} leaked through verbatim,
        // unlike the branch values above which are evaluated.
        return logic.apply(configArray.get(configArray.size() - 1), params);
    }
    return JsonValue.FALSE;
}
/**
 * "missing_some" operator: config is [minRequired, [key...]].
 * Returns the missing keys when fewer than minRequired of them resolve,
 * otherwise an empty array ("enough data is present").
 */
private JsonValue missingSomeImpl(final JohnzonJsonLogic logic, final JsonValue config, final JsonValue params) {
if (config.getValueType() != JsonValue.ValueType.ARRAY) {
throw new IllegalArgumentException("missing_some takes an array as parameter: '" + config + "'");
}
final JsonArray configArray = config.asJsonArray();
if (configArray.size() != 2) {
throw new IllegalArgumentException("missing_some takes an array with a number and a path array as parameter: '" + config + "'");
}
final JsonArray tested = configArray.get(1).asJsonArray();
// a key is "missing" when its var lookup resolves to JSON null
final JsonArray missing = tested.stream()
.filter(it -> varImpl(logic.apply(it, params), params) == JsonValue.NULL)
.collect(toArray());
if ((tested.size() - missing.size()) < JsonNumber.class.cast(logic.apply(configArray.get(0), params)).intValue()) {
return missing;
}
return JsonValue.EMPTY_JSON_ARRAY;
}
/** "missing": returns the subset of requested keys whose var lookup resolves to JSON null. */
private JsonValue missingImpl(final JohnzonJsonLogic logic, final JsonValue config, final JsonValue params) {
    if (config.getValueType() != JsonValue.ValueType.ARRAY) {
        throw new IllegalArgumentException("missing takes an array as parameter: '" + config + "'");
    }
    final Stream<JsonValue> absentKeys = config.asJsonArray().stream()
            .filter(key -> varImpl(logic.apply(key, params), params) == JsonValue.NULL);
    return absentKeys.collect(toArray());
}
/**
 * Shared backbone of "all"/"some"/"none": config is [items, subLogic]; items is
 * evaluated to an array and the (subLogic, item stream) pair is handed to the
 * supplied predicate.
 */
private JsonValue arrayTest(final JohnzonJsonLogic self, final JsonValue config, final JsonValue params,
                            final BiPredicate<JsonValue, Stream<JsonValue>> tester) {
    if (config.getValueType() != JsonValue.ValueType.ARRAY) {
        throw new IllegalArgumentException("Unsupported array test operation: '" + config + "'");
    }
    final JsonArray array = config.asJsonArray();
    if (array.size() != 2) {
        throw new IllegalArgumentException("array test only supports arrays with 2: '" + config + "'");
    }
    final JsonValue items = self.apply(array.get(0), params);
    if (items.getValueType() != JsonValue.ValueType.ARRAY) {
        throw new IllegalArgumentException("Expected '" + array.get(0) + "' to be an array, got " + items.getValueType());
    }
    final boolean matches = tester.test(array.get(1), items.asJsonArray().stream());
    return matches ? JsonValue.TRUE : JsonValue.FALSE;
}
/** Coerces an already-evaluated value to a JSON number (strings are parsed as doubles). */
private JsonValue castToNumber(final JsonValue value) {
    if (value.getValueType() == JsonValue.ValueType.NUMBER) {
        return value;
    }
    if (value.getValueType() == JsonValue.ValueType.STRING) {
        final double parsed = Double.parseDouble(JsonString.class.cast(value).getString());
        return provider.createValue(parsed);
    }
    throw new IllegalArgumentException("Unsupported value to number: '" + value + "'");
}

/** Evaluates each config element and streams the numeric results (non-numbers are silently dropped). */
private DoubleStream mapToDouble(final JohnzonJsonLogic logic, final JsonValue config, final JsonValue params) {
    return config.asJsonArray().stream()
            .map(element -> logic.apply(element, params))
            .filter(evaluated -> evaluated.getValueType() == JsonValue.ValueType.NUMBER)
            .mapToDouble(evaluated -> JsonNumber.class.cast(evaluated).doubleValue());
}
private JsonValue comparison(final BiPredicate<JsonValue, JsonValue> comparator,
final JsonValue config, final JohnzonJsonLogic self,
final JsonValue params) {
if (config.getValueType() != JsonValue.ValueType.ARRAY) {
throw new IllegalArgumentException("comparison config must be an array");
}
final JsonArray values = config.asJsonArray();
if (values.size() != 2) {
throw new IllegalArgumentException("comparison requires 2 arguments");
}
final JsonValue first = self.apply(values.get(0), params);
final JsonValue second = self.apply(values.get(1), params);
return comparator.test(first, second) ? JsonValue.TRUE : JsonValue.FALSE;
}
private JsonValue numericComparison(final BiPredicate<Double, Double> comparator,
final JsonValue config, final JohnzonJsonLogic self,
final JsonValue params) {
if (config.getValueType() != JsonValue.ValueType.ARRAY) {
throw new IllegalArgumentException("numeric comparison config must be an array");
}
final JsonArray configArray = config.asJsonArray();
switch (configArray.size()) {
case 2: {
final JsonValue first = self.apply(configArray.get(0), params);
final JsonValue second = self.apply(configArray.get(1), params);
if (Stream.of(first, second).anyMatch(it -> it.getValueType() != JsonValue.ValueType.NUMBER)) {
throw new IllegalArgumentException("Only numbers can be compared: " + first + " / " + second);
}
return comparator.test(JsonNumber.class.cast(first).doubleValue(), JsonNumber.class.cast(second).doubleValue()) ?
JsonValue.TRUE : JsonValue.FALSE;
}
case 3: { // between
final JsonValue first = self.apply(configArray.get(0), params);
final JsonValue second = self.apply(configArray.get(1), params);
final JsonValue third = self.apply(configArray.get(1), params);
if (Stream.of(first, second, third).anyMatch(it -> it.getValueType() != JsonValue.ValueType.NUMBER)) {
throw new IllegalArgumentException("Only numbers can be compared");
}
return comparator.test(JsonNumber.class.cast(first).doubleValue(), JsonNumber.class.cast(second).doubleValue()) &&
comparator.test(JsonNumber.class.cast(second).doubleValue(), JsonNumber.class.cast(third).doubleValue()) ?
JsonValue.TRUE : JsonValue.FALSE;
}
default:
throw new IllegalArgumentException("numeric comparison config must be an array >= 2 elements");
}
}
    /**
     * Implements the "var" operation: resolves a variable reference against
     * {@code params}. Accepts a string path, a numeric array index, an
     * {@code [accessor, default]} array, or a nested logic object that first
     * evaluates to one of those forms. Unresolvable references yield the supplied
     * default, or JSON null when no default is given.
     */
    private JsonValue varImpl(final JsonValue config, final JsonValue params) {
        switch (config.getValueType()) {
            case ARRAY:
                // Form: [accessor] or [accessor, defaultValue].
                final JsonArray values = config.asJsonArray();
                if (values.isEmpty()) {
                    throw new IllegalArgumentException("var should have at least one parameter");
                }
                final JsonValue accessor = apply(values.get(0), params);
                switch (accessor.getValueType()) {
                    case NUMBER:
                        // Numeric accessor: index into params treated as an array.
                        final int index = JsonNumber.class.cast(accessor).intValue();
                        final JsonArray array = params.asJsonArray();
                        final JsonValue arrayAttribute = index >= array.size() ? null : array.get(index);
                        // Out-of-range index falls back to the default (second element) when present.
                        return arrayAttribute == null ? (values.size() > 1 ? apply(values.get(1), params) : JsonValue.NULL) : arrayAttribute;
                    case STRING:
                        // String accessor: dotted-path / key lookup; NULL result triggers the default.
                        final JsonValue objectAttribute = extractValue(params, JsonString.class.cast(accessor).getString());
                        return objectAttribute == JsonValue.NULL && values.size() > 1 ? apply(values.get(1), params) : objectAttribute;
                    default:
                        // NOTE(review): "paraemter" typo is part of the upstream runtime message; left untouched.
                        throw new IllegalArgumentException("Unsupported var first paraemter: '" + accessor + "', should be string or number");
                }
            case STRING:
                // Plain path lookup, e.g. "user.name".
                return extractValue(params, JsonString.class.cast(config).getString());
            case NUMBER:
                // Bare index into params as an array; out of range yields JSON null.
                final int index = JsonNumber.class.cast(config).intValue();
                final JsonArray array = params.asJsonArray();
                final JsonValue arrayAttribute = array.size() <= index ? null : array.get(index);
                return arrayAttribute == null ? JsonValue.NULL : arrayAttribute;
            case OBJECT:
                // Nested logic: evaluate it first, then resolve the result as a var.
                return varImpl(apply(config, params), params);
            default:
                throw new IllegalArgumentException("Unsupported configuration for var: '" + config + "'");
        }
    }
private JsonValue extractValue(final JsonValue params, final String string) {
if (string.isEmpty()) {
return params;
}
final JsonValue objectAttribute;
if (string.contains(".")) {
try {
objectAttribute = toPointer(string).getValue(JsonStructure.class.cast(params));
} catch (final JsonException je) { // missing
return JsonValue.NULL;
}
} else if (params.getValueType() == JsonValue.ValueType.OBJECT) {
objectAttribute = params.asJsonObject().get(string);
} else if (params.getValueType() == JsonValue.ValueType.ARRAY) {
objectAttribute = params.asJsonArray().get(Integer.parseInt(string.trim()));
} else {
objectAttribute = null;
}
return objectAttribute == null ? JsonValue.NULL : objectAttribute;
}
// cache?
private JsonPointer toPointer(final String string) {
if (cachePointers) {
return pointers.computeIfAbsent(string, this::doToPointer);
}
return doToPointer(string);
}
private JsonPointer doToPointer(final String string) {
return provider.createPointer(
(!string.startsWith("/") ? "/" : "") +
string.replace('.', '/'));
}
// same as JsonCollector one except it uses this builderFactory instead of default one which goes through the SPI
private Collector<JsonValue, JsonArrayBuilder, JsonArray> toArray() {
return Collector.of(builderFactory::createArrayBuilder, JsonArrayBuilder::add, JsonArrayBuilder::addAll, JsonArrayBuilder::build);
}
}
|
googleapis/google-cloud-java | 36,717 | java-billingbudgets/proto-google-cloud-billingbudgets-v1/src/main/java/com/google/cloud/billing/budgets/v1/ListBudgetsResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/billing/budgets/v1/budget_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.billing.budgets.v1;
/**
*
*
* <pre>
* Response for ListBudgets
* </pre>
*
* Protobuf type {@code google.cloud.billing.budgets.v1.ListBudgetsResponse}
*/
public final class ListBudgetsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.billing.budgets.v1.ListBudgetsResponse)
ListBudgetsResponseOrBuilder {
private static final long serialVersionUID = 0L;
  // Use ListBudgetsResponse.newBuilder() to construct.
  private ListBudgetsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // Initializes repeated/scalar fields to their proto3 defaults (empty list, "").
  private ListBudgetsResponse() {
    budgets_ = java.util.Collections.emptyList();
    nextPageToken_ = "";
  }

  // Reflection hook used by the protobuf runtime to allocate fresh instances.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListBudgetsResponse();
  }
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.billing.budgets.v1.BudgetServiceProto
.internal_static_google_cloud_billing_budgets_v1_ListBudgetsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.billing.budgets.v1.BudgetServiceProto
.internal_static_google_cloud_billing_budgets_v1_ListBudgetsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.billing.budgets.v1.ListBudgetsResponse.class,
com.google.cloud.billing.budgets.v1.ListBudgetsResponse.Builder.class);
}
public static final int BUDGETS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.billing.budgets.v1.Budget> budgets_;
/**
*
*
* <pre>
* List of the budgets owned by the requested billing account.
* </pre>
*
* <code>repeated .google.cloud.billing.budgets.v1.Budget budgets = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.billing.budgets.v1.Budget> getBudgetsList() {
return budgets_;
}
/**
*
*
* <pre>
* List of the budgets owned by the requested billing account.
* </pre>
*
* <code>repeated .google.cloud.billing.budgets.v1.Budget budgets = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.billing.budgets.v1.BudgetOrBuilder>
getBudgetsOrBuilderList() {
return budgets_;
}
/**
*
*
* <pre>
* List of the budgets owned by the requested billing account.
* </pre>
*
* <code>repeated .google.cloud.billing.budgets.v1.Budget budgets = 1;</code>
*/
@java.lang.Override
public int getBudgetsCount() {
return budgets_.size();
}
/**
*
*
* <pre>
* List of the budgets owned by the requested billing account.
* </pre>
*
* <code>repeated .google.cloud.billing.budgets.v1.Budget budgets = 1;</code>
*/
@java.lang.Override
public com.google.cloud.billing.budgets.v1.Budget getBudgets(int index) {
return budgets_.get(index);
}
/**
*
*
* <pre>
* List of the budgets owned by the requested billing account.
* </pre>
*
* <code>repeated .google.cloud.billing.budgets.v1.Budget budgets = 1;</code>
*/
@java.lang.Override
public com.google.cloud.billing.budgets.v1.BudgetOrBuilder getBudgetsOrBuilder(int index) {
return budgets_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* If not empty, indicates that there may be more budgets that match the
* request; this value should be passed in a new `ListBudgetsRequest`.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* If not empty, indicates that there may be more budgets that match the
* request; this value should be passed in a new `ListBudgetsRequest`.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
  // Memoized initialization state: -1 unknown, 0 not initialized, 1 initialized.
  private byte memoizedIsInitialized = -1;

  // proto3 messages have no required fields, so this always ends up true; the
  // result is cached after the first call.
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes budgets (field 1) then next_page_token (field 2, skipped when
  // empty), followed by any unknown fields preserved from parsing.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    for (int i = 0; i < budgets_.size(); i++) {
      output.writeMessage(1, budgets_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
    }
    getUnknownFields().writeTo(output);
  }
  // Computes (and memoizes in memoizedSize) the wire size; must mirror writeTo
  // field-for-field so the two stay in sync.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    for (int i = 0; i < budgets_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, budgets_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Value equality over budgets, next_page_token and the preserved unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.billing.budgets.v1.ListBudgetsResponse)) {
      return super.equals(obj);
    }
    com.google.cloud.billing.budgets.v1.ListBudgetsResponse other =
        (com.google.cloud.billing.budgets.v1.ListBudgetsResponse) obj;
    if (!getBudgetsList().equals(other.getBudgetsList())) return false;
    if (!getNextPageToken().equals(other.getNextPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Hash over the same fields as equals (budgets only when present), memoized in
  // memoizedHashCode; keeps the equals/hashCode contract for use as a map key.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getBudgetsCount() > 0) {
      hash = (37 * hash) + BUDGETS_FIELD_NUMBER;
      hash = (53 * hash) + getBudgetsList().hashCode();
    }
    hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getNextPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
public static com.google.cloud.billing.budgets.v1.ListBudgetsResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.billing.budgets.v1.ListBudgetsResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.billing.budgets.v1.ListBudgetsResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.billing.budgets.v1.ListBudgetsResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.billing.budgets.v1.ListBudgetsResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.billing.budgets.v1.ListBudgetsResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.billing.budgets.v1.ListBudgetsResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.billing.budgets.v1.ListBudgetsResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.billing.budgets.v1.ListBudgetsResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.billing.budgets.v1.ListBudgetsResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.billing.budgets.v1.ListBudgetsResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.billing.budgets.v1.ListBudgetsResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.billing.budgets.v1.ListBudgetsResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Response for ListBudgets
* </pre>
*
* Protobuf type {@code google.cloud.billing.budgets.v1.ListBudgetsResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.billing.budgets.v1.ListBudgetsResponse)
com.google.cloud.billing.budgets.v1.ListBudgetsResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.billing.budgets.v1.BudgetServiceProto
.internal_static_google_cloud_billing_budgets_v1_ListBudgetsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.billing.budgets.v1.BudgetServiceProto
.internal_static_google_cloud_billing_budgets_v1_ListBudgetsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.billing.budgets.v1.ListBudgetsResponse.class,
com.google.cloud.billing.budgets.v1.ListBudgetsResponse.Builder.class);
}
// Construct using com.google.cloud.billing.budgets.v1.ListBudgetsResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (budgetsBuilder_ == null) {
budgets_ = java.util.Collections.emptyList();
} else {
budgets_ = null;
budgetsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.billing.budgets.v1.BudgetServiceProto
.internal_static_google_cloud_billing_budgets_v1_ListBudgetsResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.billing.budgets.v1.ListBudgetsResponse getDefaultInstanceForType() {
return com.google.cloud.billing.budgets.v1.ListBudgetsResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.billing.budgets.v1.ListBudgetsResponse build() {
com.google.cloud.billing.budgets.v1.ListBudgetsResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.billing.budgets.v1.ListBudgetsResponse buildPartial() {
com.google.cloud.billing.budgets.v1.ListBudgetsResponse result =
new com.google.cloud.billing.budgets.v1.ListBudgetsResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.cloud.billing.budgets.v1.ListBudgetsResponse result) {
if (budgetsBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
budgets_ = java.util.Collections.unmodifiableList(budgets_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.budgets_ = budgets_;
} else {
result.budgets_ = budgetsBuilder_.build();
}
}
private void buildPartial0(com.google.cloud.billing.budgets.v1.ListBudgetsResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.billing.budgets.v1.ListBudgetsResponse) {
return mergeFrom((com.google.cloud.billing.budgets.v1.ListBudgetsResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
    // Merges another ListBudgetsResponse into this builder: repeated budgets are
    // concatenated, a non-empty next_page_token overwrites the current value, and
    // unknown fields are merged.
    public Builder mergeFrom(com.google.cloud.billing.budgets.v1.ListBudgetsResponse other) {
      if (other == com.google.cloud.billing.budgets.v1.ListBudgetsResponse.getDefaultInstance())
        return this;
      if (budgetsBuilder_ == null) {
        if (!other.budgets_.isEmpty()) {
          if (budgets_.isEmpty()) {
            // Share the other message's immutable list until a mutation forces a copy.
            budgets_ = other.budgets_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureBudgetsIsMutable();
            budgets_.addAll(other.budgets_);
          }
          onChanged();
        }
      } else {
        if (!other.budgets_.isEmpty()) {
          if (budgetsBuilder_.isEmpty()) {
            // Reset the field builder so it lazily re-wraps the shared list.
            budgetsBuilder_.dispose();
            budgetsBuilder_ = null;
            budgets_ = other.budgets_;
            bitField0_ = (bitField0_ & ~0x00000001);
            budgetsBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getBudgetsFieldBuilder()
                    : null;
          } else {
            budgetsBuilder_.addAllMessages(other.budgets_);
          }
        }
      }
      if (!other.getNextPageToken().isEmpty()) {
        nextPageToken_ = other.nextPageToken_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
@java.lang.Override
public final boolean isInitialized() {
return true;
}
    // Parses fields from the wire into this builder. Tag 10 = budgets (field 1,
    // length-delimited message), tag 18 = next_page_token (field 2, UTF-8 string);
    // any other tag is preserved as an unknown field. Tag 0 marks end of input.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                com.google.cloud.billing.budgets.v1.Budget m =
                    input.readMessage(
                        com.google.cloud.billing.budgets.v1.Budget.parser(), extensionRegistry);
                if (budgetsBuilder_ == null) {
                  ensureBudgetsIsMutable();
                  budgets_.add(m);
                } else {
                  budgetsBuilder_.addMessage(m);
                }
                break;
              } // case 10
            case 18:
              {
                nextPageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
private int bitField0_;
private java.util.List<com.google.cloud.billing.budgets.v1.Budget> budgets_ =
java.util.Collections.emptyList();
    // Copy-on-write guard: replaces the (possibly shared/immutable) budgets list
    // with a private mutable ArrayList the first time a mutation is requested.
    private void ensureBudgetsIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        budgets_ = new java.util.ArrayList<com.google.cloud.billing.budgets.v1.Budget>(budgets_);
        bitField0_ |= 0x00000001;
      }
    }
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.billing.budgets.v1.Budget,
com.google.cloud.billing.budgets.v1.Budget.Builder,
com.google.cloud.billing.budgets.v1.BudgetOrBuilder>
budgetsBuilder_;
/**
*
*
* <pre>
* List of the budgets owned by the requested billing account.
* </pre>
*
* <code>repeated .google.cloud.billing.budgets.v1.Budget budgets = 1;</code>
*/
public java.util.List<com.google.cloud.billing.budgets.v1.Budget> getBudgetsList() {
if (budgetsBuilder_ == null) {
return java.util.Collections.unmodifiableList(budgets_);
} else {
return budgetsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* List of the budgets owned by the requested billing account.
* </pre>
*
* <code>repeated .google.cloud.billing.budgets.v1.Budget budgets = 1;</code>
*/
public int getBudgetsCount() {
if (budgetsBuilder_ == null) {
return budgets_.size();
} else {
return budgetsBuilder_.getCount();
}
}
/**
*
*
* <pre>
* List of the budgets owned by the requested billing account.
* </pre>
*
* <code>repeated .google.cloud.billing.budgets.v1.Budget budgets = 1;</code>
*/
public com.google.cloud.billing.budgets.v1.Budget getBudgets(int index) {
if (budgetsBuilder_ == null) {
return budgets_.get(index);
} else {
return budgetsBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* List of the budgets owned by the requested billing account.
* </pre>
*
* <code>repeated .google.cloud.billing.budgets.v1.Budget budgets = 1;</code>
*/
public Builder setBudgets(int index, com.google.cloud.billing.budgets.v1.Budget value) {
if (budgetsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureBudgetsIsMutable();
budgets_.set(index, value);
onChanged();
} else {
budgetsBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* List of the budgets owned by the requested billing account.
* </pre>
*
* <code>repeated .google.cloud.billing.budgets.v1.Budget budgets = 1;</code>
*/
public Builder setBudgets(
int index, com.google.cloud.billing.budgets.v1.Budget.Builder builderForValue) {
if (budgetsBuilder_ == null) {
ensureBudgetsIsMutable();
budgets_.set(index, builderForValue.build());
onChanged();
} else {
budgetsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* List of the budgets owned by the requested billing account.
* </pre>
*
* <code>repeated .google.cloud.billing.budgets.v1.Budget budgets = 1;</code>
*/
public Builder addBudgets(com.google.cloud.billing.budgets.v1.Budget value) {
if (budgetsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureBudgetsIsMutable();
budgets_.add(value);
onChanged();
} else {
budgetsBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* List of the budgets owned by the requested billing account.
* </pre>
*
* <code>repeated .google.cloud.billing.budgets.v1.Budget budgets = 1;</code>
*/
public Builder addBudgets(int index, com.google.cloud.billing.budgets.v1.Budget value) {
if (budgetsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureBudgetsIsMutable();
budgets_.add(index, value);
onChanged();
} else {
budgetsBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* List of the budgets owned by the requested billing account.
* </pre>
*
* <code>repeated .google.cloud.billing.budgets.v1.Budget budgets = 1;</code>
*/
public Builder addBudgets(com.google.cloud.billing.budgets.v1.Budget.Builder builderForValue) {
if (budgetsBuilder_ == null) {
ensureBudgetsIsMutable();
budgets_.add(builderForValue.build());
onChanged();
} else {
budgetsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* List of the budgets owned by the requested billing account.
* </pre>
*
* <code>repeated .google.cloud.billing.budgets.v1.Budget budgets = 1;</code>
*/
public Builder addBudgets(
int index, com.google.cloud.billing.budgets.v1.Budget.Builder builderForValue) {
if (budgetsBuilder_ == null) {
ensureBudgetsIsMutable();
budgets_.add(index, builderForValue.build());
onChanged();
} else {
budgetsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* List of the budgets owned by the requested billing account.
* </pre>
*
* <code>repeated .google.cloud.billing.budgets.v1.Budget budgets = 1;</code>
*/
public Builder addAllBudgets(
java.lang.Iterable<? extends com.google.cloud.billing.budgets.v1.Budget> values) {
if (budgetsBuilder_ == null) {
ensureBudgetsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, budgets_);
onChanged();
} else {
budgetsBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* List of the budgets owned by the requested billing account.
* </pre>
*
* <code>repeated .google.cloud.billing.budgets.v1.Budget budgets = 1;</code>
*/
public Builder clearBudgets() {
if (budgetsBuilder_ == null) {
budgets_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
budgetsBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* List of the budgets owned by the requested billing account.
* </pre>
*
* <code>repeated .google.cloud.billing.budgets.v1.Budget budgets = 1;</code>
*/
public Builder removeBudgets(int index) {
if (budgetsBuilder_ == null) {
ensureBudgetsIsMutable();
budgets_.remove(index);
onChanged();
} else {
budgetsBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* List of the budgets owned by the requested billing account.
* </pre>
*
* <code>repeated .google.cloud.billing.budgets.v1.Budget budgets = 1;</code>
*/
public com.google.cloud.billing.budgets.v1.Budget.Builder getBudgetsBuilder(int index) {
return getBudgetsFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* List of the budgets owned by the requested billing account.
* </pre>
*
* <code>repeated .google.cloud.billing.budgets.v1.Budget budgets = 1;</code>
*/
public com.google.cloud.billing.budgets.v1.BudgetOrBuilder getBudgetsOrBuilder(int index) {
if (budgetsBuilder_ == null) {
return budgets_.get(index);
} else {
return budgetsBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* List of the budgets owned by the requested billing account.
* </pre>
*
* <code>repeated .google.cloud.billing.budgets.v1.Budget budgets = 1;</code>
*/
public java.util.List<? extends com.google.cloud.billing.budgets.v1.BudgetOrBuilder>
getBudgetsOrBuilderList() {
if (budgetsBuilder_ != null) {
return budgetsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(budgets_);
}
}
/**
*
*
* <pre>
* List of the budgets owned by the requested billing account.
* </pre>
*
* <code>repeated .google.cloud.billing.budgets.v1.Budget budgets = 1;</code>
*/
public com.google.cloud.billing.budgets.v1.Budget.Builder addBudgetsBuilder() {
return getBudgetsFieldBuilder()
.addBuilder(com.google.cloud.billing.budgets.v1.Budget.getDefaultInstance());
}
/**
*
*
* <pre>
* List of the budgets owned by the requested billing account.
* </pre>
*
* <code>repeated .google.cloud.billing.budgets.v1.Budget budgets = 1;</code>
*/
public com.google.cloud.billing.budgets.v1.Budget.Builder addBudgetsBuilder(int index) {
return getBudgetsFieldBuilder()
.addBuilder(index, com.google.cloud.billing.budgets.v1.Budget.getDefaultInstance());
}
/**
*
*
* <pre>
* List of the budgets owned by the requested billing account.
* </pre>
*
* <code>repeated .google.cloud.billing.budgets.v1.Budget budgets = 1;</code>
*/
public java.util.List<com.google.cloud.billing.budgets.v1.Budget.Builder>
getBudgetsBuilderList() {
return getBudgetsFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.billing.budgets.v1.Budget,
com.google.cloud.billing.budgets.v1.Budget.Builder,
com.google.cloud.billing.budgets.v1.BudgetOrBuilder>
getBudgetsFieldBuilder() {
if (budgetsBuilder_ == null) {
budgetsBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.billing.budgets.v1.Budget,
com.google.cloud.billing.budgets.v1.Budget.Builder,
com.google.cloud.billing.budgets.v1.BudgetOrBuilder>(
budgets_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
budgets_ = null;
}
return budgetsBuilder_;
}
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* If not empty, indicates that there may be more budgets that match the
* request; this value should be passed in a new `ListBudgetsRequest`.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* If not empty, indicates that there may be more budgets that match the
* request; this value should be passed in a new `ListBudgetsRequest`.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* If not empty, indicates that there may be more budgets that match the
* request; this value should be passed in a new `ListBudgetsRequest`.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* If not empty, indicates that there may be more budgets that match the
* request; this value should be passed in a new `ListBudgetsRequest`.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* If not empty, indicates that there may be more budgets that match the
* request; this value should be passed in a new `ListBudgetsRequest`.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.billing.budgets.v1.ListBudgetsResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.billing.budgets.v1.ListBudgetsResponse)
private static final com.google.cloud.billing.budgets.v1.ListBudgetsResponse DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.billing.budgets.v1.ListBudgetsResponse();
}
public static com.google.cloud.billing.budgets.v1.ListBudgetsResponse getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ListBudgetsResponse> PARSER =
new com.google.protobuf.AbstractParser<ListBudgetsResponse>() {
@java.lang.Override
public ListBudgetsResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ListBudgetsResponse> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListBudgetsResponse> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.billing.budgets.v1.ListBudgetsResponse getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 36,713 | java-dataform/proto-google-cloud-dataform-v1/src/main/java/com/google/cloud/dataform/v1/PullGitCommitsRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/dataform/v1/dataform.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.dataform.v1;
/**
*
*
* <pre>
* `PullGitCommits` request message.
* </pre>
*
* Protobuf type {@code google.cloud.dataform.v1.PullGitCommitsRequest}
*/
public final class PullGitCommitsRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.dataform.v1.PullGitCommitsRequest)
PullGitCommitsRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use PullGitCommitsRequest.newBuilder() to construct.
private PullGitCommitsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private PullGitCommitsRequest() {
name_ = "";
remoteBranch_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new PullGitCommitsRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.dataform.v1.DataformProto
.internal_static_google_cloud_dataform_v1_PullGitCommitsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.dataform.v1.DataformProto
.internal_static_google_cloud_dataform_v1_PullGitCommitsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.dataform.v1.PullGitCommitsRequest.class,
com.google.cloud.dataform.v1.PullGitCommitsRequest.Builder.class);
}
private int bitField0_;
public static final int NAME_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object name_ = "";
/**
*
*
* <pre>
* Required. The workspace's name.
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The name.
*/
@java.lang.Override
public java.lang.String getName() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
name_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. The workspace's name.
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for name.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int REMOTE_BRANCH_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object remoteBranch_ = "";
/**
*
*
* <pre>
* Optional. The name of the branch in the Git remote from which to pull
* commits. If left unset, the repository's default branch name will be used.
* </pre>
*
* <code>string remote_branch = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The remoteBranch.
*/
@java.lang.Override
public java.lang.String getRemoteBranch() {
java.lang.Object ref = remoteBranch_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
remoteBranch_ = s;
return s;
}
}
/**
*
*
* <pre>
* Optional. The name of the branch in the Git remote from which to pull
* commits. If left unset, the repository's default branch name will be used.
* </pre>
*
* <code>string remote_branch = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for remoteBranch.
*/
@java.lang.Override
public com.google.protobuf.ByteString getRemoteBranchBytes() {
java.lang.Object ref = remoteBranch_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
remoteBranch_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int AUTHOR_FIELD_NUMBER = 3;
private com.google.cloud.dataform.v1.CommitAuthor author_;
/**
*
*
* <pre>
* Required. The author of any merge commit which may be created as a result
* of merging fetched Git commits into this workspace.
* </pre>
*
* <code>
* .google.cloud.dataform.v1.CommitAuthor author = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the author field is set.
*/
@java.lang.Override
public boolean hasAuthor() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The author of any merge commit which may be created as a result
* of merging fetched Git commits into this workspace.
* </pre>
*
* <code>
* .google.cloud.dataform.v1.CommitAuthor author = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The author.
*/
@java.lang.Override
public com.google.cloud.dataform.v1.CommitAuthor getAuthor() {
return author_ == null
? com.google.cloud.dataform.v1.CommitAuthor.getDefaultInstance()
: author_;
}
/**
*
*
* <pre>
* Required. The author of any merge commit which may be created as a result
* of merging fetched Git commits into this workspace.
* </pre>
*
* <code>
* .google.cloud.dataform.v1.CommitAuthor author = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.dataform.v1.CommitAuthorOrBuilder getAuthorOrBuilder() {
return author_ == null
? com.google.cloud.dataform.v1.CommitAuthor.getDefaultInstance()
: author_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(remoteBranch_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, remoteBranch_);
}
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(3, getAuthor());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(remoteBranch_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, remoteBranch_);
}
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getAuthor());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.dataform.v1.PullGitCommitsRequest)) {
return super.equals(obj);
}
com.google.cloud.dataform.v1.PullGitCommitsRequest other =
(com.google.cloud.dataform.v1.PullGitCommitsRequest) obj;
if (!getName().equals(other.getName())) return false;
if (!getRemoteBranch().equals(other.getRemoteBranch())) return false;
if (hasAuthor() != other.hasAuthor()) return false;
if (hasAuthor()) {
if (!getAuthor().equals(other.getAuthor())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + NAME_FIELD_NUMBER;
hash = (53 * hash) + getName().hashCode();
hash = (37 * hash) + REMOTE_BRANCH_FIELD_NUMBER;
hash = (53 * hash) + getRemoteBranch().hashCode();
if (hasAuthor()) {
hash = (37 * hash) + AUTHOR_FIELD_NUMBER;
hash = (53 * hash) + getAuthor().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.dataform.v1.PullGitCommitsRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dataform.v1.PullGitCommitsRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dataform.v1.PullGitCommitsRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dataform.v1.PullGitCommitsRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dataform.v1.PullGitCommitsRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dataform.v1.PullGitCommitsRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dataform.v1.PullGitCommitsRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dataform.v1.PullGitCommitsRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.dataform.v1.PullGitCommitsRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.dataform.v1.PullGitCommitsRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.dataform.v1.PullGitCommitsRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dataform.v1.PullGitCommitsRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.dataform.v1.PullGitCommitsRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* `PullGitCommits` request message.
* </pre>
*
* Protobuf type {@code google.cloud.dataform.v1.PullGitCommitsRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.dataform.v1.PullGitCommitsRequest)
com.google.cloud.dataform.v1.PullGitCommitsRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.dataform.v1.DataformProto
.internal_static_google_cloud_dataform_v1_PullGitCommitsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.dataform.v1.DataformProto
.internal_static_google_cloud_dataform_v1_PullGitCommitsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.dataform.v1.PullGitCommitsRequest.class,
com.google.cloud.dataform.v1.PullGitCommitsRequest.Builder.class);
}
// Construct using com.google.cloud.dataform.v1.PullGitCommitsRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getAuthorFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
name_ = "";
remoteBranch_ = "";
author_ = null;
if (authorBuilder_ != null) {
authorBuilder_.dispose();
authorBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.dataform.v1.DataformProto
.internal_static_google_cloud_dataform_v1_PullGitCommitsRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.dataform.v1.PullGitCommitsRequest getDefaultInstanceForType() {
return com.google.cloud.dataform.v1.PullGitCommitsRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.dataform.v1.PullGitCommitsRequest build() {
com.google.cloud.dataform.v1.PullGitCommitsRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.dataform.v1.PullGitCommitsRequest buildPartial() {
com.google.cloud.dataform.v1.PullGitCommitsRequest result =
new com.google.cloud.dataform.v1.PullGitCommitsRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.dataform.v1.PullGitCommitsRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.name_ = name_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.remoteBranch_ = remoteBranch_;
}
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000004) != 0)) {
result.author_ = authorBuilder_ == null ? author_ : authorBuilder_.build();
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.dataform.v1.PullGitCommitsRequest) {
return mergeFrom((com.google.cloud.dataform.v1.PullGitCommitsRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.dataform.v1.PullGitCommitsRequest other) {
if (other == com.google.cloud.dataform.v1.PullGitCommitsRequest.getDefaultInstance())
return this;
if (!other.getName().isEmpty()) {
name_ = other.name_;
bitField0_ |= 0x00000001;
onChanged();
}
if (!other.getRemoteBranch().isEmpty()) {
remoteBranch_ = other.remoteBranch_;
bitField0_ |= 0x00000002;
onChanged();
}
if (other.hasAuthor()) {
mergeAuthor(other.getAuthor());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
name_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
remoteBranch_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
case 26:
{
input.readMessage(getAuthorFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000004;
break;
} // case 26
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object name_ = "";
/**
*
*
* <pre>
* Required. The workspace's name.
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The name.
*/
public java.lang.String getName() {
java.lang.Object ref = name_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
name_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The workspace's name.
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for name.
*/
public com.google.protobuf.ByteString getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The workspace's name.
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The name to set.
* @return This builder for chaining.
*/
public Builder setName(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
name_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The workspace's name.
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearName() {
name_ = getDefaultInstance().getName();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The workspace's name.
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for name to set.
* @return This builder for chaining.
*/
public Builder setNameBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
name_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private java.lang.Object remoteBranch_ = "";
/**
*
*
* <pre>
* Optional. The name of the branch in the Git remote from which to pull
* commits. If left unset, the repository's default branch name will be used.
* </pre>
*
* <code>string remote_branch = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The remoteBranch.
*/
public java.lang.String getRemoteBranch() {
java.lang.Object ref = remoteBranch_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
remoteBranch_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Optional. The name of the branch in the Git remote from which to pull
* commits. If left unset, the repository's default branch name will be used.
* </pre>
*
* <code>string remote_branch = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for remoteBranch.
*/
public com.google.protobuf.ByteString getRemoteBranchBytes() {
java.lang.Object ref = remoteBranch_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
remoteBranch_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Optional. The name of the branch in the Git remote from which to pull
* commits. If left unset, the repository's default branch name will be used.
* </pre>
*
* <code>string remote_branch = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The remoteBranch to set.
* @return This builder for chaining.
*/
public Builder setRemoteBranch(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
remoteBranch_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. The name of the branch in the Git remote from which to pull
* commits. If left unset, the repository's default branch name will be used.
* </pre>
*
* <code>string remote_branch = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearRemoteBranch() {
remoteBranch_ = getDefaultInstance().getRemoteBranch();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. The name of the branch in the Git remote from which to pull
* commits. If left unset, the repository's default branch name will be used.
* </pre>
*
* <code>string remote_branch = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The bytes for remoteBranch to set.
* @return This builder for chaining.
*/
public Builder setRemoteBranchBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
remoteBranch_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private com.google.cloud.dataform.v1.CommitAuthor author_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.dataform.v1.CommitAuthor,
com.google.cloud.dataform.v1.CommitAuthor.Builder,
com.google.cloud.dataform.v1.CommitAuthorOrBuilder>
authorBuilder_;
/**
*
*
* <pre>
* Required. The author of any merge commit which may be created as a result
* of merging fetched Git commits into this workspace.
* </pre>
*
* <code>
* .google.cloud.dataform.v1.CommitAuthor author = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the author field is set.
*/
public boolean hasAuthor() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
*
*
* <pre>
* Required. The author of any merge commit which may be created as a result
* of merging fetched Git commits into this workspace.
* </pre>
*
* <code>
* .google.cloud.dataform.v1.CommitAuthor author = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The author.
*/
public com.google.cloud.dataform.v1.CommitAuthor getAuthor() {
if (authorBuilder_ == null) {
return author_ == null
? com.google.cloud.dataform.v1.CommitAuthor.getDefaultInstance()
: author_;
} else {
return authorBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. The author of any merge commit which may be created as a result
* of merging fetched Git commits into this workspace.
* </pre>
*
* <code>
* .google.cloud.dataform.v1.CommitAuthor author = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setAuthor(com.google.cloud.dataform.v1.CommitAuthor value) {
if (authorBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
author_ = value;
} else {
authorBuilder_.setMessage(value);
}
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The author of any merge commit which may be created as a result
* of merging fetched Git commits into this workspace.
* </pre>
*
* <code>
* .google.cloud.dataform.v1.CommitAuthor author = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setAuthor(com.google.cloud.dataform.v1.CommitAuthor.Builder builderForValue) {
if (authorBuilder_ == null) {
author_ = builderForValue.build();
} else {
authorBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The author of any merge commit which may be created as a result
* of merging fetched Git commits into this workspace.
* </pre>
*
* <code>
* .google.cloud.dataform.v1.CommitAuthor author = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeAuthor(com.google.cloud.dataform.v1.CommitAuthor value) {
if (authorBuilder_ == null) {
if (((bitField0_ & 0x00000004) != 0)
&& author_ != null
&& author_ != com.google.cloud.dataform.v1.CommitAuthor.getDefaultInstance()) {
getAuthorBuilder().mergeFrom(value);
} else {
author_ = value;
}
} else {
authorBuilder_.mergeFrom(value);
}
if (author_ != null) {
bitField0_ |= 0x00000004;
onChanged();
}
return this;
}
/**
 *
 *
 * <pre>
 * Required. The author of any merge commit which may be created as a result
 * of merging fetched Git commits into this workspace.
 * </pre>
 *
 * <code>
 * .google.cloud.dataform.v1.CommitAuthor author = 3 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
// Clears the author field: resets the presence bit, drops the stored message, and
// disposes any lazily-created field builder.
public Builder clearAuthor() {
  bitField0_ = (bitField0_ & ~0x00000004);
  author_ = null;
  if (authorBuilder_ != null) {
    authorBuilder_.dispose();
    authorBuilder_ = null;
  }
  onChanged();
  return this;
}

/**
 *
 *
 * <pre>
 * Required. The author of any merge commit which may be created as a result
 * of merging fetched Git commits into this workspace.
 * </pre>
 *
 * <code>
 * .google.cloud.dataform.v1.CommitAuthor author = 3 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
// Returns a mutable builder for the nested author message; marks the field present
// because callers typically mutate through the returned builder.
public com.google.cloud.dataform.v1.CommitAuthor.Builder getAuthorBuilder() {
  bitField0_ |= 0x00000004;
  onChanged();
  return getAuthorFieldBuilder().getBuilder();
}

/**
 *
 *
 * <pre>
 * Required. The author of any merge commit which may be created as a result
 * of merging fetched Git commits into this workspace.
 * </pre>
 *
 * <code>
 * .google.cloud.dataform.v1.CommitAuthor author = 3 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
// Read-only view of the author; never returns null (falls back to the default instance).
public com.google.cloud.dataform.v1.CommitAuthorOrBuilder getAuthorOrBuilder() {
  if (authorBuilder_ != null) {
    return authorBuilder_.getMessageOrBuilder();
  } else {
    return author_ == null
        ? com.google.cloud.dataform.v1.CommitAuthor.getDefaultInstance()
        : author_;
  }
}

/**
 *
 *
 * <pre>
 * Required. The author of any merge commit which may be created as a result
 * of merging fetched Git commits into this workspace.
 * </pre>
 *
 * <code>
 * .google.cloud.dataform.v1.CommitAuthor author = 3 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
// Lazily creates the SingleFieldBuilderV3 for the author field. Once created, the
// builder owns the message and the plain author_ reference is nulled out.
private com.google.protobuf.SingleFieldBuilderV3<
        com.google.cloud.dataform.v1.CommitAuthor,
        com.google.cloud.dataform.v1.CommitAuthor.Builder,
        com.google.cloud.dataform.v1.CommitAuthorOrBuilder>
    getAuthorFieldBuilder() {
  if (authorBuilder_ == null) {
    authorBuilder_ =
        new com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.dataform.v1.CommitAuthor,
            com.google.cloud.dataform.v1.CommitAuthor.Builder,
            com.google.cloud.dataform.v1.CommitAuthorOrBuilder>(
            getAuthor(), getParentForChildren(), isClean());
    author_ = null;
  }
  return authorBuilder_;
}
// Standard generated pass-throughs to GeneratedMessageV3.Builder for unknown-field
// handling; declared final so subclasses of the generated Builder cannot alter them.
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.setUnknownFields(unknownFields);
}

@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.dataform.v1.PullGitCommitsRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.dataform.v1.PullGitCommitsRequest)
// Singleton default (all-fields-unset) instance, created eagerly at class load.
private static final com.google.cloud.dataform.v1.PullGitCommitsRequest DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.dataform.v1.PullGitCommitsRequest();
}

public static com.google.cloud.dataform.v1.PullGitCommitsRequest getDefaultInstance() {
  return DEFAULT_INSTANCE;
}

// Wire-format parser. On any parse failure the partially-built message is attached to
// the thrown InvalidProtocolBufferException so callers can inspect what was decoded.
private static final com.google.protobuf.Parser<PullGitCommitsRequest> PARSER =
    new com.google.protobuf.AbstractParser<PullGitCommitsRequest>() {
      @java.lang.Override
      public PullGitCommitsRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          // Missing required fields: surface as InvalidProtocolBufferException per contract.
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          // Wrap plain I/O failures so parsePartialFrom only throws protobuf exceptions.
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

public static com.google.protobuf.Parser<PullGitCommitsRequest> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<PullGitCommitsRequest> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.cloud.dataform.v1.PullGitCommitsRequest getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/discoveryengine/v1/serving_config_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.discoveryengine.v1;
/**
*
*
* <pre>
* Request for UpdateServingConfig method.
* </pre>
*
* Protobuf type {@code google.cloud.discoveryengine.v1.UpdateServingConfigRequest}
*/
public final class UpdateServingConfigRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.discoveryengine.v1.UpdateServingConfigRequest)
UpdateServingConfigRequestOrBuilder {
private static final long serialVersionUID = 0L;

// Use UpdateServingConfigRequest.newBuilder() to construct.
private UpdateServingConfigRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}

// Private no-arg constructor used only for DEFAULT_INSTANCE and reflective instantiation.
private UpdateServingConfigRequest() {}

@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  return new UpdateServingConfigRequest();
}

// Descriptor / field-accessor plumbing tying this class to its .proto definition.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.discoveryengine.v1.ServingConfigServiceProto
      .internal_static_google_cloud_discoveryengine_v1_UpdateServingConfigRequest_descriptor;
}

@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.discoveryengine.v1.ServingConfigServiceProto
      .internal_static_google_cloud_discoveryengine_v1_UpdateServingConfigRequest_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.discoveryengine.v1.UpdateServingConfigRequest.class,
          com.google.cloud.discoveryengine.v1.UpdateServingConfigRequest.Builder.class);
}
// Bit 0x00000001 tracks presence of serving_config, 0x00000002 of update_mask.
private int bitField0_;
public static final int SERVING_CONFIG_FIELD_NUMBER = 1;
private com.google.cloud.discoveryengine.v1.ServingConfig servingConfig_;

/**
 *
 *
 * <pre>
 * Required. The ServingConfig to update.
 * </pre>
 *
 * <code>
 * .google.cloud.discoveryengine.v1.ServingConfig serving_config = 1 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 *
 * @return Whether the servingConfig field is set.
 */
@java.lang.Override
public boolean hasServingConfig() {
  return ((bitField0_ & 0x00000001) != 0);
}

/**
 *
 *
 * <pre>
 * Required. The ServingConfig to update.
 * </pre>
 *
 * <code>
 * .google.cloud.discoveryengine.v1.ServingConfig serving_config = 1 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 *
 * @return The servingConfig.
 */
@java.lang.Override
public com.google.cloud.discoveryengine.v1.ServingConfig getServingConfig() {
  // Never returns null; the default instance stands in for an unset field.
  return servingConfig_ == null
      ? com.google.cloud.discoveryengine.v1.ServingConfig.getDefaultInstance()
      : servingConfig_;
}

/**
 *
 *
 * <pre>
 * Required. The ServingConfig to update.
 * </pre>
 *
 * <code>
 * .google.cloud.discoveryengine.v1.ServingConfig serving_config = 1 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
@java.lang.Override
public com.google.cloud.discoveryengine.v1.ServingConfigOrBuilder getServingConfigOrBuilder() {
  return servingConfig_ == null
      ? com.google.cloud.discoveryengine.v1.ServingConfig.getDefaultInstance()
      : servingConfig_;
}
public static final int UPDATE_MASK_FIELD_NUMBER = 2;
private com.google.protobuf.FieldMask updateMask_;

/**
 *
 *
 * <pre>
 * Indicates which fields in the provided
 * [ServingConfig][google.cloud.discoveryengine.v1.ServingConfig] to update.
 * The following are NOT supported:
 *
 * * [ServingConfig.name][google.cloud.discoveryengine.v1.ServingConfig.name]
 *
 * If not set, all supported fields are updated.
 * </pre>
 *
 * <code>.google.protobuf.FieldMask update_mask = 2;</code>
 *
 * @return Whether the updateMask field is set.
 */
@java.lang.Override
public boolean hasUpdateMask() {
  return ((bitField0_ & 0x00000002) != 0);
}

/**
 *
 *
 * <pre>
 * Indicates which fields in the provided
 * [ServingConfig][google.cloud.discoveryengine.v1.ServingConfig] to update.
 * The following are NOT supported:
 *
 * * [ServingConfig.name][google.cloud.discoveryengine.v1.ServingConfig.name]
 *
 * If not set, all supported fields are updated.
 * </pre>
 *
 * <code>.google.protobuf.FieldMask update_mask = 2;</code>
 *
 * @return The updateMask.
 */
@java.lang.Override
public com.google.protobuf.FieldMask getUpdateMask() {
  // Never returns null; unset is represented by the FieldMask default instance.
  return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}

/**
 *
 *
 * <pre>
 * Indicates which fields in the provided
 * [ServingConfig][google.cloud.discoveryengine.v1.ServingConfig] to update.
 * The following are NOT supported:
 *
 * * [ServingConfig.name][google.cloud.discoveryengine.v1.ServingConfig.name]
 *
 * If not set, all supported fields are updated.
 * </pre>
 *
 * <code>.google.protobuf.FieldMask update_mask = 2;</code>
 */
@java.lang.Override
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
  return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
// Memoized initialization check: -1 unknown, 0 not initialized, 1 initialized.
private byte memoizedIsInitialized = -1;

@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  // proto3 message with no required wire-level fields: always initialized.
  memoizedIsInitialized = 1;
  return true;
}

// Serializes only the fields whose presence bits are set, in field-number order.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  if (((bitField0_ & 0x00000001) != 0)) {
    output.writeMessage(1, getServingConfig());
  }
  if (((bitField0_ & 0x00000002) != 0)) {
    output.writeMessage(2, getUpdateMask());
  }
  getUnknownFields().writeTo(output);
}

// Computes (and memoizes) the serialized byte size; must mirror writeTo exactly.
@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;
  size = 0;
  if (((bitField0_ & 0x00000001) != 0)) {
    size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getServingConfig());
  }
  if (((bitField0_ & 0x00000002) != 0)) {
    size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask());
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}
// Structural equality: presence flags, field values, and unknown fields must all match.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.cloud.discoveryengine.v1.UpdateServingConfigRequest)) {
    return super.equals(obj);
  }
  com.google.cloud.discoveryengine.v1.UpdateServingConfigRequest other =
      (com.google.cloud.discoveryengine.v1.UpdateServingConfigRequest) obj;
  if (hasServingConfig() != other.hasServingConfig()) return false;
  if (hasServingConfig()) {
    if (!getServingConfig().equals(other.getServingConfig())) return false;
  }
  if (hasUpdateMask() != other.hasUpdateMask()) return false;
  if (hasUpdateMask()) {
    if (!getUpdateMask().equals(other.getUpdateMask())) return false;
  }
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}

// Memoized hash consistent with equals; seeds with the descriptor hash so different
// message types with identical field values hash differently.
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  if (hasServingConfig()) {
    hash = (37 * hash) + SERVING_CONFIG_FIELD_NUMBER;
    hash = (53 * hash) + getServingConfig().hashCode();
  }
  if (hasUpdateMask()) {
    hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
    hash = (53 * hash) + getUpdateMask().hashCode();
  }
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
// Standard generated parse entry points; all delegate to PARSER or to the
// GeneratedMessageV3 I/O helpers for the stream-based variants.
public static com.google.cloud.discoveryengine.v1.UpdateServingConfigRequest parseFrom(
    java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.discoveryengine.v1.UpdateServingConfigRequest parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.discoveryengine.v1.UpdateServingConfigRequest parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.discoveryengine.v1.UpdateServingConfigRequest parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.discoveryengine.v1.UpdateServingConfigRequest parseFrom(
    byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.discoveryengine.v1.UpdateServingConfigRequest parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.discoveryengine.v1.UpdateServingConfigRequest parseFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.discoveryengine.v1.UpdateServingConfigRequest parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

// Delimited variants read a varint length prefix before the message payload.
public static com.google.cloud.discoveryengine.v1.UpdateServingConfigRequest parseDelimitedFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}

public static com.google.cloud.discoveryengine.v1.UpdateServingConfigRequest parseDelimitedFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.cloud.discoveryengine.v1.UpdateServingConfigRequest parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.discoveryengine.v1.UpdateServingConfigRequest parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
// Builder factories. toBuilder() avoids a redundant mergeFrom when called on the
// default instance.
@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}

public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}

public static Builder newBuilder(
    com.google.cloud.discoveryengine.v1.UpdateServingConfigRequest prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}

@java.lang.Override
public Builder toBuilder() {
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}

@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
 *
 *
 * <pre>
 * Request for UpdateServingConfig method.
 * </pre>
 *
 * Protobuf type {@code google.cloud.discoveryengine.v1.UpdateServingConfigRequest}
 */
// Generated mutable companion of UpdateServingConfigRequest. Field presence is tracked
// in bitField0_ (0x1 = serving_config, 0x2 = update_mask); message-typed fields use
// lazily-created SingleFieldBuilderV3 instances for nested mutation.
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
    implements
    // @@protoc_insertion_point(builder_implements:google.cloud.discoveryengine.v1.UpdateServingConfigRequest)
    com.google.cloud.discoveryengine.v1.UpdateServingConfigRequestOrBuilder {
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.discoveryengine.v1.ServingConfigServiceProto
        .internal_static_google_cloud_discoveryengine_v1_UpdateServingConfigRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.discoveryengine.v1.ServingConfigServiceProto
        .internal_static_google_cloud_discoveryengine_v1_UpdateServingConfigRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.discoveryengine.v1.UpdateServingConfigRequest.class,
            com.google.cloud.discoveryengine.v1.UpdateServingConfigRequest.Builder.class);
  }

  // Construct using com.google.cloud.discoveryengine.v1.UpdateServingConfigRequest.newBuilder()
  private Builder() {
    maybeForceBuilderInitialization();
  }

  private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    super(parent);
    maybeForceBuilderInitialization();
  }

  // Eagerly creates nested field builders only when the runtime requests it
  // (alwaysUseFieldBuilders is set for nested-builder-heavy code paths).
  private void maybeForceBuilderInitialization() {
    if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
      getServingConfigFieldBuilder();
      getUpdateMaskFieldBuilder();
    }
  }

  // Resets all fields and disposes any nested field builders.
  @java.lang.Override
  public Builder clear() {
    super.clear();
    bitField0_ = 0;
    servingConfig_ = null;
    if (servingConfigBuilder_ != null) {
      servingConfigBuilder_.dispose();
      servingConfigBuilder_ = null;
    }
    updateMask_ = null;
    if (updateMaskBuilder_ != null) {
      updateMaskBuilder_.dispose();
      updateMaskBuilder_ = null;
    }
    return this;
  }

  @java.lang.Override
  public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
    return com.google.cloud.discoveryengine.v1.ServingConfigServiceProto
        .internal_static_google_cloud_discoveryengine_v1_UpdateServingConfigRequest_descriptor;
  }

  @java.lang.Override
  public com.google.cloud.discoveryengine.v1.UpdateServingConfigRequest
      getDefaultInstanceForType() {
    return com.google.cloud.discoveryengine.v1.UpdateServingConfigRequest.getDefaultInstance();
  }

  @java.lang.Override
  public com.google.cloud.discoveryengine.v1.UpdateServingConfigRequest build() {
    com.google.cloud.discoveryengine.v1.UpdateServingConfigRequest result = buildPartial();
    if (!result.isInitialized()) {
      throw newUninitializedMessageException(result);
    }
    return result;
  }

  @java.lang.Override
  public com.google.cloud.discoveryengine.v1.UpdateServingConfigRequest buildPartial() {
    com.google.cloud.discoveryengine.v1.UpdateServingConfigRequest result =
        new com.google.cloud.discoveryengine.v1.UpdateServingConfigRequest(this);
    if (bitField0_ != 0) {
      buildPartial0(result);
    }
    onBuilt();
    return result;
  }

  // Copies set fields from the builder into the freshly constructed message,
  // translating builder presence bits into the message's bitField0_.
  private void buildPartial0(
      com.google.cloud.discoveryengine.v1.UpdateServingConfigRequest result) {
    int from_bitField0_ = bitField0_;
    int to_bitField0_ = 0;
    if (((from_bitField0_ & 0x00000001) != 0)) {
      result.servingConfig_ =
          servingConfigBuilder_ == null ? servingConfig_ : servingConfigBuilder_.build();
      to_bitField0_ |= 0x00000001;
    }
    if (((from_bitField0_ & 0x00000002) != 0)) {
      result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
      to_bitField0_ |= 0x00000002;
    }
    result.bitField0_ |= to_bitField0_;
  }

  @java.lang.Override
  public Builder clone() {
    return super.clone();
  }

  @java.lang.Override
  public Builder setField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.setField(field, value);
  }

  @java.lang.Override
  public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
    return super.clearField(field);
  }

  @java.lang.Override
  public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
    return super.clearOneof(oneof);
  }

  @java.lang.Override
  public Builder setRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
    return super.setRepeatedField(field, index, value);
  }

  @java.lang.Override
  public Builder addRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.addRepeatedField(field, value);
  }

  @java.lang.Override
  public Builder mergeFrom(com.google.protobuf.Message other) {
    if (other instanceof com.google.cloud.discoveryengine.v1.UpdateServingConfigRequest) {
      return mergeFrom((com.google.cloud.discoveryengine.v1.UpdateServingConfigRequest) other);
    } else {
      super.mergeFrom(other);
      return this;
    }
  }

  // Field-wise merge from another UpdateServingConfigRequest; default instance is a no-op.
  public Builder mergeFrom(com.google.cloud.discoveryengine.v1.UpdateServingConfigRequest other) {
    if (other
        == com.google.cloud.discoveryengine.v1.UpdateServingConfigRequest.getDefaultInstance())
      return this;
    if (other.hasServingConfig()) {
      mergeServingConfig(other.getServingConfig());
    }
    if (other.hasUpdateMask()) {
      mergeUpdateMask(other.getUpdateMask());
    }
    this.mergeUnknownFields(other.getUnknownFields());
    onChanged();
    return this;
  }

  @java.lang.Override
  public final boolean isInitialized() {
    return true;
  }

  // Wire-format merge loop: dispatches on tags (field 1 = serving_config, field 2 =
  // update_mask); unrecognized tags go to the unknown-field set.
  @java.lang.Override
  public Builder mergeFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 10:
            {
              input.readMessage(getServingConfigFieldBuilder().getBuilder(), extensionRegistry);
              bitField0_ |= 0x00000001;
              break;
            } // case 10
          case 18:
            {
              input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
              bitField0_ |= 0x00000002;
              break;
            } // case 18
          default:
            {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
        } // switch (tag)
      } // while (!done)
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.unwrapIOException();
    } finally {
      onChanged();
    } // finally
    return this;
  }

  private int bitField0_;

  private com.google.cloud.discoveryengine.v1.ServingConfig servingConfig_;
  private com.google.protobuf.SingleFieldBuilderV3<
          com.google.cloud.discoveryengine.v1.ServingConfig,
          com.google.cloud.discoveryengine.v1.ServingConfig.Builder,
          com.google.cloud.discoveryengine.v1.ServingConfigOrBuilder>
      servingConfigBuilder_;

  /**
   *
   *
   * <pre>
   * Required. The ServingConfig to update.
   * </pre>
   *
   * <code>
   * .google.cloud.discoveryengine.v1.ServingConfig serving_config = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return Whether the servingConfig field is set.
   */
  public boolean hasServingConfig() {
    return ((bitField0_ & 0x00000001) != 0);
  }

  /**
   *
   *
   * <pre>
   * Required. The ServingConfig to update.
   * </pre>
   *
   * <code>
   * .google.cloud.discoveryengine.v1.ServingConfig serving_config = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The servingConfig.
   */
  public com.google.cloud.discoveryengine.v1.ServingConfig getServingConfig() {
    if (servingConfigBuilder_ == null) {
      return servingConfig_ == null
          ? com.google.cloud.discoveryengine.v1.ServingConfig.getDefaultInstance()
          : servingConfig_;
    } else {
      return servingConfigBuilder_.getMessage();
    }
  }

  /**
   *
   *
   * <pre>
   * Required. The ServingConfig to update.
   * </pre>
   *
   * <code>
   * .google.cloud.discoveryengine.v1.ServingConfig serving_config = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  public Builder setServingConfig(com.google.cloud.discoveryengine.v1.ServingConfig value) {
    if (servingConfigBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      servingConfig_ = value;
    } else {
      servingConfigBuilder_.setMessage(value);
    }
    bitField0_ |= 0x00000001;
    onChanged();
    return this;
  }

  /**
   *
   *
   * <pre>
   * Required. The ServingConfig to update.
   * </pre>
   *
   * <code>
   * .google.cloud.discoveryengine.v1.ServingConfig serving_config = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  public Builder setServingConfig(
      com.google.cloud.discoveryengine.v1.ServingConfig.Builder builderForValue) {
    if (servingConfigBuilder_ == null) {
      servingConfig_ = builderForValue.build();
    } else {
      servingConfigBuilder_.setMessage(builderForValue.build());
    }
    bitField0_ |= 0x00000001;
    onChanged();
    return this;
  }

  /**
   *
   *
   * <pre>
   * Required. The ServingConfig to update.
   * </pre>
   *
   * <code>
   * .google.cloud.discoveryengine.v1.ServingConfig serving_config = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  // proto3 merge semantics: merge into an existing non-default value, else replace.
  public Builder mergeServingConfig(com.google.cloud.discoveryengine.v1.ServingConfig value) {
    if (servingConfigBuilder_ == null) {
      if (((bitField0_ & 0x00000001) != 0)
          && servingConfig_ != null
          && servingConfig_
              != com.google.cloud.discoveryengine.v1.ServingConfig.getDefaultInstance()) {
        getServingConfigBuilder().mergeFrom(value);
      } else {
        servingConfig_ = value;
      }
    } else {
      servingConfigBuilder_.mergeFrom(value);
    }
    if (servingConfig_ != null) {
      bitField0_ |= 0x00000001;
      onChanged();
    }
    return this;
  }

  /**
   *
   *
   * <pre>
   * Required. The ServingConfig to update.
   * </pre>
   *
   * <code>
   * .google.cloud.discoveryengine.v1.ServingConfig serving_config = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  public Builder clearServingConfig() {
    bitField0_ = (bitField0_ & ~0x00000001);
    servingConfig_ = null;
    if (servingConfigBuilder_ != null) {
      servingConfigBuilder_.dispose();
      servingConfigBuilder_ = null;
    }
    onChanged();
    return this;
  }

  /**
   *
   *
   * <pre>
   * Required. The ServingConfig to update.
   * </pre>
   *
   * <code>
   * .google.cloud.discoveryengine.v1.ServingConfig serving_config = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  public com.google.cloud.discoveryengine.v1.ServingConfig.Builder getServingConfigBuilder() {
    bitField0_ |= 0x00000001;
    onChanged();
    return getServingConfigFieldBuilder().getBuilder();
  }

  /**
   *
   *
   * <pre>
   * Required. The ServingConfig to update.
   * </pre>
   *
   * <code>
   * .google.cloud.discoveryengine.v1.ServingConfig serving_config = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  public com.google.cloud.discoveryengine.v1.ServingConfigOrBuilder getServingConfigOrBuilder() {
    if (servingConfigBuilder_ != null) {
      return servingConfigBuilder_.getMessageOrBuilder();
    } else {
      return servingConfig_ == null
          ? com.google.cloud.discoveryengine.v1.ServingConfig.getDefaultInstance()
          : servingConfig_;
    }
  }

  /**
   *
   *
   * <pre>
   * Required. The ServingConfig to update.
   * </pre>
   *
   * <code>
   * .google.cloud.discoveryengine.v1.ServingConfig serving_config = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  // Lazily creates the nested field builder; afterwards the builder owns the message
  // and the plain servingConfig_ reference is nulled out.
  private com.google.protobuf.SingleFieldBuilderV3<
          com.google.cloud.discoveryengine.v1.ServingConfig,
          com.google.cloud.discoveryengine.v1.ServingConfig.Builder,
          com.google.cloud.discoveryengine.v1.ServingConfigOrBuilder>
      getServingConfigFieldBuilder() {
    if (servingConfigBuilder_ == null) {
      servingConfigBuilder_ =
          new com.google.protobuf.SingleFieldBuilderV3<
              com.google.cloud.discoveryengine.v1.ServingConfig,
              com.google.cloud.discoveryengine.v1.ServingConfig.Builder,
              com.google.cloud.discoveryengine.v1.ServingConfigOrBuilder>(
              getServingConfig(), getParentForChildren(), isClean());
      servingConfig_ = null;
    }
    return servingConfigBuilder_;
  }

  private com.google.protobuf.FieldMask updateMask_;
  private com.google.protobuf.SingleFieldBuilderV3<
          com.google.protobuf.FieldMask,
          com.google.protobuf.FieldMask.Builder,
          com.google.protobuf.FieldMaskOrBuilder>
      updateMaskBuilder_;

  /**
   *
   *
   * <pre>
   * Indicates which fields in the provided
   * [ServingConfig][google.cloud.discoveryengine.v1.ServingConfig] to update.
   * The following are NOT supported:
   *
   * * [ServingConfig.name][google.cloud.discoveryengine.v1.ServingConfig.name]
   *
   * If not set, all supported fields are updated.
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 2;</code>
   *
   * @return Whether the updateMask field is set.
   */
  public boolean hasUpdateMask() {
    return ((bitField0_ & 0x00000002) != 0);
  }

  /**
   *
   *
   * <pre>
   * Indicates which fields in the provided
   * [ServingConfig][google.cloud.discoveryengine.v1.ServingConfig] to update.
   * The following are NOT supported:
   *
   * * [ServingConfig.name][google.cloud.discoveryengine.v1.ServingConfig.name]
   *
   * If not set, all supported fields are updated.
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 2;</code>
   *
   * @return The updateMask.
   */
  public com.google.protobuf.FieldMask getUpdateMask() {
    if (updateMaskBuilder_ == null) {
      return updateMask_ == null
          ? com.google.protobuf.FieldMask.getDefaultInstance()
          : updateMask_;
    } else {
      return updateMaskBuilder_.getMessage();
    }
  }

  /**
   *
   *
   * <pre>
   * Indicates which fields in the provided
   * [ServingConfig][google.cloud.discoveryengine.v1.ServingConfig] to update.
   * The following are NOT supported:
   *
   * * [ServingConfig.name][google.cloud.discoveryengine.v1.ServingConfig.name]
   *
   * If not set, all supported fields are updated.
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 2;</code>
   */
  public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
    if (updateMaskBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      updateMask_ = value;
    } else {
      updateMaskBuilder_.setMessage(value);
    }
    bitField0_ |= 0x00000002;
    onChanged();
    return this;
  }

  /**
   *
   *
   * <pre>
   * Indicates which fields in the provided
   * [ServingConfig][google.cloud.discoveryengine.v1.ServingConfig] to update.
   * The following are NOT supported:
   *
   * * [ServingConfig.name][google.cloud.discoveryengine.v1.ServingConfig.name]
   *
   * If not set, all supported fields are updated.
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 2;</code>
   */
  public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
    if (updateMaskBuilder_ == null) {
      updateMask_ = builderForValue.build();
    } else {
      updateMaskBuilder_.setMessage(builderForValue.build());
    }
    bitField0_ |= 0x00000002;
    onChanged();
    return this;
  }

  /**
   *
   *
   * <pre>
   * Indicates which fields in the provided
   * [ServingConfig][google.cloud.discoveryengine.v1.ServingConfig] to update.
   * The following are NOT supported:
   *
   * * [ServingConfig.name][google.cloud.discoveryengine.v1.ServingConfig.name]
   *
   * If not set, all supported fields are updated.
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 2;</code>
   */
  // proto3 merge semantics: merge into an existing non-default value, else replace.
  public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
    if (updateMaskBuilder_ == null) {
      if (((bitField0_ & 0x00000002) != 0)
          && updateMask_ != null
          && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
        getUpdateMaskBuilder().mergeFrom(value);
      } else {
        updateMask_ = value;
      }
    } else {
      updateMaskBuilder_.mergeFrom(value);
    }
    if (updateMask_ != null) {
      bitField0_ |= 0x00000002;
      onChanged();
    }
    return this;
  }

  /**
   *
   *
   * <pre>
   * Indicates which fields in the provided
   * [ServingConfig][google.cloud.discoveryengine.v1.ServingConfig] to update.
   * The following are NOT supported:
   *
   * * [ServingConfig.name][google.cloud.discoveryengine.v1.ServingConfig.name]
   *
   * If not set, all supported fields are updated.
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 2;</code>
   */
  public Builder clearUpdateMask() {
    bitField0_ = (bitField0_ & ~0x00000002);
    updateMask_ = null;
    if (updateMaskBuilder_ != null) {
      updateMaskBuilder_.dispose();
      updateMaskBuilder_ = null;
    }
    onChanged();
    return this;
  }

  /**
   *
   *
   * <pre>
   * Indicates which fields in the provided
   * [ServingConfig][google.cloud.discoveryengine.v1.ServingConfig] to update.
   * The following are NOT supported:
   *
   * * [ServingConfig.name][google.cloud.discoveryengine.v1.ServingConfig.name]
   *
   * If not set, all supported fields are updated.
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 2;</code>
   */
  public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
    bitField0_ |= 0x00000002;
    onChanged();
    return getUpdateMaskFieldBuilder().getBuilder();
  }

  /**
   *
   *
   * <pre>
   * Indicates which fields in the provided
   * [ServingConfig][google.cloud.discoveryengine.v1.ServingConfig] to update.
   * The following are NOT supported:
   *
   * * [ServingConfig.name][google.cloud.discoveryengine.v1.ServingConfig.name]
   *
   * If not set, all supported fields are updated.
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 2;</code>
   */
  public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
    if (updateMaskBuilder_ != null) {
      return updateMaskBuilder_.getMessageOrBuilder();
    } else {
      return updateMask_ == null
          ? com.google.protobuf.FieldMask.getDefaultInstance()
          : updateMask_;
    }
  }

  /**
   *
   *
   * <pre>
   * Indicates which fields in the provided
   * [ServingConfig][google.cloud.discoveryengine.v1.ServingConfig] to update.
   * The following are NOT supported:
   *
   * * [ServingConfig.name][google.cloud.discoveryengine.v1.ServingConfig.name]
   *
   * If not set, all supported fields are updated.
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 2;</code>
   */
  // Lazily creates the nested field builder for update_mask; afterwards the builder
  // owns the message and the plain updateMask_ reference is nulled out.
  private com.google.protobuf.SingleFieldBuilderV3<
          com.google.protobuf.FieldMask,
          com.google.protobuf.FieldMask.Builder,
          com.google.protobuf.FieldMaskOrBuilder>
      getUpdateMaskFieldBuilder() {
    if (updateMaskBuilder_ == null) {
      updateMaskBuilder_ =
          new com.google.protobuf.SingleFieldBuilderV3<
              com.google.protobuf.FieldMask,
              com.google.protobuf.FieldMask.Builder,
              com.google.protobuf.FieldMaskOrBuilder>(
              getUpdateMask(), getParentForChildren(), isClean());
      updateMask_ = null;
    }
    return updateMaskBuilder_;
  }

  @java.lang.Override
  public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.setUnknownFields(unknownFields);
  }

  @java.lang.Override
  public final Builder mergeUnknownFields(
      final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.mergeUnknownFields(unknownFields);
  }

  // @@protoc_insertion_point(builder_scope:google.cloud.discoveryengine.v1.UpdateServingConfigRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.discoveryengine.v1.UpdateServingConfigRequest)
private static final com.google.cloud.discoveryengine.v1.UpdateServingConfigRequest
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.discoveryengine.v1.UpdateServingConfigRequest();
}
public static com.google.cloud.discoveryengine.v1.UpdateServingConfigRequest
getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<UpdateServingConfigRequest> PARSER =
new com.google.protobuf.AbstractParser<UpdateServingConfigRequest>() {
@java.lang.Override
public UpdateServingConfigRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<UpdateServingConfigRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<UpdateServingConfigRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.discoveryengine.v1.UpdateServingConfigRequest
getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 36,717 | java-recommender/proto-google-cloud-recommender-v1beta1/src/main/java/com/google/cloud/recommender/v1beta1/ListInsightsResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/recommender/v1beta1/recommender_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.recommender.v1beta1;
/**
*
*
* <pre>
* Response to the `ListInsights` method.
* </pre>
*
* Protobuf type {@code google.cloud.recommender.v1beta1.ListInsightsResponse}
*/
public final class ListInsightsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.recommender.v1beta1.ListInsightsResponse)
ListInsightsResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListInsightsResponse.newBuilder() to construct.
private ListInsightsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListInsightsResponse() {
insights_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListInsightsResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.recommender.v1beta1.RecommenderProto
.internal_static_google_cloud_recommender_v1beta1_ListInsightsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.recommender.v1beta1.RecommenderProto
.internal_static_google_cloud_recommender_v1beta1_ListInsightsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.recommender.v1beta1.ListInsightsResponse.class,
com.google.cloud.recommender.v1beta1.ListInsightsResponse.Builder.class);
}
public static final int INSIGHTS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.recommender.v1beta1.Insight> insights_;
/**
*
*
* <pre>
* The set of insights for the `parent` resource.
* </pre>
*
* <code>repeated .google.cloud.recommender.v1beta1.Insight insights = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.recommender.v1beta1.Insight> getInsightsList() {
return insights_;
}
/**
*
*
* <pre>
* The set of insights for the `parent` resource.
* </pre>
*
* <code>repeated .google.cloud.recommender.v1beta1.Insight insights = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.recommender.v1beta1.InsightOrBuilder>
getInsightsOrBuilderList() {
return insights_;
}
/**
*
*
* <pre>
* The set of insights for the `parent` resource.
* </pre>
*
* <code>repeated .google.cloud.recommender.v1beta1.Insight insights = 1;</code>
*/
@java.lang.Override
public int getInsightsCount() {
return insights_.size();
}
/**
*
*
* <pre>
* The set of insights for the `parent` resource.
* </pre>
*
* <code>repeated .google.cloud.recommender.v1beta1.Insight insights = 1;</code>
*/
@java.lang.Override
public com.google.cloud.recommender.v1beta1.Insight getInsights(int index) {
return insights_.get(index);
}
/**
*
*
* <pre>
* The set of insights for the `parent` resource.
* </pre>
*
* <code>repeated .google.cloud.recommender.v1beta1.Insight insights = 1;</code>
*/
@java.lang.Override
public com.google.cloud.recommender.v1beta1.InsightOrBuilder getInsightsOrBuilder(int index) {
return insights_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token that can be used to request the next page of results. This field is
* empty if there are no additional results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* A token that can be used to request the next page of results. This field is
* empty if there are no additional results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < insights_.size(); i++) {
output.writeMessage(1, insights_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < insights_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, insights_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.recommender.v1beta1.ListInsightsResponse)) {
return super.equals(obj);
}
com.google.cloud.recommender.v1beta1.ListInsightsResponse other =
(com.google.cloud.recommender.v1beta1.ListInsightsResponse) obj;
if (!getInsightsList().equals(other.getInsightsList())) return false;
if (!getNextPageToken().equals(other.getNextPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getInsightsCount() > 0) {
hash = (37 * hash) + INSIGHTS_FIELD_NUMBER;
hash = (53 * hash) + getInsightsList().hashCode();
}
hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getNextPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.recommender.v1beta1.ListInsightsResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.recommender.v1beta1.ListInsightsResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.recommender.v1beta1.ListInsightsResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.recommender.v1beta1.ListInsightsResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.recommender.v1beta1.ListInsightsResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.recommender.v1beta1.ListInsightsResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.recommender.v1beta1.ListInsightsResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.recommender.v1beta1.ListInsightsResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.recommender.v1beta1.ListInsightsResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.recommender.v1beta1.ListInsightsResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.recommender.v1beta1.ListInsightsResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.recommender.v1beta1.ListInsightsResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.recommender.v1beta1.ListInsightsResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Response to the `ListInsights` method.
* </pre>
*
* Protobuf type {@code google.cloud.recommender.v1beta1.ListInsightsResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.recommender.v1beta1.ListInsightsResponse)
com.google.cloud.recommender.v1beta1.ListInsightsResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.recommender.v1beta1.RecommenderProto
.internal_static_google_cloud_recommender_v1beta1_ListInsightsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.recommender.v1beta1.RecommenderProto
.internal_static_google_cloud_recommender_v1beta1_ListInsightsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.recommender.v1beta1.ListInsightsResponse.class,
com.google.cloud.recommender.v1beta1.ListInsightsResponse.Builder.class);
}
// Construct using com.google.cloud.recommender.v1beta1.ListInsightsResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (insightsBuilder_ == null) {
insights_ = java.util.Collections.emptyList();
} else {
insights_ = null;
insightsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.recommender.v1beta1.RecommenderProto
.internal_static_google_cloud_recommender_v1beta1_ListInsightsResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.recommender.v1beta1.ListInsightsResponse getDefaultInstanceForType() {
return com.google.cloud.recommender.v1beta1.ListInsightsResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.recommender.v1beta1.ListInsightsResponse build() {
com.google.cloud.recommender.v1beta1.ListInsightsResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.recommender.v1beta1.ListInsightsResponse buildPartial() {
com.google.cloud.recommender.v1beta1.ListInsightsResponse result =
new com.google.cloud.recommender.v1beta1.ListInsightsResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.cloud.recommender.v1beta1.ListInsightsResponse result) {
if (insightsBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
insights_ = java.util.Collections.unmodifiableList(insights_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.insights_ = insights_;
} else {
result.insights_ = insightsBuilder_.build();
}
}
private void buildPartial0(com.google.cloud.recommender.v1beta1.ListInsightsResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.recommender.v1beta1.ListInsightsResponse) {
return mergeFrom((com.google.cloud.recommender.v1beta1.ListInsightsResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.recommender.v1beta1.ListInsightsResponse other) {
if (other == com.google.cloud.recommender.v1beta1.ListInsightsResponse.getDefaultInstance())
return this;
if (insightsBuilder_ == null) {
if (!other.insights_.isEmpty()) {
if (insights_.isEmpty()) {
insights_ = other.insights_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureInsightsIsMutable();
insights_.addAll(other.insights_);
}
onChanged();
}
} else {
if (!other.insights_.isEmpty()) {
if (insightsBuilder_.isEmpty()) {
insightsBuilder_.dispose();
insightsBuilder_ = null;
insights_ = other.insights_;
bitField0_ = (bitField0_ & ~0x00000001);
insightsBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getInsightsFieldBuilder()
: null;
} else {
insightsBuilder_.addAllMessages(other.insights_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.cloud.recommender.v1beta1.Insight m =
input.readMessage(
com.google.cloud.recommender.v1beta1.Insight.parser(), extensionRegistry);
if (insightsBuilder_ == null) {
ensureInsightsIsMutable();
insights_.add(m);
} else {
insightsBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.cloud.recommender.v1beta1.Insight> insights_ =
java.util.Collections.emptyList();
private void ensureInsightsIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
insights_ =
new java.util.ArrayList<com.google.cloud.recommender.v1beta1.Insight>(insights_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.recommender.v1beta1.Insight,
com.google.cloud.recommender.v1beta1.Insight.Builder,
com.google.cloud.recommender.v1beta1.InsightOrBuilder>
insightsBuilder_;
/**
*
*
* <pre>
* The set of insights for the `parent` resource.
* </pre>
*
* <code>repeated .google.cloud.recommender.v1beta1.Insight insights = 1;</code>
*/
public java.util.List<com.google.cloud.recommender.v1beta1.Insight> getInsightsList() {
if (insightsBuilder_ == null) {
return java.util.Collections.unmodifiableList(insights_);
} else {
return insightsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* The set of insights for the `parent` resource.
* </pre>
*
* <code>repeated .google.cloud.recommender.v1beta1.Insight insights = 1;</code>
*/
public int getInsightsCount() {
if (insightsBuilder_ == null) {
return insights_.size();
} else {
return insightsBuilder_.getCount();
}
}
/**
*
*
* <pre>
* The set of insights for the `parent` resource.
* </pre>
*
* <code>repeated .google.cloud.recommender.v1beta1.Insight insights = 1;</code>
*/
public com.google.cloud.recommender.v1beta1.Insight getInsights(int index) {
if (insightsBuilder_ == null) {
return insights_.get(index);
} else {
return insightsBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* The set of insights for the `parent` resource.
* </pre>
*
* <code>repeated .google.cloud.recommender.v1beta1.Insight insights = 1;</code>
*/
public Builder setInsights(int index, com.google.cloud.recommender.v1beta1.Insight value) {
if (insightsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureInsightsIsMutable();
insights_.set(index, value);
onChanged();
} else {
insightsBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The set of insights for the `parent` resource.
* </pre>
*
* <code>repeated .google.cloud.recommender.v1beta1.Insight insights = 1;</code>
*/
public Builder setInsights(
int index, com.google.cloud.recommender.v1beta1.Insight.Builder builderForValue) {
if (insightsBuilder_ == null) {
ensureInsightsIsMutable();
insights_.set(index, builderForValue.build());
onChanged();
} else {
insightsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The set of insights for the `parent` resource.
* </pre>
*
* <code>repeated .google.cloud.recommender.v1beta1.Insight insights = 1;</code>
*/
public Builder addInsights(com.google.cloud.recommender.v1beta1.Insight value) {
if (insightsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureInsightsIsMutable();
insights_.add(value);
onChanged();
} else {
insightsBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* The set of insights for the `parent` resource.
* </pre>
*
* <code>repeated .google.cloud.recommender.v1beta1.Insight insights = 1;</code>
*/
public Builder addInsights(int index, com.google.cloud.recommender.v1beta1.Insight value) {
if (insightsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureInsightsIsMutable();
insights_.add(index, value);
onChanged();
} else {
insightsBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The set of insights for the `parent` resource.
* </pre>
*
* <code>repeated .google.cloud.recommender.v1beta1.Insight insights = 1;</code>
*/
public Builder addInsights(
com.google.cloud.recommender.v1beta1.Insight.Builder builderForValue) {
if (insightsBuilder_ == null) {
ensureInsightsIsMutable();
insights_.add(builderForValue.build());
onChanged();
} else {
insightsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The set of insights for the `parent` resource.
* </pre>
*
* <code>repeated .google.cloud.recommender.v1beta1.Insight insights = 1;</code>
*/
public Builder addInsights(
int index, com.google.cloud.recommender.v1beta1.Insight.Builder builderForValue) {
if (insightsBuilder_ == null) {
ensureInsightsIsMutable();
insights_.add(index, builderForValue.build());
onChanged();
} else {
insightsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The set of insights for the `parent` resource.
* </pre>
*
* <code>repeated .google.cloud.recommender.v1beta1.Insight insights = 1;</code>
*/
public Builder addAllInsights(
java.lang.Iterable<? extends com.google.cloud.recommender.v1beta1.Insight> values) {
if (insightsBuilder_ == null) {
ensureInsightsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, insights_);
onChanged();
} else {
insightsBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* The set of insights for the `parent` resource.
* </pre>
*
* <code>repeated .google.cloud.recommender.v1beta1.Insight insights = 1;</code>
*/
public Builder clearInsights() {
if (insightsBuilder_ == null) {
insights_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
insightsBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* The set of insights for the `parent` resource.
* </pre>
*
* <code>repeated .google.cloud.recommender.v1beta1.Insight insights = 1;</code>
*/
public Builder removeInsights(int index) {
if (insightsBuilder_ == null) {
ensureInsightsIsMutable();
insights_.remove(index);
onChanged();
} else {
insightsBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* The set of insights for the `parent` resource.
* </pre>
*
* <code>repeated .google.cloud.recommender.v1beta1.Insight insights = 1;</code>
*/
public com.google.cloud.recommender.v1beta1.Insight.Builder getInsightsBuilder(int index) {
return getInsightsFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* The set of insights for the `parent` resource.
* </pre>
*
* <code>repeated .google.cloud.recommender.v1beta1.Insight insights = 1;</code>
*/
public com.google.cloud.recommender.v1beta1.InsightOrBuilder getInsightsOrBuilder(int index) {
if (insightsBuilder_ == null) {
return insights_.get(index);
} else {
return insightsBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* The set of insights for the `parent` resource.
* </pre>
*
* <code>repeated .google.cloud.recommender.v1beta1.Insight insights = 1;</code>
*/
public java.util.List<? extends com.google.cloud.recommender.v1beta1.InsightOrBuilder>
getInsightsOrBuilderList() {
if (insightsBuilder_ != null) {
return insightsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(insights_);
}
}
/**
*
*
* <pre>
* The set of insights for the `parent` resource.
* </pre>
*
* <code>repeated .google.cloud.recommender.v1beta1.Insight insights = 1;</code>
*/
public com.google.cloud.recommender.v1beta1.Insight.Builder addInsightsBuilder() {
return getInsightsFieldBuilder()
.addBuilder(com.google.cloud.recommender.v1beta1.Insight.getDefaultInstance());
}
/**
*
*
* <pre>
* The set of insights for the `parent` resource.
* </pre>
*
* <code>repeated .google.cloud.recommender.v1beta1.Insight insights = 1;</code>
*/
public com.google.cloud.recommender.v1beta1.Insight.Builder addInsightsBuilder(int index) {
return getInsightsFieldBuilder()
.addBuilder(index, com.google.cloud.recommender.v1beta1.Insight.getDefaultInstance());
}
/**
*
*
* <pre>
* The set of insights for the `parent` resource.
* </pre>
*
* <code>repeated .google.cloud.recommender.v1beta1.Insight insights = 1;</code>
*/
public java.util.List<com.google.cloud.recommender.v1beta1.Insight.Builder>
getInsightsBuilderList() {
return getInsightsFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.recommender.v1beta1.Insight,
com.google.cloud.recommender.v1beta1.Insight.Builder,
com.google.cloud.recommender.v1beta1.InsightOrBuilder>
getInsightsFieldBuilder() {
if (insightsBuilder_ == null) {
insightsBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.recommender.v1beta1.Insight,
com.google.cloud.recommender.v1beta1.Insight.Builder,
com.google.cloud.recommender.v1beta1.InsightOrBuilder>(
insights_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
insights_ = null;
}
return insightsBuilder_;
}
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token that can be used to request the next page of results. This field is
* empty if there are no additional results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* A token that can be used to request the next page of results. This field is
* empty if there are no additional results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* A token that can be used to request the next page of results. This field is
* empty if there are no additional results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* A token that can be used to request the next page of results. This field is
* empty if there are no additional results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* A token that can be used to request the next page of results. This field is
* empty if there are no additional results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.recommender.v1beta1.ListInsightsResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.recommender.v1beta1.ListInsightsResponse)
private static final com.google.cloud.recommender.v1beta1.ListInsightsResponse DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.recommender.v1beta1.ListInsightsResponse();
}
public static com.google.cloud.recommender.v1beta1.ListInsightsResponse getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ListInsightsResponse> PARSER =
new com.google.protobuf.AbstractParser<ListInsightsResponse>() {
@java.lang.Override
public ListInsightsResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ListInsightsResponse> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListInsightsResponse> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.recommender.v1beta1.ListInsightsResponse getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 36,735 | java-redis/proto-google-cloud-redis-v1beta1/src/main/java/com/google/cloud/redis/v1beta1/WeeklyMaintenanceWindow.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/redis/v1beta1/cloud_redis.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.redis.v1beta1;
/**
*
*
* <pre>
* Time window in which disruptive maintenance updates occur. Non-disruptive
* updates can occur inside or outside this window.
* </pre>
*
* Protobuf type {@code google.cloud.redis.v1beta1.WeeklyMaintenanceWindow}
*/
public final class WeeklyMaintenanceWindow extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.redis.v1beta1.WeeklyMaintenanceWindow)
WeeklyMaintenanceWindowOrBuilder {
private static final long serialVersionUID = 0L;
// Use WeeklyMaintenanceWindow.newBuilder() to construct.
private WeeklyMaintenanceWindow(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private WeeklyMaintenanceWindow() {
day_ = 0;
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new WeeklyMaintenanceWindow();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.redis.v1beta1.CloudRedisServiceBetaProto
.internal_static_google_cloud_redis_v1beta1_WeeklyMaintenanceWindow_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.redis.v1beta1.CloudRedisServiceBetaProto
.internal_static_google_cloud_redis_v1beta1_WeeklyMaintenanceWindow_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.redis.v1beta1.WeeklyMaintenanceWindow.class,
com.google.cloud.redis.v1beta1.WeeklyMaintenanceWindow.Builder.class);
}
private int bitField0_;
public static final int DAY_FIELD_NUMBER = 1;
private int day_ = 0;
/**
*
*
* <pre>
* Required. The day of week that maintenance updates occur.
* </pre>
*
* <code>.google.type.DayOfWeek day = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The enum numeric value on the wire for day.
*/
@java.lang.Override
public int getDayValue() {
return day_;
}
/**
*
*
* <pre>
* Required. The day of week that maintenance updates occur.
* </pre>
*
* <code>.google.type.DayOfWeek day = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The day.
*/
@java.lang.Override
public com.google.type.DayOfWeek getDay() {
com.google.type.DayOfWeek result = com.google.type.DayOfWeek.forNumber(day_);
return result == null ? com.google.type.DayOfWeek.UNRECOGNIZED : result;
}
public static final int START_TIME_FIELD_NUMBER = 2;
private com.google.type.TimeOfDay startTime_;
/**
*
*
* <pre>
* Required. Start time of the window in UTC time.
* </pre>
*
* <code>.google.type.TimeOfDay start_time = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return Whether the startTime field is set.
*/
@java.lang.Override
public boolean hasStartTime() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. Start time of the window in UTC time.
* </pre>
*
* <code>.google.type.TimeOfDay start_time = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The startTime.
*/
@java.lang.Override
public com.google.type.TimeOfDay getStartTime() {
return startTime_ == null ? com.google.type.TimeOfDay.getDefaultInstance() : startTime_;
}
/**
*
*
* <pre>
* Required. Start time of the window in UTC time.
* </pre>
*
* <code>.google.type.TimeOfDay start_time = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
@java.lang.Override
public com.google.type.TimeOfDayOrBuilder getStartTimeOrBuilder() {
return startTime_ == null ? com.google.type.TimeOfDay.getDefaultInstance() : startTime_;
}
public static final int DURATION_FIELD_NUMBER = 3;
private com.google.protobuf.Duration duration_;
/**
*
*
* <pre>
* Output only. Duration of the maintenance window. The current window is fixed at 1 hour.
* </pre>
*
* <code>.google.protobuf.Duration duration = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return Whether the duration field is set.
*/
@java.lang.Override
public boolean hasDuration() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Output only. Duration of the maintenance window. The current window is fixed at 1 hour.
* </pre>
*
* <code>.google.protobuf.Duration duration = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return The duration.
*/
@java.lang.Override
public com.google.protobuf.Duration getDuration() {
return duration_ == null ? com.google.protobuf.Duration.getDefaultInstance() : duration_;
}
/**
*
*
* <pre>
* Output only. Duration of the maintenance window. The current window is fixed at 1 hour.
* </pre>
*
* <code>.google.protobuf.Duration duration = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
@java.lang.Override
public com.google.protobuf.DurationOrBuilder getDurationOrBuilder() {
return duration_ == null ? com.google.protobuf.Duration.getDefaultInstance() : duration_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
// Proto3 enum field: only serialized when it differs from the zero/default value.
if (day_ != com.google.type.DayOfWeek.DAY_OF_WEEK_UNSPECIFIED.getNumber()) {
output.writeEnum(1, day_);
}
// Message fields use explicit presence tracked in bitField0_.
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(2, getStartTime());
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeMessage(3, getDuration());
}
// Round-trip any fields unknown to this generated version.
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
// Return the memoized size when already computed (-1 is the "unset" sentinel).
int size = memoizedSize;
if (size != -1) return size;
size = 0;
// Size computation mirrors writeTo(): default enum value and absent messages
// contribute no bytes.
if (day_ != com.google.type.DayOfWeek.DAY_OF_WEEK_UNSPECIFIED.getNumber()) {
size += com.google.protobuf.CodedOutputStream.computeEnumSize(1, day_);
}
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getStartTime());
}
if (((bitField0_ & 0x00000002) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getDuration());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
// Other message types defer to the superclass field-by-field comparison.
if (!(obj instanceof com.google.cloud.redis.v1beta1.WeeklyMaintenanceWindow)) {
return super.equals(obj);
}
com.google.cloud.redis.v1beta1.WeeklyMaintenanceWindow other =
(com.google.cloud.redis.v1beta1.WeeklyMaintenanceWindow) obj;
if (day_ != other.day_) return false;
// Message fields: presence must match, and values are compared only when both set.
if (hasStartTime() != other.hasStartTime()) return false;
if (hasStartTime()) {
if (!getStartTime().equals(other.getStartTime())) return false;
}
if (hasDuration() != other.hasDuration()) return false;
if (hasDuration()) {
if (!getDuration().equals(other.getDuration())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
// Memoized; 0 serves as the "not yet computed" sentinel.
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
// Descriptor identity distinguishes distinct message types with equal field values.
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + DAY_FIELD_NUMBER;
hash = (53 * hash) + day_;
// Optional message fields contribute only when present, mirroring equals().
if (hasStartTime()) {
hash = (37 * hash) + START_TIME_FIELD_NUMBER;
hash = (53 * hash) + getStartTime().hashCode();
}
if (hasDuration()) {
hash = (37 * hash) + DURATION_FIELD_NUMBER;
hash = (53 * hash) + getDuration().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.redis.v1beta1.WeeklyMaintenanceWindow parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.redis.v1beta1.WeeklyMaintenanceWindow parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.redis.v1beta1.WeeklyMaintenanceWindow parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.redis.v1beta1.WeeklyMaintenanceWindow parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.redis.v1beta1.WeeklyMaintenanceWindow parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.redis.v1beta1.WeeklyMaintenanceWindow parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.redis.v1beta1.WeeklyMaintenanceWindow parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.redis.v1beta1.WeeklyMaintenanceWindow parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.redis.v1beta1.WeeklyMaintenanceWindow parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.redis.v1beta1.WeeklyMaintenanceWindow parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.redis.v1beta1.WeeklyMaintenanceWindow parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.redis.v1beta1.WeeklyMaintenanceWindow parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.redis.v1beta1.WeeklyMaintenanceWindow prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Time window in which disruptive maintenance updates occur. Non-disruptive
* updates can occur inside or outside this window.
* </pre>
*
* Protobuf type {@code google.cloud.redis.v1beta1.WeeklyMaintenanceWindow}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.redis.v1beta1.WeeklyMaintenanceWindow)
com.google.cloud.redis.v1beta1.WeeklyMaintenanceWindowOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.redis.v1beta1.CloudRedisServiceBetaProto
.internal_static_google_cloud_redis_v1beta1_WeeklyMaintenanceWindow_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.redis.v1beta1.CloudRedisServiceBetaProto
.internal_static_google_cloud_redis_v1beta1_WeeklyMaintenanceWindow_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.redis.v1beta1.WeeklyMaintenanceWindow.class,
com.google.cloud.redis.v1beta1.WeeklyMaintenanceWindow.Builder.class);
}
// Construct using com.google.cloud.redis.v1beta1.WeeklyMaintenanceWindow.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getStartTimeFieldBuilder();
getDurationFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
day_ = 0;
startTime_ = null;
if (startTimeBuilder_ != null) {
startTimeBuilder_.dispose();
startTimeBuilder_ = null;
}
duration_ = null;
if (durationBuilder_ != null) {
durationBuilder_.dispose();
durationBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.redis.v1beta1.CloudRedisServiceBetaProto
.internal_static_google_cloud_redis_v1beta1_WeeklyMaintenanceWindow_descriptor;
}
@java.lang.Override
public com.google.cloud.redis.v1beta1.WeeklyMaintenanceWindow getDefaultInstanceForType() {
return com.google.cloud.redis.v1beta1.WeeklyMaintenanceWindow.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.redis.v1beta1.WeeklyMaintenanceWindow build() {
com.google.cloud.redis.v1beta1.WeeklyMaintenanceWindow result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.redis.v1beta1.WeeklyMaintenanceWindow buildPartial() {
com.google.cloud.redis.v1beta1.WeeklyMaintenanceWindow result =
new com.google.cloud.redis.v1beta1.WeeklyMaintenanceWindow(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
// Copies only the builder fields whose presence bits are set into the freshly
// built message, translating the builder's bit layout to the message's layout.
private void buildPartial0(com.google.cloud.redis.v1beta1.WeeklyMaintenanceWindow result) {
int from_bitField0_ = bitField0_;
// Builder bit 0x1 tracks day; the message itself does not track day presence
// (proto3 enum, default-valued when unset).
if (((from_bitField0_ & 0x00000001) != 0)) {
result.day_ = day_;
}
// Builder bits 0x2/0x4 (startTime/duration) map to message bits 0x1/0x2.
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.startTime_ = startTimeBuilder_ == null ? startTime_ : startTimeBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.duration_ = durationBuilder_ == null ? duration_ : durationBuilder_.build();
to_bitField0_ |= 0x00000002;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.redis.v1beta1.WeeklyMaintenanceWindow) {
return mergeFrom((com.google.cloud.redis.v1beta1.WeeklyMaintenanceWindow) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.redis.v1beta1.WeeklyMaintenanceWindow other) {
if (other == com.google.cloud.redis.v1beta1.WeeklyMaintenanceWindow.getDefaultInstance())
return this;
if (other.day_ != 0) {
setDayValue(other.getDayValue());
}
if (other.hasStartTime()) {
mergeStartTime(other.getStartTime());
}
if (other.hasDuration()) {
mergeDuration(other.getDuration());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
// Standard protobuf wire-format loop: read tags until end of input (tag 0)
// or an end-group tag detected by parseUnknownField.
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 8:
{
// Field 1 (day): wire type 0 (varint enum).
day_ = input.readEnum();
bitField0_ |= 0x00000001;
break;
} // case 8
case 18:
{
// Field 2 (start_time): wire type 2 (length-delimited message).
input.readMessage(getStartTimeFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
case 26:
{
// Field 3 (duration): wire type 2 (length-delimited message).
input.readMessage(getDurationFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000004;
break;
} // case 26
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
// Always notify listeners, even on a parse error, since fields may have
// been partially populated above.
onChanged();
} // finally
return this;
}
private int bitField0_;
private int day_ = 0;
/**
*
*
* <pre>
* Required. The day of week that maintenance updates occur.
* </pre>
*
* <code>.google.type.DayOfWeek day = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The enum numeric value on the wire for day.
*/
@java.lang.Override
public int getDayValue() {
return day_;
}
/**
*
*
* <pre>
* Required. The day of week that maintenance updates occur.
* </pre>
*
* <code>.google.type.DayOfWeek day = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The enum numeric value on the wire for day to set.
* @return This builder for chaining.
*/
public Builder setDayValue(int value) {
day_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The day of week that maintenance updates occur.
* </pre>
*
* <code>.google.type.DayOfWeek day = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The day.
*/
@java.lang.Override
public com.google.type.DayOfWeek getDay() {
com.google.type.DayOfWeek result = com.google.type.DayOfWeek.forNumber(day_);
return result == null ? com.google.type.DayOfWeek.UNRECOGNIZED : result;
}
/**
*
*
* <pre>
* Required. The day of week that maintenance updates occur.
* </pre>
*
* <code>.google.type.DayOfWeek day = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The day to set.
* @return This builder for chaining.
*/
public Builder setDay(com.google.type.DayOfWeek value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
day_ = value.getNumber();
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The day of week that maintenance updates occur.
* </pre>
*
* <code>.google.type.DayOfWeek day = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return This builder for chaining.
*/
public Builder clearDay() {
bitField0_ = (bitField0_ & ~0x00000001);
day_ = 0;
onChanged();
return this;
}
private com.google.type.TimeOfDay startTime_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.type.TimeOfDay,
com.google.type.TimeOfDay.Builder,
com.google.type.TimeOfDayOrBuilder>
startTimeBuilder_;
/**
*
*
* <pre>
* Required. Start time of the window in UTC time.
* </pre>
*
* <code>.google.type.TimeOfDay start_time = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return Whether the startTime field is set.
*/
public boolean hasStartTime() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. Start time of the window in UTC time.
* </pre>
*
* <code>.google.type.TimeOfDay start_time = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The startTime.
*/
public com.google.type.TimeOfDay getStartTime() {
if (startTimeBuilder_ == null) {
return startTime_ == null ? com.google.type.TimeOfDay.getDefaultInstance() : startTime_;
} else {
return startTimeBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. Start time of the window in UTC time.
* </pre>
*
* <code>.google.type.TimeOfDay start_time = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public Builder setStartTime(com.google.type.TimeOfDay value) {
if (startTimeBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
startTime_ = value;
} else {
startTimeBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Start time of the window in UTC time.
* </pre>
*
* <code>.google.type.TimeOfDay start_time = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public Builder setStartTime(com.google.type.TimeOfDay.Builder builderForValue) {
if (startTimeBuilder_ == null) {
startTime_ = builderForValue.build();
} else {
startTimeBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Start time of the window in UTC time.
* </pre>
*
* <code>.google.type.TimeOfDay start_time = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public Builder mergeStartTime(com.google.type.TimeOfDay value) {
if (startTimeBuilder_ == null) {
// Merge into an existing non-default value; otherwise simply adopt the
// incoming message as-is.
if (((bitField0_ & 0x00000002) != 0)
&& startTime_ != null
&& startTime_ != com.google.type.TimeOfDay.getDefaultInstance()) {
getStartTimeBuilder().mergeFrom(value);
} else {
startTime_ = value;
}
} else {
// A nested builder exists; delegate the merge to it.
startTimeBuilder_.mergeFrom(value);
}
// Only mark the field present (and fire onChanged) when a value is actually held.
if (startTime_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. Start time of the window in UTC time.
* </pre>
*
* <code>.google.type.TimeOfDay start_time = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public Builder clearStartTime() {
bitField0_ = (bitField0_ & ~0x00000002);
startTime_ = null;
if (startTimeBuilder_ != null) {
startTimeBuilder_.dispose();
startTimeBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Start time of the window in UTC time.
* </pre>
*
* <code>.google.type.TimeOfDay start_time = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public com.google.type.TimeOfDay.Builder getStartTimeBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getStartTimeFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. Start time of the window in UTC time.
* </pre>
*
* <code>.google.type.TimeOfDay start_time = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public com.google.type.TimeOfDayOrBuilder getStartTimeOrBuilder() {
if (startTimeBuilder_ != null) {
return startTimeBuilder_.getMessageOrBuilder();
} else {
return startTime_ == null ? com.google.type.TimeOfDay.getDefaultInstance() : startTime_;
}
}
/**
*
*
* <pre>
* Required. Start time of the window in UTC time.
* </pre>
*
* <code>.google.type.TimeOfDay start_time = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.type.TimeOfDay,
com.google.type.TimeOfDay.Builder,
com.google.type.TimeOfDayOrBuilder>
getStartTimeFieldBuilder() {
if (startTimeBuilder_ == null) {
startTimeBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.type.TimeOfDay,
com.google.type.TimeOfDay.Builder,
com.google.type.TimeOfDayOrBuilder>(
getStartTime(), getParentForChildren(), isClean());
startTime_ = null;
}
return startTimeBuilder_;
}
private com.google.protobuf.Duration duration_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Duration,
com.google.protobuf.Duration.Builder,
com.google.protobuf.DurationOrBuilder>
durationBuilder_;
/**
*
*
* <pre>
* Output only. Duration of the maintenance window. The current window is fixed at 1 hour.
* </pre>
*
* <code>.google.protobuf.Duration duration = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return Whether the duration field is set.
*/
public boolean hasDuration() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
*
*
* <pre>
* Output only. Duration of the maintenance window. The current window is fixed at 1 hour.
* </pre>
*
* <code>.google.protobuf.Duration duration = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return The duration.
*/
public com.google.protobuf.Duration getDuration() {
if (durationBuilder_ == null) {
return duration_ == null ? com.google.protobuf.Duration.getDefaultInstance() : duration_;
} else {
return durationBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Output only. Duration of the maintenance window. The current window is fixed at 1 hour.
* </pre>
*
* <code>.google.protobuf.Duration duration = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
public Builder setDuration(com.google.protobuf.Duration value) {
if (durationBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
duration_ = value;
} else {
durationBuilder_.setMessage(value);
}
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Output only. Duration of the maintenance window. The current window is fixed at 1 hour.
* </pre>
*
* <code>.google.protobuf.Duration duration = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
public Builder setDuration(com.google.protobuf.Duration.Builder builderForValue) {
if (durationBuilder_ == null) {
duration_ = builderForValue.build();
} else {
durationBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Output only. Duration of the maintenance window. The current window is fixed at 1 hour.
* </pre>
*
* <code>.google.protobuf.Duration duration = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
public Builder mergeDuration(com.google.protobuf.Duration value) {
if (durationBuilder_ == null) {
if (((bitField0_ & 0x00000004) != 0)
&& duration_ != null
&& duration_ != com.google.protobuf.Duration.getDefaultInstance()) {
getDurationBuilder().mergeFrom(value);
} else {
duration_ = value;
}
} else {
durationBuilder_.mergeFrom(value);
}
if (duration_ != null) {
bitField0_ |= 0x00000004;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Output only. Duration of the maintenance window. The current window is fixed at 1 hour.
* </pre>
*
* <code>.google.protobuf.Duration duration = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
public Builder clearDuration() {
bitField0_ = (bitField0_ & ~0x00000004);
duration_ = null;
if (durationBuilder_ != null) {
durationBuilder_.dispose();
durationBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Output only. Duration of the maintenance window. The current window is fixed at 1 hour.
* </pre>
*
* <code>.google.protobuf.Duration duration = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
public com.google.protobuf.Duration.Builder getDurationBuilder() {
bitField0_ |= 0x00000004;
onChanged();
return getDurationFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Output only. Duration of the maintenance window. The current window is fixed at 1 hour.
* </pre>
*
* <code>.google.protobuf.Duration duration = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
public com.google.protobuf.DurationOrBuilder getDurationOrBuilder() {
if (durationBuilder_ != null) {
return durationBuilder_.getMessageOrBuilder();
} else {
return duration_ == null ? com.google.protobuf.Duration.getDefaultInstance() : duration_;
}
}
/**
*
*
* <pre>
* Output only. Duration of the maintenance window. The current window is fixed at 1 hour.
* </pre>
*
* <code>.google.protobuf.Duration duration = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Duration,
com.google.protobuf.Duration.Builder,
com.google.protobuf.DurationOrBuilder>
getDurationFieldBuilder() {
if (durationBuilder_ == null) {
durationBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Duration,
com.google.protobuf.Duration.Builder,
com.google.protobuf.DurationOrBuilder>(
getDuration(), getParentForChildren(), isClean());
duration_ = null;
}
return durationBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.redis.v1beta1.WeeklyMaintenanceWindow)
}
// @@protoc_insertion_point(class_scope:google.cloud.redis.v1beta1.WeeklyMaintenanceWindow)
private static final com.google.cloud.redis.v1beta1.WeeklyMaintenanceWindow DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.redis.v1beta1.WeeklyMaintenanceWindow();
}
public static com.google.cloud.redis.v1beta1.WeeklyMaintenanceWindow getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Wire-format parser for WeeklyMaintenanceWindow. On any parse failure the
// partially-built message is attached to the thrown
// InvalidProtocolBufferException so callers can inspect what was read.
private static final com.google.protobuf.Parser<WeeklyMaintenanceWindow> PARSER =
    new com.google.protobuf.AbstractParser<WeeklyMaintenanceWindow>() {
      @java.lang.Override
      public WeeklyMaintenanceWindow parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          // Wrap plain I/O failures so callers see a single exception type.
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

public static com.google.protobuf.Parser<WeeklyMaintenanceWindow> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<WeeklyMaintenanceWindow> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.cloud.redis.v1beta1.WeeklyMaintenanceWindow getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 36,788 | java-networkservices/proto-google-cloud-networkservices-v1/src/main/java/com/google/cloud/networkservices/v1/UpdateTcpRouteRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/networkservices/v1/tcp_route.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.networkservices.v1;
/**
 *
 *
 * <pre>
 * Request used by the UpdateTcpRoute method.
 * </pre>
 *
 * Protobuf type {@code google.cloud.networkservices.v1.UpdateTcpRouteRequest}
 *
 * <p>NOTE(review): generated by protoc from
 * google/cloud/networkservices/v1/tcp_route.proto — regenerate rather than
 * hand-edit. Carries two optional-presence message fields: {@code update_mask}
 * (field 1) and {@code tcp_route} (field 2).
 */
public final class UpdateTcpRouteRequest extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.networkservices.v1.UpdateTcpRouteRequest)
    UpdateTcpRouteRequestOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use UpdateTcpRouteRequest.newBuilder() to construct.
  private UpdateTcpRouteRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  private UpdateTcpRouteRequest() {}

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    // Used by the protobuf runtime for reflective instantiation.
    return new UpdateTcpRouteRequest();
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.networkservices.v1.TcpRouteProto
        .internal_static_google_cloud_networkservices_v1_UpdateTcpRouteRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.networkservices.v1.TcpRouteProto
        .internal_static_google_cloud_networkservices_v1_UpdateTcpRouteRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.networkservices.v1.UpdateTcpRouteRequest.class,
            com.google.cloud.networkservices.v1.UpdateTcpRouteRequest.Builder.class);
  }

  // Presence bits: 0x1 => update_mask set, 0x2 => tcp_route set.
  private int bitField0_;

  public static final int UPDATE_MASK_FIELD_NUMBER = 1;
  private com.google.protobuf.FieldMask updateMask_;

  /**
   *
   *
   * <pre>
   * Optional. Field mask is used to specify the fields to be overwritten in the
   * TcpRoute resource by the update.
   * The fields specified in the update_mask are relative to the resource, not
   * the full request. A field will be overwritten if it is in the mask. If the
   * user does not provide a mask then all fields will be overwritten.
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return Whether the updateMask field is set.
   */
  @java.lang.Override
  public boolean hasUpdateMask() {
    return ((bitField0_ & 0x00000001) != 0);
  }

  /**
   *
   *
   * <pre>
   * Optional. Field mask is used to specify the fields to be overwritten in the
   * TcpRoute resource by the update.
   * The fields specified in the update_mask are relative to the resource, not
   * the full request. A field will be overwritten if it is in the mask. If the
   * user does not provide a mask then all fields will be overwritten.
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return The updateMask. Never null; returns the default instance when unset.
   */
  @java.lang.Override
  public com.google.protobuf.FieldMask getUpdateMask() {
    return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
  }

  /**
   *
   *
   * <pre>
   * Optional. Field mask is used to specify the fields to be overwritten in the
   * TcpRoute resource by the update.
   * The fields specified in the update_mask are relative to the resource, not
   * the full request. A field will be overwritten if it is in the mask. If the
   * user does not provide a mask then all fields will be overwritten.
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   */
  @java.lang.Override
  public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
    return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
  }

  public static final int TCP_ROUTE_FIELD_NUMBER = 2;
  private com.google.cloud.networkservices.v1.TcpRoute tcpRoute_;

  /**
   *
   *
   * <pre>
   * Required. Updated TcpRoute resource.
   * </pre>
   *
   * <code>
   * .google.cloud.networkservices.v1.TcpRoute tcp_route = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return Whether the tcpRoute field is set.
   */
  @java.lang.Override
  public boolean hasTcpRoute() {
    return ((bitField0_ & 0x00000002) != 0);
  }

  /**
   *
   *
   * <pre>
   * Required. Updated TcpRoute resource.
   * </pre>
   *
   * <code>
   * .google.cloud.networkservices.v1.TcpRoute tcp_route = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The tcpRoute. Never null; returns the default instance when unset.
   */
  @java.lang.Override
  public com.google.cloud.networkservices.v1.TcpRoute getTcpRoute() {
    return tcpRoute_ == null
        ? com.google.cloud.networkservices.v1.TcpRoute.getDefaultInstance()
        : tcpRoute_;
  }

  /**
   *
   *
   * <pre>
   * Required. Updated TcpRoute resource.
   * </pre>
   *
   * <code>
   * .google.cloud.networkservices.v1.TcpRoute tcp_route = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.networkservices.v1.TcpRouteOrBuilder getTcpRouteOrBuilder() {
    return tcpRoute_ == null
        ? com.google.cloud.networkservices.v1.TcpRoute.getDefaultInstance()
        : tcpRoute_;
  }

  // Memoized tri-state: -1 = not computed, 0 = not initialized, 1 = initialized.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    // No required proto2-style fields, so the message is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Fields are serialized only when their presence bit is set.
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(1, getUpdateMask());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      output.writeMessage(2, getTcpRoute());
    }
    getUnknownFields().writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    // Memoized: -1 means "not computed yet".
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getUpdateMask());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getTcpRoute());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.networkservices.v1.UpdateTcpRouteRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.networkservices.v1.UpdateTcpRouteRequest other =
        (com.google.cloud.networkservices.v1.UpdateTcpRouteRequest) obj;

    // Field-by-field comparison: presence must match, then values when present.
    if (hasUpdateMask() != other.hasUpdateMask()) return false;
    if (hasUpdateMask()) {
      if (!getUpdateMask().equals(other.getUpdateMask())) return false;
    }
    if (hasTcpRoute() != other.hasTcpRoute()) return false;
    if (hasTcpRoute()) {
      if (!getTcpRoute().equals(other.getTcpRoute())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    // Standard protobuf-generated hash: descriptor, then (field number, value)
    // pairs for each set field, then unknown fields.
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasUpdateMask()) {
      hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
      hash = (53 * hash) + getUpdateMask().hashCode();
    }
    if (hasTcpRoute()) {
      hash = (37 * hash) + TCP_ROUTE_FIELD_NUMBER;
      hash = (53 * hash) + getTcpRoute().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  // --- Static parse entry points for every supported input form. ---

  public static com.google.cloud.networkservices.v1.UpdateTcpRouteRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.networkservices.v1.UpdateTcpRouteRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.networkservices.v1.UpdateTcpRouteRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.networkservices.v1.UpdateTcpRouteRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.networkservices.v1.UpdateTcpRouteRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.networkservices.v1.UpdateTcpRouteRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.networkservices.v1.UpdateTcpRouteRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.networkservices.v1.UpdateTcpRouteRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.networkservices.v1.UpdateTcpRouteRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.networkservices.v1.UpdateTcpRouteRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.networkservices.v1.UpdateTcpRouteRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.networkservices.v1.UpdateTcpRouteRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.cloud.networkservices.v1.UpdateTcpRouteRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    // Avoid the mergeFrom cost for the shared default instance.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   *
   *
   * <pre>
   * Request used by the UpdateTcpRoute method.
   * </pre>
   *
   * Protobuf type {@code google.cloud.networkservices.v1.UpdateTcpRouteRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.networkservices.v1.UpdateTcpRouteRequest)
      com.google.cloud.networkservices.v1.UpdateTcpRouteRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.networkservices.v1.TcpRouteProto
          .internal_static_google_cloud_networkservices_v1_UpdateTcpRouteRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.networkservices.v1.TcpRouteProto
          .internal_static_google_cloud_networkservices_v1_UpdateTcpRouteRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.networkservices.v1.UpdateTcpRouteRequest.class,
              com.google.cloud.networkservices.v1.UpdateTcpRouteRequest.Builder.class);
    }

    // Construct using com.google.cloud.networkservices.v1.UpdateTcpRouteRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    private void maybeForceBuilderInitialization() {
      // Eagerly create nested-field builders when the runtime requires it.
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getUpdateMaskFieldBuilder();
        getTcpRouteFieldBuilder();
      }
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      updateMask_ = null;
      if (updateMaskBuilder_ != null) {
        updateMaskBuilder_.dispose();
        updateMaskBuilder_ = null;
      }
      tcpRoute_ = null;
      if (tcpRouteBuilder_ != null) {
        tcpRouteBuilder_.dispose();
        tcpRouteBuilder_ = null;
      }
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.networkservices.v1.TcpRouteProto
          .internal_static_google_cloud_networkservices_v1_UpdateTcpRouteRequest_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.networkservices.v1.UpdateTcpRouteRequest getDefaultInstanceForType() {
      return com.google.cloud.networkservices.v1.UpdateTcpRouteRequest.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.networkservices.v1.UpdateTcpRouteRequest build() {
      com.google.cloud.networkservices.v1.UpdateTcpRouteRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.networkservices.v1.UpdateTcpRouteRequest buildPartial() {
      com.google.cloud.networkservices.v1.UpdateTcpRouteRequest result =
          new com.google.cloud.networkservices.v1.UpdateTcpRouteRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies set fields from this builder into `result`, translating builder
    // presence bits into message presence bits.
    private void buildPartial0(com.google.cloud.networkservices.v1.UpdateTcpRouteRequest result) {
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.tcpRoute_ = tcpRouteBuilder_ == null ? tcpRoute_ : tcpRouteBuilder_.build();
        to_bitField0_ |= 0x00000002;
      }
      result.bitField0_ |= to_bitField0_;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      // Fast path for same-type merges; otherwise fall back to reflection.
      if (other instanceof com.google.cloud.networkservices.v1.UpdateTcpRouteRequest) {
        return mergeFrom((com.google.cloud.networkservices.v1.UpdateTcpRouteRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.cloud.networkservices.v1.UpdateTcpRouteRequest other) {
      if (other == com.google.cloud.networkservices.v1.UpdateTcpRouteRequest.getDefaultInstance())
        return this;
      if (other.hasUpdateMask()) {
        mergeUpdateMask(other.getUpdateMask());
      }
      if (other.hasTcpRoute()) {
        mergeTcpRoute(other.getTcpRoute());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          // Tag = (field number << 3) | wire type; 10 => field 1, 18 => field 2.
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                input.readMessage(getTcpRouteFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    private int bitField0_;

    private com.google.protobuf.FieldMask updateMask_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>
        updateMaskBuilder_;

    /**
     *
     *
     * <pre>
     * Optional. Field mask is used to specify the fields to be overwritten in the
     * TcpRoute resource by the update.
     * The fields specified in the update_mask are relative to the resource, not
     * the full request. A field will be overwritten if it is in the mask. If the
     * user does not provide a mask then all fields will be overwritten.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @return Whether the updateMask field is set.
     */
    public boolean hasUpdateMask() {
      return ((bitField0_ & 0x00000001) != 0);
    }

    /**
     *
     *
     * <pre>
     * Optional. Field mask is used to specify the fields to be overwritten in the
     * TcpRoute resource by the update.
     * The fields specified in the update_mask are relative to the resource, not
     * the full request. A field will be overwritten if it is in the mask. If the
     * user does not provide a mask then all fields will be overwritten.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @return The updateMask. Never null; returns the default instance when unset.
     */
    public com.google.protobuf.FieldMask getUpdateMask() {
      if (updateMaskBuilder_ == null) {
        return updateMask_ == null
            ? com.google.protobuf.FieldMask.getDefaultInstance()
            : updateMask_;
      } else {
        return updateMaskBuilder_.getMessage();
      }
    }

    /**
     *
     *
     * <pre>
     * Optional. Field mask is used to specify the fields to be overwritten in the
     * TcpRoute resource by the update.
     * The fields specified in the update_mask are relative to the resource, not
     * the full request. A field will be overwritten if it is in the mask. If the
     * user does not provide a mask then all fields will be overwritten.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
      if (updateMaskBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        updateMask_ = value;
      } else {
        updateMaskBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Optional. Field mask is used to specify the fields to be overwritten in the
     * TcpRoute resource by the update.
     * The fields specified in the update_mask are relative to the resource, not
     * the full request. A field will be overwritten if it is in the mask. If the
     * user does not provide a mask then all fields will be overwritten.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
      if (updateMaskBuilder_ == null) {
        updateMask_ = builderForValue.build();
      } else {
        updateMaskBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Optional. Field mask is used to specify the fields to be overwritten in the
     * TcpRoute resource by the update.
     * The fields specified in the update_mask are relative to the resource, not
     * the full request. A field will be overwritten if it is in the mask. If the
     * user does not provide a mask then all fields will be overwritten.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
      if (updateMaskBuilder_ == null) {
        // Merge into the existing value only when one is already set and is not
        // the shared default instance; otherwise replace outright.
        if (((bitField0_ & 0x00000001) != 0)
            && updateMask_ != null
            && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
          getUpdateMaskBuilder().mergeFrom(value);
        } else {
          updateMask_ = value;
        }
      } else {
        updateMaskBuilder_.mergeFrom(value);
      }
      if (updateMask_ != null) {
        bitField0_ |= 0x00000001;
        onChanged();
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * Optional. Field mask is used to specify the fields to be overwritten in the
     * TcpRoute resource by the update.
     * The fields specified in the update_mask are relative to the resource, not
     * the full request. A field will be overwritten if it is in the mask. If the
     * user does not provide a mask then all fields will be overwritten.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder clearUpdateMask() {
      bitField0_ = (bitField0_ & ~0x00000001);
      updateMask_ = null;
      if (updateMaskBuilder_ != null) {
        updateMaskBuilder_.dispose();
        updateMaskBuilder_ = null;
      }
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Optional. Field mask is used to specify the fields to be overwritten in the
     * TcpRoute resource by the update.
     * The fields specified in the update_mask are relative to the resource, not
     * the full request. A field will be overwritten if it is in the mask. If the
     * user does not provide a mask then all fields will be overwritten.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
      // Calling this marks the field as set, since the builder may be mutated.
      bitField0_ |= 0x00000001;
      onChanged();
      return getUpdateMaskFieldBuilder().getBuilder();
    }

    /**
     *
     *
     * <pre>
     * Optional. Field mask is used to specify the fields to be overwritten in the
     * TcpRoute resource by the update.
     * The fields specified in the update_mask are relative to the resource, not
     * the full request. A field will be overwritten if it is in the mask. If the
     * user does not provide a mask then all fields will be overwritten.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
      if (updateMaskBuilder_ != null) {
        return updateMaskBuilder_.getMessageOrBuilder();
      } else {
        return updateMask_ == null
            ? com.google.protobuf.FieldMask.getDefaultInstance()
            : updateMask_;
      }
    }

    /**
     *
     *
     * <pre>
     * Optional. Field mask is used to specify the fields to be overwritten in the
     * TcpRoute resource by the update.
     * The fields specified in the update_mask are relative to the resource, not
     * the full request. A field will be overwritten if it is in the mask. If the
     * user does not provide a mask then all fields will be overwritten.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>
        getUpdateMaskFieldBuilder() {
      // Lazily creates the single-field builder for `update_mask`; ownership of
      // the field state transfers to the builder once created.
      if (updateMaskBuilder_ == null) {
        updateMaskBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.protobuf.FieldMask,
                com.google.protobuf.FieldMask.Builder,
                com.google.protobuf.FieldMaskOrBuilder>(
                getUpdateMask(), getParentForChildren(), isClean());
        updateMask_ = null;
      }
      return updateMaskBuilder_;
    }

    private com.google.cloud.networkservices.v1.TcpRoute tcpRoute_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.networkservices.v1.TcpRoute,
            com.google.cloud.networkservices.v1.TcpRoute.Builder,
            com.google.cloud.networkservices.v1.TcpRouteOrBuilder>
        tcpRouteBuilder_;

    /**
     *
     *
     * <pre>
     * Required. Updated TcpRoute resource.
     * </pre>
     *
     * <code>
     * .google.cloud.networkservices.v1.TcpRoute tcp_route = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return Whether the tcpRoute field is set.
     */
    public boolean hasTcpRoute() {
      return ((bitField0_ & 0x00000002) != 0);
    }

    /**
     *
     *
     * <pre>
     * Required. Updated TcpRoute resource.
     * </pre>
     *
     * <code>
     * .google.cloud.networkservices.v1.TcpRoute tcp_route = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return The tcpRoute. Never null; returns the default instance when unset.
     */
    public com.google.cloud.networkservices.v1.TcpRoute getTcpRoute() {
      if (tcpRouteBuilder_ == null) {
        return tcpRoute_ == null
            ? com.google.cloud.networkservices.v1.TcpRoute.getDefaultInstance()
            : tcpRoute_;
      } else {
        return tcpRouteBuilder_.getMessage();
      }
    }

    /**
     *
     *
     * <pre>
     * Required. Updated TcpRoute resource.
     * </pre>
     *
     * <code>
     * .google.cloud.networkservices.v1.TcpRoute tcp_route = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setTcpRoute(com.google.cloud.networkservices.v1.TcpRoute value) {
      if (tcpRouteBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        tcpRoute_ = value;
      } else {
        tcpRouteBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. Updated TcpRoute resource.
     * </pre>
     *
     * <code>
     * .google.cloud.networkservices.v1.TcpRoute tcp_route = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setTcpRoute(
        com.google.cloud.networkservices.v1.TcpRoute.Builder builderForValue) {
      if (tcpRouteBuilder_ == null) {
        tcpRoute_ = builderForValue.build();
      } else {
        tcpRouteBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. Updated TcpRoute resource.
     * </pre>
     *
     * <code>
     * .google.cloud.networkservices.v1.TcpRoute tcp_route = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder mergeTcpRoute(com.google.cloud.networkservices.v1.TcpRoute value) {
      if (tcpRouteBuilder_ == null) {
        // Merge into the existing value only when one is already set and is not
        // the shared default instance; otherwise replace outright.
        if (((bitField0_ & 0x00000002) != 0)
            && tcpRoute_ != null
            && tcpRoute_ != com.google.cloud.networkservices.v1.TcpRoute.getDefaultInstance()) {
          getTcpRouteBuilder().mergeFrom(value);
        } else {
          tcpRoute_ = value;
        }
      } else {
        tcpRouteBuilder_.mergeFrom(value);
      }
      if (tcpRoute_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. Updated TcpRoute resource.
     * </pre>
     *
     * <code>
     * .google.cloud.networkservices.v1.TcpRoute tcp_route = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder clearTcpRoute() {
      bitField0_ = (bitField0_ & ~0x00000002);
      tcpRoute_ = null;
      if (tcpRouteBuilder_ != null) {
        tcpRouteBuilder_.dispose();
        tcpRouteBuilder_ = null;
      }
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. Updated TcpRoute resource.
     * </pre>
     *
     * <code>
     * .google.cloud.networkservices.v1.TcpRoute tcp_route = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.networkservices.v1.TcpRoute.Builder getTcpRouteBuilder() {
      // Calling this marks the field as set, since the builder may be mutated.
      bitField0_ |= 0x00000002;
      onChanged();
      return getTcpRouteFieldBuilder().getBuilder();
    }

    /**
     *
     *
     * <pre>
     * Required. Updated TcpRoute resource.
     * </pre>
     *
     * <code>
     * .google.cloud.networkservices.v1.TcpRoute tcp_route = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.networkservices.v1.TcpRouteOrBuilder getTcpRouteOrBuilder() {
      if (tcpRouteBuilder_ != null) {
        return tcpRouteBuilder_.getMessageOrBuilder();
      } else {
        return tcpRoute_ == null
            ? com.google.cloud.networkservices.v1.TcpRoute.getDefaultInstance()
            : tcpRoute_;
      }
    }

    /**
     *
     *
     * <pre>
     * Required. Updated TcpRoute resource.
     * </pre>
     *
     * <code>
     * .google.cloud.networkservices.v1.TcpRoute tcp_route = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.networkservices.v1.TcpRoute,
            com.google.cloud.networkservices.v1.TcpRoute.Builder,
            com.google.cloud.networkservices.v1.TcpRouteOrBuilder>
        getTcpRouteFieldBuilder() {
      // Lazily creates the single-field builder for `tcp_route`; ownership of
      // the field state transfers to the builder once created.
      if (tcpRouteBuilder_ == null) {
        tcpRouteBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.networkservices.v1.TcpRoute,
                com.google.cloud.networkservices.v1.TcpRoute.Builder,
                com.google.cloud.networkservices.v1.TcpRouteOrBuilder>(
                getTcpRoute(), getParentForChildren(), isClean());
        tcpRoute_ = null;
      }
      return tcpRouteBuilder_;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.networkservices.v1.UpdateTcpRouteRequest)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.networkservices.v1.UpdateTcpRouteRequest)
  // Singleton default instance shared by all default-valued getters.
  private static final com.google.cloud.networkservices.v1.UpdateTcpRouteRequest DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.networkservices.v1.UpdateTcpRouteRequest();
  }

  public static com.google.cloud.networkservices.v1.UpdateTcpRouteRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Wire-format parser; attaches the partially-built message to any
  // InvalidProtocolBufferException it throws.
  private static final com.google.protobuf.Parser<UpdateTcpRouteRequest> PARSER =
      new com.google.protobuf.AbstractParser<UpdateTcpRouteRequest>() {
        @java.lang.Override
        public UpdateTcpRouteRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<UpdateTcpRouteRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<UpdateTcpRouteRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.networkservices.v1.UpdateTcpRouteRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 36,788 | java-networkservices/proto-google-cloud-networkservices-v1/src/main/java/com/google/cloud/networkservices/v1/UpdateTlsRouteRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/networkservices/v1/tls_route.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.networkservices.v1;
/**
*
*
* <pre>
* Request used by the UpdateTlsRoute method.
* </pre>
*
* Protobuf type {@code google.cloud.networkservices.v1.UpdateTlsRouteRequest}
*/
public final class UpdateTlsRouteRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.networkservices.v1.UpdateTlsRouteRequest)
UpdateTlsRouteRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use UpdateTlsRouteRequest.newBuilder() to construct.
private UpdateTlsRouteRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private UpdateTlsRouteRequest() {}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new UpdateTlsRouteRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.networkservices.v1.TlsRouteProto
.internal_static_google_cloud_networkservices_v1_UpdateTlsRouteRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.networkservices.v1.TlsRouteProto
.internal_static_google_cloud_networkservices_v1_UpdateTlsRouteRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.networkservices.v1.UpdateTlsRouteRequest.class,
com.google.cloud.networkservices.v1.UpdateTlsRouteRequest.Builder.class);
}
private int bitField0_;
public static final int UPDATE_MASK_FIELD_NUMBER = 1;
private com.google.protobuf.FieldMask updateMask_;
/**
*
*
* <pre>
* Optional. Field mask is used to specify the fields to be overwritten in the
* TlsRoute resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return Whether the updateMask field is set.
*/
@java.lang.Override
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Optional. Field mask is used to specify the fields to be overwritten in the
* TlsRoute resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return The updateMask.
*/
@java.lang.Override
public com.google.protobuf.FieldMask getUpdateMask() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
/**
*
*
* <pre>
* Optional. Field mask is used to specify the fields to be overwritten in the
* TlsRoute resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
@java.lang.Override
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
public static final int TLS_ROUTE_FIELD_NUMBER = 2;
private com.google.cloud.networkservices.v1.TlsRoute tlsRoute_;
/**
*
*
* <pre>
* Required. Updated TlsRoute resource.
* </pre>
*
* <code>
* .google.cloud.networkservices.v1.TlsRoute tls_route = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the tlsRoute field is set.
*/
@java.lang.Override
public boolean hasTlsRoute() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. Updated TlsRoute resource.
* </pre>
*
* <code>
* .google.cloud.networkservices.v1.TlsRoute tls_route = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The tlsRoute.
*/
@java.lang.Override
public com.google.cloud.networkservices.v1.TlsRoute getTlsRoute() {
return tlsRoute_ == null
? com.google.cloud.networkservices.v1.TlsRoute.getDefaultInstance()
: tlsRoute_;
}
/**
*
*
* <pre>
* Required. Updated TlsRoute resource.
* </pre>
*
* <code>
* .google.cloud.networkservices.v1.TlsRoute tls_route = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.networkservices.v1.TlsRouteOrBuilder getTlsRouteOrBuilder() {
return tlsRoute_ == null
? com.google.cloud.networkservices.v1.TlsRoute.getDefaultInstance()
: tlsRoute_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getUpdateMask());
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeMessage(2, getTlsRoute());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getUpdateMask());
}
if (((bitField0_ & 0x00000002) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getTlsRoute());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.networkservices.v1.UpdateTlsRouteRequest)) {
return super.equals(obj);
}
com.google.cloud.networkservices.v1.UpdateTlsRouteRequest other =
(com.google.cloud.networkservices.v1.UpdateTlsRouteRequest) obj;
if (hasUpdateMask() != other.hasUpdateMask()) return false;
if (hasUpdateMask()) {
if (!getUpdateMask().equals(other.getUpdateMask())) return false;
}
if (hasTlsRoute() != other.hasTlsRoute()) return false;
if (hasTlsRoute()) {
if (!getTlsRoute().equals(other.getTlsRoute())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasUpdateMask()) {
hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
hash = (53 * hash) + getUpdateMask().hashCode();
}
if (hasTlsRoute()) {
hash = (37 * hash) + TLS_ROUTE_FIELD_NUMBER;
hash = (53 * hash) + getTlsRoute().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.networkservices.v1.UpdateTlsRouteRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.networkservices.v1.UpdateTlsRouteRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.networkservices.v1.UpdateTlsRouteRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.networkservices.v1.UpdateTlsRouteRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.networkservices.v1.UpdateTlsRouteRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.networkservices.v1.UpdateTlsRouteRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.networkservices.v1.UpdateTlsRouteRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.networkservices.v1.UpdateTlsRouteRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.networkservices.v1.UpdateTlsRouteRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.networkservices.v1.UpdateTlsRouteRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.networkservices.v1.UpdateTlsRouteRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.networkservices.v1.UpdateTlsRouteRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.networkservices.v1.UpdateTlsRouteRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request used by the UpdateTlsRoute method.
* </pre>
*
* Protobuf type {@code google.cloud.networkservices.v1.UpdateTlsRouteRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.networkservices.v1.UpdateTlsRouteRequest)
com.google.cloud.networkservices.v1.UpdateTlsRouteRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.networkservices.v1.TlsRouteProto
.internal_static_google_cloud_networkservices_v1_UpdateTlsRouteRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.networkservices.v1.TlsRouteProto
.internal_static_google_cloud_networkservices_v1_UpdateTlsRouteRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.networkservices.v1.UpdateTlsRouteRequest.class,
com.google.cloud.networkservices.v1.UpdateTlsRouteRequest.Builder.class);
}
// Construct using com.google.cloud.networkservices.v1.UpdateTlsRouteRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getUpdateMaskFieldBuilder();
getTlsRouteFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
tlsRoute_ = null;
if (tlsRouteBuilder_ != null) {
tlsRouteBuilder_.dispose();
tlsRouteBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.networkservices.v1.TlsRouteProto
.internal_static_google_cloud_networkservices_v1_UpdateTlsRouteRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.networkservices.v1.UpdateTlsRouteRequest getDefaultInstanceForType() {
return com.google.cloud.networkservices.v1.UpdateTlsRouteRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.networkservices.v1.UpdateTlsRouteRequest build() {
com.google.cloud.networkservices.v1.UpdateTlsRouteRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.networkservices.v1.UpdateTlsRouteRequest buildPartial() {
com.google.cloud.networkservices.v1.UpdateTlsRouteRequest result =
new com.google.cloud.networkservices.v1.UpdateTlsRouteRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.networkservices.v1.UpdateTlsRouteRequest result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.tlsRoute_ = tlsRouteBuilder_ == null ? tlsRoute_ : tlsRouteBuilder_.build();
to_bitField0_ |= 0x00000002;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.networkservices.v1.UpdateTlsRouteRequest) {
return mergeFrom((com.google.cloud.networkservices.v1.UpdateTlsRouteRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.networkservices.v1.UpdateTlsRouteRequest other) {
if (other == com.google.cloud.networkservices.v1.UpdateTlsRouteRequest.getDefaultInstance())
return this;
if (other.hasUpdateMask()) {
mergeUpdateMask(other.getUpdateMask());
}
if (other.hasTlsRoute()) {
mergeTlsRoute(other.getTlsRoute());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
input.readMessage(getTlsRouteFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private com.google.protobuf.FieldMask updateMask_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
updateMaskBuilder_;
/**
*
*
* <pre>
* Optional. Field mask is used to specify the fields to be overwritten in the
* TlsRoute resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return Whether the updateMask field is set.
*/
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Optional. Field mask is used to specify the fields to be overwritten in the
* TlsRoute resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return The updateMask.
*/
public com.google.protobuf.FieldMask getUpdateMask() {
if (updateMaskBuilder_ == null) {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
} else {
return updateMaskBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Optional. Field mask is used to specify the fields to be overwritten in the
* TlsRoute resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
updateMask_ = value;
} else {
updateMaskBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. Field mask is used to specify the fields to be overwritten in the
* TlsRoute resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
if (updateMaskBuilder_ == null) {
updateMask_ = builderForValue.build();
} else {
updateMaskBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. Field mask is used to specify the fields to be overwritten in the
* TlsRoute resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)
&& updateMask_ != null
&& updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
getUpdateMaskBuilder().mergeFrom(value);
} else {
updateMask_ = value;
}
} else {
updateMaskBuilder_.mergeFrom(value);
}
if (updateMask_ != null) {
bitField0_ |= 0x00000001;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Optional. Field mask is used to specify the fields to be overwritten in the
* TlsRoute resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public Builder clearUpdateMask() {
bitField0_ = (bitField0_ & ~0x00000001);
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. Field mask is used to specify the fields to be overwritten in the
* TlsRoute resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getUpdateMaskFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Optional. Field mask is used to specify the fields to be overwritten in the
* TlsRoute resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
if (updateMaskBuilder_ != null) {
return updateMaskBuilder_.getMessageOrBuilder();
} else {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
}
}
/**
*
*
* <pre>
* Optional. Field mask is used to specify the fields to be overwritten in the
* TlsRoute resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
getUpdateMaskFieldBuilder() {
if (updateMaskBuilder_ == null) {
updateMaskBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>(
getUpdateMask(), getParentForChildren(), isClean());
updateMask_ = null;
}
return updateMaskBuilder_;
}
private com.google.cloud.networkservices.v1.TlsRoute tlsRoute_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.networkservices.v1.TlsRoute,
com.google.cloud.networkservices.v1.TlsRoute.Builder,
com.google.cloud.networkservices.v1.TlsRouteOrBuilder>
tlsRouteBuilder_;
/**
*
*
* <pre>
* Required. Updated TlsRoute resource.
* </pre>
*
* <code>
* .google.cloud.networkservices.v1.TlsRoute tls_route = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the tlsRoute field is set.
*/
public boolean hasTlsRoute() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. Updated TlsRoute resource.
* </pre>
*
* <code>
* .google.cloud.networkservices.v1.TlsRoute tls_route = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The tlsRoute.
*/
public com.google.cloud.networkservices.v1.TlsRoute getTlsRoute() {
if (tlsRouteBuilder_ == null) {
return tlsRoute_ == null
? com.google.cloud.networkservices.v1.TlsRoute.getDefaultInstance()
: tlsRoute_;
} else {
return tlsRouteBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. Updated TlsRoute resource.
* </pre>
*
* <code>
* .google.cloud.networkservices.v1.TlsRoute tls_route = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setTlsRoute(com.google.cloud.networkservices.v1.TlsRoute value) {
if (tlsRouteBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
tlsRoute_ = value;
} else {
tlsRouteBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Updated TlsRoute resource.
* </pre>
*
* <code>
* .google.cloud.networkservices.v1.TlsRoute tls_route = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setTlsRoute(
com.google.cloud.networkservices.v1.TlsRoute.Builder builderForValue) {
if (tlsRouteBuilder_ == null) {
tlsRoute_ = builderForValue.build();
} else {
tlsRouteBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Updated TlsRoute resource.
* </pre>
*
* <code>
* .google.cloud.networkservices.v1.TlsRoute tls_route = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeTlsRoute(com.google.cloud.networkservices.v1.TlsRoute value) {
if (tlsRouteBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)
&& tlsRoute_ != null
&& tlsRoute_ != com.google.cloud.networkservices.v1.TlsRoute.getDefaultInstance()) {
getTlsRouteBuilder().mergeFrom(value);
} else {
tlsRoute_ = value;
}
} else {
tlsRouteBuilder_.mergeFrom(value);
}
if (tlsRoute_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. Updated TlsRoute resource.
* </pre>
*
* <code>
* .google.cloud.networkservices.v1.TlsRoute tls_route = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearTlsRoute() {
bitField0_ = (bitField0_ & ~0x00000002);
tlsRoute_ = null;
if (tlsRouteBuilder_ != null) {
tlsRouteBuilder_.dispose();
tlsRouteBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Updated TlsRoute resource.
* </pre>
*
* <code>
* .google.cloud.networkservices.v1.TlsRoute tls_route = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.networkservices.v1.TlsRoute.Builder getTlsRouteBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getTlsRouteFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. Updated TlsRoute resource.
* </pre>
*
* <code>
* .google.cloud.networkservices.v1.TlsRoute tls_route = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.networkservices.v1.TlsRouteOrBuilder getTlsRouteOrBuilder() {
if (tlsRouteBuilder_ != null) {
return tlsRouteBuilder_.getMessageOrBuilder();
} else {
return tlsRoute_ == null
? com.google.cloud.networkservices.v1.TlsRoute.getDefaultInstance()
: tlsRoute_;
}
}
/**
*
*
* <pre>
* Required. Updated TlsRoute resource.
* </pre>
*
* <code>
* .google.cloud.networkservices.v1.TlsRoute tls_route = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.networkservices.v1.TlsRoute,
com.google.cloud.networkservices.v1.TlsRoute.Builder,
com.google.cloud.networkservices.v1.TlsRouteOrBuilder>
getTlsRouteFieldBuilder() {
if (tlsRouteBuilder_ == null) {
tlsRouteBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.networkservices.v1.TlsRoute,
com.google.cloud.networkservices.v1.TlsRoute.Builder,
com.google.cloud.networkservices.v1.TlsRouteOrBuilder>(
getTlsRoute(), getParentForChildren(), isClean());
tlsRoute_ = null;
}
return tlsRouteBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.networkservices.v1.UpdateTlsRouteRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.networkservices.v1.UpdateTlsRouteRequest)
private static final com.google.cloud.networkservices.v1.UpdateTlsRouteRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.networkservices.v1.UpdateTlsRouteRequest();
}
public static com.google.cloud.networkservices.v1.UpdateTlsRouteRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<UpdateTlsRouteRequest> PARSER =
new com.google.protobuf.AbstractParser<UpdateTlsRouteRequest>() {
@java.lang.Override
public UpdateTlsRouteRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<UpdateTlsRouteRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<UpdateTlsRouteRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
/* Instance accessor for the shared default instance, required by the protobuf Message contract. */
public com.google.cloud.networkservices.v1.UpdateTlsRouteRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 36,934 | java-discoveryengine/google-cloud-discoveryengine/src/main/java/com/google/cloud/discoveryengine/v1alpha/CompletionServiceClient.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.discoveryengine.v1alpha;
import com.google.api.core.BetaApi;
import com.google.api.gax.core.BackgroundResource;
import com.google.api.gax.httpjson.longrunning.OperationsClient;
import com.google.api.gax.longrunning.OperationFuture;
import com.google.api.gax.rpc.OperationCallable;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.cloud.discoveryengine.v1alpha.stub.CompletionServiceStub;
import com.google.cloud.discoveryengine.v1alpha.stub.CompletionServiceStubSettings;
import com.google.longrunning.Operation;
import java.io.IOException;
import java.util.concurrent.TimeUnit;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
* Service Description: Service for Auto-Completion.
*
* <p>This class provides the ability to make remote calls to the backing service through method
* calls that map to API methods. Sample code to get started:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (CompletionServiceClient completionServiceClient = CompletionServiceClient.create()) {
* CompleteQueryRequest request =
* CompleteQueryRequest.newBuilder()
* .setDataStore(
* DataStoreName.ofProjectLocationDataStoreName(
* "[PROJECT]", "[LOCATION]", "[DATA_STORE]")
* .toString())
* .setQuery("query107944136")
* .setQueryModel("queryModel-184930495")
* .setUserPseudoId("userPseudoId-1155274652")
* .setIncludeTailSuggestions(true)
* .build();
* CompleteQueryResponse response = completionServiceClient.completeQuery(request);
* }
* }</pre>
*
* <p>Note: close() needs to be called on the CompletionServiceClient object to clean up resources
* such as threads. In the example above, try-with-resources is used, which automatically calls
* close().
*
* <table>
* <caption>Methods</caption>
* <tr>
* <th>Method</th>
* <th>Description</th>
* <th>Method Variants</th>
* </tr>
* <tr>
* <td><p> CompleteQuery</td>
* <td><p> Completes the specified user input with keyword suggestions.</td>
* <td>
* <p>Request object method variants only take one parameter, a request object, which must be constructed before the call.</p>
* <ul>
* <li><p> completeQuery(CompleteQueryRequest request)
* </ul>
* <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p>
* <ul>
* <li><p> completeQueryCallable()
* </ul>
* </td>
* </tr>
* <tr>
* <td><p> ImportSuggestionDenyListEntries</td>
* <td><p> Imports all [SuggestionDenyListEntry][google.cloud.discoveryengine.v1alpha.SuggestionDenyListEntry] for a DataStore.</td>
* <td>
* <p>Request object method variants only take one parameter, a request object, which must be constructed before the call.</p>
* <ul>
* <li><p> importSuggestionDenyListEntriesAsync(ImportSuggestionDenyListEntriesRequest request)
* </ul>
* <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p>
* <ul>
* <li><p> importSuggestionDenyListEntriesOperationCallable()
* <li><p> importSuggestionDenyListEntriesCallable()
* </ul>
* </td>
* </tr>
* <tr>
* <td><p> PurgeSuggestionDenyListEntries</td>
* <td><p> Permanently deletes all [SuggestionDenyListEntry][google.cloud.discoveryengine.v1alpha.SuggestionDenyListEntry] for a DataStore.</td>
* <td>
* <p>Request object method variants only take one parameter, a request object, which must be constructed before the call.</p>
* <ul>
* <li><p> purgeSuggestionDenyListEntriesAsync(PurgeSuggestionDenyListEntriesRequest request)
* </ul>
* <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p>
* <ul>
* <li><p> purgeSuggestionDenyListEntriesOperationCallable()
* <li><p> purgeSuggestionDenyListEntriesCallable()
* </ul>
* </td>
* </tr>
* <tr>
* <td><p> ImportCompletionSuggestions</td>
* <td><p> Imports [CompletionSuggestion][google.cloud.discoveryengine.v1alpha.CompletionSuggestion]s for a DataStore.</td>
* <td>
* <p>Request object method variants only take one parameter, a request object, which must be constructed before the call.</p>
* <ul>
* <li><p> importCompletionSuggestionsAsync(ImportCompletionSuggestionsRequest request)
* </ul>
* <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p>
* <ul>
* <li><p> importCompletionSuggestionsOperationCallable()
* <li><p> importCompletionSuggestionsCallable()
* </ul>
* </td>
* </tr>
* <tr>
* <td><p> PurgeCompletionSuggestions</td>
* <td><p> Permanently deletes all [CompletionSuggestion][google.cloud.discoveryengine.v1alpha.CompletionSuggestion]s for a DataStore.</td>
* <td>
* <p>Request object method variants only take one parameter, a request object, which must be constructed before the call.</p>
* <ul>
* <li><p> purgeCompletionSuggestionsAsync(PurgeCompletionSuggestionsRequest request)
* </ul>
* <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p>
* <ul>
* <li><p> purgeCompletionSuggestionsOperationCallable()
* <li><p> purgeCompletionSuggestionsCallable()
* </ul>
* </td>
* </tr>
* </table>
*
* <p>See the individual methods for example code.
*
* <p>Many parameters require resource names to be formatted in a particular way. To assist with
* these names, this class includes a format method for each type of name, and additionally a parse
* method to extract the individual identifiers contained within names that are returned.
*
* <p>This class can be customized by passing in a custom instance of CompletionServiceSettings to
* create(). For example:
*
* <p>To customize credentials:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* CompletionServiceSettings completionServiceSettings =
* CompletionServiceSettings.newBuilder()
* .setCredentialsProvider(FixedCredentialsProvider.create(myCredentials))
* .build();
* CompletionServiceClient completionServiceClient =
* CompletionServiceClient.create(completionServiceSettings);
* }</pre>
*
* <p>To customize the endpoint:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* CompletionServiceSettings completionServiceSettings =
* CompletionServiceSettings.newBuilder().setEndpoint(myEndpoint).build();
* CompletionServiceClient completionServiceClient =
* CompletionServiceClient.create(completionServiceSettings);
* }</pre>
*
* <p>To use REST (HTTP1.1/JSON) transport (instead of gRPC) for sending and receiving requests over
* the wire:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* CompletionServiceSettings completionServiceSettings =
* CompletionServiceSettings.newHttpJsonBuilder().build();
* CompletionServiceClient completionServiceClient =
* CompletionServiceClient.create(completionServiceSettings);
* }</pre>
*
* <p>Please refer to the GitHub repository's samples for more quickstart code snippets.
*/
@BetaApi
@Generated("by gapic-generator-java")
public class CompletionServiceClient implements BackgroundResource {
private final CompletionServiceSettings settings;
private final CompletionServiceStub stub;
private final OperationsClient httpJsonOperationsClient;
private final com.google.longrunning.OperationsClient operationsClient;
/** Constructs an instance of CompletionServiceClient with default settings. */
public static final CompletionServiceClient create() throws IOException {
return create(CompletionServiceSettings.newBuilder().build());
}
/**
* Constructs an instance of CompletionServiceClient, using the given settings. The channels are
* created based on the settings passed in, or defaults for any settings that are not set.
*/
public static final CompletionServiceClient create(CompletionServiceSettings settings)
throws IOException {
return new CompletionServiceClient(settings);
}
/**
* Constructs an instance of CompletionServiceClient, using the given stub for making calls. This
* is for advanced usage - prefer using create(CompletionServiceSettings).
*/
public static final CompletionServiceClient create(CompletionServiceStub stub) {
return new CompletionServiceClient(stub);
}
/**
* Constructs an instance of CompletionServiceClient, using the given settings. This is protected
* so that it is easy to make a subclass, but otherwise, the static factory methods should be
* preferred.
*/
protected CompletionServiceClient(CompletionServiceSettings settings) throws IOException {
this.settings = settings;
this.stub = ((CompletionServiceStubSettings) settings.getStubSettings()).createStub();
this.operationsClient =
com.google.longrunning.OperationsClient.create(this.stub.getOperationsStub());
this.httpJsonOperationsClient = OperationsClient.create(this.stub.getHttpJsonOperationsStub());
}
protected CompletionServiceClient(CompletionServiceStub stub) {
this.settings = null;
this.stub = stub;
this.operationsClient =
com.google.longrunning.OperationsClient.create(this.stub.getOperationsStub());
this.httpJsonOperationsClient = OperationsClient.create(this.stub.getHttpJsonOperationsStub());
}
public final CompletionServiceSettings getSettings() {
return settings;
}
public CompletionServiceStub getStub() {
return stub;
}
/**
* Returns the OperationsClient that can be used to query the status of a long-running operation
* returned by another API method call.
*/
public final com.google.longrunning.OperationsClient getOperationsClient() {
return operationsClient;
}
/**
* Returns the OperationsClient that can be used to query the status of a long-running operation
* returned by another API method call.
*/
@BetaApi
public final OperationsClient getHttpJsonOperationsClient() {
return httpJsonOperationsClient;
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Completes the specified user input with keyword suggestions.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (CompletionServiceClient completionServiceClient = CompletionServiceClient.create()) {
* CompleteQueryRequest request =
* CompleteQueryRequest.newBuilder()
* .setDataStore(
* DataStoreName.ofProjectLocationDataStoreName(
* "[PROJECT]", "[LOCATION]", "[DATA_STORE]")
* .toString())
* .setQuery("query107944136")
* .setQueryModel("queryModel-184930495")
* .setUserPseudoId("userPseudoId-1155274652")
* .setIncludeTailSuggestions(true)
* .build();
* CompleteQueryResponse response = completionServiceClient.completeQuery(request);
* }
* }</pre>
*
* @param request The request object containing all of the parameters for the API call.
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
public final CompleteQueryResponse completeQuery(CompleteQueryRequest request) {
return completeQueryCallable().call(request);
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Completes the specified user input with keyword suggestions.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (CompletionServiceClient completionServiceClient = CompletionServiceClient.create()) {
* CompleteQueryRequest request =
* CompleteQueryRequest.newBuilder()
* .setDataStore(
* DataStoreName.ofProjectLocationDataStoreName(
* "[PROJECT]", "[LOCATION]", "[DATA_STORE]")
* .toString())
* .setQuery("query107944136")
* .setQueryModel("queryModel-184930495")
* .setUserPseudoId("userPseudoId-1155274652")
* .setIncludeTailSuggestions(true)
* .build();
* ApiFuture<CompleteQueryResponse> future =
* completionServiceClient.completeQueryCallable().futureCall(request);
* // Do something.
* CompleteQueryResponse response = future.get();
* }
* }</pre>
*/
public final UnaryCallable<CompleteQueryRequest, CompleteQueryResponse> completeQueryCallable() {
return stub.completeQueryCallable();
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Imports all
* [SuggestionDenyListEntry][google.cloud.discoveryengine.v1alpha.SuggestionDenyListEntry] for a
* DataStore.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (CompletionServiceClient completionServiceClient = CompletionServiceClient.create()) {
* ImportSuggestionDenyListEntriesRequest request =
* ImportSuggestionDenyListEntriesRequest.newBuilder()
* .setParent(
* DataStoreName.ofProjectLocationCollectionDataStoreName(
* "[PROJECT]", "[LOCATION]", "[COLLECTION]", "[DATA_STORE]")
* .toString())
* .build();
* ImportSuggestionDenyListEntriesResponse response =
* completionServiceClient.importSuggestionDenyListEntriesAsync(request).get();
* }
* }</pre>
*
* @param request The request object containing all of the parameters for the API call.
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
public final OperationFuture<
ImportSuggestionDenyListEntriesResponse, ImportSuggestionDenyListEntriesMetadata>
importSuggestionDenyListEntriesAsync(ImportSuggestionDenyListEntriesRequest request) {
return importSuggestionDenyListEntriesOperationCallable().futureCall(request);
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Imports all
* [SuggestionDenyListEntry][google.cloud.discoveryengine.v1alpha.SuggestionDenyListEntry] for a
* DataStore.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (CompletionServiceClient completionServiceClient = CompletionServiceClient.create()) {
* ImportSuggestionDenyListEntriesRequest request =
* ImportSuggestionDenyListEntriesRequest.newBuilder()
* .setParent(
* DataStoreName.ofProjectLocationCollectionDataStoreName(
* "[PROJECT]", "[LOCATION]", "[COLLECTION]", "[DATA_STORE]")
* .toString())
* .build();
* OperationFuture<
* ImportSuggestionDenyListEntriesResponse, ImportSuggestionDenyListEntriesMetadata>
* future =
* completionServiceClient
* .importSuggestionDenyListEntriesOperationCallable()
* .futureCall(request);
* // Do something.
* ImportSuggestionDenyListEntriesResponse response = future.get();
* }
* }</pre>
*/
public final OperationCallable<
ImportSuggestionDenyListEntriesRequest,
ImportSuggestionDenyListEntriesResponse,
ImportSuggestionDenyListEntriesMetadata>
importSuggestionDenyListEntriesOperationCallable() {
return stub.importSuggestionDenyListEntriesOperationCallable();
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Imports all
* [SuggestionDenyListEntry][google.cloud.discoveryengine.v1alpha.SuggestionDenyListEntry] for a
* DataStore.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (CompletionServiceClient completionServiceClient = CompletionServiceClient.create()) {
* ImportSuggestionDenyListEntriesRequest request =
* ImportSuggestionDenyListEntriesRequest.newBuilder()
* .setParent(
* DataStoreName.ofProjectLocationCollectionDataStoreName(
* "[PROJECT]", "[LOCATION]", "[COLLECTION]", "[DATA_STORE]")
* .toString())
* .build();
* ApiFuture<Operation> future =
* completionServiceClient.importSuggestionDenyListEntriesCallable().futureCall(request);
* // Do something.
* Operation response = future.get();
* }
* }</pre>
*/
public final UnaryCallable<ImportSuggestionDenyListEntriesRequest, Operation>
importSuggestionDenyListEntriesCallable() {
return stub.importSuggestionDenyListEntriesCallable();
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Permanently deletes all
* [SuggestionDenyListEntry][google.cloud.discoveryengine.v1alpha.SuggestionDenyListEntry] for a
* DataStore.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (CompletionServiceClient completionServiceClient = CompletionServiceClient.create()) {
* PurgeSuggestionDenyListEntriesRequest request =
* PurgeSuggestionDenyListEntriesRequest.newBuilder()
* .setParent(
* DataStoreName.ofProjectLocationCollectionDataStoreName(
* "[PROJECT]", "[LOCATION]", "[COLLECTION]", "[DATA_STORE]")
* .toString())
* .build();
* PurgeSuggestionDenyListEntriesResponse response =
* completionServiceClient.purgeSuggestionDenyListEntriesAsync(request).get();
* }
* }</pre>
*
* @param request The request object containing all of the parameters for the API call.
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
public final OperationFuture<
PurgeSuggestionDenyListEntriesResponse, PurgeSuggestionDenyListEntriesMetadata>
purgeSuggestionDenyListEntriesAsync(PurgeSuggestionDenyListEntriesRequest request) {
return purgeSuggestionDenyListEntriesOperationCallable().futureCall(request);
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Permanently deletes all
* [SuggestionDenyListEntry][google.cloud.discoveryengine.v1alpha.SuggestionDenyListEntry] for a
* DataStore.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (CompletionServiceClient completionServiceClient = CompletionServiceClient.create()) {
* PurgeSuggestionDenyListEntriesRequest request =
* PurgeSuggestionDenyListEntriesRequest.newBuilder()
* .setParent(
* DataStoreName.ofProjectLocationCollectionDataStoreName(
* "[PROJECT]", "[LOCATION]", "[COLLECTION]", "[DATA_STORE]")
* .toString())
* .build();
* OperationFuture<
* PurgeSuggestionDenyListEntriesResponse, PurgeSuggestionDenyListEntriesMetadata>
* future =
* completionServiceClient
* .purgeSuggestionDenyListEntriesOperationCallable()
* .futureCall(request);
* // Do something.
* PurgeSuggestionDenyListEntriesResponse response = future.get();
* }
* }</pre>
*/
public final OperationCallable<
PurgeSuggestionDenyListEntriesRequest,
PurgeSuggestionDenyListEntriesResponse,
PurgeSuggestionDenyListEntriesMetadata>
purgeSuggestionDenyListEntriesOperationCallable() {
return stub.purgeSuggestionDenyListEntriesOperationCallable();
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Permanently deletes all
* [SuggestionDenyListEntry][google.cloud.discoveryengine.v1alpha.SuggestionDenyListEntry] for a
* DataStore.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (CompletionServiceClient completionServiceClient = CompletionServiceClient.create()) {
* PurgeSuggestionDenyListEntriesRequest request =
* PurgeSuggestionDenyListEntriesRequest.newBuilder()
* .setParent(
* DataStoreName.ofProjectLocationCollectionDataStoreName(
* "[PROJECT]", "[LOCATION]", "[COLLECTION]", "[DATA_STORE]")
* .toString())
* .build();
* ApiFuture<Operation> future =
* completionServiceClient.purgeSuggestionDenyListEntriesCallable().futureCall(request);
* // Do something.
* Operation response = future.get();
* }
* }</pre>
*/
public final UnaryCallable<PurgeSuggestionDenyListEntriesRequest, Operation>
purgeSuggestionDenyListEntriesCallable() {
return stub.purgeSuggestionDenyListEntriesCallable();
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Imports [CompletionSuggestion][google.cloud.discoveryengine.v1alpha.CompletionSuggestion]s for
* a DataStore.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (CompletionServiceClient completionServiceClient = CompletionServiceClient.create()) {
* ImportCompletionSuggestionsRequest request =
* ImportCompletionSuggestionsRequest.newBuilder()
* .setParent(
* DataStoreName.ofProjectLocationCollectionDataStoreName(
* "[PROJECT]", "[LOCATION]", "[COLLECTION]", "[DATA_STORE]")
* .toString())
* .setErrorConfig(ImportErrorConfig.newBuilder().build())
* .build();
* ImportCompletionSuggestionsResponse response =
* completionServiceClient.importCompletionSuggestionsAsync(request).get();
* }
* }</pre>
*
* @param request The request object containing all of the parameters for the API call.
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
public final OperationFuture<
ImportCompletionSuggestionsResponse, ImportCompletionSuggestionsMetadata>
importCompletionSuggestionsAsync(ImportCompletionSuggestionsRequest request) {
return importCompletionSuggestionsOperationCallable().futureCall(request);
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Imports [CompletionSuggestion][google.cloud.discoveryengine.v1alpha.CompletionSuggestion]s for
* a DataStore.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (CompletionServiceClient completionServiceClient = CompletionServiceClient.create()) {
* ImportCompletionSuggestionsRequest request =
* ImportCompletionSuggestionsRequest.newBuilder()
* .setParent(
* DataStoreName.ofProjectLocationCollectionDataStoreName(
* "[PROJECT]", "[LOCATION]", "[COLLECTION]", "[DATA_STORE]")
* .toString())
* .setErrorConfig(ImportErrorConfig.newBuilder().build())
* .build();
* OperationFuture<ImportCompletionSuggestionsResponse, ImportCompletionSuggestionsMetadata>
* future =
* completionServiceClient
* .importCompletionSuggestionsOperationCallable()
* .futureCall(request);
* // Do something.
* ImportCompletionSuggestionsResponse response = future.get();
* }
* }</pre>
*/
public final OperationCallable<
ImportCompletionSuggestionsRequest,
ImportCompletionSuggestionsResponse,
ImportCompletionSuggestionsMetadata>
importCompletionSuggestionsOperationCallable() {
return stub.importCompletionSuggestionsOperationCallable();
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Imports [CompletionSuggestion][google.cloud.discoveryengine.v1alpha.CompletionSuggestion]s for
* a DataStore.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (CompletionServiceClient completionServiceClient = CompletionServiceClient.create()) {
* ImportCompletionSuggestionsRequest request =
* ImportCompletionSuggestionsRequest.newBuilder()
* .setParent(
* DataStoreName.ofProjectLocationCollectionDataStoreName(
* "[PROJECT]", "[LOCATION]", "[COLLECTION]", "[DATA_STORE]")
* .toString())
* .setErrorConfig(ImportErrorConfig.newBuilder().build())
* .build();
* ApiFuture<Operation> future =
* completionServiceClient.importCompletionSuggestionsCallable().futureCall(request);
* // Do something.
* Operation response = future.get();
* }
* }</pre>
*/
public final UnaryCallable<ImportCompletionSuggestionsRequest, Operation>
importCompletionSuggestionsCallable() {
return stub.importCompletionSuggestionsCallable();
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Permanently deletes all
* [CompletionSuggestion][google.cloud.discoveryengine.v1alpha.CompletionSuggestion]s for a
* DataStore.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (CompletionServiceClient completionServiceClient = CompletionServiceClient.create()) {
* PurgeCompletionSuggestionsRequest request =
* PurgeCompletionSuggestionsRequest.newBuilder()
* .setParent(
* DataStoreName.ofProjectLocationCollectionDataStoreName(
* "[PROJECT]", "[LOCATION]", "[COLLECTION]", "[DATA_STORE]")
* .toString())
* .build();
* PurgeCompletionSuggestionsResponse response =
* completionServiceClient.purgeCompletionSuggestionsAsync(request).get();
* }
* }</pre>
*
* @param request The request object containing all of the parameters for the API call.
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
public final OperationFuture<
PurgeCompletionSuggestionsResponse, PurgeCompletionSuggestionsMetadata>
purgeCompletionSuggestionsAsync(PurgeCompletionSuggestionsRequest request) {
return purgeCompletionSuggestionsOperationCallable().futureCall(request);
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Permanently deletes all
* [CompletionSuggestion][google.cloud.discoveryengine.v1alpha.CompletionSuggestion]s for a
* DataStore.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (CompletionServiceClient completionServiceClient = CompletionServiceClient.create()) {
* PurgeCompletionSuggestionsRequest request =
* PurgeCompletionSuggestionsRequest.newBuilder()
* .setParent(
* DataStoreName.ofProjectLocationCollectionDataStoreName(
* "[PROJECT]", "[LOCATION]", "[COLLECTION]", "[DATA_STORE]")
* .toString())
* .build();
* OperationFuture<PurgeCompletionSuggestionsResponse, PurgeCompletionSuggestionsMetadata>
* future =
* completionServiceClient
* .purgeCompletionSuggestionsOperationCallable()
* .futureCall(request);
* // Do something.
* PurgeCompletionSuggestionsResponse response = future.get();
* }
* }</pre>
*/
public final OperationCallable<
PurgeCompletionSuggestionsRequest,
PurgeCompletionSuggestionsResponse,
PurgeCompletionSuggestionsMetadata>
purgeCompletionSuggestionsOperationCallable() {
return stub.purgeCompletionSuggestionsOperationCallable();
}
  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Permanently deletes all
   * [CompletionSuggestion][google.cloud.discoveryengine.v1alpha.CompletionSuggestion]s for a
   * DataStore.
   *
   * <p>Sample code:
   *
   * <pre>{@code
   * // This snippet has been automatically generated and should be regarded as a code template only.
   * // It will require modifications to work:
   * // - It may require correct/in-range values for request initialization.
   * // - It may require specifying regional endpoints when creating the service client as shown in
   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
   * try (CompletionServiceClient completionServiceClient = CompletionServiceClient.create()) {
   *   PurgeCompletionSuggestionsRequest request =
   *       PurgeCompletionSuggestionsRequest.newBuilder()
   *           .setParent(
   *               DataStoreName.ofProjectLocationCollectionDataStoreName(
   *                       "[PROJECT]", "[LOCATION]", "[COLLECTION]", "[DATA_STORE]")
   *                   .toString())
   *           .build();
   *   ApiFuture<Operation> future =
   *       completionServiceClient.purgeCompletionSuggestionsCallable().futureCall(request);
   *   // Do something.
   *   Operation response = future.get();
   * }
   * }</pre>
   */
  public final UnaryCallable<PurgeCompletionSuggestionsRequest, Operation>
      purgeCompletionSuggestionsCallable() {
    // Pure delegation: returns the raw-Operation variant of the purge RPC from the stub.
    return stub.purgeCompletionSuggestionsCallable();
  }
  @Override
  public final void close() {
    // Delegates resource release to the underlying stub (client is AutoCloseable via this).
    stub.close();
  }
  @Override
  public void shutdown() {
    // Delegates shutdown of background resources to the stub.
    stub.shutdown();
  }
  @Override
  public boolean isShutdown() {
    // Reports the stub's shutdown state.
    return stub.isShutdown();
  }
  @Override
  public boolean isTerminated() {
    // Reports the stub's termination state.
    return stub.isTerminated();
  }
  @Override
  public void shutdownNow() {
    // Delegates an immediate shutdown request to the stub.
    stub.shutdownNow();
  }
  @Override
  public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException {
    // Delegates the bounded wait for termination to the stub; propagates interruption to callers.
    return stub.awaitTermination(duration, unit);
  }
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/container/v1/cluster_service.proto
// Protobuf Java Version: 3.25.8
package com.google.container.v1;
public interface UpdateNodePoolRequestOrBuilder
extends
// @@protoc_insertion_point(interface_extends:google.container.v1.UpdateNodePoolRequest)
com.google.protobuf.MessageOrBuilder {
/**
*
*
* <pre>
* Deprecated. The Google Developers Console [project ID or project
* number](https://cloud.google.com/resource-manager/docs/creating-managing-projects).
* This field has been deprecated and replaced by the name field.
* </pre>
*
* <code>string project_id = 1 [deprecated = true];</code>
*
* @deprecated google.container.v1.UpdateNodePoolRequest.project_id is deprecated. See
* google/container/v1/cluster_service.proto;l=3677
* @return The projectId.
*/
@java.lang.Deprecated
java.lang.String getProjectId();
/**
*
*
* <pre>
* Deprecated. The Google Developers Console [project ID or project
* number](https://cloud.google.com/resource-manager/docs/creating-managing-projects).
* This field has been deprecated and replaced by the name field.
* </pre>
*
* <code>string project_id = 1 [deprecated = true];</code>
*
* @deprecated google.container.v1.UpdateNodePoolRequest.project_id is deprecated. See
* google/container/v1/cluster_service.proto;l=3677
* @return The bytes for projectId.
*/
@java.lang.Deprecated
com.google.protobuf.ByteString getProjectIdBytes();
/**
*
*
* <pre>
* Deprecated. The name of the Google Compute Engine
* [zone](https://cloud.google.com/compute/docs/zones#available)
* in which the cluster resides. This field has been deprecated and replaced
* by the name field.
* </pre>
*
* <code>string zone = 2 [deprecated = true];</code>
*
* @deprecated google.container.v1.UpdateNodePoolRequest.zone is deprecated. See
* google/container/v1/cluster_service.proto;l=3683
* @return The zone.
*/
@java.lang.Deprecated
java.lang.String getZone();
/**
*
*
* <pre>
* Deprecated. The name of the Google Compute Engine
* [zone](https://cloud.google.com/compute/docs/zones#available)
* in which the cluster resides. This field has been deprecated and replaced
* by the name field.
* </pre>
*
* <code>string zone = 2 [deprecated = true];</code>
*
* @deprecated google.container.v1.UpdateNodePoolRequest.zone is deprecated. See
* google/container/v1/cluster_service.proto;l=3683
* @return The bytes for zone.
*/
@java.lang.Deprecated
com.google.protobuf.ByteString getZoneBytes();
/**
*
*
* <pre>
* Deprecated. The name of the cluster to upgrade.
* This field has been deprecated and replaced by the name field.
* </pre>
*
* <code>string cluster_id = 3 [deprecated = true];</code>
*
* @deprecated google.container.v1.UpdateNodePoolRequest.cluster_id is deprecated. See
* google/container/v1/cluster_service.proto;l=3687
* @return The clusterId.
*/
@java.lang.Deprecated
java.lang.String getClusterId();
/**
*
*
* <pre>
* Deprecated. The name of the cluster to upgrade.
* This field has been deprecated and replaced by the name field.
* </pre>
*
* <code>string cluster_id = 3 [deprecated = true];</code>
*
* @deprecated google.container.v1.UpdateNodePoolRequest.cluster_id is deprecated. See
* google/container/v1/cluster_service.proto;l=3687
* @return The bytes for clusterId.
*/
@java.lang.Deprecated
com.google.protobuf.ByteString getClusterIdBytes();
/**
*
*
* <pre>
* Deprecated. The name of the node pool to upgrade.
* This field has been deprecated and replaced by the name field.
* </pre>
*
* <code>string node_pool_id = 4 [deprecated = true];</code>
*
* @deprecated google.container.v1.UpdateNodePoolRequest.node_pool_id is deprecated. See
* google/container/v1/cluster_service.proto;l=3691
* @return The nodePoolId.
*/
@java.lang.Deprecated
java.lang.String getNodePoolId();
/**
*
*
* <pre>
* Deprecated. The name of the node pool to upgrade.
* This field has been deprecated and replaced by the name field.
* </pre>
*
* <code>string node_pool_id = 4 [deprecated = true];</code>
*
* @deprecated google.container.v1.UpdateNodePoolRequest.node_pool_id is deprecated. See
* google/container/v1/cluster_service.proto;l=3691
* @return The bytes for nodePoolId.
*/
@java.lang.Deprecated
com.google.protobuf.ByteString getNodePoolIdBytes();
/**
*
*
* <pre>
* Required. The Kubernetes version to change the nodes to (typically an
* upgrade).
*
* Users may specify either explicit versions offered by Kubernetes Engine or
* version aliases, which have the following behavior:
*
* - "latest": picks the highest valid Kubernetes version
* - "1.X": picks the highest valid patch+gke.N patch in the 1.X version
* - "1.X.Y": picks the highest valid gke.N patch in the 1.X.Y version
* - "1.X.Y-gke.N": picks an explicit Kubernetes version
* - "-": picks the Kubernetes master version
* </pre>
*
* <code>string node_version = 5 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The nodeVersion.
*/
java.lang.String getNodeVersion();
/**
*
*
* <pre>
* Required. The Kubernetes version to change the nodes to (typically an
* upgrade).
*
* Users may specify either explicit versions offered by Kubernetes Engine or
* version aliases, which have the following behavior:
*
* - "latest": picks the highest valid Kubernetes version
* - "1.X": picks the highest valid patch+gke.N patch in the 1.X version
* - "1.X.Y": picks the highest valid gke.N patch in the 1.X.Y version
* - "1.X.Y-gke.N": picks an explicit Kubernetes version
* - "-": picks the Kubernetes master version
* </pre>
*
* <code>string node_version = 5 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for nodeVersion.
*/
com.google.protobuf.ByteString getNodeVersionBytes();
/**
*
*
* <pre>
* Required. The desired image type for the node pool. Please see
* https://cloud.google.com/kubernetes-engine/docs/concepts/node-images
* for available image types.
* </pre>
*
* <code>string image_type = 6 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The imageType.
*/
java.lang.String getImageType();
/**
*
*
* <pre>
* Required. The desired image type for the node pool. Please see
* https://cloud.google.com/kubernetes-engine/docs/concepts/node-images
* for available image types.
* </pre>
*
* <code>string image_type = 6 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for imageType.
*/
com.google.protobuf.ByteString getImageTypeBytes();
/**
*
*
* <pre>
* The name (project, location, cluster, node pool) of the node pool to
* update. Specified in the format
* `projects/*/locations/*/clusters/*/nodePools/*`.
* </pre>
*
* <code>string name = 8;</code>
*
* @return The name.
*/
java.lang.String getName();
/**
*
*
* <pre>
* The name (project, location, cluster, node pool) of the node pool to
* update. Specified in the format
* `projects/*/locations/*/clusters/*/nodePools/*`.
* </pre>
*
* <code>string name = 8;</code>
*
* @return The bytes for name.
*/
com.google.protobuf.ByteString getNameBytes();
/**
*
*
* <pre>
* The desired list of Google Compute Engine
* [zones](https://cloud.google.com/compute/docs/zones#available)
* in which the node pool's nodes should be located. Changing the locations
* for a node pool will result in nodes being either created or removed from
* the node pool, depending on whether locations are being added or removed.
* </pre>
*
* <code>repeated string locations = 13;</code>
*
* @return A list containing the locations.
*/
java.util.List<java.lang.String> getLocationsList();
/**
*
*
* <pre>
* The desired list of Google Compute Engine
* [zones](https://cloud.google.com/compute/docs/zones#available)
* in which the node pool's nodes should be located. Changing the locations
* for a node pool will result in nodes being either created or removed from
* the node pool, depending on whether locations are being added or removed.
* </pre>
*
* <code>repeated string locations = 13;</code>
*
* @return The count of locations.
*/
int getLocationsCount();
/**
*
*
* <pre>
* The desired list of Google Compute Engine
* [zones](https://cloud.google.com/compute/docs/zones#available)
* in which the node pool's nodes should be located. Changing the locations
* for a node pool will result in nodes being either created or removed from
* the node pool, depending on whether locations are being added or removed.
* </pre>
*
* <code>repeated string locations = 13;</code>
*
* @param index The index of the element to return.
* @return The locations at the given index.
*/
java.lang.String getLocations(int index);
/**
*
*
* <pre>
* The desired list of Google Compute Engine
* [zones](https://cloud.google.com/compute/docs/zones#available)
* in which the node pool's nodes should be located. Changing the locations
* for a node pool will result in nodes being either created or removed from
* the node pool, depending on whether locations are being added or removed.
* </pre>
*
* <code>repeated string locations = 13;</code>
*
* @param index The index of the value to return.
* @return The bytes of the locations at the given index.
*/
com.google.protobuf.ByteString getLocationsBytes(int index);
/**
*
*
* <pre>
* The desired workload metadata config for the node pool.
* </pre>
*
* <code>.google.container.v1.WorkloadMetadataConfig workload_metadata_config = 14;</code>
*
* @return Whether the workloadMetadataConfig field is set.
*/
boolean hasWorkloadMetadataConfig();
/**
*
*
* <pre>
* The desired workload metadata config for the node pool.
* </pre>
*
* <code>.google.container.v1.WorkloadMetadataConfig workload_metadata_config = 14;</code>
*
* @return The workloadMetadataConfig.
*/
com.google.container.v1.WorkloadMetadataConfig getWorkloadMetadataConfig();
/**
*
*
* <pre>
* The desired workload metadata config for the node pool.
* </pre>
*
* <code>.google.container.v1.WorkloadMetadataConfig workload_metadata_config = 14;</code>
*/
com.google.container.v1.WorkloadMetadataConfigOrBuilder getWorkloadMetadataConfigOrBuilder();
/**
*
*
* <pre>
* Upgrade settings control disruption and speed of the upgrade.
* </pre>
*
* <code>.google.container.v1.NodePool.UpgradeSettings upgrade_settings = 15;</code>
*
* @return Whether the upgradeSettings field is set.
*/
boolean hasUpgradeSettings();
/**
*
*
* <pre>
* Upgrade settings control disruption and speed of the upgrade.
* </pre>
*
* <code>.google.container.v1.NodePool.UpgradeSettings upgrade_settings = 15;</code>
*
* @return The upgradeSettings.
*/
com.google.container.v1.NodePool.UpgradeSettings getUpgradeSettings();
/**
*
*
* <pre>
* Upgrade settings control disruption and speed of the upgrade.
* </pre>
*
* <code>.google.container.v1.NodePool.UpgradeSettings upgrade_settings = 15;</code>
*/
com.google.container.v1.NodePool.UpgradeSettingsOrBuilder getUpgradeSettingsOrBuilder();
/**
*
*
* <pre>
* The desired network tags to be applied to all nodes in the node pool.
* If this field is not present, the tags will not be changed. Otherwise,
* the existing network tags will be *replaced* with the provided tags.
* </pre>
*
* <code>.google.container.v1.NetworkTags tags = 16;</code>
*
* @return Whether the tags field is set.
*/
boolean hasTags();
/**
*
*
* <pre>
* The desired network tags to be applied to all nodes in the node pool.
* If this field is not present, the tags will not be changed. Otherwise,
* the existing network tags will be *replaced* with the provided tags.
* </pre>
*
* <code>.google.container.v1.NetworkTags tags = 16;</code>
*
* @return The tags.
*/
com.google.container.v1.NetworkTags getTags();
/**
*
*
* <pre>
* The desired network tags to be applied to all nodes in the node pool.
* If this field is not present, the tags will not be changed. Otherwise,
* the existing network tags will be *replaced* with the provided tags.
* </pre>
*
* <code>.google.container.v1.NetworkTags tags = 16;</code>
*/
com.google.container.v1.NetworkTagsOrBuilder getTagsOrBuilder();
/**
*
*
* <pre>
* The desired node taints to be applied to all nodes in the node pool.
* If this field is not present, the taints will not be changed. Otherwise,
* the existing node taints will be *replaced* with the provided taints.
* </pre>
*
* <code>.google.container.v1.NodeTaints taints = 17;</code>
*
* @return Whether the taints field is set.
*/
boolean hasTaints();
/**
*
*
* <pre>
* The desired node taints to be applied to all nodes in the node pool.
* If this field is not present, the taints will not be changed. Otherwise,
* the existing node taints will be *replaced* with the provided taints.
* </pre>
*
* <code>.google.container.v1.NodeTaints taints = 17;</code>
*
* @return The taints.
*/
com.google.container.v1.NodeTaints getTaints();
/**
*
*
* <pre>
* The desired node taints to be applied to all nodes in the node pool.
* If this field is not present, the taints will not be changed. Otherwise,
* the existing node taints will be *replaced* with the provided taints.
* </pre>
*
* <code>.google.container.v1.NodeTaints taints = 17;</code>
*/
com.google.container.v1.NodeTaintsOrBuilder getTaintsOrBuilder();
/**
*
*
* <pre>
* The desired node labels to be applied to all nodes in the node pool.
* If this field is not present, the labels will not be changed. Otherwise,
* the existing node labels will be *replaced* with the provided labels.
* </pre>
*
* <code>.google.container.v1.NodeLabels labels = 18;</code>
*
* @return Whether the labels field is set.
*/
boolean hasLabels();
/**
*
*
* <pre>
* The desired node labels to be applied to all nodes in the node pool.
* If this field is not present, the labels will not be changed. Otherwise,
* the existing node labels will be *replaced* with the provided labels.
* </pre>
*
* <code>.google.container.v1.NodeLabels labels = 18;</code>
*
* @return The labels.
*/
com.google.container.v1.NodeLabels getLabels();
/**
*
*
* <pre>
* The desired node labels to be applied to all nodes in the node pool.
* If this field is not present, the labels will not be changed. Otherwise,
* the existing node labels will be *replaced* with the provided labels.
* </pre>
*
* <code>.google.container.v1.NodeLabels labels = 18;</code>
*/
com.google.container.v1.NodeLabelsOrBuilder getLabelsOrBuilder();
/**
*
*
* <pre>
* Parameters that can be configured on Linux nodes.
* </pre>
*
* <code>.google.container.v1.LinuxNodeConfig linux_node_config = 19;</code>
*
* @return Whether the linuxNodeConfig field is set.
*/
boolean hasLinuxNodeConfig();
/**
*
*
* <pre>
* Parameters that can be configured on Linux nodes.
* </pre>
*
* <code>.google.container.v1.LinuxNodeConfig linux_node_config = 19;</code>
*
* @return The linuxNodeConfig.
*/
com.google.container.v1.LinuxNodeConfig getLinuxNodeConfig();
/**
*
*
* <pre>
* Parameters that can be configured on Linux nodes.
* </pre>
*
* <code>.google.container.v1.LinuxNodeConfig linux_node_config = 19;</code>
*/
com.google.container.v1.LinuxNodeConfigOrBuilder getLinuxNodeConfigOrBuilder();
/**
*
*
* <pre>
* Node kubelet configs.
* </pre>
*
* <code>.google.container.v1.NodeKubeletConfig kubelet_config = 20;</code>
*
* @return Whether the kubeletConfig field is set.
*/
boolean hasKubeletConfig();
/**
*
*
* <pre>
* Node kubelet configs.
* </pre>
*
* <code>.google.container.v1.NodeKubeletConfig kubelet_config = 20;</code>
*
* @return The kubeletConfig.
*/
com.google.container.v1.NodeKubeletConfig getKubeletConfig();
/**
*
*
* <pre>
* Node kubelet configs.
* </pre>
*
* <code>.google.container.v1.NodeKubeletConfig kubelet_config = 20;</code>
*/
com.google.container.v1.NodeKubeletConfigOrBuilder getKubeletConfigOrBuilder();
/**
*
*
* <pre>
* Node network config.
* </pre>
*
* <code>.google.container.v1.NodeNetworkConfig node_network_config = 21;</code>
*
* @return Whether the nodeNetworkConfig field is set.
*/
boolean hasNodeNetworkConfig();
/**
*
*
* <pre>
* Node network config.
* </pre>
*
* <code>.google.container.v1.NodeNetworkConfig node_network_config = 21;</code>
*
* @return The nodeNetworkConfig.
*/
com.google.container.v1.NodeNetworkConfig getNodeNetworkConfig();
/**
*
*
* <pre>
* Node network config.
* </pre>
*
* <code>.google.container.v1.NodeNetworkConfig node_network_config = 21;</code>
*/
com.google.container.v1.NodeNetworkConfigOrBuilder getNodeNetworkConfigOrBuilder();
/**
*
*
* <pre>
* GCFS config.
* </pre>
*
* <code>.google.container.v1.GcfsConfig gcfs_config = 22;</code>
*
* @return Whether the gcfsConfig field is set.
*/
boolean hasGcfsConfig();
/**
*
*
* <pre>
* GCFS config.
* </pre>
*
* <code>.google.container.v1.GcfsConfig gcfs_config = 22;</code>
*
* @return The gcfsConfig.
*/
com.google.container.v1.GcfsConfig getGcfsConfig();
/**
*
*
* <pre>
* GCFS config.
* </pre>
*
* <code>.google.container.v1.GcfsConfig gcfs_config = 22;</code>
*/
com.google.container.v1.GcfsConfigOrBuilder getGcfsConfigOrBuilder();
/**
*
*
* <pre>
* Confidential nodes config.
* All the nodes in the node pool will be Confidential VM once enabled.
* </pre>
*
* <code>.google.container.v1.ConfidentialNodes confidential_nodes = 23;</code>
*
* @return Whether the confidentialNodes field is set.
*/
boolean hasConfidentialNodes();
/**
*
*
* <pre>
* Confidential nodes config.
* All the nodes in the node pool will be Confidential VM once enabled.
* </pre>
*
* <code>.google.container.v1.ConfidentialNodes confidential_nodes = 23;</code>
*
* @return The confidentialNodes.
*/
com.google.container.v1.ConfidentialNodes getConfidentialNodes();
/**
*
*
* <pre>
* Confidential nodes config.
* All the nodes in the node pool will be Confidential VM once enabled.
* </pre>
*
* <code>.google.container.v1.ConfidentialNodes confidential_nodes = 23;</code>
*/
com.google.container.v1.ConfidentialNodesOrBuilder getConfidentialNodesOrBuilder();
/**
*
*
* <pre>
* Enable or disable gvnic on the node pool.
* </pre>
*
* <code>.google.container.v1.VirtualNIC gvnic = 29;</code>
*
* @return Whether the gvnic field is set.
*/
boolean hasGvnic();
/**
*
*
* <pre>
* Enable or disable gvnic on the node pool.
* </pre>
*
* <code>.google.container.v1.VirtualNIC gvnic = 29;</code>
*
* @return The gvnic.
*/
com.google.container.v1.VirtualNIC getGvnic();
/**
*
*
* <pre>
* Enable or disable gvnic on the node pool.
* </pre>
*
* <code>.google.container.v1.VirtualNIC gvnic = 29;</code>
*/
com.google.container.v1.VirtualNICOrBuilder getGvnicOrBuilder();
/**
*
*
* <pre>
* The current etag of the node pool.
* If an etag is provided and does not match the current etag of the node
* pool, update will be blocked and an ABORTED error will be returned.
* </pre>
*
* <code>string etag = 30;</code>
*
* @return The etag.
*/
java.lang.String getEtag();
/**
*
*
* <pre>
* The current etag of the node pool.
* If an etag is provided and does not match the current etag of the node
* pool, update will be blocked and an ABORTED error will be returned.
* </pre>
*
* <code>string etag = 30;</code>
*
* @return The bytes for etag.
*/
com.google.protobuf.ByteString getEtagBytes();
/**
*
*
* <pre>
* Enable or disable NCCL fast socket for the node pool.
* </pre>
*
* <code>.google.container.v1.FastSocket fast_socket = 31;</code>
*
* @return Whether the fastSocket field is set.
*/
boolean hasFastSocket();
/**
*
*
* <pre>
* Enable or disable NCCL fast socket for the node pool.
* </pre>
*
* <code>.google.container.v1.FastSocket fast_socket = 31;</code>
*
* @return The fastSocket.
*/
com.google.container.v1.FastSocket getFastSocket();
/**
*
*
* <pre>
* Enable or disable NCCL fast socket for the node pool.
* </pre>
*
* <code>.google.container.v1.FastSocket fast_socket = 31;</code>
*/
com.google.container.v1.FastSocketOrBuilder getFastSocketOrBuilder();
/**
*
*
* <pre>
* Logging configuration.
* </pre>
*
* <code>.google.container.v1.NodePoolLoggingConfig logging_config = 32;</code>
*
* @return Whether the loggingConfig field is set.
*/
boolean hasLoggingConfig();
/**
*
*
* <pre>
* Logging configuration.
* </pre>
*
* <code>.google.container.v1.NodePoolLoggingConfig logging_config = 32;</code>
*
* @return The loggingConfig.
*/
com.google.container.v1.NodePoolLoggingConfig getLoggingConfig();
/**
*
*
* <pre>
* Logging configuration.
* </pre>
*
* <code>.google.container.v1.NodePoolLoggingConfig logging_config = 32;</code>
*/
com.google.container.v1.NodePoolLoggingConfigOrBuilder getLoggingConfigOrBuilder();
/**
*
*
* <pre>
* The resource labels for the node pool to use to annotate any related
* Google Compute Engine resources.
* </pre>
*
* <code>.google.container.v1.ResourceLabels resource_labels = 33;</code>
*
* @return Whether the resourceLabels field is set.
*/
boolean hasResourceLabels();
/**
*
*
* <pre>
* The resource labels for the node pool to use to annotate any related
* Google Compute Engine resources.
* </pre>
*
* <code>.google.container.v1.ResourceLabels resource_labels = 33;</code>
*
* @return The resourceLabels.
*/
com.google.container.v1.ResourceLabels getResourceLabels();
/**
*
*
* <pre>
* The resource labels for the node pool to use to annotate any related
* Google Compute Engine resources.
* </pre>
*
* <code>.google.container.v1.ResourceLabels resource_labels = 33;</code>
*/
com.google.container.v1.ResourceLabelsOrBuilder getResourceLabelsOrBuilder();
/**
*
*
* <pre>
* Parameters that can be configured on Windows nodes.
* </pre>
*
* <code>.google.container.v1.WindowsNodeConfig windows_node_config = 34;</code>
*
* @return Whether the windowsNodeConfig field is set.
*/
boolean hasWindowsNodeConfig();
/**
*
*
* <pre>
* Parameters that can be configured on Windows nodes.
* </pre>
*
* <code>.google.container.v1.WindowsNodeConfig windows_node_config = 34;</code>
*
* @return The windowsNodeConfig.
*/
com.google.container.v1.WindowsNodeConfig getWindowsNodeConfig();
/**
*
*
* <pre>
* Parameters that can be configured on Windows nodes.
* </pre>
*
* <code>.google.container.v1.WindowsNodeConfig windows_node_config = 34;</code>
*/
com.google.container.v1.WindowsNodeConfigOrBuilder getWindowsNodeConfigOrBuilder();
/**
*
*
* <pre>
* A list of hardware accelerators to be attached to each node.
* See
* https://cloud.google.com/compute/docs/gpus
* for more information about support for GPUs.
* </pre>
*
* <code>repeated .google.container.v1.AcceleratorConfig accelerators = 35;</code>
*/
java.util.List<com.google.container.v1.AcceleratorConfig> getAcceleratorsList();
/**
*
*
* <pre>
* A list of hardware accelerators to be attached to each node.
* See
* https://cloud.google.com/compute/docs/gpus
* for more information about support for GPUs.
* </pre>
*
* <code>repeated .google.container.v1.AcceleratorConfig accelerators = 35;</code>
*/
com.google.container.v1.AcceleratorConfig getAccelerators(int index);
/**
*
*
* <pre>
* A list of hardware accelerators to be attached to each node.
* See
* https://cloud.google.com/compute/docs/gpus
* for more information about support for GPUs.
* </pre>
*
* <code>repeated .google.container.v1.AcceleratorConfig accelerators = 35;</code>
*/
int getAcceleratorsCount();
/**
*
*
* <pre>
* A list of hardware accelerators to be attached to each node.
* See
* https://cloud.google.com/compute/docs/gpus
* for more information about support for GPUs.
* </pre>
*
* <code>repeated .google.container.v1.AcceleratorConfig accelerators = 35;</code>
*/
java.util.List<? extends com.google.container.v1.AcceleratorConfigOrBuilder>
getAcceleratorsOrBuilderList();
/**
*
*
* <pre>
* A list of hardware accelerators to be attached to each node.
* See
* https://cloud.google.com/compute/docs/gpus
* for more information about support for GPUs.
* </pre>
*
* <code>repeated .google.container.v1.AcceleratorConfig accelerators = 35;</code>
*/
com.google.container.v1.AcceleratorConfigOrBuilder getAcceleratorsOrBuilder(int index);
/**
*
*
* <pre>
* Optional. The desired [Google Compute Engine machine
* type](https://cloud.google.com/compute/docs/machine-types)
* for nodes in the node pool. Initiates an upgrade operation that migrates
* the nodes in the node pool to the specified machine type.
* </pre>
*
* <code>string machine_type = 36 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The machineType.
*/
java.lang.String getMachineType();
/**
*
*
* <pre>
* Optional. The desired [Google Compute Engine machine
* type](https://cloud.google.com/compute/docs/machine-types)
* for nodes in the node pool. Initiates an upgrade operation that migrates
* the nodes in the node pool to the specified machine type.
* </pre>
*
* <code>string machine_type = 36 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for machineType.
*/
com.google.protobuf.ByteString getMachineTypeBytes();
/**
*
*
* <pre>
* Optional. The desired disk type (e.g. 'pd-standard', 'pd-ssd' or
* 'pd-balanced') for nodes in the node pool.
* Initiates an upgrade operation that migrates the nodes in the
* node pool to the specified disk type.
* </pre>
*
* <code>string disk_type = 37 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The diskType.
*/
java.lang.String getDiskType();
/**
*
*
* <pre>
* Optional. The desired disk type (e.g. 'pd-standard', 'pd-ssd' or
* 'pd-balanced') for nodes in the node pool.
* Initiates an upgrade operation that migrates the nodes in the
* node pool to the specified disk type.
* </pre>
*
* <code>string disk_type = 37 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for diskType.
*/
com.google.protobuf.ByteString getDiskTypeBytes();
/**
*
*
* <pre>
* Optional. The desired disk size for nodes in the node pool specified in GB.
* The smallest allowed disk size is 10GB.
* Initiates an upgrade operation that migrates the nodes in the
* node pool to the specified disk size.
* </pre>
*
* <code>int64 disk_size_gb = 38 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The diskSizeGb.
*/
long getDiskSizeGb();
/**
*
*
* <pre>
* Desired resource manager tag keys and values to be attached to the nodes
* for managing Compute Engine firewalls using Network Firewall Policies.
* Existing tags will be replaced with new values.
* </pre>
*
* <code>.google.container.v1.ResourceManagerTags resource_manager_tags = 39;</code>
*
* @return Whether the resourceManagerTags field is set.
*/
boolean hasResourceManagerTags();
/**
*
*
* <pre>
* Desired resource manager tag keys and values to be attached to the nodes
* for managing Compute Engine firewalls using Network Firewall Policies.
* Existing tags will be replaced with new values.
* </pre>
*
* <code>.google.container.v1.ResourceManagerTags resource_manager_tags = 39;</code>
*
* @return The resourceManagerTags.
*/
com.google.container.v1.ResourceManagerTags getResourceManagerTags();
/**
*
*
* <pre>
* Desired resource manager tag keys and values to be attached to the nodes
* for managing Compute Engine firewalls using Network Firewall Policies.
* Existing tags will be replaced with new values.
* </pre>
*
* <code>.google.container.v1.ResourceManagerTags resource_manager_tags = 39;</code>
*/
com.google.container.v1.ResourceManagerTagsOrBuilder getResourceManagerTagsOrBuilder();
/**
*
*
* <pre>
* The desired containerd config for nodes in the node pool.
* Initiates an upgrade operation that recreates the nodes with the new
* config.
* </pre>
*
* <code>.google.container.v1.ContainerdConfig containerd_config = 40;</code>
*
* @return Whether the containerdConfig field is set.
*/
boolean hasContainerdConfig();
/**
*
*
* <pre>
* The desired containerd config for nodes in the node pool.
* Initiates an upgrade operation that recreates the nodes with the new
* config.
* </pre>
*
* <code>.google.container.v1.ContainerdConfig containerd_config = 40;</code>
*
* @return The containerdConfig.
*/
com.google.container.v1.ContainerdConfig getContainerdConfig();
/**
*
*
* <pre>
* The desired containerd config for nodes in the node pool.
* Initiates an upgrade operation that recreates the nodes with the new
* config.
* </pre>
*
* <code>.google.container.v1.ContainerdConfig containerd_config = 40;</code>
*/
com.google.container.v1.ContainerdConfigOrBuilder getContainerdConfigOrBuilder();
/**
*
*
* <pre>
* Specifies the configuration of queued provisioning.
* </pre>
*
* <code>.google.container.v1.NodePool.QueuedProvisioning queued_provisioning = 42;</code>
*
* @return Whether the queuedProvisioning field is set.
*/
boolean hasQueuedProvisioning();
/**
*
*
* <pre>
* Specifies the configuration of queued provisioning.
* </pre>
*
* <code>.google.container.v1.NodePool.QueuedProvisioning queued_provisioning = 42;</code>
*
* @return The queuedProvisioning.
*/
com.google.container.v1.NodePool.QueuedProvisioning getQueuedProvisioning();
/**
*
*
* <pre>
* Specifies the configuration of queued provisioning.
* </pre>
*
* <code>.google.container.v1.NodePool.QueuedProvisioning queued_provisioning = 42;</code>
*/
com.google.container.v1.NodePool.QueuedProvisioningOrBuilder getQueuedProvisioningOrBuilder();
/**
*
*
* <pre>
* List of Storage Pools where boot disks are provisioned.
* Existing Storage Pools will be replaced with storage-pools.
* </pre>
*
* <code>repeated string storage_pools = 43;</code>
*
* @return A list containing the storagePools.
*/
java.util.List<java.lang.String> getStoragePoolsList();
/**
*
*
* <pre>
* List of Storage Pools where boot disks are provisioned.
* Existing Storage Pools will be replaced with storage-pools.
* </pre>
*
* <code>repeated string storage_pools = 43;</code>
*
* @return The count of storagePools.
*/
int getStoragePoolsCount();
/**
*
*
* <pre>
* List of Storage Pools where boot disks are provisioned.
* Existing Storage Pools will be replaced with storage-pools.
* </pre>
*
* <code>repeated string storage_pools = 43;</code>
*
* @param index The index of the element to return.
* @return The storagePools at the given index.
*/
java.lang.String getStoragePools(int index);
/**
*
*
* <pre>
* List of Storage Pools where boot disks are provisioned.
* Existing Storage Pools will be replaced with storage-pools.
* </pre>
*
* <code>repeated string storage_pools = 43;</code>
*
* @param index The index of the value to return.
* @return The bytes of the storagePools at the given index.
*/
com.google.protobuf.ByteString getStoragePoolsBytes(int index);
/**
*
*
* <pre>
* The maximum duration for the nodes to exist.
* If unspecified, the nodes can exist indefinitely.
* </pre>
*
* <code>.google.protobuf.Duration max_run_duration = 45;</code>
*
* @return Whether the maxRunDuration field is set.
*/
boolean hasMaxRunDuration();
/**
*
*
* <pre>
* The maximum duration for the nodes to exist.
* If unspecified, the nodes can exist indefinitely.
* </pre>
*
* <code>.google.protobuf.Duration max_run_duration = 45;</code>
*
* @return The maxRunDuration.
*/
com.google.protobuf.Duration getMaxRunDuration();
/**
*
*
* <pre>
* The maximum duration for the nodes to exist.
* If unspecified, the nodes can exist indefinitely.
* </pre>
*
* <code>.google.protobuf.Duration max_run_duration = 45;</code>
*/
com.google.protobuf.DurationOrBuilder getMaxRunDurationOrBuilder();
/**
*
*
* <pre>
* Flex Start flag for enabling Flex Start VM.
* </pre>
*
* <code>optional bool flex_start = 46;</code>
*
* @return Whether the flexStart field is set.
*/
boolean hasFlexStart();
/**
*
*
* <pre>
* Flex Start flag for enabling Flex Start VM.
* </pre>
*
* <code>optional bool flex_start = 46;</code>
*
* @return The flexStart.
*/
boolean getFlexStart();
/**
*
*
* <pre>
* The desired boot disk config for nodes in the node pool.
* Initiates an upgrade operation that migrates the nodes in the
* node pool to the specified boot disk config.
* </pre>
*
* <code>.google.container.v1.BootDisk boot_disk = 47;</code>
*
* @return Whether the bootDisk field is set.
*/
boolean hasBootDisk();
/**
*
*
* <pre>
* The desired boot disk config for nodes in the node pool.
* Initiates an upgrade operation that migrates the nodes in the
* node pool to the specified boot disk config.
* </pre>
*
* <code>.google.container.v1.BootDisk boot_disk = 47;</code>
*
* @return The bootDisk.
*/
com.google.container.v1.BootDisk getBootDisk();
/**
*
*
* <pre>
* The desired boot disk config for nodes in the node pool.
* Initiates an upgrade operation that migrates the nodes in the
* node pool to the specified boot disk config.
* </pre>
*
* <code>.google.container.v1.BootDisk boot_disk = 47;</code>
*/
com.google.container.v1.BootDiskOrBuilder getBootDiskOrBuilder();
}
|
googleapis/google-cloud-java | 36,729 | java-speech/proto-google-cloud-speech-v2/src/main/java/com/google/cloud/speech/v2/ListPhraseSetsResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/speech/v2/cloud_speech.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.speech.v2;
/**
*
*
* <pre>
* Response message for the
* [ListPhraseSets][google.cloud.speech.v2.Speech.ListPhraseSets] method.
* </pre>
*
* Protobuf type {@code google.cloud.speech.v2.ListPhraseSetsResponse}
*/
public final class ListPhraseSetsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.speech.v2.ListPhraseSetsResponse)
ListPhraseSetsResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListPhraseSetsResponse.newBuilder() to construct.
private ListPhraseSetsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListPhraseSetsResponse() {
phraseSets_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListPhraseSetsResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.speech.v2.CloudSpeechProto
.internal_static_google_cloud_speech_v2_ListPhraseSetsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.speech.v2.CloudSpeechProto
.internal_static_google_cloud_speech_v2_ListPhraseSetsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.speech.v2.ListPhraseSetsResponse.class,
com.google.cloud.speech.v2.ListPhraseSetsResponse.Builder.class);
}
public static final int PHRASE_SETS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.speech.v2.PhraseSet> phraseSets_;
/**
*
*
* <pre>
* The list of requested PhraseSets.
* </pre>
*
* <code>repeated .google.cloud.speech.v2.PhraseSet phrase_sets = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.speech.v2.PhraseSet> getPhraseSetsList() {
return phraseSets_;
}
/**
*
*
* <pre>
* The list of requested PhraseSets.
* </pre>
*
* <code>repeated .google.cloud.speech.v2.PhraseSet phrase_sets = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.speech.v2.PhraseSetOrBuilder>
getPhraseSetsOrBuilderList() {
return phraseSets_;
}
/**
*
*
* <pre>
* The list of requested PhraseSets.
* </pre>
*
* <code>repeated .google.cloud.speech.v2.PhraseSet phrase_sets = 1;</code>
*/
@java.lang.Override
public int getPhraseSetsCount() {
return phraseSets_.size();
}
/**
*
*
* <pre>
* The list of requested PhraseSets.
* </pre>
*
* <code>repeated .google.cloud.speech.v2.PhraseSet phrase_sets = 1;</code>
*/
@java.lang.Override
public com.google.cloud.speech.v2.PhraseSet getPhraseSets(int index) {
return phraseSets_.get(index);
}
/**
*
*
* <pre>
* The list of requested PhraseSets.
* </pre>
*
* <code>repeated .google.cloud.speech.v2.PhraseSet phrase_sets = 1;</code>
*/
@java.lang.Override
public com.google.cloud.speech.v2.PhraseSetOrBuilder getPhraseSetsOrBuilder(int index) {
return phraseSets_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token, which can be sent as
* [page_token][google.cloud.speech.v2.ListPhraseSetsRequest.page_token] to
* retrieve the next page. If this field is omitted, there are no subsequent
* pages. This token expires after 72 hours.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* A token, which can be sent as
* [page_token][google.cloud.speech.v2.ListPhraseSetsRequest.page_token] to
* retrieve the next page. If this field is omitted, there are no subsequent
* pages. This token expires after 72 hours.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < phraseSets_.size(); i++) {
output.writeMessage(1, phraseSets_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < phraseSets_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, phraseSets_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.speech.v2.ListPhraseSetsResponse)) {
return super.equals(obj);
}
com.google.cloud.speech.v2.ListPhraseSetsResponse other =
(com.google.cloud.speech.v2.ListPhraseSetsResponse) obj;
if (!getPhraseSetsList().equals(other.getPhraseSetsList())) return false;
if (!getNextPageToken().equals(other.getNextPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getPhraseSetsCount() > 0) {
hash = (37 * hash) + PHRASE_SETS_FIELD_NUMBER;
hash = (53 * hash) + getPhraseSetsList().hashCode();
}
hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getNextPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.speech.v2.ListPhraseSetsResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.speech.v2.ListPhraseSetsResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.speech.v2.ListPhraseSetsResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.speech.v2.ListPhraseSetsResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.speech.v2.ListPhraseSetsResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.speech.v2.ListPhraseSetsResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.speech.v2.ListPhraseSetsResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.speech.v2.ListPhraseSetsResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.speech.v2.ListPhraseSetsResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.speech.v2.ListPhraseSetsResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.speech.v2.ListPhraseSetsResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.speech.v2.ListPhraseSetsResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.speech.v2.ListPhraseSetsResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Response message for the
* [ListPhraseSets][google.cloud.speech.v2.Speech.ListPhraseSets] method.
* </pre>
*
* Protobuf type {@code google.cloud.speech.v2.ListPhraseSetsResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.speech.v2.ListPhraseSetsResponse)
com.google.cloud.speech.v2.ListPhraseSetsResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.speech.v2.CloudSpeechProto
.internal_static_google_cloud_speech_v2_ListPhraseSetsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.speech.v2.CloudSpeechProto
.internal_static_google_cloud_speech_v2_ListPhraseSetsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.speech.v2.ListPhraseSetsResponse.class,
com.google.cloud.speech.v2.ListPhraseSetsResponse.Builder.class);
}
// Construct using com.google.cloud.speech.v2.ListPhraseSetsResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (phraseSetsBuilder_ == null) {
phraseSets_ = java.util.Collections.emptyList();
} else {
phraseSets_ = null;
phraseSetsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.speech.v2.CloudSpeechProto
.internal_static_google_cloud_speech_v2_ListPhraseSetsResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.speech.v2.ListPhraseSetsResponse getDefaultInstanceForType() {
return com.google.cloud.speech.v2.ListPhraseSetsResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.speech.v2.ListPhraseSetsResponse build() {
com.google.cloud.speech.v2.ListPhraseSetsResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.speech.v2.ListPhraseSetsResponse buildPartial() {
com.google.cloud.speech.v2.ListPhraseSetsResponse result =
new com.google.cloud.speech.v2.ListPhraseSetsResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.cloud.speech.v2.ListPhraseSetsResponse result) {
if (phraseSetsBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
phraseSets_ = java.util.Collections.unmodifiableList(phraseSets_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.phraseSets_ = phraseSets_;
} else {
result.phraseSets_ = phraseSetsBuilder_.build();
}
}
private void buildPartial0(com.google.cloud.speech.v2.ListPhraseSetsResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.speech.v2.ListPhraseSetsResponse) {
return mergeFrom((com.google.cloud.speech.v2.ListPhraseSetsResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.speech.v2.ListPhraseSetsResponse other) {
if (other == com.google.cloud.speech.v2.ListPhraseSetsResponse.getDefaultInstance())
return this;
if (phraseSetsBuilder_ == null) {
if (!other.phraseSets_.isEmpty()) {
if (phraseSets_.isEmpty()) {
phraseSets_ = other.phraseSets_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensurePhraseSetsIsMutable();
phraseSets_.addAll(other.phraseSets_);
}
onChanged();
}
} else {
if (!other.phraseSets_.isEmpty()) {
if (phraseSetsBuilder_.isEmpty()) {
phraseSetsBuilder_.dispose();
phraseSetsBuilder_ = null;
phraseSets_ = other.phraseSets_;
bitField0_ = (bitField0_ & ~0x00000001);
phraseSetsBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getPhraseSetsFieldBuilder()
: null;
} else {
phraseSetsBuilder_.addAllMessages(other.phraseSets_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.cloud.speech.v2.PhraseSet m =
input.readMessage(
com.google.cloud.speech.v2.PhraseSet.parser(), extensionRegistry);
if (phraseSetsBuilder_ == null) {
ensurePhraseSetsIsMutable();
phraseSets_.add(m);
} else {
phraseSetsBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.cloud.speech.v2.PhraseSet> phraseSets_ =
java.util.Collections.emptyList();
private void ensurePhraseSetsIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
phraseSets_ = new java.util.ArrayList<com.google.cloud.speech.v2.PhraseSet>(phraseSets_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.speech.v2.PhraseSet,
com.google.cloud.speech.v2.PhraseSet.Builder,
com.google.cloud.speech.v2.PhraseSetOrBuilder>
phraseSetsBuilder_;
/**
*
*
* <pre>
* The list of requested PhraseSets.
* </pre>
*
* <code>repeated .google.cloud.speech.v2.PhraseSet phrase_sets = 1;</code>
*/
public java.util.List<com.google.cloud.speech.v2.PhraseSet> getPhraseSetsList() {
if (phraseSetsBuilder_ == null) {
return java.util.Collections.unmodifiableList(phraseSets_);
} else {
return phraseSetsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* The list of requested PhraseSets.
* </pre>
*
* <code>repeated .google.cloud.speech.v2.PhraseSet phrase_sets = 1;</code>
*/
public int getPhraseSetsCount() {
if (phraseSetsBuilder_ == null) {
return phraseSets_.size();
} else {
return phraseSetsBuilder_.getCount();
}
}
/**
*
*
* <pre>
* The list of requested PhraseSets.
* </pre>
*
* <code>repeated .google.cloud.speech.v2.PhraseSet phrase_sets = 1;</code>
*/
public com.google.cloud.speech.v2.PhraseSet getPhraseSets(int index) {
if (phraseSetsBuilder_ == null) {
return phraseSets_.get(index);
} else {
return phraseSetsBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* The list of requested PhraseSets.
* </pre>
*
* <code>repeated .google.cloud.speech.v2.PhraseSet phrase_sets = 1;</code>
*/
public Builder setPhraseSets(int index, com.google.cloud.speech.v2.PhraseSet value) {
if (phraseSetsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensurePhraseSetsIsMutable();
phraseSets_.set(index, value);
onChanged();
} else {
phraseSetsBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The list of requested PhraseSets.
* </pre>
*
* <code>repeated .google.cloud.speech.v2.PhraseSet phrase_sets = 1;</code>
*/
public Builder setPhraseSets(
int index, com.google.cloud.speech.v2.PhraseSet.Builder builderForValue) {
if (phraseSetsBuilder_ == null) {
ensurePhraseSetsIsMutable();
phraseSets_.set(index, builderForValue.build());
onChanged();
} else {
phraseSetsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of requested PhraseSets.
* </pre>
*
* <code>repeated .google.cloud.speech.v2.PhraseSet phrase_sets = 1;</code>
*/
public Builder addPhraseSets(com.google.cloud.speech.v2.PhraseSet value) {
if (phraseSetsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensurePhraseSetsIsMutable();
phraseSets_.add(value);
onChanged();
} else {
phraseSetsBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* The list of requested PhraseSets.
* </pre>
*
* <code>repeated .google.cloud.speech.v2.PhraseSet phrase_sets = 1;</code>
*/
public Builder addPhraseSets(int index, com.google.cloud.speech.v2.PhraseSet value) {
if (phraseSetsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensurePhraseSetsIsMutable();
phraseSets_.add(index, value);
onChanged();
} else {
phraseSetsBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The list of requested PhraseSets.
* </pre>
*
* <code>repeated .google.cloud.speech.v2.PhraseSet phrase_sets = 1;</code>
*/
public Builder addPhraseSets(com.google.cloud.speech.v2.PhraseSet.Builder builderForValue) {
if (phraseSetsBuilder_ == null) {
ensurePhraseSetsIsMutable();
phraseSets_.add(builderForValue.build());
onChanged();
} else {
phraseSetsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of requested PhraseSets.
* </pre>
*
* <code>repeated .google.cloud.speech.v2.PhraseSet phrase_sets = 1;</code>
*/
public Builder addPhraseSets(
int index, com.google.cloud.speech.v2.PhraseSet.Builder builderForValue) {
if (phraseSetsBuilder_ == null) {
ensurePhraseSetsIsMutable();
phraseSets_.add(index, builderForValue.build());
onChanged();
} else {
phraseSetsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of requested PhraseSets.
* </pre>
*
* <code>repeated .google.cloud.speech.v2.PhraseSet phrase_sets = 1;</code>
*/
public Builder addAllPhraseSets(
java.lang.Iterable<? extends com.google.cloud.speech.v2.PhraseSet> values) {
if (phraseSetsBuilder_ == null) {
ensurePhraseSetsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, phraseSets_);
onChanged();
} else {
phraseSetsBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* The list of requested PhraseSets.
* </pre>
*
* <code>repeated .google.cloud.speech.v2.PhraseSet phrase_sets = 1;</code>
*/
public Builder clearPhraseSets() {
if (phraseSetsBuilder_ == null) {
phraseSets_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
phraseSetsBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* The list of requested PhraseSets.
* </pre>
*
* <code>repeated .google.cloud.speech.v2.PhraseSet phrase_sets = 1;</code>
*/
public Builder removePhraseSets(int index) {
if (phraseSetsBuilder_ == null) {
ensurePhraseSetsIsMutable();
phraseSets_.remove(index);
onChanged();
} else {
phraseSetsBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* The list of requested PhraseSets.
* </pre>
*
* <code>repeated .google.cloud.speech.v2.PhraseSet phrase_sets = 1;</code>
*/
public com.google.cloud.speech.v2.PhraseSet.Builder getPhraseSetsBuilder(int index) {
return getPhraseSetsFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* The list of requested PhraseSets.
* </pre>
*
* <code>repeated .google.cloud.speech.v2.PhraseSet phrase_sets = 1;</code>
*/
public com.google.cloud.speech.v2.PhraseSetOrBuilder getPhraseSetsOrBuilder(int index) {
if (phraseSetsBuilder_ == null) {
return phraseSets_.get(index);
} else {
return phraseSetsBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* The list of requested PhraseSets.
* </pre>
*
* <code>repeated .google.cloud.speech.v2.PhraseSet phrase_sets = 1;</code>
*/
public java.util.List<? extends com.google.cloud.speech.v2.PhraseSetOrBuilder>
getPhraseSetsOrBuilderList() {
if (phraseSetsBuilder_ != null) {
return phraseSetsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(phraseSets_);
}
}
/**
*
*
* <pre>
* The list of requested PhraseSets.
* </pre>
*
* <code>repeated .google.cloud.speech.v2.PhraseSet phrase_sets = 1;</code>
*/
public com.google.cloud.speech.v2.PhraseSet.Builder addPhraseSetsBuilder() {
return getPhraseSetsFieldBuilder()
.addBuilder(com.google.cloud.speech.v2.PhraseSet.getDefaultInstance());
}
/**
*
*
* <pre>
* The list of requested PhraseSets.
* </pre>
*
* <code>repeated .google.cloud.speech.v2.PhraseSet phrase_sets = 1;</code>
*/
public com.google.cloud.speech.v2.PhraseSet.Builder addPhraseSetsBuilder(int index) {
return getPhraseSetsFieldBuilder()
.addBuilder(index, com.google.cloud.speech.v2.PhraseSet.getDefaultInstance());
}
/**
*
*
* <pre>
* The list of requested PhraseSets.
* </pre>
*
* <code>repeated .google.cloud.speech.v2.PhraseSet phrase_sets = 1;</code>
*/
public java.util.List<com.google.cloud.speech.v2.PhraseSet.Builder> getPhraseSetsBuilderList() {
return getPhraseSetsFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.speech.v2.PhraseSet,
com.google.cloud.speech.v2.PhraseSet.Builder,
com.google.cloud.speech.v2.PhraseSetOrBuilder>
getPhraseSetsFieldBuilder() {
if (phraseSetsBuilder_ == null) {
phraseSetsBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.speech.v2.PhraseSet,
com.google.cloud.speech.v2.PhraseSet.Builder,
com.google.cloud.speech.v2.PhraseSetOrBuilder>(
phraseSets_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
phraseSets_ = null;
}
return phraseSetsBuilder_;
}
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token, which can be sent as
* [page_token][google.cloud.speech.v2.ListPhraseSetsRequest.page_token] to
* retrieve the next page. If this field is omitted, there are no subsequent
* pages. This token expires after 72 hours.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* A token, which can be sent as
* [page_token][google.cloud.speech.v2.ListPhraseSetsRequest.page_token] to
* retrieve the next page. If this field is omitted, there are no subsequent
* pages. This token expires after 72 hours.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* A token, which can be sent as
* [page_token][google.cloud.speech.v2.ListPhraseSetsRequest.page_token] to
* retrieve the next page. If this field is omitted, there are no subsequent
* pages. This token expires after 72 hours.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* A token, which can be sent as
* [page_token][google.cloud.speech.v2.ListPhraseSetsRequest.page_token] to
* retrieve the next page. If this field is omitted, there are no subsequent
* pages. This token expires after 72 hours.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* A token, which can be sent as
* [page_token][google.cloud.speech.v2.ListPhraseSetsRequest.page_token] to
* retrieve the next page. If this field is omitted, there are no subsequent
* pages. This token expires after 72 hours.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
// Unknown-field handling is delegated unchanged to the generated base class.
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.speech.v2.ListPhraseSetsResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.speech.v2.ListPhraseSetsResponse)
// Shared immutable default instance, created eagerly at class-load time.
private static final com.google.cloud.speech.v2.ListPhraseSetsResponse DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.speech.v2.ListPhraseSetsResponse();
}
/** Returns the singleton default (empty) instance of this message. */
public static com.google.cloud.speech.v2.ListPhraseSetsResponse getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Stateless parser shared by all callers. Parsing goes through a fresh
// Builder; on failure, whatever was parsed so far is attached to the thrown
// exception as the "unfinished message".
private static final com.google.protobuf.Parser<ListPhraseSetsResponse> PARSER =
new com.google.protobuf.AbstractParser<ListPhraseSetsResponse>() {
@java.lang.Override
public ListPhraseSetsResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
// Re-throw with the partially-built message attached.
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
// Wrap plain I/O failures in a protobuf parse exception.
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
/** Returns the shared parser for {@code ListPhraseSetsResponse} messages. */
public static com.google.protobuf.Parser<ListPhraseSetsResponse> parser() {
return PARSER;
}
/** Instance accessor for the shared parser (protobuf Message contract). */
@java.lang.Override
public com.google.protobuf.Parser<ListPhraseSetsResponse> getParserForType() {
return PARSER;
}
/** Instance accessor for the default instance (protobuf Message contract). */
@java.lang.Override
public com.google.cloud.speech.v2.ListPhraseSetsResponse getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 37,010 | java-dialogflow-cx/google-cloud-dialogflow-cx/src/main/java/com/google/cloud/dialogflow/cx/v3beta1/stub/EntityTypesStubSettings.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.dialogflow.cx.v3beta1.stub;
import static com.google.cloud.dialogflow.cx.v3beta1.EntityTypesClient.ListEntityTypesPagedResponse;
import static com.google.cloud.dialogflow.cx.v3beta1.EntityTypesClient.ListLocationsPagedResponse;
import com.google.api.core.ApiFunction;
import com.google.api.core.ApiFuture;
import com.google.api.core.BetaApi;
import com.google.api.core.ObsoleteApi;
import com.google.api.gax.core.GaxProperties;
import com.google.api.gax.core.GoogleCredentialsProvider;
import com.google.api.gax.core.InstantiatingExecutorProvider;
import com.google.api.gax.grpc.GaxGrpcProperties;
import com.google.api.gax.grpc.GrpcTransportChannel;
import com.google.api.gax.grpc.InstantiatingGrpcChannelProvider;
import com.google.api.gax.grpc.ProtoOperationTransformers;
import com.google.api.gax.httpjson.GaxHttpJsonProperties;
import com.google.api.gax.httpjson.HttpJsonTransportChannel;
import com.google.api.gax.httpjson.InstantiatingHttpJsonChannelProvider;
import com.google.api.gax.longrunning.OperationSnapshot;
import com.google.api.gax.longrunning.OperationTimedPollAlgorithm;
import com.google.api.gax.retrying.RetrySettings;
import com.google.api.gax.rpc.ApiCallContext;
import com.google.api.gax.rpc.ApiClientHeaderProvider;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.OperationCallSettings;
import com.google.api.gax.rpc.PageContext;
import com.google.api.gax.rpc.PagedCallSettings;
import com.google.api.gax.rpc.PagedListDescriptor;
import com.google.api.gax.rpc.PagedListResponseFactory;
import com.google.api.gax.rpc.StatusCode;
import com.google.api.gax.rpc.StubSettings;
import com.google.api.gax.rpc.TransportChannelProvider;
import com.google.api.gax.rpc.UnaryCallSettings;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.cloud.dialogflow.cx.v3beta1.CreateEntityTypeRequest;
import com.google.cloud.dialogflow.cx.v3beta1.DeleteEntityTypeRequest;
import com.google.cloud.dialogflow.cx.v3beta1.EntityType;
import com.google.cloud.dialogflow.cx.v3beta1.ExportEntityTypesMetadata;
import com.google.cloud.dialogflow.cx.v3beta1.ExportEntityTypesRequest;
import com.google.cloud.dialogflow.cx.v3beta1.ExportEntityTypesResponse;
import com.google.cloud.dialogflow.cx.v3beta1.GetEntityTypeRequest;
import com.google.cloud.dialogflow.cx.v3beta1.ImportEntityTypesMetadata;
import com.google.cloud.dialogflow.cx.v3beta1.ImportEntityTypesRequest;
import com.google.cloud.dialogflow.cx.v3beta1.ImportEntityTypesResponse;
import com.google.cloud.dialogflow.cx.v3beta1.ListEntityTypesRequest;
import com.google.cloud.dialogflow.cx.v3beta1.ListEntityTypesResponse;
import com.google.cloud.dialogflow.cx.v3beta1.UpdateEntityTypeRequest;
import com.google.cloud.location.GetLocationRequest;
import com.google.cloud.location.ListLocationsRequest;
import com.google.cloud.location.ListLocationsResponse;
import com.google.cloud.location.Location;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.google.longrunning.Operation;
import com.google.protobuf.Empty;
import java.io.IOException;
import java.time.Duration;
import java.util.List;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
* Settings class to configure an instance of {@link EntityTypesStub}.
*
* <p>The default instance has everything set to sensible defaults:
*
* <ul>
* <li>The default service address (dialogflow.googleapis.com) and default port (443) are used.
* <li>Credentials are acquired automatically through Application Default Credentials.
* <li>Retries are configured for idempotent methods but not for non-idempotent methods.
* </ul>
*
* <p>The builder of this class is recursive, so contained classes are themselves builders. When
* build() is called, the tree of builders is called to create the complete settings object.
*
* <p>For example, to set the
* [RetrySettings](https://cloud.google.com/java/docs/reference/gax/latest/com.google.api.gax.retrying.RetrySettings)
* of getEntityType:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* EntityTypesStubSettings.Builder entityTypesSettingsBuilder =
* EntityTypesStubSettings.newBuilder();
* entityTypesSettingsBuilder
* .getEntityTypeSettings()
* .setRetrySettings(
* entityTypesSettingsBuilder
* .getEntityTypeSettings()
* .getRetrySettings()
* .toBuilder()
* .setInitialRetryDelayDuration(Duration.ofSeconds(1))
* .setInitialRpcTimeoutDuration(Duration.ofSeconds(5))
* .setMaxAttempts(5)
* .setMaxRetryDelayDuration(Duration.ofSeconds(30))
* .setMaxRpcTimeoutDuration(Duration.ofSeconds(60))
* .setRetryDelayMultiplier(1.3)
* .setRpcTimeoutMultiplier(1.5)
* .setTotalTimeoutDuration(Duration.ofSeconds(300))
* .build());
* EntityTypesStubSettings entityTypesSettings = entityTypesSettingsBuilder.build();
* }</pre>
*
* Please refer to the [Client Side Retry
* Guide](https://github.com/googleapis/google-cloud-java/blob/main/docs/client_retries.md) for
* additional support in setting retries.
*
* <p>To configure the RetrySettings of a Long Running Operation method, create an
* OperationTimedPollAlgorithm object and update the RPC's polling algorithm. For example, to
* configure the RetrySettings for exportEntityTypes:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* EntityTypesStubSettings.Builder entityTypesSettingsBuilder =
* EntityTypesStubSettings.newBuilder();
 * TimedRetryAlgorithm timedRetryAlgorithm =
 *     OperationTimedPollAlgorithm.create(
 *         RetrySettings.newBuilder()
 *             .setInitialRetryDelayDuration(Duration.ofMillis(500))
 *             .setRetryDelayMultiplier(1.5)
 *             .setMaxRetryDelayDuration(Duration.ofMillis(5000))
 *             .setTotalTimeoutDuration(Duration.ofHours(24))
 *             .build());
 * entityTypesSettingsBuilder
 *     .exportEntityTypesOperationSettings()
 *     .setPollingAlgorithm(timedRetryAlgorithm)
 *     .build();
* }</pre>
*/
@BetaApi
@Generated("by gapic-generator-java")
public class EntityTypesStubSettings extends StubSettings<EntityTypesStubSettings> {
/** The default OAuth scopes requested for this service. */
private static final ImmutableList<String> DEFAULT_SERVICE_SCOPES =
ImmutableList.<String>builder()
.add("https://www.googleapis.com/auth/cloud-platform")
.add("https://www.googleapis.com/auth/dialogflow")
.build();
// Immutable per-RPC call settings, built once from the Builder in the
// constructor below. One field per RPC; long-running RPCs additionally have
// an OperationCallSettings field for polling configuration.
private final UnaryCallSettings<GetEntityTypeRequest, EntityType> getEntityTypeSettings;
private final UnaryCallSettings<CreateEntityTypeRequest, EntityType> createEntityTypeSettings;
private final UnaryCallSettings<UpdateEntityTypeRequest, EntityType> updateEntityTypeSettings;
private final UnaryCallSettings<DeleteEntityTypeRequest, Empty> deleteEntityTypeSettings;
private final PagedCallSettings<
ListEntityTypesRequest, ListEntityTypesResponse, ListEntityTypesPagedResponse>
listEntityTypesSettings;
private final UnaryCallSettings<ExportEntityTypesRequest, Operation> exportEntityTypesSettings;
private final OperationCallSettings<
ExportEntityTypesRequest, ExportEntityTypesResponse, ExportEntityTypesMetadata>
exportEntityTypesOperationSettings;
private final UnaryCallSettings<ImportEntityTypesRequest, Operation> importEntityTypesSettings;
private final OperationCallSettings<
ImportEntityTypesRequest, ImportEntityTypesResponse, ImportEntityTypesMetadata>
importEntityTypesOperationSettings;
private final PagedCallSettings<
ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>
listLocationsSettings;
private final UnaryCallSettings<GetLocationRequest, Location> getLocationSettings;
// Adapter telling the gax pagination machinery how to read/write paging
// fields (page token, page size) on ListEntityTypes requests/responses and
// how to pull the EntityType resources out of a response page.
private static final PagedListDescriptor<
ListEntityTypesRequest, ListEntityTypesResponse, EntityType>
LIST_ENTITY_TYPES_PAGE_STR_DESC =
new PagedListDescriptor<ListEntityTypesRequest, ListEntityTypesResponse, EntityType>() {
@Override
public String emptyToken() {
return "";
}
@Override
public ListEntityTypesRequest injectToken(
ListEntityTypesRequest payload, String token) {
return ListEntityTypesRequest.newBuilder(payload).setPageToken(token).build();
}
@Override
public ListEntityTypesRequest injectPageSize(
ListEntityTypesRequest payload, int pageSize) {
return ListEntityTypesRequest.newBuilder(payload).setPageSize(pageSize).build();
}
@Override
public Integer extractPageSize(ListEntityTypesRequest payload) {
return payload.getPageSize();
}
@Override
public String extractNextToken(ListEntityTypesResponse payload) {
return payload.getNextPageToken();
}
@Override
public Iterable<EntityType> extractResources(ListEntityTypesResponse payload) {
return payload.getEntityTypesList();
}
};
// Same pagination adapter as above, for the mixin ListLocations RPC.
private static final PagedListDescriptor<ListLocationsRequest, ListLocationsResponse, Location>
LIST_LOCATIONS_PAGE_STR_DESC =
new PagedListDescriptor<ListLocationsRequest, ListLocationsResponse, Location>() {
@Override
public String emptyToken() {
return "";
}
@Override
public ListLocationsRequest injectToken(ListLocationsRequest payload, String token) {
return ListLocationsRequest.newBuilder(payload).setPageToken(token).build();
}
@Override
public ListLocationsRequest injectPageSize(ListLocationsRequest payload, int pageSize) {
return ListLocationsRequest.newBuilder(payload).setPageSize(pageSize).build();
}
@Override
public Integer extractPageSize(ListLocationsRequest payload) {
return payload.getPageSize();
}
@Override
public String extractNextToken(ListLocationsResponse payload) {
return payload.getNextPageToken();
}
@Override
public Iterable<Location> extractResources(ListLocationsResponse payload) {
return payload.getLocationsList();
}
};
// Factories that wrap a raw future response into the lazily-iterating
// *PagedResponse types, using the page descriptors defined above.
private static final PagedListResponseFactory<
ListEntityTypesRequest, ListEntityTypesResponse, ListEntityTypesPagedResponse>
LIST_ENTITY_TYPES_PAGE_STR_FACT =
new PagedListResponseFactory<
ListEntityTypesRequest, ListEntityTypesResponse, ListEntityTypesPagedResponse>() {
@Override
public ApiFuture<ListEntityTypesPagedResponse> getFuturePagedResponse(
UnaryCallable<ListEntityTypesRequest, ListEntityTypesResponse> callable,
ListEntityTypesRequest request,
ApiCallContext context,
ApiFuture<ListEntityTypesResponse> futureResponse) {
PageContext<ListEntityTypesRequest, ListEntityTypesResponse, EntityType> pageContext =
PageContext.create(callable, LIST_ENTITY_TYPES_PAGE_STR_DESC, request, context);
return ListEntityTypesPagedResponse.createAsync(pageContext, futureResponse);
}
};
private static final PagedListResponseFactory<
ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>
LIST_LOCATIONS_PAGE_STR_FACT =
new PagedListResponseFactory<
ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>() {
@Override
public ApiFuture<ListLocationsPagedResponse> getFuturePagedResponse(
UnaryCallable<ListLocationsRequest, ListLocationsResponse> callable,
ListLocationsRequest request,
ApiCallContext context,
ApiFuture<ListLocationsResponse> futureResponse) {
PageContext<ListLocationsRequest, ListLocationsResponse, Location> pageContext =
PageContext.create(callable, LIST_LOCATIONS_PAGE_STR_DESC, request, context);
return ListLocationsPagedResponse.createAsync(pageContext, futureResponse);
}
};
// Read-only accessors for the per-RPC settings objects built in the
// constructor. One getter per RPC; operation getters expose LRO polling
// configuration for the corresponding long-running RPC.
/** Returns the object with the settings used for calls to getEntityType. */
public UnaryCallSettings<GetEntityTypeRequest, EntityType> getEntityTypeSettings() {
return getEntityTypeSettings;
}
/** Returns the object with the settings used for calls to createEntityType. */
public UnaryCallSettings<CreateEntityTypeRequest, EntityType> createEntityTypeSettings() {
return createEntityTypeSettings;
}
/** Returns the object with the settings used for calls to updateEntityType. */
public UnaryCallSettings<UpdateEntityTypeRequest, EntityType> updateEntityTypeSettings() {
return updateEntityTypeSettings;
}
/** Returns the object with the settings used for calls to deleteEntityType. */
public UnaryCallSettings<DeleteEntityTypeRequest, Empty> deleteEntityTypeSettings() {
return deleteEntityTypeSettings;
}
/** Returns the object with the settings used for calls to listEntityTypes. */
public PagedCallSettings<
ListEntityTypesRequest, ListEntityTypesResponse, ListEntityTypesPagedResponse>
listEntityTypesSettings() {
return listEntityTypesSettings;
}
/** Returns the object with the settings used for calls to exportEntityTypes. */
public UnaryCallSettings<ExportEntityTypesRequest, Operation> exportEntityTypesSettings() {
return exportEntityTypesSettings;
}
/** Returns the object with the LRO polling settings used for calls to exportEntityTypes. */
public OperationCallSettings<
ExportEntityTypesRequest, ExportEntityTypesResponse, ExportEntityTypesMetadata>
exportEntityTypesOperationSettings() {
return exportEntityTypesOperationSettings;
}
/** Returns the object with the settings used for calls to importEntityTypes. */
public UnaryCallSettings<ImportEntityTypesRequest, Operation> importEntityTypesSettings() {
return importEntityTypesSettings;
}
/** Returns the object with the LRO polling settings used for calls to importEntityTypes. */
public OperationCallSettings<
ImportEntityTypesRequest, ImportEntityTypesResponse, ImportEntityTypesMetadata>
importEntityTypesOperationSettings() {
return importEntityTypesOperationSettings;
}
/** Returns the object with the settings used for calls to listLocations. */
public PagedCallSettings<ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>
listLocationsSettings() {
return listLocationsSettings;
}
/** Returns the object with the settings used for calls to getLocation. */
public UnaryCallSettings<GetLocationRequest, Location> getLocationSettings() {
return getLocationSettings;
}
/**
 * Instantiates the transport-specific stub (gRPC or REST/HTTP-JSON) that
 * matches the configured {@link TransportChannelProvider}.
 *
 * @throws IOException if stub creation fails
 * @throws UnsupportedOperationException if the configured transport is neither
 *     gRPC nor HTTP-JSON
 */
public EntityTypesStub createStub() throws IOException {
  String transportName = getTransportChannelProvider().getTransportName();
  if (GrpcTransportChannel.getGrpcTransportName().equals(transportName)) {
    return GrpcEntityTypesStub.create(this);
  }
  if (HttpJsonTransportChannel.getHttpJsonTransportName().equals(transportName)) {
    return HttpJsonEntityTypesStub.create(this);
  }
  throw new UnsupportedOperationException(
      String.format("Transport not supported: %s", transportName));
}
/** Returns the default service name. */
@Override
public String getServiceName() {
return "dialogflow";
}
/** Returns a builder for the default ExecutorProvider for this service. */
public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() {
return InstantiatingExecutorProvider.newBuilder();
}
/** Returns the default service endpoint. */
@ObsoleteApi("Use getEndpoint() instead")
public static String getDefaultEndpoint() {
return "dialogflow.googleapis.com:443";
}
/** Returns the default mTLS service endpoint. */
public static String getDefaultMtlsEndpoint() {
return "dialogflow.mtls.googleapis.com:443";
}
/** Returns the default service scopes. */
public static List<String> getDefaultServiceScopes() {
return DEFAULT_SERVICE_SCOPES;
}
/** Returns a builder for the default credentials (ADC with the default scopes). */
public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() {
return GoogleCredentialsProvider.newBuilder()
.setScopesToApply(DEFAULT_SERVICE_SCOPES)
.setUseJwtAccessWithScope(true);
}
/** Returns a builder for the default gRPC ChannelProvider for this service. */
public static InstantiatingGrpcChannelProvider.Builder defaultGrpcTransportProviderBuilder() {
return InstantiatingGrpcChannelProvider.newBuilder()
.setMaxInboundMessageSize(Integer.MAX_VALUE);
}
/** Returns a builder for the default REST (HTTP-JSON) ChannelProvider for this service. */
@BetaApi
public static InstantiatingHttpJsonChannelProvider.Builder
defaultHttpJsonTransportProviderBuilder() {
return InstantiatingHttpJsonChannelProvider.newBuilder();
}
/** Returns the default transport channel provider; gRPC is the default transport. */
public static TransportChannelProvider defaultTransportChannelProvider() {
return defaultGrpcTransportProviderBuilder().build();
}
/** Returns the default client header provider for the gRPC transport. */
public static ApiClientHeaderProvider.Builder defaultGrpcApiClientHeaderProviderBuilder() {
return ApiClientHeaderProvider.newBuilder()
.setGeneratedLibToken(
"gapic", GaxProperties.getLibraryVersion(EntityTypesStubSettings.class))
.setTransportToken(
GaxGrpcProperties.getGrpcTokenName(), GaxGrpcProperties.getGrpcVersion());
}
/** Returns the default client header provider for the HTTP-JSON transport. */
public static ApiClientHeaderProvider.Builder defaultHttpJsonApiClientHeaderProviderBuilder() {
return ApiClientHeaderProvider.newBuilder()
.setGeneratedLibToken(
"gapic", GaxProperties.getLibraryVersion(EntityTypesStubSettings.class))
.setTransportToken(
GaxHttpJsonProperties.getHttpJsonTokenName(),
GaxHttpJsonProperties.getHttpJsonVersion());
}
/** Returns the default client header provider (gRPC, matching the default transport). */
public static ApiClientHeaderProvider.Builder defaultApiClientHeaderProviderBuilder() {
return EntityTypesStubSettings.defaultGrpcApiClientHeaderProviderBuilder();
}
/** Returns a new gRPC builder for this class. */
public static Builder newBuilder() {
return Builder.createDefault();
}
/** Returns a new REST builder for this class. */
public static Builder newHttpJsonBuilder() {
return Builder.createHttpJsonDefault();
}
/** Returns a new builder for this class, configured from the given client context. */
public static Builder newBuilder(ClientContext clientContext) {
return new Builder(clientContext);
}
/** Returns a builder containing all the values of this settings class. */
public Builder toBuilder() {
return new Builder(this);
}
/**
 * Builds the immutable settings object: snapshots every per-RPC settings
 * builder from the given Builder into its frozen form.
 */
protected EntityTypesStubSettings(Builder settingsBuilder) throws IOException {
super(settingsBuilder);
getEntityTypeSettings = settingsBuilder.getEntityTypeSettings().build();
createEntityTypeSettings = settingsBuilder.createEntityTypeSettings().build();
updateEntityTypeSettings = settingsBuilder.updateEntityTypeSettings().build();
deleteEntityTypeSettings = settingsBuilder.deleteEntityTypeSettings().build();
listEntityTypesSettings = settingsBuilder.listEntityTypesSettings().build();
exportEntityTypesSettings = settingsBuilder.exportEntityTypesSettings().build();
exportEntityTypesOperationSettings =
settingsBuilder.exportEntityTypesOperationSettings().build();
importEntityTypesSettings = settingsBuilder.importEntityTypesSettings().build();
importEntityTypesOperationSettings =
settingsBuilder.importEntityTypesOperationSettings().build();
listLocationsSettings = settingsBuilder.listLocationsSettings().build();
getLocationSettings = settingsBuilder.getLocationSettings().build();
}
/** Builder for EntityTypesStubSettings. */
public static class Builder extends StubSettings.Builder<EntityTypesStubSettings, Builder> {
// Mutable per-RPC settings builders. unaryMethodSettingsBuilders collects the
// unary builders so applyToAllUnaryMethods() can update them in one pass.
private final ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders;
private final UnaryCallSettings.Builder<GetEntityTypeRequest, EntityType> getEntityTypeSettings;
private final UnaryCallSettings.Builder<CreateEntityTypeRequest, EntityType>
createEntityTypeSettings;
private final UnaryCallSettings.Builder<UpdateEntityTypeRequest, EntityType>
updateEntityTypeSettings;
private final UnaryCallSettings.Builder<DeleteEntityTypeRequest, Empty>
deleteEntityTypeSettings;
private final PagedCallSettings.Builder<
ListEntityTypesRequest, ListEntityTypesResponse, ListEntityTypesPagedResponse>
listEntityTypesSettings;
private final UnaryCallSettings.Builder<ExportEntityTypesRequest, Operation>
exportEntityTypesSettings;
private final OperationCallSettings.Builder<
ExportEntityTypesRequest, ExportEntityTypesResponse, ExportEntityTypesMetadata>
exportEntityTypesOperationSettings;
private final UnaryCallSettings.Builder<ImportEntityTypesRequest, Operation>
importEntityTypesSettings;
private final OperationCallSettings.Builder<
ImportEntityTypesRequest, ImportEntityTypesResponse, ImportEntityTypesMetadata>
importEntityTypesOperationSettings;
private final PagedCallSettings.Builder<
ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>
listLocationsSettings;
private final UnaryCallSettings.Builder<GetLocationRequest, Location> getLocationSettings;
// Named retry-code sets referenced by initDefaults(); only UNAVAILABLE is
// retried by default.
private static final ImmutableMap<String, ImmutableSet<StatusCode.Code>>
RETRYABLE_CODE_DEFINITIONS;
static {
ImmutableMap.Builder<String, ImmutableSet<StatusCode.Code>> definitions =
ImmutableMap.builder();
definitions.put(
"retry_policy_0_codes",
ImmutableSet.copyOf(Lists.<StatusCode.Code>newArrayList(StatusCode.Code.UNAVAILABLE)));
RETRYABLE_CODE_DEFINITIONS = definitions.build();
}
// Named retry-parameter presets referenced by initDefaults(): exponential
// backoff from 100ms up to 60s, with a fixed 60s RPC/total timeout.
private static final ImmutableMap<String, RetrySettings> RETRY_PARAM_DEFINITIONS;
static {
ImmutableMap.Builder<String, RetrySettings> definitions = ImmutableMap.builder();
RetrySettings settings = null;
settings =
RetrySettings.newBuilder()
.setInitialRetryDelayDuration(Duration.ofMillis(100L))
.setRetryDelayMultiplier(1.3)
.setMaxRetryDelayDuration(Duration.ofMillis(60000L))
.setInitialRpcTimeoutDuration(Duration.ofMillis(60000L))
.setRpcTimeoutMultiplier(1.0)
.setMaxRpcTimeoutDuration(Duration.ofMillis(60000L))
.setTotalTimeoutDuration(Duration.ofMillis(60000L))
.build();
definitions.put("retry_policy_0_params", settings);
RETRY_PARAM_DEFINITIONS = definitions.build();
}
/** Creates a builder with no client context; defaults are applied by the delegate ctor. */
protected Builder() {
this(((ClientContext) null));
}
/**
 * Creates a builder, instantiating fresh settings builders for every RPC and
 * applying the generated retry/timeout defaults.
 */
protected Builder(ClientContext clientContext) {
super(clientContext);
getEntityTypeSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
createEntityTypeSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
updateEntityTypeSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
deleteEntityTypeSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
listEntityTypesSettings = PagedCallSettings.newBuilder(LIST_ENTITY_TYPES_PAGE_STR_FACT);
exportEntityTypesSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
exportEntityTypesOperationSettings = OperationCallSettings.newBuilder();
importEntityTypesSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
importEntityTypesOperationSettings = OperationCallSettings.newBuilder();
listLocationsSettings = PagedCallSettings.newBuilder(LIST_LOCATIONS_PAGE_STR_FACT);
getLocationSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
// Note: operation settings are intentionally excluded from this unary list.
unaryMethodSettingsBuilders =
ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
getEntityTypeSettings,
createEntityTypeSettings,
updateEntityTypeSettings,
deleteEntityTypeSettings,
listEntityTypesSettings,
exportEntityTypesSettings,
importEntityTypesSettings,
listLocationsSettings,
getLocationSettings);
initDefaults(this);
}
/** Creates a builder pre-populated from an existing immutable settings object. */
protected Builder(EntityTypesStubSettings settings) {
super(settings);
getEntityTypeSettings = settings.getEntityTypeSettings.toBuilder();
createEntityTypeSettings = settings.createEntityTypeSettings.toBuilder();
updateEntityTypeSettings = settings.updateEntityTypeSettings.toBuilder();
deleteEntityTypeSettings = settings.deleteEntityTypeSettings.toBuilder();
listEntityTypesSettings = settings.listEntityTypesSettings.toBuilder();
exportEntityTypesSettings = settings.exportEntityTypesSettings.toBuilder();
exportEntityTypesOperationSettings = settings.exportEntityTypesOperationSettings.toBuilder();
importEntityTypesSettings = settings.importEntityTypesSettings.toBuilder();
importEntityTypesOperationSettings = settings.importEntityTypesOperationSettings.toBuilder();
listLocationsSettings = settings.listLocationsSettings.toBuilder();
getLocationSettings = settings.getLocationSettings.toBuilder();
unaryMethodSettingsBuilders =
ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
getEntityTypeSettings,
createEntityTypeSettings,
updateEntityTypeSettings,
deleteEntityTypeSettings,
listEntityTypesSettings,
exportEntityTypesSettings,
importEntityTypesSettings,
listLocationsSettings,
getLocationSettings);
}
/** Creates a builder wired for the gRPC transport with default channel/credentials/headers. */
private static Builder createDefault() {
Builder builder = new Builder(((ClientContext) null));
builder.setTransportChannelProvider(defaultTransportChannelProvider());
builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build());
builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build());
builder.setMtlsEndpoint(getDefaultMtlsEndpoint());
builder.setSwitchToMtlsEndpointAllowed(true);
return initDefaults(builder);
}
/** Creates a builder wired for the REST (HTTP-JSON) transport with matching defaults. */
private static Builder createHttpJsonDefault() {
Builder builder = new Builder(((ClientContext) null));
builder.setTransportChannelProvider(defaultHttpJsonTransportProviderBuilder().build());
builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build());
builder.setInternalHeaderProvider(defaultHttpJsonApiClientHeaderProviderBuilder().build());
builder.setMtlsEndpoint(getDefaultMtlsEndpoint());
builder.setSwitchToMtlsEndpointAllowed(true);
return initDefaults(builder);
}
/**
 * Applies the service-config defaults: every unary RPC gets the
 * retry_policy_0 codes/params defined above, and the two long-running RPCs
 * (export/import) additionally get LRO response/metadata transformers plus a
 * polling schedule (5s initial, x1.5, 45s cap, 5-minute total).
 */
private static Builder initDefaults(Builder builder) {
builder
.getEntityTypeSettings()
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
.setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));
builder
.createEntityTypeSettings()
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
.setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));
builder
.updateEntityTypeSettings()
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
.setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));
builder
.deleteEntityTypeSettings()
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
.setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));
builder
.listEntityTypesSettings()
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
.setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));
builder
.exportEntityTypesSettings()
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
.setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));
builder
.importEntityTypesSettings()
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
.setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));
builder
.listLocationsSettings()
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
.setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));
builder
.getLocationSettings()
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
.setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));
// LRO defaults: zero RPC timeouts here mean the poll RPCs themselves do not
// time out individually; only the total 300s budget bounds polling.
builder
.exportEntityTypesOperationSettings()
.setInitialCallSettings(
UnaryCallSettings
.<ExportEntityTypesRequest, OperationSnapshot>newUnaryCallSettingsBuilder()
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
.setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"))
.build())
.setResponseTransformer(
ProtoOperationTransformers.ResponseTransformer.create(
ExportEntityTypesResponse.class))
.setMetadataTransformer(
ProtoOperationTransformers.MetadataTransformer.create(
ExportEntityTypesMetadata.class))
.setPollingAlgorithm(
OperationTimedPollAlgorithm.create(
RetrySettings.newBuilder()
.setInitialRetryDelayDuration(Duration.ofMillis(5000L))
.setRetryDelayMultiplier(1.5)
.setMaxRetryDelayDuration(Duration.ofMillis(45000L))
.setInitialRpcTimeoutDuration(Duration.ZERO)
.setRpcTimeoutMultiplier(1.0)
.setMaxRpcTimeoutDuration(Duration.ZERO)
.setTotalTimeoutDuration(Duration.ofMillis(300000L))
.build()));
builder
.importEntityTypesOperationSettings()
.setInitialCallSettings(
UnaryCallSettings
.<ImportEntityTypesRequest, OperationSnapshot>newUnaryCallSettingsBuilder()
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
.setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"))
.build())
.setResponseTransformer(
ProtoOperationTransformers.ResponseTransformer.create(
ImportEntityTypesResponse.class))
.setMetadataTransformer(
ProtoOperationTransformers.MetadataTransformer.create(
ImportEntityTypesMetadata.class))
.setPollingAlgorithm(
OperationTimedPollAlgorithm.create(
RetrySettings.newBuilder()
.setInitialRetryDelayDuration(Duration.ofMillis(5000L))
.setRetryDelayMultiplier(1.5)
.setMaxRetryDelayDuration(Duration.ofMillis(45000L))
.setInitialRpcTimeoutDuration(Duration.ZERO)
.setRpcTimeoutMultiplier(1.0)
.setMaxRpcTimeoutDuration(Duration.ZERO)
.setTotalTimeoutDuration(Duration.ofMillis(300000L))
.build()));
return builder;
}
/**
 * Applies the given settings updater function to all of the unary API methods in this service.
 *
 * <p>Note: This method does not support applying settings to streaming methods.
 */
public Builder applyToAllUnaryMethods(
ApiFunction<UnaryCallSettings.Builder<?, ?>, Void> settingsUpdater) {
super.applyToAllUnaryMethods(unaryMethodSettingsBuilders, settingsUpdater);
return this;
}
/** Returns the mutable list of unary settings builders updated by applyToAllUnaryMethods. */
public ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders() {
return unaryMethodSettingsBuilders;
}
/** Returns the builder for the settings used for calls to getEntityType. */
public UnaryCallSettings.Builder<GetEntityTypeRequest, EntityType> getEntityTypeSettings() {
return getEntityTypeSettings;
}
/** Returns the builder for the settings used for calls to {@code createEntityType}. */
public UnaryCallSettings.Builder<CreateEntityTypeRequest, EntityType>
    createEntityTypeSettings() {
  return createEntityTypeSettings;
}
/** Returns the builder for the settings used for calls to {@code updateEntityType}. */
public UnaryCallSettings.Builder<UpdateEntityTypeRequest, EntityType>
    updateEntityTypeSettings() {
  return updateEntityTypeSettings;
}
/** Returns the builder for the settings used for calls to {@code deleteEntityType}. */
public UnaryCallSettings.Builder<DeleteEntityTypeRequest, Empty> deleteEntityTypeSettings() {
  return deleteEntityTypeSettings;
}
/** Returns the builder for the settings used for paged calls to {@code listEntityTypes}. */
public PagedCallSettings.Builder<
        ListEntityTypesRequest, ListEntityTypesResponse, ListEntityTypesPagedResponse>
    listEntityTypesSettings() {
  return listEntityTypesSettings;
}
/** Returns the builder for the settings used for calls to {@code exportEntityTypes}. */
public UnaryCallSettings.Builder<ExportEntityTypesRequest, Operation>
    exportEntityTypesSettings() {
  return exportEntityTypesSettings;
}
/**
 * Returns the builder for the settings used for long-running operation calls to
 * {@code exportEntityTypes}.
 */
public OperationCallSettings.Builder<
        ExportEntityTypesRequest, ExportEntityTypesResponse, ExportEntityTypesMetadata>
    exportEntityTypesOperationSettings() {
  return exportEntityTypesOperationSettings;
}
/** Returns the builder for the settings used for calls to {@code importEntityTypes}. */
public UnaryCallSettings.Builder<ImportEntityTypesRequest, Operation>
    importEntityTypesSettings() {
  return importEntityTypesSettings;
}
/**
 * Returns the builder for the settings used for long-running operation calls to
 * {@code importEntityTypes}.
 */
public OperationCallSettings.Builder<
        ImportEntityTypesRequest, ImportEntityTypesResponse, ImportEntityTypesMetadata>
    importEntityTypesOperationSettings() {
  return importEntityTypesOperationSettings;
}
/** Returns the builder for the settings used for paged calls to {@code listLocations}. */
public PagedCallSettings.Builder<
        ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>
    listLocationsSettings() {
  return listLocationsSettings;
}
/** Returns the builder for the settings used for calls to {@code getLocation}. */
public UnaryCallSettings.Builder<GetLocationRequest, Location> getLocationSettings() {
  return getLocationSettings;
}
/** Builds an immutable {@code EntityTypesStubSettings} snapshot of this builder's state. */
@Override
public EntityTypesStubSettings build() throws IOException {
  return new EntityTypesStubSettings(this);
}
}
}
|
google/archive-patcher | 37,155 | generator/src/test/java/com/google/archivepatcher/generator/PreDiffPlannerTest.java | // Copyright 2016 Google Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.archivepatcher.generator;
import com.google.archivepatcher.generator.DefaultDeflateCompressionDiviner.DivinationResult;
import com.google.archivepatcher.shared.DefaultDeflateCompatibilityWindow;
import com.google.archivepatcher.shared.JreDeflateParameters;
import com.google.archivepatcher.shared.RandomAccessFileInputStream;
import com.google.archivepatcher.shared.TypedRange;
import com.google.archivepatcher.shared.UnitTestZipArchive;
import com.google.archivepatcher.shared.UnitTestZipEntry;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.io.UnsupportedEncodingException;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/**
* Tests for {@link PreDiffPlanner}.
*/
@RunWith(JUnit4.class)
@SuppressWarnings("javadoc")
public class PreDiffPlannerTest {
// All the A and B entries consist of a chunk of text followed by a standard corpus of text from
// the DefaultDeflateCompatibilityDiviner that ensures the tests will be able to discriminate
// between any compression level. Without this additional corpus text, multiple compression levels
// can match the entry and the unit tests would not be accurate.
private static final UnitTestZipEntry ENTRY_A_LEVEL_6 =
    UnitTestZipArchive.makeUnitTestZipEntry("/path A", 6, "entry A", null);
private static final UnitTestZipEntry ENTRY_A_LEVEL_9 =
    UnitTestZipArchive.makeUnitTestZipEntry("/path A", 9, "entry A", null);
private static final UnitTestZipEntry ENTRY_A_STORED =
    UnitTestZipArchive.makeUnitTestZipEntry("/path A", 0, "entry A", null);
private static final UnitTestZipEntry ENTRY_B_LEVEL_6 =
    UnitTestZipArchive.makeUnitTestZipEntry("/path B", 6, "entry B", null);
private static final UnitTestZipEntry ENTRY_B_LEVEL_9 =
    UnitTestZipArchive.makeUnitTestZipEntry("/path B", 9, "entry B", null);

/**
 * Entry C1 is a small entry WITHOUT the standard corpus of text from
 * {@link DefaultDeflateCompatibilityWindow} appended. It has exactly the same compressed length
 * as {@link #FIXED_LENGTH_ENTRY_C2_LEVEL_6}, and can be used to test the byte-matching logic in
 * the code when the compressed lengths are identical.
 */
private static final UnitTestZipEntry FIXED_LENGTH_ENTRY_C1_LEVEL_6 =
    new UnitTestZipEntry("/path C", 6, "qqqqqqqqqqqqqqqqqqqqqqqqqqqq", null);

/**
 * Entry C2 is a small entry WITHOUT the standard corpus of text from
 * {@link DefaultDeflateCompatibilityWindow} appended. It has exactly the same compressed length
 * as {@link #FIXED_LENGTH_ENTRY_C1_LEVEL_6}, and can be used to test the byte-matching logic in
 * the code when the compressed lengths are identical.
 */
private static final UnitTestZipEntry FIXED_LENGTH_ENTRY_C2_LEVEL_6 =
    new UnitTestZipEntry("/path C", 6, "rrrrrrrrrrrrrrrrrrrrrrrrrrrr", null);

// The "shadow" entries are exact copies of ENTRY_A_* but have a different path. These are used
// for the detection of renames that don't involve modification (i.e., the uncompressed CRC32 is
// exactly the same as the ENTRY_A_* entries)
private static final UnitTestZipEntry SHADOW_ENTRY_A_LEVEL_1 =
    UnitTestZipArchive.makeUnitTestZipEntry("/uncompressed data same as A", 1, "entry A", null);
private static final UnitTestZipEntry SHADOW_ENTRY_A_LEVEL_6 =
    UnitTestZipArchive.makeUnitTestZipEntry("/same as A level 6", 6, "entry A", null);
private static final UnitTestZipEntry SHADOW_ENTRY_A_LEVEL_9 =
    UnitTestZipArchive.makeUnitTestZipEntry("/same as A level 9", 9, "entry A", null);
private static final UnitTestZipEntry SHADOW_ENTRY_A_STORED =
    UnitTestZipArchive.makeUnitTestZipEntry("/same as A stored", 0, "entry A", null);

// Temp files created by storeAndMapArchive(); deleted in tearDown().
private List<File> tempFilesCreated;
// For each temp archive, its entries keyed by file-name bytes; filled by storeAndMapArchive().
private Map<File, Map<ByteArrayHolder, MinimalZipEntry>> entriesByPathByTempFile;
@Before
public void setup() {
  // Fresh, empty collections for each test; populated by storeAndMapArchive() and cleaned up in
  // tearDown(). Diamond operator matches the style used elsewhere in this file.
  tempFilesCreated = new LinkedList<>();
  entriesByPathByTempFile = new HashMap<>();
}
@After
public void tearDown() {
  // Best-effort cleanup of every temp archive created during the test; failures to delete are
  // deliberately ignored (the files are also registered with deleteOnExit()).
  for (File file : tempFilesCreated) {
    try {
      file.delete();
    } catch (Exception ignored) {
      // Nothing
    }
  }
}
/**
 * Stores the specified bytes to disk in a temp file, returns the temp file and caches the zip
 * entries for the file for use in later code.
 *
 * @param data the bytes to store, expected to be a valid zip file
 * @return the temp file the bytes were written to
 * @throws IOException if it fails
 */
private File storeAndMapArchive(byte[] data) throws IOException {
  File file = File.createTempFile("pdpt", "zip");
  tempFilesCreated.add(file);
  file.deleteOnExit();
  // try-with-resources closes the stream even if write() throws; the original closed manually
  // and leaked the file handle on exception.
  try (FileOutputStream out = new FileOutputStream(file)) {
    out.write(data);
    out.flush();
  }
  // Index the archive's entries by file-name bytes for later lookups via findEntry().
  Map<ByteArrayHolder, MinimalZipEntry> entriesByPath = new HashMap<>();
  for (MinimalZipEntry zipEntry : MinimalZipArchive.listEntries(file)) {
    ByteArrayHolder key = new ByteArrayHolder(zipEntry.getFileNameBytes());
    entriesByPath.put(key, zipEntry);
  }
  entriesByPathByTempFile.put(file, entriesByPath);
  return file;
}
/**
 * Finds a unit test entry in the specified temp file.
 *
 * @param tempFile the archive to search within
 * @param unitTestEntry the unit test entry to look up
 * @return the {@link MinimalZipEntry} corresponding to the unit test entry, or null if the path
 *     is not present in the archive's cached entry map
 */
private MinimalZipEntry findEntry(File tempFile, UnitTestZipEntry unitTestEntry) {
  Map<ByteArrayHolder, MinimalZipEntry> subMap = entriesByPathByTempFile.get(tempFile);
  Assert.assertNotNull("temp file not mapped", subMap);
  // StandardCharsets.UTF_8 is always available, so no checked UnsupportedEncodingException has
  // to be caught and wrapped (the original used the "UTF8" charset name plus a try/catch).
  ByteArrayHolder key = new ByteArrayHolder(unitTestEntry.path.getBytes(StandardCharsets.UTF_8));
  return subMap.get(key);
}
/**
 * Looks up the {@link TypedRange} covering the compressed data of the given unit test entry
 * inside the given temp archive.
 *
 * @param tempFile the archive to search within
 * @param unitTestEntry the unit test entry to look up
 * @return the {@link TypedRange} for the unit test entry's compressed data, with no metadata
 */
private TypedRange<Void> findRangeWithoutParams(File tempFile, UnitTestZipEntry unitTestEntry) {
  MinimalZipEntry entry = findEntry(tempFile, unitTestEntry);
  Assert.assertNotNull("entry not found in temp file", entry);
  return new TypedRange<Void>(
      entry.getFileOffsetOfCompressedData(), entry.getCompressedSize(), null);
}
/**
 * Looks up the {@link TypedRange} covering the compressed data of the given unit test entry
 * inside the given temp archive, annotated with the entry's deflate parameters.
 *
 * @param tempFile the archive to search within
 * @param unitTestEntry the unit test entry to look up
 * @return the {@link TypedRange} for the unit test entry's compressed data
 */
private TypedRange<JreDeflateParameters> findRangeWithParams(
    File tempFile, UnitTestZipEntry unitTestEntry) {
  MinimalZipEntry entry = findEntry(tempFile, unitTestEntry);
  Assert.assertNotNull("entry not found in temp file", entry);
  JreDeflateParameters params = JreDeflateParameters.of(unitTestEntry.level, 0, true);
  return new TypedRange<JreDeflateParameters>(
      entry.getFileOffsetOfCompressedData(), entry.getCompressedSize(), params);
}
/**
 * Deliberately introduce an error into the specified entry. This will make the entry impossible
 * to divine the settings for, because it is broken.
 *
 * @param tempFile the archive to search within
 * @param unitTestEntry the unit test entry to deliberately corrupt
 * @throws IOException if writing to the file fails
 */
private void corruptEntryData(File tempFile, UnitTestZipEntry unitTestEntry) throws IOException {
  TypedRange<Void> range = findRangeWithoutParams(tempFile, unitTestEntry);
  Assert.assertTrue("range too short to corrupt with 'junk'", range.getLength() >= 4);
  try (RandomAccessFile raf = new RandomAccessFile(tempFile, "rw")) {
    // Overwrite the start of the compressed data with garbage.
    raf.seek(range.getOffset());
    // StandardCharsets avoids the checked UnsupportedEncodingException of getBytes("UTF8").
    raf.write("junk".getBytes(StandardCharsets.UTF_8));
  }
}
/**
 * Deliberately garble the compression method in the specified entry such that it is no longer
 * deflate.
 *
 * @param tempFile the archive to search within
 * @param unitTestEntry the unit test entry to deliberately corrupt
 * @throws IOException if reading or writing the archive fails
 */
private void corruptCompressionMethod(File tempFile, UnitTestZipEntry unitTestEntry)
    throws IOException {
  long centralDirectoryRecordOffset = -1;
  try (RandomAccessFileInputStream rafis = new RandomAccessFileInputStream(tempFile)) {
    long startOfEocd = MinimalZipParser.locateStartOfEocd(rafis, 32768);
    rafis.setRange(startOfEocd, tempFile.length() - startOfEocd);
    MinimalCentralDirectoryMetadata centralDirectoryMetadata = MinimalZipParser.parseEocd(rafis);
    int numEntries = centralDirectoryMetadata.getNumEntriesInCentralDirectory();
    // Scan the central directory records for the one whose file name matches the target entry.
    rafis.setRange(
        centralDirectoryMetadata.getOffsetOfCentralDirectory(),
        centralDirectoryMetadata.getLengthOfCentralDirectory());
    for (int x = 0; x < numEntries; x++) {
      long recordStartOffset = rafis.getPosition();
      MinimalZipEntry candidate = MinimalZipParser.parseCentralDirectoryEntry(rafis);
      if (candidate.getFileName().equals(unitTestEntry.path)) {
        // Located! Track offset and stop scanning. (The original simulated a break by setting
        // the loop counter to numEntries; a real break is clearer.)
        centralDirectoryRecordOffset = recordStartOffset;
        break;
      }
    }
  }
  Assert.assertNotEquals("Entry not found", -1L, centralDirectoryRecordOffset);
  try (RandomAccessFile raf = new RandomAccessFile(tempFile, "rw")) {
    // The compression method is a 2-byte field stored 10 bytes into the central directory
    // record; overwriting both bytes with 7 makes the entry read as a non-deflate method.
    raf.seek(centralDirectoryRecordOffset + 10);
    raf.write(7);
    raf.write(7);
  }
}
/**
 * Builds the inputs a {@link PreDiffPlanner} needs from the two archives and invokes
 * {@code generatePreDiffPlan()}.
 *
 * @param oldFile the old archive
 * @param newFile the new archive
 * @param recommendationModifiers optional modifiers to pass through to the planner
 * @return the generated {@link PreDiffPlan}
 * @throws IOException if reading either archive fails
 */
private PreDiffPlan invokeGeneratePreDiffPlan(
    File oldFile, File newFile, RecommendationModifier... recommendationModifiers)
    throws IOException {
  // Collect the old archive's entries keyed by file-name bytes, preserving file order.
  Map<ByteArrayHolder, MinimalZipEntry> oldEntriesByPath = new LinkedHashMap<>();
  for (MinimalZipEntry zipEntry : MinimalZipArchive.listEntries(oldFile)) {
    oldEntriesByPath.put(new ByteArrayHolder(zipEntry.getFileNameBytes()), zipEntry);
  }
  // Divine deflate parameters for the new archive's entries while collecting them.
  Map<ByteArrayHolder, MinimalZipEntry> newEntriesByPath = new LinkedHashMap<>();
  Map<ByteArrayHolder, JreDeflateParameters> newJreDeflateParametersByPath = new LinkedHashMap<>();
  DefaultDeflateCompressionDiviner diviner = new DefaultDeflateCompressionDiviner();
  for (DivinationResult result : diviner.divineDeflateParameters(newFile)) {
    ByteArrayHolder key = new ByteArrayHolder(result.minimalZipEntry.getFileNameBytes());
    newEntriesByPath.put(key, result.minimalZipEntry);
    newJreDeflateParametersByPath.put(key, result.divinedParameters);
  }
  PreDiffPlanner preDiffPlanner =
      new PreDiffPlanner(
          oldFile,
          oldEntriesByPath,
          newFile,
          newEntriesByPath,
          newJreDeflateParametersByPath,
          recommendationModifiers);
  return preDiffPlanner.generatePreDiffPlan();
}
/**
 * Asserts that the plan's qualified recommendations match the expected ones, pairwise and in
 * order: old/new entry file names, recommendation, and reason must all be equal.
 */
private void checkRecommendation(PreDiffPlan plan, QualifiedRecommendation... expected) {
  List<QualifiedRecommendation> actuals = plan.getQualifiedRecommendations();
  Assert.assertNotNull(actuals);
  Assert.assertEquals(expected.length, actuals.size());
  for (int i = 0; i < expected.length; i++) {
    QualifiedRecommendation want = expected[i];
    QualifiedRecommendation got = actuals.get(i);
    Assert.assertEquals(want.getOldEntry().getFileName(), got.getOldEntry().getFileName());
    Assert.assertEquals(want.getNewEntry().getFileName(), got.getNewEntry().getFileName());
    Assert.assertEquals(want.getRecommendation(), got.getRecommendation());
    Assert.assertEquals(want.getReason(), got.getReason());
  }
}
@Test
public void testGeneratePreDiffPlan_OneCompressedEntry_Unchanged() throws IOException {
  // Identical bytes in both archives: the compressed entry is byte-for-byte unchanged.
  byte[] bytes = UnitTestZipArchive.makeTestZip(Collections.singletonList(ENTRY_A_LEVEL_6));
  File oldFile = storeAndMapArchive(bytes);
  File newFile = storeAndMapArchive(bytes);
  PreDiffPlan plan = invokeGeneratePreDiffPlan(oldFile, newFile);
  Assert.assertNotNull(plan);
  // The plan should be to leave the entry alone in both the old and new archives (empty plans).
  Assert.assertTrue(plan.getOldFileUncompressionPlan().isEmpty());
  Assert.assertTrue(plan.getNewFileUncompressionPlan().isEmpty());
  checkRecommendation(plan, new QualifiedRecommendation(
      findEntry(oldFile, ENTRY_A_LEVEL_6),
      findEntry(newFile, ENTRY_A_LEVEL_6),
      Recommendation.UNCOMPRESS_NEITHER,
      RecommendationReason.COMPRESSED_BYTES_IDENTICAL));
}
@Test
public void testGeneratePreDiffPlan_OneCompressedEntry_LengthsChanged() throws IOException {
  // Test detection of compressed entry differences based on length mismatch.
  // Same uncompressed content at levels 6 vs 9 yields different compressed lengths.
  byte[] oldBytes = UnitTestZipArchive.makeTestZip(Collections.singletonList(ENTRY_A_LEVEL_6));
  byte[] newBytes = UnitTestZipArchive.makeTestZip(Collections.singletonList(ENTRY_A_LEVEL_9));
  File oldFile = storeAndMapArchive(oldBytes);
  File newFile = storeAndMapArchive(newBytes);
  PreDiffPlan plan = invokeGeneratePreDiffPlan(oldFile, newFile);
  Assert.assertNotNull(plan);
  // The plan should be to uncompress the entry in both the old and new archives.
  Assert.assertEquals(1, plan.getOldFileUncompressionPlan().size());
  Assert.assertEquals(
      findRangeWithoutParams(oldFile, ENTRY_A_LEVEL_6),
      plan.getOldFileUncompressionPlan().get(0));
  Assert.assertEquals(1, plan.getNewFileUncompressionPlan().size());
  Assert.assertEquals(
      findRangeWithParams(newFile, ENTRY_A_LEVEL_9), plan.getNewFileUncompressionPlan().get(0));
  checkRecommendation(plan, new QualifiedRecommendation(
      findEntry(oldFile, ENTRY_A_LEVEL_6),
      findEntry(newFile, ENTRY_A_LEVEL_9),
      Recommendation.UNCOMPRESS_BOTH,
      RecommendationReason.COMPRESSED_BYTES_CHANGED));
}
@Test
public void testGeneratePreDiffPlan_OneCompressedEntry_BytesChanged() throws IOException {
  // Test detection of compressed entry differences based on binary content mismatch where the
  // compressed lengths are exactly the same - i.e., force a byte-by-byte comparison of the
  // compressed data in the two entries. Entries C1/C2 are constructed to have equal compressed
  // lengths (see field javadoc).
  byte[] oldBytes =
      UnitTestZipArchive.makeTestZip(Collections.singletonList(FIXED_LENGTH_ENTRY_C1_LEVEL_6));
  byte[] newBytes =
      UnitTestZipArchive.makeTestZip(Collections.singletonList(FIXED_LENGTH_ENTRY_C2_LEVEL_6));
  File oldFile = storeAndMapArchive(oldBytes);
  File newFile = storeAndMapArchive(newBytes);
  PreDiffPlan plan = invokeGeneratePreDiffPlan(oldFile, newFile);
  Assert.assertNotNull(plan);
  // The plan should be to uncompress the entry in both the old and new archives.
  Assert.assertEquals(1, plan.getOldFileUncompressionPlan().size());
  Assert.assertEquals(1, plan.getNewFileUncompressionPlan().size());
  Assert.assertEquals(
      findRangeWithoutParams(oldFile, FIXED_LENGTH_ENTRY_C1_LEVEL_6),
      plan.getOldFileUncompressionPlan().get(0));
  Assert.assertEquals(
      findRangeWithParams(newFile, FIXED_LENGTH_ENTRY_C2_LEVEL_6),
      plan.getNewFileUncompressionPlan().get(0));
  checkRecommendation(plan, new QualifiedRecommendation(
      findEntry(oldFile, FIXED_LENGTH_ENTRY_C1_LEVEL_6),
      findEntry(newFile, FIXED_LENGTH_ENTRY_C2_LEVEL_6),
      Recommendation.UNCOMPRESS_BOTH,
      RecommendationReason.COMPRESSED_BYTES_CHANGED));
}
@Test
public void testGeneratePreDiffPlan_OneUncompressedEntry() throws IOException {
  // Test with uncompressed old and new. It doesn't matter whether the bytes are changed or
  // unchanged in this case.
  byte[] oldBytes = UnitTestZipArchive.makeTestZip(Collections.singletonList(ENTRY_A_STORED));
  byte[] newBytes = UnitTestZipArchive.makeTestZip(Collections.singletonList(ENTRY_A_STORED));
  File oldFile = storeAndMapArchive(oldBytes);
  File newFile = storeAndMapArchive(newBytes);
  PreDiffPlan plan = invokeGeneratePreDiffPlan(oldFile, newFile);
  Assert.assertNotNull(plan);
  // The plan should be to do nothing because both entries are already uncompressed.
  Assert.assertTrue(plan.getOldFileUncompressionPlan().isEmpty());
  Assert.assertTrue(plan.getNewFileUncompressionPlan().isEmpty());
  checkRecommendation(plan, new QualifiedRecommendation(
      findEntry(oldFile, ENTRY_A_STORED),
      findEntry(newFile, ENTRY_A_STORED),
      Recommendation.UNCOMPRESS_NEITHER,
      RecommendationReason.BOTH_ENTRIES_UNCOMPRESSED));
}
@Test
public void testGeneratePreDiffPlan_OneEntry_CompressedToUncompressed() throws IOException {
  // Test the migration of an entry from compressed (old archive) to uncompressed (new archive).
  byte[] oldBytes = UnitTestZipArchive.makeTestZip(Collections.singletonList(ENTRY_A_LEVEL_9));
  byte[] newBytes = UnitTestZipArchive.makeTestZip(Collections.singletonList(ENTRY_A_STORED));
  File oldFile = storeAndMapArchive(oldBytes);
  File newFile = storeAndMapArchive(newBytes);
  PreDiffPlan plan = invokeGeneratePreDiffPlan(oldFile, newFile);
  Assert.assertNotNull(plan);
  // The plan should be to uncompress the entry in the old archive and do nothing in the new
  // archive (empty plan), since the new entry is already uncompressed.
  Assert.assertEquals(1, plan.getOldFileUncompressionPlan().size());
  Assert.assertEquals(
      findRangeWithoutParams(oldFile, ENTRY_A_LEVEL_9),
      plan.getOldFileUncompressionPlan().get(0));
  Assert.assertTrue(plan.getNewFileUncompressionPlan().isEmpty());
  checkRecommendation(plan, new QualifiedRecommendation(
      findEntry(oldFile, ENTRY_A_LEVEL_9),
      findEntry(newFile, ENTRY_A_STORED),
      Recommendation.UNCOMPRESS_OLD,
      RecommendationReason.COMPRESSED_CHANGED_TO_UNCOMPRESSED));
}
@Test
public void testGeneratePreDiffPlan_OneEntry_UncompressedToCompressed() throws IOException {
  // Test the migration of an entry from uncompressed (old archive) to compressed (new archive).
  byte[] oldBytes = UnitTestZipArchive.makeTestZip(Collections.singletonList(ENTRY_A_STORED));
  byte[] newBytes = UnitTestZipArchive.makeTestZip(Collections.singletonList(ENTRY_A_LEVEL_6));
  File oldFile = storeAndMapArchive(oldBytes);
  File newFile = storeAndMapArchive(newBytes);
  PreDiffPlan plan = invokeGeneratePreDiffPlan(oldFile, newFile);
  Assert.assertNotNull(plan);
  // The plan should be to do nothing in the old archive (empty plan) and uncompress the entry in
  // the new archive, since the old entry is already uncompressed.
  Assert.assertTrue(plan.getOldFileUncompressionPlan().isEmpty());
  Assert.assertEquals(1, plan.getNewFileUncompressionPlan().size());
  Assert.assertEquals(
      findRangeWithParams(newFile, ENTRY_A_LEVEL_6), plan.getNewFileUncompressionPlan().get(0));
  checkRecommendation(plan, new QualifiedRecommendation(
      findEntry(oldFile, ENTRY_A_STORED),
      findEntry(newFile, ENTRY_A_LEVEL_6),
      Recommendation.UNCOMPRESS_NEW,
      RecommendationReason.UNCOMPRESSED_CHANGED_TO_COMPRESSED));
}
@Test
public void testGeneratePreDiffPlan_OneEntry_UncompressedToUndivinable() throws IOException {
  // Test the migration of an entry from uncompressed (old archive) to compressed (new archive),
  // but make the new entry un-divinable and therefore un-recompressible.
  byte[] oldBytes = UnitTestZipArchive.makeTestZip(Collections.singletonList(ENTRY_A_STORED));
  byte[] newBytes = UnitTestZipArchive.makeTestZip(Collections.singletonList(ENTRY_A_LEVEL_6));
  File oldFile = storeAndMapArchive(oldBytes);
  File newFile = storeAndMapArchive(newBytes);
  // Deliberately break the entry in the new file so that it will not be divinable.
  corruptEntryData(newFile, ENTRY_A_LEVEL_6);
  PreDiffPlan plan = invokeGeneratePreDiffPlan(oldFile, newFile);
  Assert.assertNotNull(plan);
  // The plan WOULD be to do nothing in the old archive (empty plan) and uncompress the entry in
  // the new archive, but because the new entry is un-divinable it cannot be recompressed and so
  // the plan for the new archive should be empty as well.
  Assert.assertTrue(plan.getOldFileUncompressionPlan().isEmpty());
  Assert.assertTrue(plan.getNewFileUncompressionPlan().isEmpty());
  checkRecommendation(
      plan,
      new QualifiedRecommendation(
          findEntry(oldFile, ENTRY_A_STORED),
          findEntry(newFile, ENTRY_A_LEVEL_6),
          Recommendation.UNCOMPRESS_NEITHER,
          RecommendationReason.DEFLATE_UNSUITABLE));
}
@Test
public void testGeneratePreDiffPlan_OneEntry_OldUncompressed_NewNonDeflate() throws IOException {
  // Test the case where the entry is compressed with something other than deflate in the new
  // archive; it is thus not reproducible, not divinable, and therefore cannot be uncompressed.
  byte[] oldBytes = UnitTestZipArchive.makeTestZip(Collections.singletonList(ENTRY_A_STORED));
  byte[] newBytes = UnitTestZipArchive.makeTestZip(Collections.singletonList(ENTRY_A_LEVEL_9));
  File oldFile = storeAndMapArchive(oldBytes);
  File newFile = storeAndMapArchive(newBytes);
  // Garble the new entry's compression method so it no longer reads as deflate.
  corruptCompressionMethod(newFile, ENTRY_A_LEVEL_9);
  PreDiffPlan plan = invokeGeneratePreDiffPlan(oldFile, newFile);
  Assert.assertNotNull(plan);
  // The plan should be to do nothing (empty plans) because the entry in the old archive is
  // already uncompressed and the entry in the new archive is not compressed with deflate (i.e.,
  // cannot be recompressed so cannot be touched).
  Assert.assertTrue(plan.getOldFileUncompressionPlan().isEmpty());
  Assert.assertTrue(plan.getNewFileUncompressionPlan().isEmpty());
  checkRecommendation(plan, new QualifiedRecommendation(
      findEntry(oldFile, ENTRY_A_STORED),
      findEntry(newFile, ENTRY_A_LEVEL_9),
      Recommendation.UNCOMPRESS_NEITHER,
      RecommendationReason.UNSUITABLE));
}
@Test
public void testGeneratePreDiffPlan_OneEntry_OldNonDeflate_NewUncompressed() throws IOException {
  // Test the case where the entry is compressed with something other than deflate in the old
  // archive; it can't be uncompressed, so there's no point in modifying the new entry either.
  byte[] oldBytes = UnitTestZipArchive.makeTestZip(Collections.singletonList(ENTRY_A_LEVEL_9));
  byte[] newBytes = UnitTestZipArchive.makeTestZip(Collections.singletonList(ENTRY_A_STORED));
  File oldFile = storeAndMapArchive(oldBytes);
  File newFile = storeAndMapArchive(newBytes);
  // Garble the old entry's compression method so it no longer reads as deflate.
  corruptCompressionMethod(oldFile, ENTRY_A_LEVEL_9);
  PreDiffPlan plan = invokeGeneratePreDiffPlan(oldFile, newFile);
  Assert.assertNotNull(plan);
  // The plan should be to do nothing (empty plans) because the entry in the old archive is
  // not compressed with deflate, so there is no point in trying to do anything at all.
  Assert.assertTrue(plan.getOldFileUncompressionPlan().isEmpty());
  Assert.assertTrue(plan.getNewFileUncompressionPlan().isEmpty());
  checkRecommendation(plan, new QualifiedRecommendation(
      findEntry(oldFile, ENTRY_A_LEVEL_9),
      findEntry(newFile, ENTRY_A_STORED),
      Recommendation.UNCOMPRESS_NEITHER,
      RecommendationReason.UNSUITABLE));
}
@Test
public void testGeneratePreDiffPlan_OneEntry_BothNonDeflate() throws IOException {
  // Test the case where the entry is compressed with something other than deflate; it is thus
  // not reproducible, not divinable, and therefore cannot be uncompressed.
  byte[] oldBytes = UnitTestZipArchive.makeTestZip(Collections.singletonList(ENTRY_A_LEVEL_6));
  byte[] newBytes = UnitTestZipArchive.makeTestZip(Collections.singletonList(ENTRY_A_LEVEL_9));
  File oldFile = storeAndMapArchive(oldBytes);
  File newFile = storeAndMapArchive(newBytes);
  // Garble the compression method on both sides so neither entry reads as deflate.
  corruptCompressionMethod(oldFile, ENTRY_A_LEVEL_6);
  corruptCompressionMethod(newFile, ENTRY_A_LEVEL_9);
  PreDiffPlan plan = invokeGeneratePreDiffPlan(oldFile, newFile);
  Assert.assertNotNull(plan);
  // The plan should be to do nothing (empty plans) because the entries are not compressed with
  // deflate.
  Assert.assertTrue(plan.getOldFileUncompressionPlan().isEmpty());
  Assert.assertTrue(plan.getNewFileUncompressionPlan().isEmpty());
  checkRecommendation(plan, new QualifiedRecommendation(
      findEntry(oldFile, ENTRY_A_LEVEL_6),
      findEntry(newFile, ENTRY_A_LEVEL_9),
      Recommendation.UNCOMPRESS_NEITHER,
      RecommendationReason.UNSUITABLE));
}
@Test
public void testGeneratePreDiffPlan_TwoDifferentEntries_DifferentPaths() throws IOException {
  // Test the case where file paths are different as well as content within those files, i.e. each
  // entry is exclusive to its archive and is not the same.
  byte[] oldBytes = UnitTestZipArchive.makeTestZip(Collections.singletonList(ENTRY_A_LEVEL_6));
  byte[] newBytes = UnitTestZipArchive.makeTestZip(Collections.singletonList(ENTRY_B_LEVEL_6));
  File oldFile = storeAndMapArchive(oldBytes);
  File newFile = storeAndMapArchive(newBytes);
  PreDiffPlan plan = invokeGeneratePreDiffPlan(oldFile, newFile);
  Assert.assertNotNull(plan);
  // The plan should be to do nothing (empty plans) because entry A is only in the old archive and
  // entry B is only in the new archive, so there is nothing to diff. No recommendations either.
  Assert.assertTrue(plan.getOldFileUncompressionPlan().isEmpty());
  Assert.assertTrue(plan.getNewFileUncompressionPlan().isEmpty());
  Assert.assertTrue(plan.getQualifiedRecommendations().isEmpty());
}
@Test
public void testGeneratePreDiffPlan_TwoEntriesEachArchive_SwappingOrder() throws IOException {
  // Test the case where two entries in each archive have both changed, AND they have changed
  // places in the file. The plan is supposed to be in file order, so that streaming is possible;
  // check that it is so.
  byte[] oldBytes =
      UnitTestZipArchive.makeTestZip(Arrays.asList(ENTRY_A_LEVEL_6, ENTRY_B_LEVEL_6));
  byte[] newBytes =
      UnitTestZipArchive.makeTestZip(Arrays.asList(ENTRY_B_LEVEL_9, ENTRY_A_LEVEL_9));
  File oldFile = storeAndMapArchive(oldBytes);
  File newFile = storeAndMapArchive(newBytes);
  PreDiffPlan plan = invokeGeneratePreDiffPlan(oldFile, newFile);
  Assert.assertNotNull(plan);
  // The plan should be to uncompress both entries, but the order is important. File order should
  // be in both plans: A-then-B for the old archive, B-then-A for the new archive.
  Assert.assertEquals(2, plan.getOldFileUncompressionPlan().size());
  Assert.assertEquals(2, plan.getNewFileUncompressionPlan().size());
  Assert.assertEquals(
      findRangeWithoutParams(oldFile, ENTRY_A_LEVEL_6),
      plan.getOldFileUncompressionPlan().get(0));
  Assert.assertEquals(
      findRangeWithoutParams(oldFile, ENTRY_B_LEVEL_6),
      plan.getOldFileUncompressionPlan().get(1));
  Assert.assertEquals(
      findRangeWithParams(newFile, ENTRY_B_LEVEL_9), plan.getNewFileUncompressionPlan().get(0));
  Assert.assertEquals(
      findRangeWithParams(newFile, ENTRY_A_LEVEL_9), plan.getNewFileUncompressionPlan().get(1));
}
@Test
public void testGeneratePreDiffPlan_SimpleRename_Unchanged() throws IOException {
  // Test the case where file paths are different but the uncompressed content is the same.
  // The compression method used for both entries is identical, as are the compressed bytes.
  byte[] oldBytes = UnitTestZipArchive.makeTestZip(Collections.singletonList(ENTRY_A_LEVEL_6));
  byte[] newBytes =
      UnitTestZipArchive.makeTestZip(Collections.singletonList(SHADOW_ENTRY_A_LEVEL_6));
  File oldFile = storeAndMapArchive(oldBytes);
  File newFile = storeAndMapArchive(newBytes);
  PreDiffPlan plan = invokeGeneratePreDiffPlan(oldFile, newFile);
  Assert.assertNotNull(plan);
  // The plan should be to do nothing (empty plans) because the bytes are identical in both files
  // so the entries should remain compressed. However, unlike the case where there was no match,
  // there is now a qualified recommendation in the returned list.
  Assert.assertTrue(plan.getOldFileUncompressionPlan().isEmpty());
  Assert.assertTrue(plan.getNewFileUncompressionPlan().isEmpty());
  checkRecommendation(
      plan,
      new QualifiedRecommendation(
          findEntry(oldFile, ENTRY_A_LEVEL_6),
          findEntry(newFile, SHADOW_ENTRY_A_LEVEL_6),
          Recommendation.UNCOMPRESS_NEITHER,
          RecommendationReason.COMPRESSED_BYTES_IDENTICAL));
}
@Test
public void testGeneratePreDiffPlan_SimpleRename_CompressionLevelChanged() throws IOException {
  // Test the case where file paths are different but the uncompressed content is the same.
  // The compression method used for each entry is different but the CRC32 is still the same, so
  // unlike the plan with identical entries this time the plan should be to uncompress both
  // entries, allowing a super-efficient delta.
  byte[] oldBytes = UnitTestZipArchive.makeTestZip(Collections.singletonList(ENTRY_A_LEVEL_6));
  byte[] newBytes =
      UnitTestZipArchive.makeTestZip(Collections.singletonList(SHADOW_ENTRY_A_LEVEL_9));
  File oldFile = storeAndMapArchive(oldBytes);
  File newFile = storeAndMapArchive(newBytes);
  PreDiffPlan plan = invokeGeneratePreDiffPlan(oldFile, newFile);
  Assert.assertNotNull(plan);
  // The plan should be to uncompress both entries so that a super-efficient delta can be done.
  Assert.assertEquals(1, plan.getOldFileUncompressionPlan().size());
  Assert.assertEquals(
      findRangeWithoutParams(oldFile, ENTRY_A_LEVEL_6),
      plan.getOldFileUncompressionPlan().get(0));
  Assert.assertEquals(1, plan.getNewFileUncompressionPlan().size());
  Assert.assertEquals(
      findRangeWithParams(newFile, SHADOW_ENTRY_A_LEVEL_9),
      plan.getNewFileUncompressionPlan().get(0));
  checkRecommendation(
      plan,
      new QualifiedRecommendation(
          findEntry(oldFile, ENTRY_A_LEVEL_6),
          findEntry(newFile, SHADOW_ENTRY_A_LEVEL_9),
          Recommendation.UNCOMPRESS_BOTH,
          RecommendationReason.COMPRESSED_BYTES_CHANGED));
}
@Test
public void testGeneratePreDiffPlan_ClonedAndCompressionLevelChanged() throws IOException {
  // Test the case where an entry exists in both old and new APK with identical uncompressed
  // content but different compressed content ***AND*** additionally a new copy exists in the new
  // archive, also with identical uncompressed content and different compressed content, i.e.:
  //
  // OLD APK:                              NEW APK:
  // ------------------------------------  -----------------------------------------------
  // foo.xml (compressed level 6)          foo.xml (compressed level 9, content unchanged)
  //                                       bar.xml (copy of foo.xml, compressed level 1)
  //
  // This test ensures that in such cases the foo.xml from the old apk is only enqueued for
  // uncompression ONE TIME.
  byte[] oldBytes = UnitTestZipArchive.makeTestZip(Collections.singletonList(ENTRY_A_LEVEL_6));
  byte[] newBytes =
      UnitTestZipArchive.makeTestZip(
          Arrays.asList(SHADOW_ENTRY_A_LEVEL_1, SHADOW_ENTRY_A_LEVEL_9));
  File oldFile = storeAndMapArchive(oldBytes);
  File newFile = storeAndMapArchive(newBytes);
  PreDiffPlan plan = invokeGeneratePreDiffPlan(oldFile, newFile);
  Assert.assertNotNull(plan);
  // The plan should be to uncompress both entries so that a super-efficient delta can be done.
  // Critically there should only be ONE command for the old file uncompression step!
  Assert.assertEquals(1, plan.getOldFileUncompressionPlan().size());
  Assert.assertEquals(
      findRangeWithoutParams(oldFile, ENTRY_A_LEVEL_6),
      plan.getOldFileUncompressionPlan().get(0));
  Assert.assertEquals(2, plan.getNewFileUncompressionPlan().size());
  Assert.assertEquals(
      findRangeWithParams(newFile, SHADOW_ENTRY_A_LEVEL_1),
      plan.getNewFileUncompressionPlan().get(0));
  Assert.assertEquals(
      findRangeWithParams(newFile, SHADOW_ENTRY_A_LEVEL_9),
      plan.getNewFileUncompressionPlan().get(1));
  // Both new-archive copies pair with the single old entry in the recommendations.
  checkRecommendation(
      plan,
      new QualifiedRecommendation(
          findEntry(oldFile, ENTRY_A_LEVEL_6),
          findEntry(newFile, SHADOW_ENTRY_A_LEVEL_1),
          Recommendation.UNCOMPRESS_BOTH,
          RecommendationReason.COMPRESSED_BYTES_CHANGED),
      new QualifiedRecommendation(
          findEntry(oldFile, ENTRY_A_LEVEL_6),
          findEntry(newFile, SHADOW_ENTRY_A_LEVEL_9),
          Recommendation.UNCOMPRESS_BOTH,
          RecommendationReason.COMPRESSED_BYTES_CHANGED));
}
@Test
public void testGeneratePreDiffPlan_SimpleRename_CompressedToUncompressed() throws IOException {
// Test the case where file paths are different but the uncompressed content is the same.
// The compression method is changed from compressed to uncompressed but the rename should still
// be detected and the plan should be to uncompress the old entry only.
byte[] oldBytes = UnitTestZipArchive.makeTestZip(Collections.singletonList(ENTRY_A_LEVEL_6));
byte[] newBytes =
UnitTestZipArchive.makeTestZip(Collections.singletonList(SHADOW_ENTRY_A_STORED));
File oldFile = storeAndMapArchive(oldBytes);
File newFile = storeAndMapArchive(newBytes);
PreDiffPlan plan = invokeGeneratePreDiffPlan(oldFile, newFile);
Assert.assertNotNull(plan);
// The plan should be to uncompress the old entry so that a super-efficient delta can be done.
// The new entry isn't touched because it is already uncompressed.
Assert.assertEquals(1, plan.getOldFileUncompressionPlan().size());
Assert.assertEquals(
findRangeWithoutParams(oldFile, ENTRY_A_LEVEL_6),
plan.getOldFileUncompressionPlan().get(0));
Assert.assertTrue(plan.getNewFileUncompressionPlan().isEmpty());
checkRecommendation(
plan,
new QualifiedRecommendation(
findEntry(oldFile, ENTRY_A_LEVEL_6),
findEntry(newFile, SHADOW_ENTRY_A_STORED),
Recommendation.UNCOMPRESS_OLD,
RecommendationReason.COMPRESSED_CHANGED_TO_UNCOMPRESSED));
}
@Test
public void testGeneratePreDiffPlan_SimpleRename_UncompressedToCompressed() throws IOException {
// Test the case where file paths are different but the uncompressed content is the same.
// The compression method is changed from uncompressed to compressed but the rename should still
// be detected and the plan should be to uncompress the new entry only.
byte[] oldBytes = UnitTestZipArchive.makeTestZip(Collections.singletonList(ENTRY_A_STORED));
byte[] newBytes =
UnitTestZipArchive.makeTestZip(Collections.singletonList(SHADOW_ENTRY_A_LEVEL_6));
File oldFile = storeAndMapArchive(oldBytes);
File newFile = storeAndMapArchive(newBytes);
PreDiffPlan plan = invokeGeneratePreDiffPlan(oldFile, newFile);
Assert.assertNotNull(plan);
// The plan should be to uncompress the new entry so that a super-efficient delta can be done.
// The old entry isn't touched because it is already uncompressed.
Assert.assertTrue(plan.getOldFileUncompressionPlan().isEmpty());
Assert.assertEquals(1, plan.getNewFileUncompressionPlan().size());
Assert.assertEquals(
findRangeWithParams(newFile, SHADOW_ENTRY_A_LEVEL_6),
plan.getNewFileUncompressionPlan().get(0));
checkRecommendation(
plan,
new QualifiedRecommendation(
findEntry(oldFile, ENTRY_A_STORED),
findEntry(newFile, SHADOW_ENTRY_A_LEVEL_6),
Recommendation.UNCOMPRESS_NEW,
RecommendationReason.UNCOMPRESSED_CHANGED_TO_COMPRESSED));
}
}
|
googleapis/google-cloud-java | 36,634 | java-dlp/proto-google-cloud-dlp-v2/src/main/java/com/google/privacy/dlp/v2/ListInfoTypesRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/privacy/dlp/v2/dlp.proto
// Protobuf Java Version: 3.25.8
package com.google.privacy.dlp.v2;
/**
*
*
* <pre>
* Request for the list of infoTypes.
* </pre>
*
* Protobuf type {@code google.privacy.dlp.v2.ListInfoTypesRequest}
*/
public final class ListInfoTypesRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.privacy.dlp.v2.ListInfoTypesRequest)
ListInfoTypesRequestOrBuilder {
  // NOTE(review): protoc-generated code ("DO NOT EDIT" header above) — comments only, no
  // hand-edits to logic; regenerate from google/privacy/dlp/v2/dlp.proto instead.
  private static final long serialVersionUID = 0L;
  // Use ListInfoTypesRequest.newBuilder() to construct.
  private ListInfoTypesRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Default constructor: initializes every string field to the proto3 default ("").
  private ListInfoTypesRequest() {
    parent_ = "";
    languageCode_ = "";
    filter_ = "";
    locationId_ = "";
  }
  // Used reflectively by the runtime to create fresh instances (e.g. during parsing).
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListInfoTypesRequest();
  }
  // Returns the descriptor for this message type, registered in DlpProto.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.privacy.dlp.v2.DlpProto
        .internal_static_google_privacy_dlp_v2_ListInfoTypesRequest_descriptor;
  }
  // Wires up reflective field access for this message and its Builder.
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.privacy.dlp.v2.DlpProto
        .internal_static_google_privacy_dlp_v2_ListInfoTypesRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.privacy.dlp.v2.ListInfoTypesRequest.class,
            com.google.privacy.dlp.v2.ListInfoTypesRequest.Builder.class);
  }
  public static final int PARENT_FIELD_NUMBER = 4;
  // Proto-string storage idiom: the Object holds either a java.lang.String or a ByteString.
  // The first String access decodes UTF-8 once and caches the decoded value back into the
  // field (safe because both representations are equivalent and the write is atomic).
  @SuppressWarnings("serial")
  private volatile java.lang.Object parent_ = "";
  /**
   *
   *
   * <pre>
   * The parent resource name.
   *
   * The format of this value is as follows:
   *
   * `locations/{location_id}`
   * </pre>
   *
   * <code>string parent = 4;</code>
   *
   * @return The parent.
   */
  @java.lang.Override
  public java.lang.String getParent() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      parent_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * The parent resource name.
   *
   * The format of this value is as follows:
   *
   * `locations/{location_id}`
   * </pre>
   *
   * <code>string parent = 4;</code>
   *
   * @return The bytes for parent.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getParentBytes() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      parent_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int LANGUAGE_CODE_FIELD_NUMBER = 1;
  // Same lazy UTF-8 decode-and-cache idiom as parent_.
  @SuppressWarnings("serial")
  private volatile java.lang.Object languageCode_ = "";
  /**
   *
   *
   * <pre>
   * BCP-47 language code for localized infoType friendly
   * names. If omitted, or if localized strings are not available,
   * en-US strings will be returned.
   * </pre>
   *
   * <code>string language_code = 1;</code>
   *
   * @return The languageCode.
   */
  @java.lang.Override
  public java.lang.String getLanguageCode() {
    java.lang.Object ref = languageCode_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      languageCode_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * BCP-47 language code for localized infoType friendly
   * names. If omitted, or if localized strings are not available,
   * en-US strings will be returned.
   * </pre>
   *
   * <code>string language_code = 1;</code>
   *
   * @return The bytes for languageCode.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getLanguageCodeBytes() {
    java.lang.Object ref = languageCode_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      languageCode_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int FILTER_FIELD_NUMBER = 2;
  // Same lazy UTF-8 decode-and-cache idiom as parent_.
  @SuppressWarnings("serial")
  private volatile java.lang.Object filter_ = "";
  /**
   *
   *
   * <pre>
   * filter to only return infoTypes supported by certain parts of the
   * API. Defaults to supported_by=INSPECT.
   * </pre>
   *
   * <code>string filter = 2;</code>
   *
   * @return The filter.
   */
  @java.lang.Override
  public java.lang.String getFilter() {
    java.lang.Object ref = filter_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      filter_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * filter to only return infoTypes supported by certain parts of the
   * API. Defaults to supported_by=INSPECT.
   * </pre>
   *
   * <code>string filter = 2;</code>
   *
   * @return The bytes for filter.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getFilterBytes() {
    java.lang.Object ref = filter_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      filter_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int LOCATION_ID_FIELD_NUMBER = 3;
  // Same lazy UTF-8 decode-and-cache idiom as parent_. Field is deprecated in the proto.
  @SuppressWarnings("serial")
  private volatile java.lang.Object locationId_ = "";
  /**
   *
   *
   * <pre>
   * Deprecated. This field has no effect.
   * </pre>
   *
   * <code>string location_id = 3;</code>
   *
   * @return The locationId.
   */
  @java.lang.Override
  public java.lang.String getLocationId() {
    java.lang.Object ref = locationId_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      locationId_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Deprecated. This field has no effect.
   * </pre>
   *
   * <code>string location_id = 3;</code>
   *
   * @return The bytes for locationId.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getLocationIdBytes() {
    java.lang.Object ref = locationId_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      locationId_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Memoized result of isInitialized(): -1 = not yet computed, 1 = true, 0 = false.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // Proto3 message with no required fields: always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes set (non-empty) fields in ascending field-number order; proto3 default
  // values (empty strings) are skipped on the wire.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(languageCode_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, languageCode_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, filter_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(locationId_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, locationId_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 4, parent_);
    }
    getUnknownFields().writeTo(output);
  }
  // Computes (and memoizes in memoizedSize) the serialized byte size; must mirror writeTo.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(languageCode_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, languageCode_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, filter_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(locationId_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, locationId_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, parent_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Field-by-field equality, including unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.privacy.dlp.v2.ListInfoTypesRequest)) {
      return super.equals(obj);
    }
    com.google.privacy.dlp.v2.ListInfoTypesRequest other =
        (com.google.privacy.dlp.v2.ListInfoTypesRequest) obj;
    if (!getParent().equals(other.getParent())) return false;
    if (!getLanguageCode().equals(other.getLanguageCode())) return false;
    if (!getFilter().equals(other.getFilter())) return false;
    if (!getLocationId().equals(other.getLocationId())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Hash over descriptor, each field (tagged by field number) and unknown fields;
  // memoized in memoizedHashCode (0 means "not yet computed").
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    hash = (37 * hash) + LANGUAGE_CODE_FIELD_NUMBER;
    hash = (53 * hash) + getLanguageCode().hashCode();
    hash = (37 * hash) + FILTER_FIELD_NUMBER;
    hash = (53 * hash) + getFilter().hashCode();
    hash = (37 * hash) + LOCATION_ID_FIELD_NUMBER;
    hash = (53 * hash) + getLocationId().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points; all delegate to PARSER (declared later in the
  // generated file) or to the GeneratedMessageV3 IO helpers.
  public static com.google.privacy.dlp.v2.ListInfoTypesRequest parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.privacy.dlp.v2.ListInfoTypesRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.privacy.dlp.v2.ListInfoTypesRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.privacy.dlp.v2.ListInfoTypesRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.privacy.dlp.v2.ListInfoTypesRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.privacy.dlp.v2.ListInfoTypesRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.privacy.dlp.v2.ListInfoTypesRequest parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.privacy.dlp.v2.ListInfoTypesRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Delimited variants read a leading varint length prefix before the message bytes.
  public static com.google.privacy.dlp.v2.ListInfoTypesRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.privacy.dlp.v2.ListInfoTypesRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.privacy.dlp.v2.ListInfoTypesRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.privacy.dlp.v2.ListInfoTypesRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Builder factory methods.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.privacy.dlp.v2.ListInfoTypesRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Request for the list of infoTypes.
* </pre>
*
* Protobuf type {@code google.privacy.dlp.v2.ListInfoTypesRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.privacy.dlp.v2.ListInfoTypesRequest)
com.google.privacy.dlp.v2.ListInfoTypesRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.privacy.dlp.v2.DlpProto
.internal_static_google_privacy_dlp_v2_ListInfoTypesRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.privacy.dlp.v2.DlpProto
.internal_static_google_privacy_dlp_v2_ListInfoTypesRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.privacy.dlp.v2.ListInfoTypesRequest.class,
com.google.privacy.dlp.v2.ListInfoTypesRequest.Builder.class);
}
// Construct using com.google.privacy.dlp.v2.ListInfoTypesRequest.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
parent_ = "";
languageCode_ = "";
filter_ = "";
locationId_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.privacy.dlp.v2.DlpProto
.internal_static_google_privacy_dlp_v2_ListInfoTypesRequest_descriptor;
}
@java.lang.Override
public com.google.privacy.dlp.v2.ListInfoTypesRequest getDefaultInstanceForType() {
return com.google.privacy.dlp.v2.ListInfoTypesRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.privacy.dlp.v2.ListInfoTypesRequest build() {
com.google.privacy.dlp.v2.ListInfoTypesRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.privacy.dlp.v2.ListInfoTypesRequest buildPartial() {
com.google.privacy.dlp.v2.ListInfoTypesRequest result =
new com.google.privacy.dlp.v2.ListInfoTypesRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.privacy.dlp.v2.ListInfoTypesRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.parent_ = parent_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.languageCode_ = languageCode_;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.filter_ = filter_;
}
if (((from_bitField0_ & 0x00000008) != 0)) {
result.locationId_ = locationId_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.privacy.dlp.v2.ListInfoTypesRequest) {
return mergeFrom((com.google.privacy.dlp.v2.ListInfoTypesRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.privacy.dlp.v2.ListInfoTypesRequest other) {
if (other == com.google.privacy.dlp.v2.ListInfoTypesRequest.getDefaultInstance()) return this;
if (!other.getParent().isEmpty()) {
parent_ = other.parent_;
bitField0_ |= 0x00000001;
onChanged();
}
if (!other.getLanguageCode().isEmpty()) {
languageCode_ = other.languageCode_;
bitField0_ |= 0x00000002;
onChanged();
}
if (!other.getFilter().isEmpty()) {
filter_ = other.filter_;
bitField0_ |= 0x00000004;
onChanged();
}
if (!other.getLocationId().isEmpty()) {
locationId_ = other.locationId_;
bitField0_ |= 0x00000008;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
languageCode_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 10
case 18:
{
filter_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000004;
break;
} // case 18
case 26:
{
locationId_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000008;
break;
} // case 26
case 34:
{
parent_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 34
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* The parent resource name.
*
* The format of this value is as follows:
*
* `locations/{location_id}`
* </pre>
*
* <code>string parent = 4;</code>
*
* @return The parent.
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* The parent resource name.
*
* The format of this value is as follows:
*
* `locations/{location_id}`
* </pre>
*
* <code>string parent = 4;</code>
*
* @return The bytes for parent.
*/
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* The parent resource name.
*
* The format of this value is as follows:
*
* `locations/{location_id}`
* </pre>
*
* <code>string parent = 4;</code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
public Builder setParent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* The parent resource name.
*
* The format of this value is as follows:
*
* `locations/{location_id}`
* </pre>
*
* <code>string parent = 4;</code>
*
* @return This builder for chaining.
*/
public Builder clearParent() {
parent_ = getDefaultInstance().getParent();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* The parent resource name.
*
* The format of this value is as follows:
*
* `locations/{location_id}`
* </pre>
*
* <code>string parent = 4;</code>
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
*/
public Builder setParentBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private java.lang.Object languageCode_ = "";
/**
*
*
* <pre>
* BCP-47 language code for localized infoType friendly
* names. If omitted, or if localized strings are not available,
* en-US strings will be returned.
* </pre>
*
* <code>string language_code = 1;</code>
*
* @return The languageCode.
*/
public java.lang.String getLanguageCode() {
java.lang.Object ref = languageCode_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
languageCode_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* BCP-47 language code for localized infoType friendly
* names. If omitted, or if localized strings are not available,
* en-US strings will be returned.
* </pre>
*
* <code>string language_code = 1;</code>
*
* @return The bytes for languageCode.
*/
public com.google.protobuf.ByteString getLanguageCodeBytes() {
java.lang.Object ref = languageCode_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
languageCode_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* BCP-47 language code for localized infoType friendly
* names. If omitted, or if localized strings are not available,
* en-US strings will be returned.
* </pre>
*
* <code>string language_code = 1;</code>
*
* @param value The languageCode to set.
* @return This builder for chaining.
*/
public Builder setLanguageCode(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
languageCode_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* BCP-47 language code for localized infoType friendly
* names. If omitted, or if localized strings are not available,
* en-US strings will be returned.
* </pre>
*
* <code>string language_code = 1;</code>
*
* @return This builder for chaining.
*/
public Builder clearLanguageCode() {
languageCode_ = getDefaultInstance().getLanguageCode();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* BCP-47 language code for localized infoType friendly
* names. If omitted, or if localized strings are not available,
* en-US strings will be returned.
* </pre>
*
* <code>string language_code = 1;</code>
*
* @param value The bytes for languageCode to set.
* @return This builder for chaining.
*/
public Builder setLanguageCodeBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
languageCode_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private java.lang.Object filter_ = "";
/**
*
*
* <pre>
* filter to only return infoTypes supported by certain parts of the
* API. Defaults to supported_by=INSPECT.
* </pre>
*
* <code>string filter = 2;</code>
*
* @return The filter.
*/
public java.lang.String getFilter() {
java.lang.Object ref = filter_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
filter_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* filter to only return infoTypes supported by certain parts of the
* API. Defaults to supported_by=INSPECT.
* </pre>
*
* <code>string filter = 2;</code>
*
* @return The bytes for filter.
*/
public com.google.protobuf.ByteString getFilterBytes() {
java.lang.Object ref = filter_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
filter_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* filter to only return infoTypes supported by certain parts of the
* API. Defaults to supported_by=INSPECT.
* </pre>
*
* <code>string filter = 2;</code>
*
* @param value The filter to set.
* @return This builder for chaining.
*/
public Builder setFilter(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
filter_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* filter to only return infoTypes supported by certain parts of the
* API. Defaults to supported_by=INSPECT.
* </pre>
*
* <code>string filter = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearFilter() {
filter_ = getDefaultInstance().getFilter();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
*
*
* <pre>
* filter to only return infoTypes supported by certain parts of the
* API. Defaults to supported_by=INSPECT.
* </pre>
*
* <code>string filter = 2;</code>
*
* @param value The bytes for filter to set.
* @return This builder for chaining.
*/
public Builder setFilterBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
filter_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
private java.lang.Object locationId_ = "";
/**
*
*
* <pre>
* Deprecated. This field has no effect.
* </pre>
*
* <code>string location_id = 3;</code>
*
* @return The locationId.
*/
public java.lang.String getLocationId() {
java.lang.Object ref = locationId_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
locationId_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Deprecated. This field has no effect.
* </pre>
*
* <code>string location_id = 3;</code>
*
* @return The bytes for locationId.
*/
public com.google.protobuf.ByteString getLocationIdBytes() {
java.lang.Object ref = locationId_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
// Cache the encoded ByteString so subsequent calls skip the UTF-8 encoding.
locationId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Deprecated. This field has no effect.
* </pre>
*
* <code>string location_id = 3;</code>
*
* @param value The locationId to set.
* @return This builder for chaining.
*/
public Builder setLocationId(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
locationId_ = value;
// Mark bit 0x08 ("locationId explicitly set") so buildPartial() copies the value.
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
*
*
* <pre>
* Deprecated. This field has no effect.
* </pre>
*
* <code>string location_id = 3;</code>
*
* @return This builder for chaining.
*/
public Builder clearLocationId() {
// Restore the field's default value, read from the immutable default instance.
locationId_ = getDefaultInstance().getLocationId();
// Clear the "explicitly set" bit for locationId (0x08).
bitField0_ = (bitField0_ & ~0x00000008);
onChanged();
return this;
}
/**
*
*
* <pre>
* Deprecated. This field has no effect.
* </pre>
*
* <code>string location_id = 3;</code>
*
* @param value The bytes for locationId to set.
* @return This builder for chaining.
*/
public Builder setLocationIdBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
// Reject byte strings that are not valid UTF-8 (proto3 string fields must be UTF-8).
checkByteStringIsUtf8(value);
locationId_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
// Final override: delegates unknown-field handling unchanged to the base builder.
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
// Final override: delegates unknown-field merging unchanged to the base builder.
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.privacy.dlp.v2.ListInfoTypesRequest)
}
// @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.ListInfoTypesRequest)
// Singleton default (all-fields-default) instance; returned by getDefaultInstance()
// and used as the merge/clear baseline for this message type.
private static final com.google.privacy.dlp.v2.ListInfoTypesRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.privacy.dlp.v2.ListInfoTypesRequest();
}
// Returns the shared immutable default instance of ListInfoTypesRequest.
public static com.google.privacy.dlp.v2.ListInfoTypesRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Stateless singleton parser. All parse failures are normalized to
// InvalidProtocolBufferException, with the partially-parsed message attached
// via setUnfinishedMessage() for callers that want the partial result.
private static final com.google.protobuf.Parser<ListInfoTypesRequest> PARSER =
new com.google.protobuf.AbstractParser<ListInfoTypesRequest>() {
@java.lang.Override
public ListInfoTypesRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
// Missing required fields: convert to the standard protobuf parse exception.
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
// Wrap raw I/O errors so callers only ever see InvalidProtocolBufferException.
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
// Static accessor for the singleton parser.
public static com.google.protobuf.Parser<ListInfoTypesRequest> parser() {
return PARSER;
}
// Instance accessor for the singleton parser (required by the Message interface).
@java.lang.Override
public com.google.protobuf.Parser<ListInfoTypesRequest> getParserForType() {
return PARSER;
}
// Instance accessor for the shared default instance (required by the Message interface).
@java.lang.Override
public com.google.privacy.dlp.v2.ListInfoTypesRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
// ==== End of generated file: google.privacy.dlp.v2.ListInfoTypesRequest ====
// ==== Begin concatenated generated file (provenance: googleapis/google-cloud-java,
// java-talent/proto-google-cloud-talent-v4/src/main/java/com/google/cloud/talent/v4/UpdateTenantRequest.java) ====
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/talent/v4/tenant_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.talent.v4;
/**
*
*
* <pre>
* Request for updating a specified tenant.
* </pre>
*
* Protobuf type {@code google.cloud.talent.v4.UpdateTenantRequest}
*/
public final class UpdateTenantRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.talent.v4.UpdateTenantRequest)
UpdateTenantRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use UpdateTenantRequest.newBuilder() to construct.
private UpdateTenantRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private UpdateTenantRequest() {}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new UpdateTenantRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.talent.v4.TenantServiceProto
.internal_static_google_cloud_talent_v4_UpdateTenantRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.talent.v4.TenantServiceProto
.internal_static_google_cloud_talent_v4_UpdateTenantRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.talent.v4.UpdateTenantRequest.class,
com.google.cloud.talent.v4.UpdateTenantRequest.Builder.class);
}
private int bitField0_;
public static final int TENANT_FIELD_NUMBER = 1;
private com.google.cloud.talent.v4.Tenant tenant_;
/**
*
*
* <pre>
* Required. The tenant resource to replace the current resource in the
* system.
* </pre>
*
* <code>.google.cloud.talent.v4.Tenant tenant = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the tenant field is set.
*/
@java.lang.Override
public boolean hasTenant() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The tenant resource to replace the current resource in the
* system.
* </pre>
*
* <code>.google.cloud.talent.v4.Tenant tenant = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The tenant.
*/
@java.lang.Override
public com.google.cloud.talent.v4.Tenant getTenant() {
return tenant_ == null ? com.google.cloud.talent.v4.Tenant.getDefaultInstance() : tenant_;
}
/**
*
*
* <pre>
* Required. The tenant resource to replace the current resource in the
* system.
* </pre>
*
* <code>.google.cloud.talent.v4.Tenant tenant = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.talent.v4.TenantOrBuilder getTenantOrBuilder() {
return tenant_ == null ? com.google.cloud.talent.v4.Tenant.getDefaultInstance() : tenant_;
}
public static final int UPDATE_MASK_FIELD_NUMBER = 2;
private com.google.protobuf.FieldMask updateMask_;
/**
*
*
* <pre>
* Strongly recommended for the best service experience.
*
* If [update_mask][google.cloud.talent.v4.UpdateTenantRequest.update_mask] is
* provided, only the specified fields in
* [tenant][google.cloud.talent.v4.UpdateTenantRequest.tenant] are updated.
* Otherwise all the fields are updated.
*
* A field mask to specify the tenant fields to be updated. Only
* top level fields of [Tenant][google.cloud.talent.v4.Tenant] are supported.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*
* @return Whether the updateMask field is set.
*/
@java.lang.Override
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Strongly recommended for the best service experience.
*
* If [update_mask][google.cloud.talent.v4.UpdateTenantRequest.update_mask] is
* provided, only the specified fields in
* [tenant][google.cloud.talent.v4.UpdateTenantRequest.tenant] are updated.
* Otherwise all the fields are updated.
*
* A field mask to specify the tenant fields to be updated. Only
* top level fields of [Tenant][google.cloud.talent.v4.Tenant] are supported.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*
* @return The updateMask.
*/
@java.lang.Override
public com.google.protobuf.FieldMask getUpdateMask() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
/**
*
*
* <pre>
* Strongly recommended for the best service experience.
*
* If [update_mask][google.cloud.talent.v4.UpdateTenantRequest.update_mask] is
* provided, only the specified fields in
* [tenant][google.cloud.talent.v4.UpdateTenantRequest.tenant] are updated.
* Otherwise all the fields are updated.
*
* A field mask to specify the tenant fields to be updated. Only
* top level fields of [Tenant][google.cloud.talent.v4.Tenant] are supported.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
@java.lang.Override
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getTenant());
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeMessage(2, getUpdateMask());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getTenant());
}
if (((bitField0_ & 0x00000002) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.talent.v4.UpdateTenantRequest)) {
return super.equals(obj);
}
com.google.cloud.talent.v4.UpdateTenantRequest other =
(com.google.cloud.talent.v4.UpdateTenantRequest) obj;
if (hasTenant() != other.hasTenant()) return false;
if (hasTenant()) {
if (!getTenant().equals(other.getTenant())) return false;
}
if (hasUpdateMask() != other.hasUpdateMask()) return false;
if (hasUpdateMask()) {
if (!getUpdateMask().equals(other.getUpdateMask())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasTenant()) {
hash = (37 * hash) + TENANT_FIELD_NUMBER;
hash = (53 * hash) + getTenant().hashCode();
}
if (hasUpdateMask()) {
hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
hash = (53 * hash) + getUpdateMask().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.talent.v4.UpdateTenantRequest parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.talent.v4.UpdateTenantRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.talent.v4.UpdateTenantRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.talent.v4.UpdateTenantRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.talent.v4.UpdateTenantRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.talent.v4.UpdateTenantRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.talent.v4.UpdateTenantRequest parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.talent.v4.UpdateTenantRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.talent.v4.UpdateTenantRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.talent.v4.UpdateTenantRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.talent.v4.UpdateTenantRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.talent.v4.UpdateTenantRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.talent.v4.UpdateTenantRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request for updating a specified tenant.
* </pre>
*
* Protobuf type {@code google.cloud.talent.v4.UpdateTenantRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.talent.v4.UpdateTenantRequest)
com.google.cloud.talent.v4.UpdateTenantRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.talent.v4.TenantServiceProto
.internal_static_google_cloud_talent_v4_UpdateTenantRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.talent.v4.TenantServiceProto
.internal_static_google_cloud_talent_v4_UpdateTenantRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.talent.v4.UpdateTenantRequest.class,
com.google.cloud.talent.v4.UpdateTenantRequest.Builder.class);
}
// Construct using com.google.cloud.talent.v4.UpdateTenantRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getTenantFieldBuilder();
getUpdateMaskFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
tenant_ = null;
if (tenantBuilder_ != null) {
tenantBuilder_.dispose();
tenantBuilder_ = null;
}
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.talent.v4.TenantServiceProto
.internal_static_google_cloud_talent_v4_UpdateTenantRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.talent.v4.UpdateTenantRequest getDefaultInstanceForType() {
return com.google.cloud.talent.v4.UpdateTenantRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.talent.v4.UpdateTenantRequest build() {
com.google.cloud.talent.v4.UpdateTenantRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.talent.v4.UpdateTenantRequest buildPartial() {
com.google.cloud.talent.v4.UpdateTenantRequest result =
new com.google.cloud.talent.v4.UpdateTenantRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.talent.v4.UpdateTenantRequest result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.tenant_ = tenantBuilder_ == null ? tenant_ : tenantBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
to_bitField0_ |= 0x00000002;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.talent.v4.UpdateTenantRequest) {
return mergeFrom((com.google.cloud.talent.v4.UpdateTenantRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.talent.v4.UpdateTenantRequest other) {
if (other == com.google.cloud.talent.v4.UpdateTenantRequest.getDefaultInstance()) return this;
if (other.hasTenant()) {
mergeTenant(other.getTenant());
}
if (other.hasUpdateMask()) {
mergeUpdateMask(other.getUpdateMask());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
input.readMessage(getTenantFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private com.google.cloud.talent.v4.Tenant tenant_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.talent.v4.Tenant,
com.google.cloud.talent.v4.Tenant.Builder,
com.google.cloud.talent.v4.TenantOrBuilder>
tenantBuilder_;
/**
*
*
* <pre>
* Required. The tenant resource to replace the current resource in the
* system.
* </pre>
*
* <code>.google.cloud.talent.v4.Tenant tenant = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the tenant field is set.
*/
public boolean hasTenant() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The tenant resource to replace the current resource in the
* system.
* </pre>
*
* <code>.google.cloud.talent.v4.Tenant tenant = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The tenant.
*/
public com.google.cloud.talent.v4.Tenant getTenant() {
if (tenantBuilder_ == null) {
return tenant_ == null ? com.google.cloud.talent.v4.Tenant.getDefaultInstance() : tenant_;
} else {
return tenantBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. The tenant resource to replace the current resource in the
* system.
* </pre>
*
* <code>.google.cloud.talent.v4.Tenant tenant = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setTenant(com.google.cloud.talent.v4.Tenant value) {
if (tenantBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
tenant_ = value;
} else {
tenantBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The tenant resource to replace the current resource in the
* system.
* </pre>
*
* <code>.google.cloud.talent.v4.Tenant tenant = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setTenant(com.google.cloud.talent.v4.Tenant.Builder builderForValue) {
if (tenantBuilder_ == null) {
tenant_ = builderForValue.build();
} else {
tenantBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The tenant resource to replace the current resource in the
* system.
* </pre>
*
* <code>.google.cloud.talent.v4.Tenant tenant = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeTenant(com.google.cloud.talent.v4.Tenant value) {
if (tenantBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)
&& tenant_ != null
&& tenant_ != com.google.cloud.talent.v4.Tenant.getDefaultInstance()) {
getTenantBuilder().mergeFrom(value);
} else {
tenant_ = value;
}
} else {
tenantBuilder_.mergeFrom(value);
}
if (tenant_ != null) {
bitField0_ |= 0x00000001;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. The tenant resource to replace the current resource in the
* system.
* </pre>
*
* <code>.google.cloud.talent.v4.Tenant tenant = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearTenant() {
bitField0_ = (bitField0_ & ~0x00000001);
tenant_ = null;
if (tenantBuilder_ != null) {
tenantBuilder_.dispose();
tenantBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The tenant resource to replace the current resource in the
* system.
* </pre>
*
* <code>.google.cloud.talent.v4.Tenant tenant = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.talent.v4.Tenant.Builder getTenantBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getTenantFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. The tenant resource to replace the current resource in the
* system.
* </pre>
*
* <code>.google.cloud.talent.v4.Tenant tenant = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.talent.v4.TenantOrBuilder getTenantOrBuilder() {
if (tenantBuilder_ != null) {
return tenantBuilder_.getMessageOrBuilder();
} else {
return tenant_ == null ? com.google.cloud.talent.v4.Tenant.getDefaultInstance() : tenant_;
}
}
/**
*
*
* <pre>
* Required. The tenant resource to replace the current resource in the
* system.
* </pre>
*
* <code>.google.cloud.talent.v4.Tenant tenant = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.talent.v4.Tenant,
com.google.cloud.talent.v4.Tenant.Builder,
com.google.cloud.talent.v4.TenantOrBuilder>
getTenantFieldBuilder() {
if (tenantBuilder_ == null) {
tenantBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.talent.v4.Tenant,
com.google.cloud.talent.v4.Tenant.Builder,
com.google.cloud.talent.v4.TenantOrBuilder>(
getTenant(), getParentForChildren(), isClean());
tenant_ = null;
}
return tenantBuilder_;
}
private com.google.protobuf.FieldMask updateMask_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
updateMaskBuilder_;
/**
*
*
* <pre>
* Strongly recommended for the best service experience.
*
* If [update_mask][google.cloud.talent.v4.UpdateTenantRequest.update_mask] is
* provided, only the specified fields in
* [tenant][google.cloud.talent.v4.UpdateTenantRequest.tenant] are updated.
* Otherwise all the fields are updated.
*
* A field mask to specify the tenant fields to be updated. Only
* top level fields of [Tenant][google.cloud.talent.v4.Tenant] are supported.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*
* @return Whether the updateMask field is set.
*/
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Strongly recommended for the best service experience.
*
* If [update_mask][google.cloud.talent.v4.UpdateTenantRequest.update_mask] is
* provided, only the specified fields in
* [tenant][google.cloud.talent.v4.UpdateTenantRequest.tenant] are updated.
* Otherwise all the fields are updated.
*
* A field mask to specify the tenant fields to be updated. Only
* top level fields of [Tenant][google.cloud.talent.v4.Tenant] are supported.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*
* @return The updateMask.
*/
public com.google.protobuf.FieldMask getUpdateMask() {
if (updateMaskBuilder_ == null) {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
} else {
return updateMaskBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Strongly recommended for the best service experience.
*
* If [update_mask][google.cloud.talent.v4.UpdateTenantRequest.update_mask] is
* provided, only the specified fields in
* [tenant][google.cloud.talent.v4.UpdateTenantRequest.tenant] are updated.
* Otherwise all the fields are updated.
*
* A field mask to specify the tenant fields to be updated. Only
* top level fields of [Tenant][google.cloud.talent.v4.Tenant] are supported.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
updateMask_ = value;
} else {
updateMaskBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Strongly recommended for the best service experience.
*
* If [update_mask][google.cloud.talent.v4.UpdateTenantRequest.update_mask] is
* provided, only the specified fields in
* [tenant][google.cloud.talent.v4.UpdateTenantRequest.tenant] are updated.
* Otherwise all the fields are updated.
*
* A field mask to specify the tenant fields to be updated. Only
* top level fields of [Tenant][google.cloud.talent.v4.Tenant] are supported.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
if (updateMaskBuilder_ == null) {
updateMask_ = builderForValue.build();
} else {
updateMaskBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Strongly recommended for the best service experience.
*
* If [update_mask][google.cloud.talent.v4.UpdateTenantRequest.update_mask] is
* provided, only the specified fields in
* [tenant][google.cloud.talent.v4.UpdateTenantRequest.tenant] are updated.
* Otherwise all the fields are updated.
*
* A field mask to specify the tenant fields to be updated. Only
* top level fields of [Tenant][google.cloud.talent.v4.Tenant] are supported.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)
&& updateMask_ != null
&& updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
getUpdateMaskBuilder().mergeFrom(value);
} else {
updateMask_ = value;
}
} else {
updateMaskBuilder_.mergeFrom(value);
}
if (updateMask_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Strongly recommended for the best service experience.
*
* If [update_mask][google.cloud.talent.v4.UpdateTenantRequest.update_mask] is
* provided, only the specified fields in
* [tenant][google.cloud.talent.v4.UpdateTenantRequest.tenant] are updated.
* Otherwise all the fields are updated.
*
* A field mask to specify the tenant fields to be updated. Only
* top level fields of [Tenant][google.cloud.talent.v4.Tenant] are supported.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public Builder clearUpdateMask() {
bitField0_ = (bitField0_ & ~0x00000002);
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Strongly recommended for the best service experience.
*
* If [update_mask][google.cloud.talent.v4.UpdateTenantRequest.update_mask] is
* provided, only the specified fields in
* [tenant][google.cloud.talent.v4.UpdateTenantRequest.tenant] are updated.
* Otherwise all the fields are updated.
*
* A field mask to specify the tenant fields to be updated. Only
* top level fields of [Tenant][google.cloud.talent.v4.Tenant] are supported.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getUpdateMaskFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Strongly recommended for the best service experience.
*
* If [update_mask][google.cloud.talent.v4.UpdateTenantRequest.update_mask] is
* provided, only the specified fields in
* [tenant][google.cloud.talent.v4.UpdateTenantRequest.tenant] are updated.
* Otherwise all the fields are updated.
*
* A field mask to specify the tenant fields to be updated. Only
* top level fields of [Tenant][google.cloud.talent.v4.Tenant] are supported.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
if (updateMaskBuilder_ != null) {
return updateMaskBuilder_.getMessageOrBuilder();
} else {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
}
}
/**
*
*
* <pre>
* Strongly recommended for the best service experience.
*
* If [update_mask][google.cloud.talent.v4.UpdateTenantRequest.update_mask] is
* provided, only the specified fields in
* [tenant][google.cloud.talent.v4.UpdateTenantRequest.tenant] are updated.
* Otherwise all the fields are updated.
*
* A field mask to specify the tenant fields to be updated. Only
* top level fields of [Tenant][google.cloud.talent.v4.Tenant] are supported.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
  // Lazily creates the SingleFieldBuilderV3 that manages the update_mask
  // sub-message. On first use the current message value seeds the builder and
  // the raw field reference is cleared so the builder becomes the single
  // source of truth for the field from then on.
  private com.google.protobuf.SingleFieldBuilderV3<
          com.google.protobuf.FieldMask,
          com.google.protobuf.FieldMask.Builder,
          com.google.protobuf.FieldMaskOrBuilder>
      getUpdateMaskFieldBuilder() {
    if (updateMaskBuilder_ == null) {
      updateMaskBuilder_ =
          new com.google.protobuf.SingleFieldBuilderV3<
              com.google.protobuf.FieldMask,
              com.google.protobuf.FieldMask.Builder,
              com.google.protobuf.FieldMaskOrBuilder>(
              getUpdateMask(), getParentForChildren(), isClean());
      // Hand ownership of the value to the builder.
      updateMask_ = null;
    }
    return updateMaskBuilder_;
  }
  // Boilerplate overrides that simply delegate unknown-field handling to
  // GeneratedMessageV3.Builder; present so protoc-generated subclasses keep a
  // stable final signature.
  @java.lang.Override
  public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.setUnknownFields(unknownFields);
  }

  @java.lang.Override
  public final Builder mergeUnknownFields(
      final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.mergeUnknownFields(unknownFields);
  }
// @@protoc_insertion_point(builder_scope:google.cloud.talent.v4.UpdateTenantRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.talent.v4.UpdateTenantRequest)
  // Singleton default (all-fields-unset) instance, created eagerly at class
  // load. All empty messages of this type share this one object.
  private static final com.google.cloud.talent.v4.UpdateTenantRequest DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.talent.v4.UpdateTenantRequest();
  }

  public static com.google.cloud.talent.v4.UpdateTenantRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Shared stream parser. parsePartialFrom delegates to Builder.mergeFrom and
  // attaches the partially built message to any parse exception so callers can
  // inspect whatever was successfully read before the failure.
  private static final com.google.protobuf.Parser<UpdateTenantRequest> PARSER =
      new com.google.protobuf.AbstractParser<UpdateTenantRequest>() {
        @java.lang.Override
        public UpdateTenantRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O failures in the protobuf-specific exception type.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  // Static and instance accessors for the singleton parser / default instance.
  public static com.google.protobuf.Parser<UpdateTenantRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<UpdateTenantRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.talent.v4.UpdateTenantRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
// ===== Following content: googleapis/google-cloud-java —
// java-translate/proto-google-cloud-translate-v3beta1/src/main/java/com/google/cloud/translate/v3beta1/BatchDocumentInputConfig.java =====
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/translate/v3beta1/translation_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.translate.v3beta1;
/**
*
*
* <pre>
* Input configuration for BatchTranslateDocument request.
* </pre>
*
* Protobuf type {@code google.cloud.translation.v3beta1.BatchDocumentInputConfig}
*/
// Immutable message for the BatchTranslateDocument input configuration.
// Generated by protoc from translation_service.proto — do not hand-edit.
public final class BatchDocumentInputConfig extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.translation.v3beta1.BatchDocumentInputConfig)
    BatchDocumentInputConfigOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use BatchDocumentInputConfig.newBuilder() to construct.
  private BatchDocumentInputConfig(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // No-arg constructor used only for the shared default instance.
  private BatchDocumentInputConfig() {}

  // Reflective instantiation hook used by the protobuf runtime.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new BatchDocumentInputConfig();
  }
  // Descriptor plumbing: both methods resolve against the tables that
  // TranslationServiceProto builds from the compiled .proto file.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.translate.v3beta1.TranslationServiceProto
        .internal_static_google_cloud_translation_v3beta1_BatchDocumentInputConfig_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.translate.v3beta1.TranslationServiceProto
        .internal_static_google_cloud_translation_v3beta1_BatchDocumentInputConfig_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.translate.v3beta1.BatchDocumentInputConfig.class,
            com.google.cloud.translate.v3beta1.BatchDocumentInputConfig.Builder.class);
  }
  // State for the `source` oneof: sourceCase_ holds the active field number
  // (0 = unset) and source_ holds the corresponding value as Object.
  private int sourceCase_ = 0;

  @SuppressWarnings("serial")
  private java.lang.Object source_;

  // Enum mirroring the `source` oneof cases; values match field numbers.
  public enum SourceCase
      implements
          com.google.protobuf.Internal.EnumLite,
          com.google.protobuf.AbstractMessage.InternalOneOfEnum {
    GCS_SOURCE(1),
    SOURCE_NOT_SET(0);
    private final int value;

    private SourceCase(int value) {
      this.value = value;
    }

    /**
     * @param value The number of the enum to look for.
     * @return The enum associated with the given number.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static SourceCase valueOf(int value) {
      return forNumber(value);
    }

    // Maps a field number to its case; returns null for unknown numbers
    // (unlike valueOf, it never throws).
    public static SourceCase forNumber(int value) {
      switch (value) {
        case 1:
          return GCS_SOURCE;
        case 0:
          return SOURCE_NOT_SET;
        default:
          return null;
      }
    }

    public int getNumber() {
      return this.value;
    }
  };

  // Which member of the `source` oneof is currently set.
  public SourceCase getSourceCase() {
    return SourceCase.forNumber(sourceCase_);
  }
public static final int GCS_SOURCE_FIELD_NUMBER = 1;
/**
*
*
* <pre>
* Google Cloud Storage location for the source input.
* This can be a single file (for example,
* `gs://translation-test/input.docx`) or a wildcard (for example,
* `gs://translation-test/*`).
*
* File mime type is determined based on extension. Supported mime type
* includes:
* - `pdf`, application/pdf
* - `docx`,
* application/vnd.openxmlformats-officedocument.wordprocessingml.document
* - `pptx`,
* application/vnd.openxmlformats-officedocument.presentationml.presentation
* - `xlsx`,
* application/vnd.openxmlformats-officedocument.spreadsheetml.sheet
*
* The max file size to support for `.docx`, `.pptx` and `.xlsx` is 100MB.
* The max file size to support for `.pdf` is 1GB and the max page limit is
* 1000 pages.
* The max file size to support for all input documents is 1GB.
* </pre>
*
* <code>.google.cloud.translation.v3beta1.GcsSource gcs_source = 1;</code>
*
* @return Whether the gcsSource field is set.
*/
  // True iff the `source` oneof currently holds gcs_source (field number 1).
  @java.lang.Override
  public boolean hasGcsSource() {
    return sourceCase_ == 1;
  }
/**
*
*
* <pre>
* Google Cloud Storage location for the source input.
* This can be a single file (for example,
* `gs://translation-test/input.docx`) or a wildcard (for example,
* `gs://translation-test/*`).
*
* File mime type is determined based on extension. Supported mime type
* includes:
* - `pdf`, application/pdf
* - `docx`,
* application/vnd.openxmlformats-officedocument.wordprocessingml.document
* - `pptx`,
* application/vnd.openxmlformats-officedocument.presentationml.presentation
* - `xlsx`,
* application/vnd.openxmlformats-officedocument.spreadsheetml.sheet
*
* The max file size to support for `.docx`, `.pptx` and `.xlsx` is 100MB.
* The max file size to support for `.pdf` is 1GB and the max page limit is
* 1000 pages.
* The max file size to support for all input documents is 1GB.
* </pre>
*
* <code>.google.cloud.translation.v3beta1.GcsSource gcs_source = 1;</code>
*
* @return The gcsSource.
*/
  // Returns gcs_source when it is the active oneof case; otherwise the
  // GcsSource default instance (never null).
  @java.lang.Override
  public com.google.cloud.translate.v3beta1.GcsSource getGcsSource() {
    if (sourceCase_ == 1) {
      return (com.google.cloud.translate.v3beta1.GcsSource) source_;
    }
    return com.google.cloud.translate.v3beta1.GcsSource.getDefaultInstance();
  }
/**
*
*
* <pre>
* Google Cloud Storage location for the source input.
* This can be a single file (for example,
* `gs://translation-test/input.docx`) or a wildcard (for example,
* `gs://translation-test/*`).
*
* File mime type is determined based on extension. Supported mime type
* includes:
* - `pdf`, application/pdf
* - `docx`,
* application/vnd.openxmlformats-officedocument.wordprocessingml.document
* - `pptx`,
* application/vnd.openxmlformats-officedocument.presentationml.presentation
* - `xlsx`,
* application/vnd.openxmlformats-officedocument.spreadsheetml.sheet
*
* The max file size to support for `.docx`, `.pptx` and `.xlsx` is 100MB.
* The max file size to support for `.pdf` is 1GB and the max page limit is
* 1000 pages.
* The max file size to support for all input documents is 1GB.
* </pre>
*
* <code>.google.cloud.translation.v3beta1.GcsSource gcs_source = 1;</code>
*/
  // Read-only (OrBuilder) view of gcs_source; on the immutable message this is
  // identical to getGcsSource().
  @java.lang.Override
  public com.google.cloud.translate.v3beta1.GcsSourceOrBuilder getGcsSourceOrBuilder() {
    if (sourceCase_ == 1) {
      return (com.google.cloud.translate.v3beta1.GcsSource) source_;
    }
    return com.google.cloud.translate.v3beta1.GcsSource.getDefaultInstance();
  }
  // Memoized initialization check: -1 = unknown, 0 = false, 1 = true.
  // This message has no required fields, so the result is always true.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }

  // Serializes the active oneof member (tag 1) followed by any unknown fields.
  // The emission order here must stay in sync with getSerializedSize().
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (sourceCase_ == 1) {
      output.writeMessage(1, (com.google.cloud.translate.v3beta1.GcsSource) source_);
    }
    getUnknownFields().writeTo(output);
  }
  // Computes (and memoizes in memoizedSize; -1 = not yet computed) the exact
  // byte size that writeTo() will emit: the gcs_source sub-message when set,
  // plus unknown fields.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (sourceCase_ == 1) {
      size +=
          com.google.protobuf.CodedOutputStream.computeMessageSize(
              1, (com.google.cloud.translate.v3beta1.GcsSource) source_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Structural equality: same oneof case, equal active member, and equal
  // unknown fields. Non-BatchDocumentInputConfig arguments defer to super.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.translate.v3beta1.BatchDocumentInputConfig)) {
      return super.equals(obj);
    }
    com.google.cloud.translate.v3beta1.BatchDocumentInputConfig other =
        (com.google.cloud.translate.v3beta1.BatchDocumentInputConfig) obj;

    if (!getSourceCase().equals(other.getSourceCase())) return false;
    switch (sourceCase_) {
      case 1:
        if (!getGcsSource().equals(other.getGcsSource())) return false;
        break;
      case 0:
      default:
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Memoized hash (0 = not yet computed) mixing the descriptor, the active
  // oneof member, and unknown fields. The 19/37/53/29 multipliers are fixed by
  // the protobuf generator and must not change (wire-compatible hash layout).
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    switch (sourceCase_) {
      case 1:
        hash = (37 * hash) + GCS_SOURCE_FIELD_NUMBER;
        hash = (53 * hash) + getGcsSource().hashCode();
        break;
      case 0:
      default:
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points. All overloads funnel into the
  // shared PARSER (optionally via GeneratedMessageV3 helpers that translate
  // IOExceptions); the *Delimited variants read a varint length prefix first.
  public static com.google.cloud.translate.v3beta1.BatchDocumentInputConfig parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.translate.v3beta1.BatchDocumentInputConfig parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.translate.v3beta1.BatchDocumentInputConfig parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.translate.v3beta1.BatchDocumentInputConfig parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.translate.v3beta1.BatchDocumentInputConfig parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.translate.v3beta1.BatchDocumentInputConfig parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.translate.v3beta1.BatchDocumentInputConfig parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.translate.v3beta1.BatchDocumentInputConfig parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.translate.v3beta1.BatchDocumentInputConfig parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.translate.v3beta1.BatchDocumentInputConfig parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.translate.v3beta1.BatchDocumentInputConfig parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.translate.v3beta1.BatchDocumentInputConfig parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Builder factory methods. newBuilder() starts from the default instance;
  // newBuilder(prototype) pre-populates from an existing message; toBuilder()
  // avoids a needless mergeFrom when called on the default instance itself.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.cloud.translate.v3beta1.BatchDocumentInputConfig prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  // Internal variant wiring the builder to a parent for change notification.
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Input configuration for BatchTranslateDocument request.
* </pre>
*
* Protobuf type {@code google.cloud.translation.v3beta1.BatchDocumentInputConfig}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.translation.v3beta1.BatchDocumentInputConfig)
com.google.cloud.translate.v3beta1.BatchDocumentInputConfigOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.translate.v3beta1.TranslationServiceProto
.internal_static_google_cloud_translation_v3beta1_BatchDocumentInputConfig_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.translate.v3beta1.TranslationServiceProto
.internal_static_google_cloud_translation_v3beta1_BatchDocumentInputConfig_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.translate.v3beta1.BatchDocumentInputConfig.class,
com.google.cloud.translate.v3beta1.BatchDocumentInputConfig.Builder.class);
}
    // Construct using com.google.cloud.translate.v3beta1.BatchDocumentInputConfig.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    // Resets the builder to the default state: clears field-presence bits, the
    // nested gcs_source builder (if one was created), and the oneof state.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (gcsSourceBuilder_ != null) {
        gcsSourceBuilder_.clear();
      }
      sourceCase_ = 0;
      source_ = null;
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.translate.v3beta1.TranslationServiceProto
          .internal_static_google_cloud_translation_v3beta1_BatchDocumentInputConfig_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.translate.v3beta1.BatchDocumentInputConfig getDefaultInstanceForType() {
      return com.google.cloud.translate.v3beta1.BatchDocumentInputConfig.getDefaultInstance();
    }

    // Builds the message, rejecting uninitialized results. Since this message
    // has no required fields, isInitialized() is always true in practice.
    @java.lang.Override
    public com.google.cloud.translate.v3beta1.BatchDocumentInputConfig build() {
      com.google.cloud.translate.v3beta1.BatchDocumentInputConfig result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    // Builds without the initialization check, copying plain fields
    // (buildPartial0 — empty here, kept by the generator for uniformity) and
    // then the oneof state.
    @java.lang.Override
    public com.google.cloud.translate.v3beta1.BatchDocumentInputConfig buildPartial() {
      com.google.cloud.translate.v3beta1.BatchDocumentInputConfig result =
          new com.google.cloud.translate.v3beta1.BatchDocumentInputConfig(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      buildPartialOneofs(result);
      onBuilt();
      return result;
    }

    private void buildPartial0(com.google.cloud.translate.v3beta1.BatchDocumentInputConfig result) {
      int from_bitField0_ = bitField0_;
    }

    // Copies the oneof case and value; when the gcs_source sub-builder is
    // active it is built and overrides the raw source_ reference.
    private void buildPartialOneofs(
        com.google.cloud.translate.v3beta1.BatchDocumentInputConfig result) {
      result.sourceCase_ = sourceCase_;
      result.source_ = this.source_;
      if (sourceCase_ == 1 && gcsSourceBuilder_ != null) {
        result.source_ = gcsSourceBuilder_.build();
      }
    }
    // Covariant-return overrides that delegate straight to
    // GeneratedMessageV3.Builder so chained calls keep the concrete type.
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    // Generic merge: downcasts to the typed overload when possible, otherwise
    // defers to the reflective merge in the superclass.
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.translate.v3beta1.BatchDocumentInputConfig) {
        return mergeFrom((com.google.cloud.translate.v3beta1.BatchDocumentInputConfig) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Typed merge: a no-op for the default instance; otherwise merges the
    // active oneof member and the unknown fields, then signals a change.
    public Builder mergeFrom(com.google.cloud.translate.v3beta1.BatchDocumentInputConfig other) {
      if (other == com.google.cloud.translate.v3beta1.BatchDocumentInputConfig.getDefaultInstance())
        return this;
      switch (other.getSourceCase()) {
        case GCS_SOURCE:
          {
            mergeGcsSource(other.getGcsSource());
            break;
          }
        case SOURCE_NOT_SET:
          {
            break;
          }
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    // Always true: this message declares no required fields.
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    // Wire-format merge loop: tag 10 (field 1, length-delimited) is parsed into
    // the gcs_source sub-builder; any other tag goes through
    // parseUnknownField, which also detects end-group/stream termination.
    // onChanged() runs in the finally block so partial reads still notify.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                input.readMessage(getGcsSourceFieldBuilder().getBuilder(), extensionRegistry);
                sourceCase_ = 1;
                break;
              } // case 10
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Builder-side oneof state (mirrors the message fields of the same names).
    private int sourceCase_ = 0;
    private java.lang.Object source_;

    public SourceCase getSourceCase() {
      return SourceCase.forNumber(sourceCase_);
    }

    // Clears whichever member of the `source` oneof is set.
    public Builder clearSource() {
      sourceCase_ = 0;
      source_ = null;
      onChanged();
      return this;
    }

    // Field-presence bits used by buildPartial0 (unused for oneof-only messages).
    private int bitField0_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.translate.v3beta1.GcsSource,
com.google.cloud.translate.v3beta1.GcsSource.Builder,
com.google.cloud.translate.v3beta1.GcsSourceOrBuilder>
gcsSourceBuilder_;
/**
*
*
* <pre>
* Google Cloud Storage location for the source input.
* This can be a single file (for example,
* `gs://translation-test/input.docx`) or a wildcard (for example,
* `gs://translation-test/*`).
*
* File mime type is determined based on extension. Supported mime type
* includes:
* - `pdf`, application/pdf
* - `docx`,
* application/vnd.openxmlformats-officedocument.wordprocessingml.document
* - `pptx`,
* application/vnd.openxmlformats-officedocument.presentationml.presentation
* - `xlsx`,
* application/vnd.openxmlformats-officedocument.spreadsheetml.sheet
*
* The max file size to support for `.docx`, `.pptx` and `.xlsx` is 100MB.
* The max file size to support for `.pdf` is 1GB and the max page limit is
* 1000 pages.
* The max file size to support for all input documents is 1GB.
* </pre>
*
* <code>.google.cloud.translation.v3beta1.GcsSource gcs_source = 1;</code>
*
* @return Whether the gcsSource field is set.
*/
@java.lang.Override
public boolean hasGcsSource() {
return sourceCase_ == 1;
}
/**
*
*
* <pre>
* Google Cloud Storage location for the source input.
* This can be a single file (for example,
* `gs://translation-test/input.docx`) or a wildcard (for example,
* `gs://translation-test/*`).
*
* File mime type is determined based on extension. Supported mime type
* includes:
* - `pdf`, application/pdf
* - `docx`,
* application/vnd.openxmlformats-officedocument.wordprocessingml.document
* - `pptx`,
* application/vnd.openxmlformats-officedocument.presentationml.presentation
* - `xlsx`,
* application/vnd.openxmlformats-officedocument.spreadsheetml.sheet
*
* The max file size to support for `.docx`, `.pptx` and `.xlsx` is 100MB.
* The max file size to support for `.pdf` is 1GB and the max page limit is
* 1000 pages.
* The max file size to support for all input documents is 1GB.
* </pre>
*
* <code>.google.cloud.translation.v3beta1.GcsSource gcs_source = 1;</code>
*
* @return The gcsSource.
*/
@java.lang.Override
public com.google.cloud.translate.v3beta1.GcsSource getGcsSource() {
if (gcsSourceBuilder_ == null) {
if (sourceCase_ == 1) {
return (com.google.cloud.translate.v3beta1.GcsSource) source_;
}
return com.google.cloud.translate.v3beta1.GcsSource.getDefaultInstance();
} else {
if (sourceCase_ == 1) {
return gcsSourceBuilder_.getMessage();
}
return com.google.cloud.translate.v3beta1.GcsSource.getDefaultInstance();
}
}
/**
*
*
* <pre>
* Google Cloud Storage location for the source input.
* This can be a single file (for example,
* `gs://translation-test/input.docx`) or a wildcard (for example,
* `gs://translation-test/*`).
*
* File mime type is determined based on extension. Supported mime type
* includes:
* - `pdf`, application/pdf
* - `docx`,
* application/vnd.openxmlformats-officedocument.wordprocessingml.document
* - `pptx`,
* application/vnd.openxmlformats-officedocument.presentationml.presentation
* - `xlsx`,
* application/vnd.openxmlformats-officedocument.spreadsheetml.sheet
*
* The max file size to support for `.docx`, `.pptx` and `.xlsx` is 100MB.
* The max file size to support for `.pdf` is 1GB and the max page limit is
* 1000 pages.
* The max file size to support for all input documents is 1GB.
* </pre>
*
* <code>.google.cloud.translation.v3beta1.GcsSource gcs_source = 1;</code>
*/
    // Sets gcs_source as the active oneof member. Null is rejected; the value
    // goes to the sub-builder when one exists, otherwise is stored directly.
    public Builder setGcsSource(com.google.cloud.translate.v3beta1.GcsSource value) {
      if (gcsSourceBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        source_ = value;
        onChanged();
      } else {
        gcsSourceBuilder_.setMessage(value);
      }
      sourceCase_ = 1;
      return this;
    }
/**
*
*
* <pre>
* Google Cloud Storage location for the source input.
* This can be a single file (for example,
* `gs://translation-test/input.docx`) or a wildcard (for example,
* `gs://translation-test/*`).
*
* File mime type is determined based on extension. Supported mime type
* includes:
* - `pdf`, application/pdf
* - `docx`,
* application/vnd.openxmlformats-officedocument.wordprocessingml.document
* - `pptx`,
* application/vnd.openxmlformats-officedocument.presentationml.presentation
* - `xlsx`,
* application/vnd.openxmlformats-officedocument.spreadsheetml.sheet
*
* The max file size to support for `.docx`, `.pptx` and `.xlsx` is 100MB.
* The max file size to support for `.pdf` is 1GB and the max page limit is
* 1000 pages.
* The max file size to support for all input documents is 1GB.
* </pre>
*
* <code>.google.cloud.translation.v3beta1.GcsSource gcs_source = 1;</code>
*/
    // Builder-valued overload of setGcsSource: builds the argument immediately
    // and stores the resulting message, then activates the oneof case.
    public Builder setGcsSource(
        com.google.cloud.translate.v3beta1.GcsSource.Builder builderForValue) {
      if (gcsSourceBuilder_ == null) {
        source_ = builderForValue.build();
        onChanged();
      } else {
        gcsSourceBuilder_.setMessage(builderForValue.build());
      }
      sourceCase_ = 1;
      return this;
    }
/**
*
*
* <pre>
* Google Cloud Storage location for the source input.
* This can be a single file (for example,
* `gs://translation-test/input.docx`) or a wildcard (for example,
* `gs://translation-test/*`).
*
* File mime type is determined based on extension. Supported mime type
* includes:
* - `pdf`, application/pdf
* - `docx`,
* application/vnd.openxmlformats-officedocument.wordprocessingml.document
* - `pptx`,
* application/vnd.openxmlformats-officedocument.presentationml.presentation
* - `xlsx`,
* application/vnd.openxmlformats-officedocument.spreadsheetml.sheet
*
* The max file size to support for `.docx`, `.pptx` and `.xlsx` is 100MB.
* The max file size to support for `.pdf` is 1GB and the max page limit is
* 1000 pages.
* The max file size to support for all input documents is 1GB.
* </pre>
*
* <code>.google.cloud.translation.v3beta1.GcsSource gcs_source = 1;</code>
*/
    // Merges into gcs_source: if this case is already set with a non-default
    // value, field-merges the two messages; otherwise simply replaces the
    // value. Mirrors the same logic through the sub-builder when present.
    public Builder mergeGcsSource(com.google.cloud.translate.v3beta1.GcsSource value) {
      if (gcsSourceBuilder_ == null) {
        if (sourceCase_ == 1
            && source_ != com.google.cloud.translate.v3beta1.GcsSource.getDefaultInstance()) {
          source_ =
              com.google.cloud.translate.v3beta1.GcsSource.newBuilder(
                      (com.google.cloud.translate.v3beta1.GcsSource) source_)
                  .mergeFrom(value)
                  .buildPartial();
        } else {
          source_ = value;
        }
        onChanged();
      } else {
        if (sourceCase_ == 1) {
          gcsSourceBuilder_.mergeFrom(value);
        } else {
          gcsSourceBuilder_.setMessage(value);
        }
      }
      sourceCase_ = 1;
      return this;
    }
/**
*
*
* <pre>
* Google Cloud Storage location for the source input.
* This can be a single file (for example,
* `gs://translation-test/input.docx`) or a wildcard (for example,
* `gs://translation-test/*`).
*
* File mime type is determined based on extension. Supported mime type
* includes:
* - `pdf`, application/pdf
* - `docx`,
* application/vnd.openxmlformats-officedocument.wordprocessingml.document
* - `pptx`,
* application/vnd.openxmlformats-officedocument.presentationml.presentation
* - `xlsx`,
* application/vnd.openxmlformats-officedocument.spreadsheetml.sheet
*
* The max file size to support for `.docx`, `.pptx` and `.xlsx` is 100MB.
* The max file size to support for `.pdf` is 1GB and the max page limit is
* 1000 pages.
* The max file size to support for all input documents is 1GB.
* </pre>
*
* <code>.google.cloud.translation.v3beta1.GcsSource gcs_source = 1;</code>
*/
    // Clears gcs_source only if it is the active case; always resets the
    // sub-builder when one exists. onChanged() fires only when state changed.
    public Builder clearGcsSource() {
      if (gcsSourceBuilder_ == null) {
        if (sourceCase_ == 1) {
          sourceCase_ = 0;
          source_ = null;
          onChanged();
        }
      } else {
        if (sourceCase_ == 1) {
          sourceCase_ = 0;
          source_ = null;
        }
        gcsSourceBuilder_.clear();
      }
      return this;
    }
/**
*
*
* <pre>
* Google Cloud Storage location for the source input.
* This can be a single file (for example,
* `gs://translation-test/input.docx`) or a wildcard (for example,
* `gs://translation-test/*`).
*
* File mime type is determined based on extension. Supported mime type
* includes:
* - `pdf`, application/pdf
* - `docx`,
* application/vnd.openxmlformats-officedocument.wordprocessingml.document
* - `pptx`,
* application/vnd.openxmlformats-officedocument.presentationml.presentation
* - `xlsx`,
* application/vnd.openxmlformats-officedocument.spreadsheetml.sheet
*
* The max file size to support for `.docx`, `.pptx` and `.xlsx` is 100MB.
* The max file size to support for `.pdf` is 1GB and the max page limit is
* 1000 pages.
* The max file size to support for all input documents is 1GB.
* </pre>
*
* <code>.google.cloud.translation.v3beta1.GcsSource gcs_source = 1;</code>
*/
    // Mutable accessor: forces lazy creation of the field builder (which also
    // activates the oneof case) and returns its nested builder.
    public com.google.cloud.translate.v3beta1.GcsSource.Builder getGcsSourceBuilder() {
      return getGcsSourceFieldBuilder().getBuilder();
    }
/**
*
*
* <pre>
* Google Cloud Storage location for the source input.
* This can be a single file (for example,
* `gs://translation-test/input.docx`) or a wildcard (for example,
* `gs://translation-test/*`).
*
* File mime type is determined based on extension. Supported mime type
* includes:
* - `pdf`, application/pdf
* - `docx`,
* application/vnd.openxmlformats-officedocument.wordprocessingml.document
* - `pptx`,
* application/vnd.openxmlformats-officedocument.presentationml.presentation
* - `xlsx`,
* application/vnd.openxmlformats-officedocument.spreadsheetml.sheet
*
* The max file size to support for `.docx`, `.pptx` and `.xlsx` is 100MB.
* The max file size to support for `.pdf` is 1GB and the max page limit is
* 1000 pages.
* The max file size to support for all input documents is 1GB.
* </pre>
*
* <code>.google.cloud.translation.v3beta1.GcsSource gcs_source = 1;</code>
*/
    // Read-only view: prefers the live sub-builder when gcs_source is active,
    // then the stored message, then the GcsSource default instance.
    @java.lang.Override
    public com.google.cloud.translate.v3beta1.GcsSourceOrBuilder getGcsSourceOrBuilder() {
      if ((sourceCase_ == 1) && (gcsSourceBuilder_ != null)) {
        return gcsSourceBuilder_.getMessageOrBuilder();
      } else {
        if (sourceCase_ == 1) {
          return (com.google.cloud.translate.v3beta1.GcsSource) source_;
        }
        return com.google.cloud.translate.v3beta1.GcsSource.getDefaultInstance();
      }
    }
/**
*
*
* <pre>
* Google Cloud Storage location for the source input.
* This can be a single file (for example,
* `gs://translation-test/input.docx`) or a wildcard (for example,
* `gs://translation-test/*`).
*
* File mime type is determined based on extension. Supported mime type
* includes:
* - `pdf`, application/pdf
* - `docx`,
* application/vnd.openxmlformats-officedocument.wordprocessingml.document
* - `pptx`,
* application/vnd.openxmlformats-officedocument.presentationml.presentation
* - `xlsx`,
* application/vnd.openxmlformats-officedocument.spreadsheetml.sheet
*
* The max file size to support for `.docx`, `.pptx` and `.xlsx` is 100MB.
* The max file size to support for `.pdf` is 1GB and the max page limit is
* 1000 pages.
* The max file size to support for all input documents is 1GB.
* </pre>
*
* <code>.google.cloud.translation.v3beta1.GcsSource gcs_source = 1;</code>
*/
    // Lazily creates the SingleFieldBuilderV3 for gcs_source, seeding it with
    // the current value (or the default if another oneof case was active) and
    // transferring ownership from source_ to the builder. Also marks
    // gcs_source as the active oneof case and notifies the parent.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.translate.v3beta1.GcsSource,
            com.google.cloud.translate.v3beta1.GcsSource.Builder,
            com.google.cloud.translate.v3beta1.GcsSourceOrBuilder>
        getGcsSourceFieldBuilder() {
      if (gcsSourceBuilder_ == null) {
        if (!(sourceCase_ == 1)) {
          source_ = com.google.cloud.translate.v3beta1.GcsSource.getDefaultInstance();
        }
        gcsSourceBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.translate.v3beta1.GcsSource,
                com.google.cloud.translate.v3beta1.GcsSource.Builder,
                com.google.cloud.translate.v3beta1.GcsSourceOrBuilder>(
                (com.google.cloud.translate.v3beta1.GcsSource) source_,
                getParentForChildren(),
                isClean());
        source_ = null;
      }
      sourceCase_ = 1;
      onChanged();
      return gcsSourceBuilder_;
    }
    // Unknown-field handling is delegated unchanged to the generated superclass;
    // declared final so subclasses cannot alter wire-compatibility behavior.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.translation.v3beta1.BatchDocumentInputConfig)
}
// @@protoc_insertion_point(class_scope:google.cloud.translation.v3beta1.BatchDocumentInputConfig)
  // Shared immutable default instance for this message type.
  private static final com.google.cloud.translate.v3beta1.BatchDocumentInputConfig DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.translate.v3beta1.BatchDocumentInputConfig();
  }
  public static com.google.cloud.translate.v3beta1.BatchDocumentInputConfig getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser that builds a partial message from a wire stream; on any parse error the
  // partially built message is attached to the thrown InvalidProtocolBufferException.
  private static final com.google.protobuf.Parser<BatchDocumentInputConfig> PARSER =
      new com.google.protobuf.AbstractParser<BatchDocumentInputConfig>() {
        @java.lang.Override
        public BatchDocumentInputConfig parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<BatchDocumentInputConfig> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<BatchDocumentInputConfig> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.translate.v3beta1.BatchDocumentInputConfig getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/maven-enforcer | 36,853 | enforcer-rules/src/main/java/org/apache/maven/enforcer/rules/RequirePluginVersions.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.maven.enforcer.rules;
import javax.inject.Inject;
import javax.inject.Named;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import org.apache.maven.BuildFailureException;
import org.apache.maven.RepositoryUtils;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.factory.ArtifactFactory;
import org.apache.maven.artifact.repository.ArtifactRepository;
import org.apache.maven.artifact.resolver.ArtifactNotFoundException;
import org.apache.maven.artifact.versioning.InvalidVersionSpecificationException;
import org.apache.maven.artifact.versioning.VersionRange;
import org.apache.maven.enforcer.rule.api.EnforcerRuleError;
import org.apache.maven.enforcer.rule.api.EnforcerRuleException;
import org.apache.maven.enforcer.rules.utils.EnforcerRuleUtils;
import org.apache.maven.enforcer.rules.utils.ExpressionEvaluator;
import org.apache.maven.enforcer.rules.utils.PluginWrapper;
import org.apache.maven.execution.MavenSession;
import org.apache.maven.lifecycle.DefaultLifecycles;
import org.apache.maven.lifecycle.Lifecycle;
import org.apache.maven.lifecycle.LifecycleExecutionException;
import org.apache.maven.lifecycle.mapping.LifecycleMapping;
import org.apache.maven.model.BuildBase;
import org.apache.maven.model.Model;
import org.apache.maven.model.ModelBase;
import org.apache.maven.model.Plugin;
import org.apache.maven.model.PluginConfiguration;
import org.apache.maven.model.PluginContainer;
import org.apache.maven.model.Profile;
import org.apache.maven.model.ReportPlugin;
import org.apache.maven.model.Reporting;
import org.apache.maven.plugin.InvalidPluginException;
import org.apache.maven.plugin.PluginManager;
import org.apache.maven.plugin.PluginManagerException;
import org.apache.maven.plugin.PluginNotFoundException;
import org.apache.maven.plugin.version.PluginVersionNotFoundException;
import org.apache.maven.plugin.version.PluginVersionResolutionException;
import org.apache.maven.project.MavenProject;
import org.apache.maven.rtinfo.RuntimeInformation;
import org.apache.maven.settings.Settings;
import org.codehaus.plexus.PlexusContainer;
import org.codehaus.plexus.component.configurator.expression.ExpressionEvaluationException;
import org.codehaus.plexus.component.repository.exception.ComponentLookupException;
import org.codehaus.plexus.util.StringUtils;
import org.eclipse.aether.RepositorySystem;
import org.eclipse.aether.resolution.ArtifactRequest;
import org.eclipse.aether.resolution.ArtifactResolutionException;
import static java.util.Optional.ofNullable;
/**
* This rule will enforce that all plugins specified in the poms have a version declared.
*
* @author <a href="mailto:brianf@apache.org">Brian Fox</a>
*/
@Named("requirePluginVersions")
public final class RequirePluginVersions extends AbstractStandardEnforcerRule {
    /**
     * Don't allow the LATEST identifier.
     */
    private boolean banLatest = true;
    /**
     * Don't allow the RELEASE identifier.
     */
    private boolean banRelease = true;
    /**
     * Don't allow snapshot plugins.
     */
    private boolean banSnapshots = true;
    /**
     * Don't allow timestamp snapshot plugins.
     */
    private boolean banTimestamps = true;
    /**
     * Passed through to {@link PluginWrapper#addAll}; presumably controls whether
     * versions supplied by Maven's own defaults (super POM / lifecycle bindings)
     * are accepted - NOTE(review): exact semantics live in PluginWrapper, confirm there.
     *
     * @since 3.0.0
     */
    private boolean banMavenDefaults = true;
    /**
     * The comma separated list of phases that should be used to find lifecycle plugin bindings. The default value is
     * "clean,deploy,site".
     */
    private String phases = "clean,deploy,site";
    /**
     * Additional plugins to enforce have versions. These are plugins that may not be in the poms but are used anyway,
     * like help, eclipse etc. <br>
     * The plugins should be specified in the form: <code>group:artifactId</code>.
     */
    private List<String> additionalPlugins;
    /**
     * Plugins to skip for version enforcement. The plugins should be specified in the form:
     * <code>group:artifactId</code>. NOTE: This is deprecated, use unCheckedPluginList instead.
     */
    private List<String> unCheckedPlugins;
    /**
     * Same as unCheckedPlugins but as a comma list to better support properties. Sample form:
     * <code>group:artifactId,group2:artifactId2</code>
     *
     * @since 1.0-beta-1
     */
    private String unCheckedPluginList;
    /** The phase to lifecycle map, built lazily by {@link #getPhaseToLifecycleMap()}. */
    private Map<String, Lifecycle> phaseToLifecycleMap;
    /** The lifecycles, captured from {@code defaultLifeCycles} at the start of {@code execute()}. */
    private Collection<Lifecycle> lifecycles;
    /** The plugin manager. */
    private final PluginManager pluginManager;
    /** The factory used to create plugin artifacts for resolution. */
    private final ArtifactFactory factory;
    /** Resolver used to look up plugin artifacts in remote plugin repositories. */
    private final RepositorySystem repositorySystem;
    /** The session. */
    private final MavenSession session;
    /** The utils. */
    private final EnforcerRuleUtils utils;
    /** Supplies the running Maven version for failure messages. */
    private final RuntimeInformation runtimeInformation;
    /** Source of the known lifecycles. */
    private final DefaultLifecycles defaultLifeCycles;
    /** The project being enforced. */
    private final MavenProject project;
    /** Evaluates ${...} expressions found in plugin version declarations. */
    private final ExpressionEvaluator evaluator;
    /** Container used to look up lifecycle mappings by packaging. */
    private final PlexusContainer container;
    /**
     * Creates the rule with all required collaborators injected by the container.
     * Every dependency is mandatory; a {@link NullPointerException} is raised
     * eagerly if any is missing.
     */
    @SuppressWarnings("checkstyle:ParameterNumber")
    @Inject
    public RequirePluginVersions(
            PluginManager pluginManager,
            ArtifactFactory factory,
            RepositorySystem repositorySystem,
            MavenSession session,
            EnforcerRuleUtils utils,
            RuntimeInformation runtimeInformation,
            DefaultLifecycles defaultLifeCycles,
            MavenProject project,
            ExpressionEvaluator evaluator,
            PlexusContainer container) {
        this.pluginManager = Objects.requireNonNull(pluginManager);
        this.factory = Objects.requireNonNull(factory);
        this.repositorySystem = Objects.requireNonNull(repositorySystem);
        this.session = Objects.requireNonNull(session);
        this.utils = Objects.requireNonNull(utils);
        this.runtimeInformation = Objects.requireNonNull(runtimeInformation);
        this.defaultLifeCycles = Objects.requireNonNull(defaultLifeCycles);
        this.project = Objects.requireNonNull(project);
        this.evaluator = Objects.requireNonNull(evaluator);
        this.container = Objects.requireNonNull(container);
    }
    /**
     * Entry point of the rule: collects every plugin bound to the configured lifecycle
     * phases, plus user-supplied and active-profile plugins, removes the ones the user
     * asked to skip, and fails the build when any remaining plugin lacks a valid,
     * non-banned version declaration.
     *
     * @throws EnforcerRuleException when at least one plugin fails the version check,
     *         or when lifecycle/plugin information cannot be computed
     */
    @Override
    public void execute() throws EnforcerRuleException {
        try {
            // get the various expressions out of the helper.
            lifecycles = defaultLifeCycles.getLifeCycles();
            // get all the plugins that are bound to the specified lifecycles
            Set<Plugin> allPlugins = getBoundPlugins(project, phases);
            // insert any additional plugins specified by the user.
            allPlugins = addAdditionalPlugins(allPlugins, additionalPlugins);
            allPlugins.addAll(getProfilePlugins(project));
            // pull out any we should skip
            allPlugins =
                    removeUncheckedPlugins(combineUncheckedPlugins(unCheckedPlugins, unCheckedPluginList), allPlugins);
            // there's nothing to do here
            if (allPlugins.isEmpty()) {
                getLog().info("No plugin bindings found.");
                return;
            } else {
                getLog().debug("All Plugins in use: " + allPlugins);
            }
            // get all the plugins that are mentioned in the pom (and parents)
            List<PluginWrapper> pluginWrappers = getAllPluginEntries(project);
            for (PluginWrapper pluginWrapper : pluginWrappers) {
                getLog().debug("pluginWrappers: " + pluginWrapper.getGroupId() + ":" + pluginWrapper.getArtifactId()
                        + ":" + pluginWrapper.getVersion() + " source: " + pluginWrapper.getSource());
            }
            // now look for the versions that aren't valid and add to a list.
            List<Plugin> failures = new ArrayList<>();
            for (Plugin plugin : allPlugins) {
                if (!hasValidVersionSpecified(plugin, pluginWrappers)) {
                    failures.add(plugin);
                }
            }
            // if anything was found, log it then append the optional message.
            if (!failures.isEmpty()) {
                handleMessagesToTheUser(project, failures);
            }
        } catch (PluginNotFoundException | LifecycleExecutionException e) {
            throw new EnforcerRuleException(e.getLocalizedMessage(), e);
        }
    }
    /**
     * Builds and throws the failure message listing every plugin without a valid
     * version, including (where determinable) the version currently resolved for it
     * and whether that version came from the super POM or default lifecycle bindings.
     *
     * @param project the project being checked
     * @param failures plugins that failed the version check; must be non-empty
     * @throws EnforcerRuleException always - this method only formats and raises the failure
     */
    private void handleMessagesToTheUser(MavenProject project, List<Plugin> failures) throws EnforcerRuleException {
        StringBuilder newMsg = new StringBuilder();
        newMsg.append("Some plugins are missing valid versions or depend on Maven ");
        newMsg.append(runtimeInformation.getMavenVersion());
        newMsg.append(" defaults");
        handleBanMessages(newMsg);
        newMsg.append(System.lineSeparator());
        for (Plugin plugin : failures) {
            newMsg.append("   ");
            newMsg.append(plugin.getGroupId());
            newMsg.append(":");
            newMsg.append(plugin.getArtifactId());
            try {
                newMsg.append(". \tThe version currently in use is ");
                Plugin currentPlugin = findCurrentPlugin(plugin, project);
                if (currentPlugin == null) {
                    newMsg.append("unknown");
                } else {
                    newMsg.append(currentPlugin.getVersion());
                    if (PluginWrapper.isVersionFromDefaultLifecycleBindings(currentPlugin)
                            .orElse(false)) {
                        newMsg.append(" via default lifecycle bindings");
                    } else {
                        String msg = PluginWrapper.isVersionFromSuperpom(currentPlugin)
                                .filter(b -> b)
                                .map(t -> " via super POM")
                                // for Maven 3.6.0 or before (MNG-6593 / MNG-6600)
                                .orElse(" via super POM or default lifecycle bindings");
                        newMsg.append(msg);
                    }
                }
            } catch (Exception e) {
                // lots can go wrong here. Don't allow any issues trying to
                // determine the issue stop me
                getLog().debug("Exception while determining plugin Version " + e.getMessage());
                newMsg.append(". Unable to determine the plugin version.");
            }
            newMsg.append(System.lineSeparator());
        }
        String message = getMessage();
        if (message != null && !message.isEmpty()) {
            newMsg.append(message);
        }
        throw new EnforcerRuleException(newMsg.toString());
    }
private void handleBanMessages(StringBuilder newMsg) {
if (banLatest || banRelease || banSnapshots || banTimestamps) {
List<String> banList = new ArrayList<>();
if (banLatest) {
banList.add("LATEST");
}
if (banRelease) {
banList.add("RELEASE");
}
if (banSnapshots) {
banList.add("SNAPSHOT");
if (banTimestamps) {
banList.add("TIMESTAMP SNAPSHOT");
}
}
if (!banList.isEmpty()) {
newMsg.append(" (");
newMsg.append(String.join(", ", banList));
newMsg.append(" as plugin version are not allowed)");
}
}
}
/**
* Remove the plugins that the user doesn't want to check.
*
* @param uncheckedPlugins
* @param plugins
* @return The plugins which have been removed.
*/
Set<Plugin> removeUncheckedPlugins(Collection<String> uncheckedPlugins, Set<Plugin> plugins)
throws EnforcerRuleError {
if (uncheckedPlugins != null && !uncheckedPlugins.isEmpty()) {
for (String pluginKey : uncheckedPlugins) {
Plugin plugin = parsePluginString(pluginKey, "UncheckedPlugins");
plugins.remove(plugin);
}
}
return plugins;
}
/**
* Combines the old Collection with the new comma separated list.
*
* @param uncheckedPlugins a new collections
* @param uncheckedPluginsList a list to merge
* @return List of unchecked plugins.
*/
public Collection<String> combineUncheckedPlugins(
Collection<String> uncheckedPlugins, String uncheckedPluginsList) {
// if the comma list is empty, then there's nothing to do here.
if (uncheckedPluginsList != null && !uncheckedPluginsList.isEmpty()) {
// make sure there is a collection to add to.
if (uncheckedPlugins == null) {
uncheckedPlugins = new HashSet<>();
} else if (!uncheckedPlugins.isEmpty()) {
getLog().warn("The parameter 'unCheckedPlugins' is deprecated. Use 'unCheckedPluginList' instead");
}
uncheckedPlugins.addAll(Arrays.asList(uncheckedPluginsList.split(",")));
}
return uncheckedPlugins;
}
/**
* Add the additional plugins if they don't exist yet.
*
* @param existing the existing plugins
* @param additional the additional plugins
* @return the additional and existing plugins
* @throws EnforcerRuleError the enforcer error
*/
public Set<Plugin> addAdditionalPlugins(Set<Plugin> existing, List<String> additional) throws EnforcerRuleError {
if (additional != null) {
if (existing == null) {
existing = new HashSet<>();
}
for (String pluginString : additional) {
Plugin plugin = parsePluginString(pluginString, "AdditionalPlugins");
existing.add(plugin);
}
}
return existing;
}
/**
* Helper method to parse and inject a Plugin.
*
* @param pluginString a plugin description to parse
* @param field a source of pluginString
* @return the prepared plugin
*/
private Plugin parsePluginString(String pluginString, String field) throws EnforcerRuleError {
if (pluginString != null) {
String[] pluginStrings = pluginString.split(":");
if (pluginStrings.length == 2) {
Plugin plugin = new Plugin();
plugin.setGroupId(StringUtils.strip(pluginStrings[0]));
plugin.setArtifactId(StringUtils.strip(pluginStrings[1]));
return plugin;
} else {
throw new EnforcerRuleError("Invalid " + field + " string: " + pluginString);
}
} else {
throw new EnforcerRuleError("Invalid " + field + " null plugin string.");
}
}
/**
* Finds the plugins that are listed in active profiles.
*
* @param project the project
* @return the profile plugins
*/
public Set<Plugin> getProfilePlugins(MavenProject project) {
Set<Plugin> result = new HashSet<>();
List<Profile> profiles = project.getActiveProfiles();
if (profiles != null && !profiles.isEmpty()) {
for (Profile p : profiles) {
BuildBase b = p.getBuild();
if (b != null) {
List<Plugin> plugins = b.getPlugins();
if (plugins != null) {
result.addAll(plugins);
}
}
}
}
return result;
}
/**
* Given a plugin, this will retrieve the matching plugin artifact from the model.
*
* @param plugin plugin to lookup
* @param project project to search
* @return matching plugin, <code>null</code> if not found.
*/
private Plugin findCurrentPlugin(Plugin plugin, MavenProject project) throws EnforcerRuleException {
Plugin found = null;
try {
Model model = project.getModel();
Map<String, Plugin> plugins = model.getBuild().getPluginsAsMap();
found = plugins.get(plugin.getKey());
} catch (NullPointerException e) {
// nothing to do here
}
if (found == null) {
Artifact artifact = factory.createPluginArtifact(
plugin.getGroupId(), plugin.getArtifactId(), VersionRange.createFromVersion("LATEST"));
try {
repositorySystem.resolveArtifact(
session.getRepositorySession(),
new ArtifactRequest(
RepositoryUtils.toArtifact(artifact),
session.getCurrentProject().getRemotePluginRepositories(),
"resolvePlugin"));
} catch (ArtifactResolutionException e) {
throw new EnforcerRuleException("Unable to resolve the plugin " + artifact.getArtifactId(), e);
}
plugin.setVersion(artifact.getVersion());
found = plugin;
}
return found;
}
/**
* Gets the plugins that are bound to the defined phases. This does not find plugins bound in the pom to a phase
* later than the plugin is executing.
*
* @param project the project
* @param phases the phases
* @return the bound plugins
* @throws PluginNotFoundException the plugin not found exception
* @throws LifecycleExecutionException the lifecycle execution exception
*/
private Set<Plugin> getBoundPlugins(MavenProject project, String phases)
throws PluginNotFoundException, LifecycleExecutionException {
Set<Plugin> allPlugins = new HashSet<>();
// lookup the bindings for all the passed in phases
String[] lifecyclePhases = phases.split(",");
for (int i = 0; i < lifecyclePhases.length; i++) {
String lifecyclePhase = lifecyclePhases[i];
if (lifecyclePhase != null && !lifecyclePhase.isEmpty()) {
try {
Lifecycle lifecycle = getLifecycleForPhase(lifecyclePhase);
getLog().debug("getBoundPlugins(): " + project.getId() + " " + lifecyclePhase + " "
+ lifecycle.getId());
allPlugins.addAll(getAllPlugins(project, lifecycle));
} catch (BuildFailureException e) {
// swallow this because the
// user may have declared a phase that
// doesn't exist for every module.
}
}
}
return allPlugins;
}
    /**
     * Checks for valid version specified. Checks to see if the version is specified for the plugin. Can optionally ban
     * "RELEASE" or "LATEST" even if specified.
     *
     * @param source the plugin whose version declaration is being validated
     * @param pluginWrappers every plugin entry found in the pom (and parents/profiles)
     * @return true when at least one matching entry declares a valid, non-banned version
     *         and no matching entry uses a banned one
     */
    public boolean hasValidVersionSpecified(Plugin source, List<PluginWrapper> pluginWrappers) {
        boolean found = false;
        boolean status = false;
        for (PluginWrapper plugin : pluginWrappers) {
            // find the matching plugin entry
            if (isMatchingPlugin(source, plugin)) {
                found = true;
                // found the entry. now see if the version is specified
                String version = plugin.getVersion();
                try {
                    version = (String) evaluator.evaluate(version);
                } catch (ExpressionEvaluationException e) {
                    // an unevaluable expression counts as an invalid version
                    return false;
                }
                if (isValidVersion(version)) {
                    getLog().debug("checking for notEmpty and notIsWhitespace(): " + version);
                    if (banRelease && version.equals("RELEASE")) {
                        return false;
                    }
                    if (banLatest && version.equals("LATEST")) {
                        return false;
                    }
                    if (banSnapshots && isSnapshot(version)) {
                        return false;
                    }
                    // the version was specified and not
                    // banned. It's ok. Keep looking through the list to make
                    // sure it's not using a banned version somewhere else.
                    status = true;
                    if (!banRelease && !banLatest && !banSnapshots) {
                        // no need to keep looking
                        break;
                    }
                }
            }
        }
        if (!found) {
            getLog().debug("plugin " + source.getGroupId() + ":" + source.getArtifactId() + " not found");
        }
        return status;
    }
private boolean isValidVersion(String version) {
return (version != null && !version.isEmpty()) && !StringUtils.isWhitespace(version);
}
private boolean isMatchingPlugin(Plugin source, PluginWrapper plugin) {
return source.getArtifactId().equals(plugin.getArtifactId())
&& source.getGroupId().equals(plugin.getGroupId());
}
/**
* Checks if is snapshot.
*
* @param baseVersion the base version
* @return true, if is snapshot
*/
private boolean isSnapshot(String baseVersion) {
if (banTimestamps) {
return Artifact.VERSION_FILE_PATTERN.matcher(baseVersion).matches()
|| baseVersion.endsWith(Artifact.SNAPSHOT_VERSION);
} else {
return baseVersion.endsWith(Artifact.SNAPSHOT_VERSION);
}
}
/*
* Uses borrowed lifecycle code to get a list of all plugins bound to the lifecycle.
*/
/**
* Gets the all plugins.
*
* @param project the project
* @param lifecycle the lifecycle
* @return the all plugins
* @throws PluginNotFoundException the plugin not found exception
* @throws LifecycleExecutionException the lifecycle execution exception
*/
private Set<Plugin> getAllPlugins(MavenProject project, Lifecycle lifecycle)
throws PluginNotFoundException, LifecycleExecutionException {
getLog().debug("RequirePluginVersions.getAllPlugins:");
Set<Plugin> plugins = new HashSet<>();
// first, bind those associated with the packaging
Map<String, String> mappings = findMappingsForLifecycle(project, lifecycle);
for (Map.Entry<String, String> entry : mappings.entrySet()) {
getLog().debug(" lifecycleMapping = " + entry.getKey());
String pluginsForLifecycle = entry.getValue();
getLog().debug(" plugins = " + pluginsForLifecycle);
if (pluginsForLifecycle != null && !pluginsForLifecycle.isEmpty()) {
String pluginList[] = pluginsForLifecycle.split(",");
for (String plugin : pluginList) {
plugin = StringUtils.strip(plugin);
getLog().debug(" plugin = " + plugin);
String tokens[] = plugin.split(":");
getLog().debug(" GAV = " + Arrays.asList(tokens));
Plugin p = new Plugin();
p.setGroupId(tokens[0]);
p.setArtifactId(tokens[1]);
plugins.add(p);
}
}
}
plugins.addAll(project.getBuildPlugins());
return plugins;
}
/*
* NOTE: All the code following this point was scooped from the DefaultLifecycleExecutor. There must be a better way
* but for now it should work.
*/
/**
* Gets the phase to lifecycle map.
*
* @return the phase to lifecycle map
* @throws LifecycleExecutionException the lifecycle execution exception
*/
public Map<String, Lifecycle> getPhaseToLifecycleMap() throws LifecycleExecutionException {
if (phaseToLifecycleMap == null) {
phaseToLifecycleMap = new HashMap<>();
for (Lifecycle lifecycle : lifecycles) {
List<String> phases = lifecycle.getPhases();
for (String phase : phases) {
getLog().debug("getPhaseToLifecycleMap(): phase: " + phase);
if (phaseToLifecycleMap.containsKey(phase)) {
Lifecycle prevLifecycle = phaseToLifecycleMap.get(phase);
throw new LifecycleExecutionException("Phase '" + phase
+ "' is defined in more than one lifecycle: '" + lifecycle.getId() + "' and '"
+ prevLifecycle.getId() + "'");
} else {
phaseToLifecycleMap.put(phase, lifecycle);
}
}
}
}
return phaseToLifecycleMap;
}
/**
* Gets the lifecycle for phase.
*
* @param phase the phase
* @return the lifecycle for phase
* @throws BuildFailureException the build failure exception
* @throws LifecycleExecutionException the lifecycle execution exception
*/
private Lifecycle getLifecycleForPhase(String phase) throws BuildFailureException, LifecycleExecutionException {
Lifecycle lifecycle = getPhaseToLifecycleMap().get(phase);
if (lifecycle == null) {
throw new BuildFailureException("Unable to find lifecycle for phase '" + phase + "'");
}
return lifecycle;
}
    /**
     * Finds the phase-to-plugins mappings for the given lifecycle and this project's
     * packaging, trying in order: an extension-provided {@link LifecycleMapping},
     * a container lookup by packaging, and finally the lifecycle's default phases.
     *
     * @param project the project
     * @param lifecycle the lifecycle
     * @return the mappings, never null
     * @throws LifecycleExecutionException when no mapping and no default exists for the packaging
     * @throws PluginNotFoundException the plugin not found exception
     */
    private Map<String, String> findMappingsForLifecycle(MavenProject project, Lifecycle lifecycle)
            throws LifecycleExecutionException, PluginNotFoundException {
        String packaging = project.getPackaging();
        Map<String, String> mappings = null;
        // 1) a build-extension plugin may contribute the lifecycle mapping
        LifecycleMapping m = (LifecycleMapping) findExtension(
                project, LifecycleMapping.ROLE, packaging, session.getSettings(), session.getLocalRepository());
        if (m != null) {
            mappings = m.getPhases(lifecycle.getId());
        }
        Map<String, String> defaultMappings = lifecycle.getDefaultPhases();
        if (mappings == null) {
            // 2) fall back to the container-registered mapping for this packaging
            try {
                m = container.lookup(LifecycleMapping.class, packaging);
                mappings = m.getPhases(lifecycle.getId());
            } catch (ComponentLookupException e) {
                if (defaultMappings == null) {
                    throw new LifecycleExecutionException(
                            "Cannot find lifecycle mapping for packaging: '" + packaging + "'.", e);
                }
            }
        }
        if (mappings == null) {
            // 3) last resort: the lifecycle's own default phases
            if (defaultMappings == null) {
                throw new LifecycleExecutionException(
                        "Cannot find lifecycle mapping for packaging: '" + packaging + "', and there is no default");
            } else {
                mappings = defaultMappings;
            }
        }
        return mappings;
    }
    /**
     * Looks up a component of the given role/roleHint contributed by one of the
     * project's build-extension plugins. The first extension plugin providing the
     * component wins.
     *
     * @param project the project
     * @param role the component role
     * @param roleHint the role hint (here: the packaging)
     * @param settings the settings
     * @param localRepository the local repository
     * @return the component, or {@code null} when no extension provides it
     * @throws LifecycleExecutionException the lifecycle execution exception
     * @throws PluginNotFoundException the plugin not found exception
     */
    private Object findExtension(
            MavenProject project, String role, String roleHint, Settings settings, ArtifactRepository localRepository)
            throws LifecycleExecutionException, PluginNotFoundException {
        Object pluginComponent = null;
        List<Plugin> buildPlugins = project.getBuildPlugins();
        for (Plugin plugin : buildPlugins) {
            if (plugin.isExtensions()) {
                verifyPlugin(plugin, project, settings, localRepository);
                // TODO: if moved to the plugin manager we
                // already have the descriptor from above
                // and so do can lookup the container
                // directly
                try {
                    pluginComponent = pluginManager.getPluginComponent(plugin, role, roleHint);
                    if (pluginComponent != null) {
                        break;
                    }
                } catch (ComponentLookupException e) {
                    // this extension simply doesn't provide the component; keep searching
                    getLog().debug("Unable to find the lifecycle component in the extension " + e.getMessage());
                } catch (PluginManagerException e) {
                    throw new LifecycleExecutionException(
                            "Error getting extensions from the plugin '" + plugin.getKey() + "': " + e.getMessage(), e);
                }
            }
        }
        return pluginComponent;
    }
    /**
     * Verifies that the given extension plugin can be resolved and loaded,
     * wrapping every failure mode in a {@link LifecycleExecutionException}.
     *
     * @param plugin the plugin
     * @param project the project
     * @param settings the settings
     * @param localRepository the local repository
     * @throws LifecycleExecutionException the lifecycle execution exception
     * @throws PluginNotFoundException the plugin not found exception
     */
    private void verifyPlugin(
            Plugin plugin, MavenProject project, Settings settings, ArtifactRepository localRepository)
            throws LifecycleExecutionException, PluginNotFoundException {
        try {
            pluginManager.verifyPlugin(plugin, project, settings, localRepository);
        } catch (PluginManagerException e) {
            throw new LifecycleExecutionException(
                    "Internal error in the plugin manager getting plugin '" + plugin.getKey() + "': " + e.getMessage(),
                    e);
        } catch (PluginVersionResolutionException
                | InvalidVersionSpecificationException
                | InvalidPluginException
                | PluginVersionNotFoundException
                | org.apache.maven.artifact.resolver.ArtifactResolutionException
                | ArtifactNotFoundException e) {
            throw new LifecycleExecutionException(e.getMessage(), e);
        }
    }
    /**
     * Gets all plugin entries in build.plugins, build.pluginManagement.plugins, profile.build.plugins, reporting and
     * profile.reporting in this project and all parents
     *
     * @param project the project
     * @return the all plugin entries wrapped in a PluginWrapper Object
     */
    private List<PluginWrapper> getAllPluginEntries(MavenProject project) {
        List<PluginWrapper> plugins = new ArrayList<>();
        // now find all the plugin entries, either in
        // build.plugins or build.pluginManagement.plugins, profiles.plugins and reporting
        // NOTE(review): parent entries appear here because getModel() returns the
        // effective (inherited) model - confirm against the resolution in EnforcerRuleUtils.
        getPlugins(plugins, project.getModel());
        getReportingPlugins(plugins, project.getModel());
        getPluginManagementPlugins(plugins, project.getModel());
        addPluginsInProfiles(plugins, project.getModel());
        return plugins;
    }
    // Collects plugin entries from every profile of the model (build, reporting
    // and pluginManagement sections).
    private void addPluginsInProfiles(List<PluginWrapper> plugins, Model model) {
        List<Profile> profiles = ofNullable(model).map(Model::getProfiles).orElseGet(Collections::emptyList);
        for (Profile profile : profiles) {
            getProfilePlugins(plugins, profile);
            getProfileReportingPlugins(plugins, profile);
            getProfilePluginManagementPlugins(plugins, profile);
        }
    }
    // profile > build > pluginManagement > plugins
    private void getProfilePluginManagementPlugins(List<PluginWrapper> plugins, Profile profile) {
        List<Plugin> modelPlugins = ofNullable(profile)
                .map(Profile::getBuild)
                .map(PluginConfiguration::getPluginManagement)
                .map(PluginContainer::getPlugins)
                .orElseGet(Collections::emptyList);
        plugins.addAll(PluginWrapper.addAll(utils.resolvePlugins(modelPlugins), banMavenDefaults));
    }
    // profile > reporting > plugins
    private void getProfileReportingPlugins(List<PluginWrapper> plugins, Profile profile) {
        List<ReportPlugin> modelReportPlugins = ofNullable(profile)
                .map(ModelBase::getReporting)
                .map(Reporting::getPlugins)
                .orElseGet(Collections::emptyList);
        // add the reporting plugins
        plugins.addAll(PluginWrapper.addAll(utils.resolveReportPlugins(modelReportPlugins), banMavenDefaults));
    }
    // profile > build > plugins
    private void getProfilePlugins(List<PluginWrapper> plugins, Profile profile) {
        List<Plugin> modelPlugins = ofNullable(profile)
                .map(Profile::getBuild)
                .map(PluginContainer::getPlugins)
                .orElseGet(Collections::emptyList);
        plugins.addAll(PluginWrapper.addAll(utils.resolvePlugins(modelPlugins), banMavenDefaults));
    }
    // build > plugins
    private void getPlugins(List<PluginWrapper> plugins, Model model) {
        List<Plugin> modelPlugins = ofNullable(model)
                .map(Model::getBuild)
                .map(PluginContainer::getPlugins)
                .orElseGet(Collections::emptyList);
        plugins.addAll(PluginWrapper.addAll(utils.resolvePlugins(modelPlugins), banMavenDefaults));
    }
    // build > pluginManagement > plugins
    private void getPluginManagementPlugins(List<PluginWrapper> plugins, Model model) {
        List<Plugin> modelPlugins = ofNullable(model)
                .map(Model::getBuild)
                .map(PluginConfiguration::getPluginManagement)
                .map(PluginContainer::getPlugins)
                .orElseGet(Collections::emptyList);
        plugins.addAll(PluginWrapper.addAll(utils.resolvePlugins(modelPlugins), banMavenDefaults));
    }
    // reporting > plugins
    private void getReportingPlugins(List<PluginWrapper> plugins, Model model) {
        List<ReportPlugin> modelReportPlugins = ofNullable(model)
                .map(ModelBase::getReporting)
                .map(Reporting::getPlugins)
                .orElseGet(Collections::emptyList);
        // add the reporting plugins
        plugins.addAll(PluginWrapper.addAll(utils.resolveReportPlugins(modelReportPlugins), banMavenDefaults));
    }
    /**
     * Sets whether the LATEST identifier is banned.
     *
     * @param banLatest the banLatest to set
     */
    public void setBanLatest(boolean banLatest) {
        this.banLatest = banLatest;
    }
    /**
     * Sets whether the RELEASE identifier is banned.
     *
     * @param banRelease the banRelease to set
     */
    public void setBanRelease(boolean banRelease) {
        this.banRelease = banRelease;
    }
    /**
     * Tells whether snapshot plugin versions are banned.
     *
     * @return the banSnapshots
     */
    public boolean isBanSnapshots() {
        return this.banSnapshots;
    }
    /**
     * Sets whether snapshot plugin versions are banned.
     *
     * @param banSnapshots the banSnapshots to set
     */
    public void setBanSnapshots(boolean banSnapshots) {
        this.banSnapshots = banSnapshots;
    }
    /**
     * Sets whether timestamped snapshot plugin versions are banned.
     *
     * @param banTimestamps the banTimestamps to set
     */
    public void setBanTimestamps(boolean banTimestamps) {
        this.banTimestamps = banTimestamps;
    }
    // Single-line summary of the rule configuration, used for debug logging.
    @Override
    public String toString() {
        return String.format(
                "RequirePluginVersions[message=%s, banLatest=%b, banRelease=%b, banSnapshots=%b, banTimestamps=%b, phases=%s, additionalPlugins=%s, unCheckedPluginList=%s, unCheckedPlugins=%s]",
                getMessage(),
                banLatest,
                banRelease,
                banSnapshots,
                banTimestamps,
                phases,
                additionalPlugins,
                unCheckedPluginList,
                unCheckedPlugins);
    }
}
|
apache/solr | 36,682 | solr/core/src/java/org/apache/solr/cloud/api/collections/ReindexCollectionCmd.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.cloud.api.collections;
import static org.apache.solr.common.params.CollectionAdminParams.FOLLOW_ALIASES;
import com.google.common.annotations.VisibleForTesting;
import java.lang.invoke.MethodHandles;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import java.util.TreeMap;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.apache.solr.client.solrj.SolrRequest;
import org.apache.solr.client.solrj.SolrResponse;
import org.apache.solr.client.solrj.cloud.DistribStateManager;
import org.apache.solr.client.solrj.request.CollectionAdminRequest;
import org.apache.solr.client.solrj.request.GenericSolrRequest;
import org.apache.solr.client.solrj.request.QueryRequest;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.cloud.DistributedClusterStateUpdater;
import org.apache.solr.cloud.Overseer;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.cloud.ClusterState;
import org.apache.solr.common.cloud.DocCollection;
import org.apache.solr.common.cloud.DocRouter;
import org.apache.solr.common.cloud.Replica;
import org.apache.solr.common.cloud.ReplicaCount;
import org.apache.solr.common.cloud.ZkNodeProps;
import org.apache.solr.common.cloud.ZkStateReader;
import org.apache.solr.common.params.CollectionAdminParams;
import org.apache.solr.common.params.CollectionParams;
import org.apache.solr.common.params.CommonAdminParams;
import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.Utils;
import org.apache.solr.util.TestInjection;
import org.apache.solr.util.TimeOut;
import org.apache.zookeeper.CreateMode;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Reindex a collection, usually in order to change the index schema.
*
* <p>WARNING: Reindexing is potentially a lossy operation - some indexed data that is not available
* as stored fields may be irretrievably lost, so users should use this command with caution,
* evaluating the potential impact by using different source and target collection names first, and
* preserving the source collection until the evaluation is complete.
*
* <p>Reindexing follows these steps:
*
* <ol>
* <li>creates a temporary collection using the most recent schema of the source collection (or
* the one specified in the parameters, which must already exist), and the shape of the
* original collection, unless overridden by parameters.
* <li>copy the source documents to the temporary collection, using their stored fields and
* reindexing them using the specified schema. NOTE: some data loss may occur if the original
* stored field data is not available!
* <li>create the target collection from scratch with the specified name (or the same as source if
* not specified) and the specified parameters. NOTE: if the target name was not specified or
* is the same as the source collection then a unique sequential collection name will be used.
* <li>copy the documents from the source collection to the target collection.
* <li>if the source and target collection name was the same then set up an alias pointing from
* the source collection name to the actual (sequentially named) target collection
* <li>optionally delete the source collection.
* </ol>
*/
public class ReindexCollectionCmd implements CollApiCmds.CollectionApiCommand {
private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
public static final String COMMAND = "cmd";
public static final String REINDEX_STATUS = "reindexStatus";
public static final String REMOVE_SOURCE = "removeSource";
public static final String TARGET = "target";
public static final String TARGET_COL_PREFIX = ".rx_";
public static final String CHK_COL_PREFIX = ".rx_ck_";
public static final String REINDEXING_STATE = CollectionAdminRequest.PROPERTY_PREFIX + "rx";
public static final String STATE = "state";
public static final String PHASE = "phase";
private static final List<String> COLLECTION_PARAMS =
Stream.concat(
CollectionHandlingUtils.numReplicasProperties().stream(),
Stream.of(
ZkStateReader.CONFIGNAME_PROP,
ZkStateReader.NUM_SHARDS_PROP,
ZkStateReader.REPLICATION_FACTOR,
"shards",
CollectionAdminParams.CREATE_NODE_SET_PARAM,
CollectionAdminParams.CREATE_NODE_SET_SHUFFLE_PARAM))
.collect(Collectors.toUnmodifiableList());
private final CollectionCommandContext ccc;
private static AtomicInteger tmpCollectionSeq = new AtomicInteger();
public enum State {
IDLE,
RUNNING,
ABORTED,
FINISHED;
public String toLower() {
return toString().toLowerCase(Locale.ROOT);
}
public static State get(Object p) {
if (p == null) {
return null;
}
p = String.valueOf(p).toLowerCase(Locale.ROOT);
return states.get(p);
}
static final Map<String, State> states =
Stream.of(State.values())
.collect(Collectors.toUnmodifiableMap(State::toLower, Function.identity()));
}
public enum Cmd {
START,
ABORT,
STATUS;
public String toLower() {
return toString().toLowerCase(Locale.ROOT);
}
public static Cmd get(String p) {
if (p == null) {
return null;
}
p = p.toLowerCase(Locale.ROOT);
return cmds.get(p);
}
static final Map<String, Cmd> cmds =
Stream.of(Cmd.values())
.collect(Collectors.toUnmodifiableMap(Cmd::toLower, Function.identity()));
}
  /**
   * @param ccc shared command context (cluster state access, Overseer state updates, core
   *     container) used by all collection API commands
   */
  public ReindexCollectionCmd(CollectionCommandContext ccc) {
    this.ccc = ccc;
  }
@Override
public void call(ClusterState clusterState, ZkNodeProps message, NamedList<Object> results)
throws Exception {
log.debug("*** called: {}", message);
String extCollection = message.getStr(CommonParams.NAME);
if (extCollection == null) {
throw new SolrException(
SolrException.ErrorCode.BAD_REQUEST, "Source collection name must be specified");
}
boolean followAliases = message.getBool(FOLLOW_ALIASES, false);
String collection;
if (followAliases) {
collection =
ccc.getSolrCloudManager().getClusterStateProvider().resolveSimpleAlias(extCollection);
} else {
collection = extCollection;
}
if (!clusterState.hasCollection(collection)) {
throw new SolrException(
SolrException.ErrorCode.BAD_REQUEST, "Source collection name must exist");
}
String target = message.getStr(TARGET);
if (target == null) {
target = collection;
} else {
if (followAliases) {
target = ccc.getSolrCloudManager().getClusterStateProvider().resolveSimpleAlias(target);
}
}
boolean sameTarget = target.equals(collection) || target.equals(extCollection);
boolean removeSource = message.getBool(REMOVE_SOURCE, false);
Cmd command = Cmd.get(message.getStr(COMMAND, Cmd.START.toLower()));
if (command == null) {
throw new SolrException(
SolrException.ErrorCode.BAD_REQUEST, "Unknown command: " + message.getStr(COMMAND));
}
Map<String, Object> reindexingState =
getReindexingState(ccc.getSolrCloudManager().getDistribStateManager(), collection);
if (!reindexingState.containsKey(STATE)) {
reindexingState.put(STATE, State.IDLE.toLower());
}
State state = State.get(reindexingState.get(STATE));
if (command == Cmd.ABORT) {
log.info("Abort requested for collection {}, setting the state to ABORTED.", collection);
// check that it's running
if (state != State.RUNNING) {
log.debug(
"Abort requested for collection {} but command is not running: {}", collection, state);
return;
}
setReindexingState(collection, State.ABORTED, null);
reindexingState.put(STATE, "aborting");
results.add(REINDEX_STATUS, reindexingState);
// if needed the cleanup will be performed by the running instance of the command
return;
} else if (command == Cmd.STATUS) {
results.add(REINDEX_STATUS, reindexingState);
return;
}
// command == Cmd.START
// check it's not already running
if (state == State.RUNNING) {
throw new SolrException(
SolrException.ErrorCode.BAD_REQUEST,
"Reindex is already running for collection "
+ collection
+ ". If you are sure this is not the case you can issue &cmd=abort to clean up this state.");
}
DocCollection coll = clusterState.getCollection(collection);
boolean aborted = false;
int batchSize = message.getInt(CommonParams.ROWS, 100);
String query = message.getStr(CommonParams.Q, "*:*");
String fl = message.getStr(CommonParams.FL, "*");
Integer rf = message.getInt(ZkStateReader.REPLICATION_FACTOR, coll.getReplicationFactor());
ReplicaCount numReplicas = ReplicaCount.fromMessage(message, coll);
int numShards = message.getInt(ZkStateReader.NUM_SHARDS_PROP, coll.getActiveSlices().size());
DocRouter router = coll.getRouter();
if (router == null) {
router = DocRouter.DEFAULT;
}
String configName = message.getStr(ZkStateReader.CONFIGNAME_PROP, coll.getConfigName());
String targetCollection;
int seq = tmpCollectionSeq.getAndIncrement();
if (sameTarget) {
do {
targetCollection = TARGET_COL_PREFIX + extCollection + "_" + seq;
if (!clusterState.hasCollection(targetCollection)) {
break;
}
seq = tmpCollectionSeq.getAndIncrement();
} while (clusterState.hasCollection(targetCollection));
} else {
targetCollection = target;
}
String chkCollection = CHK_COL_PREFIX + extCollection;
String daemonUrl = null;
Replica daemonReplica = null;
Exception exc = null;
boolean createdTarget = false;
try {
// set the running flag
reindexingState.clear();
reindexingState.put("actualSourceCollection", collection);
reindexingState.put("actualTargetCollection", targetCollection);
reindexingState.put("checkpointCollection", chkCollection);
reindexingState.put("inputDocs", getNumberOfDocs(collection));
reindexingState.put(PHASE, "creating target and checkpoint collections");
setReindexingState(collection, State.RUNNING, reindexingState);
// 0. set up target and checkpoint collections
NamedList<Object> cmdResults = new NamedList<>();
ZkNodeProps cmd;
if (clusterState.hasCollection(targetCollection)) {
throw new SolrException(
SolrException.ErrorCode.SERVER_ERROR,
"Target collection " + targetCollection + " already exists! Delete it first.");
}
if (clusterState.hasCollection(chkCollection)) {
// delete the checkpoint collection
cmd =
new ZkNodeProps(
Overseer.QUEUE_OPERATION,
CollectionParams.CollectionAction.DELETE.toLower(),
CommonParams.NAME,
chkCollection);
new DeleteCollectionCmd(ccc).call(clusterState, cmd, cmdResults);
CollectionHandlingUtils.checkResults(
"deleting old checkpoint collection " + chkCollection, cmdResults, true);
}
if (maybeAbort(collection)) {
aborted = true;
return;
}
Map<String, Object> propMap = new HashMap<>();
propMap.put(Overseer.QUEUE_OPERATION, CollectionParams.CollectionAction.CREATE.toLower());
propMap.put(CommonParams.NAME, targetCollection);
propMap.put(ZkStateReader.NUM_SHARDS_PROP, numShards);
propMap.put(CollectionAdminParams.COLL_CONF, configName);
// init first from the same router
propMap.put("router.name", router.getName());
for (String key : coll.keySet()) {
if (key.startsWith("router.")) {
propMap.put(key, coll.get(key));
}
}
// then apply overrides if present
for (String key : message.keySet()) {
if (key.startsWith("router.")) {
propMap.put(key, message.getStr(key));
} else if (COLLECTION_PARAMS.contains(key)) {
propMap.put(key, message.get(key));
}
}
propMap.put(CommonAdminParams.WAIT_FOR_FINAL_STATE, true);
if (rf != null) {
propMap.put(ZkStateReader.REPLICATION_FACTOR, rf);
}
numReplicas.writeProps(propMap);
// create the target collection
cmd = new ZkNodeProps(propMap);
cmdResults = new NamedList<>();
new CreateCollectionCmd(ccc).call(clusterState, cmd, cmdResults);
createdTarget = true;
CollectionHandlingUtils.checkResults(
"creating target collection " + targetCollection, cmdResults, true);
// create the checkpoint collection - use RF=1 and 1 shard
cmd =
new ZkNodeProps(
Overseer.QUEUE_OPERATION, CollectionParams.CollectionAction.CREATE.toLower(),
CommonParams.NAME, chkCollection,
ZkStateReader.NUM_SHARDS_PROP, "1",
ZkStateReader.REPLICATION_FACTOR, "1",
CollectionAdminParams.COLL_CONF, "_default",
CommonAdminParams.WAIT_FOR_FINAL_STATE, "true");
cmdResults = new NamedList<>();
new CreateCollectionCmd(ccc).call(clusterState, cmd, cmdResults);
CollectionHandlingUtils.checkResults(
"creating checkpoint collection " + chkCollection, cmdResults, true);
// wait for a while until we see both collections
try {
for (String col : List.of(targetCollection, chkCollection)) {
ccc.getZkStateReader().waitForState(col, 30, TimeUnit.SECONDS, Objects::nonNull);
}
} catch (TimeoutException e) {
throw new SolrException(
SolrException.ErrorCode.SERVER_ERROR, "Could not fully create temporary collection(s)");
}
clusterState = ccc.getSolrCloudManager().getClusterState();
if (maybeAbort(collection)) {
aborted = true;
return;
}
// 1. put the source collection in read-only mode
cmd =
new ZkNodeProps(
Overseer.QUEUE_OPERATION,
CollectionParams.CollectionAction.MODIFYCOLLECTION.toLower(),
ZkStateReader.COLLECTION_PROP,
collection,
ZkStateReader.READ_ONLY,
"true");
if (ccc.getDistributedClusterStateUpdater().isDistributedStateUpdate()) {
ccc.getDistributedClusterStateUpdater()
.doSingleStateUpdate(
DistributedClusterStateUpdater.MutatingCommand.CollectionModifyCollection,
cmd,
ccc.getSolrCloudManager(),
ccc.getZkStateReader());
} else {
ccc.offerStateUpdate(cmd);
}
TestInjection.injectReindexLatch();
if (maybeAbort(collection)) {
aborted = true;
return;
}
// 2. copy the documents to target
// Recipe taken from:
// http://joelsolr.blogspot.com/2016/10/solr-63-batch-jobs-parallel-etl-and.html
ModifiableSolrParams q = new ModifiableSolrParams();
q.set(CommonParams.QT, "/stream");
q.set("collection", collection);
q.set(
"expr",
"daemon(id=\""
+ targetCollection
+ "\","
+ "terminate=\"true\","
+ "commit("
+ targetCollection
+ ","
+ "update("
+ targetCollection
+ ","
+ "batchSize="
+ batchSize
+ ","
+ "topic("
+ chkCollection
+ ","
+ collection
+ ","
+ "q=\""
+ query
+ "\","
+ "fl=\""
+ fl
+ "\","
+ "id=\"topic_"
+ targetCollection
+ "\","
+ "rows=\""
+ batchSize
+ "\","
+ "initialCheckpoint=\"0\"))))");
log.debug("- starting copying documents from {} to {}", collection, targetCollection);
SolrResponse rsp;
try {
rsp = new QueryRequest(q).process(ccc.getSolrCloudManager().getSolrClient());
} catch (Exception e) {
throw new SolrException(
SolrException.ErrorCode.SERVER_ERROR,
"Unable to copy documents from " + collection + " to " + targetCollection,
e);
}
daemonReplica = getReplicaForDaemon(rsp, coll);
if (daemonReplica == null) {
throw new SolrException(
SolrException.ErrorCode.SERVER_ERROR,
"Unable to copy documents from "
+ collection
+ " to "
+ targetCollection
+ ": "
+ Utils.toJSONString(rsp));
}
reindexingState.put("daemonUrl", daemonReplica.getCoreUrl());
reindexingState.put("daemonName", targetCollection);
reindexingState.put(PHASE, "copying documents");
setReindexingState(collection, State.RUNNING, reindexingState);
// wait for the daemon to finish
waitForDaemon(targetCollection, daemonReplica, collection, targetCollection, reindexingState);
if (maybeAbort(collection)) {
aborted = true;
return;
}
log.debug("- finished copying from {} to {}", collection, targetCollection);
// fail here or earlier during daemon run
TestInjection.injectReindexFailure();
// 5. if (sameTarget) set up an alias to use targetCollection as the source name
if (sameTarget) {
log.debug("- setting up alias from {} to {}", extCollection, targetCollection);
cmd = new ZkNodeProps(CommonParams.NAME, extCollection, "collections", targetCollection);
cmdResults = new NamedList<>();
new CreateAliasCmd(ccc).call(clusterState, cmd, cmdResults);
CollectionHandlingUtils.checkResults(
"setting up alias " + extCollection + " -> " + targetCollection, cmdResults, true);
reindexingState.put("alias", extCollection + " -> " + targetCollection);
}
reindexingState.remove("daemonUrl");
reindexingState.remove("daemonName");
reindexingState.put("processedDocs", getNumberOfDocs(targetCollection));
reindexingState.put(PHASE, "copying done, finalizing");
setReindexingState(collection, State.RUNNING, reindexingState);
if (maybeAbort(collection)) {
aborted = true;
return;
}
// 6. delete the checkpoint collection
log.debug("- deleting {}", chkCollection);
cmd =
new ZkNodeProps(
Overseer.QUEUE_OPERATION,
CollectionParams.CollectionAction.DELETE.toLower(),
CommonParams.NAME,
chkCollection);
cmdResults = new NamedList<>();
new DeleteCollectionCmd(ccc).call(clusterState, cmd, cmdResults);
CollectionHandlingUtils.checkResults(
"deleting checkpoint collection " + chkCollection, cmdResults, true);
// 7. optionally delete the source collection
if (removeSource) {
log.debug("- deleting source collection");
cmd =
new ZkNodeProps(
Overseer.QUEUE_OPERATION,
CollectionParams.CollectionAction.DELETE.toLower(),
CommonParams.NAME,
collection,
FOLLOW_ALIASES,
"false");
cmdResults = new NamedList<>();
new DeleteCollectionCmd(ccc).call(clusterState, cmd, cmdResults);
CollectionHandlingUtils.checkResults(
"deleting source collection " + collection, cmdResults, true);
} else {
// 8. clear readOnly on source
ZkNodeProps props =
new ZkNodeProps(
Overseer.QUEUE_OPERATION,
CollectionParams.CollectionAction.MODIFYCOLLECTION.toLower(),
ZkStateReader.COLLECTION_PROP,
collection,
ZkStateReader.READ_ONLY,
null);
if (ccc.getDistributedClusterStateUpdater().isDistributedStateUpdate()) {
ccc.getDistributedClusterStateUpdater()
.doSingleStateUpdate(
DistributedClusterStateUpdater.MutatingCommand.CollectionModifyCollection,
props,
ccc.getSolrCloudManager(),
ccc.getZkStateReader());
} else {
ccc.offerStateUpdate(props);
}
}
// 9. set FINISHED state on the target and clear the state on the source
ZkNodeProps props =
new ZkNodeProps(
Overseer.QUEUE_OPERATION,
CollectionParams.CollectionAction.MODIFYCOLLECTION.toLower(),
ZkStateReader.COLLECTION_PROP,
targetCollection,
REINDEXING_STATE,
State.FINISHED.toLower());
if (ccc.getDistributedClusterStateUpdater().isDistributedStateUpdate()) {
ccc.getDistributedClusterStateUpdater()
.doSingleStateUpdate(
DistributedClusterStateUpdater.MutatingCommand.CollectionModifyCollection,
props,
ccc.getSolrCloudManager(),
ccc.getZkStateReader());
} else {
ccc.offerStateUpdate(props);
}
reindexingState.put(STATE, State.FINISHED.toLower());
reindexingState.put(PHASE, "done");
removeReindexingState(collection);
} catch (Exception e) {
log.warn("Error during reindexing of {}", extCollection, e);
exc = e;
aborted = true;
} finally {
if (aborted) {
cleanup(
collection,
targetCollection,
chkCollection,
daemonReplica,
targetCollection,
createdTarget);
if (exc != null) {
results.add("error", exc.toString());
}
reindexingState.put(STATE, State.ABORTED.toLower());
}
results.add(REINDEX_STATUS, reindexingState);
}
}
private static final String REINDEXING_STATE_PATH = "/.reindexing";
private Map<String, Object> setReindexingState(
String collection, State state, Map<String, Object> props) throws Exception {
String path = ZkStateReader.COLLECTIONS_ZKNODE + "/" + collection + REINDEXING_STATE_PATH;
DistribStateManager stateManager = ccc.getSolrCloudManager().getDistribStateManager();
if (props == null) { // retrieve existing props, if any
props = stateManager.getJson(path);
}
Map<String, Object> copyProps = new HashMap<>(props);
copyProps.put("state", state.toLower());
if (stateManager.hasData(path)) {
stateManager.setData(path, Utils.toJSON(copyProps), -1);
} else {
stateManager.makePath(path, Utils.toJSON(copyProps), CreateMode.PERSISTENT, false);
}
return copyProps;
}
private void removeReindexingState(String collection) throws Exception {
String path = ZkStateReader.COLLECTIONS_ZKNODE + "/" + collection + REINDEXING_STATE_PATH;
DistribStateManager stateManager = ccc.getSolrCloudManager().getDistribStateManager();
if (stateManager.hasData(path)) {
stateManager.removeData(path, -1);
}
}
@VisibleForTesting
public static Map<String, Object> getReindexingState(
DistribStateManager stateManager, String collection) throws Exception {
String path = ZkStateReader.COLLECTIONS_ZKNODE + "/" + collection + REINDEXING_STATE_PATH;
// make it modifiable
return new TreeMap<>(stateManager.getJson(path));
}
private long getNumberOfDocs(String collection) {
var solrClient = ccc.getCoreContainer().getZkController().getSolrClient();
try {
ModifiableSolrParams params = new ModifiableSolrParams();
params.add(CommonParams.Q, "*:*");
params.add(CommonParams.ROWS, "0");
QueryResponse rsp = solrClient.query(collection, params);
return rsp.getResults().getNumFound();
} catch (Exception e) {
return 0L;
}
}
private boolean maybeAbort(String collection) throws Exception {
DocCollection coll =
ccc.getSolrCloudManager().getClusterState().getCollectionOrNull(collection);
if (coll == null) {
// collection no longer present - abort
log.info("## Aborting - collection {} no longer present.", collection);
return true;
}
Map<String, Object> reindexingState =
getReindexingState(ccc.getSolrCloudManager().getDistribStateManager(), collection);
State state = State.get(reindexingState.getOrDefault(STATE, State.RUNNING.toLower()));
if (state != State.ABORTED) {
return false;
}
log.info("## Aborting - collection {} state is {}", collection, state);
return true;
}
// XXX see #waitForDaemon() for why we need this
private Replica getReplicaForDaemon(SolrResponse rsp, DocCollection coll) {
@SuppressWarnings({"unchecked"})
Map<String, Object> rs = (Map<String, Object>) rsp.getResponse().get("result-set");
if (rs == null || rs.isEmpty()) {
if (log.isDebugEnabled()) {
log.debug(" -- Missing daemon information in response: {}", Utils.toJSONString(rsp));
}
}
@SuppressWarnings({"unchecked"})
List<Object> list = (List<Object>) rs.get("docs");
if (list == null) {
if (log.isDebugEnabled()) {
log.debug(" -- Missing daemon information in response: {}", Utils.toJSONString(rsp));
}
return null;
}
String replicaName = null;
for (Object o : list) {
@SuppressWarnings({"unchecked"})
Map<String, Object> map = (Map<String, Object>) o;
String op = (String) map.get("DaemonOp");
if (op == null) {
continue;
}
String[] parts = op.split("\\s+");
if (parts.length != 4) {
log.debug(" -- Invalid daemon location info, expected 4 tokens: {}", op);
return null;
}
// check if it's plausible
if (parts[3].contains("shard") && parts[3].contains("replica")) {
replicaName = parts[3];
break;
} else {
log.debug(" -- daemon location info likely invalid: {}", op);
return null;
}
}
if (replicaName == null) {
return null;
}
// build a baseUrl of the replica
for (Replica r : coll.getReplicas()) {
if (replicaName.equals(r.getCoreName())) {
return r;
}
}
return null;
}
// XXX currently this is complicated to due a bug in the way the daemon 'list'
// XXX operation is implemented - see SOLR-13245. We need to query the actual
// XXX SolrCore where the daemon is running
  /**
   * Polls the daemon 'list' action on the hosting replica until the copy daemon disappears from
   * the list (finished), an abort is requested, or an error occurs.
   *
   * <p>Every 5th check the target's current document count is written to the persisted state so
   * STATUS requests can report progress.
   *
   * @param daemonName daemon id (same as the target collection name)
   * @param daemonReplica replica hosting the daemon - queried directly, see SOLR-13245 note above
   * @param sourceCollection collection whose persisted state is updated / checked for abort
   * @param targetCollection collection whose doc count is reported as "processedDocs"
   * @param reindexingState mutable state map persisted on progress updates
   * @throws SolrException wrapping any failure while querying the daemon
   */
  @SuppressWarnings({"unchecked"})
  private void waitForDaemon(
      String daemonName,
      Replica daemonReplica,
      String sourceCollection,
      String targetCollection,
      Map<String, Object> reindexingState)
      throws Exception {
    boolean isRunning;
    int statusCheck = 0;
    do {
      isRunning = false;
      statusCheck++;
      try {
        NamedList<Object> rsp = executeDaemonAction("list", daemonName, daemonReplica);
        Map<String, Object> rs = (Map<String, Object>) rsp.get("result-set");
        if (rs == null || rs.isEmpty()) {
          throw new SolrException(
              SolrException.ErrorCode.SERVER_ERROR,
              "Can't find daemon list: missing result-set: " + Utils.toJSONString(rsp));
        }
        List<Object> list = (List<Object>) rs.get("docs");
        if (list == null) {
          throw new SolrException(
              SolrException.ErrorCode.SERVER_ERROR,
              "Can't find daemon list: missing result-set: " + Utils.toJSONString(rsp));
        }
        if (list.isEmpty()) { // finished?
          break;
        }
        // the daemon is considered running while its id is still listed
        for (Object o : list) {
          Map<String, Object> map = (Map<String, Object>) o;
          String id = (String) map.get("id");
          if (daemonName.equals(id)) {
            isRunning = true;
            // fail here
            TestInjection.injectReindexFailure();
            break;
          }
        }
      } catch (Exception e) {
        throw new SolrException(
            SolrException.ErrorCode.SERVER_ERROR,
            "Exception waiting for daemon " + daemonName + " at " + daemonReplica.getCoreUrl(),
            e);
      }
      // periodically publish progress (every 5th poll)
      if (statusCheck % 5 == 0) {
        reindexingState.put("processedDocs", getNumberOfDocs(targetCollection));
        setReindexingState(sourceCollection, State.RUNNING, reindexingState);
      }
      // poll interval
      ccc.getSolrCloudManager().getTimeSource().sleep(2000);
    } while (isRunning && !maybeAbort(sourceCollection));
  }
  /**
   * Stops and then removes the streaming daemon on its hosting replica.
   *
   * <p>Issues 'stop' first, then polls 'list' until the daemon reports a positive stopTime (or a
   * 60s timeout elapses), and finally issues 'kill' to remove the stopped daemon's status entry.
   * Problems are logged, not thrown - this is best-effort cleanup.
   */
  @SuppressWarnings({"unchecked"})
  private void killDaemon(String daemonName, Replica daemonReplica) throws Exception {
    if (log.isDebugEnabled()) {
      log.debug("-- killing daemon {} at {}", daemonName, daemonReplica.getCoreUrl());
    }
    // we should really use 'kill' here, but then we will never
    // know when the daemon actually finishes running - 'kill' only
    // sets a flag that may be noticed much later
    NamedList<Object> rsp = executeDaemonAction("stop", daemonName, daemonReplica);
    // /result-set/docs/[0]/DaemonOp : Deamon:id killed on coreName
    if (log.isDebugEnabled()) {
      log.debug(" -- stop daemon response: {}", Utils.toJSONString(rsp));
    }
    Map<String, Object> rs = (Map<String, Object>) rsp.get("result-set");
    if (rs == null || rs.isEmpty()) {
      log.warn(
          "Problem killing daemon {}: missing result-set: {}", daemonName, Utils.toJSONString(rsp));
      return;
    }
    List<Object> list = (List<Object>) rs.get("docs");
    if (list == null) {
      log.warn(
          "Problem killing daemon {}: missing result-set: {}", daemonName, Utils.toJSONString(rsp));
      return;
    }
    if (list.isEmpty()) { // already finished?
      return;
    }
    for (Object o : list) {
      Map<String, Object> map = (Map<String, Object>) o;
      String op = (String) map.get("DaemonOp");
      if (op == null) {
        continue;
      }
      if (op.contains(daemonName) && op.contains("stopped")) {
        // now wait for the daemon to really stop
        TimeOut timeOut =
            new TimeOut(60, TimeUnit.SECONDS, ccc.getSolrCloudManager().getTimeSource());
        while (!timeOut.hasTimedOut()) {
          rsp = executeDaemonAction("list", daemonName, daemonReplica);
          rs = (Map<String, Object>) rsp.get("result-set");
          if (rs == null || rs.isEmpty()) {
            log.warn(
                "Problem killing daemon {}: missing result-set: {}",
                daemonName,
                Utils.toJSONString(rsp));
            break;
          }
          List<Object> list2 = (List<Object>) rs.get("docs");
          if (list2 == null) {
            log.warn(
                "Problem killing daemon {}: missing result-set: {}",
                daemonName,
                Utils.toJSONString(rsp));
            break;
          }
          if (list2.isEmpty()) { // already finished?
            break;
          }
          // locate this daemon's status entry in the list
          Map<String, Object> status2 = null;
          for (Object o2 : list2) {
            Map<String, Object> map2 = (Map<String, Object>) o2;
            if (daemonName.equals(map2.get("id"))) {
              status2 = map2;
              break;
            }
          }
          if (status2 == null) { // finished?
            break;
          }
          // NOTE(review): assumes 'stopTime' is always present in a listed daemon's status -
          // a missing value would NPE here; confirm against the /stream daemon response format.
          Number stopTime = (Number) status2.get("stopTime");
          if (stopTime.longValue() > 0) {
            break;
          }
        }
        if (timeOut.hasTimedOut()) {
          log.warn("Problem killing daemon {}: timed out waiting for daemon to stop.", daemonName);
          // proceed anyway
        }
      }
    }
    // now kill it - it's already stopped, this simply removes its status
    executeDaemonAction("kill", daemonName, daemonReplica);
  }
private NamedList<Object> executeDaemonAction(
String action, String daemonName, Replica daemonReplica) throws Exception {
final var solrClient = ccc.getCoreContainer().getDefaultHttpSolrClient();
final var solrParams = new ModifiableSolrParams();
solrParams.set("action", action);
solrParams.set(CommonParams.ID, daemonName);
solrParams.set(CommonParams.DISTRIB, false);
final var req =
new GenericSolrRequest(
SolrRequest.METHOD.POST, "/stream", SolrRequest.SolrRequestType.ADMIN, solrParams)
.setRequiresCollection(true);
final var solrResponse =
solrClient.requestWithBaseUrl(daemonReplica.getBaseUrl(), daemonReplica.getCoreName(), req);
return solrResponse.getResponse();
}
  /**
   * Best-effort cleanup after an error or abort: kills the copy daemon, deletes the target
   * collection (only if this command created it) and the checkpoint collection, clears the
   * readOnly flag on the source and removes the persisted reindexing state.
   */
  private void cleanup(
      String collection,
      String targetCollection,
      String chkCollection,
      Replica daemonReplica,
      String daemonName,
      boolean createdTarget)
      throws Exception {
    log.info("## Cleaning up after abort or error");
    // 1. kill the daemon
    // 2. cleanup target / chk collections IFF the source collection still exists and is not empty
    // 3. cleanup collection state
    if (daemonReplica != null) {
      killDaemon(daemonName, daemonReplica);
    }
    ClusterState clusterState = ccc.getSolrCloudManager().getClusterState();
    NamedList<Object> cmdResults = new NamedList<>();
    // only delete the target if we created it and it is distinct from the source
    if (createdTarget
        && !collection.equals(targetCollection)
        && clusterState.hasCollection(targetCollection)) {
      log.debug(" -- removing {}", targetCollection);
      ZkNodeProps cmd =
          new ZkNodeProps(
              Overseer.QUEUE_OPERATION,
              CollectionParams.CollectionAction.DELETE.toLower(),
              CommonParams.NAME,
              targetCollection,
              FOLLOW_ALIASES,
              "false");
      new DeleteCollectionCmd(ccc).call(clusterState, cmd, cmdResults);
      CollectionHandlingUtils.checkResults(
          "CLEANUP: deleting target collection " + targetCollection, cmdResults, false);
    }
    // remove chk collection
    if (clusterState.hasCollection(chkCollection)) {
      log.debug(" -- removing {}", chkCollection);
      ZkNodeProps cmd =
          new ZkNodeProps(
              Overseer.QUEUE_OPERATION,
              CollectionParams.CollectionAction.DELETE.toLower(),
              CommonParams.NAME,
              chkCollection,
              FOLLOW_ALIASES,
              "false");
      cmdResults = new NamedList<>();
      new DeleteCollectionCmd(ccc).call(clusterState, cmd, cmdResults);
      CollectionHandlingUtils.checkResults(
          "CLEANUP: deleting checkpoint collection " + chkCollection, cmdResults, false);
    }
    log.debug(" -- turning readOnly mode off for {}", collection);
    ZkNodeProps props =
        new ZkNodeProps(
            Overseer.QUEUE_OPERATION,
            CollectionParams.CollectionAction.MODIFYCOLLECTION.toLower(),
            ZkStateReader.COLLECTION_PROP,
            collection,
            ZkStateReader.READ_ONLY,
            null);
    if (ccc.getDistributedClusterStateUpdater().isDistributedStateUpdate()) {
      ccc.getDistributedClusterStateUpdater()
          .doSingleStateUpdate(
              DistributedClusterStateUpdater.MutatingCommand.CollectionModifyCollection,
              props,
              ccc.getSolrCloudManager(),
              ccc.getZkStateReader());
    } else {
      ccc.offerStateUpdate(props);
    }
    removeReindexingState(collection);
  }
}
|
googleapis/google-cloud-java | 36,688 | java-shopping-merchant-accounts/proto-google-shopping-merchant-accounts-v1beta/src/main/java/com/google/shopping/merchant/accounts/v1beta/ListUsersResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/shopping/merchant/accounts/v1beta/user.proto
// Protobuf Java Version: 3.25.8
package com.google.shopping.merchant.accounts.v1beta;
/**
*
*
* <pre>
* Response message for the `ListUsers` method.
* </pre>
*
* Protobuf type {@code google.shopping.merchant.accounts.v1beta.ListUsersResponse}
*/
public final class ListUsersResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.shopping.merchant.accounts.v1beta.ListUsersResponse)
ListUsersResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListUsersResponse.newBuilder() to construct.
  // Builder-based constructor invoked by newBuilder()/build().
  private ListUsersResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // No-arg constructor for the default (empty) instance: no users, empty page token.
  private ListUsersResponse() {
    users_ = java.util.Collections.emptyList();
    nextPageToken_ = "";
  }
  // Generated factory hook used by the protobuf runtime to create fresh instances.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListUsersResponse();
  }
  // Message descriptor for ListUsersResponse, defined in UserProto.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.shopping.merchant.accounts.v1beta.UserProto
        .internal_static_google_shopping_merchant_accounts_v1beta_ListUsersResponse_descriptor;
  }
  // Maps descriptor fields to this class and its Builder for reflective access.
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.shopping.merchant.accounts.v1beta.UserProto
        .internal_static_google_shopping_merchant_accounts_v1beta_ListUsersResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.shopping.merchant.accounts.v1beta.ListUsersResponse.class,
            com.google.shopping.merchant.accounts.v1beta.ListUsersResponse.Builder.class);
  }
public static final int USERS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.shopping.merchant.accounts.v1beta.User> users_;
/**
*
*
* <pre>
* The users from the specified account.
* </pre>
*
* <code>repeated .google.shopping.merchant.accounts.v1beta.User users = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.shopping.merchant.accounts.v1beta.User> getUsersList() {
return users_;
}
/**
*
*
* <pre>
* The users from the specified account.
* </pre>
*
* <code>repeated .google.shopping.merchant.accounts.v1beta.User users = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.shopping.merchant.accounts.v1beta.UserOrBuilder>
getUsersOrBuilderList() {
return users_;
}
/**
*
*
* <pre>
* The users from the specified account.
* </pre>
*
* <code>repeated .google.shopping.merchant.accounts.v1beta.User users = 1;</code>
*/
@java.lang.Override
public int getUsersCount() {
return users_.size();
}
/**
*
*
* <pre>
* The users from the specified account.
* </pre>
*
* <code>repeated .google.shopping.merchant.accounts.v1beta.User users = 1;</code>
*/
@java.lang.Override
public com.google.shopping.merchant.accounts.v1beta.User getUsers(int index) {
return users_.get(index);
}
/**
*
*
* <pre>
* The users from the specified account.
* </pre>
*
* <code>repeated .google.shopping.merchant.accounts.v1beta.User users = 1;</code>
*/
@java.lang.Override
public com.google.shopping.merchant.accounts.v1beta.UserOrBuilder getUsersOrBuilder(int index) {
return users_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < users_.size(); i++) {
output.writeMessage(1, users_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < users_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, users_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.shopping.merchant.accounts.v1beta.ListUsersResponse)) {
return super.equals(obj);
}
com.google.shopping.merchant.accounts.v1beta.ListUsersResponse other =
(com.google.shopping.merchant.accounts.v1beta.ListUsersResponse) obj;
if (!getUsersList().equals(other.getUsersList())) return false;
if (!getNextPageToken().equals(other.getNextPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getUsersCount() > 0) {
hash = (37 * hash) + USERS_FIELD_NUMBER;
hash = (53 * hash) + getUsersList().hashCode();
}
hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getNextPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.shopping.merchant.accounts.v1beta.ListUsersResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.shopping.merchant.accounts.v1beta.ListUsersResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.shopping.merchant.accounts.v1beta.ListUsersResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.shopping.merchant.accounts.v1beta.ListUsersResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.shopping.merchant.accounts.v1beta.ListUsersResponse parseFrom(
byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.shopping.merchant.accounts.v1beta.ListUsersResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.shopping.merchant.accounts.v1beta.ListUsersResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.shopping.merchant.accounts.v1beta.ListUsersResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.shopping.merchant.accounts.v1beta.ListUsersResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.shopping.merchant.accounts.v1beta.ListUsersResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.shopping.merchant.accounts.v1beta.ListUsersResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.shopping.merchant.accounts.v1beta.ListUsersResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.shopping.merchant.accounts.v1beta.ListUsersResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Response message for the `ListUsers` method.
* </pre>
*
* Protobuf type {@code google.shopping.merchant.accounts.v1beta.ListUsersResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.shopping.merchant.accounts.v1beta.ListUsersResponse)
com.google.shopping.merchant.accounts.v1beta.ListUsersResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.shopping.merchant.accounts.v1beta.UserProto
.internal_static_google_shopping_merchant_accounts_v1beta_ListUsersResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.shopping.merchant.accounts.v1beta.UserProto
.internal_static_google_shopping_merchant_accounts_v1beta_ListUsersResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.shopping.merchant.accounts.v1beta.ListUsersResponse.class,
com.google.shopping.merchant.accounts.v1beta.ListUsersResponse.Builder.class);
}
// Construct using com.google.shopping.merchant.accounts.v1beta.ListUsersResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (usersBuilder_ == null) {
users_ = java.util.Collections.emptyList();
} else {
users_ = null;
usersBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.shopping.merchant.accounts.v1beta.UserProto
.internal_static_google_shopping_merchant_accounts_v1beta_ListUsersResponse_descriptor;
}
@java.lang.Override
public com.google.shopping.merchant.accounts.v1beta.ListUsersResponse
getDefaultInstanceForType() {
return com.google.shopping.merchant.accounts.v1beta.ListUsersResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.shopping.merchant.accounts.v1beta.ListUsersResponse build() {
com.google.shopping.merchant.accounts.v1beta.ListUsersResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.shopping.merchant.accounts.v1beta.ListUsersResponse buildPartial() {
com.google.shopping.merchant.accounts.v1beta.ListUsersResponse result =
new com.google.shopping.merchant.accounts.v1beta.ListUsersResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.shopping.merchant.accounts.v1beta.ListUsersResponse result) {
if (usersBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
users_ = java.util.Collections.unmodifiableList(users_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.users_ = users_;
} else {
result.users_ = usersBuilder_.build();
}
}
private void buildPartial0(
com.google.shopping.merchant.accounts.v1beta.ListUsersResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.shopping.merchant.accounts.v1beta.ListUsersResponse) {
return mergeFrom((com.google.shopping.merchant.accounts.v1beta.ListUsersResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.shopping.merchant.accounts.v1beta.ListUsersResponse other) {
if (other
== com.google.shopping.merchant.accounts.v1beta.ListUsersResponse.getDefaultInstance())
return this;
if (usersBuilder_ == null) {
if (!other.users_.isEmpty()) {
if (users_.isEmpty()) {
users_ = other.users_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureUsersIsMutable();
users_.addAll(other.users_);
}
onChanged();
}
} else {
if (!other.users_.isEmpty()) {
if (usersBuilder_.isEmpty()) {
usersBuilder_.dispose();
usersBuilder_ = null;
users_ = other.users_;
bitField0_ = (bitField0_ & ~0x00000001);
usersBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getUsersFieldBuilder()
: null;
} else {
usersBuilder_.addAllMessages(other.users_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.shopping.merchant.accounts.v1beta.User m =
input.readMessage(
com.google.shopping.merchant.accounts.v1beta.User.parser(),
extensionRegistry);
if (usersBuilder_ == null) {
ensureUsersIsMutable();
users_.add(m);
} else {
usersBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.shopping.merchant.accounts.v1beta.User> users_ =
java.util.Collections.emptyList();
private void ensureUsersIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
users_ = new java.util.ArrayList<com.google.shopping.merchant.accounts.v1beta.User>(users_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.shopping.merchant.accounts.v1beta.User,
com.google.shopping.merchant.accounts.v1beta.User.Builder,
com.google.shopping.merchant.accounts.v1beta.UserOrBuilder>
usersBuilder_;
/**
*
*
* <pre>
* The users from the specified account.
* </pre>
*
* <code>repeated .google.shopping.merchant.accounts.v1beta.User users = 1;</code>
*/
public java.util.List<com.google.shopping.merchant.accounts.v1beta.User> getUsersList() {
if (usersBuilder_ == null) {
return java.util.Collections.unmodifiableList(users_);
} else {
return usersBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* The users from the specified account.
* </pre>
*
* <code>repeated .google.shopping.merchant.accounts.v1beta.User users = 1;</code>
*/
public int getUsersCount() {
if (usersBuilder_ == null) {
return users_.size();
} else {
return usersBuilder_.getCount();
}
}
/**
*
*
* <pre>
* The users from the specified account.
* </pre>
*
* <code>repeated .google.shopping.merchant.accounts.v1beta.User users = 1;</code>
*/
public com.google.shopping.merchant.accounts.v1beta.User getUsers(int index) {
if (usersBuilder_ == null) {
return users_.get(index);
} else {
return usersBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* The users from the specified account.
* </pre>
*
* <code>repeated .google.shopping.merchant.accounts.v1beta.User users = 1;</code>
*/
public Builder setUsers(int index, com.google.shopping.merchant.accounts.v1beta.User value) {
if (usersBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureUsersIsMutable();
users_.set(index, value);
onChanged();
} else {
usersBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The users from the specified account.
* </pre>
*
* <code>repeated .google.shopping.merchant.accounts.v1beta.User users = 1;</code>
*/
public Builder setUsers(
int index, com.google.shopping.merchant.accounts.v1beta.User.Builder builderForValue) {
if (usersBuilder_ == null) {
ensureUsersIsMutable();
users_.set(index, builderForValue.build());
onChanged();
} else {
usersBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The users from the specified account.
* </pre>
*
* <code>repeated .google.shopping.merchant.accounts.v1beta.User users = 1;</code>
*/
public Builder addUsers(com.google.shopping.merchant.accounts.v1beta.User value) {
if (usersBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureUsersIsMutable();
users_.add(value);
onChanged();
} else {
usersBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* The users from the specified account.
* </pre>
*
* <code>repeated .google.shopping.merchant.accounts.v1beta.User users = 1;</code>
*/
public Builder addUsers(int index, com.google.shopping.merchant.accounts.v1beta.User value) {
if (usersBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureUsersIsMutable();
users_.add(index, value);
onChanged();
} else {
usersBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The users from the specified account.
* </pre>
*
* <code>repeated .google.shopping.merchant.accounts.v1beta.User users = 1;</code>
*/
public Builder addUsers(
com.google.shopping.merchant.accounts.v1beta.User.Builder builderForValue) {
if (usersBuilder_ == null) {
ensureUsersIsMutable();
users_.add(builderForValue.build());
onChanged();
} else {
usersBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The users from the specified account.
* </pre>
*
* <code>repeated .google.shopping.merchant.accounts.v1beta.User users = 1;</code>
*/
public Builder addUsers(
int index, com.google.shopping.merchant.accounts.v1beta.User.Builder builderForValue) {
if (usersBuilder_ == null) {
ensureUsersIsMutable();
users_.add(index, builderForValue.build());
onChanged();
} else {
usersBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The users from the specified account.
* </pre>
*
* <code>repeated .google.shopping.merchant.accounts.v1beta.User users = 1;</code>
*/
public Builder addAllUsers(
java.lang.Iterable<? extends com.google.shopping.merchant.accounts.v1beta.User> values) {
if (usersBuilder_ == null) {
ensureUsersIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, users_);
onChanged();
} else {
usersBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* The users from the specified account.
* </pre>
*
* <code>repeated .google.shopping.merchant.accounts.v1beta.User users = 1;</code>
*/
public Builder clearUsers() {
if (usersBuilder_ == null) {
users_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
usersBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* The users from the specified account.
* </pre>
*
* <code>repeated .google.shopping.merchant.accounts.v1beta.User users = 1;</code>
*/
public Builder removeUsers(int index) {
if (usersBuilder_ == null) {
ensureUsersIsMutable();
users_.remove(index);
onChanged();
} else {
usersBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* The users from the specified account.
* </pre>
*
* <code>repeated .google.shopping.merchant.accounts.v1beta.User users = 1;</code>
*/
public com.google.shopping.merchant.accounts.v1beta.User.Builder getUsersBuilder(int index) {
return getUsersFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* The users from the specified account.
* </pre>
*
* <code>repeated .google.shopping.merchant.accounts.v1beta.User users = 1;</code>
*/
public com.google.shopping.merchant.accounts.v1beta.UserOrBuilder getUsersOrBuilder(int index) {
if (usersBuilder_ == null) {
return users_.get(index);
} else {
return usersBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* The users from the specified account.
* </pre>
*
* <code>repeated .google.shopping.merchant.accounts.v1beta.User users = 1;</code>
*/
public java.util.List<? extends com.google.shopping.merchant.accounts.v1beta.UserOrBuilder>
getUsersOrBuilderList() {
if (usersBuilder_ != null) {
return usersBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(users_);
}
}
/**
*
*
* <pre>
* The users from the specified account.
* </pre>
*
* <code>repeated .google.shopping.merchant.accounts.v1beta.User users = 1;</code>
*/
public com.google.shopping.merchant.accounts.v1beta.User.Builder addUsersBuilder() {
return getUsersFieldBuilder()
.addBuilder(com.google.shopping.merchant.accounts.v1beta.User.getDefaultInstance());
}
/**
*
*
* <pre>
* The users from the specified account.
* </pre>
*
* <code>repeated .google.shopping.merchant.accounts.v1beta.User users = 1;</code>
*/
public com.google.shopping.merchant.accounts.v1beta.User.Builder addUsersBuilder(int index) {
return getUsersFieldBuilder()
.addBuilder(
index, com.google.shopping.merchant.accounts.v1beta.User.getDefaultInstance());
}
/**
*
*
* <pre>
* The users from the specified account.
* </pre>
*
* <code>repeated .google.shopping.merchant.accounts.v1beta.User users = 1;</code>
*/
public java.util.List<com.google.shopping.merchant.accounts.v1beta.User.Builder>
getUsersBuilderList() {
return getUsersFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.shopping.merchant.accounts.v1beta.User,
com.google.shopping.merchant.accounts.v1beta.User.Builder,
com.google.shopping.merchant.accounts.v1beta.UserOrBuilder>
getUsersFieldBuilder() {
if (usersBuilder_ == null) {
usersBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.shopping.merchant.accounts.v1beta.User,
com.google.shopping.merchant.accounts.v1beta.User.Builder,
com.google.shopping.merchant.accounts.v1beta.UserOrBuilder>(
users_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
users_ = null;
}
return usersBuilder_;
}
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.shopping.merchant.accounts.v1beta.ListUsersResponse)
}
// @@protoc_insertion_point(class_scope:google.shopping.merchant.accounts.v1beta.ListUsersResponse)
private static final com.google.shopping.merchant.accounts.v1beta.ListUsersResponse
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.shopping.merchant.accounts.v1beta.ListUsersResponse();
}
public static com.google.shopping.merchant.accounts.v1beta.ListUsersResponse
getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ListUsersResponse> PARSER =
new com.google.protobuf.AbstractParser<ListUsersResponse>() {
@java.lang.Override
public ListUsersResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ListUsersResponse> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListUsersResponse> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.shopping.merchant.accounts.v1beta.ListUsersResponse
getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
apache/storm | 36,982 | storm-webapp/src/main/java/org/apache/storm/daemon/logviewer/handler/LogviewerLogSearchHandler.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.storm.daemon.logviewer.handler;
import static java.util.stream.Collectors.toList;
import static org.apache.storm.daemon.utils.ListFunctionalSupport.drop;
import static java.util.stream.Collectors.toList;
import static org.apache.storm.daemon.utils.ListFunctionalSupport.drop;
import static org.apache.storm.daemon.utils.ListFunctionalSupport.first;
import static org.apache.storm.daemon.utils.ListFunctionalSupport.last;
import static org.apache.storm.daemon.utils.ListFunctionalSupport.rest;
import static org.apache.storm.daemon.utils.PathUtil.truncatePathToLastElements;
import com.codahale.metrics.Histogram;
import com.codahale.metrics.Meter;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.annotations.VisibleForTesting;
import jakarta.ws.rs.core.Response;
import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import java.net.UnknownHostException;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.attribute.FileTime;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.regex.Pattern;
import java.util.stream.Stream;
import java.util.zip.GZIPInputStream;
import net.minidev.json.JSONAware;
import org.apache.commons.lang.BooleanUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.storm.DaemonConfig;
import org.apache.storm.daemon.common.JsonResponseBuilder;
import org.apache.storm.daemon.logviewer.LogviewerConstant;
import org.apache.storm.daemon.logviewer.utils.DirectoryCleaner;
import org.apache.storm.daemon.logviewer.utils.ExceptionMeterNames;
import org.apache.storm.daemon.logviewer.utils.LogviewerResponseBuilder;
import org.apache.storm.daemon.logviewer.utils.ResourceAuthorizer;
import org.apache.storm.daemon.logviewer.utils.WorkerLogs;
import org.apache.storm.daemon.supervisor.SupervisorUtils;
import org.apache.storm.daemon.ui.InvalidRequestException;
import org.apache.storm.daemon.utils.StreamUtil;
import org.apache.storm.daemon.utils.UrlBuilder;
import org.apache.storm.metric.StormMetricsRegistry;
import org.apache.storm.utils.ObjectReader;
import org.apache.storm.utils.ServerUtils;
import org.apache.storm.utils.Utils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class LogviewerLogSearchHandler {
    private static final Logger LOG = LoggerFactory.getLogger(LogviewerLogSearchHandler.class);
    // Maximum size of a search string, in UTF-8 bytes; also the unit by which the grep buffer is rotated.
    public static final int GREP_MAX_SEARCH_SIZE = 1024;
    // Size of the read buffer used while grepping (two rotation units of GREP_MAX_SEARCH_SIZE).
    public static final int GREP_BUF_SIZE = 2048;
    // Number of context bytes captured before and after each match for display.
    public static final int GREP_CONTEXT_SIZE = 128;
    // Matches file names beginning with "worker.log".
    // NOTE(review): the '.' is unescaped, so names like "workerXlog" also match — presumably
    // "worker\\.log(.*)" was intended; confirm before tightening.
    public static final Pattern WORKER_LOG_FILENAME_PATTERN = Pattern.compile("^worker.log(.*)");
    // Metric: deep (multi-file) searches that produced no match at all.
    private final Meter numDeepSearchNoResult;
    // Metric: number of files scanned per deep search.
    private final Histogram numFileScanned;
    // Metric: single-file search requests that produced no match.
    private final Meter numSearchRequestNoResult;
    // Metric: failures to open a log file.
    private final Meter numFileOpenExceptions;
    // Metric: failures while reading an opened log file.
    private final Meter numFileReadExceptions;
    private final Map<String, Object> stormConf;
    // Normalized absolute root directory of worker logs.
    private final Path logRoot;
    // Normalized absolute root directory of daemon logs.
    private final Path daemonLogRoot;
    private final ResourceAuthorizer resourceAuthorizer;
    private final Integer logviewerPort;
    // "http" or "https", chosen in the constructor depending on whether an HTTPS port is configured.
    private final String scheme;
    private final DirectoryCleaner directoryCleaner;
    /**
     * Constructor.
     *
     * @param stormConf storm configuration
     * @param logRoot log root directory
     * @param daemonLogRoot daemon log root directory
     * @param resourceAuthorizer {@link ResourceAuthorizer}
     * @param metricsRegistry The logviewer metrics registry
     */
public LogviewerLogSearchHandler(Map<String, Object> stormConf, Path logRoot, Path daemonLogRoot,
ResourceAuthorizer resourceAuthorizer, StormMetricsRegistry metricsRegistry) {
this.stormConf = stormConf;
this.logRoot = logRoot.toAbsolutePath().normalize();
this.daemonLogRoot = daemonLogRoot.toAbsolutePath().normalize();
this.resourceAuthorizer = resourceAuthorizer;
Object httpsPort = stormConf.get(DaemonConfig.LOGVIEWER_HTTPS_PORT);
if (httpsPort == null) {
this.logviewerPort = ObjectReader.getInt(stormConf.get(DaemonConfig.LOGVIEWER_PORT));
this.scheme = "http";
} else {
this.logviewerPort = ObjectReader.getInt(httpsPort);
this.scheme = "https";
}
this.numDeepSearchNoResult = metricsRegistry.registerMeter("logviewer:num-deep-search-no-result");
this.numFileScanned = metricsRegistry.registerHistogram("logviewer:num-files-scanned-per-deep-search");
this.numSearchRequestNoResult = metricsRegistry.registerMeter("logviewer:num-search-request-no-result");
this.numFileOpenExceptions = metricsRegistry.registerMeter(ExceptionMeterNames.NUM_FILE_OPEN_EXCEPTIONS);
this.numFileReadExceptions = metricsRegistry.registerMeter(ExceptionMeterNames.NUM_FILE_READ_EXCEPTIONS);
this.directoryCleaner = new DirectoryCleaner(metricsRegistry);
}
/**
* Search from a worker log file.
*
* @param fileName log file
* @param user username
* @param isDaemon whether the log file is regarding worker or daemon
* @param search search string
* @param numMatchesStr the count of maximum matches
* @param offsetStr start offset for log file
* @param callback callbackParameterName for JSONP
* @param origin origin
* @return Response containing JSON content representing search result
*/
public Response searchLogFile(String fileName, String user, boolean isDaemon, String search,
String numMatchesStr, String offsetStr, String callback, String origin)
throws IOException, InvalidRequestException {
boolean noResult = true;
Path rootDir = isDaemon ? daemonLogRoot : logRoot;
Path rawFile = rootDir.resolve(fileName);
Path absFile = rawFile.toAbsolutePath().normalize();
if (!absFile.startsWith(rootDir) || !rawFile.normalize().toString().equals(rawFile.toString())) {
//Ensure filename doesn't contain ../ parts
return searchLogFileNotFound(callback);
}
if (isDaemon && Paths.get(fileName).getNameCount() != 1) {
//Don't permit path traversal for calls intended to read from the daemon logs
return searchLogFileNotFound(callback);
}
Response response;
if (absFile.toFile().exists()) {
if (isDaemon || resourceAuthorizer.isUserAllowedToAccessFile(user, fileName)) {
Integer numMatchesInt = numMatchesStr != null ? tryParseIntParam("num-matches", numMatchesStr) : null;
Integer offsetInt = offsetStr != null ? tryParseIntParam("start-byte-offset", offsetStr) : null;
try {
if (StringUtils.isNotEmpty(search) && search.getBytes("UTF-8").length <= GREP_MAX_SEARCH_SIZE) {
Map<String, Object> entity = new HashMap<>();
entity.put("isDaemon", isDaemon ? "yes" : "no");
Map<String, Object> res = substringSearch(absFile, search, isDaemon, numMatchesInt, offsetInt);
entity.putAll(res);
noResult = ((List) res.get("matches")).isEmpty();
response = LogviewerResponseBuilder.buildSuccessJsonResponse(entity, callback, origin);
} else {
throw new InvalidRequestException("Search substring must be between 1 and 1024 "
+ "UTF-8 bytes in size (inclusive)");
}
} catch (Exception ex) {
response = LogviewerResponseBuilder.buildExceptionJsonResponse(ex, callback);
}
} else {
// unauthorized
response = LogviewerResponseBuilder.buildUnauthorizedUserJsonResponse(user, callback);
}
} else {
response = searchLogFileNotFound(callback);
}
if (noResult) {
numSearchRequestNoResult.mark();
}
return response;
}
private Response searchLogFileNotFound(String callback) {
Map<String, String> entity = new HashMap<>();
entity.put("error", "Not Found");
entity.put("errorMessage", "The file was not found on this node.");
return new JsonResponseBuilder().setData(entity).setCallback(callback).setStatus(404).build();
}
    /**
     * Advanced search across worker log files in a topology.
     *
     * @param topologyId topology ID
     * @param user username
     * @param search search string
     * @param numMatchesStr the count of maximum matches. Note that this number is with respect to each port, not to each log or each search
     * request
     * @param portStr worker port, null or '*' if the request wants to search from all worker logs
     * @param fileOffsetStr index (offset) of the log files
     * @param offsetStr start offset for log file
     * @param searchArchived true if the request wants to search also archived files, false if not
     * @param callback callbackParameterName for JSONP
     * @param origin origin
     * @return Response containing JSON content representing search result
     */
    public Response deepSearchLogsForTopology(String topologyId, String user, String search,
            String numMatchesStr, String portStr, String fileOffsetStr, String offsetStr,
            Boolean searchArchived, String callback, String origin) throws IOException {
        int numMatchedFiles = 0;
        int numScannedFiles = 0;
        // Deep search is rooted at the worker log root only (never daemon logs).
        Path rootDir = logRoot;
        Path absTopoDir = rootDir.resolve(topologyId).toAbsolutePath().normalize();
        Object returnValue;
        // Empty search, missing topology dir, or a topologyId escaping the log root => empty result.
        if (StringUtils.isEmpty(search) || !absTopoDir.toFile().exists() || !absTopoDir.startsWith(rootDir)) {
            returnValue = new ArrayList<>();
        } else {
            int fileOffset = ObjectReader.getInt(fileOffsetStr, 0);
            int offset = ObjectReader.getInt(offsetStr, 0);
            int numMatches = ObjectReader.getInt(numMatchesStr, 1);
            if (StringUtils.isEmpty(portStr) || portStr.equals("*")) {
                try (Stream<Path> topoDir = Files.list(absTopoDir)) {
                    // check for all ports
                    Stream<List<Path>> portsOfLogs = topoDir
                        .map(portDir -> logsForPort(user, portDir))
                        .filter(logs -> logs != null && !logs.isEmpty());
                    // Unless archived logs were requested, keep only the most recent log per port
                    // (logsForPort sorts newest-first, so first(fl) is the newest).
                    if (BooleanUtils.isNotTrue(searchArchived)) {
                        portsOfLogs = portsOfLogs.map(fl -> Collections.singletonList(first(fl)));
                    }
                    final List<Matched> matchedList = portsOfLogs
                        .map(logs -> findNMatches(logs, numMatches, 0, 0, search))
                        .collect(toList());
                    numMatchedFiles = matchedList.stream().mapToInt(match -> match.getMatches().size()).sum();
                    numScannedFiles = matchedList.stream().mapToInt(match -> match.openedFiles).sum();
                    returnValue = matchedList;
                }
            } else {
                int port = Integer.parseInt(portStr);
                // check just the one port
                @SuppressWarnings("unchecked")
                List<Integer> slotsPorts = SupervisorUtils.getSlotsPorts(stormConf);
                // Only ports actually configured as supervisor slots may be searched.
                boolean containsPort = slotsPorts.stream()
                        .anyMatch(slotPort -> slotPort != null && (slotPort == port));
                if (!containsPort) {
                    returnValue = new ArrayList<>();
                } else {
                    Path absPortDir = absTopoDir.resolve(Integer.toString(port)).toAbsolutePath().normalize();
                    if (!absPortDir.toFile().exists()
                            || !absPortDir.startsWith(absTopoDir)) {
                        returnValue = new ArrayList<>();
                    } else {
                        List<Path> filteredLogs = logsForPort(user, absPortDir);
                        if (BooleanUtils.isNotTrue(searchArchived)) {
                            // Only the newest log is searched, so any file offset is meaningless — reset it.
                            filteredLogs = Collections.singletonList(first(filteredLogs));
                            fileOffset = 0;
                        }
                        returnValue = findNMatches(filteredLogs, numMatches, fileOffset, offset, search);
                        numMatchedFiles = ((Matched) returnValue).getMatches().size();
                        numScannedFiles = ((Matched) returnValue).openedFiles;
                    }
                }
            }
        }
        if (numMatchedFiles == 0) {
            numDeepSearchNoResult.mark();
        }
        numFileScanned.update(numScannedFiles);
        return LogviewerResponseBuilder.buildSuccessJsonResponse(returnValue, callback, origin);
    }
private Integer tryParseIntParam(String paramName, String value) throws InvalidRequestException {
try {
return Integer.parseInt(value);
} catch (NumberFormatException e) {
throw new InvalidRequestException("Could not parse " + paramName + " to an integer");
}
}
    /** Convenience overload: worker-log search with defaults of 10 matches starting at byte 0. */
    @VisibleForTesting
    Map<String, Object> substringSearch(Path file, String searchString) throws InvalidRequestException {
        return substringSearch(file, searchString, false, 10, 0);
    }
    /** Convenience overload: worker-log search from byte 0 with an explicit match limit. */
    @VisibleForTesting
    Map<String, Object> substringSearch(Path file, String searchString, int numMatches) throws InvalidRequestException {
        return substringSearch(file, searchString, false, numMatches, 0);
    }
    /** Convenience overload: worker-log search with explicit match limit and start offset. */
    @VisibleForTesting
    Map<String, Object> substringSearch(Path file,
            String searchString,
            int numMatches,
            int startByteOffset) throws InvalidRequestException {
        return substringSearch(file, searchString, false, numMatches, startByteOffset);
    }
    /**
     * Core grep implementation: scans {@code file} (transparently un-gzipping {@code .gz} files) for
     * {@code searchString}, collecting up to {@code numMatches} matches starting at {@code startByteOffset}.
     * The file is read through a 2 * GREP_MAX_SEARCH_SIZE buffer that is rotated half a buffer at a time
     * so matches spanning a read boundary are still found.
     *
     * @param file log file to search
     * @param searchString non-empty search string of at most GREP_MAX_SEARCH_SIZE UTF-8 bytes
     * @param isDaemon whether this is a daemon log (affects generated URLs and the response flag)
     * @param numMatches maximum matches to return; null means 10
     * @param startByteOffset byte offset to start from; null means 0
     * @return response map as built by mkGrepResponse, plus an "isDaemon" flag
     * @throws InvalidRequestException if the offset is past the end of the file
     */
    private Map<String, Object> substringSearch(Path file, String searchString, boolean isDaemon, Integer numMatches,
            Integer startByteOffset) throws InvalidRequestException {
        if (StringUtils.isEmpty(searchString)) {
            throw new IllegalArgumentException("Precondition fails: search string should not be empty.");
        }
        if (searchString.getBytes(StandardCharsets.UTF_8).length > GREP_MAX_SEARCH_SIZE) {
            throw new IllegalArgumentException("Precondition fails: the length of search string should be less than "
                    + GREP_MAX_SEARCH_SIZE);
        }
        boolean isZipFile = file.toString().endsWith(".gz");
        try (InputStream fis = Files.newInputStream(file)) {
            try (InputStream gzippedInputStream = isZipFile ? new GZIPInputStream(fis) : fis;
                    BufferedInputStream stream = new BufferedInputStream(gzippedInputStream)) {
                //It's more likely to be a file read exception here, so we don't differentiate
                // For gz files this is the size reported by ServerUtils.zipFileSize — presumably the
                // uncompressed length, so offsets line up with the decompressed stream; confirm.
                int fileLength = isZipFile ? (int) ServerUtils.zipFileSize(file.toFile()) : (int) Files.size(file);
                ByteBuffer buf = ByteBuffer.allocate(GREP_BUF_SIZE);
                final byte[] bufArray = buf.array();
                final byte[] searchBytes = searchString.getBytes(StandardCharsets.UTF_8);
                numMatches = numMatches != null ? numMatches : 10;
                startByteOffset = startByteOffset != null ? startByteOffset : 0;
                // Start at the part of the log file we are interested in.
                // Allow searching when start-byte-offset == file-len so it doesn't blow up on 0-length files
                if (startByteOffset > fileLength) {
                    throw new InvalidRequestException("Cannot search past the end of the file");
                }
                if (startByteOffset > 0) {
                    StreamUtil.skipBytes(stream, startByteOffset);
                }
                Arrays.fill(bufArray, (byte) 0);
                int totalBytesRead = 0;
                int bytesRead = stream.read(bufArray, 0, Math.min(fileLength, GREP_BUF_SIZE));
                buf.limit(bytesRead);
                totalBytesRead += bytesRead;
                List<Map<String, Object>> initialMatches = new ArrayList<>();
                int initBufOffset = 0;
                int byteOffset = startByteOffset;
                byte[] beforeBytes = null;
                Map<String, Object> ret = new HashMap<>();
                // Each iteration greps the current buffer contents, then either rotates the buffer
                // forward by half its size (more file, not enough matches yet) or finalizes the response.
                while (true) {
                    SubstringSearchResult searchRet = bufferSubstringSearch(isDaemon, file, fileLength, byteOffset, initBufOffset,
                            stream, startByteOffset, totalBytesRead, buf, searchBytes, initialMatches, numMatches, beforeBytes);
                    List<Map<String, Object>> matches = searchRet.getMatches();
                    Integer newByteOffset = searchRet.getNewByteOffset();
                    byte[] newBeforeBytes = searchRet.getNewBeforeBytes();
                    if (matches.size() < numMatches && totalBytesRead + startByteOffset < fileLength) {
                        // The start index is positioned to find any possible
                        // occurrence search string that did not quite fit in the
                        // buffer on the previous read.
                        final int newBufOffset = Math.min(buf.limit(), GREP_MAX_SEARCH_SIZE) - searchBytes.length;
                        totalBytesRead = rotateGrepBuffer(buf, stream, totalBytesRead, fileLength);
                        if (totalBytesRead < 0) {
                            throw new InvalidRequestException("Cannot search past the end of the file");
                        }
                        initialMatches = matches;
                        initBufOffset = newBufOffset;
                        byteOffset = newByteOffset;
                        beforeBytes = newBeforeBytes;
                    } else {
                        ret.put("isDaemon", isDaemon ? "yes" : "no");
                        Integer nextByteOffset = null;
                        // Offer a continuation offset only when there may be more to find:
                        // either the match limit was hit or the file was not fully read.
                        if (matches.size() >= numMatches || totalBytesRead < fileLength) {
                            nextByteOffset = (Integer) last(matches).get("byteOffset") + searchBytes.length;
                            if (fileLength <= nextByteOffset) {
                                nextByteOffset = null;
                            }
                        }
                        ret.putAll(mkGrepResponse(searchBytes, startByteOffset, matches, nextByteOffset));
                        break;
                    }
                }
                return ret;
            } catch (UnknownHostException | UnsupportedEncodingException e) {
                throw new RuntimeException(e);
            } catch (IOException e) {
                numFileReadExceptions.mark();
                throw new RuntimeException(e);
            }
        } catch (IOException e) {
            numFileOpenExceptions.mark();
            throw new RuntimeException(e);
        }
    }
    /** Convenience overload: daemon-log search with defaults of 10 matches starting at byte 0. */
    @VisibleForTesting
    Map<String, Object> substringSearchDaemonLog(Path file, String searchString) throws InvalidRequestException {
        return substringSearch(file, searchString, true, 10, 0);
    }
/**
* Get the filtered, authorized, sorted log files for a port.
*/
@VisibleForTesting
List<Path> logsForPort(String user, Path portDir) {
try {
List<Path> workerLogs = directoryCleaner.getFilesForDir(portDir).stream()
.filter(file -> WORKER_LOG_FILENAME_PATTERN.asPredicate().test(file.getFileName().toString()))
.collect(toList());
return workerLogs.stream()
.filter(log -> resourceAuthorizer.isUserAllowedToAccessFile(user, WorkerLogs.getTopologyPortWorkerLog(log)))
.map(p -> {
try {
return Pair.of(p, Files.getLastModifiedTime(p));
} catch (IOException e) {
throw new RuntimeException(e);
}
})
.sorted(Comparator.comparing((Pair<Path, FileTime> p) -> p.getRight()).reversed())
.map(p -> p.getLeft())
.collect(toList());
} catch (IOException e) {
throw new RuntimeException(e);
}
}
    /**
     * Find the first N matches of target string in files.
     *
     * @param logs all candidate log files to search
     * @param numMatches number of matches expected
     * @param fileOffset number of log files to skip initially
     * @param startByteOffset number of byte to be ignored in each log file
     * @param targetStr searched string
     * @return all matched results
     */
    @VisibleForTesting
    Matched findNMatches(List<Path> logs, int numMatches, int fileOffset, int startByteOffset, String targetStr) {
        logs = drop(logs, fileOffset);
        LOG.debug("{} files to scan", logs.size());
        List<Map<String, Object>> matches = new ArrayList<>();
        int matchCount = 0;
        int scannedFiles = 0;
        // Walk the remaining logs front-to-back, accumulating matches until the limit is reached
        // or every file has been scanned. fileOffset tracks progress for the caller's paging.
        while (true) {
            if (logs.isEmpty()) {
                //fileOffset = one past last scanned file
                break;
            }
            Path firstLog = logs.get(0);
            Map<String, Object> matchInLog;
            try {
                LOG.debug("Looking through {}", firstLog);
                // Only ask for the remaining number of matches still needed.
                matchInLog = substringSearch(firstLog, targetStr, numMatches - matchCount, startByteOffset);
                scannedFiles++;
            } catch (InvalidRequestException e) {
                // Offset past EOF for this file: treat as "no matches here" and move on.
                LOG.error("Can't search past end of file.", e);
                matchInLog = new HashMap<>();
            }
            String fileName = WorkerLogs.getTopologyPortWorkerLog(firstLog);
            //This section simply put the formatted log filename and corresponding port in the matching.
            final List<Map<String, Object>> newMatches = new ArrayList<>(matches);
            Map<String, Object> currentFileMatch = new HashMap<>(matchInLog);
            currentFileMatch.put("fileName", fileName);
            Path firstLogAbsPath = firstLog.toAbsolutePath().normalize();
            // The port is the directory name two levels up from the log file (.../<topo>/<port>/worker.log).
            currentFileMatch.put("port", truncatePathToLastElements(firstLogAbsPath, 2).getName(0).toString());
            newMatches.add(currentFileMatch);
            int newCount = matchCount + ((List<?>) matchInLog.getOrDefault("matches", Collections.emptyList())).size();
            if (newCount == matchCount) {
                // matches and matchCount is not changed
                logs = rest(logs);
                startByteOffset = 0;
                fileOffset = fileOffset + 1;
            } else if (newCount >= numMatches) {
                matches = newMatches;
                //fileOffset = the index of last scanned file
                break;
            } else {
                matches = newMatches;
                logs = rest(logs);
                startByteOffset = 0;
                fileOffset = fileOffset + 1;
                matchCount = newCount;
            }
        }
        LOG.debug("scanned {} files", scannedFiles);
        return new Matched(fileOffset, targetStr, matches, scannedFiles);
    }
    /**
     * As the file is read into a buffer, 1/2 the buffer's size at a time, we search the buffer for matches of the substring and return a
     * list of zero or more matches.
     *
     * @param isDaemon whether the file is a daemon log (affects URLs in match data)
     * @param file file being searched, used for match metadata
     * @param fileLength total length of the file in bytes
     * @param offsetToBuf file offset corresponding to the start of {@code haystack}
     * @param initBufOffset offset within {@code haystack} to begin searching at
     * @param stream underlying stream, used only for read-ahead context
     * @param bytesSkipped bytes skipped before the first read (the caller's start offset)
     * @param bytesRead running total of bytes read from the stream so far
     * @param haystack the current buffer contents
     * @param needle UTF-8 bytes of the search string
     * @param initialMatches matches accumulated by previous buffer rounds (mutated in place)
     * @param numMatches overall match limit
     * @param beforeBytes tail of the previous buffer, used as "before" context for early matches
     */
    private SubstringSearchResult bufferSubstringSearch(boolean isDaemon, Path file, int fileLength, int offsetToBuf,
            int initBufOffset, BufferedInputStream stream, Integer bytesSkipped,
            int bytesRead, ByteBuffer haystack, byte[] needle,
            List<Map<String, Object>> initialMatches, Integer numMatches, byte[] beforeBytes)
            throws IOException {
        int bufOffset = initBufOffset;
        List<Map<String, Object>> matches = initialMatches;
        byte[] newBeforeBytes;
        Integer newByteOffset;
        // Repeatedly find the next occurrence in this buffer until either the match limit is
        // reached or the buffer is exhausted; then compute state for the next buffer round.
        while (true) {
            int offset = offsetOfBytes(haystack.array(), needle, bufOffset);
            if (matches.size() < numMatches && offset >= 0) {
                final int fileOffset = offsetToBuf + offset;
                final int bytesNeededAfterMatch = haystack.limit() - GREP_CONTEXT_SIZE - needle.length;
                byte[] beforeArg = null;
                byte[] afterArg = null;
                // Match near the buffer start: "before" context must come from the previous buffer.
                if (offset < GREP_CONTEXT_SIZE) {
                    beforeArg = beforeBytes;
                }
                // Match near the buffer end: "after" context must be read ahead from the stream.
                if (offset > bytesNeededAfterMatch) {
                    afterArg = tryReadAhead(stream, haystack, offset, fileLength, bytesRead);
                }
                bufOffset = offset + needle.length;
                matches.add(mkMatchData(needle, haystack, offset, fileOffset,
                        file.toAbsolutePath().normalize(), isDaemon, beforeArg, afterArg));
            } else {
                // Preserve the tail of this buffer as "before" context for the next round.
                int beforeStrToOffset = Math.min(haystack.limit(), GREP_MAX_SEARCH_SIZE);
                int beforeStrFromOffset = Math.max(0, beforeStrToOffset - GREP_CONTEXT_SIZE);
                newBeforeBytes = Arrays.copyOfRange(haystack.array(), beforeStrFromOffset, beforeStrToOffset);
                // It's OK if new-byte-offset is negative.
                // This is normal if we are out of bytes to read from a small file.
                if (matches.size() >= numMatches) {
                    newByteOffset = ((Number) last(matches).get("byteOffset")).intValue() + needle.length;
                } else {
                    newByteOffset = bytesSkipped + bytesRead - GREP_MAX_SEARCH_SIZE;
                }
                break;
            }
        }
        return new SubstringSearchResult(matches, newByteOffset, newBeforeBytes);
    }
    /**
     * Slides the grep window forward half a buffer: copies the second half of {@code buf} over the
     * first half, zeroes the vacated half, and refills it from {@code stream}.
     *
     * @return the new running byte total; if the stream is at EOF, read() returns -1 and the total
     *     decreases — the caller treats a negative total as "searched past end of file"
     */
    private int rotateGrepBuffer(ByteBuffer buf, BufferedInputStream stream, int totalBytesRead, int fileLength) throws IOException {
        byte[] bufArray = buf.array();
        // Copy the 2nd half of the buffer to the first half.
        System.arraycopy(bufArray, GREP_MAX_SEARCH_SIZE, bufArray, 0, GREP_MAX_SEARCH_SIZE);
        // Zero-out the 2nd half to prevent accidental matches.
        Arrays.fill(bufArray, GREP_MAX_SEARCH_SIZE, bufArray.length, (byte) 0);
        // Fill the 2nd half with new bytes from the stream.
        int bytesRead = stream.read(bufArray, GREP_MAX_SEARCH_SIZE, Math.min(fileLength, GREP_MAX_SEARCH_SIZE));
        buf.limit(GREP_MAX_SEARCH_SIZE + bytesRead);
        return totalBytesRead + bytesRead;
    }
private Map<String, Object> mkMatchData(byte[] needle, ByteBuffer haystack, int haystackOffset, int fileOffset, Path canonicalPath,
boolean isDaemon, byte[] beforeBytes, byte[] afterBytes)
throws UnsupportedEncodingException, UnknownHostException {
String url;
if (isDaemon) {
url = urlToMatchCenteredInLogPageDaemonFile(needle, canonicalPath, fileOffset, logviewerPort);
} else {
url = urlToMatchCenteredInLogPage(needle, canonicalPath, fileOffset, logviewerPort);
}
byte[] haystackBytes = haystack.array();
String beforeString;
String afterString;
if (haystackOffset >= GREP_CONTEXT_SIZE) {
beforeString = new String(haystackBytes, (haystackOffset - GREP_CONTEXT_SIZE), GREP_CONTEXT_SIZE, "UTF-8");
} else {
int numDesired = Math.max(0, GREP_CONTEXT_SIZE - haystackOffset);
int beforeSize = beforeBytes != null ? beforeBytes.length : 0;
int numExpected = Math.min(beforeSize, numDesired);
if (numExpected > 0) {
StringBuilder sb = new StringBuilder();
sb.append(new String(beforeBytes, beforeSize - numExpected, numExpected, "UTF-8"));
sb.append(new String(haystackBytes, 0, haystackOffset, "UTF-8"));
beforeString = sb.toString();
} else {
beforeString = new String(haystackBytes, 0, haystackOffset, "UTF-8");
}
}
int needleSize = needle.length;
int afterOffset = haystackOffset + needleSize;
int haystackSize = haystack.limit();
if ((afterOffset + GREP_CONTEXT_SIZE) < haystackSize) {
afterString = new String(haystackBytes, afterOffset, GREP_CONTEXT_SIZE, "UTF-8");
} else {
int numDesired = GREP_CONTEXT_SIZE - (haystackSize - afterOffset);
int afterSize = afterBytes != null ? afterBytes.length : 0;
int numExpected = Math.min(afterSize, numDesired);
if (numExpected > 0) {
StringBuilder sb = new StringBuilder();
sb.append(new String(haystackBytes, afterOffset, (haystackSize - afterOffset), "UTF-8"));
sb.append(new String(afterBytes, 0, numExpected, "UTF-8"));
afterString = sb.toString();
} else {
afterString = new String(haystackBytes, afterOffset, (haystackSize - afterOffset), "UTF-8");
}
}
Map<String, Object> ret = new HashMap<>();
ret.put("byteOffset", fileOffset);
ret.put("beforeString", beforeString);
ret.put("afterString", afterString);
ret.put("matchString", new String(needle, "UTF-8"));
ret.put("logviewerURL", url);
return ret;
}
    /**
     * Tries once to read ahead in the stream to fill the context and resets the stream to its position before the call.
     *
     * @return a buffer of min(remaining bytes, GREP_CONTEXT_SIZE) bytes read ahead of the current position
     */
    private byte[] tryReadAhead(BufferedInputStream stream, ByteBuffer haystack, int offset, int fileLength, int bytesRead)
            throws IOException {
        int numExpected = Math.min(fileLength - bytesRead, GREP_CONTEXT_SIZE);
        byte[] afterBytes = new byte[numExpected];
        stream.mark(numExpected);
        // Only try reading once.
        // NOTE(review): the return value of read() is ignored, so a short read leaves trailing zero
        // bytes in afterBytes — presumably acceptable for display-only context; confirm.
        stream.read(afterBytes, 0, numExpected);
        stream.reset();
        return afterBytes;
    }
/**
* Searches a given byte array for a match of a sub-array of bytes. Returns the offset to the byte that matches, or -1 if no match was
* found.
*/
private int offsetOfBytes(byte[] buffer, byte[] search, int initOffset) {
if (search.length <= 0) {
throw new IllegalArgumentException("Search array should not be empty.");
}
if (initOffset < 0) {
throw new IllegalArgumentException("Start offset shouldn't be negative.");
}
int offset = initOffset;
int candidateOffset = initOffset;
int valOffset = 0;
int retOffset = 0;
while (true) {
if (search.length - valOffset <= 0) {
// found
retOffset = candidateOffset;
break;
} else {
if (offset >= buffer.length) {
// We ran out of buffer for the search.
retOffset = -1;
break;
} else {
if (search[valOffset] != buffer[offset]) {
// The match at this candidate offset failed, so start over with the
// next candidate byte from the buffer.
int newOffset = candidateOffset + 1;
offset = newOffset;
candidateOffset = newOffset;
valOffset = 0;
} else {
// So far it matches. Keep going...
offset = offset + 1;
valOffset = valOffset + 1;
}
}
}
}
return retOffset;
}
/**
* This response data only includes a next byte offset if there is more of the file to read.
*/
private Map<String, Object> mkGrepResponse(byte[] searchBytes, Integer offset, List<Map<String, Object>> matches,
Integer nextByteOffset) throws UnsupportedEncodingException {
Map<String, Object> ret = new HashMap<>();
ret.put("searchString", new String(searchBytes, "UTF-8"));
ret.put("startByteOffset", offset);
ret.put("matches", matches);
if (nextByteOffset != null) {
ret.put("nextByteOffset", nextByteOffset);
}
return ret;
}
@VisibleForTesting
String urlToMatchCenteredInLogPage(byte[] needle, Path canonicalPath, int offset, Integer port) throws UnknownHostException {
final String host = Utils.hostname();
final Path truncatedFilePath = truncatePathToLastElements(canonicalPath, 3);
Map<String, Object> parameters = new HashMap<>();
parameters.put("file", truncatedFilePath.toString());
parameters.put("start", Math.max(0, offset - (LogviewerConstant.DEFAULT_BYTES_PER_PAGE / 2) - (needle.length / -2)));
parameters.put("length", LogviewerConstant.DEFAULT_BYTES_PER_PAGE);
return UrlBuilder.build(String.format(this.scheme + "://%s:%d/api/v1/log", host, port), parameters);
}
@VisibleForTesting
String urlToMatchCenteredInLogPageDaemonFile(byte[] needle, Path canonicalPath, int offset, Integer port) throws UnknownHostException {
final String host = Utils.hostname();
final Path truncatedFilePath = truncatePathToLastElements(canonicalPath, 1);
Map<String, Object> parameters = new HashMap<>();
parameters.put("file", truncatedFilePath.toString());
parameters.put("start", Math.max(0, offset - (LogviewerConstant.DEFAULT_BYTES_PER_PAGE / 2) - (needle.length / -2)));
parameters.put("length", LogviewerConstant.DEFAULT_BYTES_PER_PAGE);
return UrlBuilder.build(String.format(this.scheme + "://%s:%d/api/v1/daemonlog", host, port), parameters);
}
@VisibleForTesting
public static class Matched implements JSONAware {
private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
private int fileOffset;
private String searchString;
private List<Map<String, Object>> matches;
@JsonIgnore
private final int openedFiles;
/**
* Constructor.
*
* @param fileOffset offset (index) of the files
* @param searchString search string
* @param matches map representing matched search result
* @param openedFiles number of files scanned, used for metrics only
*/
public Matched(int fileOffset, String searchString, List<Map<String, Object>> matches, int openedFiles) {
this.fileOffset = fileOffset;
this.searchString = searchString;
this.matches = matches;
this.openedFiles = openedFiles;
}
public int getFileOffset() {
return fileOffset;
}
public String getSearchString() {
return searchString;
}
public List<Map<String, Object>> getMatches() {
return matches;
}
@Override
public String toJSONString() {
try {
return OBJECT_MAPPER.writeValueAsString(this);
} catch (JsonProcessingException e) {
throw new RuntimeException(e);
}
}
}
private static class SubstringSearchResult {
private List<Map<String, Object>> matches;
private Integer newByteOffset;
private byte[] newBeforeBytes;
SubstringSearchResult(List<Map<String, Object>> matches, Integer newByteOffset, byte[] newBeforeBytes) {
this.matches = matches;
this.newByteOffset = newByteOffset;
this.newBeforeBytes = newBeforeBytes;
}
public List<Map<String, Object>> getMatches() {
return matches;
}
public Integer getNewByteOffset() {
return newByteOffset;
}
public byte[] getNewBeforeBytes() {
return newBeforeBytes;
}
}
}
|
apache/ignite-3 | 37,035 | modules/partition-replicator/src/test/java/org/apache/ignite/internal/partition/replicator/raft/snapshot/incoming/IncomingSnapshotCopierTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.partition.replicator.raft.snapshot.incoming;
import static java.util.concurrent.CompletableFuture.completedFuture;
import static java.util.stream.Collectors.toList;
import static org.apache.ignite.internal.catalog.commands.CatalogUtils.DEFAULT_PARTITION_COUNT;
import static org.apache.ignite.internal.hlc.HybridTimestamp.hybridTimestampToLong;
import static org.apache.ignite.internal.partition.replicator.raft.snapshot.outgoing.SnapshotMetaUtils.snapshotMetaAt;
import static org.apache.ignite.internal.testframework.IgniteTestUtils.runAsync;
import static org.apache.ignite.internal.testframework.matchers.CompletableFutureExceptionMatcher.willThrowFast;
import static org.apache.ignite.internal.testframework.matchers.CompletableFutureMatcher.willCompleteSuccessfully;
import static org.apache.ignite.internal.testframework.matchers.CompletableFutureMatcher.willSucceedIn;
import static org.apache.ignite.internal.tx.TxState.ABORTED;
import static org.apache.ignite.internal.tx.TxState.COMMITTED;
import static org.apache.ignite.internal.util.CompletableFutures.nullCompletedFuture;
import static org.apache.ignite.internal.util.IgniteUtils.shutdownAndAwaitTermination;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.not;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyInt;
import static org.mockito.ArgumentMatchers.anyLong;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.clearInvocations;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.lenient;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.Executor;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import org.apache.ignite.internal.binarytuple.BinaryTupleReader;
import org.apache.ignite.internal.catalog.Catalog;
import org.apache.ignite.internal.catalog.CatalogService;
import org.apache.ignite.internal.catalog.descriptors.CatalogIndexDescriptor;
import org.apache.ignite.internal.failure.FailureProcessor;
import org.apache.ignite.internal.hlc.HybridClock;
import org.apache.ignite.internal.hlc.HybridClockImpl;
import org.apache.ignite.internal.hlc.HybridTimestamp;
import org.apache.ignite.internal.lowwatermark.TestLowWatermark;
import org.apache.ignite.internal.lowwatermark.message.GetLowWatermarkRequest;
import org.apache.ignite.internal.lowwatermark.message.LowWatermarkMessagesFactory;
import org.apache.ignite.internal.network.InternalClusterNode;
import org.apache.ignite.internal.network.MessagingService;
import org.apache.ignite.internal.network.TopologyService;
import org.apache.ignite.internal.partition.replicator.network.PartitionReplicationMessagesFactory;
import org.apache.ignite.internal.partition.replicator.network.raft.SnapshotMetaRequest;
import org.apache.ignite.internal.partition.replicator.network.raft.SnapshotMetaResponse;
import org.apache.ignite.internal.partition.replicator.network.raft.SnapshotMvDataRequest;
import org.apache.ignite.internal.partition.replicator.network.raft.SnapshotMvDataResponse.ResponseEntry;
import org.apache.ignite.internal.partition.replicator.network.raft.SnapshotTxDataRequest;
import org.apache.ignite.internal.partition.replicator.network.replication.BinaryRowMessage;
import org.apache.ignite.internal.partition.replicator.raft.PartitionSnapshotInfo;
import org.apache.ignite.internal.partition.replicator.raft.PartitionSnapshotInfoSerializer;
import org.apache.ignite.internal.partition.replicator.raft.snapshot.PartitionSnapshotStorage;
import org.apache.ignite.internal.partition.replicator.raft.snapshot.PartitionTxStateAccessImpl;
import org.apache.ignite.internal.partition.replicator.raft.snapshot.SnapshotUri;
import org.apache.ignite.internal.partition.replicator.raft.snapshot.ZonePartitionKey;
import org.apache.ignite.internal.partition.replicator.raft.snapshot.outgoing.OutgoingSnapshotsManager;
import org.apache.ignite.internal.raft.RaftGroupConfiguration;
import org.apache.ignite.internal.raft.RaftGroupConfigurationConverter;
import org.apache.ignite.internal.replicator.TablePartitionId;
import org.apache.ignite.internal.replicator.message.ReplicaMessagesFactory;
import org.apache.ignite.internal.schema.BinaryRow;
import org.apache.ignite.internal.schema.Column;
import org.apache.ignite.internal.schema.SchemaDescriptor;
import org.apache.ignite.internal.schema.row.RowAssembler;
import org.apache.ignite.internal.storage.MvPartitionStorage;
import org.apache.ignite.internal.storage.ReadResult;
import org.apache.ignite.internal.storage.RowId;
import org.apache.ignite.internal.storage.StorageException;
import org.apache.ignite.internal.storage.engine.MvPartitionMeta;
import org.apache.ignite.internal.storage.engine.MvTableStorage;
import org.apache.ignite.internal.storage.impl.TestMvPartitionStorage;
import org.apache.ignite.internal.storage.impl.TestMvTableStorage;
import org.apache.ignite.internal.storage.lease.LeaseInfo;
import org.apache.ignite.internal.table.distributed.gc.GcUpdateHandler;
import org.apache.ignite.internal.table.distributed.gc.MvGc;
import org.apache.ignite.internal.table.distributed.index.IndexUpdateHandler;
import org.apache.ignite.internal.table.distributed.raft.snapshot.FullStateTransferIndexChooser;
import org.apache.ignite.internal.table.distributed.raft.snapshot.PartitionMvStorageAccessImpl;
import org.apache.ignite.internal.table.distributed.raft.snapshot.TablePartitionKey;
import org.apache.ignite.internal.table.impl.DummySchemaManagerImpl;
import org.apache.ignite.internal.testframework.BaseIgniteAbstractTest;
import org.apache.ignite.internal.tx.TransactionIds;
import org.apache.ignite.internal.tx.TxMeta;
import org.apache.ignite.internal.tx.TxState;
import org.apache.ignite.internal.tx.impl.EnlistedPartitionGroup;
import org.apache.ignite.internal.tx.message.TxMessagesFactory;
import org.apache.ignite.internal.tx.message.TxMetaMessage;
import org.apache.ignite.internal.tx.storage.state.TxStatePartitionStorage;
import org.apache.ignite.internal.tx.storage.state.TxStateStorage;
import org.apache.ignite.internal.tx.storage.state.test.TestTxStatePartitionStorage;
import org.apache.ignite.internal.tx.storage.state.test.TestTxStateStorage;
import org.apache.ignite.internal.type.NativeTypes;
import org.apache.ignite.internal.versioned.VersionedSerialization;
import org.apache.ignite.raft.jraft.Status;
import org.apache.ignite.raft.jraft.error.RaftError;
import org.apache.ignite.raft.jraft.storage.snapshot.SnapshotCopier;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.Timeout;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
/**
 * Tests for {@link IncomingSnapshotCopier}.
 *
 * <p>Models the snapshot sender side through a mocked {@link MessagingService} and verifies that
 * MV rows, transaction states, snapshot meta and the low watermark are copied into the local
 * ("incoming") storages, plus cancellation and failure scenarios.
 */
@ExtendWith(MockitoExtension.class)
public class IncomingSnapshotCopierTest extends BaseIgniteAbstractTest {
    private static final int TABLE_ID = 1;
    private static final String NODE_NAME = "node";
    private static final int PARTITION_ID = 0;
    // One string key column and one string value column; used for every row written in these tests.
    private static final SchemaDescriptor SCHEMA = new SchemaDescriptor(
            1,
            new Column[]{new Column("key", NativeTypes.stringOf(256), false)},
            new Column[]{new Column("value", NativeTypes.stringOf(256), false)}
    );
    private static final HybridClock CLOCK = new HybridClockImpl();
    private static final PartitionReplicationMessagesFactory TABLE_MSG_FACTORY = new PartitionReplicationMessagesFactory();
    private static final ReplicaMessagesFactory REPLICA_MESSAGES_FACTORY = new ReplicaMessagesFactory();
    private static final LowWatermarkMessagesFactory LWM_MSG_FACTORY = new LowWatermarkMessagesFactory();
    private static final TxMessagesFactory TX_MESSAGES_FACTORY = new TxMessagesFactory();
    private final ExecutorService executorService = Executors.newSingleThreadExecutor();
    private final InternalClusterNode clusterNode = mock(InternalClusterNode.class);
    private final UUID snapshotId = UUID.randomUUID();
    private final RaftGroupConfigurationConverter raftGroupConfigurationConverter = new RaftGroupConfigurationConverter();
    private final MvGc mvGc = mock(MvGc.class);
    private final CatalogService catalogService = mock(CatalogService.class);
    // "Outgoing" storages play the snapshot sender side; "incoming" ones are the local copy target.
    private final MvPartitionStorage outgoingMvPartitionStorage = new TestMvPartitionStorage(PARTITION_ID);
    private final TxStatePartitionStorage outgoingTxStatePartitionStorage = new TestTxStatePartitionStorage();
    private final MvTableStorage incomingMvTableStorage = spy(new TestMvTableStorage(TABLE_ID, DEFAULT_PARTITION_COUNT));
    private final TxStateStorage incomingTxStateStorage = spy(new TestTxStateStorage());
    // Expected partition state that must appear in the incoming storages after a successful copy.
    private final long expLastAppliedIndex = 100500L;
    private final long expLastAppliedTerm = 100L;
    private final RaftGroupConfiguration expLastGroupConfig = generateRaftGroupConfig();
    private final LeaseInfo expLeaseInfo = new LeaseInfo(3000000, new UUID(1, 2), "primary");
    private final List<RowId> rowIds = generateRowIds();
    private final List<UUID> txIds = generateTxIds();
    private final IndexUpdateHandler indexUpdateHandler = mock(IndexUpdateHandler.class);
    private final int indexId = 1;
    private final RowId nextRowIdToBuildIndex = new RowId(PARTITION_ID);
    private final TestLowWatermark lowWatermark = spy(new TestLowWatermark());
    @BeforeEach
    void setUp(
            @Mock Catalog catalog,
            @Mock CatalogIndexDescriptor catalogIndexDescriptor
    ) {
        // MvGc must complete storage removal for the copier to proceed with the rebalance.
        when(mvGc.removeStorage(any(TablePartitionId.class))).then(invocation -> nullCompletedFuture());
        when(catalogService.catalogReadyFuture(anyInt())).thenReturn(nullCompletedFuture());
        when(catalogService.catalog(anyInt())).thenReturn(catalog);
        lenient().when(catalog.index(indexId)).thenReturn(catalogIndexDescriptor);
        lenient().when(catalogIndexDescriptor.tableId()).thenReturn(TABLE_ID);
    }
    @AfterEach
    void tearDown() {
        shutdownAndAwaitTermination(executorService, 1, TimeUnit.SECONDS);
    }
    /** Happy path: a full snapshot is copied and all expected state lands in the incoming storages. */
    @Test
    void test() {
        fillOriginalStorages();
        createTargetStorages();
        MessagingService messagingService = messagingServiceForSuccessScenario(outgoingMvPartitionStorage,
                outgoingTxStatePartitionStorage, rowIds, txIds);
        PartitionSnapshotStorage partitionSnapshotStorage = createPartitionSnapshotStorage(
                incomingMvTableStorage,
                incomingTxStateStorage,
                messagingService
        );
        HybridTimestamp newLowWatermarkValue = CLOCK.now();
        assertThat(lowWatermark.updateAndNotify(newLowWatermarkValue), willCompleteSuccessfully());
        // Forget the direct update above so the later verify() only sees the copier's update.
        clearInvocations(lowWatermark);
        SnapshotCopier snapshotCopier = partitionSnapshotStorage.startIncomingSnapshot(
                SnapshotUri.toStringUri(snapshotId, NODE_NAME)
        );
        assertThat(runAsync(snapshotCopier::join), willSucceedIn(1, TimeUnit.SECONDS));
        assertEquals(Status.OK().getCode(), snapshotCopier.getCode());
        TablePartitionId tablePartitionId = new TablePartitionId(TABLE_ID, PARTITION_ID);
        // The rebalance swaps the partition under MvGc: removed exactly once, then re-registered.
        verify(mvGc, times(1)).removeStorage(eq(tablePartitionId));
        verify(mvGc, times(1)).addStorage(eq(tablePartitionId), any(GcUpdateHandler.class));
        MvPartitionStorage incomingMvPartitionStorage = incomingMvTableStorage.getMvPartition(PARTITION_ID);
        TxStatePartitionStorage incomingTxStatePartitionStorage = incomingTxStateStorage.getPartitionStorage(PARTITION_ID);
        assertEquals(expLastAppliedIndex, outgoingMvPartitionStorage.lastAppliedIndex());
        assertEquals(expLastAppliedTerm, outgoingMvPartitionStorage.lastAppliedTerm());
        assertArrayEquals(
                raftGroupConfigurationConverter.toBytes(expLastGroupConfig),
                outgoingMvPartitionStorage.committedGroupConfiguration()
        );
        assertEquals(expLeaseInfo, outgoingMvPartitionStorage.leaseInfo());
        assertEquals(expLastAppliedIndex, outgoingTxStatePartitionStorage.lastAppliedIndex());
        assertEquals(expLastAppliedTerm, outgoingTxStatePartitionStorage.lastAppliedTerm());
        // Row versions and TX metas in the incoming storages must match the sender's one-to-one.
        assertEqualsMvRows(outgoingMvPartitionStorage, incomingMvPartitionStorage, rowIds);
        assertEqualsTxStates(outgoingTxStatePartitionStorage, incomingTxStatePartitionStorage, txIds);
        verify(incomingMvTableStorage).startRebalancePartition(PARTITION_ID);
        verify(incomingTxStatePartitionStorage).startRebalance();
        var expSnapshotInfo = new PartitionSnapshotInfo(
                expLastAppliedIndex,
                expLastAppliedTerm,
                expLeaseInfo,
                raftGroupConfigurationConverter.toBytes(expLastGroupConfig),
                Set.of(TABLE_ID)
        );
        byte[] expSnapshotInfoBytes = VersionedSerialization.toBytes(expSnapshotInfo, PartitionSnapshotInfoSerializer.INSTANCE);
        var expMvPartitionMeta = new MvPartitionMeta(
                expSnapshotInfo.lastAppliedIndex(),
                expSnapshotInfo.lastAppliedTerm(),
                expSnapshotInfo.configurationBytes(),
                expSnapshotInfo.leaseInfo(),
                expSnapshotInfoBytes
        );
        verify(incomingTxStatePartitionStorage).finishRebalance(expMvPartitionMeta);
        verify(indexUpdateHandler).setNextRowIdToBuildIndex(eq(indexId), eq(nextRowIdToBuildIndex));
        // The low watermark delivered by the (stubbed) leader must be applied locally.
        verify(lowWatermark).updateLowWatermark(eq(newLowWatermarkValue));
    }
    /** Creates the partition in the incoming (target) storages so there is something to copy into. */
    private void createTargetStorages() {
        assertThat(incomingMvTableStorage.createMvPartition(PARTITION_ID), willCompleteSuccessfully());
        incomingTxStateStorage.getOrCreatePartitionStorage(PARTITION_ID);
    }
    /** Populates the sender-side MV and TX storages with the expected rows, tx states and meta. */
    private void fillOriginalStorages() {
        fillMvPartitionStorage(
                outgoingMvPartitionStorage,
                expLastAppliedIndex,
                expLastAppliedTerm,
                expLastGroupConfig,
                rowIds,
                expLeaseInfo
        );
        fillTxStatePartitionStorage(outgoingTxStatePartitionStorage, expLastAppliedIndex, expLastAppliedTerm, txIds);
    }
    /**
     * Mocks a {@link MessagingService} that answers snapshot meta, MV data, TX data and low
     * watermark requests from the given sender-side storages, finishing each stream in a single
     * response message.
     */
    private MessagingService messagingServiceForSuccessScenario(MvPartitionStorage outgoingMvPartitionStorage,
            TxStatePartitionStorage outgoingTxStatePartitionStorage, List<RowId> rowIds, List<UUID> txIds) {
        MessagingService messagingService = mock(MessagingService.class);
        returnSnapshotMetaWhenAskedForIt(messagingService);
        when(messagingService.invoke(eq(clusterNode), any(SnapshotMvDataRequest.class), anyLong())).then(answer -> {
            SnapshotMvDataRequest snapshotMvDataRequest = answer.getArgument(1);
            assertEquals(snapshotId, snapshotMvDataRequest.id());
            List<ResponseEntry> responseEntries = createSnapshotMvDataEntries(outgoingMvPartitionStorage, rowIds);
            assertThat(responseEntries, not(empty()));
            // finish(true): all MV data fits into this single response.
            return completedFuture(TABLE_MSG_FACTORY.snapshotMvDataResponse().rows(responseEntries).finish(true).build());
        });
        lenient().when(messagingService.invoke(eq(clusterNode), any(SnapshotTxDataRequest.class), anyLong())).then(answer -> {
            SnapshotTxDataRequest snapshotTxDataRequest = answer.getArgument(1);
            assertEquals(snapshotId, snapshotTxDataRequest.id());
            List<TxMetaMessage> txMetas = txIds.stream()
                    .map(outgoingTxStatePartitionStorage::get)
                    .map(txMeta -> txMeta.toTransactionMetaMessage(REPLICA_MESSAGES_FACTORY, TX_MESSAGES_FACTORY))
                    .collect(toList());
            return completedFuture(TABLE_MSG_FACTORY.snapshotTxDataResponse().txIds(txIds).txMeta(txMetas).finish(true).build());
        });
        lenient().when(messagingService.invoke(eq(clusterNode), any(GetLowWatermarkRequest.class), anyLong())).thenAnswer(invocation -> {
            long lowWatermarkValue = hybridTimestampToLong(lowWatermark.getLowWatermark());
            return completedFuture(LWM_MSG_FACTORY.getLowWatermarkResponse().lowWatermark(lowWatermarkValue).build());
        });
        return messagingService;
    }
    /** Stubs the snapshot meta request to answer with meta whose required catalog version is 0. */
    private void returnSnapshotMetaWhenAskedForIt(MessagingService messagingService) {
        when(messagingService.invoke(eq(clusterNode), any(SnapshotMetaRequest.class), anyLong())).then(answer -> {
            SnapshotMetaRequest snapshotMetaRequest = answer.getArgument(1);
            assertEquals(snapshotId, snapshotMetaRequest.id());
            return completedFuture(snapshotMetaResponse(0));
        });
    }
    /** Builds a meta response carrying the expected applied index/term, group config, lease and index build point. */
    private SnapshotMetaResponse snapshotMetaResponse(int requiredCatalogVersion) {
        return TABLE_MSG_FACTORY.snapshotMetaResponse()
                .meta(snapshotMetaAt(
                        expLastAppliedIndex,
                        expLastAppliedTerm,
                        expLastGroupConfig,
                        requiredCatalogVersion,
                        Map.of(indexId, nextRowIdToBuildIndex.uuid()),
                        expLeaseInfo
                ))
                .build();
    }
    /** Wires a {@link PartitionSnapshotStorage} over the given target storages and messaging mock. */
    private PartitionSnapshotStorage createPartitionSnapshotStorage(
            MvTableStorage incomingTableStorage,
            TxStateStorage incomingTxStateStorage,
            MessagingService messagingService
    ) {
        TopologyService topologyService = mock(TopologyService.class);
        when(topologyService.getByConsistentId(NODE_NAME)).thenReturn(clusterNode);
        OutgoingSnapshotsManager outgoingSnapshotsManager = mock(OutgoingSnapshotsManager.class);
        when(outgoingSnapshotsManager.messagingService()).thenReturn(messagingService);
        var storage = new PartitionSnapshotStorage(
                new TablePartitionKey(TABLE_ID, PARTITION_ID),
                topologyService,
                outgoingSnapshotsManager,
                new PartitionTxStateAccessImpl(incomingTxStateStorage.getPartitionStorage(PARTITION_ID)),
                catalogService,
                mock(FailureProcessor.class),
                executorService,
                0
        );
        // The MV access is a spy so tests can verify/stub rebalance and write interactions.
        storage.addMvPartition(TABLE_ID, spy(new PartitionMvStorageAccessImpl(
                PARTITION_ID,
                incomingTableStorage,
                mvGc,
                indexUpdateHandler,
                mock(GcUpdateHandler.class),
                mock(FullStateTransferIndexChooser.class),
                new DummySchemaManagerImpl(SCHEMA),
                lowWatermark
        )));
        return storage;
    }
    /**
     * Writes one version per row id — even indexes as committed versions, odd indexes as
     * uncommitted write intents — then records the applied index/term, group config and lease.
     */
    private void fillMvPartitionStorage(
            MvPartitionStorage storage,
            long lastAppliedIndex,
            long lastAppliedTerm,
            RaftGroupConfiguration raftGroupConfig,
            List<RowId> rowIds,
            LeaseInfo leaseInfo
    ) {
        assertEquals(0, rowIds.size() % 2, "size=" + rowIds.size());
        storage.runConsistently(locker -> {
            for (int i = 0; i < rowIds.size(); i++) {
                if (i % 2 == 0) {
                    // Writes committed version.
                    storage.addWriteCommitted(rowIds.get(i), createRow("k" + i, "v" + i), CLOCK.now());
                } else {
                    // Writes an intent to write (uncommitted version).
                    storage.addWrite(rowIds.get(i), createRow("k" + i, "v" + i), generateTxId(), 999, PARTITION_ID);
                }
            }
            storage.lastApplied(lastAppliedIndex, lastAppliedTerm);
            storage.committedGroupConfiguration(raftGroupConfigurationConverter.toBytes(raftGroupConfig));
            storage.updateLease(leaseInfo);
            return null;
        });
    }
    /** Stores one TX meta per id (alternating COMMITTED/ABORTED) and records the applied index/term. */
    private static void fillTxStatePartitionStorage(
            TxStatePartitionStorage storage,
            long lastAppliedIndex,
            long lastAppliedTerm,
            List<UUID> txIds
    ) {
        assertEquals(0, txIds.size() % 2, "size=" + txIds.size());
        // Deliberately different from TABLE_ID: the enlisted partition's table need not be this table.
        int tableId = 2;
        for (int i = 0; i < txIds.size(); i++) {
            TxState txState = i % 2 == 0 ? COMMITTED : ABORTED;
            List<EnlistedPartitionGroup> enlistedPartitions = List.of(
                    new EnlistedPartitionGroup(new TablePartitionId(tableId, PARTITION_ID), Set.of(tableId))
            );
            storage.putForRebalance(txIds.get(i), new TxMeta(txState, enlistedPartitions, CLOCK.now()));
        }
        storage.lastApplied(lastAppliedIndex, lastAppliedTerm);
    }
    /**
     * Converts each row's version chain into a wire-format {@link ResponseEntry}: version list plus
     * one commit timestamp per committed version; a write intent (if present) contributes no
     * timestamp and is carried via txId/commitTableOrZoneId/commitPartitionId instead.
     */
    private static List<ResponseEntry> createSnapshotMvDataEntries(MvPartitionStorage storage, List<RowId> rowIds) {
        List<ResponseEntry> responseEntries = new ArrayList<>();
        for (RowId rowId : rowIds) {
            List<ReadResult> readResults = storage.scanVersions(rowId).stream().collect(toList());
            // Reverse the scan order before sending — presumably the wire format wants oldest first.
            Collections.reverse(readResults);
            List<BinaryRowMessage> rowVersions = new ArrayList<>();
            // One slot per committed version; each row here has a single version (see fillMvPartitionStorage).
            long[] timestamps = new long[readResults.size() + (readResults.get(0).isWriteIntent() ? -1 : 0)];
            UUID txId = null;
            // TODO: https://issues.apache.org/jira/browse/IGNITE-22522 - remove mentions of commit *table*.
            Integer commitTableOrZoneId = null;
            int commitPartitionId = ReadResult.UNDEFINED_COMMIT_PARTITION_ID;
            int j = 0;
            for (ReadResult readResult : readResults) {
                BinaryRowMessage rowMessage = TABLE_MSG_FACTORY.binaryRowMessage()
                        .binaryTuple(readResult.binaryRow().tupleSlice())
                        .schemaVersion(readResult.binaryRow().schemaVersion())
                        .build();
                rowVersions.add(rowMessage);
                if (readResult.isWriteIntent()) {
                    txId = readResult.transactionId();
                    commitTableOrZoneId = readResult.commitTableOrZoneId();
                    commitPartitionId = readResult.commitPartitionId();
                } else {
                    timestamps[j++] = readResult.commitTimestamp().longValue();
                }
            }
            responseEntries.add(
                    TABLE_MSG_FACTORY.responseEntry()
                            .rowId(rowId.uuid())
                            .rowVersions(rowVersions)
                            .timestamps(timestamps)
                            .txId(txId)
                            .commitTableOrZoneId(commitTableOrZoneId)
                            .commitPartitionId(commitPartitionId)
                            .tableId(TABLE_ID)
                            .build()
            );
        }
        return responseEntries;
    }
    /** Assembles a single (key, value) binary row for {@link #SCHEMA}. */
    private static BinaryRow createRow(String key, String value) {
        return new RowAssembler(SCHEMA, -1)
                .appendStringNotNull(key)
                .appendStringNotNull(value)
                .build();
    }
    /** Asserts version-by-version equality of the given rows between two MV partition storages. */
    private static void assertEqualsMvRows(MvPartitionStorage expected, MvPartitionStorage actual, List<RowId> rowIds) {
        for (RowId rowId : rowIds) {
            List<ReadResult> expReadResults = expected.scanVersions(rowId).stream().collect(toList());
            List<ReadResult> actReadResults = actual.scanVersions(rowId).stream().collect(toList());
            assertEquals(expReadResults.size(), actReadResults.size(), rowId.toString());
            for (int i = 0; i < expReadResults.size(); i++) {
                ReadResult expReadResult = expReadResults.get(i);
                ReadResult actReadResult = actReadResults.get(i);
                String msg = "RowId=" + rowId + ", i=" + i;
                BinaryTupleReader expTuple = new BinaryTupleReader(SCHEMA.length(), expReadResult.binaryRow().tupleSlice());
                BinaryTupleReader actTuple = new BinaryTupleReader(SCHEMA.length(), actReadResult.binaryRow().tupleSlice());
                assertEquals(expTuple.stringValue(0), actTuple.stringValue(0), msg);
                assertEquals(expTuple.stringValue(1), actTuple.stringValue(1), msg);
                assertEquals(expReadResult.commitTimestamp(), actReadResult.commitTimestamp(), msg);
                assertEquals(expReadResult.transactionId(), actReadResult.transactionId(), msg);
                assertEquals(expReadResult.commitTableOrZoneId(), actReadResult.commitTableOrZoneId(), msg);
                assertEquals(expReadResult.commitPartitionId(), actReadResult.commitPartitionId(), msg);
                assertEquals(expReadResult.isWriteIntent(), actReadResult.isWriteIntent(), msg);
            }
        }
    }
    /** Asserts that both TX state storages hold equal metas for every given transaction id. */
    private static void assertEqualsTxStates(TxStatePartitionStorage expected, TxStatePartitionStorage actual, List<UUID> txIds) {
        for (UUID txId : txIds) {
            assertEquals(expected.get(txId), actual.get(txId));
        }
    }
    /** Cancelling while the snapshot-meta network call hangs must still let {@code join()} finish. */
    @Test
    void cancellationMakesJoinFinishIfHangingOnNetworkCallToSnapshotMetadata() throws Exception {
        createTargetStorages();
        CountDownLatch networkInvokeLatch = new CountDownLatch(1);
        MessagingService messagingService = mock(MessagingService.class);
        when(messagingService.invoke(any(InternalClusterNode.class), any(SnapshotMetaRequest.class), anyLong())).then(invocation -> {
            networkInvokeLatch.countDown();
            // Never completed: simulates a hanging network call.
            return new CompletableFuture<>();
        });
        PartitionSnapshotStorage partitionSnapshotStorage = createPartitionSnapshotStorage(
                incomingMvTableStorage,
                incomingTxStateStorage,
                messagingService
        );
        SnapshotCopier snapshotCopier = partitionSnapshotStorage.startIncomingSnapshot(
                SnapshotUri.toStringUri(snapshotId, NODE_NAME)
        );
        networkInvokeLatch.await(1, TimeUnit.SECONDS);
        CompletableFuture<?> cancelAndJoinFuture = runAsync(() -> {
            snapshotCopier.cancel();
            snapshotCopier.join();
        });
        assertThat(cancelAndJoinFuture, willSucceedIn(1, TimeUnit.SECONDS));
        // The copier hung before rebalance started, so rebalance is neither started nor aborted.
        verify(partitionSnapshotStorage.partitionsByTableId().get(TABLE_ID), never()).startRebalance();
        verify(partitionSnapshotStorage.partitionsByTableId().get(TABLE_ID), never()).abortRebalance();
    }
    /** Cancelling while the MV-data network call hangs must finish {@code join()} and abort the rebalance. */
    @Test
    void cancellationMakesJoinFinishIfHangingOnNetworkCallWhenGettingData() throws Exception {
        createTargetStorages();
        CountDownLatch networkInvokeLatch = new CountDownLatch(1);
        MessagingService messagingService = mock(MessagingService.class);
        returnSnapshotMetaWhenAskedForIt(messagingService);
        when(messagingService.invoke(any(InternalClusterNode.class), any(SnapshotMvDataRequest.class), anyLong())).then(invocation -> {
            networkInvokeLatch.countDown();
            // Never completed: simulates a hanging network call.
            return new CompletableFuture<>();
        });
        PartitionSnapshotStorage partitionSnapshotStorage = createPartitionSnapshotStorage(
                incomingMvTableStorage,
                incomingTxStateStorage,
                messagingService
        );
        SnapshotCopier snapshotCopier = partitionSnapshotStorage.startIncomingSnapshot(
                SnapshotUri.toStringUri(snapshotId, NODE_NAME)
        );
        networkInvokeLatch.await(1, TimeUnit.SECONDS);
        CompletableFuture<?> cancelAndJoinFuture = runAsync(() -> {
            snapshotCopier.cancel();
            snapshotCopier.join();
        });
        assertThat(cancelAndJoinFuture, willSucceedIn(1, TimeUnit.SECONDS));
        verify(partitionSnapshotStorage.partitionsByTableId().get(TABLE_ID)).abortRebalance();
    }
    /** Cancelling in the middle of applying MV data must abort the rebalance without deadlocking. */
    @Test
    void testCancelOnMiddleRebalance() {
        fillOriginalStorages();
        createTargetStorages();
        MessagingService messagingService = messagingServiceForSuccessScenario(outgoingMvPartitionStorage,
                outgoingTxStatePartitionStorage, rowIds, txIds);
        PartitionSnapshotStorage partitionSnapshotStorage = createPartitionSnapshotStorage(
                incomingMvTableStorage,
                incomingTxStateStorage,
                messagingService
        );
        // Let's add a rebalance interruption in the middle.
        CompletableFuture<Void> startAddWriteFuture = new CompletableFuture<>();
        CompletableFuture<Void> finishAddWriteFuture = new CompletableFuture<>();
        doAnswer(answer -> {
            startAddWriteFuture.complete(null);
            assertThat(finishAddWriteFuture, willCompleteSuccessfully());
            return null;
        }).when(partitionSnapshotStorage.partitionsByTableId().get(TABLE_ID))
                .addWrite(any(RowId.class), any(BinaryRow.class), any(UUID.class), anyInt(), anyInt(), anyInt());
        // Let's start rebalancing.
        SnapshotCopier snapshotCopier = partitionSnapshotStorage.startIncomingSnapshot(
                SnapshotUri.toStringUri(snapshotId, NODE_NAME)
        );
        // Let's try to cancel it in the middle of the rebalance.
        CompletableFuture<?> cancelRebalanceFuture = runAsync(() -> {
            assertThat(startAddWriteFuture, willCompleteSuccessfully());
            CompletableFuture<?> cancelCopierFuture = runAsync(() -> finishAddWriteFuture.complete(null));
            snapshotCopier.cancel();
            snapshotCopier.join();
            assertThat(cancelCopierFuture, willCompleteSuccessfully());
        });
        assertThat(cancelRebalanceFuture, willCompleteSuccessfully());
        verify(partitionSnapshotStorage.partitionsByTableId().get(TABLE_ID)).abortRebalance();
    }
    /** A storage error while applying MV data must fail the join and abort the rebalance. */
    @Test
    void testErrorInProcessOfRebalance() {
        fillOriginalStorages();
        createTargetStorages();
        MessagingService messagingService = messagingServiceForSuccessScenario(outgoingMvPartitionStorage,
                outgoingTxStatePartitionStorage, rowIds, txIds);
        PartitionSnapshotStorage partitionSnapshotStorage = createPartitionSnapshotStorage(
                incomingMvTableStorage,
                incomingTxStateStorage,
                messagingService
        );
        // Let's add an error on the rebalance.
        doThrow(new StorageException("Mocked storage exception.")).when(partitionSnapshotStorage.partitionsByTableId().get(TABLE_ID))
                .addWrite(any(RowId.class), any(BinaryRow.class), any(UUID.class), anyInt(), anyInt(), anyInt());
        // Let's start rebalancing.
        SnapshotCopier snapshotCopier = partitionSnapshotStorage.startIncomingSnapshot(
                SnapshotUri.toStringUri(snapshotId, NODE_NAME)
        );
        // Let's wait for an error on rebalancing.
        assertThat(runAsync(snapshotCopier::join), willThrowFast(IllegalStateException.class));
        verify(partitionSnapshotStorage.partitionsByTableId().get(TABLE_ID)).abortRebalance();
        TablePartitionId tablePartitionId = new TablePartitionId(TABLE_ID, PARTITION_ID);
        // Even on failure the storage must be returned to MvGc.
        verify(mvGc, times(1)).removeStorage(eq(tablePartitionId));
        verify(mvGc, times(1)).addStorage(eq(tablePartitionId), any(GcUpdateHandler.class));
    }
    /** Two cancel() calls from different threads must not block each other (1-second timeout guards this). */
    @Test
    @Timeout(1)
    void cancellationsFromMultipleThreadsDoNotBlockEachOther() throws Exception {
        PartitionSnapshotStorage partitionSnapshotStorage = mock(PartitionSnapshotStorage.class);
        when(partitionSnapshotStorage.partitionKey()).thenReturn(new ZonePartitionKey(1, 0));
        IncomingSnapshotCopier copier = new IncomingSnapshotCopier(
                partitionSnapshotStorage,
                SnapshotUri.fromStringUri(SnapshotUri.toStringUri(snapshotId, NODE_NAME)),
                mock(Executor.class),
                0
        );
        Thread anotherThread = new Thread(copier::cancel);
        anotherThread.start();
        anotherThread.join();
        copier.cancel();
    }
    /** Generates an even number of row ids (required by {@link #fillMvPartitionStorage}). */
    private static List<RowId> generateRowIds() {
        return List.of(
                new RowId(PARTITION_ID),
                new RowId(PARTITION_ID),
                new RowId(PARTITION_ID),
                new RowId(PARTITION_ID)
        );
    }
    /** Generates an even number of transaction ids (required by {@link #fillTxStatePartitionStorage}). */
    private static List<UUID> generateTxIds() {
        return List.of(
                generateTxId(),
                generateTxId(),
                generateTxId(),
                generateTxId()
        );
    }
    /** Creates a fixed Raft group configuration with current and old peers/learners. */
    private static RaftGroupConfiguration generateRaftGroupConfig() {
        return new RaftGroupConfiguration(
                13L,
                37L,
                List.of("peer"),
                List.of("learner"),
                List.of("old-peer"),
                List.of("old-learner")
        );
    }
    /**
     * If the local catalog has not reached the leader's required version, the copier must finish
     * with EBUSY without fetching any data or touching the target storages.
     */
    @Test
    void laggingSchemasPreventSnapshotInstallation() {
        fillOriginalStorages();
        createTargetStorages();
        MessagingService messagingService = mock(MessagingService.class);
        int leaderCatalogVersion = 42;
        // The ready-future never completes: the local catalog stays behind the leader's version.
        when(catalogService.catalogReadyFuture(leaderCatalogVersion)).thenReturn(new CompletableFuture<>());
        when(messagingService.invoke(eq(clusterNode), any(SnapshotMetaRequest.class), anyLong()))
                .thenReturn(completedFuture(snapshotMetaResponse(leaderCatalogVersion)));
        PartitionSnapshotStorage partitionSnapshotStorage = createPartitionSnapshotStorage(
                incomingMvTableStorage,
                incomingTxStateStorage,
                messagingService
        );
        SnapshotCopier snapshotCopier = partitionSnapshotStorage.startIncomingSnapshot(
                SnapshotUri.toStringUri(snapshotId, NODE_NAME)
        );
        assertThat(runAsync(snapshotCopier::join), willSucceedIn(10, TimeUnit.SECONDS));
        assertEquals(RaftError.EBUSY.getNumber(), snapshotCopier.getCode());
        verify(messagingService, never()).invoke(any(InternalClusterNode.class), any(SnapshotMvDataRequest.class), anyLong());
        verify(messagingService, never()).invoke(any(InternalClusterNode.class), any(SnapshotTxDataRequest.class), anyLong());
        verify(partitionSnapshotStorage.partitionsByTableId().get(TABLE_ID), never()).startRebalance();
        verify(partitionSnapshotStorage.partitionsByTableId().get(TABLE_ID), never()).abortRebalance();
        assertThatTargetStoragesAreEmpty(incomingMvTableStorage, incomingTxStateStorage);
    }
    /** Asserts that the target storages are still in their freshly-created, empty state. */
    private static void assertThatTargetStoragesAreEmpty(
            MvTableStorage incomingMvTableStorage,
            TxStateStorage incomingTxStateStorage
    ) {
        MvPartitionStorage incomingMvPartitionStorage = incomingMvTableStorage.getMvPartition(PARTITION_ID);
        TxStatePartitionStorage incomingTxStatePartitionStorage = incomingTxStateStorage.getPartitionStorage(PARTITION_ID);
        assertEquals(0L, incomingMvPartitionStorage.lastAppliedIndex());
        assertEquals(0L, incomingMvPartitionStorage.lastAppliedTerm());
        assertArrayEquals(
                null,
                incomingMvPartitionStorage.committedGroupConfiguration()
        );
        assertEquals(0L, incomingTxStatePartitionStorage.lastAppliedIndex());
        assertEquals(0L, incomingTxStatePartitionStorage.lastAppliedTerm());
        assertFalse(incomingMvPartitionStorage.scan(HybridTimestamp.MAX_VALUE).hasNext());
        assertFalse(incomingTxStatePartitionStorage.scan().hasNext());
    }
    /** Generates a transaction id from the current hybrid clock time. */
    private static UUID generateTxId() {
        return TransactionIds.transactionId(CLOCK.now(), 1);
    }
}
|
google/guava | 36,866 | guava-tests/test/com/google/common/collect/MinMaxPriorityQueueTest.java | /*
* Copyright (C) 2008 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.collect;
import static com.google.common.collect.Platform.reduceExponentIfGwt;
import static com.google.common.collect.Platform.reduceIterationsIfGwt;
import static com.google.common.collect.ReflectionFreeAssertThrows.assertThrows;
import static com.google.common.collect.Sets.newHashSet;
import static com.google.common.truth.Truth.assertThat;
import static java.util.Arrays.asList;
import static java.util.Collections.shuffle;
import com.google.common.annotations.GwtCompatible;
import com.google.common.annotations.GwtIncompatible;
import com.google.common.annotations.J2ktIncompatible;
import com.google.common.collect.testing.IteratorFeature;
import com.google.common.collect.testing.IteratorTester;
import com.google.common.collect.testing.QueueTestSuiteBuilder;
import com.google.common.collect.testing.TestStringQueueGenerator;
import com.google.common.collect.testing.features.CollectionFeature;
import com.google.common.collect.testing.features.CollectionSize;
import com.google.common.testing.NullPointerTester;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.ConcurrentModificationException;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Objects;
import java.util.PriorityQueue;
import java.util.Queue;
import java.util.Random;
import java.util.SortedMap;
import java.util.concurrent.atomic.AtomicInteger;
import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;
import org.jspecify.annotations.NullMarked;
import org.jspecify.annotations.Nullable;
/**
* Unit test for {@link MinMaxPriorityQueue}.
*
* @author Alexei Stolboushkin
* @author Sverre Sundsdal
*/
@GwtCompatible
@NullMarked
public class MinMaxPriorityQueueTest extends TestCase {
  // Reverse natural ordering: easy to tell apart from the default natural comparator in assertions.
  private static final Ordering<Integer> SOME_COMPARATOR = Ordering.<Integer>natural().reverse();
@J2ktIncompatible
@GwtIncompatible // suite
@AndroidIncompatible // test-suite builders
public static Test suite() {
TestSuite suite = new TestSuite();
suite.addTestSuite(MinMaxPriorityQueueTest.class);
suite.addTest(
QueueTestSuiteBuilder.using(
new TestStringQueueGenerator() {
@Override
protected Queue<String> create(String[] elements) {
return MinMaxPriorityQueue.create(asList(elements));
}
})
.named("MinMaxPriorityQueue")
.withFeatures(CollectionSize.ANY, CollectionFeature.GENERAL_PURPOSE)
.createTestSuite());
return suite;
}
// Overkill alert! Test all combinations of 0-2 options during creation.
public void testCreation_simple() {
MinMaxPriorityQueue<Integer> queue = MinMaxPriorityQueue.create();
assertEquals(11, queue.capacity());
checkUnbounded(queue);
checkNatural(queue);
}
public void testCreation_comparator() {
MinMaxPriorityQueue<Integer> queue = MinMaxPriorityQueue.orderedBy(SOME_COMPARATOR).create();
assertEquals(11, queue.capacity());
checkUnbounded(queue);
assertSame(SOME_COMPARATOR, queue.comparator());
}
// We use the rawtypeToWildcard "cast" to make the test work with J2KT in other tests. Leaving one
// test without that cast to verify that using the raw Comparable works outside J2KT.
@J2ktIncompatible // J2KT's translation of raw Comparable is not a supertype of Int translation
public void testCreation_expectedSize() {
MinMaxPriorityQueue<Integer> queue = MinMaxPriorityQueue.expectedSize(8).create();
assertEquals(8, queue.capacity());
checkUnbounded(queue);
checkNatural(queue);
}
public void testCreation_expectedSize_comparator() {
MinMaxPriorityQueue<Integer> queue =
MinMaxPriorityQueue.orderedBy(SOME_COMPARATOR).expectedSize(8).create();
assertEquals(8, queue.capacity());
checkUnbounded(queue);
assertSame(SOME_COMPARATOR, queue.comparator());
}
public void testCreation_maximumSize() {
MinMaxPriorityQueue<Integer> queue =
rawtypeToWildcard(MinMaxPriorityQueue.maximumSize(42)).create();
assertEquals(11, queue.capacity());
assertEquals(42, queue.maximumSize);
checkNatural(queue);
}
public void testCreation_comparator_maximumSize() {
MinMaxPriorityQueue<Integer> queue =
MinMaxPriorityQueue.orderedBy(SOME_COMPARATOR).maximumSize(42).create();
assertEquals(11, queue.capacity());
assertEquals(42, queue.maximumSize);
assertSame(SOME_COMPARATOR, queue.comparator());
}
public void testCreation_expectedSize_maximumSize() {
MinMaxPriorityQueue<Integer> queue =
rawtypeToWildcard(MinMaxPriorityQueue.expectedSize(8)).maximumSize(42).create();
assertEquals(8, queue.capacity());
assertEquals(42, queue.maximumSize);
checkNatural(queue);
}
  // Fixed, distinct contents shared by all the testCreation_*_withContents tests (6 elements).
  private static final ImmutableList<Integer> NUMBERS = ImmutableList.of(4, 8, 15, 16, 23, 42);
public void testCreation_withContents() {
MinMaxPriorityQueue<Integer> queue = MinMaxPriorityQueue.create(NUMBERS);
assertEquals(6, queue.size());
assertEquals(11, queue.capacity());
checkUnbounded(queue);
checkNatural(queue);
}
public void testCreation_comparator_withContents() {
MinMaxPriorityQueue<Integer> queue =
MinMaxPriorityQueue.orderedBy(SOME_COMPARATOR).create(NUMBERS);
assertEquals(6, queue.size());
assertEquals(11, queue.capacity());
checkUnbounded(queue);
assertSame(SOME_COMPARATOR, queue.comparator());
}
public void testCreation_expectedSize_withContents() {
MinMaxPriorityQueue<Integer> queue =
rawtypeToWildcard(MinMaxPriorityQueue.expectedSize(8)).create(NUMBERS);
assertEquals(6, queue.size());
assertEquals(8, queue.capacity());
checkUnbounded(queue);
checkNatural(queue);
}
public void testCreation_maximumSize_withContents() {
MinMaxPriorityQueue<Integer> queue =
rawtypeToWildcard(MinMaxPriorityQueue.maximumSize(42)).create(NUMBERS);
assertEquals(6, queue.size());
assertEquals(11, queue.capacity());
assertEquals(42, queue.maximumSize);
checkNatural(queue);
}
// Now test everything at once
public void testCreation_allOptions() {
MinMaxPriorityQueue<Integer> queue =
MinMaxPriorityQueue.orderedBy(SOME_COMPARATOR)
.expectedSize(8)
.maximumSize(42)
.create(NUMBERS);
assertEquals(6, queue.size());
assertEquals(8, queue.capacity());
assertEquals(42, queue.maximumSize);
assertSame(SOME_COMPARATOR, queue.comparator());
}
// TODO: tests that check the weird interplay between expected size,
// maximum size, size of initial contents, default capacity...
  /** Asserts that {@code queue} uses the natural-ordering comparator. */
  private static void checkNatural(MinMaxPriorityQueue<Integer> queue) {
    assertSame(Ordering.natural(), queue.comparator());
  }
  /** Asserts that {@code queue} has no effective maximum size. */
  private static void checkUnbounded(MinMaxPriorityQueue<Integer> queue) {
    assertEquals(Integer.MAX_VALUE, queue.maximumSize);
  }
  /**
   * Randomized smoke test: performs random offers and min/max removals while mirroring the
   * expected contents in a sorted replica map, checking heap invariants throughout.
   */
  public void testHeapIntact() {
    Random random = new Random(0);
    int heapSize = 99;
    int numberOfModifications = 100;
    MinMaxPriorityQueue<Integer> mmHeap =
        rawtypeToWildcard(MinMaxPriorityQueue.expectedSize(heapSize)).create();
    /*
     * this map would contain the same exact elements as the MinMaxHeap; the
     * value in the map is the number of occurrences of the key.
     */
    SortedMap<Integer, AtomicInteger> replica = Maps.newTreeMap();
    assertTrue("Empty heap should be OK", mmHeap.isIntact());
    for (int i = 0; i < heapSize; i++) {
      int randomInt = random.nextInt();
      mmHeap.offer(randomInt);
      insertIntoReplica(replica, randomInt);
    }
    assertIntact(mmHeap);
    assertEquals(heapSize, mmHeap.size());
    int currentHeapSize = heapSize;
    for (int i = 0; i < numberOfModifications; i++) {
      if (random.nextBoolean()) {
        /* insert a new element */
        int randomInt = random.nextInt();
        mmHeap.offer(randomInt);
        insertIntoReplica(replica, randomInt);
        currentHeapSize++;
      } else {
        /* remove either min or max */
        if (random.nextBoolean()) {
          removeMinFromReplica(replica, mmHeap.poll());
        } else {
          removeMaxFromReplica(replica, mmHeap.pollLast());
        }
        // Every key still tracked by the replica must still be present in the heap.
        for (Integer v : replica.keySet()) {
          assertThat(mmHeap).contains(v);
        }
        assertIntact(mmHeap);
        currentHeapSize--;
        assertEquals(currentHeapSize, mmHeap.size());
      }
    }
    assertEquals(currentHeapSize, mmHeap.size());
    assertIntact(mmHeap);
  }
public void testSmall() {
MinMaxPriorityQueue<Integer> mmHeap = MinMaxPriorityQueue.create();
mmHeap.add(1);
mmHeap.add(4);
mmHeap.add(2);
mmHeap.add(3);
assertEquals(4, (int) mmHeap.pollLast());
assertEquals(3, (int) mmHeap.peekLast());
assertEquals(3, (int) mmHeap.pollLast());
assertEquals(1, (int) mmHeap.peek());
assertEquals(2, (int) mmHeap.peekLast());
assertEquals(2, (int) mmHeap.pollLast());
assertEquals(1, (int) mmHeap.peek());
assertEquals(1, (int) mmHeap.peekLast());
assertEquals(1, (int) mmHeap.pollLast());
assertNull(mmHeap.peek());
assertNull(mmHeap.peekLast());
assertNull(mmHeap.pollLast());
}
public void testSmallMinHeap() {
MinMaxPriorityQueue<Integer> mmHeap = MinMaxPriorityQueue.create();
mmHeap.add(1);
mmHeap.add(3);
mmHeap.add(2);
assertEquals(1, (int) mmHeap.peek());
assertEquals(1, (int) mmHeap.poll());
assertEquals(3, (int) mmHeap.peekLast());
assertEquals(2, (int) mmHeap.peek());
assertEquals(2, (int) mmHeap.poll());
assertEquals(3, (int) mmHeap.peekLast());
assertEquals(3, (int) mmHeap.peek());
assertEquals(3, (int) mmHeap.poll());
assertNull(mmHeap.peekLast());
assertNull(mmHeap.peek());
assertNull(mmHeap.poll());
}
public void testRemove() {
MinMaxPriorityQueue<Integer> mmHeap = MinMaxPriorityQueue.create();
mmHeap.addAll(Lists.newArrayList(1, 2, 3, 4, 47, 1, 5, 3, 0));
assertTrue("Heap is not intact initially", mmHeap.isIntact());
assertEquals(9, mmHeap.size());
mmHeap.remove(5);
assertEquals(8, mmHeap.size());
assertTrue("Heap is not intact after remove()", mmHeap.isIntact());
assertEquals(47, (int) mmHeap.pollLast());
assertEquals(4, (int) mmHeap.pollLast());
mmHeap.removeAll(Lists.newArrayList(2, 3));
assertEquals(3, mmHeap.size());
assertTrue("Heap is not intact after removeAll()", mmHeap.isIntact());
}
public void testContains() {
MinMaxPriorityQueue<Integer> mmHeap = MinMaxPriorityQueue.create();
mmHeap.addAll(Lists.newArrayList(1, 1, 2));
assertEquals(3, mmHeap.size());
assertFalse("Heap does not contain null", mmHeap.contains(null));
assertFalse("Heap does not contain 3", mmHeap.contains(3));
assertFalse("Heap does not contain 3", mmHeap.remove(3));
assertEquals(3, mmHeap.size());
assertTrue("Heap is not intact after remove()", mmHeap.isIntact());
assertTrue("Heap contains two 1's", mmHeap.contains(1));
assertTrue("Heap contains two 1's", mmHeap.remove(1));
assertTrue("Heap contains 1", mmHeap.contains(1));
assertTrue("Heap contains 1", mmHeap.remove(1));
assertFalse("Heap does not contain 1", mmHeap.contains(1));
assertTrue("Heap contains 2", mmHeap.remove(2));
assertEquals(0, mmHeap.size());
assertFalse("Heap does not contain anything", mmHeap.contains(1));
assertFalse("Heap does not contain anything", mmHeap.remove(2));
}
public void testIteratorPastEndException() {
MinMaxPriorityQueue<Integer> mmHeap = MinMaxPriorityQueue.create();
mmHeap.addAll(Lists.newArrayList(1, 2));
Iterator<Integer> it = mmHeap.iterator();
assertTrue("Iterator has reached end prematurely", it.hasNext());
it.next();
it.next();
assertThrows(NoSuchElementException.class, () -> it.next());
}
public void testIteratorConcurrentModification() {
MinMaxPriorityQueue<Integer> mmHeap = MinMaxPriorityQueue.create();
mmHeap.addAll(Lists.newArrayList(1, 2, 3, 4));
Iterator<Integer> it = mmHeap.iterator();
assertTrue("Iterator has reached end prematurely", it.hasNext());
it.next();
it.next();
mmHeap.remove(4);
assertThrows(ConcurrentModificationException.class, () -> it.next());
}
  /**
   * Tests a failure caused by fix to childless uncle issue. The removals below steer the heap
   * into a specific internal layout; iteration with a mid-stream remove() must still visit each
   * remaining element exactly once.
   */
  public void testIteratorRegressionChildlessUncle() {
    ArrayList<Integer> initial = Lists.newArrayList(1, 15, 13, 8, 9, 10, 11, 14);
    MinMaxPriorityQueue<Integer> q = MinMaxPriorityQueue.create(initial);
    assertIntact(q);
    q.remove(9);
    q.remove(11);
    q.remove(10);
    // Now we're in the critical state: [1, 15, 13, 8, 14]
    // Removing 8 while iterating caused duplicates in iteration result.
    List<Integer> result = Lists.newArrayListWithCapacity(initial.size());
    for (Iterator<Integer> iter = q.iterator(); iter.hasNext(); ) {
      Integer value = iter.next();
      result.add(value);
      if (value == 8) {
        iter.remove();
      }
    }
    assertIntact(q);
    assertThat(result).containsExactly(1, 15, 13, 8, 14);
  }
/**
* This tests a special case of the removeAt() call. Moving an element sideways on the heap could
* break the invariants. Sometimes we need to bubble an element up instead of trickling down. See
* implementation.
*/
public void testInvalidatingRemove() {
MinMaxPriorityQueue<Integer> mmHeap = MinMaxPriorityQueue.create();
mmHeap.addAll(
Lists.newArrayList(1, 20, 1000, 2, 3, 30, 40, 10, 11, 12, 13, 300, 400, 500, 600));
assertEquals(15, mmHeap.size());
assertTrue("Heap is not intact initially", mmHeap.isIntact());
mmHeap.remove(12);
assertEquals(14, mmHeap.size());
assertTrue("Heap is not intact after remove()", mmHeap.isIntact());
}
/** This tests a more obscure special case, but otherwise similar to above. */
public void testInvalidatingRemove2() {
MinMaxPriorityQueue<Integer> mmHeap = MinMaxPriorityQueue.create();
List<Integer> values =
Lists.newArrayList(
1, 20, 1000, 2, 3, 30, 40, 10, 11, 12, 13, 300, 400, 500, 600, 4, 5, 6, 7, 8, 9, 4, 5,
200, 250);
mmHeap.addAll(values);
assertEquals(25, mmHeap.size());
assertTrue("Heap is not intact initially", mmHeap.isIntact());
mmHeap.remove(2);
assertEquals(24, mmHeap.size());
assertTrue("Heap is not intact after remove()", mmHeap.isIntact());
values.removeAll(Lists.newArrayList(2));
assertEquals(values.size(), mmHeap.size());
assertTrue(values.containsAll(mmHeap));
assertTrue(mmHeap.containsAll(values));
}
  /**
   * Verifies that Iterator.remove() mid-iteration keeps iteration complete and duplicate-free.
   * The exact sequence of expected values below encodes the queue's internal array layout, so
   * the order of these assertions is itself the behavior under test.
   */
  public void testIteratorInvalidatingIteratorRemove() {
    MinMaxPriorityQueue<Integer> mmHeap = MinMaxPriorityQueue.create();
    mmHeap.addAll(Lists.newArrayList(1, 20, 100, 2, 3, 30, 40));
    assertEquals(7, mmHeap.size());
    assertTrue("Heap is not intact initially", mmHeap.isIntact());
    Iterator<Integer> it = mmHeap.iterator();
    assertEquals((Integer) 1, it.next());
    assertEquals((Integer) 20, it.next());
    assertEquals((Integer) 100, it.next());
    assertEquals((Integer) 2, it.next());
    it.remove();
    assertFalse(mmHeap.contains(2));
    assertTrue(it.hasNext());
    assertEquals((Integer) 3, it.next());
    assertTrue(it.hasNext());
    assertEquals((Integer) 30, it.next());
    assertTrue(it.hasNext());
    assertEquals((Integer) 40, it.next());
    assertFalse(it.hasNext());
    assertEquals(6, mmHeap.size());
    assertTrue("Heap is not intact after remove()", mmHeap.isIntact());
    assertFalse(mmHeap.contains(2));
    // This tests that it.remove() above actually changed the order. It
    // indicates that the value 40 was stored in forgetMeNot, so it is
    // returned in the last call to it.next(). Without it, 30 should be the last
    // item returned by the iterator.
    Integer lastItem = 0;
    for (Integer tmp : mmHeap) {
      lastItem = tmp;
    }
    assertEquals((Integer) 30, lastItem);
  }
/**
* This tests a special case where removeAt has to trickle an element first down one level from a
* min to a max level, then up one level above the index of the removed element. It also tests
* that skipMe in the iterator plays nicely with forgetMeNot.
*/
  public void testIteratorInvalidatingIteratorRemove2() {
    // As with the previous test, the exact expected sequence encodes internal layout
    // (skipMe/forgetMeNot bookkeeping); assertion order is the behavior under test.
    MinMaxPriorityQueue<Integer> mmHeap = MinMaxPriorityQueue.create();
    mmHeap.addAll(
        Lists.newArrayList(1, 20, 1000, 2, 3, 30, 40, 10, 11, 12, 13, 200, 300, 500, 400));
    assertTrue("Heap is not intact initially", mmHeap.isIntact());
    Iterator<Integer> it = mmHeap.iterator();
    assertEquals((Integer) 1, it.next());
    assertEquals((Integer) 20, it.next());
    assertEquals((Integer) 1000, it.next());
    assertEquals((Integer) 2, it.next());
    it.remove();
    // After this remove, 400 has moved up and 20 down past cursor
    assertTrue("Heap is not intact after remove", mmHeap.isIntact());
    assertEquals((Integer) 10, it.next());
    assertEquals((Integer) 3, it.next());
    it.remove();
    // After this remove, 400 moved down again and 500 up past the cursor
    assertTrue("Heap is not intact after remove", mmHeap.isIntact());
    assertEquals((Integer) 12, it.next());
    assertEquals((Integer) 30, it.next());
    assertEquals((Integer) 40, it.next());
    // Skipping 20
    assertEquals((Integer) 11, it.next());
    // Not skipping 400, because it moved back down
    assertEquals((Integer) 400, it.next());
    assertEquals((Integer) 13, it.next());
    assertEquals((Integer) 200, it.next());
    assertEquals((Integer) 300, it.next());
    // Last from forgetMeNot.
    assertEquals((Integer) 500, it.next());
  }
public void testRemoveFromStringHeap() {
MinMaxPriorityQueue<String> mmHeap =
rawtypeToWildcard(MinMaxPriorityQueue.expectedSize(5)).create();
Collections.addAll(mmHeap, "foo", "bar", "foobar", "barfoo", "larry", "sergey", "eric");
assertTrue("Heap is not intact initially", mmHeap.isIntact());
assertEquals("bar", mmHeap.peek());
assertEquals("sergey", mmHeap.peekLast());
assertEquals(7, mmHeap.size());
assertTrue("Could not remove larry", mmHeap.remove("larry"));
assertEquals(6, mmHeap.size());
assertFalse("heap contains larry which has been removed", mmHeap.contains("larry"));
assertTrue("heap does not contain sergey", mmHeap.contains("sergey"));
assertTrue("Could not remove larry", mmHeap.removeAll(Lists.newArrayList("sergey", "eric")));
assertFalse("Could remove nikesh which is not in the heap", mmHeap.remove("nikesh"));
assertEquals(4, mmHeap.size());
}
public void testCreateWithOrdering() {
MinMaxPriorityQueue<String> mmHeap =
MinMaxPriorityQueue.orderedBy(Ordering.<String>natural().reverse()).create();
Collections.addAll(mmHeap, "foo", "bar", "foobar", "barfoo", "larry", "sergey", "eric");
assertTrue("Heap is not intact initially", mmHeap.isIntact());
assertEquals("sergey", mmHeap.peek());
assertEquals("bar", mmHeap.peekLast());
}
public void testCreateWithCapacityAndOrdering() {
MinMaxPriorityQueue<Integer> mmHeap =
MinMaxPriorityQueue.orderedBy(Ordering.<Integer>natural().reverse())
.expectedSize(5)
.create();
Collections.addAll(mmHeap, 1, 7, 2, 56, 2, 5, 23, 68, 0, 3);
assertTrue("Heap is not intact initially", mmHeap.isIntact());
assertEquals(68, (int) mmHeap.peek());
assertEquals(0, (int) mmHeap.peekLast());
}
  /**
   * Runs the collection-testlib IteratorTester against a fresh queue built from {@code values},
   * exploring {@code steps} operations per sequence. After each sequence, verifies that the
   * queue's surviving elements match the reference list and that the heap is intact.
   */
  private <T extends Comparable<T>> void runIterator(List<T> values, int steps) throws Exception {
    IteratorTester<T> tester =
        new IteratorTester<T>(
            steps,
            IteratorFeature.MODIFIABLE,
            new LinkedList<>(values),
            IteratorTester.KnownOrder.UNKNOWN_ORDER) {
          // Recreated for every operation sequence by newTargetIterator().
          private @Nullable MinMaxPriorityQueue<T> mmHeap;

          @Override
          protected Iterator<T> newTargetIterator() {
            mmHeap = MinMaxPriorityQueue.create(values);
            return mmHeap.iterator();
          }

          @Override
          protected void verify(List<T> elements) {
            assertEquals(new HashSet<>(elements), newHashSet(mmHeap.iterator()));
            assertIntact(mmHeap);
          }
        };
    tester.test();
  }
public void testIteratorTester() throws Exception {
Random random = new Random(0);
List<Integer> list = new ArrayList<>();
for (int i = 0; i < 3; i++) {
list.add(random.nextInt());
}
runIterator(list, 6);
}
  /** Larger fixed input for the iterator tester, explored to depth 5. */
  public void testIteratorTesterLarger() throws Exception {
    runIterator(Lists.newArrayList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10), 5);
  }
  /**
   * Randomized removeAt()/add() cycles; on failure the assertion message includes the seed so
   * the run can be reproduced.
   */
  public void testRemoveAt() {
    long seed = new Random().nextLong();
    Random random = new Random(seed);
    int heapSize = 999;
    int numberOfModifications = reduceIterationsIfGwt(500);
    MinMaxPriorityQueue<Integer> mmHeap =
        rawtypeToWildcard(MinMaxPriorityQueue.expectedSize(heapSize)).create();
    for (int i = 0; i < heapSize; i++) {
      mmHeap.add(random.nextInt());
    }
    for (int i = 0; i < numberOfModifications; i++) {
      mmHeap.removeAt(random.nextInt(mmHeap.size()));
      assertIntactUsingSeed(seed, mmHeap);
      mmHeap.add(random.nextInt());
      assertIntactUsingSeed(seed, mmHeap);
    }
  }
  /**
   * Exhaustively checks removeAt() at every index over every permutation of a small ordered
   * list; failure messages report the permutation that broke the heap.
   */
  public void testRemoveAt_exhaustive() {
    int size = reduceExponentIfGwt(8);
    List<Integer> expected = createOrderedList(size);
    for (Collection<Integer> perm : Collections2.permutations(expected)) {
      for (int i = 0; i < perm.size(); i++) {
        MinMaxPriorityQueue<Integer> q = MinMaxPriorityQueue.create(perm);
        q.removeAt(i);
        assertIntactUsingStartedWith(perm, q);
      }
    }
  }
/** Regression test for bug found. */
public void testCorrectOrdering_regression() {
MinMaxPriorityQueue<Integer> q = MinMaxPriorityQueue.create(ImmutableList.of(3, 5, 1, 4, 7));
List<Integer> expected = ImmutableList.of(1, 3, 4, 5, 7);
List<Integer> actual = new ArrayList<>(5);
for (int i = 0; i < expected.size(); i++) {
actual.add(q.pollFirst());
}
assertEquals(expected, actual);
}
public void testCorrectOrdering_smallHeapsPollFirst() {
for (int size = 2; size < 16; size++) {
for (int attempts = 0; attempts < size * (size - 1); attempts++) {
ArrayList<Integer> elements = createOrderedList(size);
List<Integer> expected = ImmutableList.copyOf(elements);
MinMaxPriorityQueue<Integer> q = MinMaxPriorityQueue.create();
long seed = insertRandomly(elements, q);
while (!q.isEmpty()) {
elements.add(q.pollFirst());
}
assertEqualsUsingSeed(seed, expected, elements);
}
}
}
public void testCorrectOrdering_smallHeapsPollLast() {
for (int size = 2; size < 16; size++) {
for (int attempts = 0; attempts < size * (size - 1); attempts++) {
ArrayList<Integer> elements = createOrderedList(size);
List<Integer> expected = ImmutableList.copyOf(elements);
MinMaxPriorityQueue<Integer> q = MinMaxPriorityQueue.create();
long seed = insertRandomly(elements, q);
while (!q.isEmpty()) {
elements.add(0, q.pollLast());
}
assertEqualsUsingSeed(seed, expected, elements);
}
}
}
public void testCorrectOrdering_mediumHeapsPollFirst() {
for (int attempts = 0; attempts < reduceIterationsIfGwt(5000); attempts++) {
int size = new Random().nextInt(256) + 16;
ArrayList<Integer> elements = createOrderedList(size);
List<Integer> expected = ImmutableList.copyOf(elements);
MinMaxPriorityQueue<Integer> q = MinMaxPriorityQueue.create();
long seed = insertRandomly(elements, q);
while (!q.isEmpty()) {
elements.add(q.pollFirst());
}
assertEqualsUsingSeed(seed, expected, elements);
}
}
/** Regression test for bug found in random testing. */
  public void testCorrectOrdering_73ElementBug() {
    // The specific size and seed below reproduce the original failure; do not change them.
    int size = 73;
    long seed = 7522346378524621981L;
    ArrayList<Integer> elements = createOrderedList(size);
    List<Integer> expected = ImmutableList.copyOf(elements);
    MinMaxPriorityQueue<Integer> q = MinMaxPriorityQueue.create();
    insertRandomly(elements, q, new Random(seed));
    assertIntact(q);
    while (!q.isEmpty()) {
      elements.add(q.pollFirst());
      assertIntact(q);
    }
    assertEqualsUsingSeed(seed, expected, elements);
  }
public void testCorrectOrdering_mediumHeapsPollLast() {
for (int attempts = 0; attempts < reduceIterationsIfGwt(5000); attempts++) {
int size = new Random().nextInt(256) + 16;
ArrayList<Integer> elements = createOrderedList(size);
List<Integer> expected = ImmutableList.copyOf(elements);
MinMaxPriorityQueue<Integer> q = MinMaxPriorityQueue.create();
long seed = insertRandomly(elements, q);
while (!q.isEmpty()) {
elements.add(0, q.pollLast());
}
assertEqualsUsingSeed(seed, expected, elements);
}
}
  /**
   * Differential test against java.util.PriorityQueue: random interleaved adds and pollFirst()
   * calls must always agree with the control queue. Failures report the reproduction seed.
   */
  public void testCorrectOrdering_randomAccess() {
    long seed = new Random().nextLong();
    Random random = new Random(seed);
    PriorityQueue<Integer> control = new PriorityQueue<>();
    MinMaxPriorityQueue<Integer> q = MinMaxPriorityQueue.create();
    for (int i = 0; i < 73; i++) { // 73 is a childless uncle case.
      Integer element = random.nextInt();
      control.add(element);
      assertTrue(q.add(element));
    }
    assertIntact(q);
    for (int i = 0; i < reduceIterationsIfGwt(500_000); i++) {
      if (random.nextBoolean()) {
        Integer element = random.nextInt();
        control.add(element);
        q.add(element);
      } else {
        assertEqualsUsingSeed(seed, control.poll(), q.pollFirst());
      }
    }
    // Drain what is left; the two queues must empty out in the same order.
    while (!control.isEmpty()) {
      assertEqualsUsingSeed(seed, control.poll(), q.pollFirst());
    }
    assertTrue(q.isEmpty());
  }
  /**
   * For every permutation of a small input, repeatedly polls the minimum while churning the
   * queue with add/remove cycles; the churn must never disturb which element polls next.
   */
  public void testExhaustive_pollAndPush() {
    int size = 5;
    List<Integer> expected = createOrderedList(size);
    for (Collection<Integer> perm : Collections2.permutations(expected)) {
      MinMaxPriorityQueue<Integer> q = MinMaxPriorityQueue.create(perm);
      List<Integer> elements = Lists.newArrayListWithCapacity(size);
      while (!q.isEmpty()) {
        Integer next = q.pollFirst();
        // Churn: push i and next back in, remove i, and confirm next is still the minimum.
        for (int i = 0; i <= size; i++) {
          assertTrue(q.add(i));
          assertTrue(q.add(next));
          assertTrue(q.remove(i));
          assertEquals(next, q.poll());
        }
        elements.add(next);
      }
      assertEqualsUsingStartedWith(perm, expected, elements);
    }
  }
/** Regression test for b/4124577 */
  public void testRegression_dataCorruption() {
    // Mirrors the queue's expected contents in a plain list and checks exact multiset
    // equality after every single mutation — this is what caught the original corruption.
    int size = 8;
    List<Integer> expected = createOrderedList(size);
    MinMaxPriorityQueue<Integer> q = MinMaxPriorityQueue.create(expected);
    List<Integer> contents = new ArrayList<>(expected);
    List<Integer> elements = Lists.newArrayListWithCapacity(size);
    while (!q.isEmpty()) {
      assertThat(q).containsExactlyElementsIn(contents);
      Integer next = q.pollFirst();
      contents.remove(next);
      assertThat(q).containsExactlyElementsIn(contents);
      for (int i = 0; i <= size; i++) {
        q.add(i);
        contents.add(i);
        assertThat(q).containsExactlyElementsIn(contents);
        q.add(next);
        contents.add(next);
        assertThat(q).containsExactlyElementsIn(contents);
        q.remove(i);
        // Integer.valueOf avoids List.remove(int index) — we want remove-by-value here.
        assertTrue(contents.remove(Integer.valueOf(i)));
        assertThat(q).containsExactlyElementsIn(contents);
        assertEquals(next, q.poll());
        contents.remove(next);
        assertThat(q).containsExactlyElementsIn(contents);
      }
      elements.add(next);
    }
    assertEquals(expected, elements);
  }
/** Regression test for https://github.com/google/guava/issues/2658 */
public void testRemoveRegression() {
MinMaxPriorityQueue<Long> queue =
MinMaxPriorityQueue.create(ImmutableList.of(2L, 3L, 0L, 4L, 1L));
queue.remove(4L);
queue.remove(1L);
assertThat(queue).doesNotContain(1L);
}
public void testRandomRemoves() {
Random random = new Random(0);
for (int attempts = 0; attempts < reduceIterationsIfGwt(1000); attempts++) {
ArrayList<Integer> elements = createOrderedList(10);
shuffle(elements, random);
MinMaxPriorityQueue<Integer> queue = MinMaxPriorityQueue.create(elements);
shuffle(elements, random);
for (Integer element : elements) {
assertThat(queue.remove(element)).isTrue();
assertIntact(queue);
assertThat(queue).doesNotContain(element);
}
assertThat(queue).isEmpty();
}
}
  /**
   * Random adds interleaved with random Iterator.remove() calls, mirrored in a multiset;
   * after every pass the queue must be intact and match the multiset exactly.
   */
  public void testRandomAddsAndRemoves() {
    Random random = new Random(0);
    Multiset<Integer> elements = HashMultiset.create();
    MinMaxPriorityQueue<Integer> queue = MinMaxPriorityQueue.create();
    int range = 10_000; // range should be small enough that equal elements occur semi-frequently
    for (int iter = 0; iter < reduceIterationsIfGwt(1000); iter++) {
      for (int i = 0; i < 100; i++) {
        Integer element = random.nextInt(range);
        elements.add(element);
        queue.add(element);
      }
      Iterator<Integer> queueIterator = queue.iterator();
      int remaining = queue.size();
      while (queueIterator.hasNext()) {
        Integer element = queueIterator.next();
        remaining--;
        assertThat(elements).contains(element);
        if (random.nextBoolean()) {
          elements.remove(element);
          queueIterator.remove();
        }
      }
      // Every element was visited exactly once, even with mid-iteration removals.
      assertThat(remaining).isEqualTo(0);
      assertIntact(queue);
      assertThat(queue).containsExactlyElementsIn(elements);
    }
  }
  /** Tiny element domain used to force frequent duplicates in the test below. */
  private enum Element {
    ONE,
    TWO,
    THREE,
    FOUR,
    FIVE;
  }
  /**
   * Same protocol as testRandomAddsAndRemoves, but over a five-value enum so that nearly every
   * element is a duplicate — stressing equal-element handling in iteration and removal.
   */
  public void testRandomAddsAndRemoves_duplicateElements() {
    Random random = new Random(0);
    Multiset<Element> elements = HashMultiset.create();
    MinMaxPriorityQueue<Element> queue = MinMaxPriorityQueue.create();
    int range = Element.values().length;
    for (int iter = 0; iter < reduceIterationsIfGwt(1000); iter++) {
      for (int i = 0; i < 100; i++) {
        Element element = Element.values()[random.nextInt(range)];
        elements.add(element);
        queue.add(element);
      }
      Iterator<Element> queueIterator = queue.iterator();
      int remaining = queue.size();
      while (queueIterator.hasNext()) {
        Element element = queueIterator.next();
        remaining--;
        assertThat(elements).contains(element);
        if (random.nextBoolean()) {
          elements.remove(element);
          queueIterator.remove();
        }
      }
      // Every element was visited exactly once, even with mid-iteration removals.
      assertThat(remaining).isEqualTo(0);
      assertIntact(queue);
      assertThat(queue).containsExactlyElementsIn(elements);
    }
  }
/** Returns the seed used for the randomization. */
private long insertRandomly(ArrayList<Integer> elements, MinMaxPriorityQueue<Integer> q) {
long seed = new Random().nextLong();
Random random = new Random(seed);
insertRandomly(elements, q, random);
return seed;
}
private static void insertRandomly(
ArrayList<Integer> elements, MinMaxPriorityQueue<Integer> q, Random random) {
while (!elements.isEmpty()) {
int selectedIndex = random.nextInt(elements.size());
q.offer(elements.remove(selectedIndex));
}
}
private ArrayList<Integer> createOrderedList(int size) {
ArrayList<Integer> elements = new ArrayList<>(size);
for (int i = 0; i < size; i++) {
elements.add(i);
}
return elements;
}
  /** Exercises the internal even-level predicate, including the integer-overflow edge cases. */
  public void testIsEvenLevel() {
    assertTrue(MinMaxPriorityQueue.isEvenLevel(0));
    assertFalse(MinMaxPriorityQueue.isEvenLevel(1));
    assertFalse(MinMaxPriorityQueue.isEvenLevel(2));
    assertTrue(MinMaxPriorityQueue.isEvenLevel(3));
    assertFalse(MinMaxPriorityQueue.isEvenLevel((1 << 10) - 2));
    assertTrue(MinMaxPriorityQueue.isEvenLevel((1 << 10) - 1));
    int i = 1 << 29;
    assertTrue(MinMaxPriorityQueue.isEvenLevel(i - 2));
    assertFalse(MinMaxPriorityQueue.isEvenLevel(i - 1));
    assertFalse(MinMaxPriorityQueue.isEvenLevel(i));
    i = 1 << 30;
    assertFalse(MinMaxPriorityQueue.isEvenLevel(i - 2));
    assertTrue(MinMaxPriorityQueue.isEvenLevel(i - 1));
    assertTrue(MinMaxPriorityQueue.isEvenLevel(i));
    // 1 << 31 is negative because of overflow, 1 << 31 - 1 is positive
    // since isEvenLevel adds 1, we need to do - 2.
    assertTrue(MinMaxPriorityQueue.isEvenLevel((1 << 31) - 2));
    assertTrue(MinMaxPriorityQueue.isEvenLevel(Integer.MAX_VALUE - 1));
    // Indices at or beyond the overflow boundary must be rejected, not misclassified.
    assertThrows(IllegalStateException.class, () -> MinMaxPriorityQueue.isEvenLevel((1 << 31) - 1));
    assertThrows(
        IllegalStateException.class, () -> MinMaxPriorityQueue.isEvenLevel(Integer.MAX_VALUE));
    assertThrows(IllegalStateException.class, () -> MinMaxPriorityQueue.isEvenLevel(1 << 31));
    assertThrows(
        IllegalStateException.class, () -> MinMaxPriorityQueue.isEvenLevel(Integer.MIN_VALUE));
  }
  /** Runs NullPointerTester over the public constructors, static and instance methods. */
  @J2ktIncompatible
  @GwtIncompatible // NullPointerTester
  public void testNullPointers() {
    NullPointerTester tester = new NullPointerTester();
    tester.testAllPublicConstructors(MinMaxPriorityQueue.class);
    tester.testAllPublicStaticMethods(MinMaxPriorityQueue.class);
    tester.testAllPublicInstanceMethods(MinMaxPriorityQueue.<String>create());
  }
private static void insertIntoReplica(Map<Integer, AtomicInteger> replica, int newValue) {
if (replica.containsKey(newValue)) {
replica.get(newValue).incrementAndGet();
} else {
replica.put(newValue, new AtomicInteger(1));
}
}
  /** Removes one occurrence of {@code minValue}, asserting it is the replica's smallest key. */
  private static void removeMinFromReplica(
      SortedMap<Integer, AtomicInteger> replica, int minValue) {
    Integer replicatedMinValue = replica.firstKey();
    assertEquals(replicatedMinValue, (Integer) minValue);
    removeFromReplica(replica, replicatedMinValue);
  }
private static void removeMaxFromReplica(
SortedMap<Integer, AtomicInteger> replica, int maxValue) {
Integer replicatedMaxValue = replica.lastKey();
assertTrue("maxValue is incorrect", replicatedMaxValue == maxValue);
removeFromReplica(replica, replicatedMaxValue);
}
private static void removeFromReplica(Map<Integer, AtomicInteger> replica, int value) {
AtomicInteger numOccur = replica.get(value);
if (numOccur.decrementAndGet() == 0) {
replica.remove(value);
}
}
private static void assertIntact(MinMaxPriorityQueue<?> q) {
if (!q.isIntact()) {
fail("State " + Arrays.toString(q.toArray()));
}
}
private static void assertIntactUsingSeed(long seed, MinMaxPriorityQueue<?> q) {
if (!q.isIntact()) {
fail("Using seed " + seed + ". State " + Arrays.toString(q.toArray()));
}
}
private static void assertIntactUsingStartedWith(
Collection<?> startedWith, MinMaxPriorityQueue<?> q) {
if (!q.isIntact()) {
fail("Started with " + startedWith + ". State " + Arrays.toString(q.toArray()));
}
}
private static void assertEqualsUsingSeed(
long seed, @Nullable Object expected, @Nullable Object actual) {
if (!Objects.equals(actual, expected)) {
// fail(), but with the JUnit-supplied message.
assertEquals("Using seed " + seed, expected, actual);
}
}
private static void assertEqualsUsingStartedWith(
Collection<?> startedWith, @Nullable Object expected, @Nullable Object actual) {
if (!Objects.equals(actual, expected)) {
// fail(), but with the JUnit-supplied message.
assertEquals("Started with " + startedWith, expected, actual);
}
}
  // J2kt cannot translate the Comparable rawtype in a usable way (it becomes Comparable<Object>
  // but types are typically only Comparable to themselves).
  // NOTE(review): the unchecked cast is assumed safe for these tests' element types — it only
  // relabels the builder's type parameter; confirm against MinMaxPriorityQueue.Builder usage.
  @SuppressWarnings({"rawtypes", "unchecked"})
  private static MinMaxPriorityQueue.Builder<Comparable<?>> rawtypeToWildcard(
      MinMaxPriorityQueue.Builder<Comparable> builder) {
    return (MinMaxPriorityQueue.Builder) builder;
  }
}
|
google-wallet/rest-samples | 36,627 | java/src/main/java/com/google/developers/wallet/rest/DemoOffer.java | /*
* Copyright 2022 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.developers.wallet.rest;
// [START setup]
// [START imports]
import com.auth0.jwt.JWT;
import com.auth0.jwt.algorithms.Algorithm;
import com.google.api.client.googleapis.batch.BatchRequest;
import com.google.api.client.googleapis.batch.json.JsonBatchCallback;
import com.google.api.client.googleapis.javanet.GoogleNetHttpTransport;
import com.google.api.client.googleapis.json.GoogleJsonError;
import com.google.api.client.googleapis.json.GoogleJsonResponseException;
import com.google.api.client.http.*;
import com.google.api.client.json.gson.GsonFactory;
import com.google.api.services.walletobjects.*;
import com.google.api.services.walletobjects.model.*;
import com.google.auth.http.HttpCredentialsAdapter;
import com.google.auth.oauth2.GoogleCredentials;
import com.google.auth.oauth2.ServiceAccountCredentials;
import java.io.*;
import java.security.interfaces.RSAPrivateKey;
import java.util.*;
// [END imports]
/** Demo class for creating and managing Offers in Google Wallet. */
public class DemoOffer {
  /**
   * Path to service account key file from Google Cloud Console. Environment variable:
   * GOOGLE_APPLICATION_CREDENTIALS.
   */
  public static String keyFilePath;
  /** Service account credentials for Google Wallet APIs. */
  public static GoogleCredentials credentials;
  /** Google Wallet service client. */
  public static Walletobjects service;
  /** Resolves the key file path from the environment, then authenticates the API client. */
  public DemoOffer() throws Exception {
    keyFilePath =
        System.getenv().getOrDefault("GOOGLE_APPLICATION_CREDENTIALS", "/path/to/key.json");
    auth();
  }
// [END setup]
// [START auth]
  /**
   * Creates an authenticated Google Wallet API client from the service account key file at
   * {@code keyFilePath}, scoped to wallet object issuance, and stores it in {@code service}.
   */
  public void auth() throws Exception {
    credentials =
        GoogleCredentials.fromStream(new FileInputStream(keyFilePath))
            .createScoped(List.of(WalletobjectsScopes.WALLET_OBJECT_ISSUER));
    credentials.refresh();
    HttpTransport httpTransport = GoogleNetHttpTransport.newTrustedTransport();
    // Initialize Google Wallet API service
    service =
        new Walletobjects.Builder(
                httpTransport,
                GsonFactory.getDefaultInstance(),
                new HttpCredentialsAdapter(credentials))
            .setApplicationName("APPLICATION_NAME")
            .build();
  }
// [END auth]
// [START createClass]
/**
* Create a class.
*
* @param issuerId The issuer ID being used for this request.
* @param classSuffix Developer-defined unique ID for this pass class.
* @return The pass class ID: "{issuerId}.{classSuffix}"
*/
public String createClass(String issuerId, String classSuffix) throws IOException {
// Check if the class exists
try {
service.offerclass().get(String.format("%s.%s", issuerId, classSuffix)).execute();
System.out.printf("Class %s.%s already exists!%n", issuerId, classSuffix);
return String.format("%s.%s", issuerId, classSuffix);
} catch (GoogleJsonResponseException ex) {
if (ex.getStatusCode() != 404) {
// Something else went wrong...
ex.printStackTrace();
return String.format("%s.%s", issuerId, classSuffix);
}
}
// See link below for more information on required properties
// https://developers.google.com/wallet/retail/offers/rest/v1/offerclass
OfferClass newClass =
new OfferClass()
.setId(String.format("%s.%s", issuerId, classSuffix))
.setIssuerName("Issuer name")
.setReviewStatus("UNDER_REVIEW")
.setProvider("Provider name")
.setTitle("Offer title")
.setRedemptionChannel("ONLINE");
OfferClass response = service.offerclass().insert(newClass).execute();
System.out.println("Class insert response");
System.out.println(response.toPrettyString());
return response.getId();
}
// [END createClass]
// [START updateClass]
/**
* Update a class.
*
* <p><strong>Warning:</strong> This replaces all existing class attributes!
*
* @param issuerId The issuer ID being used for this request.
* @param classSuffix Developer-defined unique ID for this pass class.
* @return The pass class ID: "{issuerId}.{classSuffix}"
*/
public String updateClass(String issuerId, String classSuffix) throws IOException {
OfferClass updatedClass;
// Check if the class exists
try {
updatedClass =
service.offerclass().get(String.format("%s.%s", issuerId, classSuffix)).execute();
} catch (GoogleJsonResponseException ex) {
if (ex.getStatusCode() == 404) {
// Class does not exist
System.out.printf("Class %s.%s not found!%n", issuerId, classSuffix);
return String.format("%s.%s", issuerId, classSuffix);
} else {
// Something else went wrong...
ex.printStackTrace();
return String.format("%s.%s", issuerId, classSuffix);
}
}
// Class exists
// Update the class by adding a homepage
updatedClass.setHomepageUri(
new Uri()
.setUri("https://developers.google.com/wallet")
.setDescription("Homepage description"));
// Note: reviewStatus must be 'UNDER_REVIEW' or 'DRAFT' for updates
updatedClass.setReviewStatus("UNDER_REVIEW");
OfferClass response =
service
.offerclass()
.update(String.format("%s.%s", issuerId, classSuffix), updatedClass)
.execute();
System.out.println("Class update response");
System.out.println(response.toPrettyString());
return response.getId();
}
// [END updateClass]
// [START patchClass]
/**
* Patch a class.
*
* <p>The PATCH method supports patch semantics.
*
* @param issuerId The issuer ID being used for this request.
* @param classSuffix Developer-defined unique ID for this pass class.
* @return The pass class ID: "{issuerId}.{classSuffix}"
*/
public String patchClass(String issuerId, String classSuffix) throws IOException {
// Check if the class exists
try {
service.offerclass().get(String.format("%s.%s", issuerId, classSuffix)).execute();
} catch (GoogleJsonResponseException ex) {
if (ex.getStatusCode() == 404) {
// Class does not exist
System.out.printf("Class %s.%s not found!%n", issuerId, classSuffix);
return String.format("%s.%s", issuerId, classSuffix);
} else {
// Something else went wrong...
ex.printStackTrace();
return String.format("%s.%s", issuerId, classSuffix);
}
}
// Class exists
// Patch the class by adding a homepage
OfferClass patchBody =
new OfferClass()
.setHomepageUri(
new Uri()
.setUri("https://developers.google.com/wallet")
.setDescription("Homepage description"))
// Note: reviewStatus must be 'UNDER_REVIEW' or 'DRAFT' for updates
.setReviewStatus("UNDER_REVIEW");
OfferClass response =
service
.offerclass()
.patch(String.format("%s.%s", issuerId, classSuffix), patchBody)
.execute();
System.out.println("Class patch response");
System.out.println(response.toPrettyString());
return response.getId();
}
// [END patchClass]
// [START addMessageClass]
/**
* Add a message to a pass class.
*
* @param issuerId The issuer ID being used for this request.
* @param classSuffix Developer-defined unique ID for this pass class.
* @param header The message header.
* @param body The message body.
* @return The pass class ID: "{issuerId}.{classSuffix}"
*/
public String addClassMessage(String issuerId, String classSuffix, String header, String body)
throws IOException {
// Check if the class exists
try {
service.offerclass().get(String.format("%s.%s", issuerId, classSuffix)).execute();
} catch (GoogleJsonResponseException ex) {
if (ex.getStatusCode() == 404) {
// Class does not exist
System.out.printf("Class %s.%s not found!%n", issuerId, classSuffix);
return String.format("%s.%s", issuerId, classSuffix);
} else {
// Something else went wrong...
ex.printStackTrace();
return String.format("%s.%s", issuerId, classSuffix);
}
}
AddMessageRequest message =
new AddMessageRequest().setMessage(new Message().setHeader(header).setBody(body));
OfferClassAddMessageResponse response =
service
.offerclass()
.addmessage(String.format("%s.%s", issuerId, classSuffix), message)
.execute();
System.out.println("Class addMessage response");
System.out.println(response.toPrettyString());
return String.format("%s.%s", issuerId, classSuffix);
}
// [END addMessageClass]
// [START createObject]
/**
* Create an object.
*
* @param issuerId The issuer ID being used for this request.
* @param classSuffix Developer-defined unique ID for this pass class.
* @param objectSuffix Developer-defined unique ID for this pass object.
* @return The pass object ID: "{issuerId}.{objectSuffix}"
*/
  public String createObject(String issuerId, String classSuffix, String objectSuffix)
      throws IOException {
    // Check if the object exists
    try {
      service.offerobject().get(String.format("%s.%s", issuerId, objectSuffix)).execute();
      System.out.printf("Object %s.%s already exists!%n", issuerId, objectSuffix);
      return String.format("%s.%s", issuerId, objectSuffix);
    } catch (GoogleJsonResponseException ex) {
      if (ex.getStatusCode() == 404) {
        // Object does not exist
        // Do nothing
      } else {
        // Something else went wrong...
        ex.printStackTrace();
        return String.format("%s.%s", issuerId, objectSuffix);
      }
    }
    // See link below for more information on required properties
    // https://developers.google.com/wallet/retail/offers/rest/v1/offerobject
    OfferObject newObject =
        new OfferObject()
            // Object IDs are "{issuerId}.{objectSuffix}"; classId ties the object to its class.
            .setId(String.format("%s.%s", issuerId, objectSuffix))
            .setClassId(String.format("%s.%s", issuerId, classSuffix))
            .setState("ACTIVE")
            // Display content: hero image, free-text, links and image modules.
            .setHeroImage(
                new Image()
                    .setSourceUri(
                        new ImageUri()
                            .setUri(
                                "https://farm4.staticflickr.com/3723/11177041115_6e6a3b6f49_o.jpg"))
                    .setContentDescription(
                        new LocalizedString()
                            .setDefaultValue(
                                new TranslatedString()
                                    .setLanguage("en-US")
                                    .setValue("Hero image description"))))
            .setTextModulesData(
                List.of(
                    new TextModuleData()
                        .setHeader("Text module header")
                        .setBody("Text module body")
                        .setId("TEXT_MODULE_ID")))
            .setLinksModuleData(
                new LinksModuleData()
                    .setUris(
                        Arrays.asList(
                            new Uri()
                                .setUri("http://maps.google.com/")
                                .setDescription("Link module URI description")
                                .setId("LINK_MODULE_URI_ID"),
                            new Uri()
                                .setUri("tel:6505555555")
                                .setDescription("Link module tel description")
                                .setId("LINK_MODULE_TEL_ID"))))
            .setImageModulesData(
                List.of(
                    new ImageModuleData()
                        .setMainImage(
                            new Image()
                                .setSourceUri(
                                    new ImageUri()
                                        .setUri(
                                            "http://farm4.staticflickr.com/3738/12440799783_3dc3c20606_b.jpg"))
                                .setContentDescription(
                                    new LocalizedString()
                                        .setDefaultValue(
                                            new TranslatedString()
                                                .setLanguage("en-US")
                                                .setValue("Image module description"))))
                        .setId("IMAGE_MODULE_ID")))
            .setBarcode(new Barcode().setType("QR_CODE").setValue("QR code value"))
            .setLocations(
                List.of(
                    new LatLongPoint()
                        .setLatitude(37.424015499999996)
                        .setLongitude(-122.09259560000001)))
            // Pass validity window; after the end time the pass can expire automatically.
            .setValidTimeInterval(
                new TimeInterval()
                    .setStart(new DateTime().setDate("2023-06-12T23:20:50.52Z"))
                    .setEnd(new DateTime().setDate("2023-12-12T23:20:50.52Z")));
    OfferObject response = service.offerobject().insert(newObject).execute();
    System.out.println("Object insert response");
    System.out.println(response.toPrettyString());
    return response.getId();
  }
// [END createObject]
// [START updateObject]
/**
* Update an object.
*
* <p><strong>Warning:</strong> This replaces all existing object attributes!
*
* @param issuerId The issuer ID being used for this request.
* @param objectSuffix Developer-defined unique ID for this pass object.
* @return The pass object ID: "{issuerId}.{objectSuffix}"
*/
public String updateObject(String issuerId, String objectSuffix) throws IOException {
OfferObject updatedObject;
// Check if the object exists
try {
updatedObject =
service.offerobject().get(String.format("%s.%s", issuerId, objectSuffix)).execute();
} catch (GoogleJsonResponseException ex) {
if (ex.getStatusCode() == 404) {
// Object does not exist
System.out.printf("Object %s.%s not found!%n", issuerId, objectSuffix);
return String.format("%s.%s", issuerId, objectSuffix);
} else {
// Something else went wrong...
ex.printStackTrace();
return String.format("%s.%s", issuerId, objectSuffix);
}
}
// Object exists
// Update the object by adding a link
Uri newLink =
new Uri()
.setUri("https://developers.google.com/wallet")
.setDescription("New link description");
if (updatedObject.getLinksModuleData() == null) {
// LinksModuleData was not set on the original object
updatedObject.setLinksModuleData(new LinksModuleData().setUris(List.of(newLink)));
} else {
updatedObject.getLinksModuleData().getUris().add(newLink);
}
OfferObject response =
service
.offerobject()
.update(String.format("%s.%s", issuerId, objectSuffix), updatedObject)
.execute();
System.out.println("Object update response");
System.out.println(response.toPrettyString());
return response.getId();
}
// [END updateObject]
// [START patchObject]
/**
* Patch an object.
*
* @param issuerId The issuer ID being used for this request.
* @param objectSuffix Developer-defined unique ID for this pass object.
* @return The pass object ID: "{issuerId}.{objectSuffix}"
*/
public String patchObject(String issuerId, String objectSuffix) throws IOException {
OfferObject existingObject;
// Check if the object exists
try {
existingObject =
service.offerobject().get(String.format("%s.%s", issuerId, objectSuffix)).execute();
} catch (GoogleJsonResponseException ex) {
if (ex.getStatusCode() == 404) {
// Object does not exist
System.out.printf("Object %s.%s not found!%n", issuerId, objectSuffix);
return String.format("%s.%s", issuerId, objectSuffix);
} else {
// Something else went wrong...
ex.printStackTrace();
return String.format("%s.%s", issuerId, objectSuffix);
}
}
// Object exists
// Patch the object by adding a link
Uri newLink =
new Uri()
.setUri("https://developers.google.com/wallet")
.setDescription("New link description");
OfferObject patchBody = new OfferObject();
if (existingObject.getLinksModuleData() == null) {
// LinksModuleData was not set on the original object
patchBody.setLinksModuleData(new LinksModuleData().setUris(new ArrayList<Uri>()));
} else {
patchBody.setLinksModuleData(existingObject.getLinksModuleData());
}
patchBody.getLinksModuleData().getUris().add(newLink);
OfferObject response =
service
.offerobject()
.patch(String.format("%s.%s", issuerId, objectSuffix), patchBody)
.execute();
System.out.println("Object patch response");
System.out.println(response.toPrettyString());
return response.getId();
}
// [END patchObject]
// [START expireObject]
/**
* Expire an object.
*
* <p>Sets the object's state to Expired. If the valid time interval is already set, the pass will
* expire automatically up to 24 hours after.
*
* @param issuerId The issuer ID being used for this request.
* @param objectSuffix Developer-defined unique ID for this pass object.
* @return The pass object ID: "{issuerId}.{objectSuffix}"
*/
public String expireObject(String issuerId, String objectSuffix) throws IOException {
// Check if the object exists
try {
service.offerobject().get(String.format("%s.%s", issuerId, objectSuffix)).execute();
} catch (GoogleJsonResponseException ex) {
if (ex.getStatusCode() == 404) {
// Object does not exist
System.out.printf("Object %s.%s not found!%n", issuerId, objectSuffix);
return String.format("%s.%s", issuerId, objectSuffix);
} else {
// Something else went wrong...
ex.printStackTrace();
return String.format("%s.%s", issuerId, objectSuffix);
}
}
// Patch the object, setting the pass as expired
OfferObject patchBody = new OfferObject().setState("EXPIRED");
OfferObject response =
service
.offerobject()
.patch(String.format("%s.%s", issuerId, objectSuffix), patchBody)
.execute();
System.out.println("Object expiration response");
System.out.println(response.toPrettyString());
return response.getId();
}
// [END expireObject]
// [START addMessageObject]
/**
* Add a message to a pass object.
*
* @param issuerId The issuer ID being used for this request.
* @param objectSuffix Developer-defined unique ID for this pass object.
* @param header The message header.
* @param body The message body.
* @return The pass object ID: "{issuerId}.{objectSuffix}"
*/
public String addObjectMessage(String issuerId, String objectSuffix, String header, String body)
throws IOException {
// Check if the object exists
try {
service.offerobject().get(String.format("%s.%s", issuerId, objectSuffix)).execute();
} catch (GoogleJsonResponseException ex) {
if (ex.getStatusCode() == 404) {
// Object does not exist
System.out.printf("Object %s.%s not found!%n", issuerId, objectSuffix);
return String.format("%s.%s", issuerId, objectSuffix);
} else {
// Something else went wrong...
ex.printStackTrace();
return String.format("%s.%s", issuerId, objectSuffix);
}
}
AddMessageRequest message =
new AddMessageRequest().setMessage(new Message().setHeader(header).setBody(body));
OfferObjectAddMessageResponse response =
service
.offerobject()
.addmessage(String.format("%s.%s", issuerId, objectSuffix), message)
.execute();
System.out.println("Object addMessage response");
System.out.println(response.toPrettyString());
return String.format("%s.%s", issuerId, objectSuffix);
}
// [END addMessageObject]
// [START jwtNew]
/**
* Generate a signed JWT that creates a new pass class and object.
*
* <p>When the user opens the "Add to Google Wallet" URL and saves the pass to their wallet, the
* pass class and object defined in the JWT are created. This allows you to create multiple pass
* classes and objects in one API call when the user saves the pass to their wallet.
*
* @param issuerId The issuer ID being used for this request.
* @param classSuffix Developer-defined unique ID for this pass class.
* @param objectSuffix Developer-defined unique ID for the pass object.
* @return An "Add to Google Wallet" link.
*/
  public String createJWTNewObjects(String issuerId, String classSuffix, String objectSuffix) {
    // See link below for more information on required properties
    // https://developers.google.com/wallet/retail/offers/rest/v1/offerclass
    OfferClass newClass =
        new OfferClass()
            .setId(String.format("%s.%s", issuerId, classSuffix))
            .setIssuerName("Issuer name")
            .setReviewStatus("UNDER_REVIEW")
            .setProvider("Provider name")
            .setTitle("Offer title")
            .setRedemptionChannel("ONLINE");
    // See link below for more information on required properties
    // https://developers.google.com/wallet/retail/offers/rest/v1/offerobject
    OfferObject newObject =
        new OfferObject()
            .setId(String.format("%s.%s", issuerId, objectSuffix))
            .setClassId(String.format("%s.%s", issuerId, classSuffix))
            .setState("ACTIVE")
            .setHeroImage(
                new Image()
                    .setSourceUri(
                        new ImageUri()
                            .setUri(
                                "https://farm4.staticflickr.com/3723/11177041115_6e6a3b6f49_o.jpg"))
                    .setContentDescription(
                        new LocalizedString()
                            .setDefaultValue(
                                new TranslatedString()
                                    .setLanguage("en-US")
                                    .setValue("Hero image description"))))
            .setTextModulesData(
                List.of(
                    new TextModuleData()
                        .setHeader("Text module header")
                        .setBody("Text module body")
                        .setId("TEXT_MODULE_ID")))
            .setLinksModuleData(
                new LinksModuleData()
                    .setUris(
                        Arrays.asList(
                            new Uri()
                                .setUri("http://maps.google.com/")
                                .setDescription("Link module URI description")
                                .setId("LINK_MODULE_URI_ID"),
                            new Uri()
                                .setUri("tel:6505555555")
                                .setDescription("Link module tel description")
                                .setId("LINK_MODULE_TEL_ID"))))
            .setImageModulesData(
                List.of(
                    new ImageModuleData()
                        .setMainImage(
                            new Image()
                                .setSourceUri(
                                    new ImageUri()
                                        .setUri(
                                            "http://farm4.staticflickr.com/3738/12440799783_3dc3c20606_b.jpg"))
                                .setContentDescription(
                                    new LocalizedString()
                                        .setDefaultValue(
                                            new TranslatedString()
                                                .setLanguage("en-US")
                                                .setValue("Image module description"))))
                        .setId("IMAGE_MODULE_ID")))
            .setBarcode(new Barcode().setType("QR_CODE").setValue("QR code value"))
            .setLocations(
                List.of(
                    new LatLongPoint()
                        .setLatitude(37.424015499999996)
                        .setLongitude(-122.09259560000001)))
            .setValidTimeInterval(
                new TimeInterval()
                    .setStart(new DateTime().setDate("2023-06-12T23:20:50.52Z"))
                    .setEnd(new DateTime().setDate("2023-12-12T23:20:50.52Z")));
    // Create the JWT as a HashMap object
    HashMap<String, Object> claims = new HashMap<String, Object>();
    claims.put("iss", ((ServiceAccountCredentials) credentials).getClientEmail());
    claims.put("aud", "google");
    claims.put("origins", List.of("www.example.com"));
    claims.put("typ", "savetowallet");
    // Create the Google Wallet payload and add to the JWT
    HashMap<String, Object> payload = new HashMap<String, Object>();
    payload.put("offerClasses", List.of(newClass));
    payload.put("offerObjects", List.of(newObject));
    claims.put("payload", payload);
    // The service account credentials are used to sign the JWT
    // (RS256: only the private key is needed for signing, so the public-key slot is null)
    Algorithm algorithm =
        Algorithm.RSA256(
            null, (RSAPrivateKey) ((ServiceAccountCredentials) credentials).getPrivateKey());
    String token = JWT.create().withPayload(claims).sign(algorithm);
    System.out.println("Add to Google Wallet link");
    System.out.printf("https://pay.google.com/gp/v/save/%s%n", token);
    return String.format("https://pay.google.com/gp/v/save/%s", token);
  }
// [END jwtNew]
// [START jwtExisting]
/**
* Generate a signed JWT that references an existing pass object.
*
* <p>When the user opens the "Add to Google Wallet" URL and saves the pass to their wallet, the
* pass objects defined in the JWT are added to the user's Google Wallet app. This allows the user
* to save multiple pass objects in one API call.
*
* <p>The objects to add must follow the below format:
*
* <p>{ 'id': 'ISSUER_ID.OBJECT_SUFFIX', 'classId': 'ISSUER_ID.CLASS_SUFFIX' }
*
* @param issuerId The issuer ID being used for this request.
* @return An "Add to Google Wallet" link.
*/
  public String createJWTExistingObjects(String issuerId) {
    // Multiple pass types can be added at the same time
    // At least one type must be specified in the JWT claims
    // Note: Make sure to replace the placeholder class and object suffixes
    HashMap<String, Object> objectsToAdd = new HashMap<String, Object>();
    // Event tickets
    objectsToAdd.put(
        "eventTicketObjects",
        List.of(
            new EventTicketObject()
                .setId(String.format("%s.%s", issuerId, "EVENT_OBJECT_SUFFIX"))
                .setClassId(String.format("%s.%s", issuerId, "EVENT_CLASS_SUFFIX"))));
    // Boarding passes
    objectsToAdd.put(
        "flightObjects",
        List.of(
            new FlightObject()
                .setId(String.format("%s.%s", issuerId, "FLIGHT_OBJECT_SUFFIX"))
                .setClassId(String.format("%s.%s", issuerId, "FLIGHT_CLASS_SUFFIX"))));
    // Generic passes
    objectsToAdd.put(
        "genericObjects",
        List.of(
            new GenericObject()
                .setId(String.format("%s.%s", issuerId, "GENERIC_OBJECT_SUFFIX"))
                .setClassId(String.format("%s.%s", issuerId, "GENERIC_CLASS_SUFFIX"))));
    // Gift cards
    objectsToAdd.put(
        "giftCardObjects",
        List.of(
            new GiftCardObject()
                .setId(String.format("%s.%s", issuerId, "GIFT_CARD_OBJECT_SUFFIX"))
                .setClassId(String.format("%s.%s", issuerId, "GIFT_CARD_CLASS_SUFFIX"))));
    // Loyalty cards
    objectsToAdd.put(
        "loyaltyObjects",
        List.of(
            new LoyaltyObject()
                .setId(String.format("%s.%s", issuerId, "LOYALTY_OBJECT_SUFFIX"))
                .setClassId(String.format("%s.%s", issuerId, "LOYALTY_CLASS_SUFFIX"))));
    // Offers
    objectsToAdd.put(
        "offerObjects",
        List.of(
            new OfferObject()
                .setId(String.format("%s.%s", issuerId, "OFFER_OBJECT_SUFFIX"))
                .setClassId(String.format("%s.%s", issuerId, "OFFER_CLASS_SUFFIX"))));
    // Transit passes
    objectsToAdd.put(
        "transitObjects",
        List.of(
            new TransitObject()
                .setId(String.format("%s.%s", issuerId, "TRANSIT_OBJECT_SUFFIX"))
                .setClassId(String.format("%s.%s", issuerId, "TRANSIT_CLASS_SUFFIX"))));
    // Create the JWT as a HashMap object
    // (the pre-existing objects above become the JWT payload; nothing new is inserted here)
    HashMap<String, Object> claims = new HashMap<String, Object>();
    claims.put("iss", ((ServiceAccountCredentials) credentials).getClientEmail());
    claims.put("aud", "google");
    claims.put("origins", List.of("www.example.com"));
    claims.put("typ", "savetowallet");
    claims.put("payload", objectsToAdd);
    // The service account credentials are used to sign the JWT
    // (RS256: only the private key is needed for signing, so the public-key slot is null)
    Algorithm algorithm =
        Algorithm.RSA256(
            null, (RSAPrivateKey) ((ServiceAccountCredentials) credentials).getPrivateKey());
    String token = JWT.create().withPayload(claims).sign(algorithm);
    System.out.println("Add to Google Wallet link");
    System.out.printf("https://pay.google.com/gp/v/save/%s%n", token);
    return String.format("https://pay.google.com/gp/v/save/%s", token);
  }
// [END jwtExisting]
// [START batch]
/**
* Batch create Google Wallet objects from an existing class.
*
* @param issuerId The issuer ID being used for this request.
* @param classSuffix Developer-defined unique ID for this pass class.
*/
  public void batchCreateObjects(String issuerId, String classSuffix) throws IOException {
    // Create the batch request client
    BatchRequest batch = service.batch(new HttpCredentialsAdapter(credentials));
    // The callback will be invoked for each request in the batch
    JsonBatchCallback<OfferObject> callback =
        new JsonBatchCallback<OfferObject>() {
          // Invoked if the request was successful
          public void onSuccess(OfferObject response, HttpHeaders responseHeaders) {
            System.out.println("Batch insert response");
            System.out.println(response.toString());
          }
          // Invoked if the request failed
          public void onFailure(GoogleJsonError e, HttpHeaders responseHeaders) {
            System.out.println("Error Message: " + e.getMessage());
          }
        };
    // Example: Generate three new pass objects
    for (int i = 0; i < 3; i++) {
      // Generate a random object suffix
      // (characters outside [A-Za-z0-9_.-] are replaced to keep the ID well-formed)
      String objectSuffix = UUID.randomUUID().toString().replaceAll("[^\\w.-]", "_");
      // See link below for more information on required properties
      // https://developers.google.com/wallet/retail/offers/rest/v1/offerobject
      OfferObject batchObject =
          new OfferObject()
              .setId(String.format("%s.%s", issuerId, objectSuffix))
              .setClassId(String.format("%s.%s", issuerId, classSuffix))
              .setState("ACTIVE")
              .setHeroImage(
                  new Image()
                      .setSourceUri(
                          new ImageUri()
                              .setUri(
                                  "https://farm4.staticflickr.com/3723/11177041115_6e6a3b6f49_o.jpg"))
                      .setContentDescription(
                          new LocalizedString()
                              .setDefaultValue(
                                  new TranslatedString()
                                      .setLanguage("en-US")
                                      .setValue("Hero image description"))))
              .setTextModulesData(
                  List.of(
                      new TextModuleData()
                          .setHeader("Text module header")
                          .setBody("Text module body")
                          .setId("TEXT_MODULE_ID")))
              .setLinksModuleData(
                  new LinksModuleData()
                      .setUris(
                          Arrays.asList(
                              new Uri()
                                  .setUri("http://maps.google.com/")
                                  .setDescription("Link module URI description")
                                  .setId("LINK_MODULE_URI_ID"),
                              new Uri()
                                  .setUri("tel:6505555555")
                                  .setDescription("Link module tel description")
                                  .setId("LINK_MODULE_TEL_ID"))))
              .setImageModulesData(
                  List.of(
                      new ImageModuleData()
                          .setMainImage(
                              new Image()
                                  .setSourceUri(
                                      new ImageUri()
                                          .setUri(
                                              "http://farm4.staticflickr.com/3738/12440799783_3dc3c20606_b.jpg"))
                                  .setContentDescription(
                                      new LocalizedString()
                                          .setDefaultValue(
                                              new TranslatedString()
                                                  .setLanguage("en-US")
                                                  .setValue("Image module description"))))
                          .setId("IMAGE_MODULE_ID")))
              .setBarcode(new Barcode().setType("QR_CODE").setValue("QR code value"))
              .setLocations(
                  List.of(
                      new LatLongPoint()
                          .setLatitude(37.424015499999996)
                          .setLongitude(-122.09259560000001)))
              .setValidTimeInterval(
                  new TimeInterval()
                      .setStart(new DateTime().setDate("2023-06-12T23:20:50.52Z"))
                      .setEnd(new DateTime().setDate("2023-12-12T23:20:50.52Z")));
      // Queue the insert; nothing is sent until batch.execute() below.
      service.offerobject().insert(batchObject).queue(batch, callback);
    }
    // Invoke the batch API calls
    batch.execute();
  }
// [END batch]
}
|
googleapis/google-cloud-java | 36,674 | java-dialogflow/proto-google-cloud-dialogflow-v2beta1/src/main/java/com/google/cloud/dialogflow/v2beta1/ImportAgentRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/dialogflow/v2beta1/agent.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.dialogflow.v2beta1;
/**
*
*
* <pre>
* The request message for
* [Agents.ImportAgent][google.cloud.dialogflow.v2beta1.Agents.ImportAgent].
* </pre>
*
* Protobuf type {@code google.cloud.dialogflow.v2beta1.ImportAgentRequest}
*/
public final class ImportAgentRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.dialogflow.v2beta1.ImportAgentRequest)
ImportAgentRequestOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use ImportAgentRequest.newBuilder() to construct.
  private ImportAgentRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // No-arg constructor: the `agent` oneof stays unset and parent is the empty string.
  private ImportAgentRequest() {
    parent_ = "";
  }

  // Called by the protobuf runtime to allocate fresh instances (e.g. during parsing).
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ImportAgentRequest();
  }
  // Message descriptor for google.cloud.dialogflow.v2beta1.ImportAgentRequest,
  // taken from the generated AgentProto file descriptor.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.dialogflow.v2beta1.AgentProto
        .internal_static_google_cloud_dialogflow_v2beta1_ImportAgentRequest_descriptor;
  }

  // Reflection table mapping proto fields to the generated accessors on this class
  // and its Builder.
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.dialogflow.v2beta1.AgentProto
        .internal_static_google_cloud_dialogflow_v2beta1_ImportAgentRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.dialogflow.v2beta1.ImportAgentRequest.class,
            com.google.cloud.dialogflow.v2beta1.ImportAgentRequest.Builder.class);
  }
  // Discriminator for the `agent` oneof: 0 = unset, 2 = agent_uri, 3 = agent_content.
  private int agentCase_ = 0;

  // Holds the value of whichever `agent` oneof member is set (see agentCase_).
  @SuppressWarnings("serial")
  private java.lang.Object agent_;

  // Enum mirroring the `agent` oneof cases; numbers match the proto field numbers.
  public enum AgentCase
      implements
          com.google.protobuf.Internal.EnumLite,
          com.google.protobuf.AbstractMessage.InternalOneOfEnum {
    AGENT_URI(2),
    AGENT_CONTENT(3),
    AGENT_NOT_SET(0);
    private final int value;

    private AgentCase(int value) {
      this.value = value;
    }

    /**
     * @param value The number of the enum to look for.
     * @return The enum associated with the given number.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static AgentCase valueOf(int value) {
      return forNumber(value);
    }

    // Maps a field number to its case; returns null for unknown numbers
    // (unlike valueOf, which by convention would throw for true enums).
    public static AgentCase forNumber(int value) {
      switch (value) {
        case 2:
          return AGENT_URI;
        case 3:
          return AGENT_CONTENT;
        case 0:
          return AGENT_NOT_SET;
        default:
          return null;
      }
    }

    public int getNumber() {
      return this.value;
    }
  };

  // Which member of the `agent` oneof is currently set.
  public AgentCase getAgentCase() {
    return AgentCase.forNumber(agentCase_);
  }
  public static final int PARENT_FIELD_NUMBER = 1;

  // Holds either a String or a ByteString; decoded lazily and cached as String
  // by getParent() below.
  @SuppressWarnings("serial")
  private volatile java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The project that the agent to import is associated with.
* Format: `projects/<Project ID>` or
* `projects/<Project ID>/locations/<Location ID>`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
  @java.lang.Override
  public java.lang.String getParent() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Lazily decode the UTF-8 ByteString produced by parsing and cache the
      // String form back into parent_ for subsequent calls.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      parent_ = s;
      return s;
    }
  }
/**
*
*
* <pre>
* Required. The project that the agent to import is associated with.
* Format: `projects/<Project ID>` or
* `projects/<Project ID>/locations/<Location ID>`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
  @java.lang.Override
  public com.google.protobuf.ByteString getParentBytes() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      // Encode the cached String to a ByteString and cache the byte form instead.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      parent_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int AGENT_URI_FIELD_NUMBER = 2; // proto field number of the agent_uri oneof member
/**
*
*
* <pre>
* The URI to a Google Cloud Storage file containing the agent to import.
* Note: The URI must start with "gs://".
*
* Dialogflow performs a read operation for the Cloud Storage object
* on the caller's behalf, so your request authentication must
* have read permissions for the object. For more information, see
* [Dialogflow access
* control](https://cloud.google.com/dialogflow/cx/docs/concept/access-control#storage).
* </pre>
*
* <code>string agent_uri = 2;</code>
*
* @return Whether the agentUri field is set.
*/
  // True when the `agent` oneof currently holds agent_uri (case 2).
  public boolean hasAgentUri() {
    return agentCase_ == 2;
  }
/**
*
*
* <pre>
* The URI to a Google Cloud Storage file containing the agent to import.
* Note: The URI must start with "gs://".
*
* Dialogflow performs a read operation for the Cloud Storage object
* on the caller's behalf, so your request authentication must
* have read permissions for the object. For more information, see
* [Dialogflow access
* control](https://cloud.google.com/dialogflow/cx/docs/concept/access-control#storage).
* </pre>
*
* <code>string agent_uri = 2;</code>
*
* @return The agentUri.
*/
  public java.lang.String getAgentUri() {
    // Default to the empty string when the oneof holds a different member.
    java.lang.Object ref = "";
    if (agentCase_ == 2) {
      ref = agent_;
    }
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      // Cache the decoded String only if the oneof still holds agent_uri.
      if (agentCase_ == 2) {
        agent_ = s;
      }
      return s;
    }
  }
/**
*
*
* <pre>
* The URI to a Google Cloud Storage file containing the agent to import.
* Note: The URI must start with "gs://".
*
* Dialogflow performs a read operation for the Cloud Storage object
* on the caller's behalf, so your request authentication must
* have read permissions for the object. For more information, see
* [Dialogflow access
* control](https://cloud.google.com/dialogflow/cx/docs/concept/access-control#storage).
* </pre>
*
* <code>string agent_uri = 2;</code>
*
* @return The bytes for agentUri.
*/
  public com.google.protobuf.ByteString getAgentUriBytes() {
    // Default to the empty string when the oneof holds a different member.
    java.lang.Object ref = "";
    if (agentCase_ == 2) {
      ref = agent_;
    }
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      // Cache the encoded ByteString only if the oneof still holds agent_uri.
      if (agentCase_ == 2) {
        agent_ = b;
      }
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int AGENT_CONTENT_FIELD_NUMBER = 3; // proto field number of the agent_content oneof member
/**
*
*
* <pre>
* Zip compressed raw byte content for agent.
* </pre>
*
* <code>bytes agent_content = 3;</code>
*
* @return Whether the agentContent field is set.
*/
  // True when the `agent` oneof currently holds agent_content (case 3).
  @java.lang.Override
  public boolean hasAgentContent() {
    return agentCase_ == 3;
  }
/**
*
*
* <pre>
* Zip compressed raw byte content for agent.
* </pre>
*
* <code>bytes agent_content = 3;</code>
*
* @return The agentContent.
*/
  @java.lang.Override
  public com.google.protobuf.ByteString getAgentContent() {
    if (agentCase_ == 3) {
      return (com.google.protobuf.ByteString) agent_;
    }
    // Proto3 default: empty bytes when the oneof holds a different member.
    return com.google.protobuf.ByteString.EMPTY;
  }
  // Memoized isInitialized result: -1 unknown, 0 false, 1 true.
  private byte memoizedIsInitialized = -1;

  // This message has no required fields, so it is always initialized.
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes set fields in field-number order; the oneof member is written
  // according to agentCase_ (2 = string agent_uri, 3 = bytes agent_content).
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    if (agentCase_ == 2) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, agent_);
    }
    if (agentCase_ == 3) {
      output.writeBytes(3, (com.google.protobuf.ByteString) agent_);
    }
    getUnknownFields().writeTo(output);
  }
  // Computes the wire size once and memoizes it in memoizedSize (-1 = not yet computed).
  // Must mirror writeTo() exactly so size and output stay in sync.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (agentCase_ == 2) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, agent_);
    }
    if (agentCase_ == 3) {
      size +=
          com.google.protobuf.CodedOutputStream.computeBytesSize(
              3, (com.google.protobuf.ByteString) agent_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Field-by-field equality: parent, the active oneof case, the active oneof
  // value, and unknown fields must all match.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.dialogflow.v2beta1.ImportAgentRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.dialogflow.v2beta1.ImportAgentRequest other =
        (com.google.cloud.dialogflow.v2beta1.ImportAgentRequest) obj;
    if (!getParent().equals(other.getParent())) return false;
    if (!getAgentCase().equals(other.getAgentCase())) return false;
    switch (agentCase_) {
      case 2:
        if (!getAgentUri().equals(other.getAgentUri())) return false;
        break;
      case 3:
        if (!getAgentContent().equals(other.getAgentContent())) return false;
        break;
      case 0:
      default:
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
@java.lang.Override
// Hash consistent with equals(): mixes the descriptor, parent, the active
// agent oneof member (keyed by its field number), and unknown fields, using
// protoc's standard 19/37/53/29 multiplier scheme. Memoized in
// memoizedHashCode; 0 means "not yet computed" (a computed hash of 0 would
// be recomputed on every call, which is harmless).
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + PARENT_FIELD_NUMBER;
hash = (53 * hash) + getParent().hashCode();
switch (agentCase_) {
case 2:
hash = (37 * hash) + AGENT_URI_FIELD_NUMBER;
hash = (53 * hash) + getAgentUri().hashCode();
break;
case 3:
hash = (37 * hash) + AGENT_CONTENT_FIELD_NUMBER;
hash = (53 * hash) + getAgentContent().hashCode();
break;
case 0:
default:
// AGENT_NOT_SET contributes nothing.
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.dialogflow.v2beta1.ImportAgentRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.v2beta1.ImportAgentRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.v2beta1.ImportAgentRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.v2beta1.ImportAgentRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.v2beta1.ImportAgentRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.v2beta1.ImportAgentRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.v2beta1.ImportAgentRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.v2beta1.ImportAgentRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.dialogflow.v2beta1.ImportAgentRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.v2beta1.ImportAgentRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.dialogflow.v2beta1.ImportAgentRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.v2beta1.ImportAgentRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.dialogflow.v2beta1.ImportAgentRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* The request message for
* [Agents.ImportAgent][google.cloud.dialogflow.v2beta1.Agents.ImportAgent].
* </pre>
*
* Protobuf type {@code google.cloud.dialogflow.v2beta1.ImportAgentRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.dialogflow.v2beta1.ImportAgentRequest)
com.google.cloud.dialogflow.v2beta1.ImportAgentRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.dialogflow.v2beta1.AgentProto
.internal_static_google_cloud_dialogflow_v2beta1_ImportAgentRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.dialogflow.v2beta1.AgentProto
.internal_static_google_cloud_dialogflow_v2beta1_ImportAgentRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.dialogflow.v2beta1.ImportAgentRequest.class,
com.google.cloud.dialogflow.v2beta1.ImportAgentRequest.Builder.class);
}
// Construct using com.google.cloud.dialogflow.v2beta1.ImportAgentRequest.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
parent_ = "";
agentCase_ = 0;
agent_ = null;
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.dialogflow.v2beta1.AgentProto
.internal_static_google_cloud_dialogflow_v2beta1_ImportAgentRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.dialogflow.v2beta1.ImportAgentRequest getDefaultInstanceForType() {
return com.google.cloud.dialogflow.v2beta1.ImportAgentRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.dialogflow.v2beta1.ImportAgentRequest build() {
com.google.cloud.dialogflow.v2beta1.ImportAgentRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
// Builds the message without checking required fields: copies singular
// fields guarded by bitField0_ via buildPartial0, then the agent oneof via
// buildPartialOneofs, and notifies the builder framework via onBuilt().
public com.google.cloud.dialogflow.v2beta1.ImportAgentRequest buildPartial() {
com.google.cloud.dialogflow.v2beta1.ImportAgentRequest result =
new com.google.cloud.dialogflow.v2beta1.ImportAgentRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
buildPartialOneofs(result);
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.dialogflow.v2beta1.ImportAgentRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.parent_ = parent_;
}
}
private void buildPartialOneofs(com.google.cloud.dialogflow.v2beta1.ImportAgentRequest result) {
result.agentCase_ = agentCase_;
result.agent_ = this.agent_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.dialogflow.v2beta1.ImportAgentRequest) {
return mergeFrom((com.google.cloud.dialogflow.v2beta1.ImportAgentRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
// Merges another ImportAgentRequest into this builder: non-empty parent
// overwrites ours, and whichever agent oneof member is set in `other`
// overwrites our oneof. Merging the default instance is a no-op.
public Builder mergeFrom(com.google.cloud.dialogflow.v2beta1.ImportAgentRequest other) {
if (other == com.google.cloud.dialogflow.v2beta1.ImportAgentRequest.getDefaultInstance())
return this;
if (!other.getParent().isEmpty()) {
parent_ = other.parent_;
bitField0_ |= 0x00000001;
onChanged();
}
switch (other.getAgentCase()) {
case AGENT_URI:
{
// Copy the raw field (String or ByteString) directly rather than
// going through setAgentUri, which only accepts String.
agentCase_ = 2;
agent_ = other.agent_;
onChanged();
break;
}
case AGENT_CONTENT:
{
setAgentContent(other.getAgentContent());
break;
}
case AGENT_NOT_SET:
{
// Other has no agent set; keep ours untouched.
break;
}
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
// Parses wire-format input into this builder. Tags follow the protobuf
// encoding tag = (field_number << 3) | wire_type: 10 = parent (field 1,
// length-delimited), 18 = agent_uri (field 2), 26 = agent_content
// (field 3). Tag 0 or an end-group tag terminates the loop; anything else
// is preserved as an unknown field. onChanged() runs even on parse failure
// so partially-merged state is propagated to parent builders.
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
parent_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
java.lang.String s = input.readStringRequireUtf8();
agentCase_ = 2;
agent_ = s;
break;
} // case 18
case 26:
{
agent_ = input.readBytes();
agentCase_ = 3;
break;
} // case 26
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int agentCase_ = 0;
private java.lang.Object agent_;
public AgentCase getAgentCase() {
return AgentCase.forNumber(agentCase_);
}
public Builder clearAgent() {
agentCase_ = 0;
agent_ = null;
onChanged();
return this;
}
private int bitField0_;
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The project that the agent to import is associated with.
* Format: `projects/<Project ID>` or
* `projects/<Project ID>/locations/<Location ID>`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The project that the agent to import is associated with.
* Format: `projects/<Project ID>` or
* `projects/<Project ID>/locations/<Location ID>`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The project that the agent to import is associated with.
* Format: `projects/<Project ID>` or
* `projects/<Project ID>/locations/<Location ID>`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
public Builder setParent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The project that the agent to import is associated with.
* Format: `projects/<Project ID>` or
* `projects/<Project ID>/locations/<Location ID>`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearParent() {
parent_ = getDefaultInstance().getParent();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The project that the agent to import is associated with.
* Format: `projects/<Project ID>` or
* `projects/<Project ID>/locations/<Location ID>`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
*/
public Builder setParentBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* The URI to a Google Cloud Storage file containing the agent to import.
* Note: The URI must start with "gs://".
*
* Dialogflow performs a read operation for the Cloud Storage object
* on the caller's behalf, so your request authentication must
* have read permissions for the object. For more information, see
* [Dialogflow access
* control](https://cloud.google.com/dialogflow/cx/docs/concept/access-control#storage).
* </pre>
*
* <code>string agent_uri = 2;</code>
*
* @return Whether the agentUri field is set.
*/
@java.lang.Override
public boolean hasAgentUri() {
return agentCase_ == 2;
}
/**
*
*
* <pre>
* The URI to a Google Cloud Storage file containing the agent to import.
* Note: The URI must start with "gs://".
*
* Dialogflow performs a read operation for the Cloud Storage object
* on the caller's behalf, so your request authentication must
* have read permissions for the object. For more information, see
* [Dialogflow access
* control](https://cloud.google.com/dialogflow/cx/docs/concept/access-control#storage).
* </pre>
*
* <code>string agent_uri = 2;</code>
*
* @return The agentUri.
*/
@java.lang.Override
public java.lang.String getAgentUri() {
java.lang.Object ref = "";
if (agentCase_ == 2) {
ref = agent_;
}
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (agentCase_ == 2) {
agent_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* The URI to a Google Cloud Storage file containing the agent to import.
* Note: The URI must start with "gs://".
*
* Dialogflow performs a read operation for the Cloud Storage object
* on the caller's behalf, so your request authentication must
* have read permissions for the object. For more information, see
* [Dialogflow access
* control](https://cloud.google.com/dialogflow/cx/docs/concept/access-control#storage).
* </pre>
*
* <code>string agent_uri = 2;</code>
*
* @return The bytes for agentUri.
*/
@java.lang.Override
public com.google.protobuf.ByteString getAgentUriBytes() {
java.lang.Object ref = "";
if (agentCase_ == 2) {
ref = agent_;
}
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
if (agentCase_ == 2) {
agent_ = b;
}
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* The URI to a Google Cloud Storage file containing the agent to import.
* Note: The URI must start with "gs://".
*
* Dialogflow performs a read operation for the Cloud Storage object
* on the caller's behalf, so your request authentication must
* have read permissions for the object. For more information, see
* [Dialogflow access
* control](https://cloud.google.com/dialogflow/cx/docs/concept/access-control#storage).
* </pre>
*
* <code>string agent_uri = 2;</code>
*
* @param value The agentUri to set.
* @return This builder for chaining.
*/
public Builder setAgentUri(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
agentCase_ = 2;
agent_ = value;
onChanged();
return this;
}
/**
*
*
* <pre>
* The URI to a Google Cloud Storage file containing the agent to import.
* Note: The URI must start with "gs://".
*
* Dialogflow performs a read operation for the Cloud Storage object
* on the caller's behalf, so your request authentication must
* have read permissions for the object. For more information, see
* [Dialogflow access
* control](https://cloud.google.com/dialogflow/cx/docs/concept/access-control#storage).
* </pre>
*
* <code>string agent_uri = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearAgentUri() {
if (agentCase_ == 2) {
agentCase_ = 0;
agent_ = null;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* The URI to a Google Cloud Storage file containing the agent to import.
* Note: The URI must start with "gs://".
*
* Dialogflow performs a read operation for the Cloud Storage object
* on the caller's behalf, so your request authentication must
* have read permissions for the object. For more information, see
* [Dialogflow access
* control](https://cloud.google.com/dialogflow/cx/docs/concept/access-control#storage).
* </pre>
*
* <code>string agent_uri = 2;</code>
*
* @param value The bytes for agentUri to set.
* @return This builder for chaining.
*/
public Builder setAgentUriBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
agentCase_ = 2;
agent_ = value;
onChanged();
return this;
}
/**
*
*
* <pre>
* Zip compressed raw byte content for agent.
* </pre>
*
* <code>bytes agent_content = 3;</code>
*
* @return Whether the agentContent field is set.
*/
public boolean hasAgentContent() {
return agentCase_ == 3;
}
/**
*
*
* <pre>
* Zip compressed raw byte content for agent.
* </pre>
*
* <code>bytes agent_content = 3;</code>
*
* @return The agentContent.
*/
public com.google.protobuf.ByteString getAgentContent() {
if (agentCase_ == 3) {
return (com.google.protobuf.ByteString) agent_;
}
return com.google.protobuf.ByteString.EMPTY;
}
/**
*
*
* <pre>
* Zip compressed raw byte content for agent.
* </pre>
*
* <code>bytes agent_content = 3;</code>
*
* @param value The agentContent to set.
* @return This builder for chaining.
*/
public Builder setAgentContent(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
agentCase_ = 3;
agent_ = value;
onChanged();
return this;
}
/**
*
*
* <pre>
* Zip compressed raw byte content for agent.
* </pre>
*
* <code>bytes agent_content = 3;</code>
*
* @return This builder for chaining.
*/
public Builder clearAgentContent() {
if (agentCase_ == 3) {
agentCase_ = 0;
agent_ = null;
onChanged();
}
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.dialogflow.v2beta1.ImportAgentRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.dialogflow.v2beta1.ImportAgentRequest)
private static final com.google.cloud.dialogflow.v2beta1.ImportAgentRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.dialogflow.v2beta1.ImportAgentRequest();
}
public static com.google.cloud.dialogflow.v2beta1.ImportAgentRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Singleton wire-format parser. Delegates to Builder.mergeFrom and always
// returns buildPartial() so callers receive the best-effort partial message;
// every failure mode is normalized to InvalidProtocolBufferException with
// the partial message attached via setUnfinishedMessage.
private static final com.google.protobuf.Parser<ImportAgentRequest> PARSER =
new com.google.protobuf.AbstractParser<ImportAgentRequest>() {
@java.lang.Override
public ImportAgentRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ImportAgentRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ImportAgentRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.dialogflow.v2beta1.ImportAgentRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
// ==== End of ImportAgentRequest.java ====
// Next file: CreateTopicRequest.java
// (googleapis/google-cloud-java: java-managedkafka/proto-google-cloud-managedkafka-v1/
//  src/main/java/com/google/cloud/managedkafka/v1/CreateTopicRequest.java)
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/managedkafka/v1/managed_kafka.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.managedkafka.v1;
/**
*
*
* <pre>
* Request for CreateTopic.
* </pre>
*
* Protobuf type {@code google.cloud.managedkafka.v1.CreateTopicRequest}
*/
public final class CreateTopicRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.managedkafka.v1.CreateTopicRequest)
CreateTopicRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use CreateTopicRequest.newBuilder() to construct.
private CreateTopicRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private CreateTopicRequest() {
parent_ = "";
topicId_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new CreateTopicRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.managedkafka.v1.ManagedKafkaProto
.internal_static_google_cloud_managedkafka_v1_CreateTopicRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.managedkafka.v1.ManagedKafkaProto
.internal_static_google_cloud_managedkafka_v1_CreateTopicRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.managedkafka.v1.CreateTopicRequest.class,
com.google.cloud.managedkafka.v1.CreateTopicRequest.Builder.class);
}
private int bitField0_;
public static final int PARENT_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The parent cluster in which to create the topic.
* Structured like
* `projects/{project}/locations/{location}/clusters/{cluster}`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
@java.lang.Override
// Lazy UTF-8 decode: parent_ may hold either a String or a ByteString
// (set during parsing). On first String access the decoded value is cached
// back into parent_ so subsequent calls are a plain cast.
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. The parent cluster in which to create the topic.
* Structured like
* `projects/{project}/locations/{location}/clusters/{cluster}`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
@java.lang.Override
// Mirror of getParent(): lazily encodes a cached String form of parent_
// to UTF-8 bytes and caches the ByteString back into parent_.
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int TOPIC_ID_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object topicId_ = "";
/**
*
*
* <pre>
* Required. The ID to use for the topic, which will become the final
* component of the topic's name.
*
* This value is structured like: `my-topic-name`.
* </pre>
*
* <code>string topic_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The topicId.
*/
@java.lang.Override
public java.lang.String getTopicId() {
java.lang.Object ref = topicId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
topicId_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. The ID to use for the topic, which will become the final
* component of the topic's name.
*
* This value is structured like: `my-topic-name`.
* </pre>
*
* <code>string topic_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for topicId.
*/
@java.lang.Override
public com.google.protobuf.ByteString getTopicIdBytes() {
java.lang.Object ref = topicId_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
topicId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int TOPIC_FIELD_NUMBER = 3;
private com.google.cloud.managedkafka.v1.Topic topic_;
/**
*
*
* <pre>
* Required. Configuration of the topic to create. Its `name` field is
* ignored.
* </pre>
*
* <code>.google.cloud.managedkafka.v1.Topic topic = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the topic field is set.
*/
@java.lang.Override
public boolean hasTopic() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. Configuration of the topic to create. Its `name` field is
* ignored.
* </pre>
*
* <code>.google.cloud.managedkafka.v1.Topic topic = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The topic.
*/
@java.lang.Override
public com.google.cloud.managedkafka.v1.Topic getTopic() {
return topic_ == null ? com.google.cloud.managedkafka.v1.Topic.getDefaultInstance() : topic_;
}
/**
*
*
* <pre>
* Required. Configuration of the topic to create. Its `name` field is
* ignored.
* </pre>
*
* <code>.google.cloud.managedkafka.v1.Topic topic = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.managedkafka.v1.TopicOrBuilder getTopicOrBuilder() {
return topic_ == null ? com.google.cloud.managedkafka.v1.Topic.getDefaultInstance() : topic_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
// Serializes this CreateTopicRequest: field 1 (parent) and field 2
// (topic_id) only when non-empty (proto3 default elision), field 3 (topic
// submessage) only when its presence bit (bitField0_ & 0x1, see hasTopic())
// is set, then unknown fields.
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(topicId_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, topicId_);
}
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(3, getTopic());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
// Computes (and memoizes in memoizedSize) the exact byte size writeTo will
// emit; the per-field conditions mirror writeTo exactly. -1 means "not yet
// computed".
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(topicId_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, topicId_);
}
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getTopic());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
// Value equality over parent, topic_id, the optional topic submessage
// (presence must match before values are compared), and unknown fields.
// Non-CreateTopicRequest arguments are delegated to the superclass.
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.managedkafka.v1.CreateTopicRequest)) {
return super.equals(obj);
}
com.google.cloud.managedkafka.v1.CreateTopicRequest other =
(com.google.cloud.managedkafka.v1.CreateTopicRequest) obj;
if (!getParent().equals(other.getParent())) return false;
if (!getTopicId().equals(other.getTopicId())) return false;
if (hasTopic() != other.hasTopic()) return false;
if (hasTopic()) {
if (!getTopic().equals(other.getTopic())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
// Hash consistent with equals(): descriptor, parent, topic_id, the topic
// submessage only when present, then unknown fields, using protoc's
// standard 19/37/53/29 multiplier scheme. Memoized in memoizedHashCode;
// 0 means "not yet computed".
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + PARENT_FIELD_NUMBER;
hash = (53 * hash) + getParent().hashCode();
hash = (37 * hash) + TOPIC_ID_FIELD_NUMBER;
hash = (53 * hash) + getTopicId().hashCode();
if (hasTopic()) {
hash = (37 * hash) + TOPIC_FIELD_NUMBER;
hash = (53 * hash) + getTopic().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Deserialization entry points generated by protoc: one overload per supported
// input representation. All byte-based overloads delegate to the shared PARSER;
// stream-based overloads route through GeneratedMessageV3 helpers so that
// IOExceptions are surfaced (or wrapped) consistently.
public static com.google.cloud.managedkafka.v1.CreateTopicRequest parseFrom(
    java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.cloud.managedkafka.v1.CreateTopicRequest parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.managedkafka.v1.CreateTopicRequest parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.cloud.managedkafka.v1.CreateTopicRequest parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.managedkafka.v1.CreateTopicRequest parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.cloud.managedkafka.v1.CreateTopicRequest parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.managedkafka.v1.CreateTopicRequest parseFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.managedkafka.v1.CreateTopicRequest parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
// "Delimited" variants read a varint length prefix before the message payload,
// allowing several messages to be written back-to-back on one stream.
public static com.google.cloud.managedkafka.v1.CreateTopicRequest parseDelimitedFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.managedkafka.v1.CreateTopicRequest parseDelimitedFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}
public static com.google.cloud.managedkafka.v1.CreateTopicRequest parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.managedkafka.v1.CreateTopicRequest parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}
// Creates a fresh builder seeded from the immutable default (empty) instance.
public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}
// Creates a builder pre-populated with a copy of {@code prototype}'s fields.
public static Builder newBuilder(com.google.cloud.managedkafka.v1.CreateTopicRequest prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
  // Skip the redundant mergeFrom when this is the (empty) default instance.
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
 *
 *
 * <pre>
 * Request for CreateTopic.
 * </pre>
 *
 * Protobuf type {@code google.cloud.managedkafka.v1.CreateTopicRequest}
 */
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
    implements
    // @@protoc_insertion_point(builder_implements:google.cloud.managedkafka.v1.CreateTopicRequest)
    com.google.cloud.managedkafka.v1.CreateTopicRequestOrBuilder {
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.managedkafka.v1.ManagedKafkaProto
        .internal_static_google_cloud_managedkafka_v1_CreateTopicRequest_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.managedkafka.v1.ManagedKafkaProto
        .internal_static_google_cloud_managedkafka_v1_CreateTopicRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.managedkafka.v1.CreateTopicRequest.class,
            com.google.cloud.managedkafka.v1.CreateTopicRequest.Builder.class);
  }
  // Construct using com.google.cloud.managedkafka.v1.CreateTopicRequest.newBuilder()
  private Builder() {
    maybeForceBuilderInitialization();
  }
  private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    super(parent);
    maybeForceBuilderInitialization();
  }
  // Eagerly creates nested-message field builders when the runtime requires it
  // (alwaysUseFieldBuilders is true in some runtime configurations).
  private void maybeForceBuilderInitialization() {
    if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
      getTopicFieldBuilder();
    }
  }
  // Resets all fields to their defaults and clears the has-bits.
  @java.lang.Override
  public Builder clear() {
    super.clear();
    bitField0_ = 0;
    parent_ = "";
    topicId_ = "";
    topic_ = null;
    if (topicBuilder_ != null) {
      topicBuilder_.dispose();
      topicBuilder_ = null;
    }
    return this;
  }
  @java.lang.Override
  public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
    return com.google.cloud.managedkafka.v1.ManagedKafkaProto
        .internal_static_google_cloud_managedkafka_v1_CreateTopicRequest_descriptor;
  }
  @java.lang.Override
  public com.google.cloud.managedkafka.v1.CreateTopicRequest getDefaultInstanceForType() {
    return com.google.cloud.managedkafka.v1.CreateTopicRequest.getDefaultInstance();
  }
  @java.lang.Override
  public com.google.cloud.managedkafka.v1.CreateTopicRequest build() {
    com.google.cloud.managedkafka.v1.CreateTopicRequest result = buildPartial();
    if (!result.isInitialized()) {
      throw newUninitializedMessageException(result);
    }
    return result;
  }
  @java.lang.Override
  public com.google.cloud.managedkafka.v1.CreateTopicRequest buildPartial() {
    com.google.cloud.managedkafka.v1.CreateTopicRequest result =
        new com.google.cloud.managedkafka.v1.CreateTopicRequest(this);
    if (bitField0_ != 0) {
      buildPartial0(result);
    }
    onBuilt();
    return result;
  }
  // Copies only the fields whose has-bits are set into the result message.
  private void buildPartial0(com.google.cloud.managedkafka.v1.CreateTopicRequest result) {
    int from_bitField0_ = bitField0_;
    if (((from_bitField0_ & 0x00000001) != 0)) {
      result.parent_ = parent_;
    }
    if (((from_bitField0_ & 0x00000002) != 0)) {
      result.topicId_ = topicId_;
    }
    int to_bitField0_ = 0;
    if (((from_bitField0_ & 0x00000004) != 0)) {
      result.topic_ = topicBuilder_ == null ? topic_ : topicBuilder_.build();
      to_bitField0_ |= 0x00000001;
    }
    result.bitField0_ |= to_bitField0_;
  }
  // Generic reflective Builder API delegations (required overrides).
  @java.lang.Override
  public Builder clone() {
    return super.clone();
  }
  @java.lang.Override
  public Builder setField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.setField(field, value);
  }
  @java.lang.Override
  public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
    return super.clearField(field);
  }
  @java.lang.Override
  public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
    return super.clearOneof(oneof);
  }
  @java.lang.Override
  public Builder setRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
    return super.setRepeatedField(field, index, value);
  }
  @java.lang.Override
  public Builder addRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.addRepeatedField(field, value);
  }
  @java.lang.Override
  public Builder mergeFrom(com.google.protobuf.Message other) {
    if (other instanceof com.google.cloud.managedkafka.v1.CreateTopicRequest) {
      return mergeFrom((com.google.cloud.managedkafka.v1.CreateTopicRequest) other);
    } else {
      super.mergeFrom(other);
      return this;
    }
  }
  // Field-wise merge: non-empty scalar fields from {@code other} overwrite ours;
  // the topic message is merged recursively per proto3 merge semantics.
  public Builder mergeFrom(com.google.cloud.managedkafka.v1.CreateTopicRequest other) {
    if (other == com.google.cloud.managedkafka.v1.CreateTopicRequest.getDefaultInstance())
      return this;
    if (!other.getParent().isEmpty()) {
      parent_ = other.parent_;
      bitField0_ |= 0x00000001;
      onChanged();
    }
    if (!other.getTopicId().isEmpty()) {
      topicId_ = other.topicId_;
      bitField0_ |= 0x00000002;
      onChanged();
    }
    if (other.hasTopic()) {
      mergeTopic(other.getTopic());
    }
    this.mergeUnknownFields(other.getUnknownFields());
    onChanged();
    return this;
  }
  @java.lang.Override
  public final boolean isInitialized() {
    return true;
  }
  // Wire-format parse loop: dispatches on each field tag, preserving unknown
  // fields so round-tripping does not drop data from newer schema versions.
  @java.lang.Override
  public Builder mergeFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 10:
            {
              parent_ = input.readStringRequireUtf8();
              bitField0_ |= 0x00000001;
              break;
            } // case 10
          case 18:
            {
              topicId_ = input.readStringRequireUtf8();
              bitField0_ |= 0x00000002;
              break;
            } // case 18
          case 26:
            {
              input.readMessage(getTopicFieldBuilder().getBuilder(), extensionRegistry);
              bitField0_ |= 0x00000004;
              break;
            } // case 26
          default:
            {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
        } // switch (tag)
      } // while (!done)
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.unwrapIOException();
    } finally {
      onChanged();
    } // finally
    return this;
  }
  // Has-bits: 0x1 = parent, 0x2 = topic_id, 0x4 = topic.
  private int bitField0_;
  private java.lang.Object parent_ = "";
  /**
   *
   *
   * <pre>
   * Required. The parent cluster in which to create the topic.
   * Structured like
   * `projects/{project}/locations/{location}/clusters/{cluster}`.
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The parent.
   */
  public java.lang.String getParent() {
    java.lang.Object ref = parent_;
    if (!(ref instanceof java.lang.String)) {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      parent_ = s;
      return s;
    } else {
      return (java.lang.String) ref;
    }
  }
  /**
   *
   *
   * <pre>
   * Required. The parent cluster in which to create the topic.
   * Structured like
   * `projects/{project}/locations/{location}/clusters/{cluster}`.
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The bytes for parent.
   */
  public com.google.protobuf.ByteString getParentBytes() {
    java.lang.Object ref = parent_;
    if (ref instanceof String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      parent_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  /**
   *
   *
   * <pre>
   * Required. The parent cluster in which to create the topic.
   * Structured like
   * `projects/{project}/locations/{location}/clusters/{cluster}`.
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @param value The parent to set.
   * @return This builder for chaining.
   */
  public Builder setParent(java.lang.String value) {
    if (value == null) {
      throw new NullPointerException();
    }
    parent_ = value;
    bitField0_ |= 0x00000001;
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Required. The parent cluster in which to create the topic.
   * Structured like
   * `projects/{project}/locations/{location}/clusters/{cluster}`.
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return This builder for chaining.
   */
  public Builder clearParent() {
    parent_ = getDefaultInstance().getParent();
    bitField0_ = (bitField0_ & ~0x00000001);
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Required. The parent cluster in which to create the topic.
   * Structured like
   * `projects/{project}/locations/{location}/clusters/{cluster}`.
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @param value The bytes for parent to set.
   * @return This builder for chaining.
   */
  public Builder setParentBytes(com.google.protobuf.ByteString value) {
    if (value == null) {
      throw new NullPointerException();
    }
    checkByteStringIsUtf8(value);
    parent_ = value;
    bitField0_ |= 0x00000001;
    onChanged();
    return this;
  }
  private java.lang.Object topicId_ = "";
  /**
   *
   *
   * <pre>
   * Required. The ID to use for the topic, which will become the final
   * component of the topic's name.
   *
   * This value is structured like: `my-topic-name`.
   * </pre>
   *
   * <code>string topic_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The topicId.
   */
  public java.lang.String getTopicId() {
    java.lang.Object ref = topicId_;
    if (!(ref instanceof java.lang.String)) {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      topicId_ = s;
      return s;
    } else {
      return (java.lang.String) ref;
    }
  }
  /**
   *
   *
   * <pre>
   * Required. The ID to use for the topic, which will become the final
   * component of the topic's name.
   *
   * This value is structured like: `my-topic-name`.
   * </pre>
   *
   * <code>string topic_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The bytes for topicId.
   */
  public com.google.protobuf.ByteString getTopicIdBytes() {
    java.lang.Object ref = topicId_;
    if (ref instanceof String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      topicId_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  /**
   *
   *
   * <pre>
   * Required. The ID to use for the topic, which will become the final
   * component of the topic's name.
   *
   * This value is structured like: `my-topic-name`.
   * </pre>
   *
   * <code>string topic_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @param value The topicId to set.
   * @return This builder for chaining.
   */
  public Builder setTopicId(java.lang.String value) {
    if (value == null) {
      throw new NullPointerException();
    }
    topicId_ = value;
    bitField0_ |= 0x00000002;
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Required. The ID to use for the topic, which will become the final
   * component of the topic's name.
   *
   * This value is structured like: `my-topic-name`.
   * </pre>
   *
   * <code>string topic_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return This builder for chaining.
   */
  public Builder clearTopicId() {
    topicId_ = getDefaultInstance().getTopicId();
    bitField0_ = (bitField0_ & ~0x00000002);
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Required. The ID to use for the topic, which will become the final
   * component of the topic's name.
   *
   * This value is structured like: `my-topic-name`.
   * </pre>
   *
   * <code>string topic_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @param value The bytes for topicId to set.
   * @return This builder for chaining.
   */
  public Builder setTopicIdBytes(com.google.protobuf.ByteString value) {
    if (value == null) {
      throw new NullPointerException();
    }
    checkByteStringIsUtf8(value);
    topicId_ = value;
    bitField0_ |= 0x00000002;
    onChanged();
    return this;
  }
  // The topic message field is stored either directly (topic_) or via a lazily
  // created SingleFieldBuilderV3 (topicBuilder_); exactly one is active.
  private com.google.cloud.managedkafka.v1.Topic topic_;
  private com.google.protobuf.SingleFieldBuilderV3<
          com.google.cloud.managedkafka.v1.Topic,
          com.google.cloud.managedkafka.v1.Topic.Builder,
          com.google.cloud.managedkafka.v1.TopicOrBuilder>
      topicBuilder_;
  /**
   *
   *
   * <pre>
   * Required. Configuration of the topic to create. Its `name` field is
   * ignored.
   * </pre>
   *
   * <code>
   * .google.cloud.managedkafka.v1.Topic topic = 3 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return Whether the topic field is set.
   */
  public boolean hasTopic() {
    return ((bitField0_ & 0x00000004) != 0);
  }
  /**
   *
   *
   * <pre>
   * Required. Configuration of the topic to create. Its `name` field is
   * ignored.
   * </pre>
   *
   * <code>
   * .google.cloud.managedkafka.v1.Topic topic = 3 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The topic.
   */
  public com.google.cloud.managedkafka.v1.Topic getTopic() {
    if (topicBuilder_ == null) {
      return topic_ == null
          ? com.google.cloud.managedkafka.v1.Topic.getDefaultInstance()
          : topic_;
    } else {
      return topicBuilder_.getMessage();
    }
  }
  /**
   *
   *
   * <pre>
   * Required. Configuration of the topic to create. Its `name` field is
   * ignored.
   * </pre>
   *
   * <code>
   * .google.cloud.managedkafka.v1.Topic topic = 3 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  public Builder setTopic(com.google.cloud.managedkafka.v1.Topic value) {
    if (topicBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      topic_ = value;
    } else {
      topicBuilder_.setMessage(value);
    }
    bitField0_ |= 0x00000004;
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Required. Configuration of the topic to create. Its `name` field is
   * ignored.
   * </pre>
   *
   * <code>
   * .google.cloud.managedkafka.v1.Topic topic = 3 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  public Builder setTopic(com.google.cloud.managedkafka.v1.Topic.Builder builderForValue) {
    if (topicBuilder_ == null) {
      topic_ = builderForValue.build();
    } else {
      topicBuilder_.setMessage(builderForValue.build());
    }
    bitField0_ |= 0x00000004;
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Required. Configuration of the topic to create. Its `name` field is
   * ignored.
   * </pre>
   *
   * <code>
   * .google.cloud.managedkafka.v1.Topic topic = 3 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  public Builder mergeTopic(com.google.cloud.managedkafka.v1.Topic value) {
    if (topicBuilder_ == null) {
      if (((bitField0_ & 0x00000004) != 0)
          && topic_ != null
          && topic_ != com.google.cloud.managedkafka.v1.Topic.getDefaultInstance()) {
        getTopicBuilder().mergeFrom(value);
      } else {
        topic_ = value;
      }
    } else {
      topicBuilder_.mergeFrom(value);
    }
    if (topic_ != null) {
      bitField0_ |= 0x00000004;
      onChanged();
    }
    return this;
  }
  /**
   *
   *
   * <pre>
   * Required. Configuration of the topic to create. Its `name` field is
   * ignored.
   * </pre>
   *
   * <code>
   * .google.cloud.managedkafka.v1.Topic topic = 3 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  public Builder clearTopic() {
    bitField0_ = (bitField0_ & ~0x00000004);
    topic_ = null;
    if (topicBuilder_ != null) {
      topicBuilder_.dispose();
      topicBuilder_ = null;
    }
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Required. Configuration of the topic to create. Its `name` field is
   * ignored.
   * </pre>
   *
   * <code>
   * .google.cloud.managedkafka.v1.Topic topic = 3 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  public com.google.cloud.managedkafka.v1.Topic.Builder getTopicBuilder() {
    bitField0_ |= 0x00000004;
    onChanged();
    return getTopicFieldBuilder().getBuilder();
  }
  /**
   *
   *
   * <pre>
   * Required. Configuration of the topic to create. Its `name` field is
   * ignored.
   * </pre>
   *
   * <code>
   * .google.cloud.managedkafka.v1.Topic topic = 3 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  public com.google.cloud.managedkafka.v1.TopicOrBuilder getTopicOrBuilder() {
    if (topicBuilder_ != null) {
      return topicBuilder_.getMessageOrBuilder();
    } else {
      return topic_ == null
          ? com.google.cloud.managedkafka.v1.Topic.getDefaultInstance()
          : topic_;
    }
  }
  /**
   *
   *
   * <pre>
   * Required. Configuration of the topic to create. Its `name` field is
   * ignored.
   * </pre>
   *
   * <code>
   * .google.cloud.managedkafka.v1.Topic topic = 3 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  private com.google.protobuf.SingleFieldBuilderV3<
          com.google.cloud.managedkafka.v1.Topic,
          com.google.cloud.managedkafka.v1.Topic.Builder,
          com.google.cloud.managedkafka.v1.TopicOrBuilder>
      getTopicFieldBuilder() {
    if (topicBuilder_ == null) {
      topicBuilder_ =
          new com.google.protobuf.SingleFieldBuilderV3<
              com.google.cloud.managedkafka.v1.Topic,
              com.google.cloud.managedkafka.v1.Topic.Builder,
              com.google.cloud.managedkafka.v1.TopicOrBuilder>(
              getTopic(), getParentForChildren(), isClean());
      topic_ = null;
    }
    return topicBuilder_;
  }
  @java.lang.Override
  public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.setUnknownFields(unknownFields);
  }
  @java.lang.Override
  public final Builder mergeUnknownFields(
      final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.mergeUnknownFields(unknownFields);
  }
  // @@protoc_insertion_point(builder_scope:google.cloud.managedkafka.v1.CreateTopicRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.managedkafka.v1.CreateTopicRequest)
// Shared immutable default (all-fields-empty) instance; initialized eagerly.
private static final com.google.cloud.managedkafka.v1.CreateTopicRequest DEFAULT_INSTANCE;
static {
  DEFAULT_INSTANCE = new com.google.cloud.managedkafka.v1.CreateTopicRequest();
}
public static com.google.cloud.managedkafka.v1.CreateTopicRequest getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Singleton parser backing all parseFrom overloads. parsePartialFrom attaches
// the partially built message to any parse exception so callers can inspect
// what was decoded before the failure.
private static final com.google.protobuf.Parser<CreateTopicRequest> PARSER =
    new com.google.protobuf.AbstractParser<CreateTopicRequest>() {
      @java.lang.Override
      public CreateTopicRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
public static com.google.protobuf.Parser<CreateTopicRequest> parser() {
  return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<CreateTopicRequest> getParserForType() {
  return PARSER;
}
@java.lang.Override
public com.google.cloud.managedkafka.v1.CreateTopicRequest getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/apigeeregistry/v1/registry_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.apigeeregistry.v1;
/**
*
*
* <pre>
* Request message for ListApiVersions.
* </pre>
*
* Protobuf type {@code google.cloud.apigeeregistry.v1.ListApiVersionsRequest}
*/
public final class ListApiVersionsRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.apigeeregistry.v1.ListApiVersionsRequest)
ListApiVersionsRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListApiVersionsRequest.newBuilder() to construct.
private ListApiVersionsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}
// No-arg constructor for the default instance; string fields default to "".
private ListApiVersionsRequest() {
  parent_ = "";
  pageToken_ = "";
  filter_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  return new ListApiVersionsRequest();
}
// Reflection support: links this class to its descriptor and accessor table
// defined in the generated RegistryServiceProto holder class.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.apigeeregistry.v1.RegistryServiceProto
      .internal_static_google_cloud_apigeeregistry_v1_ListApiVersionsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.apigeeregistry.v1.RegistryServiceProto
      .internal_static_google_cloud_apigeeregistry_v1_ListApiVersionsRequest_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.apigeeregistry.v1.ListApiVersionsRequest.class,
          com.google.cloud.apigeeregistry.v1.ListApiVersionsRequest.Builder.class);
}
public static final int PARENT_FIELD_NUMBER = 1;
// Stores either a String or a ByteString; decoded form is cached lazily on
// first access (standard generated-code representation for string fields).
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
 *
 *
 * <pre>
 * Required. The parent, which owns this collection of versions.
 * Format: `projects/&#42;/locations/&#42;/apis/&#42;`
 * </pre>
 *
 * <code>
 * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return The parent.
 */
@java.lang.Override
public java.lang.String getParent() {
  java.lang.Object ref = parent_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    // First access after parsing: decode the UTF-8 bytes and cache the String.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    parent_ = s;
    return s;
  }
}
/**
 *
 *
 * <pre>
 * Required. The parent, which owns this collection of versions.
 * Format: `projects/&#42;/locations/&#42;/apis/&#42;`
 * </pre>
 *
 * <code>
 * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return The bytes for parent.
 */
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
  java.lang.Object ref = parent_;
  if (ref instanceof java.lang.String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    parent_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
public static final int PAGE_SIZE_FIELD_NUMBER = 2;
private int pageSize_ = 0;
/**
 *
 *
 * <pre>
 * The maximum number of versions to return.
 * The service may return fewer than this value.
 * If unspecified, at most 50 values will be returned.
 * The maximum is 1000; values above 1000 will be coerced to 1000.
 * </pre>
 *
 * <code>int32 page_size = 2;</code>
 *
 * @return The pageSize.
 */
@java.lang.Override
public int getPageSize() {
  return pageSize_;
}
public static final int PAGE_TOKEN_FIELD_NUMBER = 3;
// String-or-ByteString holder with lazy UTF-8 decode, as for parent_.
@SuppressWarnings("serial")
private volatile java.lang.Object pageToken_ = "";
/**
 *
 *
 * <pre>
 * A page token, received from a previous `ListApiVersions` call.
 * Provide this to retrieve the subsequent page.
 *
 * When paginating, all other parameters provided to `ListApiVersions` must
 * match the call that provided the page token.
 * </pre>
 *
 * <code>string page_token = 3;</code>
 *
 * @return The pageToken.
 */
@java.lang.Override
public java.lang.String getPageToken() {
  java.lang.Object ref = pageToken_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    pageToken_ = s;
    return s;
  }
}
/**
 *
 *
 * <pre>
 * A page token, received from a previous `ListApiVersions` call.
 * Provide this to retrieve the subsequent page.
 *
 * When paginating, all other parameters provided to `ListApiVersions` must
 * match the call that provided the page token.
 * </pre>
 *
 * <code>string page_token = 3;</code>
 *
 * @return The bytes for pageToken.
 */
@java.lang.Override
public com.google.protobuf.ByteString getPageTokenBytes() {
  java.lang.Object ref = pageToken_;
  if (ref instanceof java.lang.String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    pageToken_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
public static final int FILTER_FIELD_NUMBER = 4;
// String-or-ByteString holder with lazy UTF-8 decode, as for parent_.
@SuppressWarnings("serial")
private volatile java.lang.Object filter_ = "";
/**
 *
 *
 * <pre>
 * An expression that can be used to filter the list. Filters use the Common
 * Expression Language and can refer to all message fields.
 * </pre>
 *
 * <code>string filter = 4;</code>
 *
 * @return The filter.
 */
@java.lang.Override
public java.lang.String getFilter() {
  java.lang.Object ref = filter_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    filter_ = s;
    return s;
  }
}
/**
 *
 *
 * <pre>
 * An expression that can be used to filter the list. Filters use the Common
 * Expression Language and can refer to all message fields.
 * </pre>
 *
 * <code>string filter = 4;</code>
 *
 * @return The bytes for filter.
 */
@java.lang.Override
public com.google.protobuf.ByteString getFilterBytes() {
  java.lang.Object ref = filter_;
  if (ref instanceof java.lang.String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    filter_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
// Memoized tri-state: -1 = not computed, 0 = not initialized, 1 = initialized.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  // No required proto2 fields in this message, so it is always initialized.
  memoizedIsInitialized = 1;
  return true;
}
// Serializes set (non-default) fields in field-number order, then any unknown
// fields retained from parsing, per proto3 wire-format rules.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
  }
  if (pageSize_ != 0) {
    output.writeInt32(2, pageSize_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 4, filter_);
  }
  getUnknownFields().writeTo(output);
}
// Computes (and memoizes in memoizedSize) the serialized byte size; must
// mirror the field set written by writeTo exactly.
@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;
  size = 0;
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
  }
  if (pageSize_ != 0) {
    size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, filter_);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}
// Value equality over all fields plus unknown fields; paired with hashCode.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.cloud.apigeeregistry.v1.ListApiVersionsRequest)) {
    return super.equals(obj);
  }
  com.google.cloud.apigeeregistry.v1.ListApiVersionsRequest other =
      (com.google.cloud.apigeeregistry.v1.ListApiVersionsRequest) obj;
  if (!getParent().equals(other.getParent())) return false;
  if (getPageSize() != other.getPageSize()) return false;
  if (!getPageToken().equals(other.getPageToken())) return false;
  if (!getFilter().equals(other.getFilter())) return false;
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}
// Hash over the descriptor and every field, memoized; consistent with equals.
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  hash = (37 * hash) + PARENT_FIELD_NUMBER;
  hash = (53 * hash) + getParent().hashCode();
  hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
  hash = (53 * hash) + getPageSize();
  hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
  hash = (53 * hash) + getPageToken().hashCode();
  hash = (37 * hash) + FILTER_FIELD_NUMBER;
  hash = (53 * hash) + getFilter().hashCode();
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
// Deserialization entry points generated by protoc: one overload per supported
// input representation, all ultimately delegating to the shared PARSER.
public static com.google.cloud.apigeeregistry.v1.ListApiVersionsRequest parseFrom(
    java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.cloud.apigeeregistry.v1.ListApiVersionsRequest parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.apigeeregistry.v1.ListApiVersionsRequest parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.cloud.apigeeregistry.v1.ListApiVersionsRequest parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.apigeeregistry.v1.ListApiVersionsRequest parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.cloud.apigeeregistry.v1.ListApiVersionsRequest parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.apigeeregistry.v1.ListApiVersionsRequest parseFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.apigeeregistry.v1.ListApiVersionsRequest parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
// "Delimited" variants read a varint length prefix before the message payload.
public static com.google.cloud.apigeeregistry.v1.ListApiVersionsRequest parseDelimitedFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.apigeeregistry.v1.ListApiVersionsRequest parseDelimitedFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}
public static com.google.cloud.apigeeregistry.v1.ListApiVersionsRequest parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.apigeeregistry.v1.ListApiVersionsRequest parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.apigeeregistry.v1.ListApiVersionsRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
 *
 *
 * <pre>
 * Request message for ListApiVersions.
 * </pre>
 *
 * Protobuf type {@code google.cloud.apigeeregistry.v1.ListApiVersionsRequest}
 *
 * <p>Generated mutable builder. Accumulates the four field values plus a bit
 * mask ({@code bitField0_}: 0x1 parent, 0x2 page_size, 0x4 page_token,
 * 0x8 filter) recording which fields were explicitly set; {@code buildPartial()}
 * copies only the flagged fields into the immutable message.
 */
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
    implements
    // @@protoc_insertion_point(builder_implements:google.cloud.apigeeregistry.v1.ListApiVersionsRequest)
    com.google.cloud.apigeeregistry.v1.ListApiVersionsRequestOrBuilder {
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.apigeeregistry.v1.RegistryServiceProto
        .internal_static_google_cloud_apigeeregistry_v1_ListApiVersionsRequest_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.apigeeregistry.v1.RegistryServiceProto
        .internal_static_google_cloud_apigeeregistry_v1_ListApiVersionsRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.apigeeregistry.v1.ListApiVersionsRequest.class,
            com.google.cloud.apigeeregistry.v1.ListApiVersionsRequest.Builder.class);
  }
  // Construct using com.google.cloud.apigeeregistry.v1.ListApiVersionsRequest.newBuilder()
  private Builder() {}
  private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    super(parent);
  }
  @java.lang.Override
  public Builder clear() {
    // Reset all four fields to their proto3 defaults and drop the has-bits.
    super.clear();
    bitField0_ = 0;
    parent_ = "";
    pageSize_ = 0;
    pageToken_ = "";
    filter_ = "";
    return this;
  }
  @java.lang.Override
  public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
    return com.google.cloud.apigeeregistry.v1.RegistryServiceProto
        .internal_static_google_cloud_apigeeregistry_v1_ListApiVersionsRequest_descriptor;
  }
  @java.lang.Override
  public com.google.cloud.apigeeregistry.v1.ListApiVersionsRequest getDefaultInstanceForType() {
    return com.google.cloud.apigeeregistry.v1.ListApiVersionsRequest.getDefaultInstance();
  }
  @java.lang.Override
  public com.google.cloud.apigeeregistry.v1.ListApiVersionsRequest build() {
    // Like buildPartial(), but rejects uninitialized results (always
    // initialized for this proto3 message — see isInitialized()).
    com.google.cloud.apigeeregistry.v1.ListApiVersionsRequest result = buildPartial();
    if (!result.isInitialized()) {
      throw newUninitializedMessageException(result);
    }
    return result;
  }
  @java.lang.Override
  public com.google.cloud.apigeeregistry.v1.ListApiVersionsRequest buildPartial() {
    com.google.cloud.apigeeregistry.v1.ListApiVersionsRequest result =
        new com.google.cloud.apigeeregistry.v1.ListApiVersionsRequest(this);
    if (bitField0_ != 0) {
      buildPartial0(result);
    }
    onBuilt();
    return result;
  }
  // Copies into `result` only the fields whose has-bit is set, leaving the
  // rest at the defaults assigned by the message constructor.
  private void buildPartial0(com.google.cloud.apigeeregistry.v1.ListApiVersionsRequest result) {
    int from_bitField0_ = bitField0_;
    if (((from_bitField0_ & 0x00000001) != 0)) {
      result.parent_ = parent_;
    }
    if (((from_bitField0_ & 0x00000002) != 0)) {
      result.pageSize_ = pageSize_;
    }
    if (((from_bitField0_ & 0x00000004) != 0)) {
      result.pageToken_ = pageToken_;
    }
    if (((from_bitField0_ & 0x00000008) != 0)) {
      result.filter_ = filter_;
    }
  }
  @java.lang.Override
  public Builder clone() {
    return super.clone();
  }
  @java.lang.Override
  public Builder setField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.setField(field, value);
  }
  @java.lang.Override
  public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
    return super.clearField(field);
  }
  @java.lang.Override
  public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
    return super.clearOneof(oneof);
  }
  @java.lang.Override
  public Builder setRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
    return super.setRepeatedField(field, index, value);
  }
  @java.lang.Override
  public Builder addRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.addRepeatedField(field, value);
  }
  @java.lang.Override
  public Builder mergeFrom(com.google.protobuf.Message other) {
    // Use the typed merge when the runtime type matches; otherwise fall back
    // to the reflective merge in the superclass.
    if (other instanceof com.google.cloud.apigeeregistry.v1.ListApiVersionsRequest) {
      return mergeFrom((com.google.cloud.apigeeregistry.v1.ListApiVersionsRequest) other);
    } else {
      super.mergeFrom(other);
      return this;
    }
  }
  // Field-wise merge: only non-default values from `other` overwrite the
  // current builder state (generated proto3 merge semantics).
  public Builder mergeFrom(com.google.cloud.apigeeregistry.v1.ListApiVersionsRequest other) {
    if (other == com.google.cloud.apigeeregistry.v1.ListApiVersionsRequest.getDefaultInstance())
      return this;
    if (!other.getParent().isEmpty()) {
      parent_ = other.parent_;
      bitField0_ |= 0x00000001;
      onChanged();
    }
    if (other.getPageSize() != 0) {
      setPageSize(other.getPageSize());
    }
    if (!other.getPageToken().isEmpty()) {
      pageToken_ = other.pageToken_;
      bitField0_ |= 0x00000004;
      onChanged();
    }
    if (!other.getFilter().isEmpty()) {
      filter_ = other.filter_;
      bitField0_ |= 0x00000008;
      onChanged();
    }
    this.mergeUnknownFields(other.getUnknownFields());
    onChanged();
    return this;
  }
  @java.lang.Override
  public final boolean isInitialized() {
    // proto3 message with no required fields: always initialized.
    return true;
  }
  // Streaming wire-format parse. Tags 10/16/26/34 are fields
  // parent(1, string), page_size(2, int32), page_token(3, string),
  // filter(4, string); tag 0 marks end of input.
  @java.lang.Override
  public Builder mergeFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 10:
            {
              parent_ = input.readStringRequireUtf8();
              bitField0_ |= 0x00000001;
              break;
            } // case 10
          case 16:
            {
              pageSize_ = input.readInt32();
              bitField0_ |= 0x00000002;
              break;
            } // case 16
          case 26:
            {
              pageToken_ = input.readStringRequireUtf8();
              bitField0_ |= 0x00000004;
              break;
            } // case 26
          case 34:
            {
              filter_ = input.readStringRequireUtf8();
              bitField0_ |= 0x00000008;
              break;
            } // case 34
          default:
            {
              // Unrecognized tags are preserved as unknown fields.
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
        } // switch (tag)
      } // while (!done)
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.unwrapIOException();
    } finally {
      onChanged();
    } // finally
    return this;
  }
  private int bitField0_;
  // Field 1: parent. Stored as String or ByteString; the getters convert
  // lazily between the two forms and cache the converted value.
  private java.lang.Object parent_ = "";
  /**
   *
   *
   * <pre>
   * Required. The parent, which owns this collection of versions.
   * Format: `projects/&#42;/locations/&#42;/apis/&#42;`
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The parent.
   */
  public java.lang.String getParent() {
    java.lang.Object ref = parent_;
    if (!(ref instanceof java.lang.String)) {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      parent_ = s;
      return s;
    } else {
      return (java.lang.String) ref;
    }
  }
  /**
   *
   *
   * <pre>
   * Required. The parent, which owns this collection of versions.
   * Format: `projects/&#42;/locations/&#42;/apis/&#42;`
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The bytes for parent.
   */
  public com.google.protobuf.ByteString getParentBytes() {
    java.lang.Object ref = parent_;
    if (ref instanceof String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      parent_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  /**
   *
   *
   * <pre>
   * Required. The parent, which owns this collection of versions.
   * Format: `projects/&#42;/locations/&#42;/apis/&#42;`
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @param value The parent to set.
   * @return This builder for chaining.
   */
  public Builder setParent(java.lang.String value) {
    if (value == null) {
      throw new NullPointerException();
    }
    parent_ = value;
    bitField0_ |= 0x00000001;
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Required. The parent, which owns this collection of versions.
   * Format: `projects/&#42;/locations/&#42;/apis/&#42;`
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return This builder for chaining.
   */
  public Builder clearParent() {
    parent_ = getDefaultInstance().getParent();
    bitField0_ = (bitField0_ & ~0x00000001);
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Required. The parent, which owns this collection of versions.
   * Format: `projects/&#42;/locations/&#42;/apis/&#42;`
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @param value The bytes for parent to set.
   * @return This builder for chaining.
   */
  public Builder setParentBytes(com.google.protobuf.ByteString value) {
    if (value == null) {
      throw new NullPointerException();
    }
    checkByteStringIsUtf8(value);
    parent_ = value;
    bitField0_ |= 0x00000001;
    onChanged();
    return this;
  }
  // Field 2: page_size (int32).
  private int pageSize_;
  /**
   *
   *
   * <pre>
   * The maximum number of versions to return.
   * The service may return fewer than this value.
   * If unspecified, at most 50 values will be returned.
   * The maximum is 1000; values above 1000 will be coerced to 1000.
   * </pre>
   *
   * <code>int32 page_size = 2;</code>
   *
   * @return The pageSize.
   */
  @java.lang.Override
  public int getPageSize() {
    return pageSize_;
  }
  /**
   *
   *
   * <pre>
   * The maximum number of versions to return.
   * The service may return fewer than this value.
   * If unspecified, at most 50 values will be returned.
   * The maximum is 1000; values above 1000 will be coerced to 1000.
   * </pre>
   *
   * <code>int32 page_size = 2;</code>
   *
   * @param value The pageSize to set.
   * @return This builder for chaining.
   */
  public Builder setPageSize(int value) {
    pageSize_ = value;
    bitField0_ |= 0x00000002;
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * The maximum number of versions to return.
   * The service may return fewer than this value.
   * If unspecified, at most 50 values will be returned.
   * The maximum is 1000; values above 1000 will be coerced to 1000.
   * </pre>
   *
   * <code>int32 page_size = 2;</code>
   *
   * @return This builder for chaining.
   */
  public Builder clearPageSize() {
    bitField0_ = (bitField0_ & ~0x00000002);
    pageSize_ = 0;
    onChanged();
    return this;
  }
  // Field 3: page_token. Same lazy String/ByteString representation as parent_.
  private java.lang.Object pageToken_ = "";
  /**
   *
   *
   * <pre>
   * A page token, received from a previous `ListApiVersions` call.
   * Provide this to retrieve the subsequent page.
   *
   * When paginating, all other parameters provided to `ListApiVersions` must
   * match the call that provided the page token.
   * </pre>
   *
   * <code>string page_token = 3;</code>
   *
   * @return The pageToken.
   */
  public java.lang.String getPageToken() {
    java.lang.Object ref = pageToken_;
    if (!(ref instanceof java.lang.String)) {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      pageToken_ = s;
      return s;
    } else {
      return (java.lang.String) ref;
    }
  }
  /**
   *
   *
   * <pre>
   * A page token, received from a previous `ListApiVersions` call.
   * Provide this to retrieve the subsequent page.
   *
   * When paginating, all other parameters provided to `ListApiVersions` must
   * match the call that provided the page token.
   * </pre>
   *
   * <code>string page_token = 3;</code>
   *
   * @return The bytes for pageToken.
   */
  public com.google.protobuf.ByteString getPageTokenBytes() {
    java.lang.Object ref = pageToken_;
    if (ref instanceof String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      pageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  /**
   *
   *
   * <pre>
   * A page token, received from a previous `ListApiVersions` call.
   * Provide this to retrieve the subsequent page.
   *
   * When paginating, all other parameters provided to `ListApiVersions` must
   * match the call that provided the page token.
   * </pre>
   *
   * <code>string page_token = 3;</code>
   *
   * @param value The pageToken to set.
   * @return This builder for chaining.
   */
  public Builder setPageToken(java.lang.String value) {
    if (value == null) {
      throw new NullPointerException();
    }
    pageToken_ = value;
    bitField0_ |= 0x00000004;
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * A page token, received from a previous `ListApiVersions` call.
   * Provide this to retrieve the subsequent page.
   *
   * When paginating, all other parameters provided to `ListApiVersions` must
   * match the call that provided the page token.
   * </pre>
   *
   * <code>string page_token = 3;</code>
   *
   * @return This builder for chaining.
   */
  public Builder clearPageToken() {
    pageToken_ = getDefaultInstance().getPageToken();
    bitField0_ = (bitField0_ & ~0x00000004);
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * A page token, received from a previous `ListApiVersions` call.
   * Provide this to retrieve the subsequent page.
   *
   * When paginating, all other parameters provided to `ListApiVersions` must
   * match the call that provided the page token.
   * </pre>
   *
   * <code>string page_token = 3;</code>
   *
   * @param value The bytes for pageToken to set.
   * @return This builder for chaining.
   */
  public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
    if (value == null) {
      throw new NullPointerException();
    }
    checkByteStringIsUtf8(value);
    pageToken_ = value;
    bitField0_ |= 0x00000004;
    onChanged();
    return this;
  }
  // Field 4: filter. Same lazy String/ByteString representation as parent_.
  private java.lang.Object filter_ = "";
  /**
   *
   *
   * <pre>
   * An expression that can be used to filter the list. Filters use the Common
   * Expression Language and can refer to all message fields.
   * </pre>
   *
   * <code>string filter = 4;</code>
   *
   * @return The filter.
   */
  public java.lang.String getFilter() {
    java.lang.Object ref = filter_;
    if (!(ref instanceof java.lang.String)) {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      filter_ = s;
      return s;
    } else {
      return (java.lang.String) ref;
    }
  }
  /**
   *
   *
   * <pre>
   * An expression that can be used to filter the list. Filters use the Common
   * Expression Language and can refer to all message fields.
   * </pre>
   *
   * <code>string filter = 4;</code>
   *
   * @return The bytes for filter.
   */
  public com.google.protobuf.ByteString getFilterBytes() {
    java.lang.Object ref = filter_;
    if (ref instanceof String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      filter_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  /**
   *
   *
   * <pre>
   * An expression that can be used to filter the list. Filters use the Common
   * Expression Language and can refer to all message fields.
   * </pre>
   *
   * <code>string filter = 4;</code>
   *
   * @param value The filter to set.
   * @return This builder for chaining.
   */
  public Builder setFilter(java.lang.String value) {
    if (value == null) {
      throw new NullPointerException();
    }
    filter_ = value;
    bitField0_ |= 0x00000008;
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * An expression that can be used to filter the list. Filters use the Common
   * Expression Language and can refer to all message fields.
   * </pre>
   *
   * <code>string filter = 4;</code>
   *
   * @return This builder for chaining.
   */
  public Builder clearFilter() {
    filter_ = getDefaultInstance().getFilter();
    bitField0_ = (bitField0_ & ~0x00000008);
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * An expression that can be used to filter the list. Filters use the Common
   * Expression Language and can refer to all message fields.
   * </pre>
   *
   * <code>string filter = 4;</code>
   *
   * @param value The bytes for filter to set.
   * @return This builder for chaining.
   */
  public Builder setFilterBytes(com.google.protobuf.ByteString value) {
    if (value == null) {
      throw new NullPointerException();
    }
    checkByteStringIsUtf8(value);
    filter_ = value;
    bitField0_ |= 0x00000008;
    onChanged();
    return this;
  }
  @java.lang.Override
  public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.setUnknownFields(unknownFields);
  }
  @java.lang.Override
  public final Builder mergeUnknownFields(
      final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.mergeUnknownFields(unknownFields);
  }
  // @@protoc_insertion_point(builder_scope:google.cloud.apigeeregistry.v1.ListApiVersionsRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.apigeeregistry.v1.ListApiVersionsRequest)
// Shared singleton instance with every field at its proto3 default.
private static final com.google.cloud.apigeeregistry.v1.ListApiVersionsRequest DEFAULT_INSTANCE;
static {
  DEFAULT_INSTANCE = new com.google.cloud.apigeeregistry.v1.ListApiVersionsRequest();
}
public static com.google.cloud.apigeeregistry.v1.ListApiVersionsRequest getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Parser backing all parseFrom overloads. Parsing round-trips through a
// Builder so the wire-format handling lives in one place (Builder.mergeFrom);
// every failure is reported as InvalidProtocolBufferException carrying the
// partially-built message.
private static final com.google.protobuf.Parser<ListApiVersionsRequest> PARSER =
    new com.google.protobuf.AbstractParser<ListApiVersionsRequest>() {
      @java.lang.Override
      public ListApiVersionsRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Attach what was parsed so far so callers can inspect it.
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
public static com.google.protobuf.Parser<ListApiVersionsRequest> parser() {
  return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListApiVersionsRequest> getParserForType() {
  return PARSER;
}
@java.lang.Override
public com.google.cloud.apigeeregistry.v1.ListApiVersionsRequest getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 36,722 | java-alloydb/proto-google-cloud-alloydb-v1/src/main/java/com/google/cloud/alloydb/v1/ListSupportedDatabaseFlagsRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/alloydb/v1/service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.alloydb.v1;
/**
*
*
* <pre>
* Message for listing the information about the supported Database flags.
* </pre>
*
* Protobuf type {@code google.cloud.alloydb.v1.ListSupportedDatabaseFlagsRequest}
*/
public final class ListSupportedDatabaseFlagsRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.alloydb.v1.ListSupportedDatabaseFlagsRequest)
ListSupportedDatabaseFlagsRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListSupportedDatabaseFlagsRequest.newBuilder() to construct.
// Builder-based constructor: field values are transferred from the Builder.
private ListSupportedDatabaseFlagsRequest(
    com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}
// No-arg constructor leaves every field at its proto3 default
// (empty strings, enum number 0 = SCOPE_UNSPECIFIED).
private ListSupportedDatabaseFlagsRequest() {
  parent_ = "";
  pageToken_ = "";
  scope_ = 0;
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  return new ListSupportedDatabaseFlagsRequest();
}
// Descriptor / field-accessor plumbing tying this class to the message
// declared in google/cloud/alloydb/v1/service.proto.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.alloydb.v1.ServiceProto
      .internal_static_google_cloud_alloydb_v1_ListSupportedDatabaseFlagsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.alloydb.v1.ServiceProto
      .internal_static_google_cloud_alloydb_v1_ListSupportedDatabaseFlagsRequest_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.alloydb.v1.ListSupportedDatabaseFlagsRequest.class,
          com.google.cloud.alloydb.v1.ListSupportedDatabaseFlagsRequest.Builder.class);
}
public static final int PARENT_FIELD_NUMBER = 1;
// Stored as Object so the value can hold either the wire form (ByteString) or
// the decoded String; the getters convert lazily and cache the result back
// into the field (hence volatile).
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
 *
 *
 * <pre>
 * Required. The name of the parent resource. The required format is:
 *  * projects/{project}/locations/{location}
 *
 * Regardless of the parent specified here, as long it is contains a valid
 * project and location, the service will return a static list of supported
 * flags resources. Note that we do not yet support region-specific
 * flags.
 * </pre>
 *
 * <code>
 * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return The parent.
 */
@java.lang.Override
public java.lang.String getParent() {
  java.lang.Object ref = parent_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    parent_ = s;
    return s;
  }
}
/**
 *
 *
 * <pre>
 * Required. The name of the parent resource. The required format is:
 *  * projects/{project}/locations/{location}
 *
 * Regardless of the parent specified here, as long it is contains a valid
 * project and location, the service will return a static list of supported
 * flags resources. Note that we do not yet support region-specific
 * flags.
 * </pre>
 *
 * <code>
 * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return The bytes for parent.
 */
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
  java.lang.Object ref = parent_;
  if (ref instanceof java.lang.String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    parent_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
public static final int PAGE_SIZE_FIELD_NUMBER = 2;
// Field 2: page_size (int32); 0 is the proto3 default and is not serialized.
private int pageSize_ = 0;
/**
 *
 *
 * <pre>
 * Requested page size. Server may return fewer items than requested.
 * If unspecified, server will pick an appropriate default.
 * </pre>
 *
 * <code>int32 page_size = 2;</code>
 *
 * @return The pageSize.
 */
@java.lang.Override
public int getPageSize() {
  return pageSize_;
}
public static final int PAGE_TOKEN_FIELD_NUMBER = 3;
// Field 3: page_token. Lazy String/ByteString representation, same caching
// scheme as parent_.
@SuppressWarnings("serial")
private volatile java.lang.Object pageToken_ = "";
/**
 *
 *
 * <pre>
 * A token identifying a page of results the server should return.
 * </pre>
 *
 * <code>string page_token = 3;</code>
 *
 * @return The pageToken.
 */
@java.lang.Override
public java.lang.String getPageToken() {
  java.lang.Object ref = pageToken_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    pageToken_ = s;
    return s;
  }
}
/**
 *
 *
 * <pre>
 * A token identifying a page of results the server should return.
 * </pre>
 *
 * <code>string page_token = 3;</code>
 *
 * @return The bytes for pageToken.
 */
@java.lang.Override
public com.google.protobuf.ByteString getPageTokenBytes() {
  java.lang.Object ref = pageToken_;
  if (ref instanceof java.lang.String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    pageToken_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
public static final int SCOPE_FIELD_NUMBER = 6;
// Field 6: scope. Stored as the raw enum number so unknown values received
// from a newer server survive a round trip.
private int scope_ = 0;
/**
 *
 *
 * <pre>
 * Optional. The scope for which supported flags are requested. If not
 * specified, default is DATABASE.
 * </pre>
 *
 * <code>
 * .google.cloud.alloydb.v1.SupportedDatabaseFlag.Scope scope = 6 [(.google.api.field_behavior) = OPTIONAL];
 * </code>
 *
 * @return The enum numeric value on the wire for scope.
 */
@java.lang.Override
public int getScopeValue() {
  return scope_;
}
/**
 *
 *
 * <pre>
 * Optional. The scope for which supported flags are requested. If not
 * specified, default is DATABASE.
 * </pre>
 *
 * <code>
 * .google.cloud.alloydb.v1.SupportedDatabaseFlag.Scope scope = 6 [(.google.api.field_behavior) = OPTIONAL];
 * </code>
 *
 * @return The scope.
 */
@java.lang.Override
public com.google.cloud.alloydb.v1.SupportedDatabaseFlag.Scope getScope() {
  // Numbers with no matching enum constant map to UNRECOGNIZED rather than null.
  com.google.cloud.alloydb.v1.SupportedDatabaseFlag.Scope result =
      com.google.cloud.alloydb.v1.SupportedDatabaseFlag.Scope.forNumber(scope_);
  return result == null
      ? com.google.cloud.alloydb.v1.SupportedDatabaseFlag.Scope.UNRECOGNIZED
      : result;
}
// -1 = not yet checked, 0 = not initialized, 1 = initialized. This proto3
// message has no required fields, so the answer is always "initialized".
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  memoizedIsInitialized = 1;
  return true;
}
// Serializes to the wire format; fields at their proto3 defaults (empty
// string, 0, SCOPE_UNSPECIFIED) are skipped entirely.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
  }
  if (pageSize_ != 0) {
    output.writeInt32(2, pageSize_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_);
  }
  if (scope_
      != com.google.cloud.alloydb.v1.SupportedDatabaseFlag.Scope.SCOPE_UNSPECIFIED.getNumber()) {
    output.writeEnum(6, scope_);
  }
  getUnknownFields().writeTo(output);
}
// Computes the serialized byte length, mirroring writeTo's skip-default
// logic; -1 in memoizedSize means "not yet computed".
@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;
  size = 0;
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
  }
  if (pageSize_ != 0) {
    size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_);
  }
  if (scope_
      != com.google.cloud.alloydb.v1.SupportedDatabaseFlag.Scope.SCOPE_UNSPECIFIED.getNumber()) {
    size += com.google.protobuf.CodedOutputStream.computeEnumSize(6, scope_);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}
// Value equality over all four fields plus the unknown field set; non-message
// operands fall back to the (reference-based) superclass implementation.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.cloud.alloydb.v1.ListSupportedDatabaseFlagsRequest)) {
    return super.equals(obj);
  }
  com.google.cloud.alloydb.v1.ListSupportedDatabaseFlagsRequest other =
      (com.google.cloud.alloydb.v1.ListSupportedDatabaseFlagsRequest) obj;
  if (!getParent().equals(other.getParent())) return false;
  if (getPageSize() != other.getPageSize()) return false;
  if (!getPageToken().equals(other.getPageToken())) return false;
  if (scope_ != other.scope_) return false;
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}
// Hash over the same components as equals(), memoized in memoizedHashCode;
// 0 is the "not yet computed" sentinel.
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  hash = (37 * hash) + PARENT_FIELD_NUMBER;
  hash = (53 * hash) + getParent().hashCode();
  hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
  hash = (53 * hash) + getPageSize();
  hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
  hash = (53 * hash) + getPageToken().hashCode();
  hash = (37 * hash) + SCOPE_FIELD_NUMBER;
  hash = (53 * hash) + scope_;
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
// --- Generated static parse surface for ListSupportedDatabaseFlagsRequest. ---
// Byte-oriented overloads delegate to PARSER (InvalidProtocolBufferException on
// malformed input); stream-based overloads use GeneratedMessageV3's IO helpers
// and may also propagate IOException from the stream.
public static com.google.cloud.alloydb.v1.ListSupportedDatabaseFlagsRequest parseFrom(
    java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.cloud.alloydb.v1.ListSupportedDatabaseFlagsRequest parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.alloydb.v1.ListSupportedDatabaseFlagsRequest parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.cloud.alloydb.v1.ListSupportedDatabaseFlagsRequest parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.alloydb.v1.ListSupportedDatabaseFlagsRequest parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.cloud.alloydb.v1.ListSupportedDatabaseFlagsRequest parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.alloydb.v1.ListSupportedDatabaseFlagsRequest parseFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.alloydb.v1.ListSupportedDatabaseFlagsRequest parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
// Delimited variants delegate to parseDelimitedWithIOException, the protobuf
// helper for length-prefixed framing of multiple messages on one stream.
public static com.google.cloud.alloydb.v1.ListSupportedDatabaseFlagsRequest parseDelimitedFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.alloydb.v1.ListSupportedDatabaseFlagsRequest parseDelimitedFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}
public static com.google.cloud.alloydb.v1.ListSupportedDatabaseFlagsRequest parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.alloydb.v1.ListSupportedDatabaseFlagsRequest parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}

// Creates a fresh builder seeded from the all-defaults singleton instance.
public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}

// Creates a builder pre-populated with a copy of {@code prototype}'s fields.
public static Builder newBuilder(
    com.google.cloud.alloydb.v1.ListSupportedDatabaseFlagsRequest prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}

@java.lang.Override
public Builder toBuilder() {
  // Skip the redundant mergeFrom when this is the (all-default) singleton.
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}

@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
 *
 *
 * <pre>
 * Message for listing the information about the supported Database flags.
 * </pre>
 *
 * Protobuf type {@code google.cloud.alloydb.v1.ListSupportedDatabaseFlagsRequest}
 *
 * <p>NOTE(review): protoc-generated builder — do not edit by hand. Field presence is
 * tracked in {@code bitField0_}: bit 0 = parent, bit 1 = page_size, bit 2 = page_token,
 * bit 3 = scope (mirrors {@code buildPartial0}).
 */
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
    implements
    // @@protoc_insertion_point(builder_implements:google.cloud.alloydb.v1.ListSupportedDatabaseFlagsRequest)
    com.google.cloud.alloydb.v1.ListSupportedDatabaseFlagsRequestOrBuilder {

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.alloydb.v1.ServiceProto
        .internal_static_google_cloud_alloydb_v1_ListSupportedDatabaseFlagsRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.alloydb.v1.ServiceProto
        .internal_static_google_cloud_alloydb_v1_ListSupportedDatabaseFlagsRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.alloydb.v1.ListSupportedDatabaseFlagsRequest.class,
            com.google.cloud.alloydb.v1.ListSupportedDatabaseFlagsRequest.Builder.class);
  }

  // Construct using com.google.cloud.alloydb.v1.ListSupportedDatabaseFlagsRequest.newBuilder()
  private Builder() {}

  private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    super(parent);
  }

  // Resets every field to its proto3 default and clears all presence bits.
  @java.lang.Override
  public Builder clear() {
    super.clear();
    bitField0_ = 0;
    parent_ = "";
    pageSize_ = 0;
    pageToken_ = "";
    scope_ = 0;
    return this;
  }

  @java.lang.Override
  public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
    return com.google.cloud.alloydb.v1.ServiceProto
        .internal_static_google_cloud_alloydb_v1_ListSupportedDatabaseFlagsRequest_descriptor;
  }

  @java.lang.Override
  public com.google.cloud.alloydb.v1.ListSupportedDatabaseFlagsRequest
      getDefaultInstanceForType() {
    return com.google.cloud.alloydb.v1.ListSupportedDatabaseFlagsRequest.getDefaultInstance();
  }

  // build() differs from buildPartial() only in the isInitialized check.
  @java.lang.Override
  public com.google.cloud.alloydb.v1.ListSupportedDatabaseFlagsRequest build() {
    com.google.cloud.alloydb.v1.ListSupportedDatabaseFlagsRequest result = buildPartial();
    if (!result.isInitialized()) {
      throw newUninitializedMessageException(result);
    }
    return result;
  }

  @java.lang.Override
  public com.google.cloud.alloydb.v1.ListSupportedDatabaseFlagsRequest buildPartial() {
    com.google.cloud.alloydb.v1.ListSupportedDatabaseFlagsRequest result =
        new com.google.cloud.alloydb.v1.ListSupportedDatabaseFlagsRequest(this);
    if (bitField0_ != 0) {
      buildPartial0(result);
    }
    onBuilt();
    return result;
  }

  // Copies only the fields whose presence bit is set; unset fields keep the
  // defaults assigned by the message constructor.
  private void buildPartial0(
      com.google.cloud.alloydb.v1.ListSupportedDatabaseFlagsRequest result) {
    int from_bitField0_ = bitField0_;
    if (((from_bitField0_ & 0x00000001) != 0)) {
      result.parent_ = parent_;
    }
    if (((from_bitField0_ & 0x00000002) != 0)) {
      result.pageSize_ = pageSize_;
    }
    if (((from_bitField0_ & 0x00000004) != 0)) {
      result.pageToken_ = pageToken_;
    }
    if (((from_bitField0_ & 0x00000008) != 0)) {
      result.scope_ = scope_;
    }
  }

  @java.lang.Override
  public Builder clone() {
    return super.clone();
  }

  @java.lang.Override
  public Builder setField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.setField(field, value);
  }

  @java.lang.Override
  public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
    return super.clearField(field);
  }

  @java.lang.Override
  public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
    return super.clearOneof(oneof);
  }

  @java.lang.Override
  public Builder setRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
    return super.setRepeatedField(field, index, value);
  }

  @java.lang.Override
  public Builder addRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.addRepeatedField(field, value);
  }

  // Dynamic-dispatch merge: uses the typed overload when possible, otherwise
  // falls back to reflection-based merging in the superclass.
  @java.lang.Override
  public Builder mergeFrom(com.google.protobuf.Message other) {
    if (other instanceof com.google.cloud.alloydb.v1.ListSupportedDatabaseFlagsRequest) {
      return mergeFrom((com.google.cloud.alloydb.v1.ListSupportedDatabaseFlagsRequest) other);
    } else {
      super.mergeFrom(other);
      return this;
    }
  }

  // Typed merge: proto3 semantics — only non-default fields of {@code other}
  // overwrite this builder's values.
  public Builder mergeFrom(com.google.cloud.alloydb.v1.ListSupportedDatabaseFlagsRequest other) {
    if (other
        == com.google.cloud.alloydb.v1.ListSupportedDatabaseFlagsRequest.getDefaultInstance())
      return this;
    if (!other.getParent().isEmpty()) {
      parent_ = other.parent_;
      bitField0_ |= 0x00000001;
      onChanged();
    }
    if (other.getPageSize() != 0) {
      setPageSize(other.getPageSize());
    }
    if (!other.getPageToken().isEmpty()) {
      pageToken_ = other.pageToken_;
      bitField0_ |= 0x00000004;
      onChanged();
    }
    if (other.scope_ != 0) {
      setScopeValue(other.getScopeValue());
    }
    this.mergeUnknownFields(other.getUnknownFields());
    onChanged();
    return this;
  }

  // No required fields in this message, so a builder is always initialized.
  @java.lang.Override
  public final boolean isInitialized() {
    return true;
  }

  // Wire-format merge loop. Case labels are the field tags:
  // 10 = (1 << 3 | WIRETYPE_LENGTH_DELIMITED) parent, 16 = page_size,
  // 26 = page_token, 48 = (6 << 3 | WIRETYPE_VARINT) scope.
  @java.lang.Override
  public Builder mergeFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 10:
            {
              parent_ = input.readStringRequireUtf8();
              bitField0_ |= 0x00000001;
              break;
            } // case 10
          case 16:
            {
              pageSize_ = input.readInt32();
              bitField0_ |= 0x00000002;
              break;
            } // case 16
          case 26:
            {
              pageToken_ = input.readStringRequireUtf8();
              bitField0_ |= 0x00000004;
              break;
            } // case 26
          case 48:
            {
              scope_ = input.readEnum();
              bitField0_ |= 0x00000008;
              break;
            } // case 48
          default:
            {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
        } // switch (tag)
      } // while (!done)
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.unwrapIOException();
    } finally {
      onChanged();
    } // finally
    return this;
  }

  private int bitField0_;

  // parent: stored as Object so it can lazily hold either a String or a
  // ByteString; getters below convert and cache on first access.
  private java.lang.Object parent_ = "";
  /**
   *
   *
   * <pre>
   * Required. The name of the parent resource. The required format is:
   *  * projects/{project}/locations/{location}
   *
   * Regardless of the parent specified here, as long it is contains a valid
   * project and location, the service will return a static list of supported
   * flags resources. Note that we do not yet support region-specific
   * flags.
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The parent.
   */
  public java.lang.String getParent() {
    java.lang.Object ref = parent_;
    if (!(ref instanceof java.lang.String)) {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      parent_ = s;
      return s;
    } else {
      return (java.lang.String) ref;
    }
  }
  /**
   *
   *
   * <pre>
   * Required. The name of the parent resource. The required format is:
   *  * projects/{project}/locations/{location}
   *
   * Regardless of the parent specified here, as long it is contains a valid
   * project and location, the service will return a static list of supported
   * flags resources. Note that we do not yet support region-specific
   * flags.
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The bytes for parent.
   */
  public com.google.protobuf.ByteString getParentBytes() {
    java.lang.Object ref = parent_;
    if (ref instanceof String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      parent_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  /**
   *
   *
   * <pre>
   * Required. The name of the parent resource. The required format is:
   *  * projects/{project}/locations/{location}
   *
   * Regardless of the parent specified here, as long it is contains a valid
   * project and location, the service will return a static list of supported
   * flags resources. Note that we do not yet support region-specific
   * flags.
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @param value The parent to set.
   * @return This builder for chaining.
   */
  public Builder setParent(java.lang.String value) {
    if (value == null) {
      throw new NullPointerException();
    }
    parent_ = value;
    bitField0_ |= 0x00000001;
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Required. The name of the parent resource. The required format is:
   *  * projects/{project}/locations/{location}
   *
   * Regardless of the parent specified here, as long it is contains a valid
   * project and location, the service will return a static list of supported
   * flags resources. Note that we do not yet support region-specific
   * flags.
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return This builder for chaining.
   */
  public Builder clearParent() {
    parent_ = getDefaultInstance().getParent();
    bitField0_ = (bitField0_ & ~0x00000001);
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Required. The name of the parent resource. The required format is:
   *  * projects/{project}/locations/{location}
   *
   * Regardless of the parent specified here, as long it is contains a valid
   * project and location, the service will return a static list of supported
   * flags resources. Note that we do not yet support region-specific
   * flags.
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @param value The bytes for parent to set.
   * @return This builder for chaining.
   */
  public Builder setParentBytes(com.google.protobuf.ByteString value) {
    if (value == null) {
      throw new NullPointerException();
    }
    checkByteStringIsUtf8(value);
    parent_ = value;
    bitField0_ |= 0x00000001;
    onChanged();
    return this;
  }

  private int pageSize_;
  /**
   *
   *
   * <pre>
   * Requested page size. Server may return fewer items than requested.
   * If unspecified, server will pick an appropriate default.
   * </pre>
   *
   * <code>int32 page_size = 2;</code>
   *
   * @return The pageSize.
   */
  @java.lang.Override
  public int getPageSize() {
    return pageSize_;
  }
  /**
   *
   *
   * <pre>
   * Requested page size. Server may return fewer items than requested.
   * If unspecified, server will pick an appropriate default.
   * </pre>
   *
   * <code>int32 page_size = 2;</code>
   *
   * @param value The pageSize to set.
   * @return This builder for chaining.
   */
  public Builder setPageSize(int value) {
    pageSize_ = value;
    bitField0_ |= 0x00000002;
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Requested page size. Server may return fewer items than requested.
   * If unspecified, server will pick an appropriate default.
   * </pre>
   *
   * <code>int32 page_size = 2;</code>
   *
   * @return This builder for chaining.
   */
  public Builder clearPageSize() {
    bitField0_ = (bitField0_ & ~0x00000002);
    pageSize_ = 0;
    onChanged();
    return this;
  }

  // page_token: same lazy String/ByteString storage scheme as parent_.
  private java.lang.Object pageToken_ = "";
  /**
   *
   *
   * <pre>
   * A token identifying a page of results the server should return.
   * </pre>
   *
   * <code>string page_token = 3;</code>
   *
   * @return The pageToken.
   */
  public java.lang.String getPageToken() {
    java.lang.Object ref = pageToken_;
    if (!(ref instanceof java.lang.String)) {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      pageToken_ = s;
      return s;
    } else {
      return (java.lang.String) ref;
    }
  }
  /**
   *
   *
   * <pre>
   * A token identifying a page of results the server should return.
   * </pre>
   *
   * <code>string page_token = 3;</code>
   *
   * @return The bytes for pageToken.
   */
  public com.google.protobuf.ByteString getPageTokenBytes() {
    java.lang.Object ref = pageToken_;
    if (ref instanceof String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      pageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  /**
   *
   *
   * <pre>
   * A token identifying a page of results the server should return.
   * </pre>
   *
   * <code>string page_token = 3;</code>
   *
   * @param value The pageToken to set.
   * @return This builder for chaining.
   */
  public Builder setPageToken(java.lang.String value) {
    if (value == null) {
      throw new NullPointerException();
    }
    pageToken_ = value;
    bitField0_ |= 0x00000004;
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * A token identifying a page of results the server should return.
   * </pre>
   *
   * <code>string page_token = 3;</code>
   *
   * @return This builder for chaining.
   */
  public Builder clearPageToken() {
    pageToken_ = getDefaultInstance().getPageToken();
    bitField0_ = (bitField0_ & ~0x00000004);
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * A token identifying a page of results the server should return.
   * </pre>
   *
   * <code>string page_token = 3;</code>
   *
   * @param value The bytes for pageToken to set.
   * @return This builder for chaining.
   */
  public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
    if (value == null) {
      throw new NullPointerException();
    }
    checkByteStringIsUtf8(value);
    pageToken_ = value;
    bitField0_ |= 0x00000004;
    onChanged();
    return this;
  }

  // scope: stored as the raw enum wire number so unknown values survive round-trips.
  private int scope_ = 0;
  /**
   *
   *
   * <pre>
   * Optional. The scope for which supported flags are requested. If not
   * specified, default is DATABASE.
   * </pre>
   *
   * <code>
   * .google.cloud.alloydb.v1.SupportedDatabaseFlag.Scope scope = 6 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return The enum numeric value on the wire for scope.
   */
  @java.lang.Override
  public int getScopeValue() {
    return scope_;
  }
  /**
   *
   *
   * <pre>
   * Optional. The scope for which supported flags are requested. If not
   * specified, default is DATABASE.
   * </pre>
   *
   * <code>
   * .google.cloud.alloydb.v1.SupportedDatabaseFlag.Scope scope = 6 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @param value The enum numeric value on the wire for scope to set.
   * @return This builder for chaining.
   */
  public Builder setScopeValue(int value) {
    scope_ = value;
    bitField0_ |= 0x00000008;
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Optional. The scope for which supported flags are requested. If not
   * specified, default is DATABASE.
   * </pre>
   *
   * <code>
   * .google.cloud.alloydb.v1.SupportedDatabaseFlag.Scope scope = 6 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return The scope.
   */
  @java.lang.Override
  public com.google.cloud.alloydb.v1.SupportedDatabaseFlag.Scope getScope() {
    com.google.cloud.alloydb.v1.SupportedDatabaseFlag.Scope result =
        com.google.cloud.alloydb.v1.SupportedDatabaseFlag.Scope.forNumber(scope_);
    // forNumber returns null for wire values the runtime doesn't know about.
    return result == null
        ? com.google.cloud.alloydb.v1.SupportedDatabaseFlag.Scope.UNRECOGNIZED
        : result;
  }
  /**
   *
   *
   * <pre>
   * Optional. The scope for which supported flags are requested. If not
   * specified, default is DATABASE.
   * </pre>
   *
   * <code>
   * .google.cloud.alloydb.v1.SupportedDatabaseFlag.Scope scope = 6 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @param value The scope to set.
   * @return This builder for chaining.
   */
  public Builder setScope(com.google.cloud.alloydb.v1.SupportedDatabaseFlag.Scope value) {
    if (value == null) {
      throw new NullPointerException();
    }
    bitField0_ |= 0x00000008;
    scope_ = value.getNumber();
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Optional. The scope for which supported flags are requested. If not
   * specified, default is DATABASE.
   * </pre>
   *
   * <code>
   * .google.cloud.alloydb.v1.SupportedDatabaseFlag.Scope scope = 6 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return This builder for chaining.
   */
  public Builder clearScope() {
    bitField0_ = (bitField0_ & ~0x00000008);
    scope_ = 0;
    onChanged();
    return this;
  }

  @java.lang.Override
  public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.setUnknownFields(unknownFields);
  }

  @java.lang.Override
  public final Builder mergeUnknownFields(
      final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.mergeUnknownFields(unknownFields);
  }

  // @@protoc_insertion_point(builder_scope:google.cloud.alloydb.v1.ListSupportedDatabaseFlagsRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.alloydb.v1.ListSupportedDatabaseFlagsRequest)
// Singleton all-defaults instance returned by getDefaultInstance() and used as
// the base for newBuilder().
private static final com.google.cloud.alloydb.v1.ListSupportedDatabaseFlagsRequest
    DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.alloydb.v1.ListSupportedDatabaseFlagsRequest();
}

public static com.google.cloud.alloydb.v1.ListSupportedDatabaseFlagsRequest getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Shared parser: builds via the Builder's wire-format mergeFrom; on any failure
// it attaches the partially-parsed message to the thrown
// InvalidProtocolBufferException so callers can inspect what was read.
private static final com.google.protobuf.Parser<ListSupportedDatabaseFlagsRequest> PARSER =
    new com.google.protobuf.AbstractParser<ListSupportedDatabaseFlagsRequest>() {
      @java.lang.Override
      public ListSupportedDatabaseFlagsRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
// Static and instance accessors for the shared PARSER singleton.
public static com.google.protobuf.Parser<ListSupportedDatabaseFlagsRequest> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<ListSupportedDatabaseFlagsRequest> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.cloud.alloydb.v1.ListSupportedDatabaseFlagsRequest getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/analytics/admin/v1beta/analytics_admin.proto
// Protobuf Java Version: 3.25.8
package com.google.analytics.admin.v1beta;
/**
*
*
* <pre>
* Response message for ListDataStreams RPC.
* </pre>
*
* Protobuf type {@code google.analytics.admin.v1beta.ListDataStreamsResponse}
*/
public final class ListDataStreamsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.analytics.admin.v1beta.ListDataStreamsResponse)
ListDataStreamsResponseOrBuilder {
private static final long serialVersionUID = 0L;

// Use ListDataStreamsResponse.newBuilder() to construct.
private ListDataStreamsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}

// No-arg constructor used for the default instance: empty stream list,
// empty next-page token.
private ListDataStreamsResponse() {
  dataStreams_ = java.util.Collections.emptyList();
  nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  return new ListDataStreamsResponse();
}

public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.analytics.admin.v1beta.AnalyticsAdminProto
      .internal_static_google_analytics_admin_v1beta_ListDataStreamsResponse_descriptor;
}

// Binds this class and its Builder to the reflection accessor table generated
// in AnalyticsAdminProto.
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.analytics.admin.v1beta.AnalyticsAdminProto
      .internal_static_google_analytics_admin_v1beta_ListDataStreamsResponse_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.analytics.admin.v1beta.ListDataStreamsResponse.class,
          com.google.analytics.admin.v1beta.ListDataStreamsResponse.Builder.class);
}
public static final int DATA_STREAMS_FIELD_NUMBER = 1;

// Backing list for the repeated data_streams field; set once at construction
// (either empty or the builder's list) and exposed read-only below.
@SuppressWarnings("serial")
private java.util.List<com.google.analytics.admin.v1beta.DataStream> dataStreams_;
/**
 *
 *
 * <pre>
 * List of DataStreams.
 * </pre>
 *
 * <code>repeated .google.analytics.admin.v1beta.DataStream data_streams = 1;</code>
 */
@java.lang.Override
public java.util.List<com.google.analytics.admin.v1beta.DataStream> getDataStreamsList() {
  return dataStreams_;
}
/**
 *
 *
 * <pre>
 * List of DataStreams.
 * </pre>
 *
 * <code>repeated .google.analytics.admin.v1beta.DataStream data_streams = 1;</code>
 */
@java.lang.Override
public java.util.List<? extends com.google.analytics.admin.v1beta.DataStreamOrBuilder>
    getDataStreamsOrBuilderList() {
  return dataStreams_;
}
/**
 *
 *
 * <pre>
 * List of DataStreams.
 * </pre>
 *
 * <code>repeated .google.analytics.admin.v1beta.DataStream data_streams = 1;</code>
 */
@java.lang.Override
public int getDataStreamsCount() {
  return dataStreams_.size();
}
/**
 *
 *
 * <pre>
 * List of DataStreams.
 * </pre>
 *
 * <code>repeated .google.analytics.admin.v1beta.DataStream data_streams = 1;</code>
 */
@java.lang.Override
public com.google.analytics.admin.v1beta.DataStream getDataStreams(int index) {
  return dataStreams_.get(index);
}
/**
 *
 *
 * <pre>
 * List of DataStreams.
 * </pre>
 *
 * <code>repeated .google.analytics.admin.v1beta.DataStream data_streams = 1;</code>
 */
@java.lang.Override
public com.google.analytics.admin.v1beta.DataStreamOrBuilder getDataStreamsOrBuilder(int index) {
  return dataStreams_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;

// Lazily holds either a String or a ByteString; the getters convert and cache
// the other representation on first access.
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
 *
 *
 * <pre>
 * A token, which can be sent as `page_token` to retrieve the next page.
 * If this field is omitted, there are no subsequent pages.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return The nextPageToken.
 */
@java.lang.Override
public java.lang.String getNextPageToken() {
  java.lang.Object ref = nextPageToken_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    nextPageToken_ = s;
    return s;
  }
}
/**
 *
 *
 * <pre>
 * A token, which can be sent as `page_token` to retrieve the next page.
 * If this field is omitted, there are no subsequent pages.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return The bytes for nextPageToken.
 */
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
  java.lang.Object ref = nextPageToken_;
  if (ref instanceof java.lang.String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    nextPageToken_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
// Memoized isInitialized result: -1 = not yet computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;

// No required fields in this message, so initialization always succeeds.
@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  memoizedIsInitialized = 1;
  return true;
}
// Serializes fields in field-number order (proto3 default strings are skipped),
// followed by any unknown fields carried through from parsing.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  for (int i = 0; i < dataStreams_.size(); i++) {
    output.writeMessage(1, dataStreams_.get(i));
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
  }
  getUnknownFields().writeTo(output);
}
// Computes (and memoizes in memoizedSize) the exact byte size writeTo will emit.
@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;
  size = 0;
  for (int i = 0; i < dataStreams_.size(); i++) {
    size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, dataStreams_.get(i));
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}
// Field-by-field equality, including unknown fields; delegates to
// super.equals for non-message arguments.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.analytics.admin.v1beta.ListDataStreamsResponse)) {
    return super.equals(obj);
  }
  com.google.analytics.admin.v1beta.ListDataStreamsResponse other =
      (com.google.analytics.admin.v1beta.ListDataStreamsResponse) obj;
  if (!getDataStreamsList().equals(other.getDataStreamsList())) return false;
  if (!getNextPageToken().equals(other.getNextPageToken())) return false;
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}
// Memoized hash consistent with equals: mixes descriptor, set fields (tagged
// by field number), and unknown fields.
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  if (getDataStreamsCount() > 0) {
    hash = (37 * hash) + DATA_STREAMS_FIELD_NUMBER;
    hash = (53 * hash) + getDataStreamsList().hashCode();
  }
  hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
  hash = (53 * hash) + getNextPageToken().hashCode();
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
// NOTE(review): protoc-generated parse entry points — do not hand-edit. All
// overloads delegate to PARSER; stream variants throw IOException, buffer/byte
// variants throw InvalidProtocolBufferException.
public static com.google.analytics.admin.v1beta.ListDataStreamsResponse parseFrom(
    java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.analytics.admin.v1beta.ListDataStreamsResponse parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.analytics.admin.v1beta.ListDataStreamsResponse parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.analytics.admin.v1beta.ListDataStreamsResponse parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.analytics.admin.v1beta.ListDataStreamsResponse parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.analytics.admin.v1beta.ListDataStreamsResponse parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.analytics.admin.v1beta.ListDataStreamsResponse parseFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.analytics.admin.v1beta.ListDataStreamsResponse parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

// Delimited variants parse a length-prefixed message (see Parser#parseDelimitedFrom).
public static com.google.analytics.admin.v1beta.ListDataStreamsResponse parseDelimitedFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}

public static com.google.analytics.admin.v1beta.ListDataStreamsResponse parseDelimitedFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.analytics.admin.v1beta.ListDataStreamsResponse parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.analytics.admin.v1beta.ListDataStreamsResponse parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}

// Creates a fresh builder seeded from the all-defaults singleton instance.
public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}

// Creates a builder pre-populated with a copy of {@code prototype}'s fields.
public static Builder newBuilder(
    com.google.analytics.admin.v1beta.ListDataStreamsResponse prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}

@java.lang.Override
public Builder toBuilder() {
  // Skip the redundant mergeFrom when this is the (all-default) singleton.
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}

@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
*
*
* <pre>
* Response message for ListDataStreams RPC.
* </pre>
*
* Protobuf type {@code google.analytics.admin.v1beta.ListDataStreamsResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.analytics.admin.v1beta.ListDataStreamsResponse)
com.google.analytics.admin.v1beta.ListDataStreamsResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.analytics.admin.v1beta.AnalyticsAdminProto
      .internal_static_google_analytics_admin_v1beta_ListDataStreamsResponse_descriptor;
}

// Binds the Builder to the same reflection accessor table as the message class.
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.analytics.admin.v1beta.AnalyticsAdminProto
      .internal_static_google_analytics_admin_v1beta_ListDataStreamsResponse_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.analytics.admin.v1beta.ListDataStreamsResponse.class,
          com.google.analytics.admin.v1beta.ListDataStreamsResponse.Builder.class);
}

// Construct using com.google.analytics.admin.v1beta.ListDataStreamsResponse.newBuilder()
private Builder() {}

private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (dataStreamsBuilder_ == null) {
dataStreams_ = java.util.Collections.emptyList();
} else {
dataStreams_ = null;
dataStreamsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.analytics.admin.v1beta.AnalyticsAdminProto
.internal_static_google_analytics_admin_v1beta_ListDataStreamsResponse_descriptor;
}
@java.lang.Override
public com.google.analytics.admin.v1beta.ListDataStreamsResponse getDefaultInstanceForType() {
return com.google.analytics.admin.v1beta.ListDataStreamsResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.analytics.admin.v1beta.ListDataStreamsResponse build() {
com.google.analytics.admin.v1beta.ListDataStreamsResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
// Builds the message without enforcing required-field checks (there are none in
// proto3). Repeated fields are copied unconditionally; singular fields only when
// their presence bit in bitField0_ is set.
public com.google.analytics.admin.v1beta.ListDataStreamsResponse buildPartial() {
com.google.analytics.admin.v1beta.ListDataStreamsResponse result =
new com.google.analytics.admin.v1beta.ListDataStreamsResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
// Marks the builder clean and notifies the parent, per GeneratedMessageV3 contract.
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.analytics.admin.v1beta.ListDataStreamsResponse result) {
if (dataStreamsBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
dataStreams_ = java.util.Collections.unmodifiableList(dataStreams_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.dataStreams_ = dataStreams_;
} else {
result.dataStreams_ = dataStreamsBuilder_.build();
}
}
private void buildPartial0(com.google.analytics.admin.v1beta.ListDataStreamsResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.analytics.admin.v1beta.ListDataStreamsResponse) {
return mergeFrom((com.google.analytics.admin.v1beta.ListDataStreamsResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
// Merges another ListDataStreamsResponse into this builder: data_streams lists are
// concatenated, a non-empty next_page_token overwrites the current one, and unknown
// fields are merged. Merging the default instance is a no-op.
public Builder mergeFrom(com.google.analytics.admin.v1beta.ListDataStreamsResponse other) {
if (other == com.google.analytics.admin.v1beta.ListDataStreamsResponse.getDefaultInstance())
return this;
if (dataStreamsBuilder_ == null) {
// List mode: either adopt the other message's (immutable) list directly when we
// have nothing yet, or copy-append into our own mutable list.
if (!other.dataStreams_.isEmpty()) {
if (dataStreams_.isEmpty()) {
dataStreams_ = other.dataStreams_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureDataStreamsIsMutable();
dataStreams_.addAll(other.dataStreams_);
}
onChanged();
}
} else {
// Builder mode: when our field builder is empty, dispose it and adopt the other
// list (recreating the builder only if alwaysUseFieldBuilders is set); otherwise
// append the other message's elements through the builder.
if (!other.dataStreams_.isEmpty()) {
if (dataStreamsBuilder_.isEmpty()) {
dataStreamsBuilder_.dispose();
dataStreamsBuilder_ = null;
dataStreams_ = other.dataStreams_;
bitField0_ = (bitField0_ & ~0x00000001);
dataStreamsBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getDataStreamsFieldBuilder()
: null;
} else {
dataStreamsBuilder_.addAllMessages(other.dataStreams_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
// Generated code may call onChanged() more than once per merge; it is idempotent.
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.analytics.admin.v1beta.DataStream m =
input.readMessage(
com.google.analytics.admin.v1beta.DataStream.parser(), extensionRegistry);
if (dataStreamsBuilder_ == null) {
ensureDataStreamsIsMutable();
dataStreams_.add(m);
} else {
dataStreamsBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.analytics.admin.v1beta.DataStream> dataStreams_ =
java.util.Collections.emptyList();
// Copy-on-write guard for the data_streams list: bit 0x1 of bitField0_ tracks
// whether dataStreams_ is a private mutable ArrayList (set) or a shared/immutable
// list (clear). Callers must invoke this before any in-place mutation.
private void ensureDataStreamsIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
dataStreams_ =
new java.util.ArrayList<com.google.analytics.admin.v1beta.DataStream>(dataStreams_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.analytics.admin.v1beta.DataStream,
com.google.analytics.admin.v1beta.DataStream.Builder,
com.google.analytics.admin.v1beta.DataStreamOrBuilder>
dataStreamsBuilder_;
/**
*
*
* <pre>
* List of DataStreams.
* </pre>
*
* <code>repeated .google.analytics.admin.v1beta.DataStream data_streams = 1;</code>
*/
public java.util.List<com.google.analytics.admin.v1beta.DataStream> getDataStreamsList() {
if (dataStreamsBuilder_ == null) {
return java.util.Collections.unmodifiableList(dataStreams_);
} else {
return dataStreamsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* List of DataStreams.
* </pre>
*
* <code>repeated .google.analytics.admin.v1beta.DataStream data_streams = 1;</code>
*/
public int getDataStreamsCount() {
if (dataStreamsBuilder_ == null) {
return dataStreams_.size();
} else {
return dataStreamsBuilder_.getCount();
}
}
/**
*
*
* <pre>
* List of DataStreams.
* </pre>
*
* <code>repeated .google.analytics.admin.v1beta.DataStream data_streams = 1;</code>
*/
public com.google.analytics.admin.v1beta.DataStream getDataStreams(int index) {
if (dataStreamsBuilder_ == null) {
return dataStreams_.get(index);
} else {
return dataStreamsBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* List of DataStreams.
* </pre>
*
* <code>repeated .google.analytics.admin.v1beta.DataStream data_streams = 1;</code>
*/
public Builder setDataStreams(int index, com.google.analytics.admin.v1beta.DataStream value) {
if (dataStreamsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureDataStreamsIsMutable();
dataStreams_.set(index, value);
onChanged();
} else {
dataStreamsBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* List of DataStreams.
* </pre>
*
* <code>repeated .google.analytics.admin.v1beta.DataStream data_streams = 1;</code>
*/
public Builder setDataStreams(
int index, com.google.analytics.admin.v1beta.DataStream.Builder builderForValue) {
if (dataStreamsBuilder_ == null) {
ensureDataStreamsIsMutable();
dataStreams_.set(index, builderForValue.build());
onChanged();
} else {
dataStreamsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* List of DataStreams.
* </pre>
*
* <code>repeated .google.analytics.admin.v1beta.DataStream data_streams = 1;</code>
*/
public Builder addDataStreams(com.google.analytics.admin.v1beta.DataStream value) {
if (dataStreamsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureDataStreamsIsMutable();
dataStreams_.add(value);
onChanged();
} else {
dataStreamsBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* List of DataStreams.
* </pre>
*
* <code>repeated .google.analytics.admin.v1beta.DataStream data_streams = 1;</code>
*/
public Builder addDataStreams(int index, com.google.analytics.admin.v1beta.DataStream value) {
if (dataStreamsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureDataStreamsIsMutable();
dataStreams_.add(index, value);
onChanged();
} else {
dataStreamsBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* List of DataStreams.
* </pre>
*
* <code>repeated .google.analytics.admin.v1beta.DataStream data_streams = 1;</code>
*/
public Builder addDataStreams(
com.google.analytics.admin.v1beta.DataStream.Builder builderForValue) {
if (dataStreamsBuilder_ == null) {
ensureDataStreamsIsMutable();
dataStreams_.add(builderForValue.build());
onChanged();
} else {
dataStreamsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* List of DataStreams.
* </pre>
*
* <code>repeated .google.analytics.admin.v1beta.DataStream data_streams = 1;</code>
*/
public Builder addDataStreams(
int index, com.google.analytics.admin.v1beta.DataStream.Builder builderForValue) {
if (dataStreamsBuilder_ == null) {
ensureDataStreamsIsMutable();
dataStreams_.add(index, builderForValue.build());
onChanged();
} else {
dataStreamsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* List of DataStreams.
* </pre>
*
* <code>repeated .google.analytics.admin.v1beta.DataStream data_streams = 1;</code>
*/
public Builder addAllDataStreams(
java.lang.Iterable<? extends com.google.analytics.admin.v1beta.DataStream> values) {
if (dataStreamsBuilder_ == null) {
ensureDataStreamsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, dataStreams_);
onChanged();
} else {
dataStreamsBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* List of DataStreams.
* </pre>
*
* <code>repeated .google.analytics.admin.v1beta.DataStream data_streams = 1;</code>
*/
public Builder clearDataStreams() {
if (dataStreamsBuilder_ == null) {
dataStreams_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
dataStreamsBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* List of DataStreams.
* </pre>
*
* <code>repeated .google.analytics.admin.v1beta.DataStream data_streams = 1;</code>
*/
public Builder removeDataStreams(int index) {
if (dataStreamsBuilder_ == null) {
ensureDataStreamsIsMutable();
dataStreams_.remove(index);
onChanged();
} else {
dataStreamsBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* List of DataStreams.
* </pre>
*
* <code>repeated .google.analytics.admin.v1beta.DataStream data_streams = 1;</code>
*/
public com.google.analytics.admin.v1beta.DataStream.Builder getDataStreamsBuilder(int index) {
return getDataStreamsFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* List of DataStreams.
* </pre>
*
* <code>repeated .google.analytics.admin.v1beta.DataStream data_streams = 1;</code>
*/
public com.google.analytics.admin.v1beta.DataStreamOrBuilder getDataStreamsOrBuilder(
int index) {
if (dataStreamsBuilder_ == null) {
return dataStreams_.get(index);
} else {
return dataStreamsBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* List of DataStreams.
* </pre>
*
* <code>repeated .google.analytics.admin.v1beta.DataStream data_streams = 1;</code>
*/
public java.util.List<? extends com.google.analytics.admin.v1beta.DataStreamOrBuilder>
getDataStreamsOrBuilderList() {
if (dataStreamsBuilder_ != null) {
return dataStreamsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(dataStreams_);
}
}
/**
*
*
* <pre>
* List of DataStreams.
* </pre>
*
* <code>repeated .google.analytics.admin.v1beta.DataStream data_streams = 1;</code>
*/
public com.google.analytics.admin.v1beta.DataStream.Builder addDataStreamsBuilder() {
return getDataStreamsFieldBuilder()
.addBuilder(com.google.analytics.admin.v1beta.DataStream.getDefaultInstance());
}
/**
*
*
* <pre>
* List of DataStreams.
* </pre>
*
* <code>repeated .google.analytics.admin.v1beta.DataStream data_streams = 1;</code>
*/
public com.google.analytics.admin.v1beta.DataStream.Builder addDataStreamsBuilder(int index) {
return getDataStreamsFieldBuilder()
.addBuilder(index, com.google.analytics.admin.v1beta.DataStream.getDefaultInstance());
}
/**
*
*
* <pre>
* List of DataStreams.
* </pre>
*
* <code>repeated .google.analytics.admin.v1beta.DataStream data_streams = 1;</code>
*/
public java.util.List<com.google.analytics.admin.v1beta.DataStream.Builder>
getDataStreamsBuilderList() {
return getDataStreamsFieldBuilder().getBuilderList();
}
// Lazily creates the RepeatedFieldBuilderV3 for data_streams, transferring ownership
// of the current list into it. After this call dataStreams_ is null and all access
// must go through dataStreamsBuilder_ (the "builder mode" branch of the accessors).
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.analytics.admin.v1beta.DataStream,
com.google.analytics.admin.v1beta.DataStream.Builder,
com.google.analytics.admin.v1beta.DataStreamOrBuilder>
getDataStreamsFieldBuilder() {
if (dataStreamsBuilder_ == null) {
dataStreamsBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.analytics.admin.v1beta.DataStream,
com.google.analytics.admin.v1beta.DataStream.Builder,
com.google.analytics.admin.v1beta.DataStreamOrBuilder>(
dataStreams_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
dataStreams_ = null;
}
return dataStreamsBuilder_;
}
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.analytics.admin.v1beta.ListDataStreamsResponse)
}
// @@protoc_insertion_point(class_scope:google.analytics.admin.v1beta.ListDataStreamsResponse)
private static final com.google.analytics.admin.v1beta.ListDataStreamsResponse DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.analytics.admin.v1beta.ListDataStreamsResponse();
}
public static com.google.analytics.admin.v1beta.ListDataStreamsResponse getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ListDataStreamsResponse> PARSER =
new com.google.protobuf.AbstractParser<ListDataStreamsResponse>() {
@java.lang.Override
public ListDataStreamsResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ListDataStreamsResponse> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListDataStreamsResponse> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.analytics.admin.v1beta.ListDataStreamsResponse getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
oracle/coherence | 36,907 | prj/test/functional/topics/src/main/java/topics/AbstractTopicChannelCountTests.java |
/*
* Copyright (c) 2000, 2025, Oracle and/or its affiliates.
*
* Licensed under the Universal Permissive License v 1.0 as shown at
* https://oss.oracle.com/licenses/upl.
*/
package topics;
import com.oracle.bedrock.Option;
import com.oracle.bedrock.junit.CoherenceClusterResource;
import com.oracle.bedrock.runtime.coherence.CoherenceClusterMember;
import com.oracle.bedrock.runtime.coherence.options.CacheConfig;
import com.oracle.bedrock.runtime.coherence.options.ClusterName;
import com.oracle.bedrock.runtime.coherence.options.LocalHost;
import com.oracle.bedrock.runtime.coherence.options.LocalStorage;
import com.oracle.bedrock.runtime.coherence.options.Logging;
import com.oracle.bedrock.runtime.coherence.options.RoleName;
import com.oracle.bedrock.runtime.coherence.options.WellKnownAddress;
import com.oracle.bedrock.runtime.java.features.JmxFeature;
import com.oracle.bedrock.runtime.java.options.IPv4Preferred;
import com.oracle.bedrock.runtime.java.options.SystemProperty;
import com.oracle.bedrock.runtime.options.DisplayName;
import com.oracle.bedrock.testsupport.deferred.Eventually;
import com.oracle.bedrock.testsupport.junit.TestLogs;
import com.oracle.coherence.common.base.Logger;
import com.oracle.coherence.testing.junit.ThreadDumpOnTimeoutRule;
import com.tangosol.internal.net.topic.NamedTopicPublisher;
import com.tangosol.internal.net.topic.NamedTopicSubscriber;
import com.tangosol.internal.net.topic.impl.paged.management.SubscriberModel;
import com.tangosol.net.CacheFactory;
import com.tangosol.net.Cluster;
import com.tangosol.net.Session;
import com.tangosol.net.topic.NamedTopic;
import com.tangosol.net.topic.Position;
import com.tangosol.net.topic.Publisher;
import com.tangosol.net.topic.Subscriber;
import org.junit.After;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Ignore;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestName;
import topics.callables.GetChannelsWithMessages;
import topics.callables.PublishMessages;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Queue;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicInteger;
import static com.tangosol.net.topic.Subscriber.completeOnEmpty;
import static com.tangosol.net.topic.Subscriber.inGroup;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.notNullValue;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.number.OrderingComparison.greaterThan;
import static org.hamcrest.number.OrderingComparison.greaterThanOrEqualTo;
@SuppressWarnings({"resource", "unchecked"})
public abstract class AbstractTopicChannelCountTests
{
/**
 * One-time JVM configuration: make this test client a storage-disabled member.
 * Storage is provided by the cluster members started in {@link #startCluster()}.
 */
@BeforeClass
public static void setup()
{
System.setProperty(LocalStorage.PROPERTY, "false");
}
/**
 * Starts a fresh Coherence cluster before every test, named after the test method,
 * so topic/subscriber-group state never leaks between tests. Any cluster left over
 * from a previous test is closed first.
 *
 * @throws Throwable if the Bedrock cluster resource fails to start
 */
@Before
public void startCluster() throws Throwable
{
if (m_cluster != null)
{
m_cluster.getCluster().close();
}
m_cluster = new CoherenceClusterResource()
.with(ClusterName.of(m_testWatcher.getMethodName()),
Logging.atMax(),
LocalHost.only(),
WellKnownAddress.loopback(),
IPv4Preferred.yes())
.include(STORAGE_MEMBER_COUNT,
CoherenceClusterMember.class,
CacheConfig.of(CACHE_CONFIG_FILE),
SystemProperty.of(PROP_CHANNELS, STORAGE_CHANNEL_COUNT),
DisplayName.of("Storage"),
RoleName.of("storage"),
JmxFeature.enabled(),
m_testLogs.builder());
// Not used as a JUnit @Rule here, so drive the resource lifecycle manually.
m_cluster.before();
}
/**
 * Tears down the local member after each test, waiting until it has actually
 * stopped before disposing of the Bedrock cluster resource.
 */
@After
public void cleanup()
{
Cluster cluster = CacheFactory.getCluster();
CacheFactory.shutdown();
// shutdown() is asynchronous with respect to cluster departure; wait for it.
Eventually.assertDeferred(cluster::isRunning, is(false));
m_cluster.after();
}
/**
 * Verifies round-robin publishing when the client session is configured with
 * fewer channels (5) than the storage members: the publisher should still spread
 * messages across every channel it knows about, and the storage side should see
 * messages in exactly those channels.
 *
 * @throws Exception if publishing times out or the session cannot be created
 */
@Test
public void shouldPublishWhenPublisherHasFewerChannels() throws Exception
{
String sTopicName = getTopicName();
int cChannel = 5;
try (Session session = createSession(SystemProperty.of(PROP_CHANNELS, cChannel)))
{
NamedTopic<String> topic = session.getTopic(sTopicName);
// Ensure a durable subscriber group exists so published messages are retained.
topic.ensureSubscriberGroup(sTopicName);
int cActual = topic.getChannelCount();
Set<Integer> setChannel = new HashSet<>();
try (Publisher<String> publisher = topic.createPublisher(Publisher.OrderBy.roundRobin()))
{
for (int i = 0; i < 100; i++)
{
Publisher.Status status = publisher.publish("message-" + i).get(1, TimeUnit.MINUTES);
assertThat(status, is(notNullValue()));
setChannel.add(status.getChannel());
}
}
// should have published to all channels based on the publisher's channel count
assertThat(setChannel.size(), is(cActual));
CoherenceClusterMember member = m_cluster.getCluster()
.findAny()
.orElseThrow(() -> new AssertionError("Could not find cluster member"));
// Cross-check on a storage member that the same channels actually hold messages.
Set<Integer> setActualChannel = member.invoke(new GetChannelsWithMessages(sTopicName));
assertThat(setActualChannel.size(), is(cActual));
for (int i = 0; i < cActual; i++)
{
assertThat(setChannel.contains(i), is(true));
assertThat(setActualChannel.contains(i), is(true));
}
}
}
/**
 * Verifies that a publisher explicitly configured with more channels (34) than the
 * topic's default can publish to all of them, and that two subscribers sharing a
 * group between them drain messages from every one of those channels.
 *
 * @throws Exception if publishing or receiving times out
 */
@Test
public void shouldPublishWhenPublisherConfiguredWithMoreChannels() throws Exception
{
String sTopicName = getTopicName();
String sGroupName = "test-group";
int cChannel = 34;
try (Session session = createSession())
{
NamedTopic<Integer> topic = session.getTopic(sTopicName);
topic.ensureSubscriberGroup(sGroupName);
try (Subscriber<Integer> subscriberOne = topic.createSubscriber(inGroup(sGroupName), Subscriber.CompleteOnEmpty.enabled());
Subscriber<Integer> subscriberTwo = topic.createSubscriber(inGroup(sGroupName), Subscriber.CompleteOnEmpty.enabled()))
{
// Round-robin across 34 channels with 34 messages puts one message per channel.
try (Publisher<Integer> publisher = topic.createPublisher(NamedTopicPublisher.ChannelCount.of(cChannel), Publisher.OrderBy.roundRobin()))
{
CompletableFuture<Publisher.Status>[] aFuture = new CompletableFuture[cChannel];
for (int i = 0; i < cChannel; i++)
{
aFuture[i] = publisher.publish(i);
}
for (int i = 0; i < cChannel; i++)
{
Eventually.assertDeferred(aFuture[i]::isDone, is(true));
// the future should have completed normally
aFuture[i].get();
}
CoherenceClusterMember member = m_cluster.getCluster()
.findAny()
.orElseThrow(() -> new AssertionError("Could not find cluster member"));
// should have pages in 34 channels
Set<Integer> setActualChannel = member.invoke(new GetChannelsWithMessages(sTopicName));
assertThat(setActualChannel.size(), is(cChannel));
// Drain both subscribers (CompleteOnEmpty makes receive() return null when empty).
Set<Integer> setMessage = new HashSet<>();
Subscriber.Element<Integer> element = subscriberOne.receive().get(1, TimeUnit.MINUTES);
while (element != null)
{
setMessage.add(element.getValue());
element = subscriberOne.receive().get(1, TimeUnit.MINUTES);
}
element = subscriberTwo.receive().get(1, TimeUnit.MINUTES);
while (element != null)
{
setMessage.add(element.getValue());
element = subscriberTwo.receive().get(1, TimeUnit.MINUTES);
}
// should have received from all channels based on the publisher's channel count
assertThat(setMessage.size(), is(cChannel));
}
}
}
}
/**
 * Verifies that a publisher created with more channels (34) than the cluster members
 * are configured with (17) can publish, using OrderBy.value to target one channel per
 * message, and that a subscriber subsequently receives exactly one message per channel.
 *
 * @throws Exception if publishing or receiving times out
 */
@Test
public void shouldPublishWhenPublisherCreatedWithMoreChannels() throws Exception
{
String sTopicName = getTopicName();
int cChannel = 34;
try (Session session = createSession())
{
NamedTopic<Integer> topic = session.getTopic(sTopicName);
topic.ensureSubscriberGroup(sTopicName);
// member is configured with 17 channels, publisher created with 34
try (Publisher<Integer> publisher = topic.createPublisher(NamedTopicPublisher.ChannelCount.of(cChannel),
Publisher.OrderBy.value(n -> n)))
{
CompletableFuture<Publisher.Status>[] aFuture = new CompletableFuture[cChannel];
for (int i = 0; i < cChannel; i++)
{
aFuture[i] = publisher.publish(i);
}
for (int i = 0; i < cChannel; i++)
{
Eventually.assertDeferred(aFuture[i]::isDone, is(true));
// we're effectively asserting the future completed normally
aFuture[i].get();
}
CoherenceClusterMember member = m_cluster.getCluster()
.findAny()
.orElseThrow(() -> new AssertionError("Could not find cluster member"));
// Storage should report messages present in all 34 channels.
Set<Integer> setActualChannel = member.invoke(new GetChannelsWithMessages(sTopicName));
assertThat(setActualChannel.size(), is(cChannel));
Set<Integer> setMessage = new HashSet<>();
int cReceived = 0;
try (Subscriber<Integer> subscriber = topic.createSubscriber(inGroup("test"), Subscriber.CompleteOnEmpty.enabled()))
{
Subscriber.Element<Integer> element = subscriber.receive().get(1, TimeUnit.MINUTES);
while (element != null)
{
setMessage.add(element.getValue());
cReceived++;
element = subscriber.receive().get(1, TimeUnit.MINUTES);
}
}
// should have received from all channels based on the publisher's channel count
assertThat(cReceived, is(cChannel));
assertThat(setMessage.size(), is(cChannel));
}
}
}
/**
 * Verifies that a subscriber whose session is configured with more channels (34)
 * than storage still receives every message published by a storage member.
 *
 * @throws Exception if receiving times out
 */
@Test
public void shouldSubscribeWhenSubscriberHasMoreChannels() throws Exception
{
CoherenceClusterMember member = m_cluster.getCluster()
.findAny()
.orElseThrow(() -> new AssertionError("Could not find cluster member"));
String sTopicName = getTopicName();
String sGroupName = "test";
int cChannel = 34;
int cMessage = 1000;
try (Session session = createSession(SystemProperty.of(PROP_CHANNELS, cChannel)))
{
NamedTopic<String> topic = session.getTopic(sTopicName);
topic.ensureSubscriberGroup(sGroupName);
// Publish from a storage member (which uses the storage channel count).
member.invoke(new PublishMessages(sTopicName, cMessage));
Set<String> setMessage = new HashSet<>();
int cReceived = 0;
try (Subscriber<String> subscriber = topic.createSubscriber(inGroup(sGroupName), Subscriber.CompleteOnEmpty.enabled()))
{
Subscriber.Element<String> element = subscriber.receive().get(1, TimeUnit.MINUTES);
while (element != null)
{
setMessage.add(element.getValue());
cReceived++;
element = subscriber.receive().get(1, TimeUnit.MINUTES);
}
}
// the subscriber should have received every published message exactly once
assertThat(cReceived, is(cMessage));
assertThat(setMessage.size(), is(cMessage));
}
}
/**
 * Verifies that a subscriber whose session is configured with fewer channels (5)
 * than storage still receives every message published by a storage member.
 *
 * @throws Exception if receiving times out
 */
@Test
public void shouldSubscribeWhenSubscriberHasFewerChannels() throws Exception
{
CoherenceClusterMember member = m_cluster.getCluster()
.findAny()
.orElseThrow(() -> new AssertionError("Could not find cluster member"));
String sTopicName = getTopicName();
String sGroupName = "test";
int cChannel = 5;
int cMessage = 1000;
try (Session session = createSession(SystemProperty.of(PROP_CHANNELS, cChannel)))
{
NamedTopic<String> topic = session.getTopic(sTopicName);
topic.ensureSubscriberGroup(sGroupName);
// Publish from a storage member (which uses the storage channel count).
member.invoke(new PublishMessages(sTopicName, cMessage));
Set<String> setMessage = new HashSet<>();
int cReceived = 0;
try (Subscriber<String> subscriber = topic.createSubscriber(inGroup(sGroupName), Subscriber.CompleteOnEmpty.enabled()))
{
Subscriber.Element<String> element = subscriber.receive().get(1, TimeUnit.MINUTES);
while (element != null)
{
setMessage.add(element.getValue());
cReceived++;
element = subscriber.receive().get(1, TimeUnit.MINUTES);
}
}
// the subscriber should have received every published message exactly once
assertThat(cReceived, is(cMessage));
assertThat(setMessage.size(), is(cMessage));
}
}
@Test
@Ignore("Skipped until Bug 34767222 is fixed")
public void shouldIncreaseChannelCountWhileActive() throws Exception
{
String sTopicName = getTopicName();
try (Session session = createSession())
{
ExecutorService executorSub = Executors.newFixedThreadPool(5, new ThreadFactory()
{
@Override
public Thread newThread(Runnable r)
{
return new Thread(r, "SubscriberTask-" + m_count.getAndIncrement());
}
static final AtomicInteger m_count = new AtomicInteger();
});
ExecutorService executorPub = Executors.newFixedThreadPool(5, new ThreadFactory()
{
@Override
public Thread newThread(Runnable r)
{
return new Thread(r, "PublisherTask-" + m_count.getAndIncrement());
}
static final AtomicInteger m_count = new AtomicInteger();
});
NamedTopic<String> topic = session.getTopic(sTopicName);
List<SubscriberTask> subscribers = new ArrayList<>();
List<PublisherTask> publishers = new ArrayList<>();
Queue<Integer> queueId = new LinkedList<>();
for (int i = 0; i < 10; i++)
{
Integer nId = queueId.poll();
if (nId == null)
{
subscribers.add(new SubscriberTask(topic, i));
}
else
{
subscribers.add(new SubscriberTask(topic, i, nId));
}
}
int cChannel = 17;
int nId = 0;
for (int i = 0; i < 5; i++)
{
publishers.add(new PublisherTask(topic, nId++, cChannel));
}
for (int i = 0; i < 5; i++)
{
Thread.sleep(2000);
cChannel = cChannel + (i * 3);
publishers.add(new PublisherTask(topic, nId++, cChannel));
}
subscribers.forEach(executorSub::submit);
publishers.forEach(executorPub::submit);
executorPub.shutdown();
executorSub.shutdown();
executorPub.awaitTermination(5, TimeUnit.MINUTES);
executorSub.awaitTermination(5, TimeUnit.MINUTES);
Map<PositionAndChannel, String> mapPublished = new HashMap<>();
Map<PositionAndChannel, String> mapReceived = new HashMap<>();
for (PublisherTask publisherTask : publishers)
{
Eventually.assertDeferred(publisherTask.m_future::isDone, is(true));
publisherTask.m_future.get();
mapPublished.putAll(publisherTask.m_mapMessage);
}
for (SubscriberTask subscriberTask : subscribers)
{
Eventually.assertDeferred(subscriberTask.m_future::isDone, is(true));
subscriberTask.m_future.get();
mapReceived.putAll(subscriberTask.m_mapMessage);
}
FinalSubscriberTask finalSubscriberTask = new FinalSubscriberTask(topic, subscribers.size());
subscribers.add(finalSubscriberTask);
finalSubscriberTask.run();
mapReceived.putAll(finalSubscriberTask.m_mapMessage);
int[] anPublished = mapPublished.keySet().stream().mapToInt(k -> k.m_cChannel).distinct().sorted().toArray();
int[] anReceived = mapReceived.keySet().stream().mapToInt(k -> k.m_cChannel).distinct().sorted().toArray();
Set<PositionAndChannel> setMissing = new HashSet<>(mapPublished.keySet());
setMissing.removeAll(mapReceived.keySet());
Logger.info("Published to : " + Arrays.toString(anPublished) + " of " + cChannel);
Logger.info("Received from : " + Arrays.toString(anReceived) + " of " + cChannel);
Logger.info("Missing : ");
if (!setMissing.isEmpty())
{
// The test will fail in later assertions below, so we dump out some debug information
Set<Integer> setChannel = new HashSet<>();
for (PositionAndChannel p : setMissing)
{
setChannel.add(p.m_cChannel);
Logger.info(p.toString());
}
for (int nChannel : setChannel)
{
for (SubscriberTask subscriberTask : subscribers)
{
Position position = subscriberTask.m_mapFirst.get(nChannel);
if (position != null)
{
Logger.info("Channel " + nChannel + " Subscriber " + subscriberTask.m_nId
+ " first=" + position + " last=" + subscriberTask.m_mapLast.get(nChannel));
}
}
}
for (SubscriberTask subscriber : subscribers)
{
subscriber.dumpStats();
}
}
assertThat(mapReceived.size(), is(mapPublished.size()));
assertThat(mapReceived, is(mapPublished));
long cChannelPublished = mapPublished.keySet().stream()
.mapToInt(k -> k.m_cChannel)
.distinct()
.count();
long cChannelReceived = mapReceived.keySet().stream()
.mapToInt(k -> k.m_cChannel)
.distinct()
.count();
assertThat(cChannelReceived, is(cChannelPublished));
}
}
@Test
public void shouldIncreaseChannelCountWhileActiveSubscriber() throws Exception
{
String sTopicName = getTopicName();
try (Session session = createSession())
{
NamedTopic<String> topic = session.getTopic(sTopicName);
int nChannelOne = 1;
int nChannelTwo = 21;
try (Subscriber<String> subscriberOne = (NamedTopicSubscriber<String>) topic.createSubscriber(inGroup("test-group"), completeOnEmpty(), NamedTopicSubscriber.withIdentifyingName("one"));
Publisher<String> publisherOne = topic.createPublisher(NamedTopicPublisher.ChannelCount.of(17), Publisher.OrderBy.id(nChannelOne));
Publisher<String> publisherTwo = topic.createPublisher(NamedTopicPublisher.ChannelCount.of(34), Publisher.OrderBy.id(nChannelTwo)))
{
List<String> listMessage = new ArrayList<>();
for (int i = 0; i < 10; i++)
{
publisherOne.publish("message-1-" + i).get(1, TimeUnit.MINUTES);
}
Subscriber.Element<String> element = subscriberOne.receive().get(1, TimeUnit.MINUTES);
String sValue = element.getValue() + " from " + subscriberOne;
listMessage.add(sValue);
System.err.println("Received: " + sValue);
element.commit();
System.err.println("Publishing from Publisher Two: " + publisherTwo);
publisherTwo.publish("message-2-0").get(1, TimeUnit.MINUTES);
System.err.println("Creating Additional Subscribers");
NamedTopicSubscriber<String> subscriberTwo = (NamedTopicSubscriber<String>) topic.createSubscriber(inGroup("test-group"), completeOnEmpty(), NamedTopicSubscriber.withIdentifyingName("two"));
NamedTopicSubscriber<String> subscriberThree = (NamedTopicSubscriber<String>) topic.createSubscriber(inGroup("test-group"), completeOnEmpty(), NamedTopicSubscriber.withIdentifyingName("three"));
System.err.println("Receive from Subscriber Two");
element = subscriberTwo.receive().get(1, TimeUnit.MINUTES);
if (element != null)
{
sValue = element.getValue() + " from " + subscriberTwo;
listMessage.add(sValue);
System.err.println("Received: " + sValue);
element.commit();
}
System.err.println("Receive from Subscriber Three");
element = subscriberThree.receive().get(1, TimeUnit.MINUTES);
if (element != null)
{
sValue = element.getValue() + " from " + subscriberThree;
listMessage.add(sValue);
System.err.println("Received: " + sValue);
element.commit();
}
System.err.println("Subscribers:");
System.err.println(subscriberOne);
System.err.println(subscriberTwo);
System.err.println(subscriberThree);
long cNotifyTwo = subscriberTwo.getNotify();
long cNotifyThree = subscriberThree.getNotify();
System.err.println("Publishing more messages from publisher two");
for (int i = 1; i < 10; i++)
{
publisherTwo.publish("message-2-" + i).get(1, TimeUnit.MINUTES);
}
// wait for whichever subscriber owns channel "nChannelTwo" to be notified of additional messages
//noinspection StatementWithEmptyBody
if (subscriberOne.isOwner(nChannelTwo))
{
// nothing to do as subscriber one will not be waiting
}
else if (subscriberTwo.isOwner(nChannelTwo))
{
System.err.println("Waiting for Subscriber two to be notified");
Eventually.assertDeferred(subscriberTwo::getNotify, is(greaterThan(cNotifyTwo)));
}
else if (subscriberThree.isOwner(nChannelTwo))
{
System.err.println("Waiting for Subscriber three to be notified");
Eventually.assertDeferred(subscriberThree::getNotify, is(greaterThan(cNotifyThree)));
}
for (Subscriber<String> subscriber : Arrays.asList(subscriberOne, subscriberTwo, subscriberThree))
{
System.err.println(">>> Using " + subscriber);
// subscriber.printChannels(System.err);
// subscriber.printPreFetchCache(System.err);
element = subscriber.receive().get(1, TimeUnit.MINUTES);
while (element != null)
{
sValue = element.getValue() + " from " + subscriber;
listMessage.add(sValue);
System.err.println("Received: " + sValue);
element.commit();
element = subscriber.receive().get(1, TimeUnit.MINUTES);
}
}
System.err.println(">>>> Received messages:");
listMessage.forEach(System.err::println);
assertThat(listMessage.size(), is(greaterThanOrEqualTo(20)));
}
}
}
// ----- helper methods -------------------------------------------------
protected abstract String getTopicName();
protected abstract Session createSession(Option... options);
// ----- inner class: SubscriberTask ------------------------------------
static class SubscriberTask
implements Runnable
{
public SubscriberTask(NamedTopic<String> topic, int nId)
{
this(topic, nId, -1);
}
public SubscriberTask(NamedTopic<String> topic, int nId, int nNotificationId)
{
m_topic = topic;
m_nId = nId;
m_nNotificationId = nNotificationId;
}
public Map<PositionAndChannel, String> getMessages()
{
return m_mapMessage;
}
protected Subscriber<String> createSubscriber()
{
if (m_nNotificationId == -1)
{
return m_topic.createSubscriber(inGroup("test"),
NamedTopicSubscriber.withIdentifyingName(String.valueOf(m_nId)),
Subscriber.CompleteOnEmpty.enabled());
}
return m_topic.createSubscriber(inGroup("test"),
NamedTopicSubscriber.withIdentifyingName(String.valueOf(m_nId)),
NamedTopicSubscriber.withNotificationId(m_nNotificationId),
Subscriber.CompleteOnEmpty.enabled());
}
@Override
public void run()
{
try (Subscriber<String> subscriber = m_subscriber = createSubscriber())
{
Logger.info("SubscriberTask starting: " + subscriber);
try
{
long nStart = System.currentTimeMillis();
long nNow = nStart;
long nEnd = nStart + 2000 + ((long) m_nId * 1000);
while (nNow < nEnd)
{
CompletableFuture<Subscriber.Element<String>> future = subscriber.receive();
Subscriber.Element<String> element = future.get(1, TimeUnit.MINUTES);
if (element != null)
{
int nChannel = element.getChannel();
Position position = element.getPosition();
m_mapFirst.putIfAbsent(nChannel, position);
m_mapLast.put(nChannel, position);
m_mapMessage.put(new PositionAndChannel(nChannel, position), element.getValue());
element.commit();
}
nNow = System.currentTimeMillis();
}
m_future.complete(null);
Logger.info("SubscriberTask " + m_nId + " completed (" + m_mapMessage.size() + ") subscriber=" + subscriber);
}
catch (TimeoutException e)
{
Logger.err("Subscriber " + m_nId + " timed-out subscriber=" + subscriber);
m_future.complete(null);
}
}
catch (Throwable t)
{
m_future.completeExceptionally(t);
Logger.info("SubscriberTask " + m_nId + " completed exceptionally (" + m_mapMessage.size() + ") " + t.getMessage());
}
}
public void dumpStats()
{
if (m_subscriber != null)
{
NamedTopicSubscriber<?> subscriber = (NamedTopicSubscriber<?>) m_subscriber;
System.err.println("--------------------------------------------------");
System.err.println("Attributes for subscriber: id=" + subscriber.getId()
+ " name=" + subscriber.getIdentifyingName());
SubscriberModel model = new SubscriberModel((NamedTopicSubscriber<?>) m_subscriber);
model.dumpAttributes(System.err);
System.err.println("--------------------------------------------------");
}
}
protected final NamedTopic<String> m_topic;
protected Subscriber<String> m_subscriber;
protected final int m_nId;
protected final int m_nNotificationId;
protected final Map<PositionAndChannel, String> m_mapMessage = new HashMap<>();
protected final Map<Integer, Position> m_mapFirst = new HashMap<>();
protected final Map<Integer, Position> m_mapLast = new HashMap<>();
protected final CompletableFuture<Void> m_future = new CompletableFuture<>();
}
// ----- inner class: FinalSubscriberTask -------------------------------
protected static class FinalSubscriberTask
extends SubscriberTask
{
public FinalSubscriberTask(NamedTopic<String> topic, int nId)
{
super(topic, nId);
}
public FinalSubscriberTask(NamedTopic<String> topic, int nId, int nNotificationId)
{
super(topic, nId, nNotificationId);
}
@Override
public void run()
{
try (Subscriber<String> subscriber = m_subscriber = createSubscriber())
{
Logger.info("FinalSubscriberTask starting: " + subscriber);
CompletableFuture<Subscriber.Element<String>> future = subscriber.receive();
Subscriber.Element<String> element = future.get(1, TimeUnit.MINUTES);
while (element != null)
{
m_mapMessage.put(new PositionAndChannel(element.getChannel(), element.getPosition()), element.getValue());
//element.commit();
element = subscriber.receive().get(1, TimeUnit.MINUTES);
}
m_future.complete(null);
Logger.info("FinalSubscriberTask " + m_nId + " completed (" + m_mapMessage.size() + ") subscriber=" + subscriber);
}
catch (Throwable t)
{
m_future.completeExceptionally(t);
}
}
}
// ----- inner class: PublisherTask -------------------------------------
protected static class PublisherTask
implements Runnable
{
public PublisherTask(NamedTopic<String> topic, int nId, int cChannel)
{
m_topic = topic;
m_nId = nId;
m_cChannel = cChannel;
}
public Map<PositionAndChannel, String> getMessages()
{
return m_mapMessage;
}
@Override
public void run()
{
Logger.info("PublisherTask " + m_nId + " starting");
try (Publisher<String> publisher = m_topic.createPublisher(Publisher.OrderBy.roundRobin(),
NamedTopicPublisher.ChannelCount.of(m_cChannel)))
{
long nStart = System.currentTimeMillis();
long nNow = nStart;
long nEnd = nStart + 2000 + ((long) m_nId * 1000);
int cMessage = 0;
while (nNow < nEnd)
{
for (int i = 0; i < m_cChannel; i++)
{
String sMessage = "message-" + m_nId + "-" + cMessage++;
CompletableFuture<Publisher.Status> future = publisher.publish(sMessage);
Publisher.Status status = future.get(1, TimeUnit.MINUTES);
m_mapMessage.put(new PositionAndChannel(status.getChannel(), status.getPosition()), sMessage);
}
nNow = System.currentTimeMillis();
}
m_future.complete(null);
}
catch (Throwable t)
{
m_future.completeExceptionally(t);
}
Logger.info("PublisherTask " + m_nId + " completed (" + m_mapMessage.size() + ")");
}
private final NamedTopic<String> m_topic;
private final int m_nId;
private final int m_cChannel;
private final Map<PositionAndChannel, String> m_mapMessage = new HashMap<>();
private final CompletableFuture<Void> m_future = new CompletableFuture<>();
}
// ----- inner class: PositionAndChannel --------------------------------
/**
 * An immutable key pairing a channel number with a {@link Position} within that
 * channel; ordered first by channel, then by position.
 */
protected static class PositionAndChannel
implements Comparable<PositionAndChannel>
{
public PositionAndChannel(int cChannel, Position position)
{
m_cChannel = cChannel;
m_position = position;
}

@Override
public int compareTo(PositionAndChannel other)
{
// order by channel first; fall back to position only on a tie
int nResult = Integer.compare(m_cChannel, other.m_cChannel);
if (nResult != 0)
{
return nResult;
}
return m_position.compareTo(other.m_position);
}

@Override
public boolean equals(Object o)
{
if (o == this)
{
return true;
}
if (o == null || !getClass().equals(o.getClass()))
{
return false;
}
PositionAndChannel other = (PositionAndChannel) o;
return other.m_cChannel == m_cChannel
&& Objects.equals(other.m_position, m_position);
}

@Override
public int hashCode()
{
return Objects.hash(m_cChannel, m_position);
}

@Override
public String toString()
{
return new StringBuilder("[channel=")
.append(m_cChannel)
.append(':')
.append(m_position)
.append(']')
.toString();
}

// the channel number
private final int m_cChannel;
// the position within the channel
private final Position m_position;
}
// ----- data members ---------------------------------------------------
@ClassRule
public static final ThreadDumpOnTimeoutRule timeout = ThreadDumpOnTimeoutRule.after(30, TimeUnit.MINUTES);
public static final String PROP_CHANNELS = "coherence.channel.count";
public static final int STORAGE_MEMBER_COUNT = 3;
public static final int STORAGE_CHANNEL_COUNT = 17;
public static final String CACHE_CONFIG_FILE = "topics-channel-config.xml";
@Rule(order = 0)
public TestLogs m_testLogs = new TestLogs(NamedTopicTests.class);
@Rule(order = 1)
public TestName m_testWatcher = new TestName();
public CoherenceClusterResource m_cluster;
}
|
google/closure-compiler | 35,915 | test/com/google/javascript/jscomp/VariableReferenceCheckTest.java | /*
* Copyright 2008 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
import static com.google.javascript.jscomp.VariableReferenceCheck.DECLARATION_NOT_DIRECTLY_IN_BLOCK;
import static com.google.javascript.jscomp.VariableReferenceCheck.EARLY_EXPORTS_REFERENCE;
import static com.google.javascript.jscomp.VariableReferenceCheck.EARLY_REFERENCE;
import static com.google.javascript.jscomp.VariableReferenceCheck.EARLY_REFERENCE_ERROR;
import static com.google.javascript.jscomp.VariableReferenceCheck.REASSIGNED_CONSTANT;
import static com.google.javascript.jscomp.VariableReferenceCheck.REDECLARED_VARIABLE;
import static com.google.javascript.jscomp.VariableReferenceCheck.REDECLARED_VARIABLE_ERROR;
import com.google.javascript.jscomp.deps.ModuleLoader;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/**
* Test that warnings are generated in appropriate cases and appropriate cases only by
* VariableReferenceCheck
*/
@RunWith(JUnit4.class)
public final class VariableReferenceCheckTest extends CompilerTestCase {
private static final String LET_RUN = "let a = 1; let b = 2; let c = a + b, d = c;";
private static final String VARIABLE_RUN = "var a = 1; var b = 2; var c = a + b, d = c;";
@Override
protected CompilerPass getProcessor(Compiler compiler) {
// Treats bad reads as errors, and reports bad write warnings.
return new VariableReferenceCheck(compiler);
}
@Test
public void testWithImportMeta() {
// just to confirm that presence of import.meta does not cause a compiler crash
testSame("export function g() { return import.meta; }");
}
@Test
public void testDoubleTryCatch() {
testSame(
"""
function g() {
return f;
function f() {
try {
} catch (e) {
alert(e);
}
try {
} catch (e) {
alert(e);
}
}
}
""");
}
@Test
public void testDoubleTryCatch_withES6Modules() {
testSame(
"""
export
function g() {
return f;
function f() {
try {
} catch (e) {
alert(e);
}
try {
} catch (e) {
alert(e);
}
}
}
""");
}
@Test
public void testCorrectCode() {
assertNoWarning("function foo(d) { (function() { d.foo(); }); d.bar(); } ");
assertNoWarning("function foo() { bar(); } function bar() { foo(); } ");
assertNoWarning("function f(d) { d = 3; }");
assertNoWarning(VARIABLE_RUN);
assertNoWarning("if (a) { var x; }");
assertNoWarning("function f() { " + VARIABLE_RUN + "}");
assertNoWarning(LET_RUN);
assertNoWarning("function f() { " + LET_RUN + "}");
assertNoWarning("try { let e; } catch (e) { let x; }");
}
@Test
public void testCorrectCode_withES6Modules() {
assertNoWarning("export function foo(d) { (function() { d.foo(); }); d.bar(); } ");
}
@Test
public void testCorrectShadowing() {
assertNoWarning(VARIABLE_RUN + "function f() { " + VARIABLE_RUN + "}");
}
@Test
public void testCorrectShadowing_withES6Modules() {
assertNoWarning(VARIABLE_RUN + "export function f() { " + VARIABLE_RUN + "}");
}
@Test
public void testCorrectRedeclare() {
assertNoWarning("function f() { if (1) { var a = 2; } else { var a = 3; } }");
}
@Test
public void testCorrectRedeclare_withES6Modules() {
assertNoWarning("export function f() { if (1) { var a = 2; } else { var a = 3; } }");
}
@Test
public void testCorrectRecursion() {
assertNoWarning("function f() { var x = function() { x(); }; }");
}
@Test
public void testCorrectRecursion_withES6Modules() {
assertNoWarning("export function f() { var x = function() { x(); }; }");
}
@Test
public void testCorrectCatch() {
assertNoWarning("function f() { try { var x = 2; } catch (x) {} }");
assertNoWarning("function f(e) { e = 3; try {} catch (e) {} }");
}
@Test
public void testCorrectCatch_withES6Modules() {
assertNoWarning("export function f() { try { var x = 2; } catch (x) {} }");
}
@Test
public void testRedeclare() {
// Only test local scope since global scope is covered elsewhere
assertRedeclare("function f() { var a = 2; var a = 3; }");
assertRedeclare("function f(a) { var a = 2; }");
assertRedeclare("function f(a) { if (!a) var a = 6; }");
// NOTE: We decided to not give warnings to the following cases. The function won't be
// overwritten at runtime anyway.
assertNoWarning("function f() { var f = 1; }");
assertNoWarning("function f() { let f = 1; }");
}
@Test
public void testRedeclare_withES6Modules() {
assertRedeclare("export function f() { var a = 2; var a = 3; }");
assertNoWarning("export function f() { let f = 1; }");
// In an ES6 module vars are in the module scope, not global, so they are covered here.
assertRedeclare("export var a = 2; var a = 3;");
assertRedeclare("export var a = 2; if (a) var a = 3;");
assertRedeclare("function f() {} function f() {} export {f};");
}
@Test
public void testIssue166a() {
assertRedeclareError("try { throw 1 } catch(e) { /** @suppress {duplicate} */ var e=2 }");
}
@Test
public void testIssue166b() {
assertRedeclareError(
"function a() { try { throw 1 } catch(e) { /** @suppress {duplicate} */ var e=2 } };");
}
@Test
public void testIssue166b_withES6Modules() {
assertRedeclareError(
"""
export function a() {
try {
throw 1
} catch (e) {
/** @suppress {duplicate} */
var e = 2
}
};
""");
}
@Test
public void testIssue166c() {
assertRedeclareError(
"var e = 0; try { throw 1 } catch(e) { /** @suppress {duplicate} */ var e=2 }");
}
@Test
public void testIssue166d() {
assertRedeclareError(
"""
function a() {
var e = 0; try { throw 1 } catch(e) {
/** @suppress {duplicate} */ var e = 2;
}
};
""");
}
@Test
public void testIssue166e() {
testSame("var e = 2; try { throw 1 } catch(e) {}");
}
@Test
public void testIssue166e_withES6Modules() {
testSame("export var e = 2; try { throw 1 } catch(e) {}");
}
@Test
public void testIssue166f() {
testSame(
"""
function a() {
var e = 2;
try { throw 1 } catch(e) {}
}
""");
}
@Test
public void testEarlyReference() {
assertEarlyReferenceWarning("function f() { a = 2; var a = 3; }");
}
@Test
public void testEarlyReference_withES6Modules() {
assertEarlyReferenceWarning("export function f() { a = 2; var a = 3; }");
}
@Test
public void testCorrectEarlyReference() {
assertNoWarning("var goog = goog || {}");
assertNoWarning("var google = google || window['google'] || {}");
assertNoWarning("function f() { a = 2; } var a = 2;");
}
@Test
public void testCorrectEarlyReferenceLogicalAssignment() {
// These patterns are normalized away
assertNoWarning("function f() { a ||= {}; } let a;");
assertNoWarning("function f() { a &&= {}; } let a;");
assertNoWarning("function f() { a ??= {}; } let a;");
}
@Test
public void testCorrectEarlyReference_withES6Modules() {
assertNoWarning("export function f() { a = 2; } var a = 2;");
}
@Test
public void testUnreferencedBleedingFunction() {
assertNoWarning("var x = function y() {}");
assertNoWarning("var x = function y() {}; var y = 1;");
}
@Test
public void testUnreferencedBleedingFunction_withES6Modules() {
assertNoWarning("export var x = function y() {}");
}
@Test
public void testReferencedBleedingFunction() {
assertNoWarning("var x = function y() { return y(); }");
}
@Test
public void testReferencedBleedingFunction_withES6Modules() {
assertNoWarning("export var x = function y() { return y(); }");
}
@Test
public void testVarShadowsFunctionName() {
assertNoWarning("var x = function y() { var y; }");
assertNoWarning("var x = function y() { let y; }");
}
@Test
public void testVarShadowsFunctionName_withES6Modules() {
assertNoWarning("export var x = function y() { var y; }");
assertNoWarning("export var x = function y() { let y; }");
}
@Test
public void testDoubleDeclaration() {
assertRedeclare("function x(y) { if (true) { var y; } }");
}
@Test
public void testDoubleDeclaration2() {
assertRedeclare("function x() { var y; if (true) { var y; } }");
}
@Test
public void testDoubleDeclaration_withES6Modules() {
assertRedeclare("export function x(y) { if (true) { var y; } }");
}
@Test
public void testHoistedFunction1() {
assertNoWarning("f(); function f() {}");
}
@Test
public void testHoistedFunction2() {
assertNoWarning("function g() { f(); function f() {} }");
}
@Test
public void testHoistedFunction_withES6Modules() {
assertNoWarning("export function g() { f(); function f() {} }");
}
@Test
public void testNonHoistedFunction() {
assertEarlyReferenceWarning("if (true) { f(); function f() {} }");
}
@Test
public void testNonHoistedFunction2() {
assertNoWarning("if (false) { function f() {} f(); }");
}
@Test
public void testNonHoistedFunction3() {
assertNoWarning("function g() { if (false) { function f() {} f(); }}");
}
@Test
public void testNonHoistedFunction4() {
assertNoWarning("if (false) { function f() {} } f();");
}
@Test
public void testNonHoistedFunction5() {
assertNoWarning("function g() { if (false) { function f() {} } f(); }");
}
@Test
public void testNonHoistedFunction6() {
assertEarlyReferenceWarning("if (false) { f(); function f() {} }");
}
@Test
public void testNonHoistedFunction7() {
assertEarlyReferenceWarning("function g() { if (false) { f(); function f() {} }}");
}
@Test
public void testNonHoistedFunction_withES6Modules() {
assertEarlyReferenceWarning("export function g() { if (false) { f(); function f() {} }}");
}
@Test
public void testNonHoistedRecursiveFunction1() {
assertNoWarning("if (false) { function f() { f(); }}");
}
@Test
public void testNonHoistedRecursiveFunction2() {
assertNoWarning("function g() { if (false) { function f() { f(); }}}");
}
@Test
public void testNonHoistedRecursiveFunction3() {
assertNoWarning("function g() { if (false) { function f() { f(); g(); }}}");
}
@Test
public void testNonHoistedRecursiveFunction_withES6Modules() {
assertNoWarning("export function g() { if (false) { function f() { f(); g(); }}}");
}
@Test
public void testForOf() {
assertEarlyReferenceError("for (let x of []) { console.log(x); let x = 123; }");
assertNoWarning("for (let x of []) { let x; }");
}
@Test
public void testForAwaitOf() {
assertEarlyReferenceError(
"async () => { for await (let x of []) { console.log(x); let x = 123; } }");
assertNoWarning("async () => { for (let x of []) { let x; } }");
}
@Test
public void testDestructuringInFor() {
testSame("for (let [key, val] of X){}");
testSame("for (let [key, [nestKey, nestVal], val] of X){}");
testSame("var {x: a, y: b} = {x: 1, y: 2}; a++; b++;");
testWarning("a++; var {x: a} = {x: 1};", EARLY_REFERENCE);
}
@Test
public void testSuppressDuplicate_first() {
String code = "/** @suppress {duplicate} */ var google; var google";
testSame(code);
}
@Test
public void testSuppressDuplicate_second() {
String code = "var google; /** @suppress {duplicate} */ var google";
testSame(code);
}
@Test
public void testSuppressDuplicate_fileoverview() {
String code =
"""
/** @fileoverview @suppress {duplicate} */
/** @type {?} */ var google;
var google
""";
testSame(code);
}
@Test
public void testNoWarnDuplicateInExterns2() {
// Verify we don't complain about early references in externs
String externs = "window; var window;";
String code = "";
testSame(externs(externs), srcs(code));
}
@Test
public void testNoWarnDuplicateInExterns_withES6Modules() {
String externs = "export var google; /** @suppress {duplicate} */ var google";
String code = "";
testSame(externs(externs), srcs(code));
}
@Test
public void testImportStar() {
testSame(
srcs(
SourceFile.fromCode("foo.js", ""),
SourceFile.fromCode("bar.js", "import * as ns from './foo.js'")));
}
@Test
public void testUnusedCompoundAssign_withES6Modules() {
assertNoWarning(
"""
export function f(elapsed) {
let fakeMs = 0;
stubs.replace(Date, 'now', () => fakeMs -= elapsed);
}
""");
}
@Test
public void testGoogModule_duplicateRequire() {
assertRedeclareError(
"goog.module('bar'); const X = goog.require('foo.X'); const X = goog.require('foo.X');");
assertRedeclareError(
"goog.module('bar'); let X = goog.require('foo.X'); let X = goog.require('foo.X');");
assertRedeclareError(
"goog.module('bar'); const X = goog.require('foo.X'); let X = goog.require('foo.X');");
assertRedeclareError(
"goog.module('bar'); let X = goog.require('foo.X'); const X = goog.require('foo.X');");
}
@Test
public void testGoogProvide_ok() {
assertNoWarning("goog.provide('foo');");
assertNoWarning("goog.provide('foo'); foo = 0;");
assertNoWarning("goog.provide('foo'); var foo = 0;");
assertNoWarning("goog.provide('foo.bar');");
assertNoWarning("goog.provide('foo.bar'); foo.bar = 0;");
}
@Test
public void testUndeclaredLet() {
assertEarlyReferenceError("if (a) { x = 3; let x;}");
assertEarlyReferenceError(
"""
var x = 1;
if (true) {
x++;
let x = 3;
}
""");
}
@Test
public void testUndeclaredLet_withES6Modules() {
assertEarlyReferenceError(
"""
export var x = 1;
if (true) {
x++;
let x = 3;
}
""");
}
@Test
public void testUndeclaredConst() {
assertEarlyReferenceError("if (a) { x = 3; const x = 3;}");
// For the following, IE 11 gives "Assignment to const", but technically
// they are also undeclared references, which get caught in the first place.
assertEarlyReferenceError(
"""
var x = 1;
if (true) {
x++;
const x = 3;
}
""");
assertEarlyReferenceError("a = 1; const a = 0;");
assertEarlyReferenceError("a++; const a = 0;");
}
@Test
public void testIllegalLetShadowing() {
assertRedeclareError("if (a) { let x; var x;}");
assertRedeclareError("if (a) { let x; let x;}");
assertRedeclareError(
"""
function f() {
let x;
if (a) {
var x;
}
}
""");
assertNoWarning(
"""
function f() {
if (a) {
let x;
}
var x;
}
""");
assertNoWarning(
"""
function f() {
if (a) { let x; }
if (b) { var x; }
}
""");
assertRedeclareError("let x; var x;");
assertRedeclareError("var x; let x;");
assertRedeclareError("let x; let x;");
}
@Test
public void testIllegalLetShadowing_withES6Modules() {
assertRedeclareError(
"""
export function f() {
let x;
if (a) {
var x;
}
}
""");
assertNoWarning(
"""
export function f() {
if (a) {
let x;
}
var x;
}
""");
assertRedeclareError("export let x; var x;");
}
@Test
public void testDuplicateLetConst() {
assertRedeclareError("let x, x;");
assertRedeclareError("const x = 0, x = 0;");
}
@Test
public void testRedeclareInLabel() {
assertRedeclareGlobal("a: var x, x;");
}
@Test
public void testIllegalBlockScopedEarlyReference() {
assertEarlyReferenceError("let x = x");
assertEarlyReferenceError("let [x] = x");
assertEarlyReferenceError("const x = x");
assertEarlyReferenceError("let x = x || 0");
assertEarlyReferenceError("const x = x || 0");
// In the following cases, "x" might not be reachable but we warn anyways
assertEarlyReferenceError("let x = expr || x");
assertEarlyReferenceError("const x = expr || x");
assertEarlyReferenceError("X; class X {};");
}
@Test
public void testIllegalConstShadowing() {
assertRedeclareError("if (a) { const x = 3; var x;}");
assertRedeclareError(
"""
function f() {
const x = 3;
if (a) {
var x;
}
}
""");
}
@Test
public void testIllegalConstShadowing_withES6Modules() {
assertRedeclareError(
"""
export function f() {
const x = 3;
if (a) {
var x;
}
}
""");
}
@Test
public void testVarShadowing() {
assertRedeclareGlobal("if (a) { var x; var x;}");
assertRedeclareError("if (a) { var x; let x;}");
assertRedeclare("function f() { var x; if (a) { var x; }}");
assertRedeclareError("function f() { if (a) { var x; } let x;}");
assertNoWarning("function f() { var x; if (a) { let x; }}");
assertNoWarning(
"""
function f() {
if (a) { var x; }
if (b) { let x; }
}
""");
}
@Test
public void testVarShadowing_withES6Modules01() {
assertRedeclare("export function f() { var x; if (a) { var x; }}");
}
@Test
public void testVarShadowing_withES6Modules02() {
assertRedeclareError("export function f() { if (a) { var x; } let x;}");
}
@Test
public void testVarShadowing_withES6Modules03() {
assertNoWarning("export function f() { var x; if (a) { let x; }}");
}
@Test
public void testVarShadowing_withES6Modules04() {
assertNoWarning(
"""
function f() {
if (a) { var x; }
if (b) { let x; }
}
""");
}
@Test
public void testParameterShadowing() {
assertRedeclareError("function f(x) { let x; }");
assertRedeclareError("function f(x) { const x = 3; }");
assertRedeclareError("function f(X) { class X {} }");
assertRedeclare("function f(x) { function x() {} }");
assertRedeclare("function f(x) { var x; }");
assertRedeclare("function f(x=3) { var x; }");
assertNoWarning("function f(...x) {}");
assertRedeclare("function f(...x) { var x; }");
assertRedeclare("function f(...x) { function x() {} }");
assertRedeclare("function f(x=3) { function x() {} }");
assertNoWarning("function f(x) { if (true) { let x; } }");
assertNoWarning(
"""
function outer(x) {
function inner() {
let x = 1;
}
}
""");
assertNoWarning(
"""
function outer(x) {
function inner() {
var x = 1;
}
}
""");
assertRedeclare("function f({a, b}) { var a = 2 }");
assertRedeclare("function f({a, b}) { if (!a) var a = 6; }");
}
@Test
public void testParameterShadowing_withES6Modules() {
assertRedeclareError("export function f(x) { let x; }");
assertRedeclare("export function f(x) { function x() {} }");
assertRedeclare("export function f(x=3) { var x; }");
assertNoWarning("export function f(...x) {}");
assertNoWarning(
"""
export function outer(x) {
function inner() {
var x = 1;
}
}
""");
}
@Test
public void testReassignedConst() {
assertReassign("const a = 0; a = 1;");
assertReassign("const a = 0; a++;");
}
@Test
public void testLogicalReassignedConst() {
// These patterns are normalized away
assertReassign("const a = 0; a ||= 1;");
assertReassign("const a = 1; a &&= 1;");
assertReassign("const a = null; a ??= 1;");
}
@Test
public void testLetConstNotDirectlyInBlock() {
testSame("if (true) var x = 3;");
testError("if (true) let x = 3;", DECLARATION_NOT_DIRECTLY_IN_BLOCK);
testError("if (true) const x = 3;", DECLARATION_NOT_DIRECTLY_IN_BLOCK);
testError("if (true) class C {}", DECLARATION_NOT_DIRECTLY_IN_BLOCK);
testError("if (true) function f() {}", DECLARATION_NOT_DIRECTLY_IN_BLOCK);
}
@Test
public void testFunctionHoisting() {
assertEarlyReferenceWarning("if (true) { f(); function f() {} }");
}
@Test
public void testFunctionHoistingRedeclaration1() {
String[] js = {
"var x;", "function x() {}",
};
String message = "Variable x declared more than once. First occurrence: testcode0:1:4";
testError(srcs(js), error(VarCheck.VAR_MULTIPLY_DECLARED_ERROR).withMessage(message));
}
@Test
public void testFunctionHoistingRedeclaration2() {
String[] js = {
"function x() {}", "var x;",
};
String message = "Variable x declared more than once. First occurrence: testcode0:1:9";
testError(srcs(js), error(VarCheck.VAR_MULTIPLY_DECLARED_ERROR).withMessage(message));
}
@Test
public void testArrowFunction() {
assertNoWarning("var f = x => { return x+1; };");
assertNoWarning("var odds = [1,2,3,4].filter((n) => n%2 == 1)");
assertRedeclare("var f = x => {var x;}");
assertRedeclareError("var f = x => {let x;}");
}
@Test
public void testArrowFunction_withES6Modules() {
assertNoWarning("export var f = x => { return x+1; };");
assertRedeclare("export var f = x => {var x;}");
assertRedeclareError("export var f = x => {let x;}");
}
  // Redeclaring the catch-clause binding `e` inside the catch block (with let,
  // var, or a function declaration) is always an error.
  @Test
  public void testTryCatch() {
    assertRedeclareError(
        """
        function f() {
          try {
            let e = 0;
            if (true) {
              let e = 1;
            }
          } catch (e) {
            let e;
          }
        }
        """);
    assertRedeclareError(
        """
        function f() {
          try {
            let e = 0;
            if (true) {
              let e = 1;
            }
          } catch (e) {
            var e;
          }
        }
        """);
    assertRedeclareError(
        """
        function f() {
          try {
            let e = 0;
            if (true) {
              let e = 1;
            }
          } catch (e) {
            function e() {
              var e;
            }
          }
        }
        """);
  }

  @Test
  public void testTryCatch_withES6Modules() {
    assertRedeclareError(
        """
        export function f() {
          try {
            let e = 0;
            if (true) {
              let e = 1;
            }
          } catch (e) {
            let e;
          }
        }
        """);
  }

  @Test
  public void testClass() {
    assertNoWarning("class A { f() { return 1729; } }");
  }

  @Test
  public void testClass_withES6Modules() {
    assertNoWarning("export class A { f() { return 1729; } }");
  }

  // The name of a class expression is scoped to the expression itself, so an
  // outer `Foo` is not a redeclaration.
  @Test
  public void testRedeclareClassName() {
    assertNoWarning("var Clazz = class Foo {}; var Foo = 3;");
  }

  @Test
  public void testRedeclareClassName_withES6Modules() {
    assertNoWarning("export var Clazz = class Foo {}; var Foo = 3;");
  }

  @Test
  public void testClassExtend() {
    assertNoWarning("class A {} class C extends A {} C = class extends A {}");
  }

  @Test
  public void testClassExtend_withES6Modules() {
    assertNoWarning("export class A {} class C extends A {} C = class extends A {}");
  }

  /** Variable reference before declaration error should not appear for non-static public fields */
  @Test
  public void testNonStaticPublicFields() {
    assertNoWarning("class Foo { x = bar;} let bar = 1;");
    assertNoWarning("class Foo { x = Enum.X; } const Enum = { X: 1 }");
    assertNoWarning("class Foo { x = new Bar(); } class Bar {}");
  }

  // Static field initializers run at class-definition time, so referencing a
  // later declaration really is an early reference.
  @Test
  public void testStaticPublicFields() {
    assertEarlyReferenceError("class Bar { static x = y; } const y = 3;");
    assertEarlyReferenceError("class Foo { static x = new Bar(); } class Bar {}");
    assertEarlyReferenceError("class Bar { static x = Enum.A; } let Enum = { A: 'str' }");
  }
  // Destructured `var` bindings behave like ordinary vars: use-before-declare
  // is a warning, whether the name appears in the pattern or a default value.
  @Test
  public void testArrayPattern() {
    assertNoWarning("var [a] = [1];");
    assertNoWarning("var [a, b] = [1, 2];");
    assertEarlyReferenceWarning("alert(a); var [a] = [1];");
    assertEarlyReferenceWarning("alert(b); var [a, b] = [1, 2];");
    assertEarlyReferenceWarning("[a] = [1]; var a;");
    assertEarlyReferenceWarning("[a, b] = [1]; var b;");
  }

  @Test
  public void testArrayPattern_withES6Modules01() {
    assertNoWarning("export var [a] = [1];");
  }

  @Test
  public void testArrayPattern_defaultValue() {
    assertNoWarning("var [a = 1] = [2];");
    assertNoWarning("var [a = 1] = [];");
    assertEarlyReferenceWarning("alert(a); var [a = 1] = [2];");
    assertEarlyReferenceWarning("alert(a); var [a = 1] = [];");
    assertEarlyReferenceWarning("alert(a); var [a = b] = [1];");
    assertEarlyReferenceWarning("alert(a); var [a = b] = [];");
  }

  @Test
  public void testArrayPattern_defaultValue_withES6Modules01() {
    assertNoWarning("export var [a = 1] = [2];");
  }

  @Test
  public void testObjectPattern() {
    assertNoWarning("var {a: b} = {a: 1};");
    assertNoWarning("var {a: b} = {};");
    assertNoWarning("var {a} = {a: 1};");
    // 'a' is not declared at all, so the 'a' passed to alert() references
    // the global variable 'a', and there is no warning.
    assertNoWarning("alert(a); var {a: b} = {};");
    assertNoWarning("alert(a); var {a: {a: b}} = {};");
    assertEarlyReferenceWarning("alert(b); var {a: b} = {a: 1};");
    assertEarlyReferenceWarning("alert(a); var {a} = {a: 1};");
    assertEarlyReferenceWarning("({a: b} = {}); var a, b;");
  }

  // Object rest (`...r`) declares a binding just like a named property.
  @Test
  public void testObjectPatternRest() {
    assertNoWarning("var {a: b, ...r} = {a: 1};");
    assertNoWarning("var {a: b, ...r} = {};");
    assertNoWarning("var {a, ...r} = {a: 1};");
    assertNoWarning("alert(r);");
    assertEarlyReferenceWarning("alert(r); var {...r} = {a: 1};");
    assertNoWarning("({...a} = {});");
    assertEarlyReferenceWarning("({...a} = {}); var a;");
  }

  @Test
  public void testObjectPattern_withES6Modules01() {
    assertNoWarning("export var {a: b} = {a: 1};");
  }

  @Test
  public void testObjectPattern_defaultValue() {
    assertEarlyReferenceWarning("alert(b); var {a: b = c} = {a: 1};");
    assertEarlyReferenceWarning("alert(b); var c; var {a: b = c} = {a: 1};");
    assertEarlyReferenceWarning("var {a: b = c} = {a: 1}; var c;");
    assertEarlyReferenceWarning("alert(b); var {a: b = c} = {};");
    assertEarlyReferenceWarning("alert(a); var {a = c} = {a: 1};");
    assertEarlyReferenceWarning("alert(a); var {a = c} = {};");
  }

  @Test
  public void testObjectPattern_defaultValue_withES6Modules() {
    assertEarlyReferenceWarning("export var {a: b = c} = {a: 1}; var c;");
  }

  /** We can't catch all possible runtime errors but it's useful to have some basic checks. */
  @Test
  public void testDefaultParam() {
    assertEarlyReferenceError("function f(x=a) { let a; }");
    assertEarlyReferenceError(
        """
        function f(x=a) { let a; }
        function g(x=1) { var a; }
        """);
    assertEarlyReferenceError("function f(x=a) { var a; }");
    assertEarlyReferenceError("function f(x=a()) { function a() {} }");
    assertEarlyReferenceError("function f(x=[a]) { var a; }");
    assertEarlyReferenceError("function f(x={a}) { let a; }");
    assertEarlyReferenceError("function f(x=y, y=2) {}");
    assertEarlyReferenceError("function f(x={y}, y=2) {}");
    assertEarlyReferenceError("function f(x=x) {}");
    assertEarlyReferenceError("function f([x]=x) {}");
    // x within a function isn't referenced at the time the default value for x is evaluated.
    assertNoWarning("function f(x=()=>x) {}");
    assertNoWarning("function f(x=a) {}");
    assertNoWarning("function f(x=a) {} var a;");
    assertNoWarning("let b; function f(x=b) { var b; }");
    assertNoWarning("function f(y = () => x, x = 5) { return y(); }");
    assertNoWarning("function f(x = new foo.bar()) {}");
    assertNoWarning("var foo = {}; foo.bar = class {}; function f(x = new foo.bar()) {}");
  }

  @Test
  public void testDefaultParam_withES6Modules() {
    assertEarlyReferenceError("export function f(x=a) { let a; }");
    assertNoWarning("export function f(x=()=>x) {}");
  }
  // Destructuring declarations interact with redeclare/early-reference checks
  // exactly like the equivalent sequence of simple var declarations.
  @Test
  public void testDestructuring() {
    testSame(
        """
        function f() {
          var obj = {a:1, b:2};
          var {a:c, b:d} = obj;
        }
        """);
    testSame(
        """
        function f() {
          var obj = {a:1, b:2};
          var {a, b} = obj;
        }
        """);
    assertRedeclare(
        """
        function f() {
          var obj = {a:1, b:2};
          var {a:c, b:d} = obj;
          var c = b;
        }
        """);
    assertEarlyReferenceWarning(
        """
        function f() {
          var {a:c, b:d} = obj;
          var obj = {a:1, b:2};
        }
        """);
    assertEarlyReferenceWarning(
        """
        function f() {
          var {a, b} = obj;
          var obj = {a:1, b:2};
        }
        """);
    assertEarlyReferenceWarning(
        """
        function f() {
          var e = c;
          var {a:c, b:d} = {a:1, b:2};
        }
        """);
  }

  @Test
  public void testDestructuring_withES6Modules() {
    testSame(
        """
        export function f() {
          var obj = {a:1, b:2};
          var {a:c, b:d} = obj;
        }
        """);
    assertRedeclare(
        """
        export function f() {
          var obj = {a:1, b:2};
          var {a:c, b:d} = obj;
          var c = b;
        }
        """);
    assertEarlyReferenceWarning(
        """
        export function f() {
          var {a:c, b:d} = obj;
          var obj = {a:1, b:2};
        }
        """);
  }

  @Test
  public void testDestructuringInLoop() {
    testSame("for (let {length: x} in obj) {}");
    testSame("for (let [{length: z}, w] in obj) {}");
  }
  // Within one destructuring pattern, later bindings may reference earlier ones.
  @Test
  public void testReferencingPreviouslyDeclaredVariableInConst() {
    testSame("const [a, b = a] = [];");
    // TODO(b/111441110): don't error on this. it's valid code.
    assertEarlyReferenceError("for (const [a, b = a] of []);");
  }

  // Referencing a let/const/class before its declaration from an inner block
  // is an error (temporal dead zone); for `var` it is only a warning.
  @Test
  public void testEarlyReferenceInInnerBlock() {
    assertEarlyReferenceError("for (x of [1, 2, 3]) {} let x;");
    assertEarlyReferenceError("{ x; } let x;");
    assertEarlyReferenceError("{ C; } class C {}");
    assertEarlyReferenceWarning("{ x; } var x;");
  }

  @Test
  public void testEarlyVariableReferenceInsideFunction() {
    testSame("function f() { x; } let x; f(); ");
    testSame("function f() { const f = () => x; let x = 3; return f; }");
    // NOTE: this will cause an error at runtime, but we don't report it because we don't track
    // where `f` is being called.
    testSame("function f() { x; } f(); let x;");
    testSame("function f() { x; } f(); var x;");
  }

  // The iterated expression of for-of/for-in is evaluated while the loop
  // variable is still in its temporal dead zone.
  @Test
  public void testEnhancedForLoopTemporalDeadZone() {
    assertEarlyReferenceError("for (let x of [x]);");
    assertEarlyReferenceError("for (let x in [x]);");
    assertEarlyReferenceError("for (const x of [x]);");
    testSame("for (var x of [x]);");
    testSame("for (let x of [() => x]);");
    testSame("let x = 1; for (let y of [x]);");
  }

  @Test
  public void testEnhancedForLoopTemporalDeadZone_withES6Modules() {
    testSame("export let x = 1; for (let y of [x]);");
  }

  // Imported names occupy module scope: any module-level redeclaration — by
  // another declaration form, another import, or a duplicate import specifier —
  // is an error. Shadowing inside a function is fine.
  @Test
  public void testRedeclareVariableFromImport() {
    ignoreWarnings(ModuleLoader.INVALID_MODULE_PATH);
    assertRedeclareError("import {x} from 'whatever'; let x = 0;");
    assertRedeclareError("import {x} from 'whatever'; const x = 0;");
    assertRedeclareError("import {x} from 'whatever'; var x = 0;");
    assertRedeclareError("import {x} from 'whatever'; function x() {}");
    assertRedeclareError("import {x} from 'whatever'; class x {}");
    assertRedeclareError("import x from 'whatever'; let x = 0;");
    assertRedeclareError("import * as ns from 'whatever'; let ns = 0;");
    assertRedeclareError("import {y as x} from 'whatever'; let x = 0;");
    assertRedeclareError("import {x} from 'whatever'; let {x} = {};");
    assertRedeclareError("import {x} from 'whatever'; let [x] = [];");
    assertRedeclareError("import {x, x} from 'whatever';");
    assertRedeclareError("import {x, y as x} from 'whatever';");
    assertRedeclareError("import {z as x, y as x} from 'whatever';");
    assertRedeclareError("import {x} from 'first'; import {x} from 'second';");
    assertRedeclareError("import {x} from 'first'; import {a as x} from 'second';");
    assertRedeclareError("import {b as x} from 'first'; import {a as x} from 'second';");
    testSame("import {x} from 'whatever'; function f() { let x = 0; }");
    testSame("import {x as x} from 'whatever'; function f() { let x = 0; }");
    testSame("import {y as x} from 'whatever'; function f() { let x = 0; }");
  }
  // goog.module `exports`: reading or augmenting exports is fine in these
  // shapes; see the next test for the early-reference cases.
  @Test
  public void testOkExportsRefInGoogModule() {
    testSame("goog.module('m');");
    testSame("goog.module('m'); exports.Foo = 0; exports.Bar = 0;");
    testSame("goog.module('m'); exports = 0;");
    testSame("goog.module('m'); exports = class {}; exports.Foo = class {};");
    testSame("goog.module('m'); function f() { exports = 0; }"); // Bad style but warn elsewhere
    testSame("goog.module('m'); function f() { return exports; } exports = 1;");
  }

  // Assigning to a property of `exports` before `exports` itself is reassigned
  // loses the property, so it is reported as an early exports reference.
  @Test
  public void testBadEarlyExportsRefInGoogModule() {
    testError("goog.module('m'); exports.x = 0; exports = {};", EARLY_EXPORTS_REFERENCE);
    testError("goog.module('m'); exports.x = 0; exports = class Bar {};", EARLY_EXPORTS_REFERENCE);
    testError(
        "goog.module('m'); /** @typedef {string} */ exports.x; exports = {};",
        EARLY_EXPORTS_REFERENCE);
  }

  // The switch discriminant is evaluated outside the switch body's scope, so
  // the `let x` in a case does not shadow it at that point.
  @Test
  public void testReferenceInSwitchCondition_shadowedInSwitchBody() {
    assertNoWarning(
        """
        const x = 0;
        switch (x) {
          case 0:
            let x;
            break;
          default: break;
        }
        """);
  }
  /** Expects the JS to generate one bad-read error. */
  private void assertRedeclare(String js) {
    testWarning(js, REDECLARED_VARIABLE);
  }

  /** Expects the JS to generate one redeclaration error (let/const/class/import). */
  private void assertRedeclareError(String js) {
    testError(js, REDECLARED_VARIABLE_ERROR);
  }

  /** Expects the JS to generate one reassigned-constant error. */
  private void assertReassign(String js) {
    testError(js, REASSIGNED_CONSTANT);
  }

  /** Expects the JS to generate one multiply-declared-global error. */
  private void assertRedeclareGlobal(String js) {
    testError(js, VarCheck.VAR_MULTIPLY_DECLARED_ERROR);
  }

  /** Expects the JS to generate one bad-write warning. */
  private void assertEarlyReferenceWarning(String js) {
    testWarning(js, EARLY_REFERENCE);
  }

  /** Expects the JS to generate one early-reference error (TDZ violation). */
  private void assertEarlyReferenceError(String js) {
    testError(js, EARLY_REFERENCE_ERROR);
  }

  /** Expects the JS to generate no errors or warnings. */
  private void assertNoWarning(String js) {
    testSame(js);
  }
}
|
openjdk/jdk8 | 36,730 | langtools/src/share/classes/com/sun/tools/javap/JavapTask.java | /*
* Copyright (c) 2007, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package com.sun.tools.javap;
import java.io.EOFException;
import java.io.FileNotFoundException;
import java.io.FilterInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.PrintWriter;
import java.io.Reader;
import java.io.StringWriter;
import java.io.Writer;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.net.URLConnection;
import java.security.DigestInputStream;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.MissingResourceException;
import java.util.Objects;
import java.util.ResourceBundle;

import javax.lang.model.element.Modifier;
import javax.lang.model.element.NestingKind;
import javax.tools.Diagnostic;
import javax.tools.DiagnosticListener;
import javax.tools.JavaFileManager;
import javax.tools.JavaFileObject;
import javax.tools.StandardJavaFileManager;
import javax.tools.StandardLocation;

import com.sun.tools.classfile.*;
/**
* "Main" class for javap, normally accessed from the command line
* via Main, or from JSR199 via DisassemblerTool.
*
* <p><b>This is NOT part of any supported API.
* If you write code that depends on this, you do so at your own risk.
* This code and its internal interfaces are subject to change or
* deletion without notice.</b>
*/
public class JavapTask implements DisassemblerTool.DisassemblerTask, Messages {
    /**
     * Thrown for invalid command-line usage; {@code key}/{@code args} identify
     * a localized message resolved through {@link #getMessage}.
     */
    public class BadArgs extends Exception {
        static final long serialVersionUID = 8765093759964640721L;
        BadArgs(String key, Object... args) {
            super(JavapTask.this.getMessage(key, args));
            this.key = key;
            this.args = args;
        }

        // Fluent setter: marks whether the usage summary should be printed
        // after the error message.
        BadArgs showUsage(boolean b) {
            showUsage = b;
            return this;
        }

        final String key;       // resource-bundle message key
        final Object[] args;    // arguments substituted into the message
        boolean showUsage;
    }
    /**
     * One recognized command-line option. Subclasses (the anonymous classes in
     * {@code recognizedOptions}) implement {@link #process} and may override
     * {@link #matches} for prefix-style options such as {@code -XDdetails:}.
     */
    static abstract class Option {
        Option(boolean hasArg, String... aliases) {
            this.hasArg = hasArg;
            this.aliases = aliases;
        }

        /** Returns true if {@code opt} is one of this option's aliases. */
        boolean matches(String opt) {
            for (String a: aliases) {
                if (a.equals(opt))
                    return true;
            }
            return false;
        }

        /** If true, all remaining arguments are consumed without processing. */
        boolean ignoreRest() {
            return false;
        }

        /** Applies the option to {@code task}; {@code arg} is null unless {@code hasArg}. */
        abstract void process(JavapTask task, String opt, String arg) throws BadArgs;

        final boolean hasArg;       // whether the option takes a following argument
        final String[] aliases;
    }
    /**
     * The table of options javap recognizes directly. Options not found here
     * are offered to the file manager (see {@code handleOption}). The three
     * {@code -XD...:} entries override {@code matches} to match on the prefix
     * up to and including the colon, with the value parsed out of the option
     * text itself rather than a separate argument.
     */
    static final Option[] recognizedOptions = {

        new Option(false, "-help", "--help", "-?") {
            void process(JavapTask task, String opt, String arg) {
                task.options.help = true;
            }
        },

        new Option(false, "-version") {
            void process(JavapTask task, String opt, String arg) {
                task.options.version = true;
            }
        },

        new Option(false, "-fullversion") {
            void process(JavapTask task, String opt, String arg) {
                task.options.fullVersion = true;
            }
        },

        // -verbose implies descriptors, flags, and all attributes.
        new Option(false, "-v", "-verbose", "-all") {
            void process(JavapTask task, String opt, String arg) {
                task.options.verbose = true;
                task.options.showDescriptors = true;
                task.options.showFlags = true;
                task.options.showAllAttrs = true;
            }
        },

        new Option(false, "-l") {
            void process(JavapTask task, String opt, String arg) {
                task.options.showLineAndLocalVariableTables = true;
            }
        },

        // The access options (-public/-protected/-package/-p) are mutually
        // exclusive; accessOptions records which were given so that
        // handleOptions can reject conflicting combinations.
        new Option(false, "-public") {
            void process(JavapTask task, String opt, String arg) {
                task.options.accessOptions.add(opt);
                task.options.showAccess = AccessFlags.ACC_PUBLIC;
            }
        },

        new Option(false, "-protected") {
            void process(JavapTask task, String opt, String arg) {
                task.options.accessOptions.add(opt);
                task.options.showAccess = AccessFlags.ACC_PROTECTED;
            }
        },

        new Option(false, "-package") {
            void process(JavapTask task, String opt, String arg) {
                task.options.accessOptions.add(opt);
                task.options.showAccess = 0;
            }
        },

        // -p and -private are aliases; only record the pair once so the
        // duplicate-access-option check is not triggered by giving both.
        new Option(false, "-p", "-private") {
            void process(JavapTask task, String opt, String arg) {
                if (!task.options.accessOptions.contains("-p") &&
                        !task.options.accessOptions.contains("-private")) {
                    task.options.accessOptions.add(opt);
                }
                task.options.showAccess = AccessFlags.ACC_PRIVATE;
            }
        },

        new Option(false, "-c") {
            void process(JavapTask task, String opt, String arg) {
                task.options.showDisassembled = true;
            }
        },

        new Option(false, "-s") {
            void process(JavapTask task, String opt, String arg) {
                task.options.showDescriptors = true;
            }
        },

        // new Option(false, "-all") {
        //     void process(JavapTask task, String opt, String arg) {
        //         task.options.showAllAttrs = true;
        //     }
        // },

        // Options accepted for compatibility with old javap but not supported.
        new Option(false, "-h") {
            void process(JavapTask task, String opt, String arg) throws BadArgs {
                throw task.new BadArgs("err.h.not.supported");
            }
        },

        new Option(false, "-verify", "-verify-verbose") {
            void process(JavapTask task, String opt, String arg) throws BadArgs {
                throw task.new BadArgs("err.verify.not.supported");
            }
        },

        new Option(false, "-sysinfo") {
            void process(JavapTask task, String opt, String arg) {
                task.options.sysInfo = true;
            }
        },

        new Option(false, "-Xold") {
            void process(JavapTask task, String opt, String arg) throws BadArgs {
                task.log.println(task.getMessage("warn.Xold.not.supported"));
            }
        },

        new Option(false, "-Xnew") {
            void process(JavapTask task, String opt, String arg) throws BadArgs {
                // ignore: this _is_ the new version
            }
        },

        new Option(false, "-XDcompat") {
            void process(JavapTask task, String opt, String arg) {
                task.options.compat = true;
            }
        },

        new Option(false, "-XDdetails") {
            void process(JavapTask task, String opt, String arg) {
                task.options.details = EnumSet.allOf(InstructionDetailWriter.Kind.class);
            }
        },

        // -XDdetails:<v>[,<v>...] — each value enables a detail kind, or
        // disables it when prefixed with '-'.
        new Option(false, "-XDdetails:") {
            @Override
            boolean matches(String opt) {
                int sep = opt.indexOf(":");
                return sep != -1 && super.matches(opt.substring(0, sep + 1));
            }

            void process(JavapTask task, String opt, String arg) throws BadArgs {
                int sep = opt.indexOf(":");
                for (String v: opt.substring(sep + 1).split("[,: ]+")) {
                    if (!handleArg(task, v))
                        throw task.new BadArgs("err.invalid.arg.for.option", v);
                }
            }

            boolean handleArg(JavapTask task, String arg) {
                if (arg.length() == 0)
                    return true;

                if (arg.equals("all")) {
                    task.options.details = EnumSet.allOf(InstructionDetailWriter.Kind.class);
                    return true;
                }

                boolean on = true;
                if (arg.startsWith("-")) {
                    on = false;
                    arg = arg.substring(1);
                }

                for (InstructionDetailWriter.Kind k: InstructionDetailWriter.Kind.values()) {
                    if (arg.equalsIgnoreCase(k.option)) {
                        if (on)
                            task.options.details.add(k);
                        else
                            task.options.details.remove(k);
                        return true;
                    }
                }
                return false;
            }
        },

        new Option(false, "-constants") {
            void process(JavapTask task, String opt, String arg) {
                task.options.showConstants = true;
            }
        },

        new Option(false, "-XDinner") {
            void process(JavapTask task, String opt, String arg) {
                task.options.showInnerClasses = true;
            }
        },

        new Option(false, "-XDindent:") {
            @Override
            boolean matches(String opt) {
                int sep = opt.indexOf(":");
                return sep != -1 && super.matches(opt.substring(0, sep + 1));
            }

            void process(JavapTask task, String opt, String arg) throws BadArgs {
                int sep = opt.indexOf(":");
                try {
                    task.options.indentWidth = Integer.valueOf(opt.substring(sep + 1));
                } catch (NumberFormatException e) {
                    // deliberately ignored: a malformed value leaves the default
                }
            }
        },

        new Option(false, "-XDtab:") {
            @Override
            boolean matches(String opt) {
                int sep = opt.indexOf(":");
                return sep != -1 && super.matches(opt.substring(0, sep + 1));
            }

            void process(JavapTask task, String opt, String arg) throws BadArgs {
                int sep = opt.indexOf(":");
                try {
                    task.options.tabColumn = Integer.valueOf(opt.substring(sep + 1));
                } catch (NumberFormatException e) {
                    // deliberately ignored: a malformed value leaves the default
                }
            }
        }

    };
    /**
     * Creates an unconfigured task. The context registers this instance as the
     * Messages provider so the writers can localize their output.
     */
    public JavapTask() {
        context = new Context();
        context.put(Messages.class, this);
        options = Options.instance(context);
        attributeFactory = new Attribute.Factory();
    }

    /**
     * Creates a task writing to {@code out}, reading classes via
     * {@code fileManager}, and reporting diagnostics to
     * {@code diagnosticListener}. Null arguments fall back to defaults when
     * options are handled.
     */
    public JavapTask(Writer out,
            JavaFileManager fileManager,
            DiagnosticListener<? super JavaFileObject> diagnosticListener) {
        this();
        this.log = getPrintWriterForWriter(out);
        this.fileManager = fileManager;
        this.diagnosticListener = diagnosticListener;
    }
public JavapTask(Writer out,
JavaFileManager fileManager,
DiagnosticListener<? super JavaFileObject> diagnosticListener,
Iterable<String> options,
Iterable<String> classes) {
this(out, fileManager, diagnosticListener);
this.classes = new ArrayList<String>();
for (String classname: classes) {
classname.getClass(); // null-check
this.classes.add(classname);
}
try {
if (options != null)
handleOptions(options, false);
} catch (BadArgs e) {
throw new IllegalArgumentException(e.getMessage());
}
}
    /** Sets the locale for messages; null selects the platform default. */
    public void setLocale(Locale locale) {
        if (locale == null)
            locale = Locale.getDefault();
        task_locale = locale;
    }

    /** Directs output to {@code log} (wrapped in a PrintWriter if needed). */
    public void setLog(Writer log) {
        this.log = getPrintWriterForWriter(log);
    }

    /** Directs output to stream {@code s}; null means System.err. */
    public void setLog(OutputStream s) {
        setLog(getPrintWriterForStream(s));
    }
private static PrintWriter getPrintWriterForStream(OutputStream s) {
return new PrintWriter(s == null ? System.err : s, true);
}
private static PrintWriter getPrintWriterForWriter(Writer w) {
if (w == null)
return getPrintWriterForStream(null);
else if (w instanceof PrintWriter)
return (PrintWriter) w;
else
return new PrintWriter(w, true);
}
    /** Sets the receiver for diagnostics. */
    public void setDiagnosticListener(DiagnosticListener<? super JavaFileObject> dl) {
        diagnosticListener = dl;
    }

    /** Sends diagnostics to stream {@code s}; null means System.err. */
    public void setDiagnosticListener(OutputStream s) {
        setDiagnosticListener(getDiagnosticListenerForStream(s));
    }

    private DiagnosticListener<JavaFileObject> getDiagnosticListenerForStream(OutputStream s) {
        return getDiagnosticListenerForWriter(getPrintWriterForStream(s));
    }
private DiagnosticListener<JavaFileObject> getDiagnosticListenerForWriter(Writer w) {
final PrintWriter pw = getPrintWriterForWriter(w);
return new DiagnosticListener<JavaFileObject> () {
public void report(Diagnostic<? extends JavaFileObject> diagnostic) {
switch (diagnostic.getKind()) {
case ERROR:
pw.print(getMessage("err.prefix"));
break;
case WARNING:
pw.print(getMessage("warn.prefix"));
break;
case NOTE:
pw.print(getMessage("note.prefix"));
break;
}
pw.print(" ");
pw.println(diagnostic.getMessage(null));
}
};
}
    /** Result codes, matching javac's exit-code convention.
     */
    static final int
        EXIT_OK = 0,        // Compilation completed with no errors.
        EXIT_ERROR = 1,     // Completed but reported errors.
        EXIT_CMDERR = 2,    // Bad command-line arguments
        EXIT_SYSERR = 3,    // System error or resource exhaustion.
        EXIT_ABNORMAL = 4;  // Compiler terminated abnormally
int run(String[] args) {
try {
handleOptions(args);
// the following gives consistent behavior with javac
if (classes == null || classes.size() == 0) {
if (options.help || options.version || options.fullVersion)
return EXIT_OK;
else
return EXIT_CMDERR;
}
try {
return run();
} finally {
if (defaultFileManager != null) {
try {
defaultFileManager.close();
defaultFileManager = null;
} catch (IOException e) {
throw new InternalError(e);
}
}
}
} catch (BadArgs e) {
reportError(e.key, e.args);
if (e.showUsage) {
log.println(getMessage("main.usage.summary", progname));
}
return EXIT_CMDERR;
} catch (InternalError e) {
Object[] e_args;
if (e.getCause() == null)
e_args = e.args;
else {
e_args = new Object[e.args.length + 1];
e_args[0] = e.getCause();
System.arraycopy(e.args, 0, e_args, 1, e.args.length);
}
reportError("err.internal.error", e_args);
return EXIT_ABNORMAL;
} finally {
log.flush();
}
}
    /** Parses command-line arguments, allowing trailing class names. */
    public void handleOptions(String[] args) throws BadArgs {
        handleOptions(Arrays.asList(args), true);
    }
private void handleOptions(Iterable<String> args, boolean allowClasses) throws BadArgs {
if (log == null) {
log = getPrintWriterForStream(System.out);
if (diagnosticListener == null)
diagnosticListener = getDiagnosticListenerForStream(System.err);
} else {
if (diagnosticListener == null)
diagnosticListener = getDiagnosticListenerForWriter(log);
}
if (fileManager == null)
fileManager = getDefaultFileManager(diagnosticListener, log);
Iterator<String> iter = args.iterator();
boolean noArgs = !iter.hasNext();
while (iter.hasNext()) {
String arg = iter.next();
if (arg.startsWith("-"))
handleOption(arg, iter);
else if (allowClasses) {
if (classes == null)
classes = new ArrayList<String>();
classes.add(arg);
while (iter.hasNext())
classes.add(iter.next());
} else
throw new BadArgs("err.unknown.option", arg).showUsage(true);
}
if (!options.compat && options.accessOptions.size() > 1) {
StringBuilder sb = new StringBuilder();
for (String opt: options.accessOptions) {
if (sb.length() > 0)
sb.append(" ");
sb.append(opt);
}
throw new BadArgs("err.incompatible.options", sb);
}
if ((classes == null || classes.size() == 0) &&
!(noArgs || options.help || options.version || options.fullVersion)) {
throw new BadArgs("err.no.classes.specified");
}
if (noArgs || options.help)
showHelp();
if (options.version || options.fullVersion)
showVersion(options.fullVersion);
}
    /**
     * Dispatches a single option: javap's own recognizedOptions are consulted
     * first, then the file manager is given a chance to claim it (e.g. for
     * -classpath); anything left over is an unknown option.
     *
     * @param name the option text (starts with '-')
     * @param rest the remaining arguments, from which an option value may be
     *             consumed
     */
    private void handleOption(String name, Iterator<String> rest) throws BadArgs {
        for (Option o: recognizedOptions) {
            if (o.matches(name)) {
                if (o.hasArg) {
                    if (rest.hasNext())
                        o.process(this, name, rest.next());
                    else
                        throw new BadArgs("err.missing.arg", name).showUsage(true);
                } else
                    o.process(this, name, null);

                if (o.ignoreRest()) {
                    // drain and discard the remaining arguments
                    while (rest.hasNext())
                        rest.next();
                }
                return;
            }
        }

        if (fileManager.handleOption(name, rest))
            return;

        throw new BadArgs("err.unknown.option", name).showUsage(true);
    }
public Boolean call() {
return run() == 0;
}
public int run() {
if (classes == null || classes.isEmpty()) {
return EXIT_ERROR;
}
context.put(PrintWriter.class, log);
ClassWriter classWriter = ClassWriter.instance(context);
SourceWriter sourceWriter = SourceWriter.instance(context);
sourceWriter.setFileManager(fileManager);
attributeFactory.setCompat(options.compat);
int result = EXIT_OK;
for (String className: classes) {
try {
result = writeClass(classWriter, className);
} catch (ConstantPoolException e) {
reportError("err.bad.constant.pool", className, e.getLocalizedMessage());
result = EXIT_ERROR;
} catch (EOFException e) {
reportError("err.end.of.file", className);
result = EXIT_ERROR;
} catch (FileNotFoundException e) {
reportError("err.file.not.found", e.getLocalizedMessage());
result = EXIT_ERROR;
} catch (IOException e) {
//e.printStackTrace();
Object msg = e.getLocalizedMessage();
if (msg == null) {
msg = e;
}
reportError("err.ioerror", className, msg);
result = EXIT_ERROR;
} catch (Throwable t) {
StringWriter sw = new StringWriter();
PrintWriter pw = new PrintWriter(sw);
t.printStackTrace(pw);
pw.close();
reportError("err.crash", t.toString(), sw.toString());
result = EXIT_ABNORMAL;
}
}
return result;
}
    /**
     * Locates, reads, and writes out a single class; with -XDinner, recurses
     * into the classes listed in its InnerClasses attribute.
     *
     * @param classWriter the writer producing the output
     * @param className   a class name or (for file arguments) a .class path
     * @return an EXIT_* code
     */
    protected int writeClass(ClassWriter classWriter, String className)
            throws IOException, ConstantPoolException {
        JavaFileObject fo = open(className);
        if (fo == null) {
            reportError("err.class.not.found", className);
            return EXIT_ERROR;
        }

        ClassFileInfo cfInfo = read(fo);
        if (!className.endsWith(".class")) {
            // Warn when the class found under this name declares a different
            // name (comparison normalizes '/', '$' and '.' separators).
            String cfName = cfInfo.cf.getName();
            if (!cfName.replaceAll("[/$]", ".").equals(className.replaceAll("[/$]", "."))) {
                reportWarning("warn.unexpected.class", className, cfName.replace('/', '.'));
            }
        }
        write(cfInfo);

        if (options.showInnerClasses) {
            ClassFile cf = cfInfo.cf;
            Attribute a = cf.getAttribute(Attribute.InnerClasses);
            if (a instanceof InnerClasses_attribute) {
                InnerClasses_attribute inners = (InnerClasses_attribute) a;
                try {
                    int result = EXIT_OK;
                    for (int i = 0; i < inners.classes.length; i++) {
                        int outerIndex = inners.classes[i].outer_class_info_index;
                        ConstantPool.CONSTANT_Class_info outerClassInfo = cf.constant_pool.getClassInfo(outerIndex);
                        String outerClassName = outerClassInfo.getName();
                        // Only recurse into entries whose outer class is the
                        // class currently being written.
                        if (outerClassName.equals(cf.getName())) {
                            int innerIndex = inners.classes[i].inner_class_info_index;
                            ConstantPool.CONSTANT_Class_info innerClassInfo = cf.constant_pool.getClassInfo(innerIndex);
                            String innerClassName = innerClassInfo.getName();

                            classWriter.println("// inner class " + innerClassName.replaceAll("[/$]", "."));
                            classWriter.println();
                            result = writeClass(classWriter, innerClassName);
                            // stop at the first inner class that fails
                            if (result != EXIT_OK) return result;
                        }
                    }
                    return result;
                } catch (ConstantPoolException e) {
                    reportError("err.bad.innerclasses.attribute", className);
                    return EXIT_ERROR;
                }
            } else if (a != null) {
                // present but not of the expected attribute type
                reportError("err.bad.innerclasses.attribute", className);
                return EXIT_ERROR;
            }
        }

        return EXIT_OK;
    }
    /**
     * Resolves {@code className} to a file object, trying in order:
     * (1) a class name on the class path, (2) inner-class forms produced by
     * replacing dots with '$' from the right, (3) a .class file path, and
     * (4) a URL.
     *
     * @return the file object, or null if nothing matched
     */
    protected JavaFileObject open(String className) throws IOException {
        // for compatibility, first see if it is a class name
        JavaFileObject fo = getClassFileObject(className);
        if (fo != null)
            return fo;

        // see if it is an inner class, by replacing dots to $, starting from the right
        String cn = className;
        int lastDot;
        while ((lastDot = cn.lastIndexOf(".")) != -1) {
            cn = cn.substring(0, lastDot) + "$" + cn.substring(lastDot + 1);
            fo = getClassFileObject(cn);
            if (fo != null)
                return fo;
        }

        if (!className.endsWith(".class"))
            return null;

        if (fileManager instanceof StandardJavaFileManager) {
            StandardJavaFileManager sfm = (StandardJavaFileManager) fileManager;
            fo = sfm.getJavaFileObjects(className).iterator().next();
            // getLastModified() == 0 is used as the "file does not exist" signal
            if (fo != null && fo.getLastModified() != 0) {
                return fo;
            }
        }

        // see if it is a URL, and if so, wrap it in just enough of a JavaFileObject
        // to suit javap's needs
        if (className.matches("^[A-Za-z]+:.*")) {
            try {
                final URI uri = new URI(className);
                final URL url = uri.toURL();
                final URLConnection conn = url.openConnection();
                return new JavaFileObject() {
                    public Kind getKind() {
                        return JavaFileObject.Kind.CLASS;
                    }

                    public boolean isNameCompatible(String simpleName, Kind kind) {
                        throw new UnsupportedOperationException();
                    }

                    public NestingKind getNestingKind() {
                        throw new UnsupportedOperationException();
                    }

                    public Modifier getAccessLevel() {
                        throw new UnsupportedOperationException();
                    }

                    public URI toUri() {
                        return uri;
                    }

                    public String getName() {
                        return url.toString();
                    }

                    public InputStream openInputStream() throws IOException {
                        return conn.getInputStream();
                    }

                    public OutputStream openOutputStream() throws IOException {
                        throw new UnsupportedOperationException();
                    }

                    public Reader openReader(boolean ignoreEncodingErrors) throws IOException {
                        throw new UnsupportedOperationException();
                    }

                    public CharSequence getCharContent(boolean ignoreEncodingErrors) throws IOException {
                        throw new UnsupportedOperationException();
                    }

                    public Writer openWriter() throws IOException {
                        throw new UnsupportedOperationException();
                    }

                    public long getLastModified() {
                        return conn.getLastModified();
                    }

                    public boolean delete() {
                        throw new UnsupportedOperationException();
                    }

                };
            } catch (URISyntaxException ignore) {
                // not a valid URI after all: fall through and return null
            } catch (IOException ignore) {
                // URL could not be opened: fall through and return null
            }
        }

        return null;
    }
    /**
     * A parsed class file plus the provenance info (digest, size) gathered
     * while reading it; digest and size are only collected under
     * -sysinfo/-verbose and are otherwise null / -1.
     */
    public static class ClassFileInfo {
        ClassFileInfo(JavaFileObject fo, ClassFile cf, byte[] digest, int size) {
            this.fo = fo;
            this.cf = cf;
            this.digest = digest;
            this.size = size;
        }
        public final JavaFileObject fo;     // where the class was read from
        public final ClassFile cf;          // the parsed class file
        public final byte[] digest;         // MD5 of the bytes, or null
        public final int size;              // byte count, or -1 if not tracked
    }
    /**
     * Reads and parses the class file behind {@code fo}. When -sysinfo or
     * -verbose is set, the input stream is additionally wrapped so that an
     * MD5 digest and a byte count are computed as a side effect of reading.
     * (MD5 here is informational output only, not a security measure.)
     */
    public ClassFileInfo read(JavaFileObject fo) throws IOException, ConstantPoolException {
        InputStream in = fo.openInputStream();
        try {
            SizeInputStream sizeIn = null;
            MessageDigest md  = null;
            if (options.sysInfo || options.verbose) {
                try {
                    md = MessageDigest.getInstance("MD5");
                } catch (NoSuchAlgorithmException ignore) {
                    // MD5 unavailable: proceed without a digest
                }
                in = new DigestInputStream(in, md);
                in = sizeIn = new SizeInputStream(in);
            }

            ClassFile cf = ClassFile.read(in, attributeFactory);
            byte[] digest = (md == null) ? null : md.digest();
            int size = (sizeIn == null) ? -1 : sizeIn.size();
            return new ClassFileInfo(fo, cf, digest, size);
        } finally {
            in.close();
        }
    }
    /**
     * Writes out a previously read class, first passing the provenance data
     * (origin, timestamp, digest, size) to the writer when sysinfo/verbose
     * output was requested.
     */
    public void write(ClassFileInfo info) {
        ClassWriter classWriter = ClassWriter.instance(context);
        if (options.sysInfo || options.verbose) {
            classWriter.setFile(info.fo.toUri());
            classWriter.setLastModified(info.fo.getLastModified());
            classWriter.setDigest("MD5", info.digest);
            classWriter.setFileSize(info.size);
        }

        classWriter.write(info.cf);
    }
    /** Sets the class file context for subsequent write(...) calls. */
    protected void setClassFile(ClassFile classFile) {
        ClassWriter classWriter = ClassWriter.instance(context);
        classWriter.setClassFile(classFile);
    }

    /** Sets the enclosing method context for subsequent write(...) calls. */
    protected void setMethod(Method enclosingMethod) {
        ClassWriter classWriter = ClassWriter.instance(context);
        classWriter.setMethod(enclosingMethod);
    }
protected void write(Attribute value) {
AttributeWriter attrWriter = AttributeWriter.instance(context);
ClassWriter classWriter = ClassWriter.instance(context);
ClassFile cf = classWriter.getClassFile();
attrWriter.write(cf, value, cf.constant_pool);
}
protected void write(Attributes attrs) {
AttributeWriter attrWriter = AttributeWriter.instance(context);
ClassWriter classWriter = ClassWriter.instance(context);
ClassFile cf = classWriter.getClassFile();
attrWriter.write(cf, attrs, cf.constant_pool);
}
protected void write(ConstantPool constant_pool) {
ConstantWriter constantWriter = ConstantWriter.instance(context);
constantWriter.writeConstantPool(constant_pool);
}
protected void write(ConstantPool constant_pool, int value) {
ConstantWriter constantWriter = ConstantWriter.instance(context);
constantWriter.write(value);
}
protected void write(ConstantPool.CPInfo value) {
ConstantWriter constantWriter = ConstantWriter.instance(context);
constantWriter.println(value);
}
protected void write(Field value) {
ClassWriter classWriter = ClassWriter.instance(context);
classWriter.writeField(value);
}
protected void write(Method value) {
ClassWriter classWriter = ClassWriter.instance(context);
classWriter.writeMethod(value);
}
/**
 * Lazily create, cache and return the fallback file manager used when the
 * caller did not supply one of its own.
 */
private JavaFileManager getDefaultFileManager(final DiagnosticListener<? super JavaFileObject> dl, PrintWriter log) {
    if (defaultFileManager == null) {
        defaultFileManager = JavapFileManager.create(dl, log);
    }
    return defaultFileManager;
}
/**
 * Locate a class file by class name, searching the platform class path
 * before the user class path.
 * @return the file object, or null if not found on either path
 */
private JavaFileObject getClassFileObject(String className) throws IOException {
    JavaFileObject found =
        fileManager.getJavaFileForInput(StandardLocation.PLATFORM_CLASS_PATH, className, JavaFileObject.Kind.CLASS);
    if (found != null) {
        return found;
    }
    return fileManager.getJavaFileForInput(StandardLocation.CLASS_PATH, className, JavaFileObject.Kind.CLASS);
}
/**
 * Print usage and the documented options to the log.
 * Hidden options (-X*, -fullversion, -h, -verify) are omitted, as are
 * file-manager options the current file manager does not support.
 */
private void showHelp() {
log.println(getMessage("main.usage", progname));
for (Option o: recognizedOptions) {
String name = o.aliases[0].substring(1); // there must always be at least one name
if (name.startsWith("X") || name.equals("fullversion") || name.equals("h") || name.equals("verify"))
continue;
log.println(getMessage("main.opt." + name));
}
String[] fmOptions = { "-classpath", "-cp", "-bootclasspath" };
for (String o: fmOptions) {
// -1 means the file manager does not recognize this option.
if (fileManager.isSupportedOption(o) == -1)
continue;
String name = o.substring(1);
log.println(getMessage("main.opt." + name));
}
}
/** Print the tool version; full build info when {@code full} is set. */
private void showVersion(boolean full) {
    String key = full ? "full" : "release";
    log.println(version(key));
}
// Name of the resource bundle holding version strings; loaded lazily
// (and cached in versionRB) by version().
private static final String versionRBName = "com.sun.tools.javap.resources.version";
private static ResourceBundle versionRB;
/**
 * Look up a version string from the version resource bundle.
 * @param key "release" for the plain version, "full" for the
 *            build-qualified form (see the inline comments)
 * @return the version string, or a fallback message when the bundle or
 *         key is missing
 */
private String version(String key) {
// key=version: mm.nn.oo[-milestone]
// key=full: mm.mm.oo[-milestone]-build
if (versionRB == null) {
try {
versionRB = ResourceBundle.getBundle(versionRBName);
} catch (MissingResourceException e) {
return getMessage("version.resource.missing", System.getProperty("java.version"));
}
}
try {
return versionRB.getString(key);
}
catch (MissingResourceException e) {
return getMessage("version.unknown", System.getProperty("java.version"));
}
}
/** Report an error diagnostic, localized from the given key and args. */
private void reportError(String key, Object... args) {
diagnosticListener.report(createDiagnostic(Diagnostic.Kind.ERROR, key, args));
}
/** Report a note diagnostic, localized from the given key and args. */
private void reportNote(String key, Object... args) {
diagnosticListener.report(createDiagnostic(Diagnostic.Kind.NOTE, key, args));
}
/** Report a warning diagnostic, localized from the given key and args. */
private void reportWarning(String key, Object... args) {
diagnosticListener.report(createDiagnostic(Diagnostic.Kind.WARNING, key, args));
}
/**
 * Create a localized diagnostic with no associated source position:
 * all position/line/column accessors return {@code Diagnostic.NOPOS}
 * and the source is null, since javap messages are not tied to file
 * locations. The message is formatted lazily per requested locale.
 */
private Diagnostic<JavaFileObject> createDiagnostic(
final Diagnostic.Kind kind, final String key, final Object... args) {
return new Diagnostic<JavaFileObject>() {
public Kind getKind() {
return kind;
}
public JavaFileObject getSource() {
return null;
}
public long getPosition() {
return Diagnostic.NOPOS;
}
public long getStartPosition() {
return Diagnostic.NOPOS;
}
public long getEndPosition() {
return Diagnostic.NOPOS;
}
public long getLineNumber() {
return Diagnostic.NOPOS;
}
public long getColumnNumber() {
return Diagnostic.NOPOS;
}
public String getCode() {
return key;
}
public String getMessage(Locale locale) {
return JavapTask.this.getMessage(locale, key, args);
}
@Override
public String toString() {
return getClass().getName() + "[key=" + key + ",args=" + Arrays.asList(args) + "]";
}
};
}
/**
 * Get a localized message for the task's locale.
 * @param key resource key
 * @param args MessageFormat arguments
 * @return the formatted message
 */
public String getMessage(String key, Object... args) {
return getMessage(task_locale, key, args);
}
/**
 * Get a localized message, caching the per-locale resource bundle.
 * @param locale target locale; null falls back to the default locale
 * @param key resource key
 * @param args MessageFormat arguments
 * @return the formatted message
 * @throws InternalError if the bundle or the key cannot be found
 */
public String getMessage(Locale locale, String key, Object... args) {
if (bundles == null) {
// could make this a HashMap<Locale,SoftReference<ResourceBundle>>
// and for efficiency, keep a hard reference to the bundle for the task
// locale
bundles = new HashMap<Locale, ResourceBundle>();
}
if (locale == null)
locale = Locale.getDefault();
ResourceBundle b = bundles.get(locale);
if (b == null) {
try {
b = ResourceBundle.getBundle("com.sun.tools.javap.resources.javap", locale);
bundles.put(locale, b);
} catch (MissingResourceException e) {
throw new InternalError("Cannot find javap resource bundle for locale " + locale);
}
}
try {
return MessageFormat.format(b.getString(key), args);
} catch (MissingResourceException e) {
throw new InternalError(e, key);
}
}
// Compiler-style context used to locate the shared writer instances.
protected Context context;
// File manager in use; either the caller's or the lazily created default.
JavaFileManager fileManager;
// Lazily created fallback file manager (see getDefaultFileManager).
JavaFileManager defaultFileManager;
// Destination for normal output.
PrintWriter log;
// Sink for errors, warnings and notes.
DiagnosticListener<? super JavaFileObject> diagnosticListener;
// Class names to be processed.
List<String> classes;
// Decoded command-line options.
Options options;
//ResourceBundle bundle;
// Locale used when localizing messages.
Locale task_locale;
// Per-locale cache of message resource bundles (see getMessage).
Map<Locale, ResourceBundle> bundles;
// Factory used when reading class file attributes.
protected Attribute.Factory attributeFactory;
// Tool name used in usage/diagnostic messages.
private static final String progname = "javap";
/**
 * A FilterInputStream that counts the number of bytes read through it,
 * so the size of a class file can be reported (see read()).
 */
private static class SizeInputStream extends FilterInputStream {
    SizeInputStream(InputStream in) {
        super(in);
    }

    /** @return the number of bytes read through this stream so far. */
    int size() {
        return size;
    }

    @Override
    public int read(byte[] buf, int offset, int length) throws IOException {
        int n = super.read(buf, offset, length);
        if (n > 0)
            size += n;
        return n;
    }

    @Override
    public int read() throws IOException {
        int b = super.read();
        // Bug fix: only count real bytes. The previous code incremented
        // unconditionally, so a read() at end of stream (return -1)
        // inflated the reported size by one.
        if (b != -1)
            size += 1;
        return b;
    }

    // Running count of bytes read.
    private int size;
}
}
|
apache/hadoop | 36,897 | hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/S3AStoreImpl.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.fs.s3a.impl;
import java.io.File;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.time.Duration;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.CancellationException;
import java.util.concurrent.CompletionException;
import javax.annotation.Nullable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import software.amazon.awssdk.awscore.exception.AwsServiceException;
import software.amazon.awssdk.core.ResponseInputStream;
import software.amazon.awssdk.core.exception.SdkException;
import software.amazon.awssdk.core.sync.RequestBody;
import software.amazon.awssdk.services.s3.S3AsyncClient;
import software.amazon.awssdk.services.s3.S3Client;
import software.amazon.awssdk.services.s3.model.CompleteMultipartUploadRequest;
import software.amazon.awssdk.services.s3.model.CompleteMultipartUploadResponse;
import software.amazon.awssdk.services.s3.model.DeleteObjectRequest;
import software.amazon.awssdk.services.s3.model.DeleteObjectResponse;
import software.amazon.awssdk.services.s3.model.DeleteObjectsRequest;
import software.amazon.awssdk.services.s3.model.DeleteObjectsResponse;
import software.amazon.awssdk.services.s3.model.GetObjectRequest;
import software.amazon.awssdk.services.s3.model.GetObjectResponse;
import software.amazon.awssdk.services.s3.model.HeadObjectRequest;
import software.amazon.awssdk.services.s3.model.HeadObjectResponse;
import software.amazon.awssdk.services.s3.model.ObjectIdentifier;
import software.amazon.awssdk.services.s3.model.PutObjectRequest;
import software.amazon.awssdk.services.s3.model.S3Error;
import software.amazon.awssdk.services.s3.model.UploadPartRequest;
import software.amazon.awssdk.services.s3.model.UploadPartResponse;
import software.amazon.awssdk.transfer.s3.S3TransferManager;
import software.amazon.awssdk.transfer.s3.model.CompletedFileUpload;
import software.amazon.awssdk.transfer.s3.model.FileUpload;
import software.amazon.awssdk.transfer.s3.model.UploadFileRequest;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocalDirAllocator;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.StreamCapabilities;
import org.apache.hadoop.fs.s3a.Invoker;
import org.apache.hadoop.fs.s3a.ProgressableProgressListener;
import org.apache.hadoop.fs.s3a.Retries;
import org.apache.hadoop.fs.s3a.S3AInstrumentation;
import org.apache.hadoop.fs.s3a.S3AStorageStatistics;
import org.apache.hadoop.fs.s3a.S3AStore;
import org.apache.hadoop.fs.s3a.S3AUtils;
import org.apache.hadoop.fs.s3a.Statistic;
import org.apache.hadoop.fs.s3a.UploadInfo;
import org.apache.hadoop.fs.s3a.api.RequestFactory;
import org.apache.hadoop.fs.s3a.audit.AuditSpanS3A;
import org.apache.hadoop.fs.s3a.impl.streams.FactoryBindingParameters;
import org.apache.hadoop.fs.s3a.impl.streams.InputStreamType;
import org.apache.hadoop.fs.s3a.impl.streams.ObjectInputStream;
import org.apache.hadoop.fs.s3a.impl.streams.ObjectInputStreamFactory;
import org.apache.hadoop.fs.s3a.impl.streams.ObjectReadParameters;
import org.apache.hadoop.fs.s3a.impl.streams.StreamFactoryRequirements;
import org.apache.hadoop.fs.s3a.statistics.S3AStatisticsContext;
import org.apache.hadoop.fs.statistics.DurationTracker;
import org.apache.hadoop.fs.statistics.DurationTrackerFactory;
import org.apache.hadoop.fs.statistics.IOStatistics;
import org.apache.hadoop.fs.store.audit.AuditSpanSource;
import org.apache.hadoop.service.CompositeService;
import org.apache.hadoop.util.DurationInfo;
import org.apache.hadoop.util.Preconditions;
import org.apache.hadoop.util.RateLimiting;
import org.apache.hadoop.util.functional.Tuples;
import static java.util.Objects.requireNonNull;
import static org.apache.hadoop.fs.s3a.Constants.BUFFER_DIR;
import static org.apache.hadoop.fs.s3a.Constants.HADOOP_TMP_DIR;
import static org.apache.hadoop.fs.s3a.S3AUtils.extractException;
import static org.apache.hadoop.fs.s3a.S3AUtils.getPutRequestLength;
import static org.apache.hadoop.fs.s3a.S3AUtils.isThrottleException;
import static org.apache.hadoop.fs.s3a.Statistic.ACTION_HTTP_HEAD_REQUEST;
import static org.apache.hadoop.fs.s3a.Statistic.IGNORED_ERRORS;
import static org.apache.hadoop.fs.s3a.Statistic.MULTIPART_UPLOAD_PART_PUT;
import static org.apache.hadoop.fs.s3a.Statistic.OBJECT_BULK_DELETE_REQUEST;
import static org.apache.hadoop.fs.s3a.Statistic.OBJECT_DELETE_OBJECTS;
import static org.apache.hadoop.fs.s3a.Statistic.OBJECT_DELETE_REQUEST;
import static org.apache.hadoop.fs.s3a.Statistic.OBJECT_METADATA_REQUESTS;
import static org.apache.hadoop.fs.s3a.Statistic.OBJECT_PUT_BYTES;
import static org.apache.hadoop.fs.s3a.Statistic.OBJECT_PUT_BYTES_PENDING;
import static org.apache.hadoop.fs.s3a.Statistic.OBJECT_PUT_REQUESTS_ACTIVE;
import static org.apache.hadoop.fs.s3a.Statistic.OBJECT_PUT_REQUESTS_COMPLETED;
import static org.apache.hadoop.fs.s3a.Statistic.STORE_IO_RATE_LIMITED;
import static org.apache.hadoop.fs.s3a.Statistic.STORE_IO_RETRY;
import static org.apache.hadoop.fs.s3a.Statistic.STORE_IO_THROTTLED;
import static org.apache.hadoop.fs.s3a.Statistic.STORE_IO_THROTTLE_RATE;
import static org.apache.hadoop.fs.s3a.impl.ErrorTranslation.isObjectNotFound;
import static org.apache.hadoop.fs.s3a.impl.InternalConstants.DELETE_CONSIDERED_IDEMPOTENT;
import static org.apache.hadoop.fs.s3a.impl.streams.StreamIntegration.factoryFromConfig;
import static org.apache.hadoop.fs.statistics.StoreStatisticNames.ACTION_HTTP_GET_REQUEST;
import static org.apache.hadoop.fs.statistics.impl.IOStatisticsBinding.trackDurationOfOperation;
import static org.apache.hadoop.fs.statistics.impl.IOStatisticsBinding.trackDurationOfSupplier;
import static org.apache.hadoop.util.Preconditions.checkArgument;
import static org.apache.hadoop.util.StringUtils.toLowerCase;
/**
* Store Layer.
* This is where lower level storage operations are intended
* to move.
*/
public class S3AStoreImpl
extends CompositeService
implements S3AStore, ObjectInputStreamFactory {
private static final Logger LOG = LoggerFactory.getLogger(S3AStoreImpl.class);
/**
 * Progress logger; fairly noisy.
 */
private static final Logger PROGRESS =
LoggerFactory.getLogger(InternalConstants.UPLOAD_PROGRESS_LOG_NAME);
/** Factory to create store contexts. */
private final StoreContextFactory storeContextFactory;
/** Source of the S3 clients. */
private final ClientManager clientManager;
/** The S3 bucket to communicate with. */
private final String bucket;
/** Request factory for creating requests. */
private final RequestFactory requestFactory;
/** Duration tracker factory. */
private final DurationTrackerFactory durationTrackerFactory;
/** The core instrumentation. */
private final S3AInstrumentation instrumentation;
/** Accessors to statistics for this FS. */
private final S3AStatisticsContext statisticsContext;
/** Storage Statistics Bonded to the instrumentation. */
private final S3AStorageStatistics storageStatistics;
/** Rate limiter for read operations. */
private final RateLimiting readRateLimiter;
/** Rate limiter for write operations. */
private final RateLimiting writeRateLimiter;
/** Store context. */
private final StoreContext storeContext;
/** Invoker for retry operations. */
private final Invoker invoker;
/** Audit span source. */
private final AuditSpanSource<AuditSpanS3A> auditSpanSource;
/**
 * The original file system statistics: fairly minimal but broadly
 * collected so it is important to pick up.
 * This may be null.
 */
private final FileSystem.Statistics fsStatistics;
/**
 * Allocator of local FS storage.
 * Created in serviceStart(), so null until the service has started.
 */
private LocalDirAllocator directoryAllocator;
/**
 * Factory for input streams.
 * Created and registered as a child service in serviceInit().
 */
private ObjectInputStreamFactory objectInputStreamFactory;
/**
 * Constructor to create S3A store.
 * Package private, as {@link S3AStoreBuilder} creates them.
 * All parameters other than {@code fsStatistics} must be non-null.
 * @param storeContextFactory factory for store contexts
 * @param clientManager source of S3 clients; added as a child service
 * @param durationTrackerFactory duration tracker factory
 * @param instrumentation the core instrumentation
 * @param statisticsContext accessors to statistics
 * @param storageStatistics storage statistics
 * @param readRateLimiter rate limiter for read operations
 * @param writeRateLimiter rate limiter for write operations
 * @param auditSpanSource source of audit spans
 * @param fsStatistics filesystem statistics; may be null
 * */
S3AStoreImpl(StoreContextFactory storeContextFactory,
ClientManager clientManager,
DurationTrackerFactory durationTrackerFactory,
S3AInstrumentation instrumentation,
S3AStatisticsContext statisticsContext,
S3AStorageStatistics storageStatistics,
RateLimiting readRateLimiter,
RateLimiting writeRateLimiter,
AuditSpanSource<AuditSpanS3A> auditSpanSource,
@Nullable FileSystem.Statistics fsStatistics) {
super("S3AStore");
this.auditSpanSource = requireNonNull(auditSpanSource);
this.clientManager = requireNonNull(clientManager);
this.durationTrackerFactory = requireNonNull(durationTrackerFactory);
this.fsStatistics = fsStatistics;
this.instrumentation = requireNonNull(instrumentation);
this.statisticsContext = requireNonNull(statisticsContext);
this.storeContextFactory = requireNonNull(storeContextFactory);
this.storageStatistics = requireNonNull(storageStatistics);
this.readRateLimiter = requireNonNull(readRateLimiter);
this.writeRateLimiter = requireNonNull(writeRateLimiter);
// the store context is created eagerly; fields derived from it
// (invoker, bucket, request factory) are cached for direct use.
this.storeContext = requireNonNull(storeContextFactory.createStoreContext());
this.invoker = requireNonNull(storeContext.getInvoker());
this.bucket = requireNonNull(storeContext.getBucket());
this.requestFactory = requireNonNull(storeContext.getRequestFactory());
// register the client manager so it follows this service's lifecycle.
addService(clientManager);
}
/**
 * Create and initialize any subsidiary services, including the input stream factory.
 * @param conf configuration
 */
@Override
protected void serviceInit(final Configuration conf) throws Exception {
// create and register the stream factory, which will
// then follow the service lifecycle
objectInputStreamFactory = factoryFromConfig(conf);
addService(objectInputStreamFactory);
// init all child services, including the stream factory
super.serviceInit(conf);
// pass down extra information to the stream factory.
finishStreamFactoryInit();
}
/**
 * Start child services, then create the local directory allocator
 * (which reads the service configuration, available by this point).
 */
@Override
protected void serviceStart() throws Exception {
super.serviceStart();
initLocalDirAllocator();
}
/**
 * Return the store path capabilities.
 * IOStatistics support is always declared; anything else is handed off
 * to the input stream factory.
 * @param path path to query the capability of.
 * @param capability non-null, non-empty string to query the path for support.
 * @return true iff the capability is supported.
 */
@Override
public boolean hasPathCapability(final Path path, final String capability) {
    if (StreamCapabilities.IOSTATISTICS.equals(toLowerCase(capability))) {
        return true;
    }
    // delegate with the original (not lower-cased) capability string,
    // matching the previous behavior.
    return inputStreamHasCapability(capability);
}
/**
 * Return the capabilities of input streams created through the store.
 * @param capability string to query the stream support for.
 * @return true iff a stream factory exists and declares the capability.
 */
@Override
public boolean inputStreamHasCapability(final String capability) {
    final ObjectInputStreamFactory factory = objectInputStreamFactory;
    return factory != null && factory.hasCapability(capability);
}
/**
 * Initialize the local directory allocator, preferring the configured
 * buffer directory and falling back to the Hadoop temp directory when
 * the buffer directory option is unset or blank.
 */
private void initLocalDirAllocator() {
    final String configured = getConfig().getTrimmed(BUFFER_DIR);
    final String key = StringUtils.isEmpty(configured)
        ? HADOOP_TMP_DIR
        : BUFFER_DIR;
    directoryAllocator = new LocalDirAllocator(key);
}
/** Acquire write capacity for rate limiting {@inheritDoc}. */
@Override
public Duration acquireWriteCapacity(final int capacity) {
// returns the time spent (if any) acquiring the capacity; callers
// record this under STORE_IO_RATE_LIMITED (see deleteObjects).
return writeRateLimiter.acquire(capacity);
}
/** Acquire read capacity for rate limiting {@inheritDoc}. */
@Override
public Duration acquireReadCapacity(final int capacity) {
// returns the time spent (if any) acquiring the capacity.
return readRateLimiter.acquire(capacity);
}
/**
 * Create a new store context.
 * @return a new store context.
 */
private StoreContext createStoreContext() {
return storeContextFactory.createStoreContext();
}
/** @return the store context created at construction time. */
@Override
public StoreContext getStoreContext() {
return storeContext;
}
/**
 * Get the S3 client.
 * @return the S3 client.
 * @throws UncheckedIOException on any failure to create the client.
 */
private S3Client getS3Client() throws UncheckedIOException {
return clientManager.getOrCreateS3ClientUnchecked();
}
/** @return the transfer manager from the client manager. */
@Override
public S3TransferManager getOrCreateTransferManager() throws IOException {
return clientManager.getOrCreateTransferManager();
}
/** @return the synchronous S3 client. */
@Override
public S3Client getOrCreateS3Client() throws IOException {
return clientManager.getOrCreateS3Client();
}
/** @return the asynchronous S3 client. */
@Override
public S3AsyncClient getOrCreateAsyncClient() throws IOException {
return clientManager.getOrCreateAsyncClient();
}
/** @return the synchronous S3 client; creation failures surface unchecked. */
@Override
public S3Client getOrCreateS3ClientUnchecked() throws UncheckedIOException {
return clientManager.getOrCreateS3ClientUnchecked();
}
/**
 * NOTE(review): declared to return {@code S3Client} although it delegates
 * to the async client accessor -- confirm against the S3AStore interface
 * and the ClientManager signature.
 */
@Override
public S3Client getOrCreateAsyncS3ClientUnchecked() throws UncheckedIOException {
return clientManager.getOrCreateAsyncS3ClientUnchecked();
}
/** @return presumably a client without encryption settings applied -- see ClientManager. */
@Override
public S3Client getOrCreateUnencryptedS3Client() throws IOException {
return clientManager.getOrCreateUnencryptedS3Client();
}
/** @return the duration tracker factory of this store. */
@Override
public DurationTrackerFactory getDurationTrackerFactory() {
return durationTrackerFactory;
}
/** @return the core instrumentation. */
private S3AInstrumentation getInstrumentation() {
return instrumentation;
}
/** @return the statistics context. */
@Override
public S3AStatisticsContext getStatisticsContext() {
return statisticsContext;
}
/** @return the storage statistics bonded to the instrumentation. */
private S3AStorageStatistics getStorageStatistics() {
return storageStatistics;
}
/** @return the request factory cached from the store context. */
@Override
public RequestFactory getRequestFactory() {
return requestFactory;
}
/**
 * Get the client manager.
 * @return the client manager.
 */
@Override
public ClientManager clientManager() {
return clientManager;
}
/**
 * Increment a statistic by 1.
 * This increments both the instrumentation and storage statistics.
 * @param statistic The operation to increment
 */
protected void incrementStatistic(Statistic statistic) {
incrementStatistic(statistic, 1);
}
/**
 * Increment a statistic by a specific value.
 * This increments both the instrumentation and storage statistics.
 * NOTE(review): the code delegates only to the statistics context; the
 * "both" claim presumably relies on the context's wiring -- confirm.
 * @param statistic The operation to increment
 * @param count the count to increment
 */
protected void incrementStatistic(Statistic statistic, long count) {
statisticsContext.incrementCounter(statistic, count);
}
/**
 * Decrement a gauge by a specific value.
 * @param statistic The operation to decrement
 * @param count the count to decrement
 */
protected void decrementGauge(Statistic statistic, long count) {
statisticsContext.decrementGauge(statistic, count);
}
/**
 * Increment a gauge by a specific value.
 * @param statistic The operation to increment
 * @param count the count to increment
 */
protected void incrementGauge(Statistic statistic, long count) {
statisticsContext.incrementGauge(statistic, count);
}
/**
 * Callback when an operation was retried.
 * Increments the statistics of ignored errors or throttled requests,
 * depending up on the exception class.
 * @param ex exception.
 */
public void operationRetried(Exception ex) {
if (isThrottleException(ex)) {
LOG.debug("Request throttled");
incrementStatistic(STORE_IO_THROTTLED);
statisticsContext.addValueToQuantiles(STORE_IO_THROTTLE_RATE, 1);
} else {
// non-throttle failures are counted as retried+ignored.
incrementStatistic(STORE_IO_RETRY);
incrementStatistic(IGNORED_ERRORS);
}
}
/**
 * Callback from {@link Invoker} when an operation is retried.
 * Only the exception is relevant; the other arguments are ignored.
 * @param text text of the operation
 * @param ex exception
 * @param retries number of retries
 * @param idempotent is the method idempotent
 */
public void operationRetried(String text, Exception ex, int retries, boolean idempotent) {
operationRetried(ex);
}
/**
 * Get the instrumentation's IOStatistics.
 * @return statistics
 */
@Override
public IOStatistics getIOStatistics() {
return instrumentation.getIOStatistics();
}
/**
 * Increment read operations.
 * No-op when filesystem statistics were not supplied at construction.
 */
@Override
public void incrementReadOperations() {
if (fsStatistics != null) {
fsStatistics.incrementReadOps(1);
}
}
/**
 * Increment the write operation counter.
 * This is somewhat inaccurate, as it appears to be invoked more
 * often than needed in progress callbacks.
 */
@Override
public void incrementWriteOperations() {
if (fsStatistics != null) {
fsStatistics.incrementWriteOps(1);
}
}
/**
 * Increment the bytes written statistic.
 * No-op when filesystem statistics were not supplied at construction.
 * @param bytes number of bytes written.
 */
private void incrementBytesWritten(final long bytes) {
if (fsStatistics != null) {
fsStatistics.incrementBytesWritten(bytes);
}
}
/**
 * At the start of a put/multipart upload operation, update the
 * relevant counters.
 *
 * @param bytes bytes in the request.
 */
@Override
public void incrementPutStartStatistics(long bytes) {
LOG.debug("PUT start {} bytes", bytes);
incrementWriteOperations();
incrementGauge(OBJECT_PUT_REQUESTS_ACTIVE, 1);
if (bytes > 0) {
incrementGauge(OBJECT_PUT_BYTES_PENDING, bytes);
}
}
/**
 * At the end of a put/multipart upload operation, update the
 * relevant counters and gauges; reverses the gauge increments made by
 * {@link #incrementPutStartStatistics(long)}.
 *
 * @param success did the operation succeed?
 * @param bytes bytes in the request.
 */
@Override
public void incrementPutCompletedStatistics(boolean success, long bytes) {
LOG.debug("PUT completed success={}; {} bytes", success, bytes);
if (bytes > 0) {
incrementStatistic(OBJECT_PUT_BYTES, bytes);
decrementGauge(OBJECT_PUT_BYTES_PENDING, bytes);
}
incrementStatistic(OBJECT_PUT_REQUESTS_COMPLETED);
decrementGauge(OBJECT_PUT_REQUESTS_ACTIVE, 1);
}
/**
 * Callback for use in progress callbacks from put/multipart upload events.
 * Increments those statistics which are expected to be updated during
 * the ongoing upload operation.
 * @param key key to file that is being written (for logging)
 * @param bytes bytes successfully uploaded.
 */
@Override
public void incrementPutProgressStatistics(String key, long bytes) {
PROGRESS.debug("PUT {}: {} bytes", key, bytes);
incrementWriteOperations();
if (bytes > 0) {
incrementBytesWritten(bytes);
}
}
/**
 * Given a possibly null duration tracker factory, return a non-null
 * one for use in tracking durations -- either the supplied one or the
 * store's own factory.
 *
 * @param factory factory, possibly null.
 * @return a non-null factory.
 */
@Override
public DurationTrackerFactory nonNullDurationTrackerFactory(
    DurationTrackerFactory factory) {
    if (factory != null) {
        return factory;
    }
    return getDurationTrackerFactory();
}
/**
 * Start an operation; this informs the audit service of the event
 * and then sets it as the active span.
 * @param operation operation name.
 * @param path1 first path of operation (may be null)
 * @param path2 second path of operation (may be null)
 * @return a span for the audit
 * @throws IOException failure
 */
public AuditSpanS3A createSpan(String operation, @Nullable String path1, @Nullable String path2)
throws IOException {
return auditSpanSource.createSpan(operation, path1, path2);
}
/**
 * Reject any request to delete an object where the key is root
 * (empty key or "/").
 * @param key key to validate
 * @throws IllegalArgumentException if the request was rejected due to
 * a mistaken attempt to delete the root directory.
 */
private void blockRootDelete(String key) throws IllegalArgumentException {
    final boolean isRootKey = key.isEmpty() || "/".equals(key);
    checkArgument(!isRootKey, "Bucket %s cannot be deleted", bucket);
}
/**
 * {@inheritDoc}.
 * <p>
 * Bulk delete: rate limited by one unit of write capacity per key and
 * retried untranslated via the bulk delete retry handler. Partial
 * failures are logged at debug but NOT thrown here -- the returned
 * response carries the per-key errors.
 */
@Override
@Retries.RetryRaw
public Map.Entry<Duration, DeleteObjectsResponse> deleteObjects(
final DeleteObjectsRequest deleteRequest)
throws SdkException {
DeleteObjectsResponse response;
BulkDeleteRetryHandler retryHandler = new BulkDeleteRetryHandler(createStoreContext());
final List<ObjectIdentifier> keysToDelete = deleteRequest.delete().objects();
int keyCount = keysToDelete.size();
if (LOG.isDebugEnabled()) {
LOG.debug("Initiating delete operation for {} objects", keysToDelete.size());
keysToDelete.stream().forEach(objectIdentifier -> {
LOG.debug(" \"{}\" {}", objectIdentifier.key(),
objectIdentifier.versionId() != null ? objectIdentifier.versionId() : "");
});
}
// block root calls
keysToDelete.stream().map(ObjectIdentifier::key).forEach(this::blockRootDelete);
try (DurationInfo d = new DurationInfo(LOG, false, "DELETE %d keys", keyCount)) {
response =
invoker.retryUntranslated("delete",
DELETE_CONSIDERED_IDEMPOTENT, (text, e, r, i) -> {
// handle the failure
retryHandler.bulkDeleteRetried(deleteRequest, e);
},
// duration is tracked in the bulk delete counters
trackDurationOfOperation(getDurationTrackerFactory(),
OBJECT_BULK_DELETE_REQUEST.getSymbol(), () -> {
// acquire the write capacity for the number of keys to delete
// and record the duration.
Duration durationToAcquireWriteCapacity = acquireWriteCapacity(keyCount);
instrumentation.recordDuration(STORE_IO_RATE_LIMITED,
true,
durationToAcquireWriteCapacity);
incrementStatistic(OBJECT_DELETE_OBJECTS, keyCount);
return getS3Client().deleteObjects(deleteRequest);
}));
if (!response.errors().isEmpty()) {
// one or more of the keys could not be deleted.
// log and then throw
// NOTE(review): stale comment -- nothing is thrown here; callers
// must inspect response.errors(). Confirm intended.
List<S3Error> errors = response.errors();
if (LOG.isDebugEnabled()) {
LOG.debug("Partial failure of delete, {} errors", errors.size());
for (S3Error error : errors) {
LOG.debug("{}: \"{}\" - {}", error.key(), error.code(), error.message());
}
}
}
// close the duration explicitly before reading it.
d.close();
return Tuples.pair(d.asDuration(), response);
} catch (IOException e) {
// convert to unchecked.
throw new UncheckedIOException(e);
}
}
/**
 * Performs a HEAD request on an S3 object to retrieve its metadata.
 *
 * @param key The S3 object key to perform the HEAD operation on
 * @param changeTracker Tracks changes to the object's metadata across operations; may be null
 * @param changeInvoker The invoker responsible for executing the HEAD request with retries
 * @param fsHandler Handler for filesystem-level operations and configurations; may be null
 * @param operation Description of the operation being performed for tracking purposes
 * @return HeadObjectResponse containing the object's metadata
 * @throws IOException If the HEAD request fails, object doesn't exist, or other I/O errors occur
 */
@Override
@Retries.RetryRaw
public HeadObjectResponse headObject(String key,
ChangeTracker changeTracker,
Invoker changeInvoker,
S3AFileSystemOperations fsHandler,
String operation) throws IOException {
// NOTE(review): the changeInvoker parameter is unused; the store
// context's invoker is used instead -- confirm intended.
HeadObjectResponse response = getStoreContext().getInvoker()
.retryUntranslated("HEAD " + key, true,
() -> {
HeadObjectRequest.Builder requestBuilder =
getRequestFactory().newHeadObjectRequestBuilder(key);
incrementStatistic(OBJECT_METADATA_REQUESTS);
DurationTracker duration =
getDurationTrackerFactory().trackDuration(ACTION_HTTP_HEAD_REQUEST.getSymbol());
try {
LOG.debug("HEAD {} with change tracker {}", key, changeTracker);
if (changeTracker != null) {
// add etag/version constraints before building the request.
changeTracker.maybeApplyConstraint(requestBuilder);
}
HeadObjectResponse headObjectResponse =
getS3Client().headObject(requestBuilder.build());
if (fsHandler != null) {
long length =
fsHandler.getS3ObjectSize(key, headObjectResponse.contentLength(), this,
headObjectResponse);
// overwrite the content length
headObjectResponse = headObjectResponse.toBuilder().contentLength(length).build();
}
if (changeTracker != null) {
changeTracker.processMetadata(headObjectResponse, operation);
}
return headObjectResponse;
} catch (AwsServiceException ase) {
if (!isObjectNotFound(ase)) {
// file not found is not considered a failure of the call,
// so only switch the duration tracker to update failure
// metrics on other exception outcomes.
duration.failed();
}
throw ase;
} finally {
// update the tracker.
duration.close();
}
});
incrementReadOperations();
return response;
}
/**
 * Retrieves a specific byte range of an S3 object as a stream.
 *
 * @param key The S3 object key to retrieve
 * @param start The starting byte position (inclusive) of the range to retrieve
 * @param end The ending byte position (inclusive) of the range to retrieve
 * @return A ResponseInputStream containing the requested byte range of the S3 object
 * @throws IOException If the object cannot be retrieved other I/O errors occur
 * @see GetObjectResponse For additional metadata about the retrieved object
 */
@Override
@Retries.RetryRaw
public ResponseInputStream<GetObjectResponse> getRangedS3Object(String key,
long start,
long end) throws IOException {
final GetObjectRequest request = getRequestFactory().newGetObjectRequestBuilder(key)
.range(S3AUtils.formatRange(start, end))
.build();
// this uses the statically imported string statistic name, not a
// Statistic enum symbol.
DurationTracker duration = getDurationTrackerFactory()
.trackDuration(ACTION_HTTP_GET_REQUEST);
ResponseInputStream<GetObjectResponse> objectRange;
try {
objectRange = getStoreContext().getInvoker()
.retryUntranslated("GET Ranged Object " + key, true,
() -> getS3Client().getObject(request));
} catch (IOException ex) {
// mark the tracker failed before closing it in the finally block.
duration.failed();
throw ex;
} finally {
duration.close();
}
return objectRange;
}
/**
 * {@inheritDoc}.
 * <p>
 * Single object delete: rate limited by one unit of write capacity and
 * retried untranslated. A "not found" service exception is swallowed and
 * reported as an empty Optional (third-party stores such as GCS can
 * raise it).
 */
@Override
@Retries.RetryRaw
public Map.Entry<Duration, Optional<DeleteObjectResponse>> deleteObject(
final DeleteObjectRequest request)
throws SdkException {
String key = request.key();
blockRootDelete(key);
DurationInfo d = new DurationInfo(LOG, false, "deleting %s", key);
try {
DeleteObjectResponse response =
invoker.retryUntranslated(String.format("Delete %s:/%s", bucket, key),
DELETE_CONSIDERED_IDEMPOTENT,
trackDurationOfOperation(getDurationTrackerFactory(),
OBJECT_DELETE_REQUEST.getSymbol(), () -> {
incrementStatistic(OBJECT_DELETE_OBJECTS);
// We try to acquire write capacity just before delete call.
Duration durationToAcquireWriteCapacity = acquireWriteCapacity(1);
instrumentation.recordDuration(STORE_IO_RATE_LIMITED,
true, durationToAcquireWriteCapacity);
return getS3Client().deleteObject(request);
}));
d.close();
return Tuples.pair(d.asDuration(), Optional.of(response));
} catch (AwsServiceException ase) {
// 404 errors get swallowed; this can be raised by
// third party stores (GCS).
if (!isObjectNotFound(ase)) {
throw ase;
}
d.close();
return Tuples.pair(d.asDuration(), Optional.empty());
} catch (IOException e) {
// convert to unchecked.
throw new UncheckedIOException(e);
}
}
/**
* Upload part of a multi-partition file.
* Increments the write and put counters.
* <i>Important: this call does not close any input stream in the body.</i>
* <p>
* Retry Policy: none.
* @param trackerFactory duration tracker factory for operation
* @param request the upload part request.
* @param body the request body.
* @return the result of the operation.
* @throws AwsServiceException on problems
* @throws UncheckedIOException failure to instantiate the s3 client
*/
@Override
@Retries.OnceRaw
public UploadPartResponse uploadPart(
final UploadPartRequest request,
final RequestBody body,
@Nullable final DurationTrackerFactory trackerFactory)
throws AwsServiceException, UncheckedIOException {
long len = request.contentLength();
incrementPutStartStatistics(len);
try {
UploadPartResponse uploadPartResponse = trackDurationOfSupplier(
nonNullDurationTrackerFactory(trackerFactory),
MULTIPART_UPLOAD_PART_PUT.getSymbol(), () ->
getS3Client().uploadPart(request, body));
incrementPutCompletedStatistics(true, len);
return uploadPartResponse;
} catch (AwsServiceException e) {
incrementPutCompletedStatistics(false, len);
throw e;
}
}
/**
* Start a transfer-manager managed async PUT of an object,
* incrementing the put requests and put bytes
* counters.
* <p>
* It does not update the other counters,
* as existing code does that as progress callbacks come in.
* Byte length is calculated from the file length, or, if there is no
* file, from the content length of the header.
* <p>
* Because the operation is async, any stream supplied in the request
* must reference data (files, buffers) which stay valid until the upload
* completes.
* Retry policy: N/A: the transfer manager is performing the upload.
* Auditing: must be inside an audit span.
* @param putObjectRequest the request
* @param file the file to be uploaded
* @param listener the progress listener for the request
* @return the upload initiated
* @throws IOException if transfer manager creation failed.
*/
@Override
@Retries.OnceRaw
public UploadInfo putObject(
PutObjectRequest putObjectRequest,
File file,
ProgressableProgressListener listener) throws IOException {
long len = getPutRequestLength(putObjectRequest);
LOG.debug("PUT {} bytes to {} via transfer manager ", len, putObjectRequest.key());
incrementPutStartStatistics(len);
FileUpload upload = getOrCreateTransferManager().uploadFile(
UploadFileRequest.builder()
.putObjectRequest(putObjectRequest)
.source(file)
.addTransferListener(listener)
.build());
return new UploadInfo(upload, len);
}
  /**
   * Wait for an upload to complete.
   * If the upload (or its result collection) failed, this is where
   * the failure is raised as an AWS exception.
   * Calls {@link S3AStore#incrementPutCompletedStatistics(boolean, long)}
   * to update the statistics.
   * @param key destination key
   * @param uploadInfo upload to wait for
   * @return the upload result
   * @throws IOException IO failure
   * @throws CancellationException if the wait() was cancelled
   */
  @Override
  @Retries.OnceTranslated
  public CompletedFileUpload waitForUploadCompletion(String key, UploadInfo uploadInfo)
      throws IOException {
    FileUpload upload = uploadInfo.getFileUpload();
    try {
      // block until the async upload finishes; join() wraps any failure
      // in a CompletionException
      CompletedFileUpload result = upload.completionFuture().join();
      incrementPutCompletedStatistics(true, uploadInfo.getLength());
      return result;
    } catch (CompletionException e) {
      // NOTE(review): this message is logged for ANY completion failure,
      // not only interruption — confirm before rewording.
      LOG.info("Interrupted: aborting upload");
      incrementPutCompletedStatistics(false, uploadInfo.getLength());
      throw extractException("upload", key, e);
    }
  }
  /**
   * Complete a multipart upload.
   * Pure delegation to the S3 client; no retries are attempted here
   * ({@code OnceRaw}), callers own the retry policy.
   * @param request request
   * @return the response
   */
  @Override
  @Retries.OnceRaw
  public CompleteMultipartUploadResponse completeMultipartUpload(
      CompleteMultipartUploadRequest request) {
    return getS3Client().completeMultipartUpload(request);
  }
  /**
   * Get the directory allocator.
   * @return the directory allocator
   */
  @Override
  public LocalDirAllocator getDirectoryAllocator() {
    return directoryAllocator;
  }
/**
* Demand create the directory allocator, then create a temporary file.
* This does not mark the file for deletion when a process exits.
* Pass in a file size of {@link LocalDirAllocator#SIZE_UNKNOWN} if the
* size is unknown.
* {@link LocalDirAllocator#createTmpFileForWrite(String, long, Configuration)}.
* @param pathStr prefix for the temporary file
* @param size the size of the file that is going to be written
* @param conf the Configuration object
* @return a unique temporary file
* @throws IOException IO problems
*/
@Override
public File createTemporaryFileForWriting(String pathStr,
long size,
Configuration conf) throws IOException {
requireNonNull(directoryAllocator, "directory allocator not initialized");
Path path = directoryAllocator.getLocalPathForWrite(pathStr,
size, conf);
File dir = new File(path.getParent().toUri().getPath());
String prefix = path.getName();
// create a temp file on this directory
return File.createTempFile(prefix, null, dir);
}
/*
=============== BEGIN ObjectInputStreamFactory ===============
*/
  /**
   * All stream factory initialization required after {@code Service.init()},
   * after all other services have themselves been initialized.
   * Binds the factory to callbacks implemented by this store.
   * @throws IOException if the factory bind fails
   */
  private void finishStreamFactoryInit() throws IOException {
    // must only be invoked during service initialization
    Preconditions.checkState(isInState(STATE.INITED),
        "Store is in wrong state: %s", getServiceState());
    Preconditions.checkState(clientManager.isInState(STATE.INITED),
        "Client Manager is in wrong state: %s", clientManager.getServiceState());
    // finish initialization and pass down callbacks to self
    objectInputStreamFactory.bind(new FactoryBindingParameters(new FactoryCallbacks()));
  }
  /**
   * Open an input stream to an object via the configured stream factory,
   * first injecting this store's directory allocator into the parameters.
   * @param parameters read parameters; validated before use
   * @return the opened stream
   * @throws IOException on factory failure
   */
  @Override /* ObjectInputStreamFactory */
  public ObjectInputStream readObject(ObjectReadParameters parameters)
      throws IOException {
    parameters.withDirectoryAllocator(getDirectoryAllocator());
    return objectInputStreamFactory.readObject(parameters.validate());
  }
  /** Delegate: requirements declared by the active stream factory. */
  @Override /* ObjectInputStreamFactory */
  public StreamFactoryRequirements factoryRequirements() {
    return objectInputStreamFactory.factoryRequirements();
  }
  /**
   * Not supported on this class: binding is performed internally by
   * {@code finishStreamFactoryInit()}, which binds the actual factory.
   * @param factoryBindingParameters ignored
   * @throws UnsupportedOperationException always
   */
  @Override /* ObjectInputStreamFactory */
  public void bind(final FactoryBindingParameters factoryBindingParameters) {
    throw new UnsupportedOperationException("Not supported");
  }
  /** Delegate: stream type of the active stream factory. */
  @Override /* ObjectInputStreamFactory */
  public InputStreamType streamType() {
    return objectInputStreamFactory.streamType();
  }
  /**
   * Callbacks from {@link ObjectInputStreamFactory} instances.
   * Bridges factory requests back to this store: client creation is
   * delegated to the client manager, statistics to the store's counters.
   */
  private class FactoryCallbacks implements StreamFactoryCallbacks {
    /** Demand-create (or reuse) the synchronous S3 client. */
    @Override
    public S3Client getOrCreateSyncClient() throws IOException {
      LOG.debug("Stream factory requested sync client");
      return clientManager().getOrCreateS3Client();
    }
    /** Record a factory-side event against this store's statistics. */
    @Override
    public void incrementFactoryStatistic(Statistic statistic) {
      incrementStatistic(statistic);
    }
  }
/*
=============== END ObjectInputStreamFactory ===============
*/
}
|
apache/kafka | 36,798 | streams/src/test/java/org/apache/kafka/streams/processor/internals/StreamsProducerTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.streams.processor.internals;
import org.apache.kafka.clients.consumer.ConsumerGroupMetadata;
import org.apache.kafka.clients.consumer.OffsetAndMetadata;
import org.apache.kafka.clients.producer.MockProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.Cluster;
import org.apache.kafka.common.KafkaException;
import org.apache.kafka.common.Metric;
import org.apache.kafka.common.MetricName;
import org.apache.kafka.common.Node;
import org.apache.kafka.common.PartitionInfo;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.errors.InvalidPidMappingException;
import org.apache.kafka.common.errors.InvalidProducerEpochException;
import org.apache.kafka.common.errors.ProducerFencedException;
import org.apache.kafka.common.errors.TimeoutException;
import org.apache.kafka.common.errors.UnknownProducerIdException;
import org.apache.kafka.common.header.internals.RecordHeaders;
import org.apache.kafka.common.serialization.ByteArraySerializer;
import org.apache.kafka.common.utils.LogContext;
import org.apache.kafka.common.utils.Time;
import org.apache.kafka.streams.errors.StreamsException;
import org.apache.kafka.streams.errors.TaskMigratedException;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.junit.jupiter.MockitoExtension;
import org.mockito.junit.jupiter.MockitoSettings;
import org.mockito.quality.Strictness;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static org.apache.kafka.common.utils.Utils.mkEntry;
import static org.apache.kafka.common.utils.Utils.mkMap;
import static org.apache.kafka.streams.internals.StreamsConfigUtils.ProcessingMode.AT_LEAST_ONCE;
import static org.apache.kafka.streams.internals.StreamsConfigUtils.ProcessingMode.EXACTLY_ONCE_V2;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.closeTo;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.sameInstance;
import static org.junit.jupiter.api.Assertions.assertSame;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
@ExtendWith(MockitoExtension.class)
@MockitoSettings(strictness = Strictness.STRICT_STUBS)
public class StreamsProducerTest {
    // Distinct expected metric values — presumably compared against producer
    // metrics by tests later in this file; each gets a unique value so a
    // mix-up between metrics is detectable. TODO confirm against usages.
    private static final double BUFFER_POOL_WAIT_TIME = 1;
    // NOTE(review): "FLUSH_TME" looks like a typo for FLUSH_TIME; confirm
    // usages later in the file before renaming.
    private static final double FLUSH_TME = 2;
    private static final double TXN_INIT_TIME = 3;
    private static final double TXN_BEGIN_TIME = 4;
    private static final double TXN_SEND_OFFSETS_TIME = 5;
    private static final double TXN_COMMIT_TIME = 6;
    private static final double TXN_ABORT_TIME = 7;
    private static final double METADATA_WAIT_TIME = 8;
    // "test " prefix shows up in expected exception messages as "[test]"
    private final LogContext logContext = new LogContext("test ");
    private final String topic = "topic";
    // single-node, single-partition cluster backing the MockProducers
    private final Cluster cluster = new Cluster(
        "cluster",
        Collections.singletonList(Node.noNode()),
        Collections.singletonList(new PartitionInfo(topic, 0, Node.noNode(), new Node[0], new Node[0])),
        Collections.emptySet(),
        Collections.emptySet()
    );
    private final Time mockTime = mock(Time.class);
    // Mockito-mocked producer for interaction-verification tests
    @SuppressWarnings("unchecked")
    final Producer<byte[], byte[]> mockedProducer = mock(Producer.class);
    private final StreamsProducer streamsProducerWithMock = new StreamsProducer(
        mockedProducer,
        AT_LEAST_ONCE,
        mockTime,
        logContext
    );
    private final StreamsProducer eosStreamsProducerWithMock = new StreamsProducer(
        mockedProducer,
        EXACTLY_ONCE_V2,
        mockTime,
        logContext
    );
    // MockProducers (with auto-complete) for state-inspection tests
    private final MockProducer<byte[], byte[]> nonEosMockProducer
        = new MockProducer<>(cluster, true, new org.apache.kafka.clients.producer.RoundRobinPartitioner(), new ByteArraySerializer(), new ByteArraySerializer());
    private final MockProducer<byte[], byte[]> eosMockProducer
        = new MockProducer<>(cluster, true, new org.apache.kafka.clients.producer.RoundRobinPartitioner(), new ByteArraySerializer(), new ByteArraySerializer());
    private StreamsProducer nonEosStreamsProducer;
    private StreamsProducer eosStreamsProducer;
    private final ProducerRecord<byte[], byte[]> record =
        new ProducerRecord<>(topic, 0, 0L, new byte[0], new byte[0], new RecordHeaders());
    private final Map<TopicPartition, OffsetAndMetadata> offsetsAndMetadata = mkMap(
        mkEntry(new TopicPartition(topic, 0), new OffsetAndMetadata(0L, null))
    );
    @BeforeEach
    public void before() {
        // Fresh StreamsProducers per test so MockProducer state (history,
        // open transactions) never leaks between tests.
        nonEosStreamsProducer =
            new StreamsProducer(
                nonEosMockProducer,
                AT_LEAST_ONCE,
                mockTime,
                logContext
            );
        eosStreamsProducer =
            new StreamsProducer(
                eosMockProducer,
                EXACTLY_ONCE_V2,
                mockTime,
                logContext
            );
        // EOS producer is usable immediately: transactions initialized here
        eosStreamsProducer.initTransaction();
        // default stubbing: the mocked clock reports the real system time
        when(mockTime.nanoseconds()).thenReturn(Time.SYSTEM.nanoseconds());
    }
// common tests (non-EOS and EOS)
// functional tests
    // Closing the producer must clear the transaction-in-flight flag so the
    // wrapper does not believe a transaction is still open afterwards.
    @Test
    public void shouldResetTransactionInFlightOnClose() {
        // given:
        eosStreamsProducer.send(
            new ProducerRecord<>("topic", new byte[1]), (metadata, error) -> { });
        assertThat(eosStreamsProducer.transactionInFlight(), is(true));
        // when:
        eosStreamsProducer.close();
        // then:
        assertThat(eosStreamsProducer.transactionInFlight(), is(false));
    }
    // Same invariant, but for resetProducer() instead of close().
    @Test
    public void shouldResetTransactionInFlightOnReset() {
        // given:
        eosStreamsProducer.send(new ProducerRecord<>("topic", new byte[1]), (metadata, error) -> { });
        assertThat(eosStreamsProducer.transactionInFlight(), is(true));
        // when:
        eosStreamsProducer.resetProducer(null);
        // then:
        assertThat(eosStreamsProducer.transactionInFlight(), is(false));
    }
    // partitionsFor() must delegate to the wrapped producer and return its
    // result unchanged (sameInstance, not merely equal).
    @Test
    public void shouldForwardCallToPartitionsFor() {
        final List<PartitionInfo> expectedPartitionInfo = Collections.emptyList();
        when(mockedProducer.partitionsFor(topic)).thenReturn(expectedPartitionInfo);
        final List<PartitionInfo> partitionInfo = streamsProducerWithMock.partitionsFor(topic);
        assertThat(partitionInfo, sameInstance(expectedPartitionInfo));
    }
    // flush() is a pure pass-through to the wrapped producer.
    @Test
    public void shouldForwardCallToFlush() {
        streamsProducerWithMock.flush();
        verify(mockedProducer).flush();
    }
    // metrics() is a pure pass-through; raw Map mirrors the Producer API.
    @SuppressWarnings({"rawtypes", "unchecked"})
    @Test
    public void shouldForwardCallToMetrics() {
        final Map metrics = new HashMap<>();
        when(mockedProducer.metrics()).thenReturn(metrics);
        assertSame(metrics, streamsProducerWithMock.metrics());
    }
    // close() is a pure pass-through to the wrapped producer.
    @Test
    public void shouldForwardCallToClose() {
        streamsProducerWithMock.close();
        verify(mockedProducer).close();
    }
// error handling tests
    // Constructor argument validation: each null argument must fail fast
    // with a targeted NullPointerException message.
    @Test
    public void shouldFailIfProcessingModeIsNull() {
        final NullPointerException thrown = assertThrows(
            NullPointerException.class,
            () -> new StreamsProducer(
                mockedProducer,
                null,
                mockTime,
                logContext
            )
        );
        assertThat(thrown.getMessage(), is("processingMode cannot be null"));
    }
    @Test
    public void shouldFailIfProducerIsNull() {
        final NullPointerException thrown = assertThrows(
            NullPointerException.class,
            () -> new StreamsProducer(
                null,
                AT_LEAST_ONCE,
                mockTime,
                logContext
            )
        );
        assertThat(thrown.getMessage(), is("producer cannot be null"));
    }
    @Test
    public void shouldFailIfTimeIsNull() {
        final NullPointerException thrown = assertThrows(
            NullPointerException.class,
            () -> new StreamsProducer(
                mockedProducer,
                AT_LEAST_ONCE,
                null,
                logContext
            )
        );
        assertThat(thrown.getMessage(), is("time cannot be null"));
    }
    @Test
    public void shouldFailIfLogContextIsNull() {
        final NullPointerException thrown = assertThrows(
            NullPointerException.class,
            () -> new StreamsProducer(
                mockedProducer,
                AT_LEAST_ONCE,
                mockTime,
                null
            )
        );
        assertThat(thrown.getMessage(), is("logContext cannot be null"));
    }
    // resetProducer() only makes sense under EOS; at-least-once must reject it.
    @Test
    public void shouldFailOnResetProducerForAtLeastOnce() {
        final IllegalStateException thrown = assertThrows(
            IllegalStateException.class,
            () -> nonEosStreamsProducer.resetProducer(null)
        );
        assertThat(thrown.getMessage(), is("Expected EOS to be enabled, but processing mode is at_least_once"));
    }
// non-EOS tests
// functional tests
    // Under at-least-once no transactional APIs may be touched.
    @Test
    public void shouldNotInitTxIfEosDisable() {
        assertThat(nonEosMockProducer.transactionInitialized(), is(false));
    }
    @Test
    public void shouldNotBeginTxOnSendIfEosDisable() {
        nonEosStreamsProducer.send(record, null);
        assertThat(nonEosMockProducer.transactionInFlight(), is(false));
    }
    // Records pass straight through to the wrapped producer's history.
    @Test
    public void shouldForwardRecordOnSend() {
        nonEosStreamsProducer.send(record, null);
        assertThat(nonEosMockProducer.history().size(), is(1));
        assertThat(nonEosMockProducer.history().get(0), is(record));
    }
    // Transactional calls under ALOS are programming errors -> IllegalStateException.
    @Test
    public void shouldFailOnInitTxIfEosDisabled() {
        final IllegalStateException thrown = assertThrows(
            IllegalStateException.class,
            nonEosStreamsProducer::initTransaction
        );
        assertThat(thrown.getMessage(), is("Exactly-once is not enabled [test]"));
    }
    // Recoverable KafkaException from send() is wrapped as StreamsException.
    @Test
    public void shouldThrowStreamsExceptionOnSendError() {
        nonEosMockProducer.sendException = new KafkaException("KABOOM!");
        final StreamsException thrown = assertThrows(
            StreamsException.class,
            () -> nonEosStreamsProducer.send(record, null)
        );
        assertThat(thrown.getCause(), is(nonEosMockProducer.sendException));
        assertThat(thrown.getMessage(), is("Error encountered trying to send record to topic topic [test]"));
    }
    // Fatal (non-Kafka) exceptions must propagate unwrapped.
    @Test
    public void shouldFailOnSendFatal() {
        nonEosMockProducer.sendException = new RuntimeException("KABOOM!");
        final RuntimeException thrown = assertThrows(
            RuntimeException.class,
            () -> nonEosStreamsProducer.send(record, null)
        );
        assertThat(thrown.getMessage(), is("KABOOM!"));
    }
    @Test
    public void shouldFailOnCommitIfEosDisabled() {
        final IllegalStateException thrown = assertThrows(
            IllegalStateException.class,
            () -> nonEosStreamsProducer.commitTransaction(null, new ConsumerGroupMetadata("appId"))
        );
        assertThat(thrown.getMessage(), is("Exactly-once is not enabled [test]"));
    }
    @Test
    public void shouldFailOnAbortIfEosDisabled() {
        final IllegalStateException thrown = assertThrows(
            IllegalStateException.class,
            nonEosStreamsProducer::abortTransaction
        );
        assertThat(thrown.getMessage(), is("Exactly-once is not enabled [test]"));
    }
// EOS tests
// functional tests
@Test
public void shouldInitTxOnEos() {
assertThat(eosMockProducer.transactionInitialized(), is(true));
}
@Test
public void shouldBeginTxOnEosSend() {
eosStreamsProducer.send(record, null);
assertThat(eosMockProducer.transactionInFlight(), is(true));
}
@Test
public void shouldContinueTxnSecondEosSend() {
eosStreamsProducer.send(record, null);
eosStreamsProducer.send(record, null);
assertThat(eosMockProducer.transactionInFlight(), is(true));
assertThat(eosMockProducer.uncommittedRecords().size(), is(2));
}
@Test
public void shouldForwardRecordButNotCommitOnEosSend() {
eosStreamsProducer.send(record, null);
assertThat(eosMockProducer.transactionInFlight(), is(true));
assertThat(eosMockProducer.history().isEmpty(), is(true));
assertThat(eosMockProducer.uncommittedRecords().size(), is(1));
assertThat(eosMockProducer.uncommittedRecords().get(0), is(record));
}
@Test
public void shouldBeginTxOnEosCommit() {
eosStreamsProducerWithMock.initTransaction();
eosStreamsProducerWithMock.commitTransaction(offsetsAndMetadata, new ConsumerGroupMetadata("appId"));
verify(mockedProducer).initTransactions();
verify(mockedProducer).beginTransaction();
verify(mockedProducer).sendOffsetsToTransaction(offsetsAndMetadata, new ConsumerGroupMetadata("appId"));
verify(mockedProducer).commitTransaction();
}
@Test
public void shouldSendOffsetToTxOnEosCommit() {
eosStreamsProducer.commitTransaction(offsetsAndMetadata, new ConsumerGroupMetadata("appId"));
assertThat(eosMockProducer.sentOffsets(), is(true));
}
@Test
public void shouldCommitTxOnEosCommit() {
eosStreamsProducer.send(record, null);
assertThat(eosMockProducer.transactionInFlight(), is(true));
eosStreamsProducer.commitTransaction(offsetsAndMetadata, new ConsumerGroupMetadata("appId"));
assertThat(eosMockProducer.transactionInFlight(), is(false));
assertThat(eosMockProducer.uncommittedRecords().isEmpty(), is(true));
assertThat(eosMockProducer.uncommittedOffsets().isEmpty(), is(true));
assertThat(eosMockProducer.history().size(), is(1));
assertThat(eosMockProducer.history().get(0), is(record));
assertThat(eosMockProducer.consumerGroupOffsetsHistory().size(), is(1));
assertThat(eosMockProducer.consumerGroupOffsetsHistory().get(0).get("appId"), is(offsetsAndMetadata));
}
@Test
public void shouldCommitTxWithConsumerGroupMetadataOnEosCommit() {
when(mockedProducer.send(record, null)).thenReturn(null);
final StreamsProducer streamsProducer = new StreamsProducer(
mockedProducer,
EXACTLY_ONCE_V2,
mockTime,
logContext
);
streamsProducer.initTransaction();
// call `send()` to start a transaction
streamsProducer.send(record, null);
streamsProducer.commitTransaction(null, new ConsumerGroupMetadata("appId"));
verify(mockedProducer).initTransactions();
verify(mockedProducer).beginTransaction();
verify(mockedProducer).sendOffsetsToTransaction(null, new ConsumerGroupMetadata("appId"));
verify(mockedProducer).commitTransaction();
}
@Test
public void shouldAbortTxOnEosAbort() {
// call `send()` to start a transaction
eosStreamsProducer.send(record, null);
assertThat(eosMockProducer.transactionInFlight(), is(true));
assertThat(eosMockProducer.uncommittedRecords().size(), is(1));
assertThat(eosMockProducer.uncommittedRecords().get(0), is(record));
eosStreamsProducer.abortTransaction();
assertThat(eosMockProducer.transactionInFlight(), is(false));
assertThat(eosMockProducer.uncommittedRecords().isEmpty(), is(true));
assertThat(eosMockProducer.uncommittedOffsets().isEmpty(), is(true));
assertThat(eosMockProducer.history().isEmpty(), is(true));
assertThat(eosMockProducer.consumerGroupOffsetsHistory().isEmpty(), is(true));
}
@Test
public void shouldSkipAbortTxOnEosAbortIfNotTxInFlight() {
eosStreamsProducerWithMock.initTransaction();
eosStreamsProducerWithMock.abortTransaction();
verify(mockedProducer).initTransactions();
}
// error handling tests
@Test
public void shouldThrowTimeoutExceptionOnEosInitTxTimeout() {
// use `nonEosMockProducer` instead of `eosMockProducer` to avoid double Tx-Init
nonEosMockProducer.initTransactionException = new TimeoutException("KABOOM!");
final StreamsProducer streamsProducer = new StreamsProducer(
nonEosMockProducer,
EXACTLY_ONCE_V2,
mockTime,
logContext
);
final TimeoutException thrown = assertThrows(
TimeoutException.class,
streamsProducer::initTransaction
);
assertThat(thrown.getMessage(), is("KABOOM!"));
}
@Test
public void shouldFailOnMaybeBeginTransactionIfTransactionsNotInitializedForEos() {
// use `nonEosMockProducer` instead of `eosMockProducer` to avoid auto-init Tx
final StreamsProducer streamsProducer =
new StreamsProducer(
nonEosMockProducer,
EXACTLY_ONCE_V2,
mockTime,
logContext
);
final IllegalStateException thrown = assertThrows(
IllegalStateException.class,
() -> streamsProducer.send(record, null)
);
assertThat(thrown.getMessage(), is("MockProducer hasn't been initialized for transactions."));
}
@Test
public void shouldThrowStreamsExceptionOnEosInitError() {
// use `nonEosMockProducer` instead of `eosMockProducer` to avoid double Tx-Init
nonEosMockProducer.initTransactionException = new KafkaException("KABOOM!");
final StreamsProducer streamsProducer = new StreamsProducer(
nonEosMockProducer,
EXACTLY_ONCE_V2,
mockTime,
logContext
);
final StreamsException thrown = assertThrows(
StreamsException.class,
streamsProducer::initTransaction
);
assertThat(thrown.getCause(), is(nonEosMockProducer.initTransactionException));
assertThat(thrown.getMessage(), is("Error encountered trying to initialize transactions [test]"));
}
@Test
public void shouldFailOnEosInitFatal() {
// use `nonEosMockProducer` instead of `eosMockProducer` to avoid double Tx-Init
nonEosMockProducer.initTransactionException = new RuntimeException("KABOOM!");
final StreamsProducer streamsProducer = new StreamsProducer(
nonEosMockProducer,
EXACTLY_ONCE_V2,
mockTime,
logContext
);
final RuntimeException thrown = assertThrows(
RuntimeException.class,
streamsProducer::initTransaction
);
assertThat(thrown.getMessage(), is("KABOOM!"));
}
@Test
public void shouldThrowTaskMigrateExceptionOnEosBeginTxnFenced() {
eosMockProducer.fenceProducer();
final TaskMigratedException thrown = assertThrows(
TaskMigratedException.class,
() -> eosStreamsProducer.send(null, null)
);
assertThat(
thrown.getMessage(),
is("Producer got fenced trying to begin a new transaction [test];" +
" it means all tasks belonging to this thread should be migrated.")
);
}
    // NOTE(review): the method name says "TaskMigrateException" but the test
    // asserts StreamsException (a generic KafkaException on beginTransaction
    // is NOT a fencing error). Consider renaming to
    // shouldThrowStreamsExceptionOnEosBeginTxnError in a follow-up.
    @Test
    public void shouldThrowTaskMigrateExceptionOnEosBeginTxnError() {
        eosMockProducer.beginTransactionException = new KafkaException("KABOOM!");
        // calling `send()` implicitly starts a new transaction
        final StreamsException thrown = assertThrows(
            StreamsException.class,
            () -> eosStreamsProducer.send(null, null));
        assertThat(thrown.getCause(), is(eosMockProducer.beginTransactionException));
        assertThat(
            thrown.getMessage(),
            is("Error encountered trying to begin a new transaction [test]")
        );
    }
@Test
public void shouldFailOnEosBeginTxnFatal() {
eosMockProducer.beginTransactionException = new RuntimeException("KABOOM!");
// calling `send()` implicitly starts a new transaction
final RuntimeException thrown = assertThrows(
RuntimeException.class,
() -> eosStreamsProducer.send(null, null));
assertThat(thrown.getMessage(), is("KABOOM!"));
}
@Test
public void shouldThrowTaskMigratedExceptionOnEosSendProducerFenced() {
testThrowTaskMigratedExceptionOnEosSend(new ProducerFencedException("KABOOM!"));
}
@Test
public void shouldThrowTaskMigratedExceptionOnEosSendPInvalidPidMapping() {
testThrowTaskMigratedExceptionOnEosSend(new InvalidPidMappingException("KABOOM!"));
}
@Test
public void shouldThrowTaskMigratedExceptionOnEosSendInvalidEpoch() {
testThrowTaskMigratedExceptionOnEosSend(new InvalidProducerEpochException("KABOOM!"));
}
    // Shared driver: injects a fencing-style error into send() and asserts
    // it surfaces as TaskMigratedException with the original as cause.
    private void testThrowTaskMigratedExceptionOnEosSend(final RuntimeException exception) {
        // we need to mimic that `send()` always wraps error in a KafkaException
        // cannot use `eosMockProducer.fenceProducer()` because this would already trigger in `beginTransaction()`
        eosMockProducer.sendException = new KafkaException(exception);
        final TaskMigratedException thrown = assertThrows(
            TaskMigratedException.class,
            () -> eosStreamsProducer.send(record, null)
        );
        assertThat(thrown.getCause(), is(exception));
        assertThat(
            thrown.getMessage(),
            is("Producer got fenced trying to send a record [test];" +
                " it means all tasks belonging to this thread should be migrated.")
        );
    }
@Test
public void shouldThrowTaskMigratedExceptionOnEosSendUnknownPid() {
final UnknownProducerIdException exception = new UnknownProducerIdException("KABOOM!");
// we need to mimic that `send()` always wraps error in a KafkaException
eosMockProducer.sendException = new KafkaException(exception);
final TaskMigratedException thrown = assertThrows(
TaskMigratedException.class,
() -> eosStreamsProducer.send(record, null)
);
assertThat(thrown.getCause(), is(exception));
assertThat(
thrown.getMessage(),
is("Producer got fenced trying to send a record [test];" +
" it means all tasks belonging to this thread should be migrated.")
);
}
@Test
public void shouldThrowTaskMigrateExceptionOnEosSendOffsetProducerFenced() {
// cannot use `eosMockProducer.fenceProducer()` because this would already trigger in `beginTransaction()`
testThrowTaskMigrateExceptionOnEosSendOffset(new ProducerFencedException("KABOOM!"));
}
@Test
public void shouldThrowTaskMigrateExceptionOnEosSendOffsetInvalidPidMapping() {
// cannot use `eosMockProducer.fenceProducer()` because this would already trigger in `beginTransaction()`
testThrowTaskMigrateExceptionOnEosSendOffset(new InvalidPidMappingException("KABOOM!"));
}
@Test
public void shouldThrowTaskMigrateExceptionOnEosSendOffsetInvalidEpoch() {
// cannot use `eosMockProducer.fenceProducer()` because this would already trigger in `beginTransaction()`
testThrowTaskMigrateExceptionOnEosSendOffset(new InvalidProducerEpochException("KABOOM!"));
}
    // Shared driver: injects a fencing-style error into
    // sendOffsetsToTransaction() and asserts TaskMigratedException.
    private void testThrowTaskMigrateExceptionOnEosSendOffset(final RuntimeException exception) {
        // cannot use `eosMockProducer.fenceProducer()` because this would already trigger in `beginTransaction()`
        eosMockProducer.sendOffsetsToTransactionException = exception;
        final TaskMigratedException thrown = assertThrows(
            TaskMigratedException.class,
            // we pass in `null` to verify that `sendOffsetsToTransaction()` fails instead of `commitTransaction()`
            // `sendOffsetsToTransaction()` would throw an NPE on `null` offsets
            () -> eosStreamsProducer.commitTransaction(null, new ConsumerGroupMetadata("appId"))
        );
        assertThat(thrown.getCause(), is(eosMockProducer.sendOffsetsToTransactionException));
        assertThat(
            thrown.getMessage(),
            is("Producer got fenced trying to add offsets to a transaction [test];" +
                " it means all tasks belonging to this thread should be migrated.")
        );
    }
@Test
public void shouldThrowStreamsExceptionOnEosSendOffsetError() {
eosMockProducer.sendOffsetsToTransactionException = new KafkaException("KABOOM!");
final StreamsException thrown = assertThrows(
StreamsException.class,
// we pass in `null` to verify that `sendOffsetsToTransaction()` fails instead of `commitTransaction()`
// `sendOffsetsToTransaction()` would throw an NPE on `null` offsets
() -> eosStreamsProducer.commitTransaction(null, new ConsumerGroupMetadata("appId"))
);
assertThat(thrown.getCause(), is(eosMockProducer.sendOffsetsToTransactionException));
assertThat(
thrown.getMessage(),
is("Error encountered trying to add offsets to a transaction [test]")
);
}
@Test
public void shouldFailOnEosSendOffsetFatal() {
eosMockProducer.sendOffsetsToTransactionException = new RuntimeException("KABOOM!");
final RuntimeException thrown = assertThrows(
RuntimeException.class,
// we pass in `null` to verify that `sendOffsetsToTransaction()` fails instead of `commitTransaction()`
// `sendOffsetsToTransaction()` would throw an NPE on `null` offsets
() -> eosStreamsProducer.commitTransaction(null, new ConsumerGroupMetadata("appId"))
);
assertThat(thrown.getMessage(), is("KABOOM!"));
}
@Test
public void shouldThrowTaskMigratedExceptionOnEosCommitWithProducerFenced() {
testThrowTaskMigratedExceptionOnEos(new ProducerFencedException("KABOOM!"));
}
@Test
public void shouldThrowTaskMigratedExceptionOnEosCommitWithInvalidPidMapping() {
testThrowTaskMigratedExceptionOnEos(new InvalidPidMappingException("KABOOM!"));
}
@Test
public void shouldThrowTaskMigratedExceptionOnEosCommitWithInvalidEpoch() {
testThrowTaskMigratedExceptionOnEos(new InvalidProducerEpochException("KABOOM!"));
}
    // Shared driver: injects a fencing-style error into commitTransaction()
    // and asserts TaskMigratedException; offsets must still have been sent.
    private void testThrowTaskMigratedExceptionOnEos(final RuntimeException exception) {
        // cannot use `eosMockProducer.fenceProducer()` because this would already trigger in `beginTransaction()`
        eosMockProducer.commitTransactionException = exception;
        final TaskMigratedException thrown = assertThrows(
            TaskMigratedException.class,
            () -> eosStreamsProducer.commitTransaction(offsetsAndMetadata, new ConsumerGroupMetadata("appId"))
        );
        assertThat(eosMockProducer.sentOffsets(), is(true));
        assertThat(thrown.getCause(), is(eosMockProducer.commitTransactionException));
        assertThat(
            thrown.getMessage(),
            is("Producer got fenced trying to commit a transaction [test];" +
                " it means all tasks belonging to this thread should be migrated.")
        );
    }
@Test
public void shouldThrowStreamsExceptionOnEosCommitTxError() {
eosMockProducer.commitTransactionException = new KafkaException("KABOOM!");
final StreamsException thrown = assertThrows(
StreamsException.class,
() -> eosStreamsProducer.commitTransaction(offsetsAndMetadata, new ConsumerGroupMetadata("appId"))
);
assertThat(eosMockProducer.sentOffsets(), is(true));
assertThat(thrown.getCause(), is(eosMockProducer.commitTransactionException));
assertThat(
thrown.getMessage(),
is("Error encountered trying to commit a transaction [test]")
);
}
// A fatal (unexpected) RuntimeException during commit must not be wrapped at all —
// it is rethrown as-is.
@Test
public void shouldFailOnEosCommitTxFatal() {
    eosMockProducer.commitTransactionException = new RuntimeException("KABOOM!");
    final RuntimeException thrown = assertThrows(
        RuntimeException.class,
        () -> eosStreamsProducer.commitTransaction(offsetsAndMetadata, new ConsumerGroupMetadata("appId"))
    );
    assertThat(eosMockProducer.sentOffsets(), is(true));
    assertThat(thrown.getMessage(), is("KABOOM!"));
}
// The next three cases delegate to a shared helper to verify that fencing-related
// exceptions during an abort are swallowed (abort is best-effort on fencing).
@Test
public void shouldSwallowExceptionOnEosAbortTxProducerFenced() {
    testSwallowExceptionOnEosAbortTx(new ProducerFencedException("KABOOM!"));
}

@Test
public void shouldSwallowExceptionOnEosAbortTxInvalidPidMapping() {
    testSwallowExceptionOnEosAbortTx(new InvalidPidMappingException("KABOOM!"));
}

@Test
public void shouldSwallowExceptionOnEosAbortTxInvalidEpoch() {
    testSwallowExceptionOnEosAbortTx(new InvalidProducerEpochException("KABOOM!"));
}
// Shared helper: `abortTransaction()` throws the given fencing-style exception, yet
// the StreamsProducer's abort call must complete without propagating it.
private void testSwallowExceptionOnEosAbortTx(final RuntimeException exception) {
    when(mockedProducer.send(record, null)).thenReturn(null);
    doThrow(exception).when(mockedProducer).abortTransaction();
    eosStreamsProducerWithMock.initTransaction();
    // call `send()` to start a transaction
    eosStreamsProducerWithMock.send(record, null);
    // must not throw, even though the underlying abortTransaction() does
    eosStreamsProducerWithMock.abortTransaction();
    verify(mockedProducer).initTransactions();
    verify(mockedProducer).beginTransaction();
}
// A generic KafkaException during abort must surface as a StreamsException with the
// original exception as its cause. (The message text is the production message under test.)
@Test
public void shouldThrowStreamsExceptionOnEosAbortTxError() {
    eosMockProducer.abortTransactionException = new KafkaException("KABOOM!");
    // call `send()` to start a transaction
    eosStreamsProducer.send(record, null);
    final StreamsException thrown = assertThrows(StreamsException.class, eosStreamsProducer::abortTransaction);
    assertThat(thrown.getCause(), is(eosMockProducer.abortTransactionException));
    assertThat(
        thrown.getMessage(),
        is("Error encounter trying to abort a transaction [test]")
    );
}
// A fatal RuntimeException during abort must be rethrown unwrapped.
@Test
public void shouldFailOnEosAbortTxFatal() {
    eosMockProducer.abortTransactionException = new RuntimeException("KABOOM!");
    // call `send()` to start a transaction
    eosStreamsProducer.send(record, null);
    final RuntimeException thrown = assertThrows(RuntimeException.class, eosStreamsProducer::abortTransaction);
    assertThat(thrown.getMessage(), is("KABOOM!"));
}
// EOS test
// functional tests
// resetProducer() must close the producer it replaces.
@Test
public void shouldCloseExistingProducerOnResetProducer() {
    eosStreamsProducer.resetProducer(null);
    assertTrue(eosMockProducer.closed());
}
// resetProducer() must install the supplied producer as the new underlying producer.
@SuppressWarnings("unchecked")
@Test
public void shouldSetNewProducerOnResetProducer() {
    final Producer<byte[], byte[]> newProducer = mock(Producer.class);
    eosStreamsProducer.resetProducer(newProducer);
    assertThat(eosStreamsProducer.kafkaProducer(), is(newProducer));
}
// resetProducer() must clear the "transaction initialized" flag so a subsequent
// initTransaction() call re-initializes transactions on the (new) producer.
@Test
public void shouldResetTransactionInitializedOnResetProducer() {
    final StreamsProducer streamsProducer = new StreamsProducer(
        mockedProducer,
        EXACTLY_ONCE_V2,
        mockTime,
        logContext
    );
    streamsProducer.initTransaction();
    when(mockedProducer.metrics()).thenReturn(Collections.emptyMap());
    streamsProducer.resetProducer(mockedProducer);
    streamsProducer.initTransaction();
    verify(mockedProducer).close();
    // streamsProducer.resetProducer() should reset the 'transactionInitialized' field so that a subsequent
    // call of the streamsProducer.initTransaction() method can start a new transaction.
    // Therefore, mockedProducer.initTransactions() is expected to be called twice.
    verify(mockedProducer, times(2)).initTransactions();
}
// totalBlockedTime() must equal the sum of all producer "blocked time" metrics.
@Test
public void shouldComputeTotalBlockedTime() {
    setProducerMetrics(
        nonEosMockProducer,
        BUFFER_POOL_WAIT_TIME,
        FLUSH_TME,
        TXN_INIT_TIME,
        TXN_BEGIN_TIME,
        TXN_SEND_OFFSETS_TIME,
        TXN_COMMIT_TIME,
        TXN_ABORT_TIME,
        METADATA_WAIT_TIME
    );
    final double expectedTotalBlocked = BUFFER_POOL_WAIT_TIME + FLUSH_TME + TXN_INIT_TIME +
        TXN_BEGIN_TIME + TXN_SEND_OFFSETS_TIME + TXN_COMMIT_TIME + TXN_ABORT_TIME +
        METADATA_WAIT_TIME;
    // closeTo() because the sum is a floating-point accumulation
    assertThat(nonEosStreamsProducer.totalBlockedTime(), closeTo(expectedTotalBlocked, 0.01));
}
// After resetProducer(), totalBlockedTime() must carry over the old producer's blocked
// time plus the time spent closing it, then keep accumulating from the new producer —
// hence the expected value of 2 * expectedTotalBlocked + closeDelay.
@Test
public void shouldComputeTotalBlockedTimeAfterReset() {
    setProducerMetrics(
        eosMockProducer,
        BUFFER_POOL_WAIT_TIME,
        FLUSH_TME,
        TXN_INIT_TIME,
        TXN_BEGIN_TIME,
        TXN_SEND_OFFSETS_TIME,
        TXN_COMMIT_TIME,
        TXN_ABORT_TIME,
        METADATA_WAIT_TIME
    );
    final double expectedTotalBlocked = BUFFER_POOL_WAIT_TIME + FLUSH_TME + TXN_INIT_TIME +
        TXN_BEGIN_TIME + TXN_SEND_OFFSETS_TIME + TXN_COMMIT_TIME + TXN_ABORT_TIME +
        METADATA_WAIT_TIME;
    assertThat(eosStreamsProducer.totalBlockedTime(), equalTo(expectedTotalBlocked));
    final long closeStart = 1L;
    final long closeDelay = 1L;
    // two nanoseconds() readings bracket the close of the old producer
    when(mockTime.nanoseconds()).thenReturn(closeStart).thenReturn(closeStart + closeDelay);
    eosStreamsProducer.resetProducer(eosMockProducer);
    // the "new" producer reports the same metric values, doubling the accumulated total
    setProducerMetrics(
        eosMockProducer,
        BUFFER_POOL_WAIT_TIME,
        FLUSH_TME,
        TXN_INIT_TIME,
        TXN_BEGIN_TIME,
        TXN_SEND_OFFSETS_TIME,
        TXN_COMMIT_TIME,
        TXN_ABORT_TIME,
        METADATA_WAIT_TIME
    );
    assertThat(
        eosStreamsProducer.totalBlockedTime(),
        closeTo(2 * expectedTotalBlocked + closeDelay, 0.01)
    );
}
// Builds a MetricName with empty group/description/tags — only the name matters here.
private MetricName metricName(final String name) {
    return new MetricName(name, "", "", Collections.emptyMap());
}
// Registers a constant-valued mock metric under the given name on the producer.
private void addMetric(
    final MockProducer<?, ?> producer,
    final String name,
    final double value) {
    final MetricName id = metricName(name);
    producer.setMockMetrics(
        id,
        new Metric() {
            @Override
            public MetricName metricName() {
                return id;
            }

            @Override
            public Object metricValue() {
                return value;
            }
        });
}
// Populates the producer with all "blocked time" metrics that totalBlockedTime() reads.
private void setProducerMetrics(
    final MockProducer<?, ?> producer,
    final double bufferPoolWaitTime,
    final double flushTime,
    final double txnInitTime,
    final double txnBeginTime,
    final double txnSendOffsetsTime,
    final double txnCommitTime,
    final double txnAbortTime,
    final double metadataWaitTime) {
    // parallel arrays keep name/value pairs aligned; order matches the parameter list
    final String[] names = {
        "bufferpool-wait-time-ns-total",
        "flush-time-ns-total",
        "txn-init-time-ns-total",
        "txn-begin-time-ns-total",
        "txn-send-offsets-time-ns-total",
        "txn-commit-time-ns-total",
        "txn-abort-time-ns-total",
        "metadata-wait-time-ns-total"
    };
    final double[] values = {
        bufferPoolWaitTime,
        flushTime,
        txnInitTime,
        txnBeginTime,
        txnSendOffsetsTime,
        txnCommitTime,
        txnAbortTime,
        metadataWaitTime
    };
    for (int i = 0; i < names.length; i++) {
        addMetric(producer, names[i], values[i]);
    }
}
}
|
google/guava | 36,866 | android/guava-tests/test/com/google/common/collect/MinMaxPriorityQueueTest.java | /*
* Copyright (C) 2008 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.collect;
import static com.google.common.collect.Platform.reduceExponentIfGwt;
import static com.google.common.collect.Platform.reduceIterationsIfGwt;
import static com.google.common.collect.ReflectionFreeAssertThrows.assertThrows;
import static com.google.common.collect.Sets.newHashSet;
import static com.google.common.truth.Truth.assertThat;
import static java.util.Arrays.asList;
import static java.util.Collections.shuffle;
import com.google.common.annotations.GwtCompatible;
import com.google.common.annotations.GwtIncompatible;
import com.google.common.annotations.J2ktIncompatible;
import com.google.common.collect.testing.IteratorFeature;
import com.google.common.collect.testing.IteratorTester;
import com.google.common.collect.testing.QueueTestSuiteBuilder;
import com.google.common.collect.testing.TestStringQueueGenerator;
import com.google.common.collect.testing.features.CollectionFeature;
import com.google.common.collect.testing.features.CollectionSize;
import com.google.common.testing.NullPointerTester;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.ConcurrentModificationException;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Objects;
import java.util.PriorityQueue;
import java.util.Queue;
import java.util.Random;
import java.util.SortedMap;
import java.util.concurrent.atomic.AtomicInteger;
import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;
import org.jspecify.annotations.NullMarked;
import org.jspecify.annotations.Nullable;
/**
* Unit test for {@link MinMaxPriorityQueue}.
*
* @author Alexei Stolboushkin
* @author Sverre Sundsdal
*/
@GwtCompatible
@NullMarked
public class MinMaxPriorityQueueTest extends TestCase {
// Reverse-natural ordering used wherever a test needs an explicit, non-natural comparator.
private static final Ordering<Integer> SOME_COMPARATOR = Ordering.<Integer>natural().reverse();
// Builds a suite that runs this class's tests plus the generic Queue contract tests
// against MinMaxPriorityQueue.
@J2ktIncompatible
@GwtIncompatible // suite
@AndroidIncompatible // test-suite builders
public static Test suite() {
  TestSuite suite = new TestSuite();
  suite.addTestSuite(MinMaxPriorityQueueTest.class);
  suite.addTest(
      QueueTestSuiteBuilder.using(
              new TestStringQueueGenerator() {
                @Override
                protected Queue<String> create(String[] elements) {
                  return MinMaxPriorityQueue.create(asList(elements));
                }
              })
          .named("MinMaxPriorityQueue")
          .withFeatures(CollectionSize.ANY, CollectionFeature.GENERAL_PURPOSE)
          .createTestSuite());
  return suite;
}
// Overkill alert! Test all combinations of 0-2 options during creation.
// A queue created with no options has the default capacity (11), no maximum size,
// and natural ordering.
public void testCreation_simple() {
  MinMaxPriorityQueue<Integer> q = MinMaxPriorityQueue.create();
  assertEquals(11, q.capacity());
  checkUnbounded(q);
  checkNatural(q);
}
// Creation with only a comparator keeps default capacity and unboundedness.
public void testCreation_comparator() {
  MinMaxPriorityQueue<Integer> queue = MinMaxPriorityQueue.orderedBy(SOME_COMPARATOR).create();
  assertEquals(11, queue.capacity());
  checkUnbounded(queue);
  assertSame(SOME_COMPARATOR, queue.comparator());
}
// We use the rawtypeToWildcard "cast" to make the test work with J2KT in other tests. Leaving one
// test without that cast to verify that using the raw Comparable works outside J2KT.
@J2ktIncompatible // J2KT's translation of raw Comparable is not a supertype of Int translation
public void testCreation_expectedSize() {
  // expectedSize(8) sets the initial capacity; the queue stays unbounded
  MinMaxPriorityQueue<Integer> queue = MinMaxPriorityQueue.expectedSize(8).create();
  assertEquals(8, queue.capacity());
  checkUnbounded(queue);
  checkNatural(queue);
}
// Comparator + expected size together.
public void testCreation_expectedSize_comparator() {
  MinMaxPriorityQueue<Integer> queue =
      MinMaxPriorityQueue.orderedBy(SOME_COMPARATOR).expectedSize(8).create();
  assertEquals(8, queue.capacity());
  checkUnbounded(queue);
  assertSame(SOME_COMPARATOR, queue.comparator());
}
// maximumSize(42) bounds the queue but leaves the default initial capacity of 11.
public void testCreation_maximumSize() {
  MinMaxPriorityQueue<Integer> queue =
      rawtypeToWildcard(MinMaxPriorityQueue.maximumSize(42)).create();
  assertEquals(11, queue.capacity());
  assertEquals(42, queue.maximumSize);
  checkNatural(queue);
}
// Comparator + maximum size together.
public void testCreation_comparator_maximumSize() {
  MinMaxPriorityQueue<Integer> queue =
      MinMaxPriorityQueue.orderedBy(SOME_COMPARATOR).maximumSize(42).create();
  assertEquals(11, queue.capacity());
  assertEquals(42, queue.maximumSize);
  assertSame(SOME_COMPARATOR, queue.comparator());
}
// Expected size + maximum size together.
public void testCreation_expectedSize_maximumSize() {
  MinMaxPriorityQueue<Integer> queue =
      rawtypeToWildcard(MinMaxPriorityQueue.expectedSize(8)).maximumSize(42).create();
  assertEquals(8, queue.capacity());
  assertEquals(42, queue.maximumSize);
  checkNatural(queue);
}
// Fixed initial contents used by the creation-with-contents tests below.
private static final ImmutableList<Integer> NUMBERS = ImmutableList.of(4, 8, 15, 16, 23, 42);
// Creating from initial contents populates the queue but keeps the default
// capacity, unboundedness, and natural ordering.
public void testCreation_withContents() {
  MinMaxPriorityQueue<Integer> q = MinMaxPriorityQueue.create(NUMBERS);
  assertEquals(6, q.size());
  assertEquals(11, q.capacity());
  checkUnbounded(q);
  checkNatural(q);
}
// Comparator + initial contents.
public void testCreation_comparator_withContents() {
  MinMaxPriorityQueue<Integer> queue =
      MinMaxPriorityQueue.orderedBy(SOME_COMPARATOR).create(NUMBERS);
  assertEquals(6, queue.size());
  assertEquals(11, queue.capacity());
  checkUnbounded(queue);
  assertSame(SOME_COMPARATOR, queue.comparator());
}
// Expected size + initial contents.
public void testCreation_expectedSize_withContents() {
  MinMaxPriorityQueue<Integer> queue =
      rawtypeToWildcard(MinMaxPriorityQueue.expectedSize(8)).create(NUMBERS);
  assertEquals(6, queue.size());
  assertEquals(8, queue.capacity());
  checkUnbounded(queue);
  checkNatural(queue);
}
// Maximum size + initial contents.
public void testCreation_maximumSize_withContents() {
  MinMaxPriorityQueue<Integer> queue =
      rawtypeToWildcard(MinMaxPriorityQueue.maximumSize(42)).create(NUMBERS);
  assertEquals(6, queue.size());
  assertEquals(11, queue.capacity());
  assertEquals(42, queue.maximumSize);
  checkNatural(queue);
}
// Now test everything at once: comparator + expected size + maximum size + contents.
public void testCreation_allOptions() {
  MinMaxPriorityQueue<Integer> queue =
      MinMaxPriorityQueue.orderedBy(SOME_COMPARATOR)
          .expectedSize(8)
          .maximumSize(42)
          .create(NUMBERS);
  assertEquals(6, queue.size());
  assertEquals(8, queue.capacity());
  assertEquals(42, queue.maximumSize);
  assertSame(SOME_COMPARATOR, queue.comparator());
}
// TODO: tests that check the weird interplay between expected size,
// maximum size, size of initial contents, default capacity...
// Asserts the queue uses natural ordering.
private static void checkNatural(MinMaxPriorityQueue<Integer> queue) {
  assertSame(Ordering.natural(), queue.comparator());
}
// Asserts the queue has no maximum-size bound.
private static void checkUnbounded(MinMaxPriorityQueue<Integer> queue) {
  assertEquals(Integer.MAX_VALUE, queue.maximumSize);
}
// Randomized scenario: mirrors the heap in a TreeMap of element -> occurrence count,
// interleaves random inserts with random min/max removals, and checks the heap
// invariants and contents after every step. Fixed seed keeps the run reproducible.
public void testHeapIntact() {
  Random random = new Random(0);
  int heapSize = 99;
  int numberOfModifications = 100;
  MinMaxPriorityQueue<Integer> mmHeap =
      rawtypeToWildcard(MinMaxPriorityQueue.expectedSize(heapSize)).create();
  /*
   * this map would contain the same exact elements as the MinMaxHeap; the
   * value in the map is the number of occurrences of the key.
   */
  SortedMap<Integer, AtomicInteger> replica = Maps.newTreeMap();
  assertTrue("Empty heap should be OK", mmHeap.isIntact());
  for (int i = 0; i < heapSize; i++) {
    int randomInt = random.nextInt();
    mmHeap.offer(randomInt);
    insertIntoReplica(replica, randomInt);
  }
  assertIntact(mmHeap);
  assertEquals(heapSize, mmHeap.size());
  int currentHeapSize = heapSize;
  for (int i = 0; i < numberOfModifications; i++) {
    if (random.nextBoolean()) {
      /* insert a new element */
      int randomInt = random.nextInt();
      mmHeap.offer(randomInt);
      insertIntoReplica(replica, randomInt);
      currentHeapSize++;
    } else {
      /* remove either min or max */
      if (random.nextBoolean()) {
        removeMinFromReplica(replica, mmHeap.poll());
      } else {
        removeMaxFromReplica(replica, mmHeap.pollLast());
      }
      // every element still tracked by the replica must remain in the heap
      for (Integer v : replica.keySet()) {
        assertThat(mmHeap).contains(v);
      }
      assertIntact(mmHeap);
      currentHeapSize--;
      assertEquals(currentHeapSize, mmHeap.size());
    }
  }
  assertEquals(currentHeapSize, mmHeap.size());
  assertIntact(mmHeap);
}
// Draining a 4-element heap from the max end: pollLast()/peekLast() return elements in
// descending order, peek() always shows the min, and an empty heap yields nulls.
public void testSmall() {
  MinMaxPriorityQueue<Integer> mmHeap = MinMaxPriorityQueue.create();
  mmHeap.add(1);
  mmHeap.add(4);
  mmHeap.add(2);
  mmHeap.add(3);
  assertEquals(4, (int) mmHeap.pollLast());
  assertEquals(3, (int) mmHeap.peekLast());
  assertEquals(3, (int) mmHeap.pollLast());
  assertEquals(1, (int) mmHeap.peek());
  assertEquals(2, (int) mmHeap.peekLast());
  assertEquals(2, (int) mmHeap.pollLast());
  assertEquals(1, (int) mmHeap.peek());
  assertEquals(1, (int) mmHeap.peekLast());
  assertEquals(1, (int) mmHeap.pollLast());
  assertNull(mmHeap.peek());
  assertNull(mmHeap.peekLast());
  assertNull(mmHeap.pollLast());
}
// Draining a 3-element heap from the min end: poll() returns ascending order while
// peekLast() keeps showing the current max; an empty heap yields nulls.
public void testSmallMinHeap() {
  MinMaxPriorityQueue<Integer> mmHeap = MinMaxPriorityQueue.create();
  mmHeap.add(1);
  mmHeap.add(3);
  mmHeap.add(2);
  assertEquals(1, (int) mmHeap.peek());
  assertEquals(1, (int) mmHeap.poll());
  assertEquals(3, (int) mmHeap.peekLast());
  assertEquals(2, (int) mmHeap.peek());
  assertEquals(2, (int) mmHeap.poll());
  assertEquals(3, (int) mmHeap.peekLast());
  assertEquals(3, (int) mmHeap.peek());
  assertEquals(3, (int) mmHeap.poll());
  assertNull(mmHeap.peekLast());
  assertNull(mmHeap.peek());
  assertNull(mmHeap.poll());
}
// remove(Object) and removeAll() must shrink the heap and preserve its invariants.
public void testRemove() {
  MinMaxPriorityQueue<Integer> mmHeap = MinMaxPriorityQueue.create();
  mmHeap.addAll(Lists.newArrayList(1, 2, 3, 4, 47, 1, 5, 3, 0));
  assertTrue("Heap is not intact initially", mmHeap.isIntact());
  assertEquals(9, mmHeap.size());
  mmHeap.remove(5);
  assertEquals(8, mmHeap.size());
  assertTrue("Heap is not intact after remove()", mmHeap.isIntact());
  assertEquals(47, (int) mmHeap.pollLast());
  assertEquals(4, (int) mmHeap.pollLast());
  // removeAll removes every occurrence of 2 and 3 (there are two 3s and one 2)
  mmHeap.removeAll(Lists.newArrayList(2, 3));
  assertEquals(3, mmHeap.size());
  assertTrue("Heap is not intact after removeAll()", mmHeap.isIntact());
}
// contains()/remove() semantics with duplicates, absent elements, and null.
public void testContains() {
  MinMaxPriorityQueue<Integer> mmHeap = MinMaxPriorityQueue.create();
  mmHeap.addAll(Lists.newArrayList(1, 1, 2));
  assertEquals(3, mmHeap.size());
  assertFalse("Heap does not contain null", mmHeap.contains(null));
  assertFalse("Heap does not contain 3", mmHeap.contains(3));
  assertFalse("Heap does not contain 3", mmHeap.remove(3));
  assertEquals(3, mmHeap.size());
  assertTrue("Heap is not intact after remove()", mmHeap.isIntact());
  // remove(1) drops only one of the two 1s
  assertTrue("Heap contains two 1's", mmHeap.contains(1));
  assertTrue("Heap contains two 1's", mmHeap.remove(1));
  assertTrue("Heap contains 1", mmHeap.contains(1));
  assertTrue("Heap contains 1", mmHeap.remove(1));
  assertFalse("Heap does not contain 1", mmHeap.contains(1));
  assertTrue("Heap contains 2", mmHeap.remove(2));
  assertEquals(0, mmHeap.size());
  assertFalse("Heap does not contain anything", mmHeap.contains(1));
  assertFalse("Heap does not contain anything", mmHeap.remove(2));
}
// Advancing the iterator past the last element must throw NoSuchElementException.
public void testIteratorPastEndException() {
  MinMaxPriorityQueue<Integer> queue = MinMaxPriorityQueue.create();
  queue.addAll(Lists.newArrayList(1, 2));
  Iterator<Integer> iterator = queue.iterator();
  assertTrue("Iterator has reached end prematurely", iterator.hasNext());
  // consume both elements
  iterator.next();
  iterator.next();
  assertThrows(NoSuchElementException.class, () -> iterator.next());
}
// Mutating the queue directly (not via the iterator) mid-iteration must make the
// iterator fail fast with ConcurrentModificationException.
public void testIteratorConcurrentModification() {
  MinMaxPriorityQueue<Integer> mmHeap = MinMaxPriorityQueue.create();
  mmHeap.addAll(Lists.newArrayList(1, 2, 3, 4));
  Iterator<Integer> it = mmHeap.iterator();
  assertTrue("Iterator has reached end prematurely", it.hasNext());
  it.next();
  it.next();
  mmHeap.remove(4);
  assertThrows(ConcurrentModificationException.class, () -> it.next());
}
/**
 * Tests a failure caused by fix to childless uncle issue: iterating with {@code
 * Iterator.remove()} from a specific heap shape used to produce duplicate elements in the
 * iteration result.
 */
public void testIteratorRegressionChildlessUncle() {
  ArrayList<Integer> initial = Lists.newArrayList(1, 15, 13, 8, 9, 10, 11, 14);
  MinMaxPriorityQueue<Integer> q = MinMaxPriorityQueue.create(initial);
  assertIntact(q);
  q.remove(9);
  q.remove(11);
  q.remove(10);
  // Now we're in the critical state: [1, 15, 13, 8, 14]
  // Removing 8 while iterating caused duplicates in iteration result.
  List<Integer> result = Lists.newArrayListWithCapacity(initial.size());
  for (Iterator<Integer> iter = q.iterator(); iter.hasNext(); ) {
    Integer value = iter.next();
    result.add(value);
    if (value == 8) {
      iter.remove();
    }
  }
  assertIntact(q);
  // each surviving element appears exactly once — no duplicates
  assertThat(result).containsExactly(1, 15, 13, 8, 14);
}
/**
 * This tests a special case of the removeAt() call. Moving an element sideways on the heap could
 * break the invariants. Sometimes we need to bubble an element up instead of trickling down. See
 * implementation.
 */
public void testInvalidatingRemove() {
  MinMaxPriorityQueue<Integer> mmHeap = MinMaxPriorityQueue.create();
  // this specific fill order sets up the sideways-move scenario described above
  mmHeap.addAll(
      Lists.newArrayList(1, 20, 1000, 2, 3, 30, 40, 10, 11, 12, 13, 300, 400, 500, 600));
  assertEquals(15, mmHeap.size());
  assertTrue("Heap is not intact initially", mmHeap.isIntact());
  mmHeap.remove(12);
  assertEquals(14, mmHeap.size());
  assertTrue("Heap is not intact after remove()", mmHeap.isIntact());
}
/** This tests a more obscure special case, but otherwise similar to above. */
public void testInvalidatingRemove2() {
  MinMaxPriorityQueue<Integer> mmHeap = MinMaxPriorityQueue.create();
  List<Integer> values =
      Lists.newArrayList(
          1, 20, 1000, 2, 3, 30, 40, 10, 11, 12, 13, 300, 400, 500, 600, 4, 5, 6, 7, 8, 9, 4, 5,
          200, 250);
  mmHeap.addAll(values);
  assertEquals(25, mmHeap.size());
  assertTrue("Heap is not intact initially", mmHeap.isIntact());
  mmHeap.remove(2);
  assertEquals(24, mmHeap.size());
  assertTrue("Heap is not intact after remove()", mmHeap.isIntact());
  // after mirroring the removal in `values`, heap and list must hold the same multiset
  values.removeAll(Lists.newArrayList(2));
  assertEquals(values.size(), mmHeap.size());
  assertTrue(values.containsAll(mmHeap));
  assertTrue(mmHeap.containsAll(values));
}
// Iterator.remove() mid-iteration must keep the iteration complete and duplicate-free;
// the tail of the test pins down the element reordering (forgetMeNot) that remove causes.
public void testIteratorInvalidatingIteratorRemove() {
  MinMaxPriorityQueue<Integer> mmHeap = MinMaxPriorityQueue.create();
  mmHeap.addAll(Lists.newArrayList(1, 20, 100, 2, 3, 30, 40));
  assertEquals(7, mmHeap.size());
  assertTrue("Heap is not intact initially", mmHeap.isIntact());
  Iterator<Integer> it = mmHeap.iterator();
  assertEquals((Integer) 1, it.next());
  assertEquals((Integer) 20, it.next());
  assertEquals((Integer) 100, it.next());
  assertEquals((Integer) 2, it.next());
  it.remove();
  assertFalse(mmHeap.contains(2));
  assertTrue(it.hasNext());
  assertEquals((Integer) 3, it.next());
  assertTrue(it.hasNext());
  assertEquals((Integer) 30, it.next());
  assertTrue(it.hasNext());
  assertEquals((Integer) 40, it.next());
  assertFalse(it.hasNext());
  assertEquals(6, mmHeap.size());
  assertTrue("Heap is not intact after remove()", mmHeap.isIntact());
  assertFalse(mmHeap.contains(2));
  // This tests that it.remove() above actually changed the order. It
  // indicates that the value 40 was stored in forgetMeNot, so it is
  // returned in the last call to it.next(). Without it, 30 should be the last
  // item returned by the iterator.
  Integer lastItem = 0;
  for (Integer tmp : mmHeap) {
    lastItem = tmp;
  }
  assertEquals((Integer) 30, lastItem);
}
/**
 * This tests a special case where removeAt has to trickle an element first down one level from a
 * min to a max level, then up one level above the index of the removed element. It also tests
 * that skipMe in the iterator plays nicely with forgetMeNot.
 */
public void testIteratorInvalidatingIteratorRemove2() {
  MinMaxPriorityQueue<Integer> mmHeap = MinMaxPriorityQueue.create();
  mmHeap.addAll(
      Lists.newArrayList(1, 20, 1000, 2, 3, 30, 40, 10, 11, 12, 13, 200, 300, 500, 400));
  assertTrue("Heap is not intact initially", mmHeap.isIntact());
  Iterator<Integer> it = mmHeap.iterator();
  assertEquals((Integer) 1, it.next());
  assertEquals((Integer) 20, it.next());
  assertEquals((Integer) 1000, it.next());
  assertEquals((Integer) 2, it.next());
  it.remove();
  // After this remove, 400 has moved up and 20 down past cursor
  assertTrue("Heap is not intact after remove", mmHeap.isIntact());
  assertEquals((Integer) 10, it.next());
  assertEquals((Integer) 3, it.next());
  it.remove();
  // After this remove, 400 moved down again and 500 up past the cursor
  assertTrue("Heap is not intact after remove", mmHeap.isIntact());
  assertEquals((Integer) 12, it.next());
  assertEquals((Integer) 30, it.next());
  assertEquals((Integer) 40, it.next());
  // Skipping 20
  assertEquals((Integer) 11, it.next());
  // Not skipping 400, because it moved back down
  assertEquals((Integer) 400, it.next());
  assertEquals((Integer) 13, it.next());
  assertEquals((Integer) 200, it.next());
  assertEquals((Integer) 300, it.next());
  // Last from forgetMeNot.
  assertEquals((Integer) 500, it.next());
}
// remove()/removeAll() on a String heap: removals shrink the heap, absent elements are
// reported as not removed, and peek()/peekLast() reflect lexicographic min/max.
public void testRemoveFromStringHeap() {
  MinMaxPriorityQueue<String> mmHeap =
      rawtypeToWildcard(MinMaxPriorityQueue.expectedSize(5)).create();
  Collections.addAll(mmHeap, "foo", "bar", "foobar", "barfoo", "larry", "sergey", "eric");
  assertTrue("Heap is not intact initially", mmHeap.isIntact());
  assertEquals("bar", mmHeap.peek());
  assertEquals("sergey", mmHeap.peekLast());
  assertEquals(7, mmHeap.size());
  assertTrue("Could not remove larry", mmHeap.remove("larry"));
  assertEquals(6, mmHeap.size());
  assertFalse("heap contains larry which has been removed", mmHeap.contains("larry"));
  assertTrue("heap does not contain sergey", mmHeap.contains("sergey"));
  // Fixed copy-pasted failure message: this call removes sergey and eric, not larry.
  assertTrue(
      "Could not remove sergey and eric", mmHeap.removeAll(Lists.newArrayList("sergey", "eric")));
  assertFalse("Could remove nikesh which is not in the heap", mmHeap.remove("nikesh"));
  assertEquals(4, mmHeap.size());
}
// With a reverse-natural comparator, peek() is the lexicographically greatest element
// and peekLast() the least.
public void testCreateWithOrdering() {
  MinMaxPriorityQueue<String> mmHeap =
      MinMaxPriorityQueue.orderedBy(Ordering.<String>natural().reverse()).create();
  Collections.addAll(mmHeap, "foo", "bar", "foobar", "barfoo", "larry", "sergey", "eric");
  assertTrue("Heap is not intact initially", mmHeap.isIntact());
  assertEquals("sergey", mmHeap.peek());
  assertEquals("bar", mmHeap.peekLast());
}
// Reverse ordering combined with an expected size: peek() is the max, peekLast() the min.
public void testCreateWithCapacityAndOrdering() {
  MinMaxPriorityQueue<Integer> mmHeap =
      MinMaxPriorityQueue.orderedBy(Ordering.<Integer>natural().reverse())
          .expectedSize(5)
          .create();
  Collections.addAll(mmHeap, 1, 7, 2, 56, 2, 5, 23, 68, 0, 3);
  assertTrue("Heap is not intact initially", mmHeap.isIntact());
  assertEquals(68, (int) mmHeap.peek());
  assertEquals(0, (int) mmHeap.peekLast());
}
// Runs the generic IteratorTester harness against a fresh MinMaxPriorityQueue for each
// stimulus sequence of the given length, verifying element set and heap invariants.
private <T extends Comparable<T>> void runIterator(List<T> values, int steps) throws Exception {
  IteratorTester<T> tester =
      new IteratorTester<T>(
          steps,
          IteratorFeature.MODIFIABLE,
          new LinkedList<>(values),
          IteratorTester.KnownOrder.UNKNOWN_ORDER) {
        // the queue under test; re-created by newTargetIterator() for every run
        private @Nullable MinMaxPriorityQueue<T> mmHeap;

        @Override
        protected Iterator<T> newTargetIterator() {
          mmHeap = MinMaxPriorityQueue.create(values);
          return mmHeap.iterator();
        }

        @Override
        protected void verify(List<T> elements) {
          // compare as sets — MinMaxPriorityQueue iteration order is unspecified
          assertEquals(new HashSet<>(elements), newHashSet(mmHeap.iterator()));
          assertIntact(mmHeap);
        }
      };
  tester.test();
}
// Exercises the iterator contract on a small queue of random elements (fixed seed).
public void testIteratorTester() throws Exception {
  Random random = new Random(0);
  List<Integer> list = new ArrayList<>();
  for (int i = 0; i < 3; i++) {
    list.add(random.nextInt());
  }
  runIterator(list, 6);
}
// Same iterator-contract check on a larger queue with fewer stimulus steps.
public void testIteratorTesterLarger() throws Exception {
  runIterator(Lists.newArrayList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10), 5);
}
// Randomized removeAt()/add() churn; the random seed is baked into the failure message
// (assertIntactUsingSeed) so a failing run can be reproduced.
public void testRemoveAt() {
  long seed = new Random().nextLong();
  Random random = new Random(seed);
  int heapSize = 999;
  int numberOfModifications = reduceIterationsIfGwt(500);
  MinMaxPriorityQueue<Integer> mmHeap =
      rawtypeToWildcard(MinMaxPriorityQueue.expectedSize(heapSize)).create();
  for (int i = 0; i < heapSize; i++) {
    mmHeap.add(random.nextInt());
  }
  for (int i = 0; i < numberOfModifications; i++) {
    mmHeap.removeAt(random.nextInt(mmHeap.size()));
    assertIntactUsingSeed(seed, mmHeap);
    mmHeap.add(random.nextInt());
    assertIntactUsingSeed(seed, mmHeap);
  }
}
// Exhaustive check: for every permutation of [0..size) and every index, removeAt(i)
// must leave the heap intact.
public void testRemoveAt_exhaustive() {
  int size = reduceExponentIfGwt(8);
  List<Integer> expected = createOrderedList(size);
  for (Collection<Integer> perm : Collections2.permutations(expected)) {
    for (int i = 0; i < perm.size(); i++) {
      MinMaxPriorityQueue<Integer> q = MinMaxPriorityQueue.create(perm);
      q.removeAt(i);
      assertIntactUsingStartedWith(perm, q);
    }
  }
}
/** Regression test for bug found: pollFirst() must drain the queue in ascending order. */
public void testCorrectOrdering_regression() {
  MinMaxPriorityQueue<Integer> queue =
      MinMaxPriorityQueue.create(ImmutableList.of(3, 5, 1, 4, 7));
  List<Integer> expectedOrder = ImmutableList.of(1, 3, 4, 5, 7);
  List<Integer> polled = new ArrayList<>(5);
  for (int i = 0; i < expectedOrder.size(); i++) {
    polled.add(queue.pollFirst());
  }
  assertEquals(expectedOrder, polled);
}
// For every small size, repeatedly inserts a random permutation and checks that
// pollFirst() drains the heap back into sorted order (seed reported on failure).
public void testCorrectOrdering_smallHeapsPollFirst() {
  for (int size = 2; size < 16; size++) {
    for (int attempts = 0; attempts < size * (size - 1); attempts++) {
      ArrayList<Integer> elements = createOrderedList(size);
      List<Integer> expected = ImmutableList.copyOf(elements);
      MinMaxPriorityQueue<Integer> q = MinMaxPriorityQueue.create();
      long seed = insertRandomly(elements, q);
      while (!q.isEmpty()) {
        elements.add(q.pollFirst());
      }
      assertEqualsUsingSeed(seed, expected, elements);
    }
  }
}
// Mirror of the pollFirst variant: pollLast() drains in descending order, which is
// rebuilt into ascending order by prepending (add at index 0).
public void testCorrectOrdering_smallHeapsPollLast() {
  for (int size = 2; size < 16; size++) {
    for (int attempts = 0; attempts < size * (size - 1); attempts++) {
      ArrayList<Integer> elements = createOrderedList(size);
      List<Integer> expected = ImmutableList.copyOf(elements);
      MinMaxPriorityQueue<Integer> q = MinMaxPriorityQueue.create();
      long seed = insertRandomly(elements, q);
      while (!q.isEmpty()) {
        elements.add(0, q.pollLast());
      }
      assertEqualsUsingSeed(seed, expected, elements);
    }
  }
}
// Same pollFirst ordering check on randomly sized medium heaps (16..271 elements).
public void testCorrectOrdering_mediumHeapsPollFirst() {
  for (int attempts = 0; attempts < reduceIterationsIfGwt(5000); attempts++) {
    int size = new Random().nextInt(256) + 16;
    ArrayList<Integer> elements = createOrderedList(size);
    List<Integer> expected = ImmutableList.copyOf(elements);
    MinMaxPriorityQueue<Integer> q = MinMaxPriorityQueue.create();
    long seed = insertRandomly(elements, q);
    while (!q.isEmpty()) {
      elements.add(q.pollFirst());
    }
    assertEqualsUsingSeed(seed, expected, elements);
  }
}
/**
 * Regression test for bug found in random testing. The fixed size/seed pair reproduces a
 * historical failure (73 elements is a childless-uncle heap shape).
 */
public void testCorrectOrdering_73ElementBug() {
  int size = 73;
  long seed = 7522346378524621981L;
  ArrayList<Integer> elements = createOrderedList(size);
  List<Integer> expected = ImmutableList.copyOf(elements);
  MinMaxPriorityQueue<Integer> q = MinMaxPriorityQueue.create();
  insertRandomly(elements, q, new Random(seed));
  assertIntact(q);
  while (!q.isEmpty()) {
    elements.add(q.pollFirst());
    assertIntact(q);
  }
  assertEqualsUsingSeed(seed, expected, elements);
}
// Same pollLast ordering check on randomly sized medium heaps (16..271 elements).
public void testCorrectOrdering_mediumHeapsPollLast() {
  for (int attempts = 0; attempts < reduceIterationsIfGwt(5000); attempts++) {
    int size = new Random().nextInt(256) + 16;
    ArrayList<Integer> elements = createOrderedList(size);
    List<Integer> expected = ImmutableList.copyOf(elements);
    MinMaxPriorityQueue<Integer> q = MinMaxPriorityQueue.create();
    long seed = insertRandomly(elements, q);
    while (!q.isEmpty()) {
      elements.add(0, q.pollLast());
    }
    assertEqualsUsingSeed(seed, expected, elements);
  }
}
// Differential test: random add/pollFirst sequence must match java.util.PriorityQueue
// as a reference implementation (seed reported on failure).
public void testCorrectOrdering_randomAccess() {
  long seed = new Random().nextLong();
  Random random = new Random(seed);
  PriorityQueue<Integer> control = new PriorityQueue<>();
  MinMaxPriorityQueue<Integer> q = MinMaxPriorityQueue.create();
  for (int i = 0; i < 73; i++) { // 73 is a childless uncle case.
    Integer element = random.nextInt();
    control.add(element);
    assertTrue(q.add(element));
  }
  assertIntact(q);
  for (int i = 0; i < reduceIterationsIfGwt(500_000); i++) {
    if (random.nextBoolean()) {
      Integer element = random.nextInt();
      control.add(element);
      q.add(element);
    } else {
      assertEqualsUsingSeed(seed, control.poll(), q.pollFirst());
    }
  }
  // drain both queues and compare the remaining order
  while (!control.isEmpty()) {
    assertEqualsUsingSeed(seed, control.poll(), q.pollFirst());
  }
  assertTrue(q.isEmpty());
}
// Exhaustive push/poll churn over all permutations of [0..size): after each pollFirst(),
// temporarily re-adding the polled element alongside each i must not change which element
// is the minimum.
public void testExhaustive_pollAndPush() {
  int size = 5;
  List<Integer> expected = createOrderedList(size);
  for (Collection<Integer> perm : Collections2.permutations(expected)) {
    MinMaxPriorityQueue<Integer> q = MinMaxPriorityQueue.create(perm);
    List<Integer> elements = Lists.newArrayListWithCapacity(size);
    while (!q.isEmpty()) {
      Integer next = q.pollFirst();
      for (int i = 0; i <= size; i++) {
        assertTrue(q.add(i));
        assertTrue(q.add(next));
        assertTrue(q.remove(i));
        assertEquals(next, q.poll());
      }
      elements.add(next);
    }
    assertEqualsUsingStartedWith(perm, expected, elements);
  }
}
/**
 * Regression test for b/4124577: mirrors every queue mutation in a plain list and checks
 * element-for-element agreement after each step to catch silent data corruption.
 */
public void testRegression_dataCorruption() {
  int size = 8;
  List<Integer> expected = createOrderedList(size);
  MinMaxPriorityQueue<Integer> q = MinMaxPriorityQueue.create(expected);
  List<Integer> contents = new ArrayList<>(expected);
  List<Integer> elements = Lists.newArrayListWithCapacity(size);
  while (!q.isEmpty()) {
    assertThat(q).containsExactlyElementsIn(contents);
    Integer next = q.pollFirst();
    contents.remove(next);
    assertThat(q).containsExactlyElementsIn(contents);
    for (int i = 0; i <= size; i++) {
      q.add(i);
      contents.add(i);
      assertThat(q).containsExactlyElementsIn(contents);
      q.add(next);
      contents.add(next);
      assertThat(q).containsExactlyElementsIn(contents);
      q.remove(i);
      // Integer.valueOf(i) forces List.remove(Object), not remove(int index)
      assertTrue(contents.remove(Integer.valueOf(i)));
      assertThat(q).containsExactlyElementsIn(contents);
      assertEquals(next, q.poll());
      contents.remove(next);
      assertThat(q).containsExactlyElementsIn(contents);
    }
    elements.add(next);
  }
  assertEquals(expected, elements);
}
/** Regression test for https://github.com/google/guava/issues/2658 */
public void testRemoveRegression() {
  MinMaxPriorityQueue<Long> queue =
      MinMaxPriorityQueue.create(ImmutableList.of(2L, 3L, 0L, 4L, 1L));
  queue.remove(4L);
  queue.remove(1L);
  // the second remove must not leave a stale copy of 1L behind
  assertThat(queue).doesNotContain(1L);
}
public void testRandomRemoves() {
Random random = new Random(0);
for (int attempts = 0; attempts < reduceIterationsIfGwt(1000); attempts++) {
ArrayList<Integer> elements = createOrderedList(10);
shuffle(elements, random);
MinMaxPriorityQueue<Integer> queue = MinMaxPriorityQueue.create(elements);
shuffle(elements, random);
for (Integer element : elements) {
assertThat(queue.remove(element)).isTrue();
assertIntact(queue);
assertThat(queue).doesNotContain(element);
}
assertThat(queue).isEmpty();
}
}
  /**
   * Repeatedly adds batches of 100 random ints, then walks the queue's iterator removing a random
   * subset, checking after each round that the queue is intact and matches a reference multiset.
   */
  public void testRandomAddsAndRemoves() {
    Random random = new Random(0);
    Multiset<Integer> elements = HashMultiset.create();
    MinMaxPriorityQueue<Integer> queue = MinMaxPriorityQueue.create();
    int range = 10_000; // range should be small enough that equal elements occur semi-frequently
    for (int iter = 0; iter < reduceIterationsIfGwt(1000); iter++) {
      for (int i = 0; i < 100; i++) {
        Integer element = random.nextInt(range);
        elements.add(element);
        queue.add(element);
      }
      Iterator<Integer> queueIterator = queue.iterator();
      int remaining = queue.size();
      while (queueIterator.hasNext()) {
        Integer element = queueIterator.next();
        remaining--;
        assertThat(elements).contains(element);
        // Remove roughly half the elements through the iterator, keeping the
        // reference multiset in sync.
        if (random.nextBoolean()) {
          elements.remove(element);
          queueIterator.remove();
        }
      }
      // The iterator must have visited every element exactly once.
      assertThat(remaining).isEqualTo(0);
      assertIntact(queue);
      assertThat(queue).containsExactlyElementsIn(elements);
    }
  }
  // Deliberately tiny domain (five values) so that random draws collide and
  // the queue under test holds many duplicate elements.
  private enum Element {
    ONE,
    TWO,
    THREE,
    FOUR,
    FIVE;
  }
  /**
   * Same add/iterate/remove stress as {@code testRandomAddsAndRemoves}, but drawing from only the
   * five {@link Element} values, so the queue is dominated by duplicate elements.
   */
  public void testRandomAddsAndRemoves_duplicateElements() {
    Random random = new Random(0);
    Multiset<Element> elements = HashMultiset.create();
    MinMaxPriorityQueue<Element> queue = MinMaxPriorityQueue.create();
    int range = Element.values().length;
    for (int iter = 0; iter < reduceIterationsIfGwt(1000); iter++) {
      for (int i = 0; i < 100; i++) {
        Element element = Element.values()[random.nextInt(range)];
        elements.add(element);
        queue.add(element);
      }
      Iterator<Element> queueIterator = queue.iterator();
      int remaining = queue.size();
      while (queueIterator.hasNext()) {
        Element element = queueIterator.next();
        remaining--;
        assertThat(elements).contains(element);
        // Remove roughly half the elements through the iterator, keeping the
        // reference multiset in sync.
        if (random.nextBoolean()) {
          elements.remove(element);
          queueIterator.remove();
        }
      }
      // The iterator must have visited every element exactly once.
      assertThat(remaining).isEqualTo(0);
      assertIntact(queue);
      assertThat(queue).containsExactlyElementsIn(elements);
    }
  }
/** Returns the seed used for the randomization. */
private long insertRandomly(ArrayList<Integer> elements, MinMaxPriorityQueue<Integer> q) {
long seed = new Random().nextLong();
Random random = new Random(seed);
insertRandomly(elements, q, random);
return seed;
}
private static void insertRandomly(
ArrayList<Integer> elements, MinMaxPriorityQueue<Integer> q, Random random) {
while (!elements.isEmpty()) {
int selectedIndex = random.nextInt(elements.size());
q.offer(elements.remove(selectedIndex));
}
}
private ArrayList<Integer> createOrderedList(int size) {
ArrayList<Integer> elements = new ArrayList<>(size);
for (int i = 0; i < size; i++) {
elements.add(i);
}
return elements;
}
  /**
   * isEvenLevel(index) reports whether a heap index sits on an even ("min") level: index 0 is
   * level 0, indices 1-2 level 1, indices 3-6 level 2, and so on. Checks level boundaries around
   * powers of two and the rejection of indices whose level computation would overflow.
   */
  public void testIsEvenLevel() {
    assertTrue(MinMaxPriorityQueue.isEvenLevel(0));
    assertFalse(MinMaxPriorityQueue.isEvenLevel(1));
    assertFalse(MinMaxPriorityQueue.isEvenLevel(2));
    assertTrue(MinMaxPriorityQueue.isEvenLevel(3));
    assertFalse(MinMaxPriorityQueue.isEvenLevel((1 << 10) - 2));
    assertTrue(MinMaxPriorityQueue.isEvenLevel((1 << 10) - 1));
    int i = 1 << 29;
    assertTrue(MinMaxPriorityQueue.isEvenLevel(i - 2));
    assertFalse(MinMaxPriorityQueue.isEvenLevel(i - 1));
    assertFalse(MinMaxPriorityQueue.isEvenLevel(i));
    i = 1 << 30;
    assertFalse(MinMaxPriorityQueue.isEvenLevel(i - 2));
    assertTrue(MinMaxPriorityQueue.isEvenLevel(i - 1));
    assertTrue(MinMaxPriorityQueue.isEvenLevel(i));
    // 1 << 31 is negative because of overflow, 1 << 31 - 1 is positive
    // since isEvenLevel adds 1, we need to do - 2.
    assertTrue(MinMaxPriorityQueue.isEvenLevel((1 << 31) - 2));
    assertTrue(MinMaxPriorityQueue.isEvenLevel(Integer.MAX_VALUE - 1));
    // Indices at or beyond (1 << 31) - 1 (== Integer.MAX_VALUE) are rejected.
    assertThrows(IllegalStateException.class, () -> MinMaxPriorityQueue.isEvenLevel((1 << 31) - 1));
    assertThrows(
        IllegalStateException.class, () -> MinMaxPriorityQueue.isEvenLevel(Integer.MAX_VALUE));
    assertThrows(IllegalStateException.class, () -> MinMaxPriorityQueue.isEvenLevel(1 << 31));
    assertThrows(
        IllegalStateException.class, () -> MinMaxPriorityQueue.isEvenLevel(Integer.MIN_VALUE));
  }
  @J2ktIncompatible
  @GwtIncompatible // NullPointerTester
  public void testNullPointers() {
    // Reflectively verify that public constructors and methods reject null
    // arguments (checked for both static and instance methods).
    NullPointerTester tester = new NullPointerTester();
    tester.testAllPublicConstructors(MinMaxPriorityQueue.class);
    tester.testAllPublicStaticMethods(MinMaxPriorityQueue.class);
    tester.testAllPublicInstanceMethods(MinMaxPriorityQueue.<String>create());
  }
private static void insertIntoReplica(Map<Integer, AtomicInteger> replica, int newValue) {
if (replica.containsKey(newValue)) {
replica.get(newValue).incrementAndGet();
} else {
replica.put(newValue, new AtomicInteger(1));
}
}
private static void removeMinFromReplica(
SortedMap<Integer, AtomicInteger> replica, int minValue) {
Integer replicatedMinValue = replica.firstKey();
assertEquals(replicatedMinValue, (Integer) minValue);
removeFromReplica(replica, replicatedMinValue);
}
private static void removeMaxFromReplica(
SortedMap<Integer, AtomicInteger> replica, int maxValue) {
Integer replicatedMaxValue = replica.lastKey();
assertTrue("maxValue is incorrect", replicatedMaxValue == maxValue);
removeFromReplica(replica, replicatedMaxValue);
}
private static void removeFromReplica(Map<Integer, AtomicInteger> replica, int value) {
AtomicInteger numOccur = replica.get(value);
if (numOccur.decrementAndGet() == 0) {
replica.remove(value);
}
}
private static void assertIntact(MinMaxPriorityQueue<?> q) {
if (!q.isIntact()) {
fail("State " + Arrays.toString(q.toArray()));
}
}
private static void assertIntactUsingSeed(long seed, MinMaxPriorityQueue<?> q) {
if (!q.isIntact()) {
fail("Using seed " + seed + ". State " + Arrays.toString(q.toArray()));
}
}
private static void assertIntactUsingStartedWith(
Collection<?> startedWith, MinMaxPriorityQueue<?> q) {
if (!q.isIntact()) {
fail("Started with " + startedWith + ". State " + Arrays.toString(q.toArray()));
}
}
private static void assertEqualsUsingSeed(
long seed, @Nullable Object expected, @Nullable Object actual) {
if (!Objects.equals(actual, expected)) {
// fail(), but with the JUnit-supplied message.
assertEquals("Using seed " + seed, expected, actual);
}
}
private static void assertEqualsUsingStartedWith(
Collection<?> startedWith, @Nullable Object expected, @Nullable Object actual) {
if (!Objects.equals(actual, expected)) {
// fail(), but with the JUnit-supplied message.
assertEquals("Started with " + startedWith, expected, actual);
}
}
  // J2kt cannot translate the Comparable rawtype in a usable way (it becomes Comparable<Object>
  // but types are typically only Comparable to themselves).
  @SuppressWarnings({"rawtypes", "unchecked"})
  private static MinMaxPriorityQueue.Builder<Comparable<?>> rawtypeToWildcard(
      MinMaxPriorityQueue.Builder<Comparable> builder) {
    // Pure compile-time re-typing of the raw builder as a wildcard builder;
    // the object itself is untouched.
    return (MinMaxPriorityQueue.Builder) builder;
  }
}
|
googleapis/google-cloud-java | 36,700 | java-telcoautomation/proto-google-cloud-telcoautomation-v1/src/main/java/com/google/cloud/telcoautomation/v1/CreateBlueprintRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/telcoautomation/v1/telcoautomation.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.telcoautomation.v1;
/**
*
*
* <pre>
* Request object for `CreateBlueprint`.
* </pre>
*
* Protobuf type {@code google.cloud.telcoautomation.v1.CreateBlueprintRequest}
*/
public final class CreateBlueprintRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.telcoautomation.v1.CreateBlueprintRequest)
CreateBlueprintRequestOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use CreateBlueprintRequest.newBuilder() to construct.
  private CreateBlueprintRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // No-arg constructor used for fresh/default instances; proto3 string fields
  // start out as the empty string.
  private CreateBlueprintRequest() {
    parent_ = "";
    blueprintId_ = "";
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    // Hook used by the protobuf runtime to allocate new instances.
    return new CreateBlueprintRequest();
  }
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.telcoautomation.v1.TelcoautomationProto
.internal_static_google_cloud_telcoautomation_v1_CreateBlueprintRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.telcoautomation.v1.TelcoautomationProto
.internal_static_google_cloud_telcoautomation_v1_CreateBlueprintRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.telcoautomation.v1.CreateBlueprintRequest.class,
com.google.cloud.telcoautomation.v1.CreateBlueprintRequest.Builder.class);
}
private int bitField0_;
public static final int PARENT_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The name of parent resource.
* Format should be -
* "projects/{project_id}/locations/{location_name}/orchestrationClusters/{orchestration_cluster}".
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
@java.lang.Override
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. The name of parent resource.
* Format should be -
* "projects/{project_id}/locations/{location_name}/orchestrationClusters/{orchestration_cluster}".
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int BLUEPRINT_ID_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object blueprintId_ = "";
/**
*
*
* <pre>
* Optional. The name of the blueprint.
* </pre>
*
* <code>string blueprint_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The blueprintId.
*/
@java.lang.Override
public java.lang.String getBlueprintId() {
java.lang.Object ref = blueprintId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
blueprintId_ = s;
return s;
}
}
/**
*
*
* <pre>
* Optional. The name of the blueprint.
* </pre>
*
* <code>string blueprint_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for blueprintId.
*/
@java.lang.Override
public com.google.protobuf.ByteString getBlueprintIdBytes() {
java.lang.Object ref = blueprintId_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
blueprintId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int BLUEPRINT_FIELD_NUMBER = 3;
private com.google.cloud.telcoautomation.v1.Blueprint blueprint_;
/**
*
*
* <pre>
* Required. The `Blueprint` to create.
* </pre>
*
* <code>
* .google.cloud.telcoautomation.v1.Blueprint blueprint = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the blueprint field is set.
*/
@java.lang.Override
public boolean hasBlueprint() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The `Blueprint` to create.
* </pre>
*
* <code>
* .google.cloud.telcoautomation.v1.Blueprint blueprint = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The blueprint.
*/
@java.lang.Override
public com.google.cloud.telcoautomation.v1.Blueprint getBlueprint() {
return blueprint_ == null
? com.google.cloud.telcoautomation.v1.Blueprint.getDefaultInstance()
: blueprint_;
}
/**
*
*
* <pre>
* Required. The `Blueprint` to create.
* </pre>
*
* <code>
* .google.cloud.telcoautomation.v1.Blueprint blueprint = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.telcoautomation.v1.BlueprintOrBuilder getBlueprintOrBuilder() {
return blueprint_ == null
? com.google.cloud.telcoautomation.v1.Blueprint.getDefaultInstance()
: blueprint_;
}
  // Cached result of isInitialized(): -1 = not yet computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No validation is performed here, so the answer is always true once cached.
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // proto3 semantics: string fields are serialized only when non-empty.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(blueprintId_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, blueprintId_);
    }
    // The blueprint sub-message is written only when explicitly set (bitField0_).
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(3, getBlueprint());
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // Size is computed once and memoized; -1 means "not yet computed".
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    // Mirrors writeTo: only fields that writeTo would emit are counted.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(blueprintId_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, blueprintId_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getBlueprint());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.telcoautomation.v1.CreateBlueprintRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.telcoautomation.v1.CreateBlueprintRequest other =
        (com.google.cloud.telcoautomation.v1.CreateBlueprintRequest) obj;
    // Field-by-field structural comparison; the blueprint sub-message is only
    // compared when both sides agree it is set.
    if (!getParent().equals(other.getParent())) return false;
    if (!getBlueprintId().equals(other.getBlueprintId())) return false;
    if (hasBlueprint() != other.hasBlueprint()) return false;
    if (hasBlueprint()) {
      if (!getBlueprint().equals(other.getBlueprint())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // Hash is memoized; 0 doubles as the "not yet computed" sentinel.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    hash = (37 * hash) + BLUEPRINT_ID_FIELD_NUMBER;
    hash = (53 * hash) + getBlueprintId().hashCode();
    // The optional sub-message contributes only when set, mirroring equals().
    if (hasBlueprint()) {
      hash = (37 * hash) + BLUEPRINT_FIELD_NUMBER;
      hash = (53 * hash) + getBlueprint().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parsing entry points: each overload delegates to PARSER
  // (or to the GeneratedMessageV3 I/O helpers), differing only in the input
  // source and the optional extension registry.
  public static com.google.cloud.telcoautomation.v1.CreateBlueprintRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.telcoautomation.v1.CreateBlueprintRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.telcoautomation.v1.CreateBlueprintRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.telcoautomation.v1.CreateBlueprintRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.telcoautomation.v1.CreateBlueprintRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.telcoautomation.v1.CreateBlueprintRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.telcoautomation.v1.CreateBlueprintRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.telcoautomation.v1.CreateBlueprintRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Delimited variants read a varint length prefix before the message bytes.
  public static com.google.cloud.telcoautomation.v1.CreateBlueprintRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.telcoautomation.v1.CreateBlueprintRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.telcoautomation.v1.CreateBlueprintRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.telcoautomation.v1.CreateBlueprintRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  // Builders are derived from (a copy of) the immutable default instance.
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(
      com.google.cloud.telcoautomation.v1.CreateBlueprintRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // The default instance contributes nothing, so it maps to a fresh Builder
    // without the mergeFrom step.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Request object for `CreateBlueprint`.
* </pre>
*
* Protobuf type {@code google.cloud.telcoautomation.v1.CreateBlueprintRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.telcoautomation.v1.CreateBlueprintRequest)
com.google.cloud.telcoautomation.v1.CreateBlueprintRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.telcoautomation.v1.TelcoautomationProto
.internal_static_google_cloud_telcoautomation_v1_CreateBlueprintRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.telcoautomation.v1.TelcoautomationProto
.internal_static_google_cloud_telcoautomation_v1_CreateBlueprintRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.telcoautomation.v1.CreateBlueprintRequest.class,
com.google.cloud.telcoautomation.v1.CreateBlueprintRequest.Builder.class);
}
// Construct using com.google.cloud.telcoautomation.v1.CreateBlueprintRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getBlueprintFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
parent_ = "";
blueprintId_ = "";
blueprint_ = null;
if (blueprintBuilder_ != null) {
blueprintBuilder_.dispose();
blueprintBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.telcoautomation.v1.TelcoautomationProto
.internal_static_google_cloud_telcoautomation_v1_CreateBlueprintRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.telcoautomation.v1.CreateBlueprintRequest getDefaultInstanceForType() {
return com.google.cloud.telcoautomation.v1.CreateBlueprintRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.telcoautomation.v1.CreateBlueprintRequest build() {
com.google.cloud.telcoautomation.v1.CreateBlueprintRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.telcoautomation.v1.CreateBlueprintRequest buildPartial() {
com.google.cloud.telcoautomation.v1.CreateBlueprintRequest result =
new com.google.cloud.telcoautomation.v1.CreateBlueprintRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.telcoautomation.v1.CreateBlueprintRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.parent_ = parent_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.blueprintId_ = blueprintId_;
}
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000004) != 0)) {
result.blueprint_ = blueprintBuilder_ == null ? blueprint_ : blueprintBuilder_.build();
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.telcoautomation.v1.CreateBlueprintRequest) {
return mergeFrom((com.google.cloud.telcoautomation.v1.CreateBlueprintRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.telcoautomation.v1.CreateBlueprintRequest other) {
if (other == com.google.cloud.telcoautomation.v1.CreateBlueprintRequest.getDefaultInstance())
return this;
if (!other.getParent().isEmpty()) {
parent_ = other.parent_;
bitField0_ |= 0x00000001;
onChanged();
}
if (!other.getBlueprintId().isEmpty()) {
blueprintId_ = other.blueprintId_;
bitField0_ |= 0x00000002;
onChanged();
}
if (other.hasBlueprint()) {
mergeBlueprint(other.getBlueprint());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
parent_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
blueprintId_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
case 26:
{
input.readMessage(getBlueprintFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000004;
break;
} // case 26
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The name of parent resource.
* Format should be -
* "projects/{project_id}/locations/{location_name}/orchestrationClusters/{orchestration_cluster}".
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The name of parent resource.
* Format should be -
* "projects/{project_id}/locations/{location_name}/orchestrationClusters/{orchestration_cluster}".
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The name of parent resource.
* Format should be -
* "projects/{project_id}/locations/{location_name}/orchestrationClusters/{orchestration_cluster}".
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
public Builder setParent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The name of parent resource.
* Format should be -
* "projects/{project_id}/locations/{location_name}/orchestrationClusters/{orchestration_cluster}".
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearParent() {
parent_ = getDefaultInstance().getParent();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The name of parent resource.
* Format should be -
* "projects/{project_id}/locations/{location_name}/orchestrationClusters/{orchestration_cluster}".
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
*/
public Builder setParentBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private java.lang.Object blueprintId_ = "";
/**
*
*
* <pre>
* Optional. The name of the blueprint.
* </pre>
*
* <code>string blueprint_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The blueprintId.
*/
public java.lang.String getBlueprintId() {
java.lang.Object ref = blueprintId_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
blueprintId_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Optional. The name of the blueprint.
* </pre>
*
* <code>string blueprint_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for blueprintId.
*/
public com.google.protobuf.ByteString getBlueprintIdBytes() {
java.lang.Object ref = blueprintId_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
blueprintId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Optional. The name of the blueprint.
* </pre>
*
* <code>string blueprint_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The blueprintId to set.
* @return This builder for chaining.
*/
public Builder setBlueprintId(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
blueprintId_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. The name of the blueprint.
* </pre>
*
* <code>string blueprint_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearBlueprintId() {
blueprintId_ = getDefaultInstance().getBlueprintId();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. The name of the blueprint.
* </pre>
*
* <code>string blueprint_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The bytes for blueprintId to set.
* @return This builder for chaining.
*/
public Builder setBlueprintIdBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
blueprintId_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private com.google.cloud.telcoautomation.v1.Blueprint blueprint_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.telcoautomation.v1.Blueprint,
com.google.cloud.telcoautomation.v1.Blueprint.Builder,
com.google.cloud.telcoautomation.v1.BlueprintOrBuilder>
blueprintBuilder_;
/**
*
*
* <pre>
* Required. The `Blueprint` to create.
* </pre>
*
* <code>
* .google.cloud.telcoautomation.v1.Blueprint blueprint = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the blueprint field is set.
*/
public boolean hasBlueprint() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
*
*
* <pre>
* Required. The `Blueprint` to create.
* </pre>
*
* <code>
* .google.cloud.telcoautomation.v1.Blueprint blueprint = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The blueprint.
*/
public com.google.cloud.telcoautomation.v1.Blueprint getBlueprint() {
if (blueprintBuilder_ == null) {
return blueprint_ == null
? com.google.cloud.telcoautomation.v1.Blueprint.getDefaultInstance()
: blueprint_;
} else {
return blueprintBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. The `Blueprint` to create.
* </pre>
*
* <code>
* .google.cloud.telcoautomation.v1.Blueprint blueprint = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setBlueprint(com.google.cloud.telcoautomation.v1.Blueprint value) {
if (blueprintBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
blueprint_ = value;
} else {
blueprintBuilder_.setMessage(value);
}
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The `Blueprint` to create.
* </pre>
*
* <code>
* .google.cloud.telcoautomation.v1.Blueprint blueprint = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setBlueprint(
com.google.cloud.telcoautomation.v1.Blueprint.Builder builderForValue) {
if (blueprintBuilder_ == null) {
blueprint_ = builderForValue.build();
} else {
blueprintBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The `Blueprint` to create.
* </pre>
*
* <code>
* .google.cloud.telcoautomation.v1.Blueprint blueprint = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeBlueprint(com.google.cloud.telcoautomation.v1.Blueprint value) {
if (blueprintBuilder_ == null) {
if (((bitField0_ & 0x00000004) != 0)
&& blueprint_ != null
&& blueprint_ != com.google.cloud.telcoautomation.v1.Blueprint.getDefaultInstance()) {
getBlueprintBuilder().mergeFrom(value);
} else {
blueprint_ = value;
}
} else {
blueprintBuilder_.mergeFrom(value);
}
if (blueprint_ != null) {
bitField0_ |= 0x00000004;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. The `Blueprint` to create.
* </pre>
*
* <code>
* .google.cloud.telcoautomation.v1.Blueprint blueprint = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearBlueprint() {
bitField0_ = (bitField0_ & ~0x00000004);
blueprint_ = null;
if (blueprintBuilder_ != null) {
blueprintBuilder_.dispose();
blueprintBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The `Blueprint` to create.
* </pre>
*
* <code>
* .google.cloud.telcoautomation.v1.Blueprint blueprint = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.telcoautomation.v1.Blueprint.Builder getBlueprintBuilder() {
bitField0_ |= 0x00000004;
onChanged();
return getBlueprintFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. The `Blueprint` to create.
* </pre>
*
* <code>
* .google.cloud.telcoautomation.v1.Blueprint blueprint = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.telcoautomation.v1.BlueprintOrBuilder getBlueprintOrBuilder() {
if (blueprintBuilder_ != null) {
return blueprintBuilder_.getMessageOrBuilder();
} else {
return blueprint_ == null
? com.google.cloud.telcoautomation.v1.Blueprint.getDefaultInstance()
: blueprint_;
}
}
/**
*
*
* <pre>
* Required. The `Blueprint` to create.
* </pre>
*
* <code>
* .google.cloud.telcoautomation.v1.Blueprint blueprint = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.telcoautomation.v1.Blueprint,
com.google.cloud.telcoautomation.v1.Blueprint.Builder,
com.google.cloud.telcoautomation.v1.BlueprintOrBuilder>
getBlueprintFieldBuilder() {
if (blueprintBuilder_ == null) {
blueprintBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.telcoautomation.v1.Blueprint,
com.google.cloud.telcoautomation.v1.Blueprint.Builder,
com.google.cloud.telcoautomation.v1.BlueprintOrBuilder>(
getBlueprint(), getParentForChildren(), isClean());
blueprint_ = null;
}
return blueprintBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.telcoautomation.v1.CreateBlueprintRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.telcoautomation.v1.CreateBlueprintRequest)
private static final com.google.cloud.telcoautomation.v1.CreateBlueprintRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.telcoautomation.v1.CreateBlueprintRequest();
}
public static com.google.cloud.telcoautomation.v1.CreateBlueprintRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<CreateBlueprintRequest> PARSER =
new com.google.protobuf.AbstractParser<CreateBlueprintRequest>() {
@java.lang.Override
public CreateBlueprintRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<CreateBlueprintRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<CreateBlueprintRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.telcoautomation.v1.CreateBlueprintRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 36,530 | java-batch/proto-google-cloud-batch-v1alpha/src/main/java/com/google/cloud/batch/v1alpha/ListJobsRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/batch/v1alpha/batch.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.batch.v1alpha;
/**
*
*
* <pre>
* ListJob Request.
* </pre>
*
* Protobuf type {@code google.cloud.batch.v1alpha.ListJobsRequest}
*/
public final class ListJobsRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.batch.v1alpha.ListJobsRequest)
ListJobsRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListJobsRequest.newBuilder() to construct.
private ListJobsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListJobsRequest() {
parent_ = "";
filter_ = "";
orderBy_ = "";
pageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListJobsRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.batch.v1alpha.BatchProto
.internal_static_google_cloud_batch_v1alpha_ListJobsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.batch.v1alpha.BatchProto
.internal_static_google_cloud_batch_v1alpha_ListJobsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.batch.v1alpha.ListJobsRequest.class,
com.google.cloud.batch.v1alpha.ListJobsRequest.Builder.class);
}
public static final int PARENT_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Parent path.
* </pre>
*
* <code>string parent = 1;</code>
*
* @return The parent.
*/
@java.lang.Override
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
}
}
/**
*
*
* <pre>
* Parent path.
* </pre>
*
* <code>string parent = 1;</code>
*
* @return The bytes for parent.
*/
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int FILTER_FIELD_NUMBER = 4;
@SuppressWarnings("serial")
private volatile java.lang.Object filter_ = "";
/**
*
*
* <pre>
* List filter.
* </pre>
*
* <code>string filter = 4;</code>
*
* @return The filter.
*/
@java.lang.Override
public java.lang.String getFilter() {
java.lang.Object ref = filter_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
filter_ = s;
return s;
}
}
/**
*
*
* <pre>
* List filter.
* </pre>
*
* <code>string filter = 4;</code>
*
* @return The bytes for filter.
*/
@java.lang.Override
public com.google.protobuf.ByteString getFilterBytes() {
java.lang.Object ref = filter_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
filter_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int ORDER_BY_FIELD_NUMBER = 5;
@SuppressWarnings("serial")
private volatile java.lang.Object orderBy_ = "";
/**
*
*
* <pre>
* Optional. Sort results. Supported are "name", "name desc", "create_time",
* and "create_time desc".
* </pre>
*
* <code>string order_by = 5 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The orderBy.
*/
@java.lang.Override
public java.lang.String getOrderBy() {
java.lang.Object ref = orderBy_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
orderBy_ = s;
return s;
}
}
/**
*
*
* <pre>
* Optional. Sort results. Supported are "name", "name desc", "create_time",
* and "create_time desc".
* </pre>
*
* <code>string order_by = 5 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for orderBy.
*/
@java.lang.Override
public com.google.protobuf.ByteString getOrderByBytes() {
java.lang.Object ref = orderBy_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
orderBy_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int PAGE_SIZE_FIELD_NUMBER = 2;
private int pageSize_ = 0;
/**
*
*
* <pre>
* Page size.
* </pre>
*
* <code>int32 page_size = 2;</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
public static final int PAGE_TOKEN_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* Page token.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return The pageToken.
*/
@java.lang.Override
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* Page token.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return The bytes for pageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
}
if (pageSize_ != 0) {
output.writeInt32(2, pageSize_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 4, filter_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(orderBy_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 5, orderBy_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
}
if (pageSize_ != 0) {
size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, filter_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(orderBy_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, orderBy_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.batch.v1alpha.ListJobsRequest)) {
return super.equals(obj);
}
com.google.cloud.batch.v1alpha.ListJobsRequest other =
(com.google.cloud.batch.v1alpha.ListJobsRequest) obj;
if (!getParent().equals(other.getParent())) return false;
if (!getFilter().equals(other.getFilter())) return false;
if (!getOrderBy().equals(other.getOrderBy())) return false;
if (getPageSize() != other.getPageSize()) return false;
if (!getPageToken().equals(other.getPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + PARENT_FIELD_NUMBER;
hash = (53 * hash) + getParent().hashCode();
hash = (37 * hash) + FILTER_FIELD_NUMBER;
hash = (53 * hash) + getFilter().hashCode();
hash = (37 * hash) + ORDER_BY_FIELD_NUMBER;
hash = (53 * hash) + getOrderBy().hashCode();
hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
hash = (53 * hash) + getPageSize();
hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.batch.v1alpha.ListJobsRequest parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.batch.v1alpha.ListJobsRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.batch.v1alpha.ListJobsRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.batch.v1alpha.ListJobsRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.batch.v1alpha.ListJobsRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.batch.v1alpha.ListJobsRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.batch.v1alpha.ListJobsRequest parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.batch.v1alpha.ListJobsRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.batch.v1alpha.ListJobsRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.batch.v1alpha.ListJobsRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.batch.v1alpha.ListJobsRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.batch.v1alpha.ListJobsRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.batch.v1alpha.ListJobsRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* ListJob Request.
* </pre>
*
* Protobuf type {@code google.cloud.batch.v1alpha.ListJobsRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.batch.v1alpha.ListJobsRequest)
com.google.cloud.batch.v1alpha.ListJobsRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.batch.v1alpha.BatchProto
.internal_static_google_cloud_batch_v1alpha_ListJobsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.batch.v1alpha.BatchProto
.internal_static_google_cloud_batch_v1alpha_ListJobsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.batch.v1alpha.ListJobsRequest.class,
com.google.cloud.batch.v1alpha.ListJobsRequest.Builder.class);
}
// Construct using com.google.cloud.batch.v1alpha.ListJobsRequest.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
parent_ = "";
filter_ = "";
orderBy_ = "";
pageSize_ = 0;
pageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.batch.v1alpha.BatchProto
.internal_static_google_cloud_batch_v1alpha_ListJobsRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.batch.v1alpha.ListJobsRequest getDefaultInstanceForType() {
return com.google.cloud.batch.v1alpha.ListJobsRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.batch.v1alpha.ListJobsRequest build() {
com.google.cloud.batch.v1alpha.ListJobsRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.batch.v1alpha.ListJobsRequest buildPartial() {
com.google.cloud.batch.v1alpha.ListJobsRequest result =
new com.google.cloud.batch.v1alpha.ListJobsRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.batch.v1alpha.ListJobsRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.parent_ = parent_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.filter_ = filter_;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.orderBy_ = orderBy_;
}
if (((from_bitField0_ & 0x00000008) != 0)) {
result.pageSize_ = pageSize_;
}
if (((from_bitField0_ & 0x00000010) != 0)) {
result.pageToken_ = pageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.batch.v1alpha.ListJobsRequest) {
return mergeFrom((com.google.cloud.batch.v1alpha.ListJobsRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.batch.v1alpha.ListJobsRequest other) {
if (other == com.google.cloud.batch.v1alpha.ListJobsRequest.getDefaultInstance()) return this;
if (!other.getParent().isEmpty()) {
parent_ = other.parent_;
bitField0_ |= 0x00000001;
onChanged();
}
if (!other.getFilter().isEmpty()) {
filter_ = other.filter_;
bitField0_ |= 0x00000002;
onChanged();
}
if (!other.getOrderBy().isEmpty()) {
orderBy_ = other.orderBy_;
bitField0_ |= 0x00000004;
onChanged();
}
if (other.getPageSize() != 0) {
setPageSize(other.getPageSize());
}
if (!other.getPageToken().isEmpty()) {
pageToken_ = other.pageToken_;
bitField0_ |= 0x00000010;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
parent_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 16:
{
pageSize_ = input.readInt32();
bitField0_ |= 0x00000008;
break;
} // case 16
case 26:
{
pageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000010;
break;
} // case 26
case 34:
{
filter_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 34
case 42:
{
orderBy_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000004;
break;
} // case 42
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Parent path.
* </pre>
*
* <code>string parent = 1;</code>
*
* @return The parent.
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Parent path.
* </pre>
*
* <code>string parent = 1;</code>
*
* @return The bytes for parent.
*/
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Parent path.
* </pre>
*
* <code>string parent = 1;</code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
public Builder setParent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Parent path.
* </pre>
*
* <code>string parent = 1;</code>
*
* @return This builder for chaining.
*/
public Builder clearParent() {
parent_ = getDefaultInstance().getParent();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Parent path.
* </pre>
*
* <code>string parent = 1;</code>
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
*/
public Builder setParentBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private java.lang.Object filter_ = "";
/**
*
*
* <pre>
* List filter.
* </pre>
*
* <code>string filter = 4;</code>
*
* @return The filter.
*/
public java.lang.String getFilter() {
java.lang.Object ref = filter_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
filter_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* List filter.
* </pre>
*
* <code>string filter = 4;</code>
*
* @return The bytes for filter.
*/
public com.google.protobuf.ByteString getFilterBytes() {
java.lang.Object ref = filter_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
filter_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* List filter.
* </pre>
*
* <code>string filter = 4;</code>
*
* @param value The filter to set.
* @return This builder for chaining.
*/
public Builder setFilter(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
filter_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* List filter.
* </pre>
*
* <code>string filter = 4;</code>
*
* @return This builder for chaining.
*/
public Builder clearFilter() {
filter_ = getDefaultInstance().getFilter();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* List filter.
* </pre>
*
* <code>string filter = 4;</code>
*
* @param value The bytes for filter to set.
* @return This builder for chaining.
*/
public Builder setFilterBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
filter_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private java.lang.Object orderBy_ = "";
/**
*
*
* <pre>
* Optional. Sort results. Supported are "name", "name desc", "create_time",
* and "create_time desc".
* </pre>
*
* <code>string order_by = 5 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The orderBy.
*/
public java.lang.String getOrderBy() {
java.lang.Object ref = orderBy_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
orderBy_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Optional. Sort results. Supported are "name", "name desc", "create_time",
* and "create_time desc".
* </pre>
*
* <code>string order_by = 5 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for orderBy.
*/
public com.google.protobuf.ByteString getOrderByBytes() {
java.lang.Object ref = orderBy_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
orderBy_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
 *
 *
 * <pre>
 * Optional. Sort results. Supported are "name", "name desc", "create_time",
 * and "create_time desc".
 * </pre>
 *
 * <code>string order_by = 5 [(.google.api.field_behavior) = OPTIONAL];</code>
 *
 * @param value The orderBy to set.
 * @return This builder for chaining.
 */
public Builder setOrderBy(java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  orderBy_ = value;
  // Bit 0x00000004 marks order_by as explicitly set in this builder.
  bitField0_ |= 0x00000004;
  onChanged();
  return this;
}
/**
 *
 *
 * <pre>
 * Optional. Sort results. Supported are "name", "name desc", "create_time",
 * and "create_time desc".
 * </pre>
 *
 * <code>string order_by = 5 [(.google.api.field_behavior) = OPTIONAL];</code>
 *
 * @return This builder for chaining.
 */
public Builder clearOrderBy() {
  // Reset to the proto3 default ("") and clear the has-bit.
  orderBy_ = getDefaultInstance().getOrderBy();
  bitField0_ = (bitField0_ & ~0x00000004);
  onChanged();
  return this;
}
/**
 *
 *
 * <pre>
 * Optional. Sort results. Supported are "name", "name desc", "create_time",
 * and "create_time desc".
 * </pre>
 *
 * <code>string order_by = 5 [(.google.api.field_behavior) = OPTIONAL];</code>
 *
 * @param value The bytes for orderBy to set.
 * @return This builder for chaining.
 */
public Builder setOrderByBytes(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  // proto3 strings must be valid UTF-8; reject invalid byte sequences early.
  checkByteStringIsUtf8(value);
  orderBy_ = value;
  bitField0_ |= 0x00000004;
  onChanged();
  return this;
}
// Backing field for `int32 page_size = 2`; 0 is the proto3 default.
private int pageSize_;
/**
 *
 *
 * <pre>
 * Page size.
 * </pre>
 *
 * <code>int32 page_size = 2;</code>
 *
 * @return The pageSize.
 */
@java.lang.Override
public int getPageSize() {
  return pageSize_;
}
/**
 *
 *
 * <pre>
 * Page size.
 * </pre>
 *
 * <code>int32 page_size = 2;</code>
 *
 * @param value The pageSize to set.
 * @return This builder for chaining.
 */
public Builder setPageSize(int value) {
  pageSize_ = value;
  // Bit 0x00000008 marks page_size as explicitly set in this builder.
  bitField0_ |= 0x00000008;
  onChanged();
  return this;
}
/**
 *
 *
 * <pre>
 * Page size.
 * </pre>
 *
 * <code>int32 page_size = 2;</code>
 *
 * @return This builder for chaining.
 */
public Builder clearPageSize() {
  // Clear the has-bit and restore the proto3 default (0).
  bitField0_ = (bitField0_ & ~0x00000008);
  pageSize_ = 0;
  onChanged();
  return this;
}
// Backing field for `string page_token = 3`. Holds either a java.lang.String or a
// lazily-decoded ByteString; the accessors below cache the converted form.
private java.lang.Object pageToken_ = "";
/**
 *
 *
 * <pre>
 * Page token.
 * </pre>
 *
 * <code>string page_token = 3;</code>
 *
 * @return The pageToken.
 */
public java.lang.String getPageToken() {
  java.lang.Object ref = pageToken_;
  if (!(ref instanceof java.lang.String)) {
    // Still in wire form: decode UTF-8 once and cache the String.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    pageToken_ = s;
    return s;
  } else {
    return (java.lang.String) ref;
  }
}
/**
 *
 *
 * <pre>
 * Page token.
 * </pre>
 *
 * <code>string page_token = 3;</code>
 *
 * @return The bytes for pageToken.
 */
public com.google.protobuf.ByteString getPageTokenBytes() {
  java.lang.Object ref = pageToken_;
  if (ref instanceof String) {
    // Cached as a String: encode once and cache the ByteString form.
    com.google.protobuf.ByteString b =
      com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    pageToken_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
/**
 *
 *
 * <pre>
 * Page token.
 * </pre>
 *
 * <code>string page_token = 3;</code>
 *
 * @param value The pageToken to set.
 * @return This builder for chaining.
 */
public Builder setPageToken(java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  pageToken_ = value;
  // Bit 0x00000010 marks page_token as explicitly set in this builder.
  bitField0_ |= 0x00000010;
  onChanged();
  return this;
}
/**
 *
 *
 * <pre>
 * Page token.
 * </pre>
 *
 * <code>string page_token = 3;</code>
 *
 * @return This builder for chaining.
 */
public Builder clearPageToken() {
  // Reset to the proto3 default ("") and clear the has-bit.
  pageToken_ = getDefaultInstance().getPageToken();
  bitField0_ = (bitField0_ & ~0x00000010);
  onChanged();
  return this;
}
/**
 *
 *
 * <pre>
 * Page token.
 * </pre>
 *
 * <code>string page_token = 3;</code>
 *
 * @param value The bytes for pageToken to set.
 * @return This builder for chaining.
 */
public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  // proto3 strings must be valid UTF-8; reject invalid byte sequences early.
  checkByteStringIsUtf8(value);
  pageToken_ = value;
  bitField0_ |= 0x00000010;
  onChanged();
  return this;
}
// Generated pass-throughs that pin unknown-field handling to the superclass
// implementation (preserves unrecognized wire data across round-trips).
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.batch.v1alpha.ListJobsRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.batch.v1alpha.ListJobsRequest)
// Singleton default instance: an immutable message with every field at its
// proto3 default, shared by all callers of getDefaultInstance().
private static final com.google.cloud.batch.v1alpha.ListJobsRequest DEFAULT_INSTANCE;
static {
  DEFAULT_INSTANCE = new com.google.cloud.batch.v1alpha.ListJobsRequest();
}
public static com.google.cloud.batch.v1alpha.ListJobsRequest getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Wire-format parser. On any failure the partially-built message is attached to
// the thrown InvalidProtocolBufferException so callers can inspect what parsed.
private static final com.google.protobuf.Parser<ListJobsRequest> PARSER =
  new com.google.protobuf.AbstractParser<ListJobsRequest>() {
    @java.lang.Override
    public ListJobsRequest parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      Builder builder = newBuilder();
      try {
        builder.mergeFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(builder.buildPartial());
      } catch (com.google.protobuf.UninitializedMessageException e) {
        throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
      } catch (java.io.IOException e) {
        // Wrap plain I/O errors in the protobuf exception type expected by callers.
        throw new com.google.protobuf.InvalidProtocolBufferException(e)
          .setUnfinishedMessage(builder.buildPartial());
      }
      return builder.buildPartial();
    }
  };
public static com.google.protobuf.Parser<ListJobsRequest> parser() {
  return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListJobsRequest> getParserForType() {
  return PARSER;
}
@java.lang.Override
public com.google.cloud.batch.v1alpha.ListJobsRequest getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
|
apache/hbase | 37,029 | hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.snapshot;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.TableNotEnabledException;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.BufferedMutator;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.RegionInfoBuilder;
import org.apache.hadoop.hbase.client.RegionReplicaUtil;
import org.apache.hadoop.hbase.client.SnapshotDescription;
import org.apache.hadoop.hbase.client.SnapshotType;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher;
import org.apache.hadoop.hbase.io.HFileLink;
import org.apache.hadoop.hbase.master.HMaster;
import org.apache.hadoop.hbase.master.MasterFileSystem;
import org.apache.hadoop.hbase.mob.MobUtils;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.HRegionFileSystem;
import org.apache.hadoop.hbase.regionserver.HRegionServer;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.CommonFSUtils;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.FSTableDescriptors;
import org.apache.hadoop.hbase.util.FSVisitor;
import org.apache.hadoop.hbase.util.MD5Hash;
import org.apache.yetus.audience.InterfaceAudience;
import org.junit.Assert;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;
/**
* Utilities class for snapshots
*/
@InterfaceAudience.Private
public final class SnapshotTestingUtils {
private static final Logger LOG = LoggerFactory.getLogger(SnapshotTestingUtils.class);
// default number of regions (and keys) given by getSplitKeys() and createTable()
// NOTE(review): KEYS is never reassigned in the visible code — looks like it
// could be `static final`; confirm no test mutates it before tightening.
private static byte[] KEYS = Bytes.toBytes("0123456");
private SnapshotTestingUtils() {
  // private constructor for utility class
}
/**
 * Assert that the cluster currently holds no snapshots at all.
 */
public static void assertNoSnapshots(Admin admin) throws IOException {
  int snapshotCount = admin.listSnapshots().size();
  assertEquals("Have some previous snapshots", 0, snapshotCount);
}
/**
 * Collect every snapshot whose name and table both match the given parameters,
 * asserting that at least one such snapshot exists.
 *
 * @return the (non-empty) list of matching snapshot descriptions
 */
public static List<SnapshotDescription> assertExistsMatchingSnapshot(Admin admin,
  String snapshotName, TableName tableName) throws IOException {
  List<SnapshotDescription> matches = new ArrayList<>();
  for (SnapshotDescription candidate : admin.listSnapshots()) {
    boolean sameName = snapshotName.equals(candidate.getName());
    boolean sameTable = tableName.equals(candidate.getTableName());
    if (sameName && sameTable) {
      matches.add(candidate);
    }
  }
  Assert.assertTrue("No matching snapshots found.", matches.size() > 0);
  return matches;
}
/**
 * Assert the master reports exactly one snapshot, matching the name and table
 * carried by the given protobuf snapshot description.
 */
public static void assertOneSnapshotThatMatches(Admin admin,
  SnapshotProtos.SnapshotDescription snapshot) throws IOException {
  TableName table = TableName.valueOf(snapshot.getTable());
  assertOneSnapshotThatMatches(admin, snapshot.getName(), table);
}
/**
 * Assert the master reports exactly one snapshot and that its name and table
 * match the passed-in parameters.
 *
 * @return the single-element snapshot list returned by the master
 */
public static List<SnapshotDescription> assertOneSnapshotThatMatches(Admin admin,
  String snapshotName, TableName tableName) throws IOException {
  List<SnapshotDescription> snapshots = admin.listSnapshots();
  assertEquals("Should only have 1 snapshot", 1, snapshots.size());
  SnapshotDescription only = snapshots.get(0);
  assertEquals(snapshotName, only.getName());
  assertEquals(tableName, only.getTableName());
  return snapshots;
}
/**
 * Byte-array convenience overload: decodes the snapshot name and delegates to
 * {@link #assertOneSnapshotThatMatches(Admin, String, TableName)}.
 */
public static List<SnapshotDescription> assertOneSnapshotThatMatches(Admin admin, byte[] snapshot,
  TableName tableName) throws IOException {
  String snapshotName = Bytes.toString(snapshot);
  return assertOneSnapshotThatMatches(admin, snapshotName, tableName);
}
/**
 * Cluster-based convenience overload: resolves root dir, admin, and filesystem
 * from the mini-cluster's master before validating the snapshot.
 */
public static void confirmSnapshotValid(HBaseTestingUtil testUtil,
  SnapshotProtos.SnapshotDescription snapshotDescriptor, TableName tableName, byte[] family)
  throws IOException {
  MasterFileSystem masterFs = testUtil.getHBaseCluster().getMaster().getMasterFileSystem();
  Path rootDir = masterFs.getRootDir();
  FileSystem fs = masterFs.getFileSystem();
  confirmSnapshotValid(snapshotDescriptor, tableName, family, rootDir, testUtil.getAdmin(), fs);
}
/**
 * Confirm that the snapshot contains references to all the files that should be in the snapshot,
 * requiring the given family to be non-empty.
 * <p>
 * Fix: the temporary list was a raw {@code ArrayList}, producing unchecked warnings; it is now
 * a properly parameterized {@code List<byte[]>} matching the delegate's signature.
 */
public static void confirmSnapshotValid(SnapshotProtos.SnapshotDescription snapshotDescriptor,
  TableName tableName, byte[] testFamily, Path rootDir, Admin admin, FileSystem fs)
  throws IOException {
  List<byte[]> nonEmptyTestFamilies = new ArrayList<>(1);
  nonEmptyTestFamilies.add(testFamily);
  confirmSnapshotValid(snapshotDescriptor, tableName, nonEmptyTestFamilies, null, rootDir, admin,
    fs);
}
/**
 * Confirm that the snapshot has no reference files for the given family — only metadata.
 * <p>
 * Fix: the temporary list was a raw {@code ArrayList}, producing unchecked warnings; it is now
 * a properly parameterized {@code List<byte[]>} matching the delegate's signature.
 */
public static void confirmEmptySnapshotValid(
  SnapshotProtos.SnapshotDescription snapshotDescriptor, TableName tableName, byte[] testFamily,
  Path rootDir, Admin admin, FileSystem fs) throws IOException {
  List<byte[]> emptyTestFamilies = new ArrayList<>(1);
  emptyTestFamilies.add(testFamily);
  confirmSnapshotValid(snapshotDescriptor, tableName, null, emptyTestFamilies, rootDir, admin,
    fs);
}
/**
 * Confirm that the snapshot contains references to all the files that should be in the snapshot.
 * This method also perform some redundant check like the existence of the snapshotinfo or the
 * regioninfo which are done always by the MasterSnapshotVerifier, at the end of the snapshot
 * operation.
 *
 * @param nonEmptyTestFamilies families that must have at least one store file in the snapshot
 *        (may be null to skip the check)
 * @param emptyTestFamilies families that must have NO store files in the snapshot
 *        (may be null to skip the check)
 */
public static void confirmSnapshotValid(SnapshotProtos.SnapshotDescription snapshotDescriptor,
  TableName tableName, List<byte[]> nonEmptyTestFamilies, List<byte[]> emptyTestFamilies,
  Path rootDir, Admin admin, FileSystem fs) throws IOException {
  final Configuration conf = admin.getConfiguration();
  // check snapshot dir
  Path snapshotDir =
    SnapshotDescriptionUtils.getCompletedSnapshotDir(snapshotDescriptor, rootDir);
  assertTrue("target snapshot directory, '" + snapshotDir + "', doesn't exist.",
    fs.exists(snapshotDir));
  SnapshotProtos.SnapshotDescription desc =
    SnapshotDescriptionUtils.readSnapshotInfo(fs, snapshotDir);
  // Extract regions and families with store files
  final Set<byte[]> snapshotFamilies = new TreeSet<>(Bytes.BYTES_COMPARATOR);
  SnapshotManifest manifest = SnapshotManifest.open(conf, fs, snapshotDir, desc);
  Map<String, SnapshotRegionManifest> regionManifests = manifest.getRegionManifestsMap();
  for (SnapshotRegionManifest regionManifest : regionManifests.values()) {
    SnapshotReferenceUtil.visitRegionStoreFiles(regionManifest,
      new SnapshotReferenceUtil.StoreFileVisitor() {
        @Override
        public void storeFile(final RegionInfo regionInfo, final String family,
          final SnapshotRegionManifest.StoreFile storeFile) throws IOException {
          // Record every family that owns at least one referenced store file.
          snapshotFamilies.add(Bytes.toBytes(family));
        }
      });
  }
  // Verify that there are store files in the specified families
  if (nonEmptyTestFamilies != null) {
    for (final byte[] familyName : nonEmptyTestFamilies) {
      assertTrue("Expected snapshot to contain family '" + Bytes.toString(familyName)
        + "', but it does not.", snapshotFamilies.contains(familyName));
    }
  }
  // Verify that there are no store files in the specified families
  if (emptyTestFamilies != null) {
    for (final byte[] familyName : emptyTestFamilies) {
      assertFalse("Expected snapshot to skip empty family '" + Bytes.toString(familyName)
        + "', but it is present.", snapshotFamilies.contains(familyName));
    }
  }
  // check the region snapshot for all the regions
  List<RegionInfo> regions = admin.getRegions(tableName);
  // remove the non-default regions
  RegionReplicaUtil.removeNonDefaultRegions(regions);
  // The mob region appears in the manifest under a synthetic encoded name; when
  // present, it accounts for the one extra manifest entry.
  boolean hasMob =
    regionManifests.containsKey(MobUtils.getMobRegionInfo(tableName).getEncodedName());
  if (hasMob) {
    assertEquals("Wrong number of regions.", regions.size(), regionManifests.size() - 1);
  } else {
    // if create snapshot when table splitting, parent region will be included to the snapshot
    // region manifest. we should exclude the parent regions.
    int regionCountExclusiveSplitParent = 0;
    for (SnapshotRegionManifest snapshotRegionManifest : regionManifests.values()) {
      RegionInfo hri = ProtobufUtil.toRegionInfo(snapshotRegionManifest.getRegionInfo());
      if (hri.isOffline() && (hri.isSplit() || hri.isSplitParent())) {
        continue;
      }
      regionCountExclusiveSplitParent++;
    }
    assertEquals("Wrong number of regions.", regions.size(), regionCountExclusiveSplitParent);
  }
  // Verify Regions (redundant check, see MasterSnapshotVerifier)
  for (RegionInfo info : regions) {
    String regionName = info.getEncodedName();
    assertTrue("Missing region name: '" + regionName + "'",
      regionManifests.containsKey(regionName));
  }
}
/*
 * Take snapshot with maximum of numTries attempts, ignoring CorruptedSnapshotException except for
 * the last CorruptedSnapshotException. Delegates to the properties-aware overload with no
 * snapshot properties.
 */
public static void snapshot(Admin admin, final String snapshotName, final TableName tableName,
  final SnapshotType type, final int numTries) throws IOException {
  Map<String, Object> noProps = null;
  snapshot(admin, snapshotName, tableName, type, numTries, noProps);
}
/**
 * Take a snapshot (with optional snapshot properties), retrying up to {@code numTries} times and
 * ignoring {@link CorruptedSnapshotException} except on the final attempt, whose exception is
 * rethrown.
 * <p>
 * Fix: previously a non-positive {@code numTries} skipped the loop and executed
 * {@code throw lastEx} with {@code lastEx == null}, surfacing an opaque NullPointerException.
 * A non-positive retry count is now rejected up front with a clear message.
 *
 * @throws IllegalArgumentException if {@code numTries} is not positive
 * @throws CorruptedSnapshotException if every attempt fails with corruption
 */
public static void snapshot(Admin admin, final String snapshotName, final TableName tableName,
  final SnapshotType type, final int numTries, Map<String, Object> snapshotProps)
  throws IOException {
  if (numTries <= 0) {
    throw new IllegalArgumentException("numTries must be positive, got " + numTries);
  }
  CorruptedSnapshotException lastEx = null;
  for (int attempt = 0; attempt < numTries; attempt++) {
    try {
      admin.snapshot(snapshotName, tableName, type, snapshotProps);
      return;
    } catch (CorruptedSnapshotException cse) {
      LOG.warn("Got CorruptedSnapshotException", cse);
      lastEx = cse;
    }
  }
  // Every attempt failed; lastEx is guaranteed non-null because numTries > 0.
  throw lastEx;
}
/** Byte-array convenience overload: decodes the name and delegates. */
public static void cleanupSnapshot(Admin admin, byte[] tableName) throws IOException {
  String name = Bytes.toString(tableName);
  SnapshotTestingUtils.cleanupSnapshot(admin, name);
}
/**
 * Delete the named snapshot and assert that no snapshots remain afterwards.
 */
public static void cleanupSnapshot(Admin admin, String snapshotName) throws IOException {
  // delete the taken snapshot
  admin.deleteSnapshot(snapshotName);
  assertNoSnapshots(admin);
}
/**
 * Expect the snapshot to throw an error when checking if the snapshot is complete.
 * @param master master to check
 * @param snapshot the {@link SnapshotDescription} request to pass to the master
 * @param clazz expected exception from the master
 */
public static void expectSnapshotDoneException(HMaster master, IsSnapshotDoneRequest snapshot,
  Class<? extends HBaseSnapshotException> clazz) {
  try {
    master.getMasterRpcServices().isSnapshotDone(null, snapshot);
    Assert.fail("didn't fail to lookup a snapshot");
  } catch (org.apache.hbase.thirdparty.com.google.protobuf.ServiceException se) {
    // Unwrap the RPC-layer exception and check the underlying snapshot error type.
    try {
      throw ProtobufUtil.handleRemoteException(se);
    } catch (HBaseSnapshotException e) {
      assertEquals("Threw wrong snapshot exception!", clazz, e.getClass());
    } catch (Throwable t) {
      Assert.fail("Threw an unexpected exception:" + t);
    }
  }
}
/**
 * List all the HFiles in the given table.
 * @param fs FileSystem where the table lives
 * @param tableDir directory of the table
 * @return sorted list of the current HFile names in the table (possibly empty)
 * @throws IOException on unexpected error reading the FS
 */
public static ArrayList<String> listHFileNames(final FileSystem fs, final Path tableDir)
  throws IOException {
  final ArrayList<String> hfiles = new ArrayList<>();
  // Collect every store file name; region and family are irrelevant here.
  FSVisitor.visitTableStoreFiles(fs, tableDir,
    (region, family, hfileName) -> hfiles.add(hfileName));
  Collections.sort(hfiles);
  return hfiles;
}
/**
 * Take a snapshot of the specified table and verify that the given family is not empty. Note that
 * this will leave the table disabled in the case of an offline snapshot.
 */
public static void createSnapshotAndValidate(Admin admin, TableName tableName, String familyName,
  String snapshotNameString, Path rootDir, FileSystem fs, boolean onlineSnapshot)
  throws Exception {
  List<byte[]> nonEmptyFamilyNames = new ArrayList<>(1);
  nonEmptyFamilyNames.add(Bytes.toBytes(familyName));
  createSnapshotAndValidate(admin, tableName, nonEmptyFamilyNames, /* emptyFamilyNames= */ null,
    snapshotNameString, rootDir, fs, onlineSnapshot);
}
/**
 * Take a snapshot of the specified table and verify the given families. Note that this will leave
 * the table disabled in the case of an offline snapshot.
 */
public static void createSnapshotAndValidate(Admin admin, TableName tableName,
  List<byte[]> nonEmptyFamilyNames, List<byte[]> emptyFamilyNames, String snapshotNameString,
  Path rootDir, FileSystem fs, boolean onlineSnapshot) throws Exception {
  if (!onlineSnapshot) {
    // Offline snapshots require a disabled table; tolerate it already being disabled.
    try {
      LOG.info("prepping for offline snapshot.");
      admin.disableTable(tableName);
    } catch (TableNotEnabledException tne) {
      LOG.info("In attempting to disable " + tableName + " it turns out that the this table is "
        + "already disabled.");
    }
  }
  LOG.info("taking snapshot.");
  admin.snapshot(snapshotNameString, tableName);
  LOG.info("Confirming snapshot exists.");
  List<SnapshotDescription> snapshots =
    SnapshotTestingUtils.assertExistsMatchingSnapshot(admin, snapshotNameString, tableName);
  boolean exactlyOne = snapshots != null && snapshots.size() == 1;
  if (!exactlyOne) {
    Assert.fail("Incorrect number of snapshots for table " + tableName);
  }
  LOG.info("validating snapshot.");
  SnapshotProtos.SnapshotDescription protoDesc =
    ProtobufUtil.createHBaseProtosSnapshotDesc(snapshots.get(0));
  SnapshotTestingUtils.confirmSnapshotValid(protoDesc, tableName, nonEmptyFamilyNames,
    emptyFamilyNames, rootDir, admin, fs);
}
/**
 * Corrupt the specified snapshot by deleting every other referenced store file.
 * <p>
 * Fix: the return type and local list were raw {@code ArrayList}, producing unchecked warnings;
 * both are now parameterized as {@code ArrayList<String>} (binary-compatible via erasure, and
 * source-compatible for existing raw-typed callers).
 *
 * @param util {@link HBaseTestingUtil}
 * @param snapshotName name of the snapshot to corrupt
 * @return names of the HFiles whose backing files were deleted
 * @throws IOException on unexpected error reading the FS
 */
public static ArrayList<String> corruptSnapshot(final HBaseTestingUtil util,
  final String snapshotName) throws IOException {
  final MasterFileSystem mfs = util.getHBaseCluster().getMaster().getMasterFileSystem();
  final FileSystem fs = mfs.getFileSystem();
  Path snapshotDir =
    SnapshotDescriptionUtils.getCompletedSnapshotDir(snapshotName, mfs.getRootDir());
  SnapshotProtos.SnapshotDescription snapshotDesc =
    SnapshotDescriptionUtils.readSnapshotInfo(fs, snapshotDir);
  final TableName table = TableName.valueOf(snapshotDesc.getTable());
  final ArrayList<String> corruptedFiles = new ArrayList<>();
  final Configuration conf = util.getConfiguration();
  SnapshotReferenceUtil.visitTableStoreFiles(conf, fs, snapshotDir, snapshotDesc,
    new SnapshotReferenceUtil.StoreFileVisitor() {
      @Override
      public void storeFile(final RegionInfo regionInfo, final String family,
        final SnapshotRegionManifest.StoreFile storeFile) throws IOException {
        String region = regionInfo.getEncodedName();
        String hfile = storeFile.getName();
        HFileLink link = HFileLink.build(conf, table, region, family, hfile);
        // Delete every other referenced file (even-sized list => corrupt this one).
        if (corruptedFiles.size() % 2 == 0) {
          fs.delete(link.getAvailablePath(fs), true);
          corruptedFiles.add(hfile);
        }
      }
    });
  assertTrue(corruptedFiles.size() > 0);
  return corruptedFiles;
}
// ==========================================================================
// Snapshot Mock
// ==========================================================================
public static class SnapshotMock {
protected final static String TEST_FAMILY = "cf";
public final static int TEST_NUM_REGIONS = 4;
private final Configuration conf;
private final FileSystem fs;
private final Path rootDir;
static class RegionData {
public RegionInfo hri;
public Path tableDir;
public Path[] files;
public RegionData(final Path tableDir, final RegionInfo hri, final int nfiles) {
this.tableDir = tableDir;
this.hri = hri;
this.files = new Path[nfiles];
}
}
public static class SnapshotBuilder {
private final RegionData[] tableRegions;
private final SnapshotProtos.SnapshotDescription desc;
private final TableDescriptor htd;
private final Configuration conf;
private final FileSystem fs;
private final Path rootDir;
private Path snapshotDir;
private int snapshotted = 0;
public SnapshotBuilder(final Configuration conf, final FileSystem fs, final Path rootDir,
final TableDescriptor htd, final SnapshotProtos.SnapshotDescription desc,
final RegionData[] tableRegions) throws IOException {
this.fs = fs;
this.conf = conf;
this.rootDir = rootDir;
this.htd = htd;
this.desc = desc;
this.tableRegions = tableRegions;
this.snapshotDir = SnapshotDescriptionUtils.getWorkingSnapshotDir(desc, rootDir, conf);
FSTableDescriptors.createTableDescriptorForTableDirectory(
this.snapshotDir.getFileSystem(conf), snapshotDir, htd, false);
}
public TableDescriptor getTableDescriptor() {
return this.htd;
}
public SnapshotProtos.SnapshotDescription getSnapshotDescription() {
return this.desc;
}
public Path getSnapshotsDir() {
return this.snapshotDir;
}
public Path[] addRegion() throws IOException {
return addRegion(desc);
}
public Path[] addRegionV1() throws IOException {
return addRegion(
desc.toBuilder().setVersion(SnapshotManifestV1.DESCRIPTOR_VERSION).build());
}
public Path[] addRegionV2() throws IOException {
return addRegion(
desc.toBuilder().setVersion(SnapshotManifestV2.DESCRIPTOR_VERSION).build());
}
private Path[] addRegion(final SnapshotProtos.SnapshotDescription desc) throws IOException {
if (this.snapshotted == tableRegions.length) {
throw new UnsupportedOperationException("No more regions in the table");
}
RegionData regionData = tableRegions[this.snapshotted++];
ForeignExceptionDispatcher monitor = new ForeignExceptionDispatcher(desc.getName());
SnapshotManifest manifest = SnapshotManifest.create(conf, fs, snapshotDir, desc, monitor);
manifest.addTableDescriptor(htd);
manifest.addRegion(regionData.tableDir, regionData.hri);
return regionData.files;
}
private void corruptFile(Path p) throws IOException {
String manifestName = p.getName();
// Rename the original region-manifest file
Path newP = new Path(p.getParent(), manifestName + "1");
fs.rename(p, newP);
// Create a new region-manifest file
FSDataOutputStream out = fs.create(p);
// Copy the first 25 bytes of the original region-manifest into the new one,
// make it a corrupted region-manifest file.
FSDataInputStream input = fs.open(newP);
byte[] buffer = new byte[25];
int len = input.read(0, buffer, 0, 25);
if (len > 1) {
out.write(buffer, 0, len - 1);
}
out.close();
// Delete the original region-manifest
fs.delete(newP);
}
/**
* Corrupt one region-manifest file
* @throws IOException on unexecpted error from the FS
*/
public void corruptOneRegionManifest() throws IOException {
FileStatus[] manifestFiles = CommonFSUtils.listStatus(fs, snapshotDir, new PathFilter() {
@Override
public boolean accept(Path path) {
return path.getName().startsWith(SnapshotManifestV2.SNAPSHOT_MANIFEST_PREFIX);
}
});
if (manifestFiles.length == 0) return;
// Just choose the first one
Path p = manifestFiles[0].getPath();
corruptFile(p);
}
public void missOneRegionSnapshotFile() throws IOException {
FileStatus[] manifestFiles = CommonFSUtils.listStatus(fs, snapshotDir);
for (FileStatus fileStatus : manifestFiles) {
String fileName = fileStatus.getPath().getName();
if (
fileName.endsWith(SnapshotDescriptionUtils.SNAPSHOTINFO_FILE)
|| fileName.endsWith(".tabledesc")
|| fileName.endsWith(SnapshotDescriptionUtils.SNAPSHOT_TMP_DIR_NAME)
) {
fs.delete(fileStatus.getPath(), true);
}
}
}
/**
* Corrupt data-manifest file
* @throws IOException on unexecpted error from the FS
*/
public void corruptDataManifest() throws IOException {
FileStatus[] manifestFiles = CommonFSUtils.listStatus(fs, snapshotDir, new PathFilter() {
@Override
public boolean accept(Path path) {
return path.getName().startsWith(SnapshotManifest.DATA_MANIFEST_NAME);
}
});
if (manifestFiles.length == 0) return;
// Just choose the first one
Path p = manifestFiles[0].getPath();
corruptFile(p);
}
public Path commit() throws IOException {
ForeignExceptionDispatcher monitor = new ForeignExceptionDispatcher(desc.getName());
SnapshotManifest manifest = SnapshotManifest.create(conf, fs, snapshotDir, desc, monitor);
manifest.addTableDescriptor(htd);
manifest.consolidate();
Path finishedDir = SnapshotDescriptionUtils.getCompletedSnapshotDir(desc, rootDir);
SnapshotDescriptionUtils.completeSnapshot(finishedDir, snapshotDir, fs,
snapshotDir.getFileSystem(conf), conf);
snapshotDir = SnapshotDescriptionUtils.getCompletedSnapshotDir(desc, rootDir);
return snapshotDir;
}
public void consolidate() throws IOException {
ForeignExceptionDispatcher monitor = new ForeignExceptionDispatcher(desc.getName());
SnapshotManifest manifest = SnapshotManifest.create(conf, fs, snapshotDir, desc, monitor);
manifest.addTableDescriptor(htd);
manifest.consolidate();
}
}
public SnapshotMock(final Configuration conf, final FileSystem fs, final Path rootDir) {
this.fs = fs;
this.conf = conf;
this.rootDir = rootDir;
}
public SnapshotBuilder createSnapshotV1(final String snapshotName, final String tableName)
throws IOException {
return createSnapshot(snapshotName, tableName, SnapshotManifestV1.DESCRIPTOR_VERSION);
}
public SnapshotBuilder createSnapshotV1(final String snapshotName, final String tableName,
final int numRegions) throws IOException {
return createSnapshot(snapshotName, tableName, numRegions,
SnapshotManifestV1.DESCRIPTOR_VERSION);
}
public SnapshotBuilder createSnapshotV2(final String snapshotName, final String tableName)
throws IOException {
return createSnapshot(snapshotName, tableName, SnapshotManifestV2.DESCRIPTOR_VERSION);
}
public SnapshotBuilder createSnapshotV2(final String snapshotName, final String tableName,
final int numRegions) throws IOException {
return createSnapshot(snapshotName, tableName, numRegions,
SnapshotManifestV2.DESCRIPTOR_VERSION);
}
public SnapshotBuilder createSnapshotV2(final String snapshotName, final String tableName,
final int numRegions, final long ttl) throws IOException {
return createSnapshot(snapshotName, tableName, numRegions,
SnapshotManifestV2.DESCRIPTOR_VERSION, ttl);
}
private SnapshotBuilder createSnapshot(final String snapshotName, final String tableName,
final int version) throws IOException {
return createSnapshot(snapshotName, tableName, TEST_NUM_REGIONS, version);
}
private SnapshotBuilder createSnapshot(final String snapshotName, final String tableName,
final int numRegions, final int version) throws IOException {
TableDescriptor htd = createHtd(tableName);
RegionData[] regions = createTable(htd, numRegions);
SnapshotProtos.SnapshotDescription desc = SnapshotProtos.SnapshotDescription.newBuilder()
.setTable(htd.getTableName().getNameAsString()).setName(snapshotName).setVersion(version)
.build();
Path workingDir = SnapshotDescriptionUtils.getWorkingSnapshotDir(desc, rootDir, conf);
FileSystem workingFs = workingDir.getFileSystem(conf);
SnapshotDescriptionUtils.writeSnapshotInfo(desc, workingDir, workingFs);
return new SnapshotBuilder(conf, fs, rootDir, htd, desc, regions);
}
private SnapshotBuilder createSnapshot(final String snapshotName, final String tableName,
final int numRegions, final int version, final long ttl) throws IOException {
TableDescriptor htd = createHtd(tableName);
RegionData[] regions = createTable(htd, numRegions);
SnapshotProtos.SnapshotDescription desc = SnapshotProtos.SnapshotDescription.newBuilder()
.setTable(htd.getTableName().getNameAsString()).setName(snapshotName).setVersion(version)
.setCreationTime(EnvironmentEdgeManager.currentTime()).setTtl(ttl).build();
Path workingDir = SnapshotDescriptionUtils.getWorkingSnapshotDir(desc, rootDir, conf);
SnapshotDescriptionUtils.writeSnapshotInfo(desc, workingDir, fs);
return new SnapshotBuilder(conf, fs, rootDir, htd, desc, regions);
}
public TableDescriptor createHtd(final String tableName) {
return TableDescriptorBuilder.newBuilder(TableName.valueOf(tableName))
.setColumnFamily(ColumnFamilyDescriptorBuilder.of(TEST_FAMILY)).build();
}
/**
 * Lays out a mock table on the filesystem with {@code nregions} regions created in pairs:
 * an even-indexed "plain" region holding real store files, followed by an odd-indexed
 * region holding reference files pointing at the plain region's hfiles (the split case).
 *
 * @param htd      descriptor of the table to lay out
 * @param nregions number of regions; must be even because regions are created in pairs
 * @return per-region metadata (dir, RegionInfo, committed store file paths)
 * @throws IOException if any directory or file cannot be created
 */
private RegionData[] createTable(final TableDescriptor htd, final int nregions)
    throws IOException {
  Path tableDir = CommonFSUtils.getTableDir(rootDir, htd.getTableName());
  new FSTableDescriptors(conf).createTableDescriptorForTableDirectory(tableDir, htd, false);
  // Regions are built two at a time, so an odd count cannot be honored.
  assertTrue(nregions % 2 == 0);
  RegionData[] regions = new RegionData[nregions];
  for (int i = 0; i < regions.length; i += 2) {
    byte[] startKey = Bytes.toBytes(0 + i * 2);
    byte[] endKey = Bytes.toBytes(1 + i * 2);
    // First region, simple with one plain hfile.
    RegionInfo hri = RegionInfoBuilder.newBuilder(htd.getTableName()).setStartKey(startKey)
        .setEndKey(endKey).build();
    HRegionFileSystem rfs = HRegionFileSystem.createRegionOnFileSystem(conf, fs, tableDir, hri);
    regions[i] = new RegionData(tableDir, hri, 3);
    for (int j = 0; j < regions[i].files.length; ++j) {
      Path storeFile = createStoreFile(rfs.createTempName());
      regions[i].files[j] = rfs.commitStoreFile(TEST_FAMILY, storeFile);
    }
    // Second region, used to test the split case.
    // This region contains a reference to the hfile in the first region.
    startKey = Bytes.toBytes(2 + i * 2);
    endKey = Bytes.toBytes(3 + i * 2);
    // FIX: the recomputed startKey/endKey were previously never applied, so every second
    // region was built with default (empty) boundaries; set them on the RegionInfo.
    hri = RegionInfoBuilder.newBuilder(htd.getTableName()).setStartKey(startKey)
        .setEndKey(endKey).build();
    rfs = HRegionFileSystem.createRegionOnFileSystem(conf, fs, tableDir, hri);
    regions[i + 1] = new RegionData(tableDir, hri, regions[i].files.length);
    for (int j = 0; j < regions[i].files.length; ++j) {
      // Reference file name convention: <referenced-hfile>.<encoded-name-of-referenced-region>
      String refName = regions[i].files[j].getName() + '.' + regions[i].hri.getEncodedName();
      Path refFile = createStoreFile(new Path(rootDir, refName));
      regions[i + 1].files[j] = rfs.commitStoreFile(TEST_FAMILY, refFile);
    }
  }
  return regions;
}
/**
 * Creates a dummy store file whose content is its own path string, so tests can later
 * verify which file they are looking at.
 *
 * @param storeFile destination path of the file to create
 * @return the same {@code storeFile} path, for call chaining
 * @throws IOException if the file cannot be created or written
 */
private Path createStoreFile(final Path storeFile) throws IOException {
  // try-with-resources replaces the manual try/finally and still closes on write failure.
  try (FSDataOutputStream out = fs.create(storeFile)) {
    out.write(Bytes.toBytes(storeFile.toString()));
  }
  return storeFile;
}
}
// ==========================================================================
// Table Helpers
// ==========================================================================
/**
 * Blocks until every online region of {@code tableName} has finished pending flushes and
 * compactions, then waits (up to 60 seconds) for the table to report available.
 *
 * @throws IOException          if region or table state cannot be queried
 * @throws InterruptedException if interrupted while waiting
 */
public static void waitForTableToBeOnline(final HBaseTestingUtil util, final TableName tableName)
    throws IOException, InterruptedException {
  HRegionServer regionServer = util.getRSForFirstRegionInTable(tableName);
  for (HRegion region : regionServer.getRegions(tableName)) {
    region.waitForFlushesAndCompactions();
  }
  // Wait up to 60 seconds for a table to be available.
  util.waitFor(60000, util.predicateTableAvailable(tableName));
}
/**
 * Creates {@code tableName} pre-split into {@code nRegions} regions with the given
 * replication factor and column families, then asserts the expected region count.
 *
 * @throws IOException          if table creation fails
 * @throws InterruptedException if interrupted while the table comes online
 */
public static void createTable(final HBaseTestingUtil util, final TableName tableName,
    int regionReplication, int nRegions, final byte[]... families)
    throws IOException, InterruptedException {
  TableDescriptorBuilder descriptor =
      TableDescriptorBuilder.newBuilder(tableName).setRegionReplication(regionReplication);
  for (byte[] family : families) {
    descriptor.setColumnFamily(ColumnFamilyDescriptorBuilder.of(family));
  }
  byte[][] splitKeys = getSplitKeys(nRegions);
  util.createTable(descriptor.build(), splitKeys);
  // n split keys produce n+1 regions, each replicated regionReplication times.
  int expectedRegions = (splitKeys.length + 1) * regionReplication;
  assertEquals(expectedRegions, util.getAdmin().getRegions(tableName).size());
}
/**
 * Returns the default set of split keys derived from all entries of {@code KEYS}.
 */
public static byte[][] getSplitKeys() {
  return getSplitKeys(KEYS.length);
}

/**
 * Computes {@code nRegions - 1} single-byte split keys sampled at an even stride from
 * {@code KEYS}. The region count is capped at {@code KEYS.length - 1} so the stride
 * stays at least 1.
 *
 * @param nRegions requested number of regions (capped as described above)
 * @return the split keys, one byte each
 */
public static byte[][] getSplitKeys(int nRegions) {
  // Equivalent to: nRegions < KEYS.length ? nRegions : KEYS.length - 1
  final int regionCount = Math.min(nRegions, KEYS.length - 1);
  final byte[][] splitKeys = new byte[regionCount - 1][];
  final int step = KEYS.length / regionCount;
  for (int i = 0, keyIndex = 1; i < splitKeys.length; ++i, keyIndex += step) {
    splitKeys[i] = new byte[] { KEYS[keyIndex] };
  }
  return splitKeys;
}
/**
 * Creates {@code tableName} with a single region replica and the default number of regions.
 * Convenience overload delegating to the 5-argument {@code createTable}.
 */
public static void createTable(final HBaseTestingUtil util, final TableName tableName,
final byte[]... families) throws IOException, InterruptedException {
createTable(util, tableName, 1, families);
}
/**
 * Creates {@code tableName} with the given region replication and the default region
 * count ({@code KEYS.length}). Delegates to the 5-argument {@code createTable}.
 */
public static void createTable(final HBaseTestingUtil util, final TableName tableName,
final int regionReplication, final byte[]... families)
throws IOException, InterruptedException {
createTable(util, tableName, regionReplication, KEYS.length, families);
}
/**
 * Creates {@code tableName} pre-split into {@code nRegions} regions with a single
 * region replica. Delegates to the 5-argument {@code createTable}.
 */
public static void createPreSplitTable(final HBaseTestingUtil util, final TableName tableName,
final int nRegions, final byte[]... families) throws IOException, InterruptedException {
createTable(util, tableName, 1, nRegions, families);
}
/**
 * Loads {@code rows} rows into {@code tableName} through a {@link BufferedMutator}.
 *
 * @param rows total rows to write; must be at least {@code KEYS.length}
 * @throws IOException          if the mutations fail
 * @throws InterruptedException if interrupted while the table comes online
 */
public static void loadData(final HBaseTestingUtil util, final TableName tableName, int rows,
    byte[]... families) throws IOException, InterruptedException {
  // FIX: close the mutator (BufferedMutator is Closeable) — it was previously leaked.
  // The flush happens inside the delegated loadData before close.
  try (BufferedMutator mutator = util.getConnection().getBufferedMutator(tableName)) {
    loadData(util, mutator, rows, families);
  }
}
/**
 * Writes one row per entry of {@code KEYS} (so every region receives at least one row),
 * then fills up to {@code rows} total with extra rows, flushes, and waits for the table
 * to be online.
 *
 * @param rows total rows to write; must be at least {@code KEYS.length}
 * @throws IOException          if a mutation or the flush fails
 * @throws InterruptedException if interrupted while waiting for the table
 */
public static void loadData(final HBaseTestingUtil util, final BufferedMutator mutator, int rows,
    byte[]... families) throws IOException, InterruptedException {
  // Ensure one row per region
  assertTrue(rows >= KEYS.length);
  for (byte k0 : KEYS) {
    byte[] k = new byte[] { k0 };
    byte[] value = Bytes.add(Bytes.toBytes(EnvironmentEdgeManager.currentTime()), k);
    // Prefix the row key with the region's key byte so the row lands in that region.
    byte[] key = Bytes.add(k, Bytes.toBytes(MD5Hash.getMD5AsHex(value)));
    // FIX: dropped the pointless single-use alias locals (families1/key1/value1).
    mutator.mutate(createPut(families, key, value));
    rows--;
  }
  // Add other extra rows. more rows, more files
  while (rows-- > 0) {
    byte[] value =
        Bytes.add(Bytes.toBytes(EnvironmentEdgeManager.currentTime()), Bytes.toBytes(rows));
    byte[] key = Bytes.toBytes(MD5Hash.getMD5AsHex(value));
    mutator.mutate(createPut(families, key, value));
  }
  mutator.flush();
  waitForTableToBeOnline(util, mutator.getName());
}
/**
 * Builds a {@code Put} for {@code key} that writes {@code value} under qualifier "q"
 * in every given family. WAL writes are skipped to keep test data loads fast.
 */
private static Put createPut(final byte[][] families, final byte[] key, final byte[] value) {
  final byte[] qualifier = Bytes.toBytes("q");
  Put put = new Put(key);
  put.setDurability(Durability.SKIP_WAL);
  for (byte[] family : families) {
    put.addColumn(family, qualifier, value);
  }
  return put;
}
/**
 * Deletes every snapshot known to {@code admin}, then asserts that none remain.
 *
 * @throws IOException if listing or deleting a snapshot fails
 */
public static void deleteAllSnapshots(final Admin admin) throws IOException {
  // Remove each snapshot individually, then verify the cluster reports none left.
  for (SnapshotDescription description : admin.listSnapshots()) {
    admin.deleteSnapshot(description.getName());
  }
  SnapshotTestingUtils.assertNoSnapshots(admin);
}
/**
 * Recursively deletes the cluster's HFile archive directory so subsequent assertions
 * start from an empty archive.
 *
 * @throws IOException if the delete fails
 */
public static void deleteArchiveDirectory(final HBaseTestingUtil util) throws IOException {
  // Ensure the archiver to be empty
  MasterFileSystem masterFs = util.getMiniHBaseCluster().getMaster().getMasterFileSystem();
  Path archivePath = new Path(masterFs.getRootDir(), HConstants.HFILE_ARCHIVE_DIRECTORY);
  masterFs.getFileSystem().delete(archivePath, true);
}
/**
 * Asserts that {@code tableName} contains exactly {@code expectedRows} rows.
 *
 * @throws IOException if the table cannot be opened or scanned
 */
public static void verifyRowCount(final HBaseTestingUtil util, final TableName tableName,
    long expectedRows) throws IOException {
  // try-with-resources replaces the manual try/finally and closes the table on failure too.
  try (Table table = util.getConnection().getTable(tableName)) {
    assertEquals(expectedRows, util.countRows(table));
  }
}
/**
 * Asserts that, for every online region of {@code tableName}, all {@code regionReplication}
 * replicas are also online, and that the number of distinct default-replica regions equals
 * the expected split count ({@code getSplitKeys().length + 1}).
 *
 * @throws IOException if the region list cannot be fetched
 */
public static void verifyReplicasCameOnline(TableName tableName, Admin admin,
    int regionReplication) throws IOException {
  List<RegionInfo> onlineRegions = admin.getRegions(tableName);
  HashSet<RegionInfo> defaultReplicas = new HashSet<>();
  for (RegionInfo info : onlineRegions) {
    // Collapse replicas down to their default replica to count distinct regions.
    defaultReplicas.add(RegionReplicaUtil.getRegionInfoForDefaultReplica(info));
    for (int replicaId = 0; replicaId < regionReplication; replicaId++) {
      RegionInfo replica = RegionReplicaUtil.getRegionInfoForReplica(info, replicaId);
      if (!onlineRegions.contains(replica)) {
        Assert.fail(replica + " is not contained in the list of online regions");
      }
    }
  }
  assertEquals(getSplitKeys().length + 1, defaultReplicas.size());
}
}
|
googleapis/google-cloud-java | 36,727 | java-discoveryengine/proto-google-cloud-discoveryengine-v1alpha/src/main/java/com/google/cloud/discoveryengine/v1alpha/ListSessionsResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/discoveryengine/v1alpha/conversational_search_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.discoveryengine.v1alpha;
/**
*
*
* <pre>
* Response for ListSessions method.
* </pre>
*
* Protobuf type {@code google.cloud.discoveryengine.v1alpha.ListSessionsResponse}
*/
public final class ListSessionsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.discoveryengine.v1alpha.ListSessionsResponse)
ListSessionsResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListSessionsResponse.newBuilder() to construct.
private ListSessionsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListSessionsResponse() {
sessions_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListSessionsResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.discoveryengine.v1alpha.ConversationalSearchServiceProto
.internal_static_google_cloud_discoveryengine_v1alpha_ListSessionsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.discoveryengine.v1alpha.ConversationalSearchServiceProto
.internal_static_google_cloud_discoveryengine_v1alpha_ListSessionsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.discoveryengine.v1alpha.ListSessionsResponse.class,
com.google.cloud.discoveryengine.v1alpha.ListSessionsResponse.Builder.class);
}
public static final int SESSIONS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.discoveryengine.v1alpha.Session> sessions_;
/**
*
*
* <pre>
* All the Sessions for a given data store.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1alpha.Session sessions = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.discoveryengine.v1alpha.Session> getSessionsList() {
return sessions_;
}
/**
*
*
* <pre>
* All the Sessions for a given data store.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1alpha.Session sessions = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.discoveryengine.v1alpha.SessionOrBuilder>
getSessionsOrBuilderList() {
return sessions_;
}
/**
*
*
* <pre>
* All the Sessions for a given data store.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1alpha.Session sessions = 1;</code>
*/
@java.lang.Override
public int getSessionsCount() {
return sessions_.size();
}
/**
*
*
* <pre>
* All the Sessions for a given data store.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1alpha.Session sessions = 1;</code>
*/
@java.lang.Override
public com.google.cloud.discoveryengine.v1alpha.Session getSessions(int index) {
return sessions_.get(index);
}
/**
*
*
* <pre>
* All the Sessions for a given data store.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1alpha.Session sessions = 1;</code>
*/
@java.lang.Override
public com.google.cloud.discoveryengine.v1alpha.SessionOrBuilder getSessionsOrBuilder(int index) {
return sessions_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* Pagination token, if not returned indicates the last page.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* Pagination token, if not returned indicates the last page.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < sessions_.size(); i++) {
output.writeMessage(1, sessions_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < sessions_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, sessions_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.discoveryengine.v1alpha.ListSessionsResponse)) {
return super.equals(obj);
}
com.google.cloud.discoveryengine.v1alpha.ListSessionsResponse other =
(com.google.cloud.discoveryengine.v1alpha.ListSessionsResponse) obj;
if (!getSessionsList().equals(other.getSessionsList())) return false;
if (!getNextPageToken().equals(other.getNextPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getSessionsCount() > 0) {
hash = (37 * hash) + SESSIONS_FIELD_NUMBER;
hash = (53 * hash) + getSessionsList().hashCode();
}
hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getNextPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.discoveryengine.v1alpha.ListSessionsResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.discoveryengine.v1alpha.ListSessionsResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.discoveryengine.v1alpha.ListSessionsResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.discoveryengine.v1alpha.ListSessionsResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.discoveryengine.v1alpha.ListSessionsResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.discoveryengine.v1alpha.ListSessionsResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.discoveryengine.v1alpha.ListSessionsResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.discoveryengine.v1alpha.ListSessionsResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.discoveryengine.v1alpha.ListSessionsResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.discoveryengine.v1alpha.ListSessionsResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.discoveryengine.v1alpha.ListSessionsResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.discoveryengine.v1alpha.ListSessionsResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.discoveryengine.v1alpha.ListSessionsResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Response for ListSessions method.
* </pre>
*
* Protobuf type {@code google.cloud.discoveryengine.v1alpha.ListSessionsResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.discoveryengine.v1alpha.ListSessionsResponse)
com.google.cloud.discoveryengine.v1alpha.ListSessionsResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.discoveryengine.v1alpha.ConversationalSearchServiceProto
.internal_static_google_cloud_discoveryengine_v1alpha_ListSessionsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.discoveryengine.v1alpha.ConversationalSearchServiceProto
.internal_static_google_cloud_discoveryengine_v1alpha_ListSessionsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.discoveryengine.v1alpha.ListSessionsResponse.class,
com.google.cloud.discoveryengine.v1alpha.ListSessionsResponse.Builder.class);
}
// Construct using com.google.cloud.discoveryengine.v1alpha.ListSessionsResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (sessionsBuilder_ == null) {
sessions_ = java.util.Collections.emptyList();
} else {
sessions_ = null;
sessionsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.discoveryengine.v1alpha.ConversationalSearchServiceProto
.internal_static_google_cloud_discoveryengine_v1alpha_ListSessionsResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.discoveryengine.v1alpha.ListSessionsResponse
getDefaultInstanceForType() {
return com.google.cloud.discoveryengine.v1alpha.ListSessionsResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.discoveryengine.v1alpha.ListSessionsResponse build() {
com.google.cloud.discoveryengine.v1alpha.ListSessionsResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.discoveryengine.v1alpha.ListSessionsResponse buildPartial() {
com.google.cloud.discoveryengine.v1alpha.ListSessionsResponse result =
new com.google.cloud.discoveryengine.v1alpha.ListSessionsResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.cloud.discoveryengine.v1alpha.ListSessionsResponse result) {
if (sessionsBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
sessions_ = java.util.Collections.unmodifiableList(sessions_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.sessions_ = sessions_;
} else {
result.sessions_ = sessionsBuilder_.build();
}
}
private void buildPartial0(
com.google.cloud.discoveryengine.v1alpha.ListSessionsResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.discoveryengine.v1alpha.ListSessionsResponse) {
return mergeFrom((com.google.cloud.discoveryengine.v1alpha.ListSessionsResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.discoveryengine.v1alpha.ListSessionsResponse other) {
if (other
== com.google.cloud.discoveryengine.v1alpha.ListSessionsResponse.getDefaultInstance())
return this;
if (sessionsBuilder_ == null) {
if (!other.sessions_.isEmpty()) {
if (sessions_.isEmpty()) {
sessions_ = other.sessions_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureSessionsIsMutable();
sessions_.addAll(other.sessions_);
}
onChanged();
}
} else {
if (!other.sessions_.isEmpty()) {
if (sessionsBuilder_.isEmpty()) {
sessionsBuilder_.dispose();
sessionsBuilder_ = null;
sessions_ = other.sessions_;
bitField0_ = (bitField0_ & ~0x00000001);
sessionsBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getSessionsFieldBuilder()
: null;
} else {
sessionsBuilder_.addAllMessages(other.sessions_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.cloud.discoveryengine.v1alpha.Session m =
input.readMessage(
com.google.cloud.discoveryengine.v1alpha.Session.parser(),
extensionRegistry);
if (sessionsBuilder_ == null) {
ensureSessionsIsMutable();
sessions_.add(m);
} else {
sessionsBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.cloud.discoveryengine.v1alpha.Session> sessions_ =
java.util.Collections.emptyList();
private void ensureSessionsIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
sessions_ =
new java.util.ArrayList<com.google.cloud.discoveryengine.v1alpha.Session>(sessions_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.discoveryengine.v1alpha.Session,
com.google.cloud.discoveryengine.v1alpha.Session.Builder,
com.google.cloud.discoveryengine.v1alpha.SessionOrBuilder>
sessionsBuilder_;
/**
*
*
* <pre>
* All the Sessions for a given data store.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1alpha.Session sessions = 1;</code>
*/
public java.util.List<com.google.cloud.discoveryengine.v1alpha.Session> getSessionsList() {
if (sessionsBuilder_ == null) {
return java.util.Collections.unmodifiableList(sessions_);
} else {
return sessionsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* All the Sessions for a given data store.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1alpha.Session sessions = 1;</code>
*/
public int getSessionsCount() {
if (sessionsBuilder_ == null) {
return sessions_.size();
} else {
return sessionsBuilder_.getCount();
}
}
/**
*
*
* <pre>
* All the Sessions for a given data store.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1alpha.Session sessions = 1;</code>
*/
public com.google.cloud.discoveryengine.v1alpha.Session getSessions(int index) {
if (sessionsBuilder_ == null) {
return sessions_.get(index);
} else {
return sessionsBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* All the Sessions for a given data store.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1alpha.Session sessions = 1;</code>
*/
public Builder setSessions(int index, com.google.cloud.discoveryengine.v1alpha.Session value) {
if (sessionsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureSessionsIsMutable();
sessions_.set(index, value);
onChanged();
} else {
sessionsBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* All the Sessions for a given data store.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1alpha.Session sessions = 1;</code>
*/
public Builder setSessions(
int index, com.google.cloud.discoveryengine.v1alpha.Session.Builder builderForValue) {
if (sessionsBuilder_ == null) {
ensureSessionsIsMutable();
sessions_.set(index, builderForValue.build());
onChanged();
} else {
sessionsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* All the Sessions for a given data store.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1alpha.Session sessions = 1;</code>
*/
public Builder addSessions(com.google.cloud.discoveryengine.v1alpha.Session value) {
if (sessionsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureSessionsIsMutable();
sessions_.add(value);
onChanged();
} else {
sessionsBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* All the Sessions for a given data store.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1alpha.Session sessions = 1;</code>
*/
public Builder addSessions(int index, com.google.cloud.discoveryengine.v1alpha.Session value) {
if (sessionsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureSessionsIsMutable();
sessions_.add(index, value);
onChanged();
} else {
sessionsBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* All the Sessions for a given data store.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1alpha.Session sessions = 1;</code>
*/
public Builder addSessions(
com.google.cloud.discoveryengine.v1alpha.Session.Builder builderForValue) {
if (sessionsBuilder_ == null) {
ensureSessionsIsMutable();
sessions_.add(builderForValue.build());
onChanged();
} else {
sessionsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* All the Sessions for a given data store.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1alpha.Session sessions = 1;</code>
*/
public Builder addSessions(
int index, com.google.cloud.discoveryengine.v1alpha.Session.Builder builderForValue) {
if (sessionsBuilder_ == null) {
ensureSessionsIsMutable();
sessions_.add(index, builderForValue.build());
onChanged();
} else {
sessionsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* All the Sessions for a given data store.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1alpha.Session sessions = 1;</code>
*/
public Builder addAllSessions(
java.lang.Iterable<? extends com.google.cloud.discoveryengine.v1alpha.Session> values) {
if (sessionsBuilder_ == null) {
ensureSessionsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, sessions_);
onChanged();
} else {
sessionsBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* All the Sessions for a given data store.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1alpha.Session sessions = 1;</code>
*/
public Builder clearSessions() {
if (sessionsBuilder_ == null) {
sessions_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
sessionsBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* All the Sessions for a given data store.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1alpha.Session sessions = 1;</code>
*/
public Builder removeSessions(int index) {
if (sessionsBuilder_ == null) {
ensureSessionsIsMutable();
sessions_.remove(index);
onChanged();
} else {
sessionsBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* All the Sessions for a given data store.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1alpha.Session sessions = 1;</code>
*/
public com.google.cloud.discoveryengine.v1alpha.Session.Builder getSessionsBuilder(int index) {
return getSessionsFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* All the Sessions for a given data store.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1alpha.Session sessions = 1;</code>
*/
public com.google.cloud.discoveryengine.v1alpha.SessionOrBuilder getSessionsOrBuilder(
int index) {
if (sessionsBuilder_ == null) {
return sessions_.get(index);
} else {
return sessionsBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* All the Sessions for a given data store.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1alpha.Session sessions = 1;</code>
*/
public java.util.List<? extends com.google.cloud.discoveryengine.v1alpha.SessionOrBuilder>
getSessionsOrBuilderList() {
if (sessionsBuilder_ != null) {
return sessionsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(sessions_);
}
}
/**
*
*
* <pre>
* All the Sessions for a given data store.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1alpha.Session sessions = 1;</code>
*/
public com.google.cloud.discoveryengine.v1alpha.Session.Builder addSessionsBuilder() {
return getSessionsFieldBuilder()
.addBuilder(com.google.cloud.discoveryengine.v1alpha.Session.getDefaultInstance());
}
    /**
     *
     *
     * <pre>
     * All the Sessions for a given data store.
     * </pre>
     *
     * <code>repeated .google.cloud.discoveryengine.v1alpha.Session sessions = 1;</code>
     */
    public com.google.cloud.discoveryengine.v1alpha.Session.Builder addSessionsBuilder(int index) {
      // Inserts a new default-initialized element at {@code index} and returns its builder.
      return getSessionsFieldBuilder()
          .addBuilder(index, com.google.cloud.discoveryengine.v1alpha.Session.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * All the Sessions for a given data store.
     * </pre>
     *
     * <code>repeated .google.cloud.discoveryengine.v1alpha.Session sessions = 1;</code>
     */
    public java.util.List<com.google.cloud.discoveryengine.v1alpha.Session.Builder>
        getSessionsBuilderList() {
      // Forces creation of the nested field builder; the returned builders are live views.
      return getSessionsFieldBuilder().getBuilderList();
    }
    // Lazily creates the RepeatedFieldBuilderV3 for the sessions field. Once
    // created, ownership of the element list transfers to the builder and the
    // plain sessions_ list is nulled out so there is a single source of truth.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.discoveryengine.v1alpha.Session,
            com.google.cloud.discoveryengine.v1alpha.Session.Builder,
            com.google.cloud.discoveryengine.v1alpha.SessionOrBuilder>
        getSessionsFieldBuilder() {
      if (sessionsBuilder_ == null) {
        sessionsBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloud.discoveryengine.v1alpha.Session,
                com.google.cloud.discoveryengine.v1alpha.Session.Builder,
                com.google.cloud.discoveryengine.v1alpha.SessionOrBuilder>(
                sessions_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
        sessions_ = null;
      }
      return sessionsBuilder_;
    }
    // Holds either a String or a ByteString: the wire value is kept as a
    // ByteString until first string access, then cached as a decoded String.
    private java.lang.Object nextPageToken_ = "";
    /**
     *
     *
     * <pre>
     * Pagination token, if not returned indicates the last page.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The nextPageToken.
     */
    public java.lang.String getNextPageToken() {
      java.lang.Object ref = nextPageToken_;
      if (!(ref instanceof java.lang.String)) {
        // Lazily decode the ByteString and cache the result for later calls.
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        nextPageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Pagination token, if not returned indicates the last page.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The bytes for nextPageToken.
     */
    public com.google.protobuf.ByteString getNextPageTokenBytes() {
      java.lang.Object ref = nextPageToken_;
      if (ref instanceof String) {
        // Mirror of getNextPageToken(): encode the cached String once and keep
        // the ByteString form for subsequent byte-level access.
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        nextPageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Pagination token, if not returned indicates the last page.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      nextPageToken_ = value;
      // Mark field 2 as set in the presence bitmask.
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Pagination token, if not returned indicates the last page.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearNextPageToken() {
      // Restore the default value and clear the field-presence bit.
      nextPageToken_ = getDefaultInstance().getNextPageToken();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Pagination token, if not returned indicates the last page.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The bytes for nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      // Proto3 strings must be valid UTF-8; reject invalid byte sequences up front.
      checkByteStringIsUtf8(value);
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      // Unknown-field handling is delegated entirely to the generated base builder.
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.discoveryengine.v1alpha.ListSessionsResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.discoveryengine.v1alpha.ListSessionsResponse)
  // Singleton default instance, created eagerly at class-load time.
  private static final com.google.cloud.discoveryengine.v1alpha.ListSessionsResponse
      DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.discoveryengine.v1alpha.ListSessionsResponse();
  }
  public static com.google.cloud.discoveryengine.v1alpha.ListSessionsResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser that builds the message via the generated Builder; on any failure it
  // attaches the partially-built message to the thrown exception.
  private static final com.google.protobuf.Parser<ListSessionsResponse> PARSER =
      new com.google.protobuf.AbstractParser<ListSessionsResponse>() {
        @java.lang.Override
        public ListSessionsResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O failures so callers see a single exception type.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<ListSessionsResponse> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<ListSessionsResponse> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.discoveryengine.v1alpha.ListSessionsResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/directory-scimple | 36,989 | scim-server/src/main/java/org/apache/directory/scim/server/rest/BulkResourceImpl.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.directory.scim.server.rest;
import java.util.*;
import java.util.regex.Pattern;
import jakarta.enterprise.context.ApplicationScoped;
import jakarta.inject.Inject;
import jakarta.ws.rs.core.Response;
import jakarta.ws.rs.core.Response.Status;
import jakarta.ws.rs.core.UriInfo;
import org.apache.commons.lang3.StringUtils;
import org.apache.directory.scim.spec.exception.ResourceException;
import org.apache.directory.scim.server.exception.UnableToCreateResourceException;
import org.apache.directory.scim.server.exception.UnableToDeleteResourceException;
import org.apache.directory.scim.server.exception.UnableToRetrieveResourceException;
import org.apache.directory.scim.server.exception.UnableToUpdateResourceException;
import org.apache.directory.scim.core.repository.Repository;
import org.apache.directory.scim.core.repository.RepositoryRegistry;
import org.apache.directory.scim.protocol.BulkResource;
import org.apache.directory.scim.protocol.data.BulkOperation;
import org.apache.directory.scim.protocol.data.BulkOperation.Method;
import org.apache.directory.scim.protocol.data.BulkOperation.StatusWrapper;
import org.apache.directory.scim.protocol.data.BulkRequest;
import org.apache.directory.scim.protocol.data.BulkResponse;
import org.apache.directory.scim.protocol.data.ErrorResponse;
import org.apache.directory.scim.spec.resources.BaseResource;
import org.apache.directory.scim.spec.resources.ScimResource;
import org.apache.directory.scim.spec.schema.Schema;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.directory.scim.core.schema.SchemaRegistry;
@ApplicationScoped
public class BulkResourceImpl implements BulkResource {
/** A logger for this class */
private static final Logger log = LoggerFactory.getLogger(BulkResourceImpl.class);
// private static final StatusWrapper OKAY_STATUS = new StatusWrapper();
// private static final StatusWrapper CREATED_STATUS = new StatusWrapper();
// private static final StatusWrapper NO_CONTENT_STATUS = new StatusWrapper();
// private static final StatusWrapper METHOD_NOT_ALLOWED_STATUS = new StatusWrapper();
// private static final StatusWrapper CONFLICT_STATUS = new StatusWrapper();
// private static final StatusWrapper CLIENT_ERROR_STATUS = new StatusWrapper();
// private static final StatusWrapper NOT_FOUND_STATUS = new StatusWrapper();
// private static final StatusWrapper INTERNAL_SERVER_ERROR_STATUS = new StatusWrapper();
// private static final StatusWrapper METHOD_NOT_IMPLEMENTED_STATUS = new StatusWrapper();
// private static final String OKAY = "200";
// private static final String CREATED = "201";
// private static final String NO_CONTENT = "204";
// private static final String CLIENT_ERROR = "400";
// private static final String NOT_FOUND = "404";
// private static final String METHOD_NOT_ALLOWED = "405";
// private static final String CONFLICT = "409";
// private static final String INTERNAL_SERVER_ERROR = "500";
// private static final String METHOD_NOT_IMPLEMENTED = "501";
private static final String BULK_ID_DOES_NOT_EXIST = "Bulk ID cannot be resolved because it refers to no bulkId in any Bulk Operation: %s";
private static final String BULK_ID_REFERS_TO_FAILED_RESOURCE = "Bulk ID cannot be resolved because the resource it refers to had failed to be created: %s";
private static final String OPERATION_DEPENDS_ON_FAILED_OPERATION = "Operation depends on failed bulk operation: %s";
private static final Pattern PATH_PATTERN = Pattern.compile("^/[^/]+/[^/]+$");
// static {
// METHOD_NOT_ALLOWED_STATUS.setCode(METHOD_NOT_ALLOWED);
// OKAY_STATUS.setCode(OKAY);
// CREATED_STATUS.setCode(CREATED);
// NO_CONTENT_STATUS.setCode(NO_CONTENT);
// CONFLICT_STATUS.setCode(CONFLICT);
// CLIENT_ERROR_STATUS.setCode(CLIENT_ERROR);
// NOT_FOUND_STATUS.setCode(NOT_FOUND);
// INTERNAL_SERVER_ERROR_STATUS.setCode(INTERNAL_SERVER_ERROR);
// METHOD_NOT_IMPLEMENTED_STATUS.setCode(METHOD_NOT_IMPLEMENTED);
// }
private final SchemaRegistry schemaRegistry;
private final RepositoryRegistry repositoryRegistry;
  /**
   * Constructs the bulk endpoint implementation.
   *
   * @param schemaRegistry registry of SCIM schemas, used to resolve endpoints and attributes
   * @param repositoryRegistry registry of resource repositories, used to perform CRUD operations
   */
  @Inject
  public BulkResourceImpl(SchemaRegistry schemaRegistry, RepositoryRegistry repositoryRegistry) {
    this.schemaRegistry = schemaRegistry;
    this.repositoryRegistry = repositoryRegistry;
  }
  /** No-arg constructor required by CDI; real dependencies arrive via the injected constructor. */
  public BulkResourceImpl() {
    // CDI
    this(null, null);
  }
  /**
   * Processes a SCIM bulk request: validates every operation, executes them in
   * order, resolves {@code bulkId:} cross-references between operations, and
   * fails/cleans up operations that depend on a failed operation.
   *
   * @param request the bulk request containing the operations to perform
   * @param uriInfo request URI context, used to build created-resource locations
   * @return a {@link Response} wrapping a {@link BulkResponse} with per-operation results
   */
  @Override
  public Response doBulk(BulkRequest request, UriInfo uriInfo) {
    BulkResponse response;
    int errorCount = 0;
    // failOnErrors semantics: null or positive -> count errors (limit of
    // MAX_VALUE when null); non-positive -> errors are never counted.
    Integer requestFailOnErrors = request.getFailOnErrors();
    int maxErrorCount = requestFailOnErrors != null && requestFailOnErrors > 0 ? requestFailOnErrors : Integer.MAX_VALUE;
    int errorCountIncrement = requestFailOnErrors == null || requestFailOnErrors > 0 ? 1 : 0;
    List<BulkOperation> bulkOperations = request.getOperations();
    Map<String, BulkOperation> bulkIdKeyToOperationResult = new HashMap<>();
    List<IWishJavaHadTuples> allUnresolveds = new ArrayList<>();
    Map<String, Set<String>> reverseDependenciesGraph = this.generateReverseDependenciesGraph(bulkOperations);
    Map<String, Set<String>> transitiveReverseDependencies = generateTransitiveDependenciesGraph(reverseDependenciesGraph);
    log.debug("Reverse dependencies: {}", reverseDependenciesGraph);
    log.debug("Transitive reverse dependencies: {}", transitiveReverseDependencies);
    // clean out unwanted data: operations are reused as result objects, so any
    // client-supplied response/status must be discarded first
    for (BulkOperation operationRequest : bulkOperations) {
      operationRequest.setResponse(null);
      operationRequest.setStatus(null);
    }
    // get all known bulkIds, handle bad input
    for (BulkOperation operationRequest : bulkOperations) {
      String bulkId = operationRequest.getBulkId();
      Method method = operationRequest.getMethod();
      String bulkIdKey = bulkId != null ? "bulkId:" + bulkId : null;
      boolean errorOccurred = false;
      // duplicate bulkId: fail BOTH operations, since neither reference can be trusted
      if (bulkIdKey != null) {
        if (!bulkIdKeyToOperationResult.containsKey(bulkIdKey)) {
          bulkIdKeyToOperationResult.put(bulkIdKey, operationRequest);
        } else {
          errorOccurred = true;
          BulkOperation duplicateOperation = bulkIdKeyToOperationResult.get(bulkIdKey);
          createAndSetErrorResponse(operationRequest, Status.CONFLICT, "Duplicate bulkId");
          if (!(duplicateOperation.getResponse() instanceof ErrorResponse)) {
            duplicateOperation.setData(null);
            createAndSetErrorResponse(duplicateOperation, Status.CONFLICT, "Duplicate bulkId");
          }
        }
      }
      // bad/missing input for method
      if (method != null && !(operationRequest.getResponse() instanceof ErrorResponse)) {
        switch (method) {
          case POST:
          case PUT: {
            // POST/PUT require a resource payload
            if (operationRequest.getData() == null) {
              errorOccurred = true;
              createAndSetErrorResponse(operationRequest, Status.BAD_REQUEST, "data not provided");
            }
          }
          break;
          case DELETE: {
            // DELETE requires a well-formed "/<endpoint>/<id>" path on a known endpoint
            String path = operationRequest.getPath();
            if (path == null) {
              errorOccurred = true;
              createAndSetErrorResponse(operationRequest, Status.BAD_REQUEST, "path not provided");
            } else if (!PATH_PATTERN.matcher(path)
                                    .matches()) {
              errorOccurred = true;
              createAndSetErrorResponse(operationRequest, Status.BAD_REQUEST, "path is not a valid path (e.g. \"/Groups/123abc\", \"/Users/123xyz\", ...)");
            } else {
              String endPoint = path.substring(0, path.lastIndexOf('/'));
              Class<ScimResource> clazz = (Class<ScimResource>) schemaRegistry.getScimResourceClassFromEndpoint(endPoint);
              if (clazz == null) {
                errorOccurred = true;
                createAndSetErrorResponse(operationRequest, Status.BAD_REQUEST, "path does not contain a recognized endpoint (e.g. \"/Groups/...\", \"/Users/...\", ...)");
              }
            }
          }
          break;
          case PATCH: {
            errorOccurred = true;
            createAndSetErrorResponse(operationRequest, Status.NOT_IMPLEMENTED, "Method not implemented: PATCH");
          }
          break;
          default: {
          }
          break;
        }
      } else if (method == null) {
        errorOccurred = true;
        operationRequest.setData(null);
        createAndSetErrorResponse(operationRequest, Status.BAD_REQUEST, "no method provided (e.g. PUT, POST, ...");
      }
      if (errorOccurred) {
        operationRequest.setData(null);
        // Propagate the failure to every operation that transitively depends on
        // this operation's bulkId.
        if (bulkIdKey != null) {
          Set<String> reverseDependencies = transitiveReverseDependencies.getOrDefault(bulkIdKey, Collections.emptySet());
          String detail = String.format(OPERATION_DEPENDS_ON_FAILED_OPERATION, bulkIdKey);
          for (String dependentBulkIdKey : reverseDependencies) {
            BulkOperation dependentOperation = bulkIdKeyToOperationResult.get(dependentBulkIdKey);
            if (!(dependentOperation.getResponse() instanceof ErrorResponse)) {
              dependentOperation.setData(null);
              createAndSetErrorResponse(dependentOperation, Status.CONFLICT, detail);
            }
          }
        }
      }
    }
    boolean errorCountExceeded = false;
    // do the operations
    for (BulkOperation operationResult : bulkOperations) {
      if (!errorCountExceeded && !(operationResult.getResponse() instanceof ErrorResponse)) {
        try {
          this.handleBulkOperationMethod(allUnresolveds, operationResult, bulkIdKeyToOperationResult, uriInfo);
        } catch (ResourceException resourceException) {
          log.error("Failed to do bulk operation", resourceException);
          errorCount += errorCountIncrement;
          errorCountExceeded = errorCount >= maxErrorCount;
          String detail = resourceException.getLocalizedMessage();
          createAndSetErrorResponse(operationResult, resourceException.getStatus(), detail);
          // Roll back anything created for operations depending on this one.
          if (operationResult.getBulkId() != null) {
            String bulkIdKey = "bulkId:" + operationResult.getBulkId();
            this.cleanup(bulkIdKey, transitiveReverseDependencies, bulkIdKeyToOperationResult);
            operationResult.setData(null);
          }
        } catch (UnresolvableOperationException unresolvableOperationException) {
          log.error("Could not resolve bulkId during Bulk Operation method handling", unresolvableOperationException);
          errorCount += errorCountIncrement;
          String detail = unresolvableOperationException.getLocalizedMessage();
          createAndSetErrorResponse(operationResult, Status.CONFLICT, detail);
          if (operationResult.getBulkId() != null) {
            String bulkIdKey = "bulkId:" + operationResult.getBulkId();
            this.cleanup(bulkIdKey, transitiveReverseDependencies, bulkIdKeyToOperationResult);
            operationResult.setData(null);
          }
        }
      } else if (errorCountExceeded) {
        // continue processing bulk operations to cleanup any dependencies
        createAndSetErrorResponse(operationResult, Status.CONFLICT, "failOnErrors count reached");
        if (operationResult.getBulkId() != null) {
          String bulkIdKey = "bulkId:" + operationResult.getBulkId();
          this.cleanup(bulkIdKey, transitiveReverseDependencies, bulkIdKeyToOperationResult);
        }
      }
    }
    // Resolve unresolved bulkIds: second pass for forward references that could
    // not be resolved while operations were being executed.
    for (IWishJavaHadTuples iwjht : allUnresolveds) {
      BulkOperation bulkOperationResult = iwjht.bulkOperationResult;
      String bulkIdKey = iwjht.bulkIdKey;
      ScimResource scimResource = bulkOperationResult.getData();
      try {
        for (UnresolvedTopLevel unresolved : iwjht.unresolveds) {
          log.debug("Final resolution pass for {}", unresolved);
          unresolved.resolve(scimResource, bulkIdKeyToOperationResult);
        }
        // Persist the now fully-resolved resource.
        String scimResourceId = scimResource.getId();
        @SuppressWarnings("unchecked")
        Class<ScimResource> scimResourceClass = (Class<ScimResource>) scimResource.getClass();
        Repository<ScimResource> repository = repositoryRegistry.getRepository(scimResourceClass);
        repository.update(scimResourceId, null, scimResource, Collections.emptySet(), Collections.emptySet());
      } catch (UnresolvableOperationException unresolvableOperationException) {
        log.error("Could not complete final resolution pass, unresolvable bulkId", unresolvableOperationException);
        String detail = unresolvableOperationException.getLocalizedMessage();
        bulkOperationResult.setData(null);
        bulkOperationResult.setLocation(null);
        createAndSetErrorResponse(bulkOperationResult, Status.CONFLICT, detail);
        this.cleanup(bulkIdKey, transitiveReverseDependencies, bulkIdKeyToOperationResult);
      } catch (UnableToUpdateResourceException unableToUpdateResourceException) {
        log.error("Failed to update Scim Resource with resolved bulkIds", unableToUpdateResourceException);
        String detail = unableToUpdateResourceException.getLocalizedMessage();
        bulkOperationResult.setData(null);
        bulkOperationResult.setLocation(null);
        createAndSetErrorResponse(bulkOperationResult, unableToUpdateResourceException.getStatus(), detail);
        this.cleanup(bulkIdKey, transitiveReverseDependencies, bulkIdKeyToOperationResult);
      } catch (ResourceException e) {
        log.error("Could not complete final resolution pass, unresolvable bulkId", e);
        String detail = e.getLocalizedMessage();
        bulkOperationResult.setData(null);
        bulkOperationResult.setLocation(null);
        createAndSetErrorResponse(bulkOperationResult, Status.NOT_FOUND, detail);
        this.cleanup(bulkIdKey, transitiveReverseDependencies, bulkIdKeyToOperationResult);
      }
    }
    Status status = errorCountExceeded ? Status.BAD_REQUEST : Status.OK;
    response = new BulkResponse()
      .setOperations(bulkOperations)
      .setStatus(status);
    return Response.status(status)
                   .entity(response)
                   .build();
  }
/**
* Delete resources that depend on {@code bulkIdKeyToCleanup}, remove
* {@link BulkOperation}s data, and set their code and response
*
* @param bulkIdKeyToCleanup
* @param transitiveReverseDependencies
* @param bulkIdKeyToOperationResult
*/
private void cleanup(String bulkIdKeyToCleanup, Map<String, Set<String>> transitiveReverseDependencies, Map<String, BulkOperation> bulkIdKeyToOperationResult) {
Set<String> reverseDependencies = transitiveReverseDependencies.getOrDefault(bulkIdKeyToCleanup, Collections.emptySet());
BulkOperation operationResult = bulkIdKeyToOperationResult.get(bulkIdKeyToCleanup);
String bulkId = operationResult.getBulkId();
ScimResource scimResource = operationResult.getData();
@SuppressWarnings("unchecked")
Class<ScimResource> scimResourceClass = (Class<ScimResource>) scimResource.getClass();
Repository<ScimResource> repository = this.repositoryRegistry.getRepository(scimResourceClass);
try {
if (StringUtils.isNotBlank(scimResource.getId())) {
repository.delete(scimResource.getId());
}
} catch (ResourceException unableToDeleteResourceException) {
log.error("Could not delete ScimResource after failure: {}", scimResource);
}
for (String dependentBulkIdKey : reverseDependencies) {
BulkOperation dependentOperationResult = bulkIdKeyToOperationResult.get(dependentBulkIdKey);
if (!(dependentOperationResult.getResponse() instanceof ErrorResponse))
try {
ScimResource dependentResource = dependentOperationResult.getData();
String dependentResourceId = dependentResource.getId();
@SuppressWarnings("unchecked")
Class<ScimResource> dependentResourceClass = (Class<ScimResource>) dependentResource.getClass();
Repository<ScimResource> dependentResourceRepository = this.repositoryRegistry.getRepository(dependentResourceClass);
dependentOperationResult.setData(null);
dependentOperationResult.setLocation(null);
createAndSetErrorResponse(dependentOperationResult, Status.CONFLICT, String.format(OPERATION_DEPENDS_ON_FAILED_OPERATION, bulkId, dependentBulkIdKey));
dependentResourceRepository.delete(dependentResourceId);
} catch (ResourceException unableToDeleteResourceException) {
log.error("Could not delete depenedent ScimResource after failing to update dependee", unableToDeleteResourceException);
}
}
}
  /**
   * Dispatches a single validated bulk operation to its repository according to
   * the requested method (POST, PUT or DELETE), recording the outcome on
   * {@code operationResult} in place. Fills {@code unresolveds} with bulkId
   * references that could not be resolved yet.
   *
   * @param unresolveds collector for top-level bulkId references still pending resolution
   * @param operationResult the operation to execute; its data/location/status are updated in place
   * @param bulkIdKeyToOperationResult map from "bulkId:&lt;id&gt;" keys to their operations
   * @param uriInfo request URI context, used to build the created resource's location
   * @throws ResourceException if the repository create/update/delete fails
   * @throws UnresolvableOperationException if a referenced bulkId cannot be resolved
   */
  private void handleBulkOperationMethod(List<IWishJavaHadTuples> unresolveds, BulkOperation operationResult, Map<String, BulkOperation> bulkIdKeyToOperationResult, UriInfo uriInfo) throws ResourceException, UnresolvableOperationException {
    ScimResource scimResource = operationResult.getData();
    Method bulkOperationMethod = operationResult.getMethod();
    String bulkId = operationResult.getBulkId();
    Class<ScimResource> scimResourceClass;
    // Determine the resource class: from the path's endpoint when there is no
    // payload (DELETE), otherwise from the payload itself.
    if (scimResource == null) {
      String path = operationResult.getPath();
      String endPoint = path.substring(0, path.lastIndexOf('/'));
      Class<ScimResource> clazz = (Class<ScimResource>) schemaRegistry.getScimResourceClassFromEndpoint(endPoint);
      scimResourceClass = clazz;
    } else {
      @SuppressWarnings("unchecked")
      Class<ScimResource> clazz = (Class<ScimResource>) scimResource.getClass();
      scimResourceClass = clazz;
    }
    Repository<ScimResource> repository = repositoryRegistry.getRepository(scimResourceClass);
    switch (bulkOperationMethod) {
      case POST: {
        log.debug("POST: {}", scimResource);
        // Resolve any bulkId references before persisting.
        this.resolveTopLevel(unresolveds, operationResult, bulkIdKeyToOperationResult);
        log.debug("Creating {}", scimResource);
        ScimResource newScimResource = repository.create(scimResource, Collections.emptySet(), Collections.emptySet());
        String bulkOperationPath = operationResult.getPath();
        String newResourceId = newScimResource.getId();
        String newResourceUri = uriInfo.getBaseUriBuilder()
                                       .path(bulkOperationPath)
                                       .path(newResourceId)
                                       .build()
                                       .toString();
        // Record the created resource under its bulkId so later operations can
        // resolve references to it.
        if (bulkId != null) {
          String bulkIdKey = "bulkId:" + bulkId;
          log.debug("adding {} = {}", bulkIdKey, newResourceId);
          bulkIdKeyToOperationResult.get(bulkIdKey)
                                    .setData(newScimResource);
        }
        operationResult.setData(newScimResource);
        operationResult.setLocation(newResourceUri);
        operationResult.setPath(null);
        operationResult.setStatus(StatusWrapper.wrap(Status.CREATED));
      }
      break;
      case DELETE: {
        log.debug("DELETE: {}", operationResult.getPath());
        // The resource id is the last path segment.
        String scimResourceId = operationResult.getPath()
                                               .substring(operationResult.getPath()
                                                                         .lastIndexOf("/")
                                                   + 1);
        repository.delete(scimResourceId);
        operationResult.setStatus(StatusWrapper.wrap(Status.NO_CONTENT));
      }
      break;
      case PUT: {
        log.debug("PUT: {}", scimResource);
        this.resolveTopLevel(unresolveds, operationResult, bulkIdKeyToOperationResult);
        String id = operationResult.getPath()
                                   .substring(operationResult.getPath()
                                                             .lastIndexOf("/")
                                       + 1);
        try {
          repository.update(id, null, scimResource, Collections.emptySet(), Collections.emptySet());
          operationResult.setStatus(StatusWrapper.wrap(Status.OK));
        } catch (UnableToRetrieveResourceException e) {
          // Target to update does not exist.
          operationResult.setStatus(StatusWrapper.wrap(Status.NOT_FOUND));
        }
      }
      break;
      default: {
        // PATCH and malformed methods were rejected during validation; anything
        // reaching here is unexpected.
        BulkOperation.Method method = operationResult.getMethod();
        String detail = "Method not allowed: " + method;
        log.error("Received unallowed method: {}", method);
        createAndSetErrorResponse(operationResult, Status.METHOD_NOT_ALLOWED, detail);
      }
      break;
    }
  }
private static void createAndSetErrorResponse(BulkOperation operationResult, int statusCode, String detail) {
createAndSetErrorResponse(operationResult, Status.fromStatusCode(statusCode), detail);
}
private static void createAndSetErrorResponse(BulkOperation operationResult, Status status, String detail) {
ErrorResponse error = new ErrorResponse(status, detail);
operationResult.setResponse(error);
operationResult.setStatus(new StatusWrapper(status));
operationResult.setPath(null);
}
  /**
   * Immutable 3-tuple pairing a bulk operation with its bulkId key and the list
   * of top-level references still awaiting resolution in the final pass.
   */
  private static class IWishJavaHadTuples {
    // "bulkId:<id>" key of the operation whose references are pending
    public final String bulkIdKey;
    // top-level references in the operation's resource that are not yet resolved
    public final List<UnresolvedTopLevel> unresolveds;
    // the operation (and its resource data) the references belong to
    public final BulkOperation bulkOperationResult;
    public IWishJavaHadTuples(String bulkIdKey, List<UnresolvedTopLevel> unresolveds, BulkOperation bulkOperationResult) {
      this.bulkIdKey = bulkIdKey;
      this.unresolveds = unresolveds;
      this.bulkOperationResult = bulkOperationResult;
    }
  }
  /**
   * Thrown when a {@code bulkId:} reference cannot be resolved — either it
   * refers to no operation in the request, or its target operation failed.
   */
  private static class UnresolvableOperationException extends Exception {
    private static final long serialVersionUID = -6081994707016671935L;
    public UnresolvableOperationException(String message) {
      super(message);
    }
  }
private static class UnresolvedComplex {
private final Object object;
private final Schema.AttributeAccessor accessor;
private final String bulkIdKey;
public UnresolvedComplex(Object object, Schema.AttributeAccessor accessor, String bulkIdKey) {
this.object = object;
this.accessor = accessor;
this.bulkIdKey = bulkIdKey;
}
public void resolve(Map<String, BulkOperation> bulkIdKeyToOperationResult) throws UnresolvableOperationException {
BulkOperation resolvedOperation = bulkIdKeyToOperationResult.get(this.bulkIdKey);
BaseResource response = resolvedOperation.getResponse();
ScimResource resolvedResource = resolvedOperation.getData();
if ((response == null || !(response instanceof ErrorResponse)) && resolvedResource != null) {
String resolvedId = resolvedResource.getId();
this.accessor.set(this.object, resolvedId);
} else {
throw new UnresolvableOperationException(String.format(BULK_ID_REFERS_TO_FAILED_RESOURCE, this.bulkIdKey));
}
}
}
  /**
   * Base class for a deferred {@code bulkId:} reference on a top-level attribute
   * of a {@link ScimResource}; subclasses implement the final resolution step.
   */
  private static abstract class UnresolvedTopLevel {
    // writes the resolved value back onto the owning resource's attribute
    protected final Schema.AttributeAccessor accessor;
    public UnresolvedTopLevel(Schema.AttributeAccessor accessor) {
      this.accessor = accessor;
    }
    public abstract void resolve(ScimResource scimResource, Map<String, BulkOperation> bulkIdKeyToOperationResult) throws UnresolvableOperationException;
  }
private static class UnresolvedTopLevelBulkId extends UnresolvedTopLevel {
private final String unresolvedBulkIdKey;
public UnresolvedTopLevelBulkId(Schema.AttributeAccessor accessor, String bulkIdKey) {
super(accessor);
this.unresolvedBulkIdKey = bulkIdKey;
}
@Override
public void resolve(ScimResource scimResource, Map<String, BulkOperation> bulkIdKeyToOperationResult) throws UnresolvableOperationException {
BulkOperation resolvedOperationResult = bulkIdKeyToOperationResult.get(this.unresolvedBulkIdKey);
BaseResource response = resolvedOperationResult.getResponse();
ScimResource resolvedResource = resolvedOperationResult.getData();
if ((response == null || !(response instanceof ErrorResponse)) && resolvedResource != null) {
String resolvedId = resolvedResource.getId();
super.accessor.set(scimResource, resolvedId);
} else {
throw new UnresolvableOperationException("Bulk ID cannot be resolved because the resource it refers to had failed to be created: " + this.unresolvedBulkIdKey);
}
}
}
private static class UnresolvedTopLevelComplex extends UnresolvedTopLevel {
public final Object complex;
public final List<UnresolvedComplex> unresolveds;
public UnresolvedTopLevelComplex(Schema.AttributeAccessor accessor, Object complex, List<UnresolvedComplex> unresolveds) {
super(accessor);
this.complex = complex;
this.unresolveds = unresolveds;
}
@Override
public void resolve(ScimResource scimResource, Map<String, BulkOperation> bulkIdKeyToOperationResult) throws UnresolvableOperationException {
for (UnresolvedComplex unresolved : this.unresolveds) {
unresolved.resolve(bulkIdKeyToOperationResult);
}
this.accessor.set(scimResource, this.complex);
}
}
  /**
   * Recursively scans {@code attributeValue}'s sub-attributes, replacing any
   * {@code bulkId:} references whose target operation has already produced an
   * id, and collecting the rest into {@code unresolveds} for the final
   * resolution pass.
   *
   * @param unresolveds collector for references that cannot be resolved yet
   * @param attributeValue the complex attribute value to scan (may be null)
   * @param attribute the schema attribute describing {@code attributeValue}
   * @param bulkIdKeyToOperationResult map from "bulkId:&lt;id&gt;" keys to their operations
   * @return {@code unresolveds}, for caller convenience
   * @throws UnresolvableOperationException if a bulkId refers to no operation in the request
   */
  private static List<UnresolvedComplex> resolveAttribute(List<UnresolvedComplex> unresolveds, Object attributeValue, Schema.Attribute attribute, Map<String, BulkOperation> bulkIdKeyToOperationResult) throws UnresolvableOperationException {
    if (attributeValue == null) {
      return unresolveds;
    }
    Set<Schema.Attribute> attributes = attribute.getAttributes();
    for (Schema.Attribute subAttribute : attributes) {
      Schema.AttributeAccessor accessor = subAttribute.getAccessor();
      if (subAttribute.isScimResourceIdReference()) {
        // TODO - This will fail if field is a char or Character array
        String bulkIdKey = accessor.get(attributeValue);
        if (bulkIdKey != null && bulkIdKey.startsWith("bulkId:")) {
          log.debug("Found bulkId: {}", bulkIdKey);
          if (bulkIdKeyToOperationResult.containsKey(bulkIdKey)) {
            BulkOperation resolvedOperationResult = bulkIdKeyToOperationResult.get(bulkIdKey);
            BaseResource response = resolvedOperationResult.getResponse();
            ScimResource resolvedResource = resolvedOperationResult.getData();
            // Resolve in place when the target already has an id; otherwise
            // defer to the final resolution pass.
            if ((response == null || !(response instanceof ErrorResponse)) && resolvedResource != null && resolvedResource.getId() != null) {
              String resolvedId = resolvedResource.getId();
              accessor.set(attributeValue, resolvedId);
            } else {
              UnresolvedComplex unresolved = new UnresolvedComplex(attributeValue, accessor, bulkIdKey);
              unresolveds.add(unresolved);
            }
          } else {
            // References to bulkIds absent from the request can never resolve.
            throw new UnresolvableOperationException(String.format(BULK_ID_DOES_NOT_EXIST, bulkIdKey));
          }
        }
      } else if (subAttribute.getType() == Schema.Attribute.Type.COMPLEX) {
        // Recurse into nested complex values, flattening collections/arrays.
        Object subFieldValue = accessor.get(attributeValue);
        if (subFieldValue != null) {
          Class<?> subFieldClass = subFieldValue.getClass();
          boolean isCollection = Collection.class.isAssignableFrom(subFieldClass);
          if (isCollection || subFieldClass.isArray()) {
            @SuppressWarnings("unchecked")
            Collection<Object> subFieldValues = isCollection ? (Collection<Object>) subFieldValue : Arrays.asList((Object[]) subFieldValue);
            for (Object subArrayFieldValue : subFieldValues) {
              resolveAttribute(unresolveds, subArrayFieldValue, subAttribute, bulkIdKeyToOperationResult);
            }
          } else {
            resolveAttribute(unresolveds, subFieldValue, subAttribute, bulkIdKeyToOperationResult);
          }
        }
      }
    }
    log.debug("Resolved attribute had {} unresolved fields", unresolveds.size());
    return unresolveds;
  }
    /**
     * Attempt to resolve the bulkIds referenced inside of the
     * {@link ScimResource} contained inside of {@code bulkOperationResult}. Fill
     * {@code unresolveds} with bulkIds that could not be yet resolved.
     *
     * <p>Top-level SCIM-resource-id references whose value is a
     * {@code "bulkId:"} placeholder are rewritten in place when the referenced
     * operation already completed successfully. COMPLEX attributes are
     * descended into via {@code resolveAttribute}. Anything still unresolved is
     * nulled out on the resource and recorded so it can be re-applied later.
     *
     * @param unresolveds output list; one tuple is appended when this resource
     *                    still has at least one unresolved reference
     * @param bulkOperationResult the operation whose payload is being resolved
     * @param bulkIdKeyToOperationResult map from {@code "bulkId:xxx"} keys to
     *                                   their corresponding operations
     * @throws UnresolvableOperationException if a referenced bulkId key is not
     *         present in {@code bulkIdKeyToOperationResult} at all
     */
    private void resolveTopLevel(List<IWishJavaHadTuples> unresolveds, BulkOperation bulkOperationResult, Map<String, BulkOperation> bulkIdKeyToOperationResult) throws UnresolvableOperationException {
        ScimResource scimResource = bulkOperationResult.getData();
        String schemaUrn = scimResource.getBaseUrn();
        Schema schema = this.schemaRegistry.getSchema(schemaUrn);
        List<UnresolvedTopLevel> unresolvedTopLevels = new ArrayList<>();
        for (Schema.Attribute attribute : schema.getAttributes()) {
            Schema.AttributeAccessor accessor = attribute.getAccessor();
            if (attribute.isScimResourceIdReference()) {
                String bulkIdKey = accessor.get(scimResource);
                // Only "bulkId:" placeholders need resolution; real ids pass through.
                if (bulkIdKey != null && bulkIdKey.startsWith("bulkId:")) {
                    if (bulkIdKeyToOperationResult.containsKey(bulkIdKey)) {
                        BulkOperation resolvedOperationResult = bulkIdKeyToOperationResult.get(bulkIdKey);
                        BaseResource response = resolvedOperationResult.getResponse();
                        ScimResource resolvedResource = resolvedOperationResult.getData();
                        // Resolvable only if the referenced operation did not fail
                        // and produced a resource (NOTE: unlike resolveAttribute,
                        // a null id is not checked here — TODO confirm intentional).
                        if ((response == null || !(response instanceof ErrorResponse)) && resolvedResource != null) {
                            String resolvedId = resolvedResource.getId();
                            accessor.set(scimResource, resolvedId);
                        } else {
                            // Not resolvable yet: remember the accessor/key pair and
                            // clear the placeholder so it is not persisted as-is.
                            UnresolvedTopLevel unresolved = new UnresolvedTopLevelBulkId(accessor, bulkIdKey);
                            accessor.set(scimResource, null);
                            unresolvedTopLevels.add(unresolved);
                        }
                    } else {
                        // The placeholder points at a bulkId no operation declared.
                        throw new UnresolvableOperationException(String.format(BULK_ID_DOES_NOT_EXIST, bulkIdKey));
                    }
                }
            } else if (attribute.getType() == Schema.Attribute.Type.COMPLEX) {
                Object attributeFieldValue = accessor.get(scimResource);
                if (attributeFieldValue != null) {
                    List<UnresolvedComplex> subUnresolveds = new ArrayList<>();
                    Class<?> subFieldClass = attributeFieldValue.getClass();
                    boolean isCollection = Collection.class.isAssignableFrom(subFieldClass);
                    // Multi-valued complex attributes may be backed by a
                    // Collection or an array; resolve each element.
                    if (isCollection || subFieldClass.isArray()) {
                        @SuppressWarnings("unchecked")
                        Collection<Object> subFieldValues = isCollection ? (Collection<Object>) attributeFieldValue : Arrays.asList((Object[]) attributeFieldValue);
                        for (Object subArrayFieldValue : subFieldValues) {
                            resolveAttribute(subUnresolveds, subArrayFieldValue, attribute, bulkIdKeyToOperationResult);
                        }
                    } else {
                        resolveAttribute(subUnresolveds, attributeFieldValue, attribute, bulkIdKeyToOperationResult);
                    }
                    if (subUnresolveds.size() > 0) {
                        // Detach the whole complex value until its inner
                        // references can be resolved, and remember it.
                        UnresolvedTopLevel unresolved = new UnresolvedTopLevelComplex(accessor, attributeFieldValue, subUnresolveds);
                        accessor.set(scimResource, null);
                        unresolvedTopLevels.add(unresolved);
                    }
                }
            }
        }
        if (unresolvedTopLevels.size() > 0) {
            String bulkIdKey = "bulkId:" + bulkOperationResult.getBulkId();
            unresolveds.add(new IWishJavaHadTuples(bulkIdKey, unresolvedTopLevels, bulkOperationResult));
        }
    }
/**
* Traverse the provided dependency graph and fill {@code visited} with
* visited bulkIds.
*
* @param visited
* @param dependencyGraph
* @param root
* @param current
*/
private static void generateVisited(Set<String> visited, Map<String, Set<String>> dependencyGraph, String root, String current) {
if (!root.equals(current) && !visited.contains(current)) {
visited.add(current);
Set<String> dependencies = dependencyGraph.getOrDefault(current, Collections.emptySet());
for (String dependency : dependencies) {
generateVisited(visited, dependencyGraph, root, dependency);
}
}
}
/**
* If A -> {B} and B -> {C} then A -> {B, C}.
*
* @param dependenciesGraph
* @return
*/
private static Map<String, Set<String>> generateTransitiveDependenciesGraph(Map<String, Set<String>> dependenciesGraph) {
Map<String, Set<String>> transitiveDependenciesGraph = new HashMap<>();
for (Map.Entry<String, Set<String>> entry : dependenciesGraph.entrySet()) {
String root = entry.getKey();
Set<String> dependencies = entry.getValue();
Set<String> visited = new HashSet<>();
transitiveDependenciesGraph.put(root, visited);
for (String dependency : dependencies) {
generateVisited(visited, dependenciesGraph, root, dependency);
}
}
return transitiveDependenciesGraph;
}
private static void generateReverseDependenciesGraph(Map<String, Set<String>> reverseDependenciesGraph, String dependentBulkId, Object scimObject, Set<Schema.Attribute> scimObjectAttributes) {
for (Schema.Attribute scimObjectAttribute : scimObjectAttributes)
if (scimObjectAttribute.isScimResourceIdReference()) {
String reference = scimObjectAttribute.getAccessor().get(scimObject);
if (reference != null && reference.startsWith("bulkId:")) {
Set<String> dependents = reverseDependenciesGraph.computeIfAbsent(reference, (unused) -> new HashSet<>());
dependents.add("bulkId:" + dependentBulkId);
}
} else if (scimObjectAttribute.isMultiValued()) { // all multiValueds
// are COMPLEX, not
// all COMPLEXES are
// multiValued
Object attributeObject = scimObjectAttribute.getAccessor().get(scimObject);
if (attributeObject != null) {
Class<?> attributeObjectClass = attributeObject.getClass();
boolean isCollection = Collection.class.isAssignableFrom(attributeObjectClass);
Collection<?> attributeValues = isCollection ? (Collection<?>) attributeObject : List.of(attributeObject);
Set<Schema.Attribute> subAttributes = scimObjectAttribute.getAttributes();
for (Object attributeValue : attributeValues) {
generateReverseDependenciesGraph(reverseDependenciesGraph, dependentBulkId, attributeValue, subAttributes);
}
}
} else if (scimObjectAttribute.getType() == Schema.Attribute.Type.COMPLEX) {
Object attributeValue = scimObjectAttribute.getAccessor().get(scimObject);
Set<Schema.Attribute> subAttributes = scimObjectAttribute.getAttributes();
generateReverseDependenciesGraph(reverseDependenciesGraph, dependentBulkId, attributeValue, subAttributes);
}
}
/**
* Finds the reverse dependencies of each {@link BulkOperation}.
*
* @param bulkOperations
* @return
*/
private Map<String, Set<String>> generateReverseDependenciesGraph(List<BulkOperation> bulkOperations) {
Map<String, Set<String>> reverseDependenciesGraph = new HashMap<>();
for (BulkOperation bulkOperation : bulkOperations) {
String bulkId = bulkOperation.getBulkId();
if (bulkId != null) {
ScimResource scimResource = bulkOperation.getData();
String scimResourceBaseUrn = scimResource.getBaseUrn();
Schema schema = this.schemaRegistry.getSchema(scimResourceBaseUrn);
Set<Schema.Attribute> attributes = schema.getAttributes();
generateReverseDependenciesGraph(reverseDependenciesGraph, bulkId, scimResource, attributes);
}
}
return reverseDependenciesGraph;
}
}
/*
* Copyright (C) 2014 The Android Open Source Project
* Copyright (c) 1995, 2015, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package java.util.zip;
import java.io.Closeable;
import java.io.InputStream;
import java.io.IOException;
import java.io.EOFException;
import java.io.File;
import java.io.FileNotFoundException;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.ArrayDeque;
import java.util.Deque;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Spliterator;
import java.util.Spliterators;
import java.util.WeakHashMap;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;
import dalvik.system.CloseGuard;
import dalvik.system.ZipPathValidator;
import static java.util.zip.ZipConstants64.*;
/**
* This class is used to read entries from a zip file.
*
* <p> Unless otherwise noted, passing a <tt>null</tt> argument to a constructor
* or method in this class will cause a {@link NullPointerException} to be
* thrown.
*
* @author David Connelly
*/
public
class ZipFile implements ZipConstants, Closeable {
// Android-note: jzfile does not require @ReachabilitySensitive annotation.
// The @ReachabilitySensitive annotation is usually added to instance fields that references
// native data that is cleaned up when the instance becomes unreachable. Its presence ensures
// that the instance object is not finalized until the field is no longer used. Without it an
// instance could be finalized during execution of an instance method iff that method's this
// variable holds the last reference to the instance and the method had copied all the fields
// it needs out of the instance. That would release the native data, invalidating its reference
// and would cause serious problems if the method had taken a copy of that field and
// then called a native method that would try to use it.
//
// This field does not require the annotation because all usages of this field are enclosed
// within a synchronized(this) block and finalizing of the object referenced in a synchronized
// block is not allowed as that would release its monitor that is currently in use.
/*private*/ long jzfile; // address of jzfile data
private final String name; // zip file name
private final int total; // total number of entries
private final boolean locsig; // if zip file starts with LOCSIG (usually true)
private volatile boolean closeRequested = false;
// Android-added: CloseGuard support.
private final CloseGuard guard = CloseGuard.get();
// Android-added: Do not use unlink() to implement OPEN_DELETE.
// Upstream uses unlink() to cause the file name to be removed from the filesystem after it is
// opened but that does not work on fuse fs as it causes problems with lseek. Android simply
// keeps a reference to the File so that it can explicitly delete it during close.
//
// OpenJDK 9+181 has a pure Java implementation of ZipFile that does not use unlink() and
// instead does something very similar to what Android does. If Android adopts it then this
// patch can be dropped.
// See http://b/28950284 and http://b/28901232 for more details.
private final File fileToRemoveOnClose;
private static final int STORED = ZipEntry.STORED;
private static final int DEFLATED = ZipEntry.DEFLATED;
/**
* Mode flag to open a zip file for reading.
*/
public static final int OPEN_READ = 0x1;
/**
* Mode flag to open a zip file and mark it for deletion. The file will be
* deleted some time between the moment that it is opened and the moment
* that it is closed, but its contents will remain accessible via the
* <tt>ZipFile</tt> object until either the close method is invoked or the
* virtual machine exits.
*/
public static final int OPEN_DELETE = 0x4;
// Android-removed: initIDs() not used on Android.
/*
static {
/* Zip library is loaded from System.initializeSystemClass *
initIDs();
}
private static native void initIDs();
*/
    private static final boolean usemmap; // passed to native open(); Android always mmaps

    // Android-added: An instance variable that determines if zip path validation should be enabled.
    private final boolean isZipPathValidatorEnabled;
    static {
        // Android-changed: Always use mmap.
        /*
        // A system prpperty to disable mmap use to avoid vm crash when
        // in-use zip file is accidently overwritten by others.
        String prop = sun.misc.VM.getSavedProperty("sun.zip.disableMemoryMapping");
        usemmap = (prop == null ||
                   !(prop.length() == 0 || prop.equalsIgnoreCase("true")));
        */
        usemmap = true;
    }
// Android-changed: Additional ZipException throw scenario with ZipPathValidator.
/**
* Opens a zip file for reading.
*
* <p>First, if there is a security manager, its <code>checkRead</code>
* method is called with the <code>name</code> argument as its argument
* to ensure the read is allowed.
*
* <p>The UTF-8 {@link java.nio.charset.Charset charset} is used to
* decode the entry names and comments.
*
* <p>If the app targets Android U or above, zip file entry names containing
* ".." or starting with "/" passed here will throw a {@link ZipException}.
* For more details, see {@link dalvik.system.ZipPathValidator}.
*
* @param name the name of the zip file
* @throws ZipException if (1) a ZIP format error has occurred or
* (2) <code>targetSdkVersion >= BUILD.VERSION_CODES.UPSIDE_DOWN_CAKE</code>
* and (the <code>name</code> argument contains ".." or starts with "/").
* @throws IOException if an I/O error has occurred
* @throws SecurityException if a security manager exists and its
* <code>checkRead</code> method doesn't allow read access to the file.
*
* @see SecurityManager#checkRead(java.lang.String)
*/
    public ZipFile(String name) throws IOException {
        // Delegate to the File-based constructor in read-only mode.
        this(new File(name), OPEN_READ);
    }
/**
* Opens a new <code>ZipFile</code> to read from the specified
* <code>File</code> object in the specified mode. The mode argument
* must be either <tt>OPEN_READ</tt> or <tt>OPEN_READ | OPEN_DELETE</tt>.
*
* <p>First, if there is a security manager, its <code>checkRead</code>
* method is called with the <code>name</code> argument as its argument to
* ensure the read is allowed.
*
* <p>The UTF-8 {@link java.nio.charset.Charset charset} is used to
* decode the entry names and comments
*
* @param file the ZIP file to be opened for reading
* @param mode the mode in which the file is to be opened
* @throws ZipException if a ZIP format error has occurred
* @throws IOException if an I/O error has occurred
* @throws SecurityException if a security manager exists and
* its <code>checkRead</code> method
* doesn't allow read access to the file,
* or its <code>checkDelete</code> method doesn't allow deleting
* the file when the <tt>OPEN_DELETE</tt> flag is set.
* @throws IllegalArgumentException if the <tt>mode</tt> argument is invalid
* @see SecurityManager#checkRead(java.lang.String)
* @since 1.3
*/
    public ZipFile(File file, int mode) throws IOException {
        // UTF-8 is the default charset for entry names and comments.
        this(file, mode, StandardCharsets.UTF_8);
    }
/**
* Opens a ZIP file for reading given the specified File object.
*
* <p>The UTF-8 {@link java.nio.charset.Charset charset} is used to
* decode the entry names and comments.
*
* @param file the ZIP file to be opened for reading
* @throws ZipException if a ZIP format error has occurred
* @throws IOException if an I/O error has occurred
*/
    public ZipFile(File file) throws ZipException, IOException {
        // Read-only open with the default (UTF-8) charset.
        this(file, OPEN_READ);
    }
    // Encodes/decodes entry names and comments; built from the constructor's charset.
    private ZipCoder zc;
// Android-changed: Use of the hidden constructor with a new argument for zip path validation.
/**
* Opens a new <code>ZipFile</code> to read from the specified
* <code>File</code> object in the specified mode. The mode argument
* must be either <tt>OPEN_READ</tt> or <tt>OPEN_READ | OPEN_DELETE</tt>.
*
* <p>First, if there is a security manager, its <code>checkRead</code>
* method is called with the <code>name</code> argument as its argument to
* ensure the read is allowed.
*
* @param file the ZIP file to be opened for reading
* @param mode the mode in which the file is to be opened
* @param charset
* the {@linkplain java.nio.charset.Charset charset} to
* be used to decode the ZIP entry name and comment that are not
* encoded by using UTF-8 encoding (indicated by entry's general
* purpose flag).
*
* @throws ZipException if a ZIP format error has occurred
* @throws IOException if an I/O error has occurred
*
* @throws SecurityException
* if a security manager exists and its <code>checkRead</code>
* method doesn't allow read access to the file,or its
* <code>checkDelete</code> method doesn't allow deleting the
* file when the <tt>OPEN_DELETE</tt> flag is set
*
* @throws IllegalArgumentException if the <tt>mode</tt> argument is invalid
*
* @see SecurityManager#checkRead(java.lang.String)
*
* @since 1.7
*/
    public ZipFile(File file, int mode, Charset charset) throws IOException
    {
        // Public constructors always request zip path validation; the hidden
        // four-argument constructor decides whether it is actually active.
        this(file, mode, charset, /* enableZipPathValidator */ true);
    }
// Android-added: New hidden constructor with an argument for zip path validation.
/** @hide */
    public ZipFile(File file, int mode, boolean enableZipPathValidator) throws IOException {
        // UTF-8 default charset; caller controls zip path validation.
        this(file, mode, StandardCharsets.UTF_8, enableZipPathValidator);
    }
// Android-changed: Change existing constructor ZipFile(File file, int mode, Charset charset)
// to have a new argument enableZipPathValidator in order to set the isZipPathValidatorEnabled
// variable before calling the native method open().
/** @hide */
    public ZipFile(File file, int mode, Charset charset, boolean enableZipPathValidator)
            throws IOException {
        // Validation is active only when requested AND a validator is installed.
        isZipPathValidatorEnabled = enableZipPathValidator && !ZipPathValidator.isClear();
        // OPEN_READ is mandatory; OPEN_DELETE is the only other legal bit.
        if (((mode & OPEN_READ) == 0) ||
            ((mode & ~(OPEN_READ | OPEN_DELETE)) != 0)) {
            throw new IllegalArgumentException("Illegal mode: 0x"+
                                               Integer.toHexString(mode));
        }
        String name = file.getPath();
        // Android-removed: SecurityManager is always null.
        /*
        SecurityManager sm = System.getSecurityManager();
        if (sm != null) {
            sm.checkRead(name);
            if ((mode & OPEN_DELETE) != 0) {
                sm.checkDelete(name);
            }
        }
        */
        // Android-added: Do not use unlink() to implement OPEN_DELETE.
        fileToRemoveOnClose = ((mode & OPEN_DELETE) != 0) ? file : null;
        if (charset == null)
            throw new NullPointerException("charset is null");
        this.zc = ZipCoder.get(charset);
        // Android-removed: Skip perf counters.
        // long t0 = System.nanoTime();
        // Native open returns the address of the jzfile struct; subsequent
        // native calls (getTotal, startsWithLOC, ...) take this handle.
        jzfile = open(name, mode, file.lastModified(), usemmap);
        // Android-removed: Skip perf counters.
        // sun.misc.PerfCounter.getZipFileOpenTime().addElapsedTimeFrom(t0);
        // sun.misc.PerfCounter.getZipFileCount().increment();
        this.name = name;
        this.total = getTotal(jzfile);
        this.locsig = startsWithLOC(jzfile);
        // Android-added: CloseGuard support.
        guard.open("close");
    }
/**
* Opens a zip file for reading.
*
* <p>First, if there is a security manager, its <code>checkRead</code>
* method is called with the <code>name</code> argument as its argument
* to ensure the read is allowed.
*
* @param name the name of the zip file
* @param charset
* the {@linkplain java.nio.charset.Charset charset} to
* be used to decode the ZIP entry name and comment that are not
* encoded by using UTF-8 encoding (indicated by entry's general
* purpose flag).
*
* @throws ZipException if a ZIP format error has occurred
* @throws IOException if an I/O error has occurred
* @throws SecurityException
* if a security manager exists and its <code>checkRead</code>
* method doesn't allow read access to the file
*
* @see SecurityManager#checkRead(java.lang.String)
*
* @since 1.7
*/
    public ZipFile(String name, Charset charset) throws IOException
    {
        // Read-only open of a path with an explicit charset.
        this(new File(name), OPEN_READ, charset);
    }
/**
* Opens a ZIP file for reading given the specified File object.
* @param file the ZIP file to be opened for reading
* @param charset
* The {@linkplain java.nio.charset.Charset charset} to be
* used to decode the ZIP entry name and comment (ignored if
* the <a href="package-summary.html#lang_encoding"> language
* encoding bit</a> of the ZIP entry's general purpose bit
* flag is set).
*
* @throws ZipException if a ZIP format error has occurred
* @throws IOException if an I/O error has occurred
*
* @since 1.7
*/
    public ZipFile(File file, Charset charset) throws IOException
    {
        // Read-only open of a File with an explicit charset.
        this(file, OPEN_READ, charset);
    }
/**
* Returns the zip file comment, or null if none.
*
* @return the comment string for the zip file, or null if none
*
* @throws IllegalStateException if the zip file has been closed
*
* Since 1.7
*/
public String getComment() {
synchronized (this) {
ensureOpen();
byte[] bcomm = getCommentBytes(jzfile);
if (bcomm == null)
return null;
return zc.toString(bcomm, bcomm.length);
}
}
    /**
     * Returns the zip file entry for the specified name, or null
     * if not found.
     *
     * @param name the name of the entry
     * @return the zip file entry, or null if not found
     * @throws IllegalStateException if the zip file has been closed
     */
    public ZipEntry getEntry(String name) {
        if (name == null) {
            throw new NullPointerException("name");
        }
        long jzentry = 0;
        synchronized (this) {
            ensureOpen();
            // addSlash=true: also try "name/" when "name" is not found
            // (see the Android-changed comments in getInputStream).
            jzentry = getEntry(jzfile, zc.getBytes(name), true);
            if (jzentry != 0) {
                // Copy the native jzentry into a Java ZipEntry, then free it.
                ZipEntry ze = getZipEntry(name, jzentry);
                freeEntry(jzfile, jzentry);
                return ze;
            }
        }
        return null;
    }
    // Looks up an entry by encoded name in the native index; returns the
    // address of a native jzentry struct, or 0 if not found. addSlash=true
    // additionally tries "name/" (see callers). Every nonzero handle must be
    // released with freeEntry().
    private static native long getEntry(long jzfile, byte[] name,
                                        boolean addSlash);
    // freeEntry releases the C jzentry struct.
    private static native void freeEntry(long jzfile, long jzentry);
    // the outstanding inputstreams that need to be closed,
    // mapped to the inflater objects they use.
    private final Map<InputStream, Inflater> streams = new WeakHashMap<>();
    /**
     * Returns an input stream for reading the contents of the specified
     * zip file entry.
     *
     * <p> Closing this ZIP file will, in turn, close all input
     * streams that have been returned by invocations of this method.
     *
     * @param entry the zip file entry
     * @return the input stream for reading the contents of the specified
     * zip file entry, or null if the entry is not present in this file
     * @throws ZipException if a ZIP format error has occurred
     * @throws IOException if an I/O error has occurred
     * @throws IllegalStateException if the zip file has been closed
     */
    public InputStream getInputStream(ZipEntry entry) throws IOException {
        if (entry == null) {
            throw new NullPointerException("entry");
        }
        long jzentry = 0;
        ZipFileInputStream in = null;
        synchronized (this) {
            ensureOpen();
            // Pick the byte encoding of the entry name based on the entry's
            // general-purpose UTF-8 flag vs. the file charset.
            if (!zc.isUTF8() && (entry.flag & USE_UTF8) != 0) {
                // Android-changed: Find entry by name, falling back to name/ if cannot be found.
                // Needed for ClassPathURLStreamHandler handling of URLs without trailing slashes.
                // This was added as part of the work to move StrictJarFile from libcore to
                // framework, see http://b/111293098 for more details.
                // It should be possible to revert this after upgrading to OpenJDK 8u144 or above.
                // jzentry = getEntry(jzfile, zc.getBytesUTF8(entry.name), false);
                jzentry = getEntry(jzfile, zc.getBytesUTF8(entry.name), true);
            } else {
                // Android-changed: Find entry by name, falling back to name/ if cannot be found.
                // jzentry = getEntry(jzfile, zc.getBytes(entry.name), false);
                jzentry = getEntry(jzfile, zc.getBytes(entry.name), true);
            }
            if (jzentry == 0) {
                return null;
            }
            // The raw stream owns the jzentry handle and frees it on close.
            in = new ZipFileInputStream(jzentry);
            switch (getEntryMethod(jzentry)) {
            case STORED:
                // Stored entries are returned raw; track them so close()
                // can close them.
                synchronized (streams) {
                    streams.put(in, null);
                }
                return in;
            case DEFLATED:
                // MORE: Compute good size for inflater stream:
                long size = getEntrySize(jzentry) + 2; // Inflater likes a bit of slack
                // Android-changed: Use 64k buffer size, performs better than 8k.
                // See http://b/65491407.
                // if (size > 65536) size = 8192;
                if (size > 65536) size = 65536;
                if (size <= 0) size = 4096;
                Inflater inf = getInflater();
                InputStream is =
                    new ZipFileInflaterInputStream(in, inf, (int)size);
                // Track the wrapper with its inflater so close() can
                // release both.
                synchronized (streams) {
                    streams.put(is, inf);
                }
                return is;
            default:
                throw new ZipException("invalid compression method");
            }
        }
    }
    // Decompressing wrapper over ZipFileInputStream for DEFLATED entries.
    // On close it returns its Inflater to the cache via releaseInflater().
    private class ZipFileInflaterInputStream extends InflaterInputStream {
        private volatile boolean closeRequested = false;
        private boolean eof = false;
        private final ZipFileInputStream zfin;
        ZipFileInflaterInputStream(ZipFileInputStream zfin, Inflater inf,
                int size) {
            super(zfin, inf, size);
            this.zfin = zfin;
        }
        public void close() throws IOException {
            if (closeRequested)
                return;
            closeRequested = true;
            super.close();
            Inflater inf;
            // Deregister from the outstanding-streams map and recycle the
            // inflater if one was associated with this stream.
            synchronized (streams) {
                inf = streams.remove(this);
            }
            if (inf != null) {
                releaseInflater(inf);
            }
        }
        // Override fill() method to provide an extra "dummy" byte
        // at the end of the input stream. This is required when
        // using the "nowrap" Inflater option.
        protected void fill() throws IOException {
            if (eof) {
                throw new EOFException("Unexpected end of ZLIB input stream");
            }
            len = in.read(buf, 0, buf.length);
            if (len == -1) {
                buf[0] = 0;
                len = 1;
                eof = true;
            }
            inf.setInput(buf, 0, len);
        }
        public int available() throws IOException {
            if (closeRequested)
                return 0;
            // Remaining uncompressed bytes, clamped to int range.
            long avail = zfin.size() - inf.getBytesWritten();
            return (avail > (long) Integer.MAX_VALUE ?
                    Integer.MAX_VALUE : (int) avail);
        }
        protected void finalize() throws Throwable {
            close();
        }
    }
/*
* Gets an inflater from the list of available inflaters or allocates
* a new one.
*/
private Inflater getInflater() {
Inflater inf;
synchronized (inflaterCache) {
while (null != (inf = inflaterCache.poll())) {
if (false == inf.ended()) {
return inf;
}
}
}
return new Inflater(true);
}
/*
* Releases the specified inflater to the list of available inflaters.
*/
private void releaseInflater(Inflater inf) {
if (false == inf.ended()) {
inf.reset();
synchronized (inflaterCache) {
inflaterCache.add(inf);
}
}
}
    // List of available Inflater objects for decompression.
    // Access is guarded by synchronized (inflaterCache).
    private Deque<Inflater> inflaterCache = new ArrayDeque<>();
    /**
     * Returns the path name of the ZIP file.
     * @return the path name of the ZIP file
     */
    public String getName() {
        // Cached at construction from File.getPath(); valid even after close().
        return name;
    }
    // Iterates the entries in native index order; serves both the legacy
    // Enumeration API (entries()) and the Iterator/Spliterator API (stream()).
    // Every access re-checks that the file is still open.
    private class ZipEntryIterator implements Enumeration<ZipEntry>, Iterator<ZipEntry> {
        private int i = 0; // index of the next entry to return
        public ZipEntryIterator() {
            ensureOpen();
        }
        public boolean hasMoreElements() {
            return hasNext();
        }
        public boolean hasNext() {
            synchronized (ZipFile.this) {
                ensureOpen();
                return i < total;
            }
        }
        public ZipEntry nextElement() {
            return next();
        }
        public ZipEntry next() {
            synchronized (ZipFile.this) {
                ensureOpen();
                if (i >= total) {
                    throw new NoSuchElementException();
                }
                long jzentry = getNextEntry(jzfile, i++);
                if (jzentry == 0) {
                    // A zero handle below `total` means the native state is
                    // inconsistent (e.g. concurrent close); report details.
                    String message;
                    if (closeRequested) {
                        message = "ZipFile concurrently closed";
                    } else {
                        message = getZipMessage(ZipFile.this.jzfile);
                    }
                    throw new ZipError("jzentry == 0" +
                                       ",\n jzfile = " + ZipFile.this.jzfile +
                                       ",\n total = " + ZipFile.this.total +
                                       ",\n name = " + ZipFile.this.name +
                                       ",\n i = " + i +
                                       ",\n message = " + message
                    );
                }
                // Copy to a Java ZipEntry, then release the native struct.
                ZipEntry ze = getZipEntry(null, jzentry);
                freeEntry(jzfile, jzentry);
                return ze;
            }
        }
    }
    /**
     * Returns an enumeration of the ZIP file entries.
     * @return an enumeration of the ZIP file entries
     * @throws IllegalStateException if the zip file has been closed
     */
    public Enumeration<? extends ZipEntry> entries() {
        // The iterator checks open state on construction and on each access.
        return new ZipEntryIterator();
    }
    /**
     * Return an ordered {@code Stream} over the ZIP file entries.
     * Entries appear in the {@code Stream} in the order they appear in
     * the central directory of the ZIP file.
     *
     * @return an ordered {@code Stream} of entries in this ZIP file
     * @throws IllegalStateException if the zip file has been closed
     * @since 1.8
     */
    public Stream<? extends ZipEntry> stream() {
        // Sized spliterator over the iterator; entry count is known up front.
        return StreamSupport.stream(Spliterators.spliterator(
                new ZipEntryIterator(), size(),
                Spliterator.ORDERED | Spliterator.DISTINCT |
                Spliterator.IMMUTABLE | Spliterator.NONNULL), false);
    }
// Android-added: Hook to validate zip entry name by ZipPathValidator.
private void onZipEntryAccess(byte[] bname, int flag) throws ZipException {
String name;
if (!zc.isUTF8() && (flag & USE_UTF8) != 0) {
name = zc.toStringUTF8(bname, bname.length);
} else {
name = zc.toString(bname, bname.length);
}
ZipPathValidator.getInstance().onZipEntryAccess(name);
}
    // Builds a Java ZipEntry from the native jzentry struct. If `name` is
    // non-null it is used verbatim; otherwise the name is decoded from the
    // native bytes using the entry's UTF-8 flag / the file charset.
    // The caller still owns (and must free) the jzentry handle.
    private ZipEntry getZipEntry(String name, long jzentry) {
        ZipEntry e = new ZipEntry();
        e.flag = getEntryFlag(jzentry); // get the flag first
        if (name != null) {
            e.name = name;
        } else {
            byte[] bname = getEntryBytes(jzentry, JZENTRY_NAME);
            if (!zc.isUTF8() && (e.flag & USE_UTF8) != 0) {
                e.name = zc.toStringUTF8(bname, bname.length);
            } else {
                e.name = zc.toString(bname, bname.length);
            }
        }
        e.xdostime = getEntryTime(jzentry);
        e.crc = getEntryCrc(jzentry);
        e.size = getEntrySize(jzentry);
        e.csize = getEntryCSize(jzentry);
        e.method = getEntryMethod(jzentry);
        e.setExtra0(getEntryBytes(jzentry, JZENTRY_EXTRA), false, false);
        byte[] bcomm = getEntryBytes(jzentry, JZENTRY_COMMENT);
        if (bcomm == null) {
            e.comment = null;
        } else {
            // Comment is decoded with the same charset rule as the name.
            if (!zc.isUTF8() && (e.flag & USE_UTF8) != 0) {
                e.comment = zc.toStringUTF8(bcomm, bcomm.length);
            } else {
                e.comment = zc.toString(bcomm, bcomm.length);
            }
        }
        return e;
    }
    // Returns the native jzentry handle of the i-th entry, or 0 on failure.
    private static native long getNextEntry(long jzfile, int i);
    /**
     * Returns the number of entries in the ZIP file.
     * @return the number of entries in the ZIP file
     * @throws IllegalStateException if the zip file has been closed
     */
    public int size() {
        ensureOpen();
        // `total` is read once from the native handle at construction time.
        return total;
    }
    /**
     * Closes the ZIP file.
     * <p> Closing this ZIP file will close all of the input streams
     * previously returned by invocations of the {@link #getInputStream
     * getInputStream} method.
     *
     * <p>Idempotent: subsequent calls after the first are no-ops.
     *
     * @throws IOException if an I/O error has occurred
     */
    public void close() throws IOException {
        if (closeRequested)
            return;
        // Android-added: CloseGuard support.
        if (guard != null) {
            guard.close();
        }
        closeRequested = true;
        synchronized (this) {
            // Close streams, release their inflaters
            // BEGIN Android-added: null field check to avoid NullPointerException during finalize.
            // If the constructor threw an exception then the streams / inflaterCache fields can
            // be null and close() can be called by the finalizer.
            if (streams != null) {
            // END Android-added: null field check to avoid NullPointerException during finalize.
                synchronized (streams) {
                    if (false == streams.isEmpty()) {
                        // Copy first: closing a stream mutates `streams`.
                        Map<InputStream, Inflater> copy = new HashMap<>(streams);
                        streams.clear();
                        for (Map.Entry<InputStream, Inflater> e : copy.entrySet()) {
                            e.getKey().close();
                            Inflater inf = e.getValue();
                            if (inf != null) {
                                inf.end();
                            }
                        }
                    }
                }
            // BEGIN Android-added: null field check to avoid NullPointerException during finalize.
            }
            if (inflaterCache != null) {
            // END Android-added: null field check to avoid NullPointerException during finalize.
                // Release cached inflaters
                Inflater inf;
                synchronized (inflaterCache) {
                    while (null != (inf = inflaterCache.poll())) {
                        inf.end();
                    }
                }
            // BEGIN Android-added: null field check to avoid NullPointerException during finalize.
            }
            // END Android-added: null field check to avoid NullPointerException during finalize.
            if (jzfile != 0) {
                // Close the zip file
                long zf = this.jzfile;
                jzfile = 0; // zero the handle before the native close
                close(zf);
            }
            // Android-added: Do not use unlink() to implement OPEN_DELETE.
            if (fileToRemoveOnClose != null) {
                fileToRemoveOnClose.delete();
            }
        }
    }
    /**
     * Ensures that the system resources held by this ZipFile object are
     * released when there are no more references to it.
     *
     * <p>
     * Since the time when GC would invoke this method is undetermined,
     * it is strongly recommended that applications invoke the <code>close</code>
     * method as soon they have finished accessing this <code>ZipFile</code>.
     * This will prevent holding up system resources for an undetermined
     * length of time.
     *
     * @throws IOException if an I/O error has occurred
     * @see java.util.zip.ZipFile#close()
     */
    protected void finalize() throws IOException {
        // Android-added: CloseGuard support.
        if (guard != null) {
            guard.warnIfOpen();
        }
        // close() is idempotent, so an explicit earlier close is harmless.
        close();
    }
    // Releases the native jzfile struct.
    private static native void close(long jzfile);
    // Throws IllegalStateException if this ZipFile has been closed or was
    // never successfully initialized. closeRequested is checked first so a
    // closed file reports "zip file closed" rather than "not initialized".
    private void ensureOpen() {
        if (closeRequested) {
            throw new IllegalStateException("zip file closed");
        }
        if (jzfile == 0) {
            throw new IllegalStateException("The object is not initialized.");
        }
    }
    // Like ensureOpen(), but throws the checked ZipException expected by
    // stream read paths instead of IllegalStateException.
    private void ensureOpenOrZipException() throws IOException {
        if (closeRequested) {
            throw new ZipException("ZipFile closed");
        }
    }
    /*
     * Inner class implementing the input stream used to read a
     * (possibly compressed) zip file entry.
     */
    private class ZipFileInputStream extends InputStream {
        private volatile boolean zfisCloseRequested = false;
        protected long jzentry; // address of jzentry data
        private long pos; // current position within entry data
        protected long rem; // number of remaining bytes within entry
        protected long size; // uncompressed size of this entry
        // NOTE: rem is initialized from the entry's *compressed* size
        // (getEntryCSize), while size holds the uncompressed length reported
        // by getEntrySize.
        ZipFileInputStream(long jzentry) {
            pos = 0;
            rem = getEntryCSize(jzentry);
            size = getEntrySize(jzentry);
            this.jzentry = jzentry;
        }
        public int read(byte b[], int off, int len) throws IOException {
            // Android-added: Always throw an exception when reading from closed zipfile.
            // Required by the JavaDoc for InputStream.read(byte[], int, int). Upstream version
            // 8u121-b13 is not compliant but that bug has been fixed in upstream version 9+181
            // as part of a major change to switch to a pure Java implementation.
            // See https://bugs.openjdk.java.net/browse/JDK-8145260 and
            // https://bugs.openjdk.java.net/browse/JDK-8142508.
            ensureOpenOrZipException();
            synchronized (ZipFile.this) {
                // Locals deliberately shadow the fields so the native read and
                // the position bookkeeping operate on one consistent snapshot
                // while the enclosing ZipFile's native state is locked.
                long rem = this.rem;
                long pos = this.pos;
                if (rem == 0) {
                    return -1;
                }
                if (len <= 0) {
                    return 0;
                }
                if (len > rem) {
                    len = (int) rem;
                }
                // Android-removed: Always throw an exception when reading from closed zipfile.
                // Moved to the start of the method.
                //ensureOpenOrZipException();
                len = ZipFile.read(ZipFile.this.jzfile, jzentry, pos, b,
                        off, len);
                if (len > 0) {
                    this.pos = (pos + len);
                    this.rem = (rem - len);
                }
            }
            // The local shadows are out of scope here, so this reads the
            // field; when the entry is exhausted the stream closes itself.
            if (rem == 0) {
                close();
            }
            return len;
        }
        public int read() throws IOException {
            byte[] b = new byte[1];
            if (read(b, 0, 1) == 1) {
                return b[0] & 0xff;
            } else {
                return -1;
            }
        }
        // NOTE(review): unlike read(), skip() mutates pos/rem without
        // synchronizing on ZipFile.this — confirm callers serialize access
        // to a single stream instance.
        public long skip(long n) {
            if (n > rem)
                n = rem;
            pos += n;
            rem -= n;
            if (rem == 0) {
                close();
            }
            return n;
        }
        public int available() {
            return rem > Integer.MAX_VALUE ? Integer.MAX_VALUE : (int) rem;
        }
        public long size() {
            return size;
        }
        public void close() {
            if (zfisCloseRequested)
                return;
            zfisCloseRequested = true;
            rem = 0;
            synchronized (ZipFile.this) {
                // Release the native entry only while the owning ZipFile is
                // still open; freeEntry needs a live jzfile handle.
                if (jzentry != 0 && ZipFile.this.jzfile != 0) {
                    freeEntry(ZipFile.this.jzfile, jzentry);
                    jzentry = 0;
                }
            }
            // Deregister so the owning ZipFile no longer tracks this stream.
            synchronized (streams) {
                streams.remove(this);
            }
        }
        protected void finalize() {
            close();
        }
    }
// Android-removed: Access startsWithLocHeader() directly.
/*
static {
sun.misc.SharedSecrets.setJavaUtilZipFileAccess(
new sun.misc.JavaUtilZipFileAccess() {
public boolean startsWithLocHeader(ZipFile zip) {
return zip.startsWithLocHeader();
}
}
);
}
*/
    /**
     * Returns {@code true} if, and only if, the zip file begins with {@code
     * LOCSIG}.
     * @hide
     */
    // Android-changed: Access startsWithLocHeader() directly.
    // Make hidden public for use by sun.misc.URLClassPath
    // private boolean startsWithLocHeader() {
    public boolean startsWithLocHeader() {
        // Pure accessor: returns the cached locsig flag field.
        return locsig;
    }
    // BEGIN Android-added: Provide access to underlying file descriptor for testing.
    // See http://b/111148957 for background information.
    /** @hide */
    // @VisibleForTesting
    public int getFileDescriptor() {
        // Delegates to the native overload using this instance's zip handle.
        return getFileDescriptor(jzfile);
    }
    private static native int getFileDescriptor(long jzfile);
    // END Android-added: Provide access to underlying file descriptor for testing.
    // Android-changed: Make it as a non-static method, so it can access charset config.
    private native long open(String name, int mode, long lastModified,
                             boolean usemmap) throws IOException;
    // Native accessors keyed by the open zip (jzfile) handle.
    private static native int getTotal(long jzfile);
    private static native boolean startsWithLOC(long jzfile);
    private static native int read(long jzfile, long jzentry,
                                   long pos, byte[] b, int off, int len);
    // access to the native zentry object
    private static native long getEntryTime(long jzentry);
    private static native long getEntryCrc(long jzentry);
    private static native long getEntryCSize(long jzentry);
    private static native long getEntrySize(long jzentry);
    private static native int getEntryMethod(long jzentry);
    private static native int getEntryFlag(long jzentry);
    private static native byte[] getCommentBytes(long jzfile);
    // Selector constants for getEntryBytes(): which byte[] of the entry to fetch.
    private static final int JZENTRY_NAME = 0;
    private static final int JZENTRY_EXTRA = 1;
    private static final int JZENTRY_COMMENT = 2;
    private static native byte[] getEntryBytes(long jzentry, int type);
    private static native String getZipMessage(long jzfile);
} |
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/aiplatform/v1/metadata_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.aiplatform.v1;
/**
*
*
* <pre>
* Request message for
* [MetadataService.AddExecutionEvents][google.cloud.aiplatform.v1.MetadataService.AddExecutionEvents].
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1.AddExecutionEventsRequest}
*/
public final class AddExecutionEventsRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1.AddExecutionEventsRequest)
AddExecutionEventsRequestOrBuilder {
  private static final long serialVersionUID = 0L;
  // NOTE: protoc-generated code ("DO NOT EDIT"); regenerate from
  // metadata_service.proto rather than hand-editing.
  // Use AddExecutionEventsRequest.newBuilder() to construct.
  private AddExecutionEventsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Initializes fields to their proto3 defaults (empty string / empty list).
  private AddExecutionEventsRequest() {
    execution_ = "";
    events_ = java.util.Collections.emptyList();
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new AddExecutionEventsRequest();
  }
  // Descriptor plumbing wired to MetadataServiceProto's static tables.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.aiplatform.v1.MetadataServiceProto
        .internal_static_google_cloud_aiplatform_v1_AddExecutionEventsRequest_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.aiplatform.v1.MetadataServiceProto
        .internal_static_google_cloud_aiplatform_v1_AddExecutionEventsRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.aiplatform.v1.AddExecutionEventsRequest.class,
            com.google.cloud.aiplatform.v1.AddExecutionEventsRequest.Builder.class);
  }
  public static final int EXECUTION_FIELD_NUMBER = 1;
  // Holds either a String or a lazily-decoded ByteString (see getExecution()).
  @SuppressWarnings("serial")
  private volatile java.lang.Object execution_ = "";
  /**
   *
   *
   * <pre>
   * Required. The resource name of the Execution that the Events connect
   * Artifacts with.
   * Format:
   * `projects/{project}/locations/{location}/metadataStores/{metadatastore}/executions/{execution}`
   * </pre>
   *
   * <code>
   * string execution = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The execution.
   */
  @java.lang.Override
  public java.lang.String getExecution() {
    java.lang.Object ref = execution_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Lazily decode the UTF-8 ByteString and cache the String form.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      execution_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Required. The resource name of the Execution that the Events connect
   * Artifacts with.
   * Format:
   * `projects/{project}/locations/{location}/metadataStores/{metadatastore}/executions/{execution}`
   * </pre>
   *
   * <code>
   * string execution = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The bytes for execution.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getExecutionBytes() {
    java.lang.Object ref = execution_;
    if (ref instanceof java.lang.String) {
      // Cache the encoded ByteString form for subsequent calls.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      execution_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int EVENTS_FIELD_NUMBER = 2;
  // Immutable snapshot list; populated by the Builder at build time.
  @SuppressWarnings("serial")
  private java.util.List<com.google.cloud.aiplatform.v1.Event> events_;
  /**
   *
   *
   * <pre>
   * The Events to create and add.
   * </pre>
   *
   * <code>repeated .google.cloud.aiplatform.v1.Event events = 2;</code>
   */
  @java.lang.Override
  public java.util.List<com.google.cloud.aiplatform.v1.Event> getEventsList() {
    return events_;
  }
  /**
   *
   *
   * <pre>
   * The Events to create and add.
   * </pre>
   *
   * <code>repeated .google.cloud.aiplatform.v1.Event events = 2;</code>
   */
  @java.lang.Override
  public java.util.List<? extends com.google.cloud.aiplatform.v1.EventOrBuilder>
      getEventsOrBuilderList() {
    return events_;
  }
  /**
   *
   *
   * <pre>
   * The Events to create and add.
   * </pre>
   *
   * <code>repeated .google.cloud.aiplatform.v1.Event events = 2;</code>
   */
  @java.lang.Override
  public int getEventsCount() {
    return events_.size();
  }
  /**
   *
   *
   * <pre>
   * The Events to create and add.
   * </pre>
   *
   * <code>repeated .google.cloud.aiplatform.v1.Event events = 2;</code>
   */
  @java.lang.Override
  public com.google.cloud.aiplatform.v1.Event getEvents(int index) {
    return events_.get(index);
  }
  /**
   *
   *
   * <pre>
   * The Events to create and add.
   * </pre>
   *
   * <code>repeated .google.cloud.aiplatform.v1.Event events = 2;</code>
   */
  @java.lang.Override
  public com.google.cloud.aiplatform.v1.EventOrBuilder getEventsOrBuilder(int index) {
    return events_.get(index);
  }
  // Tri-state cache for isInitialized(): -1 unknown, 0 false, 1 true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes field 1 (execution, skipped when empty per proto3) and
  // field 2 (repeated events), then any preserved unknown fields.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(execution_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, execution_);
    }
    for (int i = 0; i < events_.size(); i++) {
      output.writeMessage(2, events_.get(i));
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // Memoized: -1 means not yet computed.
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(execution_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, execution_);
    }
    for (int i = 0; i < events_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, events_.get(i));
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Value equality over execution, events, and unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.aiplatform.v1.AddExecutionEventsRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.aiplatform.v1.AddExecutionEventsRequest other =
        (com.google.cloud.aiplatform.v1.AddExecutionEventsRequest) obj;
    if (!getExecution().equals(other.getExecution())) return false;
    if (!getEventsList().equals(other.getEventsList())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Hash mixes field numbers and values; memoized after first computation.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + EXECUTION_FIELD_NUMBER;
    hash = (53 * hash) + getExecution().hashCode();
    if (getEventsCount() > 0) {
      hash = (37 * hash) + EVENTS_FIELD_NUMBER;
      hash = (53 * hash) + getEventsList().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points delegating to PARSER; the
  // *WithIOException variants adapt stream errors for InputStream sources.
  public static com.google.cloud.aiplatform.v1.AddExecutionEventsRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.aiplatform.v1.AddExecutionEventsRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.aiplatform.v1.AddExecutionEventsRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.aiplatform.v1.AddExecutionEventsRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.aiplatform.v1.AddExecutionEventsRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.aiplatform.v1.AddExecutionEventsRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.aiplatform.v1.AddExecutionEventsRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.aiplatform.v1.AddExecutionEventsRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.aiplatform.v1.AddExecutionEventsRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.aiplatform.v1.AddExecutionEventsRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.aiplatform.v1.AddExecutionEventsRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.aiplatform.v1.AddExecutionEventsRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Builder factory methods: fresh builder, builder seeded from a prototype,
  // and conversion of an existing message back to a builder.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(
      com.google.cloud.aiplatform.v1.AddExecutionEventsRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Request message for
   * [MetadataService.AddExecutionEvents][google.cloud.aiplatform.v1.MetadataService.AddExecutionEvents].
   * </pre>
   *
   * Protobuf type {@code google.cloud.aiplatform.v1.AddExecutionEventsRequest}
   */
  // NOTE: generated Builder. bitField0_ tracks which fields were set:
  // bit 0x00000001 = execution, bit 0x00000002 = events list is mutable/owned.
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1.AddExecutionEventsRequest)
      com.google.cloud.aiplatform.v1.AddExecutionEventsRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.aiplatform.v1.MetadataServiceProto
          .internal_static_google_cloud_aiplatform_v1_AddExecutionEventsRequest_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.aiplatform.v1.MetadataServiceProto
          .internal_static_google_cloud_aiplatform_v1_AddExecutionEventsRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.aiplatform.v1.AddExecutionEventsRequest.class,
              com.google.cloud.aiplatform.v1.AddExecutionEventsRequest.Builder.class);
    }
    // Construct using com.google.cloud.aiplatform.v1.AddExecutionEventsRequest.newBuilder()
    private Builder() {}
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    // Resets all fields to defaults; events_ handling depends on whether a
    // nested field builder has been materialized.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      execution_ = "";
      if (eventsBuilder_ == null) {
        events_ = java.util.Collections.emptyList();
      } else {
        events_ = null;
        eventsBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000002);
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.aiplatform.v1.MetadataServiceProto
          .internal_static_google_cloud_aiplatform_v1_AddExecutionEventsRequest_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.aiplatform.v1.AddExecutionEventsRequest getDefaultInstanceForType() {
      return com.google.cloud.aiplatform.v1.AddExecutionEventsRequest.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.aiplatform.v1.AddExecutionEventsRequest build() {
      com.google.cloud.aiplatform.v1.AddExecutionEventsRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.aiplatform.v1.AddExecutionEventsRequest buildPartial() {
      com.google.cloud.aiplatform.v1.AddExecutionEventsRequest result =
          new com.google.cloud.aiplatform.v1.AddExecutionEventsRequest(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Transfers the events list into the result, freezing the builder-owned
    // list as unmodifiable so the built message is immutable.
    private void buildPartialRepeatedFields(
        com.google.cloud.aiplatform.v1.AddExecutionEventsRequest result) {
      if (eventsBuilder_ == null) {
        if (((bitField0_ & 0x00000002) != 0)) {
          events_ = java.util.Collections.unmodifiableList(events_);
          bitField0_ = (bitField0_ & ~0x00000002);
        }
        result.events_ = events_;
      } else {
        result.events_ = eventsBuilder_.build();
      }
    }
    private void buildPartial0(com.google.cloud.aiplatform.v1.AddExecutionEventsRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.execution_ = execution_;
      }
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.aiplatform.v1.AddExecutionEventsRequest) {
        return mergeFrom((com.google.cloud.aiplatform.v1.AddExecutionEventsRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Field-wise merge: non-empty execution overwrites; events are appended.
    // When no builder exists and our list is empty we alias other's
    // (immutable) list instead of copying.
    public Builder mergeFrom(com.google.cloud.aiplatform.v1.AddExecutionEventsRequest other) {
      if (other == com.google.cloud.aiplatform.v1.AddExecutionEventsRequest.getDefaultInstance())
        return this;
      if (!other.getExecution().isEmpty()) {
        execution_ = other.execution_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (eventsBuilder_ == null) {
        if (!other.events_.isEmpty()) {
          if (events_.isEmpty()) {
            events_ = other.events_;
            bitField0_ = (bitField0_ & ~0x00000002);
          } else {
            ensureEventsIsMutable();
            events_.addAll(other.events_);
          }
          onChanged();
        }
      } else {
        if (!other.events_.isEmpty()) {
          if (eventsBuilder_.isEmpty()) {
            eventsBuilder_.dispose();
            eventsBuilder_ = null;
            events_ = other.events_;
            bitField0_ = (bitField0_ & ~0x00000002);
            eventsBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getEventsFieldBuilder()
                    : null;
          } else {
            eventsBuilder_.addAllMessages(other.events_);
          }
        }
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Wire-format parse loop: tag 10 = execution (field 1, length-delimited),
    // tag 18 = an events element (field 2, length-delimited).
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                execution_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                com.google.cloud.aiplatform.v1.Event m =
                    input.readMessage(
                        com.google.cloud.aiplatform.v1.Event.parser(), extensionRegistry);
                if (eventsBuilder_ == null) {
                  ensureEventsIsMutable();
                  events_.add(m);
                } else {
                  eventsBuilder_.addMessage(m);
                }
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    private int bitField0_;
    private java.lang.Object execution_ = "";
    /**
     *
     *
     * <pre>
     * Required. The resource name of the Execution that the Events connect
     * Artifacts with.
     * Format:
     * `projects/{project}/locations/{location}/metadataStores/{metadatastore}/executions/{execution}`
     * </pre>
     *
     * <code>
     * string execution = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The execution.
     */
    public java.lang.String getExecution() {
      java.lang.Object ref = execution_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        execution_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The resource name of the Execution that the Events connect
     * Artifacts with.
     * Format:
     * `projects/{project}/locations/{location}/metadataStores/{metadatastore}/executions/{execution}`
     * </pre>
     *
     * <code>
     * string execution = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The bytes for execution.
     */
    public com.google.protobuf.ByteString getExecutionBytes() {
      java.lang.Object ref = execution_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        execution_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The resource name of the Execution that the Events connect
     * Artifacts with.
     * Format:
     * `projects/{project}/locations/{location}/metadataStores/{metadatastore}/executions/{execution}`
     * </pre>
     *
     * <code>
     * string execution = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The execution to set.
     * @return This builder for chaining.
     */
    public Builder setExecution(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      execution_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The resource name of the Execution that the Events connect
     * Artifacts with.
     * Format:
     * `projects/{project}/locations/{location}/metadataStores/{metadatastore}/executions/{execution}`
     * </pre>
     *
     * <code>
     * string execution = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearExecution() {
      execution_ = getDefaultInstance().getExecution();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The resource name of the Execution that the Events connect
     * Artifacts with.
     * Format:
     * `projects/{project}/locations/{location}/metadataStores/{metadatastore}/executions/{execution}`
     * </pre>
     *
     * <code>
     * string execution = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The bytes for execution to set.
     * @return This builder for chaining.
     */
    public Builder setExecutionBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      execution_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    private java.util.List<com.google.cloud.aiplatform.v1.Event> events_ =
        java.util.Collections.emptyList();
    // Copy-on-write: take a private ArrayList copy the first time the list
    // is mutated (bit 0x00000002 records ownership).
    private void ensureEventsIsMutable() {
      if (!((bitField0_ & 0x00000002) != 0)) {
        events_ = new java.util.ArrayList<com.google.cloud.aiplatform.v1.Event>(events_);
        bitField0_ |= 0x00000002;
      }
    }
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.aiplatform.v1.Event,
            com.google.cloud.aiplatform.v1.Event.Builder,
            com.google.cloud.aiplatform.v1.EventOrBuilder>
        eventsBuilder_;
    /**
     *
     *
     * <pre>
     * The Events to create and add.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Event events = 2;</code>
     */
    public java.util.List<com.google.cloud.aiplatform.v1.Event> getEventsList() {
      if (eventsBuilder_ == null) {
        return java.util.Collections.unmodifiableList(events_);
      } else {
        return eventsBuilder_.getMessageList();
      }
    }
    /**
     *
     *
     * <pre>
     * The Events to create and add.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Event events = 2;</code>
     */
    public int getEventsCount() {
      if (eventsBuilder_ == null) {
        return events_.size();
      } else {
        return eventsBuilder_.getCount();
      }
    }
    /**
     *
     *
     * <pre>
     * The Events to create and add.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Event events = 2;</code>
     */
    public com.google.cloud.aiplatform.v1.Event getEvents(int index) {
      if (eventsBuilder_ == null) {
        return events_.get(index);
      } else {
        return eventsBuilder_.getMessage(index);
      }
    }
    /**
     *
     *
     * <pre>
     * The Events to create and add.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Event events = 2;</code>
     */
    public Builder setEvents(int index, com.google.cloud.aiplatform.v1.Event value) {
      if (eventsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureEventsIsMutable();
        events_.set(index, value);
        onChanged();
      } else {
        eventsBuilder_.setMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The Events to create and add.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Event events = 2;</code>
     */
    public Builder setEvents(
        int index, com.google.cloud.aiplatform.v1.Event.Builder builderForValue) {
      if (eventsBuilder_ == null) {
        ensureEventsIsMutable();
        events_.set(index, builderForValue.build());
        onChanged();
      } else {
        eventsBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The Events to create and add.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Event events = 2;</code>
     */
    public Builder addEvents(com.google.cloud.aiplatform.v1.Event value) {
      if (eventsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureEventsIsMutable();
        events_.add(value);
        onChanged();
      } else {
        eventsBuilder_.addMessage(value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The Events to create and add.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Event events = 2;</code>
     */
    public Builder addEvents(int index, com.google.cloud.aiplatform.v1.Event value) {
      if (eventsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureEventsIsMutable();
        events_.add(index, value);
        onChanged();
      } else {
        eventsBuilder_.addMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The Events to create and add.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Event events = 2;</code>
     */
    public Builder addEvents(com.google.cloud.aiplatform.v1.Event.Builder builderForValue) {
      if (eventsBuilder_ == null) {
        ensureEventsIsMutable();
        events_.add(builderForValue.build());
        onChanged();
      } else {
        eventsBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The Events to create and add.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Event events = 2;</code>
     */
    public Builder addEvents(
        int index, com.google.cloud.aiplatform.v1.Event.Builder builderForValue) {
      if (eventsBuilder_ == null) {
        ensureEventsIsMutable();
        events_.add(index, builderForValue.build());
        onChanged();
      } else {
        eventsBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The Events to create and add.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Event events = 2;</code>
     */
    public Builder addAllEvents(
        java.lang.Iterable<? extends com.google.cloud.aiplatform.v1.Event> values) {
      if (eventsBuilder_ == null) {
        ensureEventsIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, events_);
        onChanged();
      } else {
        eventsBuilder_.addAllMessages(values);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The Events to create and add.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Event events = 2;</code>
     */
    public Builder clearEvents() {
      if (eventsBuilder_ == null) {
        events_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000002);
        onChanged();
      } else {
        eventsBuilder_.clear();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The Events to create and add.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Event events = 2;</code>
     */
    public Builder removeEvents(int index) {
      if (eventsBuilder_ == null) {
        ensureEventsIsMutable();
        events_.remove(index);
        onChanged();
      } else {
        eventsBuilder_.remove(index);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The Events to create and add.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Event events = 2;</code>
     */
    public com.google.cloud.aiplatform.v1.Event.Builder getEventsBuilder(int index) {
      return getEventsFieldBuilder().getBuilder(index);
    }
    /**
     *
     *
     * <pre>
     * The Events to create and add.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Event events = 2;</code>
     */
    public com.google.cloud.aiplatform.v1.EventOrBuilder getEventsOrBuilder(int index) {
      if (eventsBuilder_ == null) {
        return events_.get(index);
      } else {
        return eventsBuilder_.getMessageOrBuilder(index);
      }
    }
    /**
     *
     *
     * <pre>
     * The Events to create and add.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Event events = 2;</code>
     */
    public java.util.List<? extends com.google.cloud.aiplatform.v1.EventOrBuilder>
        getEventsOrBuilderList() {
      if (eventsBuilder_ != null) {
        return eventsBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(events_);
      }
    }
    /**
     *
     *
     * <pre>
     * The Events to create and add.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Event events = 2;</code>
     */
    public com.google.cloud.aiplatform.v1.Event.Builder addEventsBuilder() {
      return getEventsFieldBuilder()
          .addBuilder(com.google.cloud.aiplatform.v1.Event.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * The Events to create and add.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Event events = 2;</code>
     */
    public com.google.cloud.aiplatform.v1.Event.Builder addEventsBuilder(int index) {
      return getEventsFieldBuilder()
          .addBuilder(index, com.google.cloud.aiplatform.v1.Event.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * The Events to create and add.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Event events = 2;</code>
     */
    public java.util.List<com.google.cloud.aiplatform.v1.Event.Builder> getEventsBuilderList() {
      return getEventsFieldBuilder().getBuilderList();
    }
    // Lazily materializes the RepeatedFieldBuilderV3; after this, events_ is
    // owned by the builder and the plain-list path is abandoned.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.aiplatform.v1.Event,
            com.google.cloud.aiplatform.v1.Event.Builder,
            com.google.cloud.aiplatform.v1.EventOrBuilder>
        getEventsFieldBuilder() {
      if (eventsBuilder_ == null) {
        eventsBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloud.aiplatform.v1.Event,
                com.google.cloud.aiplatform.v1.Event.Builder,
                com.google.cloud.aiplatform.v1.EventOrBuilder>(
                events_, ((bitField0_ & 0x00000002) != 0), getParentForChildren(), isClean());
        events_ = null;
      }
      return eventsBuilder_;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1.AddExecutionEventsRequest)
  }
// @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1.AddExecutionEventsRequest)
private static final com.google.cloud.aiplatform.v1.AddExecutionEventsRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1.AddExecutionEventsRequest();
}
public static com.google.cloud.aiplatform.v1.AddExecutionEventsRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<AddExecutionEventsRequest> PARSER =
new com.google.protobuf.AbstractParser<AddExecutionEventsRequest>() {
@java.lang.Override
public AddExecutionEventsRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<AddExecutionEventsRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<AddExecutionEventsRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1.AddExecutionEventsRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 36,720 | java-retail/proto-google-cloud-retail-v2beta/src/main/java/com/google/cloud/retail/v2beta/CreateControlRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/retail/v2beta/control_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.retail.v2beta;
/**
 *
 *
 * <pre>
 * Request for CreateControl method.
 * </pre>
 *
 * Protobuf type {@code google.cloud.retail.v2beta.CreateControlRequest}
 */
public final class CreateControlRequest extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.retail.v2beta.CreateControlRequest)
    CreateControlRequestOrBuilder {
  private static final long serialVersionUID = 0L;
  // NOTE(review): machine-generated by the protocol buffer compiler (see file header).
  // Do not hand-edit; regenerate from google/cloud/retail/v2beta/control_service.proto.
  // Use CreateControlRequest.newBuilder() to construct.
  private CreateControlRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private CreateControlRequest() {
    parent_ = "";
    controlId_ = "";
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new CreateControlRequest();
  }
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.retail.v2beta.ControlServiceProto
        .internal_static_google_cloud_retail_v2beta_CreateControlRequest_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.retail.v2beta.ControlServiceProto
        .internal_static_google_cloud_retail_v2beta_CreateControlRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.retail.v2beta.CreateControlRequest.class,
            com.google.cloud.retail.v2beta.CreateControlRequest.Builder.class);
  }
  // Presence bits for this message: 0x00000001 => the `control` message field is set
  // (see hasControl() and buildPartial0 in the Builder).
  private int bitField0_;
  public static final int PARENT_FIELD_NUMBER = 1;
  // Holds either a java.lang.String or a ByteString; the decoded String form is
  // lazily computed and cached on first access (see getParent()).
  @SuppressWarnings("serial")
  private volatile java.lang.Object parent_ = "";
  /**
   *
   *
   * <pre>
   * Required. Full resource name of parent catalog. Format:
   * `projects/{project_number}/locations/{location_id}/catalogs/{catalog_id}`
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The parent.
   */
  @java.lang.Override
  public java.lang.String getParent() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // First access after wire parsing: decode UTF-8 bytes once and cache the String.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      parent_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Required. Full resource name of parent catalog. Format:
   * `projects/{project_number}/locations/{location_id}/catalogs/{catalog_id}`
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The bytes for parent.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getParentBytes() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      parent_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int CONTROL_FIELD_NUMBER = 2;
  private com.google.cloud.retail.v2beta.Control control_;
  /**
   *
   *
   * <pre>
   * Required. The Control to create.
   * </pre>
   *
   * <code>
   * .google.cloud.retail.v2beta.Control control = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return Whether the control field is set.
   */
  @java.lang.Override
  public boolean hasControl() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   *
   *
   * <pre>
   * Required. The Control to create.
   * </pre>
   *
   * <code>
   * .google.cloud.retail.v2beta.Control control = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The control.
   */
  @java.lang.Override
  public com.google.cloud.retail.v2beta.Control getControl() {
    // Never returns null: falls back to the default instance when unset.
    return control_ == null
        ? com.google.cloud.retail.v2beta.Control.getDefaultInstance()
        : control_;
  }
  /**
   *
   *
   * <pre>
   * Required. The Control to create.
   * </pre>
   *
   * <code>
   * .google.cloud.retail.v2beta.Control control = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.retail.v2beta.ControlOrBuilder getControlOrBuilder() {
    return control_ == null
        ? com.google.cloud.retail.v2beta.Control.getDefaultInstance()
        : control_;
  }
  public static final int CONTROL_ID_FIELD_NUMBER = 3;
  // Same lazy String/ByteString caching scheme as parent_ above.
  @SuppressWarnings("serial")
  private volatile java.lang.Object controlId_ = "";
  /**
   *
   *
   * <pre>
   * Required. The ID to use for the Control, which will become the final
   * component of the Control's resource name.
   *
   * This value should be 4-63 characters, and valid characters
   * are /[a-z][0-9]-_/.
   * </pre>
   *
   * <code>string control_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The controlId.
   */
  @java.lang.Override
  public java.lang.String getControlId() {
    java.lang.Object ref = controlId_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      controlId_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Required. The ID to use for the Control, which will become the final
   * component of the Control's resource name.
   *
   * This value should be 4-63 characters, and valid characters
   * are /[a-z][0-9]-_/.
   * </pre>
   *
   * <code>string control_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The bytes for controlId.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getControlIdBytes() {
    java.lang.Object ref = controlId_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      controlId_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Cached isInitialized() result: -1 = not yet computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(2, getControl());
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(controlId_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, controlId_);
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getControl());
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(controlId_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, controlId_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.retail.v2beta.CreateControlRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.retail.v2beta.CreateControlRequest other =
        (com.google.cloud.retail.v2beta.CreateControlRequest) obj;
    if (!getParent().equals(other.getParent())) return false;
    if (hasControl() != other.hasControl()) return false;
    if (hasControl()) {
      if (!getControl().equals(other.getControl())) return false;
    }
    if (!getControlId().equals(other.getControlId())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    if (hasControl()) {
      hash = (37 * hash) + CONTROL_FIELD_NUMBER;
      hash = (53 * hash) + getControl().hashCode();
    }
    hash = (37 * hash) + CONTROL_ID_FIELD_NUMBER;
    hash = (53 * hash) + getControlId().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  public static com.google.cloud.retail.v2beta.CreateControlRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.retail.v2beta.CreateControlRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.retail.v2beta.CreateControlRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.retail.v2beta.CreateControlRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.retail.v2beta.CreateControlRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.retail.v2beta.CreateControlRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.retail.v2beta.CreateControlRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.retail.v2beta.CreateControlRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.retail.v2beta.CreateControlRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.retail.v2beta.CreateControlRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.retail.v2beta.CreateControlRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.retail.v2beta.CreateControlRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.cloud.retail.v2beta.CreateControlRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Request for CreateControl method.
   * </pre>
   *
   * Protobuf type {@code google.cloud.retail.v2beta.CreateControlRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.retail.v2beta.CreateControlRequest)
      com.google.cloud.retail.v2beta.CreateControlRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.retail.v2beta.ControlServiceProto
          .internal_static_google_cloud_retail_v2beta_CreateControlRequest_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.retail.v2beta.ControlServiceProto
          .internal_static_google_cloud_retail_v2beta_CreateControlRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.retail.v2beta.CreateControlRequest.class,
              com.google.cloud.retail.v2beta.CreateControlRequest.Builder.class);
    }
    // Construct using com.google.cloud.retail.v2beta.CreateControlRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getControlFieldBuilder();
      }
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      parent_ = "";
      control_ = null;
      if (controlBuilder_ != null) {
        controlBuilder_.dispose();
        controlBuilder_ = null;
      }
      controlId_ = "";
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.retail.v2beta.ControlServiceProto
          .internal_static_google_cloud_retail_v2beta_CreateControlRequest_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.retail.v2beta.CreateControlRequest getDefaultInstanceForType() {
      return com.google.cloud.retail.v2beta.CreateControlRequest.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.retail.v2beta.CreateControlRequest build() {
      com.google.cloud.retail.v2beta.CreateControlRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.retail.v2beta.CreateControlRequest buildPartial() {
      com.google.cloud.retail.v2beta.CreateControlRequest result =
          new com.google.cloud.retail.v2beta.CreateControlRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Copies only the fields whose builder presence bit is set into the result
    // (bit 0x1 = parent, 0x2 = control, 0x4 = controlId).
    private void buildPartial0(com.google.cloud.retail.v2beta.CreateControlRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.parent_ = parent_;
      }
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.control_ = controlBuilder_ == null ? control_ : controlBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.controlId_ = controlId_;
      }
      result.bitField0_ |= to_bitField0_;
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.retail.v2beta.CreateControlRequest) {
        return mergeFrom((com.google.cloud.retail.v2beta.CreateControlRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    public Builder mergeFrom(com.google.cloud.retail.v2beta.CreateControlRequest other) {
      if (other == com.google.cloud.retail.v2beta.CreateControlRequest.getDefaultInstance())
        return this;
      if (!other.getParent().isEmpty()) {
        parent_ = other.parent_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (other.hasControl()) {
        mergeControl(other.getControl());
      }
      if (!other.getControlId().isEmpty()) {
        controlId_ = other.controlId_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                parent_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                input.readMessage(getControlFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 26:
              {
                controlId_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Builder presence bits: 0x1 = parent, 0x2 = control, 0x4 = controlId
    // (see buildPartial0 and the tag cases in mergeFrom above).
    private int bitField0_;
    private java.lang.Object parent_ = "";
    /**
     *
     *
     * <pre>
     * Required. Full resource name of parent catalog. Format:
     * `projects/{project_number}/locations/{location_id}/catalogs/{catalog_id}`
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The parent.
     */
    public java.lang.String getParent() {
      java.lang.Object ref = parent_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        parent_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. Full resource name of parent catalog. Format:
     * `projects/{project_number}/locations/{location_id}/catalogs/{catalog_id}`
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The bytes for parent.
     */
    public com.google.protobuf.ByteString getParentBytes() {
      java.lang.Object ref = parent_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        parent_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. Full resource name of parent catalog. Format:
     * `projects/{project_number}/locations/{location_id}/catalogs/{catalog_id}`
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The parent to set.
     * @return This builder for chaining.
     */
    public Builder setParent(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Full resource name of parent catalog. Format:
     * `projects/{project_number}/locations/{location_id}/catalogs/{catalog_id}`
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearParent() {
      parent_ = getDefaultInstance().getParent();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Full resource name of parent catalog. Format:
     * `projects/{project_number}/locations/{location_id}/catalogs/{catalog_id}`
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The bytes for parent to set.
     * @return This builder for chaining.
     */
    public Builder setParentBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    private com.google.cloud.retail.v2beta.Control control_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.retail.v2beta.Control,
            com.google.cloud.retail.v2beta.Control.Builder,
            com.google.cloud.retail.v2beta.ControlOrBuilder>
        controlBuilder_;
    /**
     *
     *
     * <pre>
     * Required. The Control to create.
     * </pre>
     *
     * <code>
     * .google.cloud.retail.v2beta.Control control = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return Whether the control field is set.
     */
    public boolean hasControl() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     *
     *
     * <pre>
     * Required. The Control to create.
     * </pre>
     *
     * <code>
     * .google.cloud.retail.v2beta.Control control = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return The control.
     */
    public com.google.cloud.retail.v2beta.Control getControl() {
      if (controlBuilder_ == null) {
        return control_ == null
            ? com.google.cloud.retail.v2beta.Control.getDefaultInstance()
            : control_;
      } else {
        return controlBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The Control to create.
     * </pre>
     *
     * <code>
     * .google.cloud.retail.v2beta.Control control = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setControl(com.google.cloud.retail.v2beta.Control value) {
      if (controlBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        control_ = value;
      } else {
        controlBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The Control to create.
     * </pre>
     *
     * <code>
     * .google.cloud.retail.v2beta.Control control = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setControl(com.google.cloud.retail.v2beta.Control.Builder builderForValue) {
      if (controlBuilder_ == null) {
        control_ = builderForValue.build();
      } else {
        controlBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The Control to create.
     * </pre>
     *
     * <code>
     * .google.cloud.retail.v2beta.Control control = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder mergeControl(com.google.cloud.retail.v2beta.Control value) {
      if (controlBuilder_ == null) {
        // Merge into an existing non-default value; otherwise just take the new value.
        if (((bitField0_ & 0x00000002) != 0)
            && control_ != null
            && control_ != com.google.cloud.retail.v2beta.Control.getDefaultInstance()) {
          getControlBuilder().mergeFrom(value);
        } else {
          control_ = value;
        }
      } else {
        controlBuilder_.mergeFrom(value);
      }
      if (control_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The Control to create.
     * </pre>
     *
     * <code>
     * .google.cloud.retail.v2beta.Control control = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder clearControl() {
      bitField0_ = (bitField0_ & ~0x00000002);
      control_ = null;
      if (controlBuilder_ != null) {
        controlBuilder_.dispose();
        controlBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The Control to create.
     * </pre>
     *
     * <code>
     * .google.cloud.retail.v2beta.Control control = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.retail.v2beta.Control.Builder getControlBuilder() {
      bitField0_ |= 0x00000002;
      onChanged();
      return getControlFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Required. The Control to create.
     * </pre>
     *
     * <code>
     * .google.cloud.retail.v2beta.Control control = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.retail.v2beta.ControlOrBuilder getControlOrBuilder() {
      if (controlBuilder_ != null) {
        return controlBuilder_.getMessageOrBuilder();
      } else {
        return control_ == null
            ? com.google.cloud.retail.v2beta.Control.getDefaultInstance()
            : control_;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The Control to create.
     * </pre>
     *
     * <code>
     * .google.cloud.retail.v2beta.Control control = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.retail.v2beta.Control,
            com.google.cloud.retail.v2beta.Control.Builder,
            com.google.cloud.retail.v2beta.ControlOrBuilder>
        getControlFieldBuilder() {
      if (controlBuilder_ == null) {
        controlBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.retail.v2beta.Control,
                com.google.cloud.retail.v2beta.Control.Builder,
                com.google.cloud.retail.v2beta.ControlOrBuilder>(
                getControl(), getParentForChildren(), isClean());
        // Once the field builder owns the value, the plain field is cleared.
        control_ = null;
      }
      return controlBuilder_;
    }
    private java.lang.Object controlId_ = "";
    /**
     *
     *
     * <pre>
     * Required. The ID to use for the Control, which will become the final
     * component of the Control's resource name.
     *
     * This value should be 4-63 characters, and valid characters
     * are /[a-z][0-9]-_/.
     * </pre>
     *
     * <code>string control_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return The controlId.
     */
    public java.lang.String getControlId() {
      java.lang.Object ref = controlId_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        controlId_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The ID to use for the Control, which will become the final
     * component of the Control's resource name.
     *
     * This value should be 4-63 characters, and valid characters
     * are /[a-z][0-9]-_/.
     * </pre>
     *
     * <code>string control_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return The bytes for controlId.
     */
    public com.google.protobuf.ByteString getControlIdBytes() {
      java.lang.Object ref = controlId_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        controlId_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The ID to use for the Control, which will become the final
     * component of the Control's resource name.
     *
     * This value should be 4-63 characters, and valid characters
     * are /[a-z][0-9]-_/.
     * </pre>
     *
     * <code>string control_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @param value The controlId to set.
     * @return This builder for chaining.
     */
    public Builder setControlId(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      controlId_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The ID to use for the Control, which will become the final
     * component of the Control's resource name.
     *
     * This value should be 4-63 characters, and valid characters
     * are /[a-z][0-9]-_/.
     * </pre>
     *
     * <code>string control_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearControlId() {
      controlId_ = getDefaultInstance().getControlId();
      bitField0_ = (bitField0_ & ~0x00000004);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The ID to use for the Control, which will become the final
     * component of the Control's resource name.
     *
     * This value should be 4-63 characters, and valid characters
     * are /[a-z][0-9]-_/.
     * </pre>
     *
     * <code>string control_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @param value The bytes for controlId to set.
     * @return This builder for chaining.
     */
    public Builder setControlIdBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      controlId_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.cloud.retail.v2beta.CreateControlRequest)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.retail.v2beta.CreateControlRequest)
  private static final com.google.cloud.retail.v2beta.CreateControlRequest DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.retail.v2beta.CreateControlRequest();
  }
  public static com.google.cloud.retail.v2beta.CreateControlRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  private static final com.google.protobuf.Parser<CreateControlRequest> PARSER =
      new com.google.protobuf.AbstractParser<CreateControlRequest>() {
        @java.lang.Override
        public CreateControlRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<CreateControlRequest> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<CreateControlRequest> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.retail.v2beta.CreateControlRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 36,791 | java-container/proto-google-cloud-container-v1/src/main/java/com/google/container/v1/RecurringTimeWindow.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/container/v1/cluster_service.proto
// Protobuf Java Version: 3.25.8
package com.google.container.v1;
/**
*
*
* <pre>
* Represents an arbitrary window of time that recurs.
* </pre>
*
* Protobuf type {@code google.container.v1.RecurringTimeWindow}
*/
public final class RecurringTimeWindow extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.container.v1.RecurringTimeWindow)
RecurringTimeWindowOrBuilder {
private static final long serialVersionUID = 0L;
// Use RecurringTimeWindow.newBuilder() to construct.
private RecurringTimeWindow(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private RecurringTimeWindow() {
recurrence_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new RecurringTimeWindow();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.container.v1.ClusterServiceProto
.internal_static_google_container_v1_RecurringTimeWindow_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.container.v1.ClusterServiceProto
.internal_static_google_container_v1_RecurringTimeWindow_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.container.v1.RecurringTimeWindow.class,
com.google.container.v1.RecurringTimeWindow.Builder.class);
}
private int bitField0_;
public static final int WINDOW_FIELD_NUMBER = 1;
private com.google.container.v1.TimeWindow window_;
/**
*
*
* <pre>
* The window of the first recurrence.
* </pre>
*
* <code>.google.container.v1.TimeWindow window = 1;</code>
*
* @return Whether the window field is set.
*/
@java.lang.Override
public boolean hasWindow() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* The window of the first recurrence.
* </pre>
*
* <code>.google.container.v1.TimeWindow window = 1;</code>
*
* @return The window.
*/
@java.lang.Override
public com.google.container.v1.TimeWindow getWindow() {
return window_ == null ? com.google.container.v1.TimeWindow.getDefaultInstance() : window_;
}
/**
*
*
* <pre>
* The window of the first recurrence.
* </pre>
*
* <code>.google.container.v1.TimeWindow window = 1;</code>
*/
@java.lang.Override
public com.google.container.v1.TimeWindowOrBuilder getWindowOrBuilder() {
return window_ == null ? com.google.container.v1.TimeWindow.getDefaultInstance() : window_;
}
public static final int RECURRENCE_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object recurrence_ = "";
/**
*
*
* <pre>
* An RRULE (https://tools.ietf.org/html/rfc5545#section-3.8.5.3) for how
* this window reccurs. They go on for the span of time between the start and
* end time.
*
* For example, to have something repeat every weekday, you'd use:
* `FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR`
*
* To repeat some window daily (equivalent to the DailyMaintenanceWindow):
* `FREQ=DAILY`
*
* For the first weekend of every month:
* `FREQ=MONTHLY;BYSETPOS=1;BYDAY=SA,SU`
*
* This specifies how frequently the window starts. Eg, if you wanted to have
* a 9-5 UTC-4 window every weekday, you'd use something like:
* ```
* start time = 2019-01-01T09:00:00-0400
* end time = 2019-01-01T17:00:00-0400
* recurrence = FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR
* ```
*
* Windows can span multiple days. Eg, to make the window encompass every
* weekend from midnight Saturday till the last minute of Sunday UTC:
* ```
* start time = 2019-01-05T00:00:00Z
* end time = 2019-01-07T23:59:00Z
* recurrence = FREQ=WEEKLY;BYDAY=SA
* ```
*
* Note the start and end time's specific dates are largely arbitrary except
* to specify duration of the window and when it first starts.
* The FREQ values of HOURLY, MINUTELY, and SECONDLY are not supported.
* </pre>
*
* <code>string recurrence = 2;</code>
*
* @return The recurrence.
*/
@java.lang.Override
public java.lang.String getRecurrence() {
java.lang.Object ref = recurrence_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
recurrence_ = s;
return s;
}
}
/**
*
*
* <pre>
* An RRULE (https://tools.ietf.org/html/rfc5545#section-3.8.5.3) for how
* this window reccurs. They go on for the span of time between the start and
* end time.
*
* For example, to have something repeat every weekday, you'd use:
* `FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR`
*
* To repeat some window daily (equivalent to the DailyMaintenanceWindow):
* `FREQ=DAILY`
*
* For the first weekend of every month:
* `FREQ=MONTHLY;BYSETPOS=1;BYDAY=SA,SU`
*
* This specifies how frequently the window starts. Eg, if you wanted to have
* a 9-5 UTC-4 window every weekday, you'd use something like:
* ```
* start time = 2019-01-01T09:00:00-0400
* end time = 2019-01-01T17:00:00-0400
* recurrence = FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR
* ```
*
* Windows can span multiple days. Eg, to make the window encompass every
* weekend from midnight Saturday till the last minute of Sunday UTC:
* ```
* start time = 2019-01-05T00:00:00Z
* end time = 2019-01-07T23:59:00Z
* recurrence = FREQ=WEEKLY;BYDAY=SA
* ```
*
* Note the start and end time's specific dates are largely arbitrary except
* to specify duration of the window and when it first starts.
* The FREQ values of HOURLY, MINUTELY, and SECONDLY are not supported.
* </pre>
*
* <code>string recurrence = 2;</code>
*
* @return The bytes for recurrence.
*/
@java.lang.Override
public com.google.protobuf.ByteString getRecurrenceBytes() {
java.lang.Object ref = recurrence_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
recurrence_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getWindow());
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(recurrence_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, recurrence_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getWindow());
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(recurrence_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, recurrence_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.container.v1.RecurringTimeWindow)) {
return super.equals(obj);
}
com.google.container.v1.RecurringTimeWindow other =
(com.google.container.v1.RecurringTimeWindow) obj;
if (hasWindow() != other.hasWindow()) return false;
if (hasWindow()) {
if (!getWindow().equals(other.getWindow())) return false;
}
if (!getRecurrence().equals(other.getRecurrence())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasWindow()) {
hash = (37 * hash) + WINDOW_FIELD_NUMBER;
hash = (53 * hash) + getWindow().hashCode();
}
hash = (37 * hash) + RECURRENCE_FIELD_NUMBER;
hash = (53 * hash) + getRecurrence().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.container.v1.RecurringTimeWindow parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.container.v1.RecurringTimeWindow parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.container.v1.RecurringTimeWindow parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.container.v1.RecurringTimeWindow parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.container.v1.RecurringTimeWindow parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.container.v1.RecurringTimeWindow parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.container.v1.RecurringTimeWindow parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.container.v1.RecurringTimeWindow parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.container.v1.RecurringTimeWindow parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.container.v1.RecurringTimeWindow parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.container.v1.RecurringTimeWindow parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.container.v1.RecurringTimeWindow parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.container.v1.RecurringTimeWindow prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Represents an arbitrary window of time that recurs.
* </pre>
*
* Protobuf type {@code google.container.v1.RecurringTimeWindow}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.container.v1.RecurringTimeWindow)
com.google.container.v1.RecurringTimeWindowOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.container.v1.ClusterServiceProto
.internal_static_google_container_v1_RecurringTimeWindow_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.container.v1.ClusterServiceProto
.internal_static_google_container_v1_RecurringTimeWindow_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.container.v1.RecurringTimeWindow.class,
com.google.container.v1.RecurringTimeWindow.Builder.class);
}
// Construct using com.google.container.v1.RecurringTimeWindow.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getWindowFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
window_ = null;
if (windowBuilder_ != null) {
windowBuilder_.dispose();
windowBuilder_ = null;
}
recurrence_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.container.v1.ClusterServiceProto
.internal_static_google_container_v1_RecurringTimeWindow_descriptor;
}
@java.lang.Override
public com.google.container.v1.RecurringTimeWindow getDefaultInstanceForType() {
return com.google.container.v1.RecurringTimeWindow.getDefaultInstance();
}
@java.lang.Override
public com.google.container.v1.RecurringTimeWindow build() {
com.google.container.v1.RecurringTimeWindow result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.container.v1.RecurringTimeWindow buildPartial() {
com.google.container.v1.RecurringTimeWindow result =
new com.google.container.v1.RecurringTimeWindow(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.container.v1.RecurringTimeWindow result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.window_ = windowBuilder_ == null ? window_ : windowBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.recurrence_ = recurrence_;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.container.v1.RecurringTimeWindow) {
return mergeFrom((com.google.container.v1.RecurringTimeWindow) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.container.v1.RecurringTimeWindow other) {
if (other == com.google.container.v1.RecurringTimeWindow.getDefaultInstance()) return this;
if (other.hasWindow()) {
mergeWindow(other.getWindow());
}
if (!other.getRecurrence().isEmpty()) {
recurrence_ = other.recurrence_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
input.readMessage(getWindowFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
recurrence_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private com.google.container.v1.TimeWindow window_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.container.v1.TimeWindow,
com.google.container.v1.TimeWindow.Builder,
com.google.container.v1.TimeWindowOrBuilder>
windowBuilder_;
/**
*
*
* <pre>
* The window of the first recurrence.
* </pre>
*
* <code>.google.container.v1.TimeWindow window = 1;</code>
*
* @return Whether the window field is set.
*/
public boolean hasWindow() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* The window of the first recurrence.
* </pre>
*
* <code>.google.container.v1.TimeWindow window = 1;</code>
*
* @return The window.
*/
public com.google.container.v1.TimeWindow getWindow() {
if (windowBuilder_ == null) {
return window_ == null ? com.google.container.v1.TimeWindow.getDefaultInstance() : window_;
} else {
return windowBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* The window of the first recurrence.
* </pre>
*
* <code>.google.container.v1.TimeWindow window = 1;</code>
*/
public Builder setWindow(com.google.container.v1.TimeWindow value) {
if (windowBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
window_ = value;
} else {
windowBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* The window of the first recurrence.
* </pre>
*
* <code>.google.container.v1.TimeWindow window = 1;</code>
*/
public Builder setWindow(com.google.container.v1.TimeWindow.Builder builderForValue) {
if (windowBuilder_ == null) {
window_ = builderForValue.build();
} else {
windowBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* The window of the first recurrence.
* </pre>
*
* <code>.google.container.v1.TimeWindow window = 1;</code>
*/
public Builder mergeWindow(com.google.container.v1.TimeWindow value) {
if (windowBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)
&& window_ != null
&& window_ != com.google.container.v1.TimeWindow.getDefaultInstance()) {
getWindowBuilder().mergeFrom(value);
} else {
window_ = value;
}
} else {
windowBuilder_.mergeFrom(value);
}
if (window_ != null) {
bitField0_ |= 0x00000001;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* The window of the first recurrence.
* </pre>
*
* <code>.google.container.v1.TimeWindow window = 1;</code>
*/
public Builder clearWindow() {
bitField0_ = (bitField0_ & ~0x00000001);
window_ = null;
if (windowBuilder_ != null) {
windowBuilder_.dispose();
windowBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* The window of the first recurrence.
* </pre>
*
* <code>.google.container.v1.TimeWindow window = 1;</code>
*/
public com.google.container.v1.TimeWindow.Builder getWindowBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getWindowFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* The window of the first recurrence.
* </pre>
*
* <code>.google.container.v1.TimeWindow window = 1;</code>
*/
public com.google.container.v1.TimeWindowOrBuilder getWindowOrBuilder() {
if (windowBuilder_ != null) {
return windowBuilder_.getMessageOrBuilder();
} else {
return window_ == null ? com.google.container.v1.TimeWindow.getDefaultInstance() : window_;
}
}
/**
*
*
* <pre>
* The window of the first recurrence.
* </pre>
*
* <code>.google.container.v1.TimeWindow window = 1;</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.container.v1.TimeWindow,
com.google.container.v1.TimeWindow.Builder,
com.google.container.v1.TimeWindowOrBuilder>
getWindowFieldBuilder() {
if (windowBuilder_ == null) {
windowBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.container.v1.TimeWindow,
com.google.container.v1.TimeWindow.Builder,
com.google.container.v1.TimeWindowOrBuilder>(
getWindow(), getParentForChildren(), isClean());
window_ = null;
}
return windowBuilder_;
}
private java.lang.Object recurrence_ = "";
/**
*
*
* <pre>
* An RRULE (https://tools.ietf.org/html/rfc5545#section-3.8.5.3) for how
* this window reccurs. They go on for the span of time between the start and
* end time.
*
* For example, to have something repeat every weekday, you'd use:
* `FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR`
*
* To repeat some window daily (equivalent to the DailyMaintenanceWindow):
* `FREQ=DAILY`
*
* For the first weekend of every month:
* `FREQ=MONTHLY;BYSETPOS=1;BYDAY=SA,SU`
*
* This specifies how frequently the window starts. Eg, if you wanted to have
* a 9-5 UTC-4 window every weekday, you'd use something like:
* ```
* start time = 2019-01-01T09:00:00-0400
* end time = 2019-01-01T17:00:00-0400
* recurrence = FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR
* ```
*
* Windows can span multiple days. Eg, to make the window encompass every
* weekend from midnight Saturday till the last minute of Sunday UTC:
* ```
* start time = 2019-01-05T00:00:00Z
* end time = 2019-01-07T23:59:00Z
* recurrence = FREQ=WEEKLY;BYDAY=SA
* ```
*
* Note the start and end time's specific dates are largely arbitrary except
* to specify duration of the window and when it first starts.
* The FREQ values of HOURLY, MINUTELY, and SECONDLY are not supported.
* </pre>
*
* <code>string recurrence = 2;</code>
*
* @return The recurrence.
*/
public java.lang.String getRecurrence() {
java.lang.Object ref = recurrence_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
recurrence_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* An RRULE (https://tools.ietf.org/html/rfc5545#section-3.8.5.3) for how
* this window reccurs. They go on for the span of time between the start and
* end time.
*
* For example, to have something repeat every weekday, you'd use:
* `FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR`
*
* To repeat some window daily (equivalent to the DailyMaintenanceWindow):
* `FREQ=DAILY`
*
* For the first weekend of every month:
* `FREQ=MONTHLY;BYSETPOS=1;BYDAY=SA,SU`
*
* This specifies how frequently the window starts. Eg, if you wanted to have
* a 9-5 UTC-4 window every weekday, you'd use something like:
* ```
* start time = 2019-01-01T09:00:00-0400
* end time = 2019-01-01T17:00:00-0400
* recurrence = FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR
* ```
*
* Windows can span multiple days. Eg, to make the window encompass every
* weekend from midnight Saturday till the last minute of Sunday UTC:
* ```
* start time = 2019-01-05T00:00:00Z
* end time = 2019-01-07T23:59:00Z
* recurrence = FREQ=WEEKLY;BYDAY=SA
* ```
*
* Note the start and end time's specific dates are largely arbitrary except
* to specify duration of the window and when it first starts.
* The FREQ values of HOURLY, MINUTELY, and SECONDLY are not supported.
* </pre>
*
* <code>string recurrence = 2;</code>
*
* @return The bytes for recurrence.
*/
public com.google.protobuf.ByteString getRecurrenceBytes() {
java.lang.Object ref = recurrence_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
recurrence_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* An RRULE (https://tools.ietf.org/html/rfc5545#section-3.8.5.3) for how
* this window reccurs. They go on for the span of time between the start and
* end time.
*
* For example, to have something repeat every weekday, you'd use:
* `FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR`
*
* To repeat some window daily (equivalent to the DailyMaintenanceWindow):
* `FREQ=DAILY`
*
* For the first weekend of every month:
* `FREQ=MONTHLY;BYSETPOS=1;BYDAY=SA,SU`
*
* This specifies how frequently the window starts. Eg, if you wanted to have
* a 9-5 UTC-4 window every weekday, you'd use something like:
* ```
* start time = 2019-01-01T09:00:00-0400
* end time = 2019-01-01T17:00:00-0400
* recurrence = FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR
* ```
*
* Windows can span multiple days. Eg, to make the window encompass every
* weekend from midnight Saturday till the last minute of Sunday UTC:
* ```
* start time = 2019-01-05T00:00:00Z
* end time = 2019-01-07T23:59:00Z
* recurrence = FREQ=WEEKLY;BYDAY=SA
* ```
*
* Note the start and end time's specific dates are largely arbitrary except
* to specify duration of the window and when it first starts.
* The FREQ values of HOURLY, MINUTELY, and SECONDLY are not supported.
* </pre>
*
* <code>string recurrence = 2;</code>
*
* @param value The recurrence to set.
* @return This builder for chaining.
*/
public Builder setRecurrence(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
recurrence_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* An RRULE (https://tools.ietf.org/html/rfc5545#section-3.8.5.3) for how
* this window reccurs. They go on for the span of time between the start and
* end time.
*
* For example, to have something repeat every weekday, you'd use:
* `FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR`
*
* To repeat some window daily (equivalent to the DailyMaintenanceWindow):
* `FREQ=DAILY`
*
* For the first weekend of every month:
* `FREQ=MONTHLY;BYSETPOS=1;BYDAY=SA,SU`
*
* This specifies how frequently the window starts. Eg, if you wanted to have
* a 9-5 UTC-4 window every weekday, you'd use something like:
* ```
* start time = 2019-01-01T09:00:00-0400
* end time = 2019-01-01T17:00:00-0400
* recurrence = FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR
* ```
*
* Windows can span multiple days. Eg, to make the window encompass every
* weekend from midnight Saturday till the last minute of Sunday UTC:
* ```
* start time = 2019-01-05T00:00:00Z
* end time = 2019-01-07T23:59:00Z
* recurrence = FREQ=WEEKLY;BYDAY=SA
* ```
*
* Note the start and end time's specific dates are largely arbitrary except
* to specify duration of the window and when it first starts.
* The FREQ values of HOURLY, MINUTELY, and SECONDLY are not supported.
* </pre>
*
* <code>string recurrence = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearRecurrence() {
recurrence_ = getDefaultInstance().getRecurrence();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* An RRULE (https://tools.ietf.org/html/rfc5545#section-3.8.5.3) for how
* this window reccurs. They go on for the span of time between the start and
* end time.
*
* For example, to have something repeat every weekday, you'd use:
* `FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR`
*
* To repeat some window daily (equivalent to the DailyMaintenanceWindow):
* `FREQ=DAILY`
*
* For the first weekend of every month:
* `FREQ=MONTHLY;BYSETPOS=1;BYDAY=SA,SU`
*
* This specifies how frequently the window starts. Eg, if you wanted to have
* a 9-5 UTC-4 window every weekday, you'd use something like:
* ```
* start time = 2019-01-01T09:00:00-0400
* end time = 2019-01-01T17:00:00-0400
* recurrence = FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR
* ```
*
* Windows can span multiple days. Eg, to make the window encompass every
* weekend from midnight Saturday till the last minute of Sunday UTC:
* ```
* start time = 2019-01-05T00:00:00Z
* end time = 2019-01-07T23:59:00Z
* recurrence = FREQ=WEEKLY;BYDAY=SA
* ```
*
* Note the start and end time's specific dates are largely arbitrary except
* to specify duration of the window and when it first starts.
* The FREQ values of HOURLY, MINUTELY, and SECONDLY are not supported.
* </pre>
*
* <code>string recurrence = 2;</code>
*
* @param value The bytes for recurrence to set.
* @return This builder for chaining.
*/
public Builder setRecurrenceBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
recurrence_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.container.v1.RecurringTimeWindow)
}
// @@protoc_insertion_point(class_scope:google.container.v1.RecurringTimeWindow)
private static final com.google.container.v1.RecurringTimeWindow DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.container.v1.RecurringTimeWindow();
}
public static com.google.container.v1.RecurringTimeWindow getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<RecurringTimeWindow> PARSER =
new com.google.protobuf.AbstractParser<RecurringTimeWindow>() {
@java.lang.Override
public RecurringTimeWindow parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<RecurringTimeWindow> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<RecurringTimeWindow> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.container.v1.RecurringTimeWindow getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 36,891 | java-speech/proto-google-cloud-speech-v1p1beta1/src/main/java/com/google/cloud/speech/v1p1beta1/SpeechContext.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/speech/v1p1beta1/cloud_speech.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.speech.v1p1beta1;
/**
*
*
* <pre>
* Provides "hints" to the speech recognizer to favor specific words and phrases
* in the results.
* </pre>
*
* Protobuf type {@code google.cloud.speech.v1p1beta1.SpeechContext}
*/
public final class SpeechContext extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.speech.v1p1beta1.SpeechContext)
SpeechContextOrBuilder {
private static final long serialVersionUID = 0L;
// Use SpeechContext.newBuilder() to construct.
private SpeechContext(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private SpeechContext() {
phrases_ = com.google.protobuf.LazyStringArrayList.emptyList();
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new SpeechContext();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.speech.v1p1beta1.SpeechProto
.internal_static_google_cloud_speech_v1p1beta1_SpeechContext_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.speech.v1p1beta1.SpeechProto
.internal_static_google_cloud_speech_v1p1beta1_SpeechContext_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.speech.v1p1beta1.SpeechContext.class,
com.google.cloud.speech.v1p1beta1.SpeechContext.Builder.class);
}
public static final int PHRASES_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private com.google.protobuf.LazyStringArrayList phrases_ =
com.google.protobuf.LazyStringArrayList.emptyList();
/**
*
*
* <pre>
* A list of strings containing words and phrases "hints" so that
* the speech recognition is more likely to recognize them. This can be used
* to improve the accuracy for specific words and phrases, for example, if
* specific commands are typically spoken by the user. This can also be used
* to add additional words to the vocabulary of the recognizer. See
* [usage limits](https://cloud.google.com/speech-to-text/quotas#content).
*
* List items can also be set to classes for groups of words that represent
* common concepts that occur in natural language. For example, rather than
* providing phrase hints for every month of the year, using the $MONTH class
* improves the likelihood of correctly transcribing audio that includes
* months.
* </pre>
*
* <code>repeated string phrases = 1;</code>
*
* @return A list containing the phrases.
*/
public com.google.protobuf.ProtocolStringList getPhrasesList() {
return phrases_;
}
/**
*
*
* <pre>
* A list of strings containing words and phrases "hints" so that
* the speech recognition is more likely to recognize them. This can be used
* to improve the accuracy for specific words and phrases, for example, if
* specific commands are typically spoken by the user. This can also be used
* to add additional words to the vocabulary of the recognizer. See
* [usage limits](https://cloud.google.com/speech-to-text/quotas#content).
*
* List items can also be set to classes for groups of words that represent
* common concepts that occur in natural language. For example, rather than
* providing phrase hints for every month of the year, using the $MONTH class
* improves the likelihood of correctly transcribing audio that includes
* months.
* </pre>
*
* <code>repeated string phrases = 1;</code>
*
* @return The count of phrases.
*/
public int getPhrasesCount() {
return phrases_.size();
}
/**
*
*
* <pre>
* A list of strings containing words and phrases "hints" so that
* the speech recognition is more likely to recognize them. This can be used
* to improve the accuracy for specific words and phrases, for example, if
* specific commands are typically spoken by the user. This can also be used
* to add additional words to the vocabulary of the recognizer. See
* [usage limits](https://cloud.google.com/speech-to-text/quotas#content).
*
* List items can also be set to classes for groups of words that represent
* common concepts that occur in natural language. For example, rather than
* providing phrase hints for every month of the year, using the $MONTH class
* improves the likelihood of correctly transcribing audio that includes
* months.
* </pre>
*
* <code>repeated string phrases = 1;</code>
*
* @param index The index of the element to return.
* @return The phrases at the given index.
*/
public java.lang.String getPhrases(int index) {
return phrases_.get(index);
}
/**
*
*
* <pre>
* A list of strings containing words and phrases "hints" so that
* the speech recognition is more likely to recognize them. This can be used
* to improve the accuracy for specific words and phrases, for example, if
* specific commands are typically spoken by the user. This can also be used
* to add additional words to the vocabulary of the recognizer. See
* [usage limits](https://cloud.google.com/speech-to-text/quotas#content).
*
* List items can also be set to classes for groups of words that represent
* common concepts that occur in natural language. For example, rather than
* providing phrase hints for every month of the year, using the $MONTH class
* improves the likelihood of correctly transcribing audio that includes
* months.
* </pre>
*
* <code>repeated string phrases = 1;</code>
*
* @param index The index of the value to return.
* @return The bytes of the phrases at the given index.
*/
public com.google.protobuf.ByteString getPhrasesBytes(int index) {
return phrases_.getByteString(index);
}
public static final int BOOST_FIELD_NUMBER = 4;
private float boost_ = 0F;
/**
*
*
* <pre>
* Hint Boost. Positive value will increase the probability that a specific
* phrase will be recognized over other similar sounding phrases. The higher
* the boost, the higher the chance of false positive recognition as well.
* Negative boost values would correspond to anti-biasing. Anti-biasing is not
* enabled, so negative boost will simply be ignored. Though `boost` can
* accept a wide range of positive values, most use cases are best served with
* values between 0 and 20. We recommend using a binary search approach to
* finding the optimal value for your use case.
* </pre>
*
* <code>float boost = 4;</code>
*
* @return The boost.
*/
@java.lang.Override
public float getBoost() {
return boost_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < phrases_.size(); i++) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, phrases_.getRaw(i));
}
if (java.lang.Float.floatToRawIntBits(boost_) != 0) {
output.writeFloat(4, boost_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
{
int dataSize = 0;
for (int i = 0; i < phrases_.size(); i++) {
dataSize += computeStringSizeNoTag(phrases_.getRaw(i));
}
size += dataSize;
size += 1 * getPhrasesList().size();
}
if (java.lang.Float.floatToRawIntBits(boost_) != 0) {
size += com.google.protobuf.CodedOutputStream.computeFloatSize(4, boost_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.speech.v1p1beta1.SpeechContext)) {
return super.equals(obj);
}
com.google.cloud.speech.v1p1beta1.SpeechContext other =
(com.google.cloud.speech.v1p1beta1.SpeechContext) obj;
if (!getPhrasesList().equals(other.getPhrasesList())) return false;
if (java.lang.Float.floatToIntBits(getBoost())
!= java.lang.Float.floatToIntBits(other.getBoost())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getPhrasesCount() > 0) {
hash = (37 * hash) + PHRASES_FIELD_NUMBER;
hash = (53 * hash) + getPhrasesList().hashCode();
}
hash = (37 * hash) + BOOST_FIELD_NUMBER;
hash = (53 * hash) + java.lang.Float.floatToIntBits(getBoost());
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.speech.v1p1beta1.SpeechContext parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.speech.v1p1beta1.SpeechContext parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.speech.v1p1beta1.SpeechContext parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.speech.v1p1beta1.SpeechContext parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.speech.v1p1beta1.SpeechContext parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.speech.v1p1beta1.SpeechContext parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.speech.v1p1beta1.SpeechContext parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.speech.v1p1beta1.SpeechContext parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.speech.v1p1beta1.SpeechContext parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.speech.v1p1beta1.SpeechContext parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.speech.v1p1beta1.SpeechContext parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.speech.v1p1beta1.SpeechContext parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.speech.v1p1beta1.SpeechContext prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Provides "hints" to the speech recognizer to favor specific words and phrases
* in the results.
* </pre>
*
* Protobuf type {@code google.cloud.speech.v1p1beta1.SpeechContext}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.speech.v1p1beta1.SpeechContext)
com.google.cloud.speech.v1p1beta1.SpeechContextOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.speech.v1p1beta1.SpeechProto
.internal_static_google_cloud_speech_v1p1beta1_SpeechContext_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.speech.v1p1beta1.SpeechProto
.internal_static_google_cloud_speech_v1p1beta1_SpeechContext_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.speech.v1p1beta1.SpeechContext.class,
com.google.cloud.speech.v1p1beta1.SpeechContext.Builder.class);
}
// Construct using com.google.cloud.speech.v1p1beta1.SpeechContext.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
phrases_ = com.google.protobuf.LazyStringArrayList.emptyList();
boost_ = 0F;
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.speech.v1p1beta1.SpeechProto
.internal_static_google_cloud_speech_v1p1beta1_SpeechContext_descriptor;
}
@java.lang.Override
public com.google.cloud.speech.v1p1beta1.SpeechContext getDefaultInstanceForType() {
return com.google.cloud.speech.v1p1beta1.SpeechContext.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.speech.v1p1beta1.SpeechContext build() {
com.google.cloud.speech.v1p1beta1.SpeechContext result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.speech.v1p1beta1.SpeechContext buildPartial() {
com.google.cloud.speech.v1p1beta1.SpeechContext result =
new com.google.cloud.speech.v1p1beta1.SpeechContext(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.speech.v1p1beta1.SpeechContext result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
phrases_.makeImmutable();
result.phrases_ = phrases_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.boost_ = boost_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.speech.v1p1beta1.SpeechContext) {
return mergeFrom((com.google.cloud.speech.v1p1beta1.SpeechContext) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.speech.v1p1beta1.SpeechContext other) {
if (other == com.google.cloud.speech.v1p1beta1.SpeechContext.getDefaultInstance())
return this;
if (!other.phrases_.isEmpty()) {
if (phrases_.isEmpty()) {
phrases_ = other.phrases_;
bitField0_ |= 0x00000001;
} else {
ensurePhrasesIsMutable();
phrases_.addAll(other.phrases_);
}
onChanged();
}
if (other.getBoost() != 0F) {
setBoost(other.getBoost());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
java.lang.String s = input.readStringRequireUtf8();
ensurePhrasesIsMutable();
phrases_.add(s);
break;
} // case 10
case 37:
{
boost_ = input.readFloat();
bitField0_ |= 0x00000002;
break;
} // case 37
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private com.google.protobuf.LazyStringArrayList phrases_ =
com.google.protobuf.LazyStringArrayList.emptyList();
private void ensurePhrasesIsMutable() {
if (!phrases_.isModifiable()) {
phrases_ = new com.google.protobuf.LazyStringArrayList(phrases_);
}
bitField0_ |= 0x00000001;
}
/**
*
*
* <pre>
* A list of strings containing words and phrases "hints" so that
* the speech recognition is more likely to recognize them. This can be used
* to improve the accuracy for specific words and phrases, for example, if
* specific commands are typically spoken by the user. This can also be used
* to add additional words to the vocabulary of the recognizer. See
* [usage limits](https://cloud.google.com/speech-to-text/quotas#content).
*
* List items can also be set to classes for groups of words that represent
* common concepts that occur in natural language. For example, rather than
* providing phrase hints for every month of the year, using the $MONTH class
* improves the likelihood of correctly transcribing audio that includes
* months.
* </pre>
*
* <code>repeated string phrases = 1;</code>
*
* @return A list containing the phrases.
*/
public com.google.protobuf.ProtocolStringList getPhrasesList() {
phrases_.makeImmutable();
return phrases_;
}
/**
*
*
* <pre>
* A list of strings containing words and phrases "hints" so that
* the speech recognition is more likely to recognize them. This can be used
* to improve the accuracy for specific words and phrases, for example, if
* specific commands are typically spoken by the user. This can also be used
* to add additional words to the vocabulary of the recognizer. See
* [usage limits](https://cloud.google.com/speech-to-text/quotas#content).
*
* List items can also be set to classes for groups of words that represent
* common concepts that occur in natural language. For example, rather than
* providing phrase hints for every month of the year, using the $MONTH class
* improves the likelihood of correctly transcribing audio that includes
* months.
* </pre>
*
* <code>repeated string phrases = 1;</code>
*
* @return The count of phrases.
*/
public int getPhrasesCount() {
return phrases_.size();
}
/**
*
*
* <pre>
* A list of strings containing words and phrases "hints" so that
* the speech recognition is more likely to recognize them. This can be used
* to improve the accuracy for specific words and phrases, for example, if
* specific commands are typically spoken by the user. This can also be used
* to add additional words to the vocabulary of the recognizer. See
* [usage limits](https://cloud.google.com/speech-to-text/quotas#content).
*
* List items can also be set to classes for groups of words that represent
* common concepts that occur in natural language. For example, rather than
* providing phrase hints for every month of the year, using the $MONTH class
* improves the likelihood of correctly transcribing audio that includes
* months.
* </pre>
*
* <code>repeated string phrases = 1;</code>
*
* @param index The index of the element to return.
* @return The phrases at the given index.
*/
public java.lang.String getPhrases(int index) {
return phrases_.get(index);
}
/**
*
*
* <pre>
* A list of strings containing words and phrases "hints" so that
* the speech recognition is more likely to recognize them. This can be used
* to improve the accuracy for specific words and phrases, for example, if
* specific commands are typically spoken by the user. This can also be used
* to add additional words to the vocabulary of the recognizer. See
* [usage limits](https://cloud.google.com/speech-to-text/quotas#content).
*
* List items can also be set to classes for groups of words that represent
* common concepts that occur in natural language. For example, rather than
* providing phrase hints for every month of the year, using the $MONTH class
* improves the likelihood of correctly transcribing audio that includes
* months.
* </pre>
*
* <code>repeated string phrases = 1;</code>
*
* @param index The index of the value to return.
* @return The bytes of the phrases at the given index.
*/
public com.google.protobuf.ByteString getPhrasesBytes(int index) {
return phrases_.getByteString(index);
}
/**
*
*
* <pre>
* A list of strings containing words and phrases "hints" so that
* the speech recognition is more likely to recognize them. This can be used
* to improve the accuracy for specific words and phrases, for example, if
* specific commands are typically spoken by the user. This can also be used
* to add additional words to the vocabulary of the recognizer. See
* [usage limits](https://cloud.google.com/speech-to-text/quotas#content).
*
* List items can also be set to classes for groups of words that represent
* common concepts that occur in natural language. For example, rather than
* providing phrase hints for every month of the year, using the $MONTH class
* improves the likelihood of correctly transcribing audio that includes
* months.
* </pre>
*
* <code>repeated string phrases = 1;</code>
*
* @param index The index to set the value at.
* @param value The phrases to set.
* @return This builder for chaining.
*/
public Builder setPhrases(int index, java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
ensurePhrasesIsMutable();
phrases_.set(index, value);
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* A list of strings containing words and phrases "hints" so that
* the speech recognition is more likely to recognize them. This can be used
* to improve the accuracy for specific words and phrases, for example, if
* specific commands are typically spoken by the user. This can also be used
* to add additional words to the vocabulary of the recognizer. See
* [usage limits](https://cloud.google.com/speech-to-text/quotas#content).
*
* List items can also be set to classes for groups of words that represent
* common concepts that occur in natural language. For example, rather than
* providing phrase hints for every month of the year, using the $MONTH class
* improves the likelihood of correctly transcribing audio that includes
* months.
* </pre>
*
* <code>repeated string phrases = 1;</code>
*
* @param value The phrases to add.
* @return This builder for chaining.
*/
public Builder addPhrases(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
ensurePhrasesIsMutable();
phrases_.add(value);
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* A list of strings containing words and phrases "hints" so that
* the speech recognition is more likely to recognize them. This can be used
* to improve the accuracy for specific words and phrases, for example, if
* specific commands are typically spoken by the user. This can also be used
* to add additional words to the vocabulary of the recognizer. See
* [usage limits](https://cloud.google.com/speech-to-text/quotas#content).
*
* List items can also be set to classes for groups of words that represent
* common concepts that occur in natural language. For example, rather than
* providing phrase hints for every month of the year, using the $MONTH class
* improves the likelihood of correctly transcribing audio that includes
* months.
* </pre>
*
* <code>repeated string phrases = 1;</code>
*
* @param values The phrases to add.
* @return This builder for chaining.
*/
public Builder addAllPhrases(java.lang.Iterable<java.lang.String> values) {
ensurePhrasesIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, phrases_);
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* A list of strings containing words and phrases "hints" so that
* the speech recognition is more likely to recognize them. This can be used
* to improve the accuracy for specific words and phrases, for example, if
* specific commands are typically spoken by the user. This can also be used
* to add additional words to the vocabulary of the recognizer. See
* [usage limits](https://cloud.google.com/speech-to-text/quotas#content).
*
* List items can also be set to classes for groups of words that represent
* common concepts that occur in natural language. For example, rather than
* providing phrase hints for every month of the year, using the $MONTH class
* improves the likelihood of correctly transcribing audio that includes
* months.
* </pre>
*
* <code>repeated string phrases = 1;</code>
*
* @return This builder for chaining.
*/
public Builder clearPhrases() {
phrases_ = com.google.protobuf.LazyStringArrayList.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
;
onChanged();
return this;
}
/**
*
*
* <pre>
* A list of strings containing words and phrases "hints" so that
* the speech recognition is more likely to recognize them. This can be used
* to improve the accuracy for specific words and phrases, for example, if
* specific commands are typically spoken by the user. This can also be used
* to add additional words to the vocabulary of the recognizer. See
* [usage limits](https://cloud.google.com/speech-to-text/quotas#content).
*
* List items can also be set to classes for groups of words that represent
* common concepts that occur in natural language. For example, rather than
* providing phrase hints for every month of the year, using the $MONTH class
* improves the likelihood of correctly transcribing audio that includes
* months.
* </pre>
*
* <code>repeated string phrases = 1;</code>
*
* @param value The bytes of the phrases to add.
* @return This builder for chaining.
*/
public Builder addPhrasesBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
ensurePhrasesIsMutable();
phrases_.add(value);
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private float boost_;
/**
*
*
* <pre>
* Hint Boost. Positive value will increase the probability that a specific
* phrase will be recognized over other similar sounding phrases. The higher
* the boost, the higher the chance of false positive recognition as well.
* Negative boost values would correspond to anti-biasing. Anti-biasing is not
* enabled, so negative boost will simply be ignored. Though `boost` can
* accept a wide range of positive values, most use cases are best served with
* values between 0 and 20. We recommend using a binary search approach to
* finding the optimal value for your use case.
* </pre>
*
* <code>float boost = 4;</code>
*
* @return The boost.
*/
@java.lang.Override
public float getBoost() {
return boost_;
}
/**
*
*
* <pre>
* Hint Boost. Positive value will increase the probability that a specific
* phrase will be recognized over other similar sounding phrases. The higher
* the boost, the higher the chance of false positive recognition as well.
* Negative boost values would correspond to anti-biasing. Anti-biasing is not
* enabled, so negative boost will simply be ignored. Though `boost` can
* accept a wide range of positive values, most use cases are best served with
* values between 0 and 20. We recommend using a binary search approach to
* finding the optimal value for your use case.
* </pre>
*
* <code>float boost = 4;</code>
*
* @param value The boost to set.
* @return This builder for chaining.
*/
public Builder setBoost(float value) {
boost_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Hint Boost. Positive value will increase the probability that a specific
* phrase will be recognized over other similar sounding phrases. The higher
* the boost, the higher the chance of false positive recognition as well.
* Negative boost values would correspond to anti-biasing. Anti-biasing is not
* enabled, so negative boost will simply be ignored. Though `boost` can
* accept a wide range of positive values, most use cases are best served with
* values between 0 and 20. We recommend using a binary search approach to
* finding the optimal value for your use case.
* </pre>
*
* <code>float boost = 4;</code>
*
* @return This builder for chaining.
*/
public Builder clearBoost() {
bitField0_ = (bitField0_ & ~0x00000002);
boost_ = 0F;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.speech.v1p1beta1.SpeechContext)
}
// @@protoc_insertion_point(class_scope:google.cloud.speech.v1p1beta1.SpeechContext)
private static final com.google.cloud.speech.v1p1beta1.SpeechContext DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.speech.v1p1beta1.SpeechContext();
}
public static com.google.cloud.speech.v1p1beta1.SpeechContext getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<SpeechContext> PARSER =
new com.google.protobuf.AbstractParser<SpeechContext>() {
@java.lang.Override
public SpeechContext parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<SpeechContext> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<SpeechContext> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.speech.v1p1beta1.SpeechContext getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
apache/derby | 36,733 | java/org.apache.derby.tests/org/apache/derbyTesting/functionTests/tests/jdbc4/PreparedStatementTest42.java | /*
*
* Derby - Class org.apache.derbyTesting.functionTests.tests.jdbc4.PreparedStatementTest42
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.derbyTesting.functionTests.tests.jdbc4;
import java.math.BigDecimal;
import java.sql.Blob;
import java.sql.CallableStatement;
import java.sql.Clob;
import java.sql.Connection;
import java.sql.Date;
import java.sql.JDBCType;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.SQLFeatureNotSupportedException;
import java.sql.Statement;
import java.sql.Time;
import java.sql.Timestamp;
import java.sql.Types;
import junit.framework.Test;
import org.apache.derby.iapi.types.HarmonySerialBlob;
import org.apache.derby.iapi.types.HarmonySerialClob;
import org.apache.derbyTesting.functionTests.tests.lang.Price;
import org.apache.derbyTesting.junit.BaseJDBCTestCase;
import org.apache.derbyTesting.junit.BaseTestSuite;
import org.apache.derbyTesting.junit.JDBC;
import org.apache.derbyTesting.junit.TestConfiguration;
/**
* Tests for new methods added for PreparedStatement in JDBC 4.2.
*/
public class PreparedStatementTest42 extends BaseJDBCTestCase
{
//////////////////////////////////////////////////////////
//
// CONSTANTS
//
//////////////////////////////////////////////////////////
private static final String UNIMPLEMENTED_FEATURE = "0A000";
//
// If any of these becomes a legal Derby type, remove it from this table and put a corresponding line
// into _columnDescs.
//
private static final JDBCType[] ILLEGAL_JDBC_TYPES = new JDBCType[]
{
JDBCType.ARRAY,
JDBCType.DATALINK,
JDBCType.DISTINCT,
JDBCType.LONGNVARCHAR,
JDBCType.NCHAR,
JDBCType.NCLOB,
JDBCType.NULL,
JDBCType.NVARCHAR,
JDBCType.OTHER,
JDBCType.REF,
JDBCType.REF_CURSOR,
JDBCType.ROWID,
JDBCType.SQLXML,
JDBCType.STRUCT,
};
private static final int[] ILLEGAL_SQL_TYPES = new int[]
{
Types.ARRAY,
Types.DATALINK,
Types.DISTINCT,
Types.LONGNVARCHAR,
Types.NCHAR,
Types.NCLOB,
Types.NVARCHAR,
Types.OTHER,
Types.REF,
Types.REF_CURSOR,
Types.ROWID,
Types.SQLXML,
Types.STRUCT,
};
//////////////////////////////////////////////////////////
//
// STATE
//
//////////////////////////////////////////////////////////
private static ColumnDesc[] _columnDescs =
{
new ColumnDesc( JDBCType.BIGINT, "bigint", 0L, 1L, null ),
new ColumnDesc( JDBCType.BLOB, "blob", makeBlob( "01234" ), makeBlob( "56789" ), null ),
new ColumnDesc( JDBCType.BOOLEAN, "boolean", Boolean.FALSE, Boolean.TRUE, null ),
new ColumnDesc( JDBCType.CHAR, "char( 5 )", "01234", "56789", null ),
new ColumnDesc( JDBCType.BINARY, "char( 5 ) for bit data", makeBinary( "01234" ), makeBinary( "56789" ), null ),
new ColumnDesc( JDBCType.CLOB, "clob", makeClob( "01234" ), makeClob( "56789" ), null ),
new ColumnDesc( JDBCType.DATE, "date", new Date( 0L ), new Date( 1L ), null ),
new ColumnDesc( JDBCType.DECIMAL, "decimal", new BigDecimal( 0 ), new BigDecimal( 1 ), null ),
new ColumnDesc( JDBCType.DOUBLE, "double", 0.0, 1.0, null ),
new ColumnDesc( JDBCType.FLOAT, "float", 0.0, 1.0, null ),
new ColumnDesc( JDBCType.INTEGER, "int", 0, 1, null ),
new ColumnDesc( JDBCType.LONGVARCHAR, "long varchar", "01234", "56789", null ),
new ColumnDesc( JDBCType.LONGVARBINARY, "long varchar for bit data", makeBinary( "01234" ), makeBinary( "56789" ), null ),
new ColumnDesc( JDBCType.NUMERIC, "numeric", new BigDecimal( 0 ), new BigDecimal( 1 ), null ),
new ColumnDesc( JDBCType.REAL, "real", 0.0F, 1F, null ),
new ColumnDesc( JDBCType.SMALLINT, "smallint", 0, 1, null ),
new ColumnDesc( JDBCType.TIME, "time", new Time( 0L ), new Time( 1L ), null ),
new ColumnDesc( JDBCType.TIMESTAMP, "timestamp", new Timestamp( 0L ), new Timestamp( 1L ), null ),
new ColumnDesc( JDBCType.JAVA_OBJECT, "Price", makePrice( 0L ), makePrice( 1L ), null ),
new ColumnDesc( JDBCType.VARCHAR, "varchar( 5 )", "01234", "56789", null ),
new ColumnDesc( JDBCType.VARBINARY, "varchar( 5 ) for bit data", makeBinary( "01234" ), makeBinary( "56789" ), null ),
// get/setObject on XML not supported because Derby does not support SQLXML yet
};
//////////////////////////////////////////////////////////
//
// NESTED CLASSES
//
//////////////////////////////////////////////////////////
public static final class ColumnDesc
{
public static final int VALUE_COUNT = 3;
public final JDBCType jdbcType;
public final String sqlType;
public final Object[] values;
public ColumnDesc
(
JDBCType jdbcType,
String sqlType,
Object... values
)
{
this.jdbcType = jdbcType;
this.sqlType = sqlType;
this.values = values;
if ( values.length != VALUE_COUNT )
{
throw new IllegalArgumentException( "Expected " + VALUE_COUNT + " values but saw " + values.length );
}
}
}
//////////////////////////////////////////////////////////
//
// CONSTRUCTOR
//
//////////////////////////////////////////////////////////
/**
* Create a new test with the given name.
*/
public PreparedStatementTest42( String name ) { super(name); }
//////////////////////////////////////////////////////////
//
// JUnit MACHINERY
//
//////////////////////////////////////////////////////////
public static Test suite()
{
BaseTestSuite suite = new BaseTestSuite("PreparedStatementTest42");
suite.addTest( TestConfiguration.defaultSuite( PreparedStatementTest42.class ) );
return suite;
}
//////////////////////////////////////////////////////////
//
// TESTS
//
//////////////////////////////////////////////////////////
/**
* <p>
* Test the setObject() overloads added by JDBC 4.2.
* </p>
*/
public void test_01_setObject() throws Exception
{
Connection conn = getConnection();
setupPrice( conn );
makeTable( conn );
populateTable( conn );
vetTableContents( conn );
updateColumns( conn );
}
private void makeTable( Connection conn ) throws Exception
{
StringBuilder buffer = new StringBuilder();
buffer.append( "create table allTypes\n(\n" );
buffer.append( "\tcol0\tint generated always as identity" );
for ( int i = 0; i < _columnDescs.length; i++ )
{
ColumnDesc cd = _columnDescs[ i ];
String columnName = "col" + (i+1);
String columnType = cd.sqlType;
buffer.append( "\n\t, " + columnName + "\t" + columnType );
}
buffer.append( "\n)" );
conn.prepareStatement( buffer.toString() ).execute();
}
private void populateTable( Connection conn ) throws Exception
{
PreparedStatement insert = prepareInsert( conn );
for ( int rowIdx = 0; rowIdx < ColumnDesc.VALUE_COUNT; rowIdx++ )
{
insertRow( insert, rowIdx );
}
for ( int rowIdx = 0; rowIdx < ColumnDesc.VALUE_COUNT; rowIdx++ )
{
for ( int colIdx = 0; colIdx < _columnDescs.length; colIdx++ )
{
ColumnDesc cd = _columnDescs[ colIdx ];
insert.setObject( colIdx + 1, cd.values[ rowIdx ], cd.jdbcType, 0 );
}
insert.executeUpdate();
}
// verify that certain SQLTypes are illegal
for ( int i = 0; i < ILLEGAL_JDBC_TYPES.length; i++ )
{
try {
insert.setObject( 1, null, ILLEGAL_JDBC_TYPES[ i ] );
fail( "setObject() should have failed." );
}
catch (SQLException se) { assertUnimplemented( se ); }
}
insert.close();
}
private static void assertUnimplemented( SQLException se ) throws Exception
{
assertSQLState( UNIMPLEMENTED_FEATURE, se );
assertTrue( se instanceof SQLFeatureNotSupportedException );
}
private PreparedStatement prepareInsert( Connection conn ) throws Exception
{
StringBuilder columnBuffer = new StringBuilder();
StringBuilder valuesBuffer = new StringBuilder();
columnBuffer.append( "( " );
valuesBuffer.append( "( " );
for ( int i = 0; i < _columnDescs.length; i++ )
{
String columnName = "col" + (i+1);
if ( i > 0 )
{
columnBuffer.append( ", " );
valuesBuffer.append( ", " );
}
columnBuffer.append( columnName );
valuesBuffer.append( "?" );
}
columnBuffer.append( " )" );
valuesBuffer.append( " )" );
PreparedStatement insert = conn.prepareStatement
( "insert into allTypes " + columnBuffer.toString() + " values " + valuesBuffer.toString() );
return insert;
}
private void insertRow( PreparedStatement insert, int rowIdx ) throws Exception
{
for ( int colIdx = 0; colIdx < _columnDescs.length; colIdx++ )
{
ColumnDesc cd = _columnDescs[ colIdx ];
insert.setObject( colIdx + 1, cd.values[ rowIdx ], cd.jdbcType );
}
insert.executeUpdate();
}
private void vetTableContents( Connection conn ) throws Exception
{
PreparedStatement selectPS = conn.prepareStatement( "select * from allTypes order by col0" );
ResultSet selectRS = selectPS.executeQuery();
int rowCount = 0;
while( selectRS.next() )
{
int rowIdx = rowCount % ColumnDesc.VALUE_COUNT;
for ( int colIdx = 0; colIdx < _columnDescs.length; colIdx++ )
{
Object expected = _columnDescs[ colIdx ].values[ rowIdx ];
// skip the first column, the primary key
assertObjectEquals( expected, selectRS.getObject( colIdx + 2 ) );
}
rowCount++;
}
selectRS.close();
selectPS.close();
}
// test the behavior of the new ResultSet methods added by JDBC 4.2
private void updateColumns( Connection conn ) throws Exception
{
PreparedStatement forUpdatePS = conn.prepareStatement
( "select * from allTypes for update", ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_UPDATABLE );
ResultSet updateRS = null;
// ResultSet.updateObject( int, Object, SQLType )
prepTable( conn, 0 );
updateRS = forUpdatePS.executeQuery();
updateRS.next();
for ( int colIdx = 0; colIdx < _columnDescs.length; colIdx++ )
{
ColumnDesc cd = _columnDescs[ colIdx ];
updateRS.updateObject( colIdx + 2, cd.values[ 1 ], cd.jdbcType );
}
updateRS.updateRow();
updateRS.close();
vetTable( conn, 1, 1 );
// ResultSet.updateObject( int, Object, SQLType, int )
prepTable( conn, 0 );
updateRS = forUpdatePS.executeQuery();
updateRS.next();
for ( int colIdx = 0; colIdx < _columnDescs.length; colIdx++ )
{
ColumnDesc cd = _columnDescs[ colIdx ];
updateRS.updateObject( colIdx + 2, cd.values[ 1 ], cd.jdbcType, 0 );
}
updateRS.updateRow();
updateRS.close();
vetTable( conn, 1, 1 );
// ResultSet.updateObject( String, Object, SQLType )
prepTable( conn, 0 );
updateRS = forUpdatePS.executeQuery();
updateRS.next();
for ( int colIdx = 0; colIdx < _columnDescs.length; colIdx++ )
{
ColumnDesc cd = _columnDescs[ colIdx ];
updateRS.updateObject( "col" + (colIdx+1), cd.values[ 1 ], cd.jdbcType );
}
updateRS.updateRow();
updateRS.close();
vetTable( conn, 1, 1 );
// ResultSet.updateObject( String, Object, SQLType, int )
prepTable( conn, 0 );
updateRS = forUpdatePS.executeQuery();
updateRS.next();
for ( int colIdx = 0; colIdx < _columnDescs.length; colIdx++ )
{
ColumnDesc cd = _columnDescs[ colIdx ];
updateRS.updateObject( "col" + (colIdx+1), cd.values[ 1 ], cd.jdbcType, 0 );
}
updateRS.updateRow();
updateRS.close();
vetTable( conn, 1, 1 );
// verify that ResultSet.updateObject() fails on bad SQLTypes
prepTable( conn, 0 );
updateRS = forUpdatePS.executeQuery();
updateRS.next();
println( "Testing ResultSet.updateObject() on illegal types." );
for ( int i = 0; i < ILLEGAL_JDBC_TYPES.length; i++ )
{
try {
updateRS.updateObject( 2, _columnDescs[ 0 ].values[ 1 ], ILLEGAL_JDBC_TYPES[ i ] );
fail( "updateObject() should have failed." );
}
catch (SQLException se) { assertUnimplemented( se ); }
try {
updateRS.updateObject( 2, _columnDescs[ 0 ].values[ 1 ], ILLEGAL_JDBC_TYPES[ i ], 0 );
fail( "updateObject() should have failed." );
}
catch (SQLException se) { assertUnimplemented( se ); }
try {
updateRS.updateObject( "col2", _columnDescs[ 0 ].values[ 1 ], ILLEGAL_JDBC_TYPES[ i ] );
fail( "updateObject() should have failed." );
}
catch (SQLException se) { assertUnimplemented( se ); }
try {
updateRS.updateObject( "col2", _columnDescs[ 0 ].values[ 1 ], ILLEGAL_JDBC_TYPES[ i ], 0 );
fail( "updateObject() should have failed." );
}
catch (SQLException se) { assertUnimplemented( se ); }
}
updateRS.close();
vetTable( conn, 0, 1 );
}
private void prepTable( Connection conn, int rowIdx ) throws Exception
{
conn.prepareStatement( "truncate table allTypes" ).execute();
PreparedStatement insert = prepareInsert( conn );
insertRow( insert, rowIdx );
vetTable( conn,rowIdx, 1 );
}
private void vetTable( Connection conn, int rowIdx, int expectedRowCount ) throws Exception
{
PreparedStatement selectPS = conn.prepareStatement( "select * from allTypes order by col0" );
ResultSet selectRS = selectPS.executeQuery();
int actualRowCount = 0;
while( selectRS.next() )
{
for ( int colIdx = 0; colIdx < _columnDescs.length; colIdx++ )
{
Object expected = _columnDescs[ colIdx ].values[ rowIdx ];
// skip the first column, the primary key
assertObjectEquals( expected, selectRS.getObject( colIdx + 2 ) );
}
actualRowCount++;
}
assertEquals( expectedRowCount, actualRowCount );
selectRS.close();
selectPS.close();
}
/**
* <p>
* Test the CallableStatement.registerObject() overloads added by JDBC 4.2.
* </p>
*/
public void test_02_registerObject() throws Exception
{
Connection conn = getConnection();
registerObjectTest( conn );
}
public static void registerObjectTest( Connection conn ) throws Exception
{
createSchemaObjects( conn );
vetProc( conn );
}
private static void createSchemaObjects( Connection conn ) throws Exception
{
setupPrice( conn );
createProc( conn );
}
private static void createProc( Connection conn ) throws Exception
{
StringBuilder buffer = new StringBuilder();
buffer.append( "create procedure unpackAllTypes( in valueIdx int" );
for ( int i = 0; i < _columnDescs.length; i++ )
{
ColumnDesc cd = _columnDescs[ i ];
String parameterName = "param" + (i+1);
String parameterType = cd.sqlType;
buffer.append( ", out " + parameterName + " " + parameterType );
}
buffer.append( " ) language java parameter style java no sql\n" );
buffer.append( "external name 'org.apache.derbyTesting.functionTests.tests.jdbc4.PreparedStatementTest42.unpackAllTypes'" );
String sqlText = buffer.toString();
println( sqlText );
conn.prepareStatement( sqlText ).execute();
}
private static void vetProc( Connection conn ) throws Exception
{
StringBuilder buffer = new StringBuilder();
buffer.append( "call unpackAllTypes( ?" );
for ( int i = 0; i < _columnDescs.length; i++ ) { buffer.append( ", ?" ); }
buffer.append( " )" );
String sqlText = buffer.toString();
println( sqlText );
CallableStatement cs = conn.prepareCall( sqlText );
int valueIdx;
int param;
// registerOutParameter( int, SQLType )
valueIdx = 0;
param = 1;
cs.setInt( param++, valueIdx );
for ( int i = 0; i < _columnDescs.length; i++ )
{
cs.registerOutParameter( param++, _columnDescs[ i ].jdbcType );
}
cs.execute();
vetCS( cs, valueIdx );
// registerOutParameter( int, SQLType, int )
valueIdx = 1;
param = 1;
cs.setInt( param++, valueIdx );
for ( int i = 0; i < _columnDescs.length; i++ )
{
cs.registerOutParameter( param++, _columnDescs[ i ].jdbcType, 0 );
}
cs.execute();
vetCS( cs, valueIdx );
// registerOutParameter( int, SQLType, String )
valueIdx = 0;
param = 1;
cs.setInt( param++, valueIdx );
for ( int i = 0; i < _columnDescs.length; i++ )
{
cs.registerOutParameter( param++, _columnDescs[ i ].jdbcType, "foo" );
}
cs.execute();
vetCS( cs, valueIdx );
// Negative test
valueIdx = 1;
param = 1;
cs.setInt( param++, valueIdx );
for ( int i = 0; i < ILLEGAL_JDBC_TYPES.length; i++ )
{
try {
cs.registerOutParameter( param++, ILLEGAL_JDBC_TYPES[ i ], 0 );
fail();
}
catch (SQLException se) { assertUnimplemented( se ); }
}
// registerOutParameter( String, SQLType )
try {
cs.registerOutParameter( "param1", _columnDescs[ 0 ].jdbcType );
fail( "Expected unimplemented feature." );
}
catch (SQLException se) { assertUnimplemented( se ); }
// registerOutParameter( String, SQLType, int )
try {
cs.registerOutParameter( "param1", _columnDescs[ 0 ].jdbcType, 0 );
fail( "Expected unimplemented feature." );
}
catch (SQLException se) { assertUnimplemented( se ); }
// registerOutParameter( String, SQLType, String )
try {
cs.registerOutParameter( "param1", _columnDescs[ 0 ].jdbcType, "foo" );
fail( "Expected unimplemented feature." );
}
catch (SQLException se) { assertUnimplemented( se ); }
// Make sure that the pre-JDBC4.2 overloads throw the correct exception too
valueIdx = 1;
param = 1;
cs.setInt( param++, valueIdx );
for ( int i = 0; i < ILLEGAL_SQL_TYPES.length; i++ )
{
int type = ILLEGAL_SQL_TYPES[ i ];
try {
cs.registerOutParameter( param++, type, 0 );
fail( "Should not have been able to register parameter type " + type );
} catch (SQLException se) { assertUnimplemented( se ); }
}
}
private static void vetCS( CallableStatement cs, int valueIdx )
throws Exception
{
int idx = 0;
int colIdx = 2;
assertObjectEquals( _columnDescs[ idx++ ].values[ valueIdx ], cs.getObject( colIdx++ ) );
assertObjectEquals( _columnDescs[ idx++ ].values[ valueIdx ], cs.getObject( colIdx++ ) );
assertObjectEquals( _columnDescs[ idx++ ].values[ valueIdx ], cs.getObject( colIdx++ ) );
assertObjectEquals( _columnDescs[ idx++ ].values[ valueIdx ], cs.getObject( colIdx++ ) );
assertObjectEquals( _columnDescs[ idx++ ].values[ valueIdx ], cs.getObject( colIdx++ ) );
assertObjectEquals( _columnDescs[ idx++ ].values[ valueIdx ], cs.getObject( colIdx++ ) );
assertObjectEquals( _columnDescs[ idx++ ].values[ valueIdx ], cs.getObject( colIdx++ ) );
assertObjectEquals( _columnDescs[ idx++ ].values[ valueIdx ], cs.getObject( colIdx++ ) );
assertObjectEquals( _columnDescs[ idx++ ].values[ valueIdx ], cs.getObject( colIdx++ ) );
assertObjectEquals( _columnDescs[ idx++ ].values[ valueIdx ], cs.getObject( colIdx++ ) );
assertObjectEquals( _columnDescs[ idx++ ].values[ valueIdx ], cs.getObject( colIdx++ ) );
assertObjectEquals( _columnDescs[ idx++ ].values[ valueIdx ], cs.getObject( colIdx++ ) );
assertObjectEquals( _columnDescs[ idx++ ].values[ valueIdx ], cs.getObject( colIdx++ ) );
assertObjectEquals( _columnDescs[ idx++ ].values[ valueIdx ], cs.getObject( colIdx++ ) );
assertObjectEquals( _columnDescs[ idx++ ].values[ valueIdx ], cs.getObject( colIdx++ ) );
assertObjectEquals( _columnDescs[ idx++ ].values[ valueIdx ], cs.getObject( colIdx++ ) );
assertObjectEquals( _columnDescs[ idx++ ].values[ valueIdx ], cs.getObject( colIdx++ ) );
assertObjectEquals( _columnDescs[ idx++ ].values[ valueIdx ], cs.getObject( colIdx++ ) );
assertObjectEquals( _columnDescs[ idx++ ].values[ valueIdx ], cs.getObject( colIdx++ ) );
assertObjectEquals( _columnDescs[ idx++ ].values[ valueIdx ], cs.getObject( colIdx++ ) );
assertObjectEquals( _columnDescs[ idx++ ].values[ valueIdx ], cs.getObject( colIdx++ ) );
}
/**
* <p>
* Test the CallableStatement.setObject() overloads added by JDBC 4.2.
* </p>
*/
public void test_03_setObject() throws Exception
{
Connection conn = getConnection();
callableStatementSetObjectTest( conn );
}
public static void callableStatementSetObjectTest( Connection conn ) throws Exception
{
createSetObjectSchemaObjects( conn );
vetSetObjectProc( conn );
}
private static void createSetObjectSchemaObjects( Connection conn ) throws Exception
{
setupPrice( conn );
createSetObjectProc( conn );
}
private static void createSetObjectProc( Connection conn ) throws Exception
{
StringBuilder buffer = new StringBuilder();
buffer.append( "create procedure packAllTypes( in valueIdx int" );
for ( int i = 0; i < _columnDescs.length; i++ )
{
ColumnDesc cd = _columnDescs[ i ];
String parameterName = "param" + (i+1);
String parameterType = cd.sqlType;
buffer.append( ", in " + parameterName + " " + parameterType );
}
buffer.append( " ) language java parameter style java no sql\n" );
buffer.append( "external name 'org.apache.derbyTesting.functionTests.tests.jdbc4.PreparedStatementTest42.packAllTypes'" );
String sqlText = buffer.toString();
println( sqlText );
conn.prepareStatement( sqlText ).execute();
}
private static void vetSetObjectProc( Connection conn ) throws Exception
{
StringBuilder buffer = new StringBuilder();
buffer.append( "call packAllTypes( ?" );
for ( int i = 0; i < _columnDescs.length; i++ ) { buffer.append( ", ?" ); }
buffer.append( " )" );
String sqlText = buffer.toString();
println( sqlText );
CallableStatement cs = conn.prepareCall( sqlText );
int valueIdx;
int param;
// setObject( int, Object, SQLType )
valueIdx = 0;
param = 1;
cs.setInt( param++, valueIdx );
for ( int i = 0; i < _columnDescs.length; i++ )
{
ColumnDesc cd = _columnDescs[ i ];
cs.setObject( param++, cd.values[ valueIdx ], cd.jdbcType );
}
cs.execute();
// setObject( int, Object, SQLType, int )
valueIdx = 1;
param = 1;
cs.setInt( param++, valueIdx );
for ( int i = 0; i < _columnDescs.length; i++ )
{
ColumnDesc cd = _columnDescs[ i ];
cs.setObject( param++, cd.values[ valueIdx ], cd.jdbcType, 0 );
}
cs.execute();
// setObject( String, Object, SQLType )
try {
ColumnDesc cd = _columnDescs[ 0 ];
cs.setObject( "param1", cd.values[ 0 ], cd.jdbcType );
fail( "Expected unimplemented feature." );
}
catch (SQLException se) { assertUnimplemented( se ); }
// setObject( String, Object, SQLType, int )
try {
ColumnDesc cd = _columnDescs[ 0 ];
cs.setObject( "param1", cd.values[ 0 ], cd.jdbcType, 0 );
fail( "Expected unimplemented feature." );
}
catch (SQLException se) { assertUnimplemented( se ); }
}
/**
* DERBY-6081: Verify that an SQLException is raised if the supplied
* SQLType argument is null. It used to fail with a NullPointerException.
*/
public void test_04_targetTypeIsNull() throws Exception
{
setAutoCommit(false);
// Test PreparedStatement.setObject() with targetType == null.
PreparedStatement ps = prepareStatement("values cast(? as int)");
try {
ps.setObject(1, 1, null);
fail("setObject should fail when type is null");
} catch (SQLException se) { assertUnimplemented( se ); }
try {
ps.setObject(1, 1, null, 1);
fail("setObject should fail when type is null");
} catch (SQLException se) { assertUnimplemented( se ); }
// Test ResultSet.updateObject() with targetType == null.
Statement s = createStatement(
ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_UPDATABLE);
s.execute("create table t(x int)");
s.execute("insert into t values 1");
ResultSet rs = s.executeQuery("select * from t");
assertTrue(rs.next());
try {
rs.updateObject("x", 1, null);
fail("updateObject should fail when type is null");
} catch (SQLException se) { assertUnimplemented( se ); }
try {
rs.updateObject(1, 1, null);
fail("updateObject should fail when type is null");
} catch (SQLException se) { assertUnimplemented( se ); }
try {
rs.updateObject("x", 1, null, 1);
fail("updateObject should fail when type is null");
} catch (SQLException se) { assertUnimplemented( se ); }
try {
rs.updateObject(1, 1, null, 1);
fail("updateObject should fail when type is null");
} catch (SQLException se) { assertUnimplemented( se ); }
// There should be no more rows.
JDBC.assertEmpty(rs);
}
//////////////////////////////////////////////////////////
//
// SQL ROUTINES
//
//////////////////////////////////////////////////////////
public static void unpackAllTypes
(
int valueIdx,
Long[] bigintValue,
Blob[] blobValue,
Boolean[] booleanValue,
String[] charValue,
byte[][] binaryValue,
Clob[] clobValue,
Date[] dateValue,
BigDecimal[] decimalValue,
Double[] doubleValue,
Double[] floatValue,
Integer[] intValue,
String[] longVarcharValue,
byte[][] longVarbinaryValue,
BigDecimal[] numericValue,
Float[] realValue,
Integer[] smallintValue,
Time[] timeValue,
Timestamp[] timestampValue,
Price[] priceValue,
String[] varcharValue,
byte[][] varbinaryValue
)
{
int colIdx = 0;
bigintValue[ 0 ] = (Long) _columnDescs[ colIdx++ ].values[ valueIdx ];
blobValue[ 0 ] = (Blob) _columnDescs[ colIdx++ ].values[ valueIdx ];
booleanValue[ 0 ] = (Boolean) _columnDescs[ colIdx++ ].values[ valueIdx ];
charValue[ 0 ] = (String) _columnDescs[ colIdx++ ].values[ valueIdx ];
binaryValue[ 0 ] = (byte[]) _columnDescs[ colIdx++ ].values[ valueIdx ];
clobValue[ 0 ] = (Clob) _columnDescs[ colIdx++ ].values[ valueIdx ];
dateValue[ 0 ] = (Date) _columnDescs[ colIdx++ ].values[ valueIdx ];
decimalValue[ 0 ] = (BigDecimal) _columnDescs[ colIdx++ ].values[ valueIdx ];
doubleValue[ 0 ] = (Double) _columnDescs[ colIdx++ ].values[ valueIdx ];
floatValue[ 0 ] = (Double) _columnDescs[ colIdx++ ].values[ valueIdx ];
intValue[ 0 ] = (Integer) _columnDescs[ colIdx++ ].values[ valueIdx ];
longVarcharValue[ 0 ] = (String) _columnDescs[ colIdx++ ].values[ valueIdx ];
longVarbinaryValue[ 0 ] = (byte[]) _columnDescs[ colIdx++ ].values[ valueIdx ];
numericValue[ 0 ] = (BigDecimal) _columnDescs[ colIdx++ ].values[ valueIdx ];
realValue[ 0 ] = (Float) _columnDescs[ colIdx++ ].values[ valueIdx ];
smallintValue[ 0 ] = (Integer) _columnDescs[ colIdx++ ].values[ valueIdx ];
timeValue[ 0 ] = (Time) _columnDescs[ colIdx++ ].values[ valueIdx ];
timestampValue[ 0 ] = (Timestamp) _columnDescs[ colIdx++ ].values[ valueIdx ];
priceValue[ 0 ] = (Price) _columnDescs[ colIdx++ ].values[ valueIdx ];
varcharValue[ 0 ] = (String) _columnDescs[ colIdx++ ].values[ valueIdx ];
varbinaryValue[ 0 ] = (byte[]) _columnDescs[ colIdx++ ].values[ valueIdx ];
}
public static void packAllTypes
(
int valueIdx,
Long bigintValue,
Blob blobValue,
Boolean booleanValue,
String charValue,
byte[] binaryValue,
Clob clobValue,
Date dateValue,
BigDecimal decimalValue,
Double doubleValue,
Double floatValue,
Integer intValue,
String longVarcharValue,
byte[] longVarbinaryValue,
BigDecimal numericValue,
Float realValue,
Integer smallintValue,
Time timeValue,
Timestamp timestampValue,
Price priceValue,
String varcharValue,
byte[] varbinaryValue
)
throws Exception
{
int colIdx = 0;
assertObjectEquals( _columnDescs[ colIdx++ ].values[ valueIdx ], bigintValue );
assertObjectEquals( _columnDescs[ colIdx++ ].values[ valueIdx ], blobValue );
assertObjectEquals( _columnDescs[ colIdx++ ].values[ valueIdx ], booleanValue );
assertObjectEquals( _columnDescs[ colIdx++ ].values[ valueIdx ], charValue );
assertObjectEquals( _columnDescs[ colIdx++ ].values[ valueIdx ], binaryValue );
assertObjectEquals( _columnDescs[ colIdx++ ].values[ valueIdx ], clobValue );
assertObjectEquals( _columnDescs[ colIdx++ ].values[ valueIdx ], dateValue );
assertObjectEquals( _columnDescs[ colIdx++ ].values[ valueIdx ], decimalValue );
assertObjectEquals( _columnDescs[ colIdx++ ].values[ valueIdx ], doubleValue );
assertObjectEquals( _columnDescs[ colIdx++ ].values[ valueIdx ], floatValue );
assertObjectEquals( _columnDescs[ colIdx++ ].values[ valueIdx ], intValue );
assertObjectEquals( _columnDescs[ colIdx++ ].values[ valueIdx ], longVarcharValue );
assertObjectEquals( _columnDescs[ colIdx++ ].values[ valueIdx ], longVarbinaryValue );
assertObjectEquals( _columnDescs[ colIdx++ ].values[ valueIdx ], numericValue );
assertObjectEquals( _columnDescs[ colIdx++ ].values[ valueIdx ], realValue );
assertObjectEquals( _columnDescs[ colIdx++ ].values[ valueIdx ], smallintValue );
assertObjectEquals( _columnDescs[ colIdx++ ].values[ valueIdx ], timeValue );
assertObjectEquals( _columnDescs[ colIdx++ ].values[ valueIdx ], timestampValue );
assertObjectEquals( _columnDescs[ colIdx++ ].values[ valueIdx ], priceValue );
assertObjectEquals( _columnDescs[ colIdx++ ].values[ valueIdx ], varcharValue );
assertObjectEquals( _columnDescs[ colIdx++ ].values[ valueIdx ], varbinaryValue );
}
//////////////////////////////////////////////////////////
//
// MINIONS
//
//////////////////////////////////////////////////////////
private static void setupPrice( Connection conn ) throws Exception
{
if ( !aliasExists( conn, "PRICE" ) )
{
conn.prepareStatement
(
"create type Price external name 'org.apache.derbyTesting.functionTests.tests.lang.Price' language java"
).execute();
}
}
private static boolean aliasExists( Connection conn, String aliasName ) throws Exception
{
PreparedStatement ps = conn.prepareStatement( "select count(*) from sys.sysaliases where alias = ?" );
ps.setString( 1, aliasName );
ResultSet rs = ps.executeQuery();
rs.next();
int retval = rs.getInt( 1 );
rs.close();
ps.close();
return (retval > 0);
}
private static Blob makeBlob( String contents )
{
return new HarmonySerialBlob( makeBinary( contents ) );
}
private static Clob makeClob( String contents )
{
return new HarmonySerialClob( contents );
}
private static byte[] makeBinary( String contents )
{
try {
return contents.getBytes( "UTF-8" );
}
catch (Exception e)
{
e.printStackTrace();
return null;
}
}
private static Price makePrice( long raw )
{
return Price.makePrice( new BigDecimal( raw ) );
}
public static void assertObjectEquals( Object expected, Object actual ) throws Exception
{
if ( expected == null )
{
assertNull( actual );
return;
}
else if ( actual == null )
{
assertNull( expected );
return;
}
else if ( expected instanceof Blob ) { assertEquals( (Blob) expected, (Blob) actual ); }
else if ( expected instanceof Clob ) { assertEquals( (Clob) expected, (Clob) actual ); }
else if ( expected instanceof byte[] ) { compareBytes( (byte[]) expected, (byte[]) actual ); }
else { assertEquals( expected.toString(), actual.toString() ); }
}
private static void compareBytes( byte[] left, byte[] right )
throws Exception
{
int count = left.length;
if ( count != right.length )
{
fail("left count = " + count + " but right count = " + right.length );
}
for ( int i = 0; i < count; i++ )
{
if ( left[ i ] != right[ i ] )
{
fail( "left[ " + i + " ] = " + left[ i ] + " but right[ " + i + " ] = " + right[ i ] );
}
}
}
}
|
google/j2cl | 37,069 | transpiler/java/com/google/j2cl/transpiler/passes/ConversionContextVisitor.java | /*
* Copyright 2015 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.j2cl.transpiler.passes;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static com.google.j2cl.transpiler.ast.AstUtils.isBoxableJsEnumType;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import com.google.j2cl.common.SourcePosition;
import com.google.j2cl.transpiler.ast.AbstractRewriter;
import com.google.j2cl.transpiler.ast.ArrayAccess;
import com.google.j2cl.transpiler.ast.ArrayCreationReference;
import com.google.j2cl.transpiler.ast.ArrayLength;
import com.google.j2cl.transpiler.ast.ArrayLiteral;
import com.google.j2cl.transpiler.ast.ArrayTypeDescriptor;
import com.google.j2cl.transpiler.ast.AssertStatement;
import com.google.j2cl.transpiler.ast.AstUtils;
import com.google.j2cl.transpiler.ast.BinaryExpression;
import com.google.j2cl.transpiler.ast.Block;
import com.google.j2cl.transpiler.ast.BreakStatement;
import com.google.j2cl.transpiler.ast.CastExpression;
import com.google.j2cl.transpiler.ast.ConditionalExpression;
import com.google.j2cl.transpiler.ast.ContinueStatement;
import com.google.j2cl.transpiler.ast.DeclaredTypeDescriptor;
import com.google.j2cl.transpiler.ast.EmbeddedStatement;
import com.google.j2cl.transpiler.ast.Expression;
import com.google.j2cl.transpiler.ast.ExpressionStatement;
import com.google.j2cl.transpiler.ast.ExpressionWithComment;
import com.google.j2cl.transpiler.ast.Field;
import com.google.j2cl.transpiler.ast.FieldDeclarationStatement;
import com.google.j2cl.transpiler.ast.ForEachStatement;
import com.google.j2cl.transpiler.ast.FunctionExpression;
import com.google.j2cl.transpiler.ast.HasSourcePosition;
import com.google.j2cl.transpiler.ast.IfStatement;
import com.google.j2cl.transpiler.ast.InstanceOfExpression;
import com.google.j2cl.transpiler.ast.Invocation;
import com.google.j2cl.transpiler.ast.JsConstructorReference;
import com.google.j2cl.transpiler.ast.JsDocCastExpression;
import com.google.j2cl.transpiler.ast.JsDocExpression;
import com.google.j2cl.transpiler.ast.JsForInStatement;
import com.google.j2cl.transpiler.ast.LabeledStatement;
import com.google.j2cl.transpiler.ast.Literal;
import com.google.j2cl.transpiler.ast.LocalClassDeclarationStatement;
import com.google.j2cl.transpiler.ast.LocalFunctionDeclarationStatement;
import com.google.j2cl.transpiler.ast.LoopStatement;
import com.google.j2cl.transpiler.ast.MemberDescriptor;
import com.google.j2cl.transpiler.ast.MemberReference;
import com.google.j2cl.transpiler.ast.MethodDescriptor.ParameterDescriptor;
import com.google.j2cl.transpiler.ast.MethodLike;
import com.google.j2cl.transpiler.ast.MethodReference;
import com.google.j2cl.transpiler.ast.MultiExpression;
import com.google.j2cl.transpiler.ast.NewArray;
import com.google.j2cl.transpiler.ast.Node;
import com.google.j2cl.transpiler.ast.PostfixExpression;
import com.google.j2cl.transpiler.ast.PostfixOperator;
import com.google.j2cl.transpiler.ast.PrefixExpression;
import com.google.j2cl.transpiler.ast.ReturnStatement;
import com.google.j2cl.transpiler.ast.Statement;
import com.google.j2cl.transpiler.ast.SwitchConstruct;
import com.google.j2cl.transpiler.ast.SwitchExpression;
import com.google.j2cl.transpiler.ast.SwitchStatement;
import com.google.j2cl.transpiler.ast.SynchronizedStatement;
import com.google.j2cl.transpiler.ast.ThisOrSuperReference;
import com.google.j2cl.transpiler.ast.ThrowStatement;
import com.google.j2cl.transpiler.ast.TryStatement;
import com.google.j2cl.transpiler.ast.TypeDescriptor;
import com.google.j2cl.transpiler.ast.TypeDescriptors;
import com.google.j2cl.transpiler.ast.UnaryExpression;
import com.google.j2cl.transpiler.ast.VariableDeclarationExpression;
import com.google.j2cl.transpiler.ast.VariableDeclarationFragment;
import com.google.j2cl.transpiler.ast.VariableReference;
import com.google.j2cl.transpiler.ast.YieldStatement;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Stream;
import javax.annotation.Nullable;
/**
* Driver for rewriting conversions in different contexts.
*
* <p>Traverses the AST, recognizing and categorizing different conversion contexts and dispatching
* conversion requests in that context.
*/
public final class ConversionContextVisitor extends AbstractRewriter {

  /** Base class for defining how to insert a conversion operation in a given conversion context. */
  protected abstract static class ContextRewriter {

    /** Returns the closest meaningful source position from an enclosing node. */
    public final SourcePosition getSourcePosition() {
      HasSourcePosition hasSourcePosition =
          (HasSourcePosition)
              visitor.getParent(
                  p ->
                      p instanceof HasSourcePosition hs
                          && hs.getSourcePosition() != SourcePosition.NONE);
      return hasSourcePosition != null
          ? hasSourcePosition.getSourcePosition()
          : SourcePosition.NONE;
    }

    /** Returns the chain of enclosing nodes from the current traversal. */
    public final Stream<Object> getParents() {
      return visitor.getParents();
    }

    /**
     * An {@code expression} is being used as if it was of a particular type.
     *
     * <p>E.g an assignment like {@code Integer i = 3}, where the {@code int} expression 3 is being
     * used as if it was of {@code Integer} reference type. Thus it might require conversion.
     *
     * <p>But it is also applicable to a receiver in a method call, e.g.
     *
     * <p>
     *
     * <pre><code>
     *   <T extends A> m(...) {
     *     T t;
     *     t.methodOfA()
     *   }
     * </code>
     * </pre>
     *
     * <p>where {@code t} might be used as if it were of type {@code A} but that is not guaranteed
     * at runtime.
     */
    @SuppressWarnings("unused")
    protected Expression rewriteTypeConversionContext(
        TypeDescriptor inferredTypeDescriptor,
        TypeDescriptor declaredTypeDescriptor,
        Expression expression) {
      return expression;
    }

    /**
     * An {@code expression} is being used as if it was of a particular type where a non-nullable
     * type is required.
     */
    protected Expression rewriteNonNullTypeConversionContext(
        TypeDescriptor inferredTypeDescriptor,
        TypeDescriptor declaredTypeDescriptor,
        Expression expression) {
      return rewriteTypeConversionContext(
          inferredTypeDescriptor.toNonNullable(),
          declaredTypeDescriptor.toNonNullable(),
          expression);
    }

    /** An {@code expression} that has been assigned to a field or variable of a particular type. */
    protected Expression rewriteAssignmentContext(
        TypeDescriptor inferredTypeDescriptor,
        TypeDescriptor declaredTypeDescriptor,
        Expression expression) {
      // Handle generically as a type conversion context.
      return rewriteTypeConversionContext(
          inferredTypeDescriptor, declaredTypeDescriptor, expression);
    }

    /**
     * An {@code expression} is an operand of a binary numeric expression where the other operand is
     * of type {@code otherOperandTypeDescriptor}.
     */
    @SuppressWarnings("unused")
    protected Expression rewriteBinaryNumericPromotionContext(
        TypeDescriptor otherOperandTypeDescriptor, Expression operand) {
      return operand;
    }

    /** An {@code expression} that is of a JsEnum type and needs boxing. */
    protected Expression rewriteJsEnumBoxingConversionContext(Expression expression) {
      return expression;
    }

    /** A {@code castExpression} requesting an explicit type conversion. */
    protected Expression rewriteCastContext(CastExpression castExpression) {
      return castExpression;
    }

    /** An {@code expression} that is subject of a switch statement. */
    protected Expression rewriteSwitchSubjectContext(Expression expression) {
      TypeDescriptor typeDescriptor = expression.getTypeDescriptor();
      if (!TypeDescriptors.isBoxedOrPrimitiveType(typeDescriptor)) {
        return rewriteNonNullTypeConversionContext(
            typeDescriptor, expression.getDeclaredTypeDescriptor(), expression);
      }
      // Boxed/primitive subjects: booleans take the boolean conversion, every
      // other numeric subject takes unary numeric promotion.
      return (TypeDescriptors.isJavaLangBoolean(typeDescriptor.toRawTypeDescriptor())
              || TypeDescriptors.isPrimitiveBoolean(typeDescriptor))
          ? rewriteBooleanConversionContext(expression)
          : rewriteUnaryNumericPromotionContext(expression);
    }

    /** An {@code expression} that is used as a qualifier of a member of a particular type. */
    protected Expression rewriteMemberQualifierContext(
        TypeDescriptor inferredTypeDescriptor,
        TypeDescriptor declaredTypeDescriptor,
        Expression expression) {
      // Handle generically as a type conversion context.
      return rewriteNonNullTypeConversionContext(
          inferredTypeDescriptor, declaredTypeDescriptor, expression);
    }

    /** An {@code argument} that is passed to a method as a parameter. */
    protected Expression rewriteMethodInvocationContext(
        ParameterDescriptor inferredParameterDescriptor,
        ParameterDescriptor declaredParameterDescriptor,
        Expression argument) {
      // By default handle method invocation parameter passing like assignments.
      return rewriteTypeConversionContext(
          inferredParameterDescriptor.getTypeDescriptor(),
          declaredParameterDescriptor.getTypeDescriptor(),
          argument);
    }

    /**
     * An {@code argument} that is passed to a method as the array containing the vararg arguments.
     */
    protected Expression rewriteVarargsParameterContext(
        ParameterDescriptor inferredParameterDescriptor,
        ParameterDescriptor declaredParameterDescriptor,
        Expression argument) {
      // The varargs argument has special handling to mimic directly passing the varargs arguments
      // as any other argument. The behavior can be overridden by overriding this method.
      return visitor.rewriteVarargsArgument(
          inferredParameterDescriptor, declaredParameterDescriptor, argument);
    }

    /** An {@code expression} that is used as a string. */
    protected Expression rewriteStringContext(Expression expression) {
      return expression;
    }

    /** An {@code operand} that is used in an unary numeric operation. */
    protected Expression rewriteUnaryNumericPromotionContext(Expression operand) {
      return operand;
    }

    /** An {@code operand} that is used as a boolean expression. */
    protected Expression rewriteBooleanConversionContext(Expression operand) {
      return operand;
    }

    // Back-reference to the driving visitor; set by the ConversionContextVisitor
    // constructor so context rewriters can query traversal state (parents,
    // source positions) and delegate varargs handling.
    private ConversionContextVisitor visitor;

    private void setVisitor(ConversionContextVisitor visitor) {
      this.visitor = visitor;
    }
  }

  private final ContextRewriter contextRewriter;

  public ConversionContextVisitor(ContextRewriter contextRewriter) {
    this.contextRewriter = contextRewriter;
    contextRewriter.setVisitor(this);
  }

  @Override
  public ArrayAccess rewriteArrayAccess(ArrayAccess arrayAccess) {
    Expression expression = arrayAccess.getArrayExpression();
    Expression arrayExpression =
        contextRewriter.rewriteNonNullTypeConversionContext(
            expression.getTypeDescriptor(), expression.getDeclaredTypeDescriptor(), expression);

    // The index is always int so gets rewritten with unary numeric promotion context
    Expression indexExpression =
        contextRewriter.rewriteUnaryNumericPromotionContext(arrayAccess.getIndexExpression());
    // Identity check: return the original node untouched when nothing changed,
    // so unmodified subtrees are not rebuilt. (This pattern recurs throughout.)
    if (arrayExpression == arrayAccess.getArrayExpression()
        && indexExpression == arrayAccess.getIndexExpression()) {
      return arrayAccess;
    }
    return ArrayAccess.Builder.from(arrayAccess)
        .setArrayExpression(arrayExpression)
        .setIndexExpression(indexExpression)
        .build();
  }

  @Override
  public ArrayLength rewriteArrayLength(ArrayLength arrayLength) {
    Expression expression = arrayLength.getArrayExpression();
    Expression arrayExpression =
        contextRewriter.rewriteNonNullTypeConversionContext(
            expression.getTypeDescriptor(), expression.getDeclaredTypeDescriptor(), expression);
    if (arrayExpression == arrayLength.getArrayExpression()) {
      return arrayLength;
    }
    return ArrayLength.Builder.from(arrayLength).setArrayExpression(arrayExpression).build();
  }

  @Override
  public ArrayLiteral rewriteArrayLiteral(ArrayLiteral arrayLiteral) {
    if (getParent() instanceof Invocation invocation) {
      if (arrayLiteral == Iterables.getLast(invocation.getArguments(), null)
          && invocation.getTarget().isVarargs()) {
        // The expressions in the array literals encapsulating the vararg parameters are handled
        // as invocation parameters so they are skipped here.
        return arrayLiteral;
      }
    }

    // assignment context
    ArrayTypeDescriptor typeDescriptor = arrayLiteral.getTypeDescriptor();
    ImmutableList<Expression> valueExpressions =
        arrayLiteral.getValueExpressions().stream()
            .map(
                valueExpression ->
                    rewriteTypeConversionContextWithoutDeclaration(
                        typeDescriptor.getComponentTypeDescriptor(), valueExpression))
            .collect(toImmutableList());
    if (valueExpressions.equals(arrayLiteral.getValueExpressions())) {
      return arrayLiteral;
    }
    return ArrayLiteral.newBuilder()
        .setTypeDescriptor(typeDescriptor)
        .setValueExpressions(valueExpressions)
        .build();
  }

  @Override
  public AssertStatement rewriteAssertStatement(AssertStatement assertStatement) {
    // The asserted condition is a boolean context; the optional message is used
    // as a plain Object value.
    Expression expression =
        contextRewriter.rewriteBooleanConversionContext(assertStatement.getExpression());
    Expression message =
        assertStatement.getMessage() == null
            ? null
            : rewriteTypeConversionContextWithoutDeclaration(
                TypeDescriptors.get().javaLangObject, assertStatement.getMessage());
    if (message == assertStatement.getMessage() && expression == assertStatement.getExpression()) {
      return assertStatement;
    }
    return AssertStatement.Builder.from(assertStatement)
        .setExpression(expression)
        .setMessage(message)
        .build();
  }

  @Override
  public BinaryExpression rewriteBinaryExpression(BinaryExpression binaryExpression) {
    // TODO(rluble): find out if what we do here in letting multiple conversion contexts perform
    // changes on the same binary expression, all in one pass, is the right thing or the wrong
    // thing.
    // NOTE: contexts are applied in sequence, so an operand rewritten by an
    // earlier context is the input to the later ones.
    Expression leftOperand = binaryExpression.getLeftOperand();
    Expression rightOperand = binaryExpression.getRightOperand();

    // assignment context
    if (AstUtils.matchesAssignmentContext(binaryExpression)) {
      rightOperand =
          contextRewriter.rewriteAssignmentContext(
              leftOperand.getTypeDescriptor(),
              leftOperand.getDeclaredTypeDescriptor(),
              rightOperand);
    }

    // binary numeric promotion context
    if (AstUtils.matchesBinaryNumericPromotionContext(binaryExpression)) {
      // TODO(b/206415539): Perform the appropriate rewriting of the lhs for compound assignments.
      if (!binaryExpression.getOperator().isCompoundAssignment()) {
        leftOperand =
            contextRewriter.rewriteBinaryNumericPromotionContext(
                rightOperand.getTypeDescriptor(), leftOperand);
      }
      rightOperand =
          contextRewriter.rewriteBinaryNumericPromotionContext(
              leftOperand.getTypeDescriptor(), rightOperand);
    }

    // string context
    if (AstUtils.matchesStringContext(binaryExpression)) {
      if (!binaryExpression.getOperator().isCompoundAssignment()) {
        leftOperand = contextRewriter.rewriteStringContext(leftOperand);
      }
      rightOperand = contextRewriter.rewriteStringContext(rightOperand);
    }

    /* See JLS 5.6.1. */
    if (binaryExpression.getOperator().isShiftOperator()) {
      if (!binaryExpression.getOperator().isCompoundAssignment()) {
        // the left operand matches a unary numeric promotion context.
        leftOperand = contextRewriter.rewriteUnaryNumericPromotionContext(leftOperand);
      }
      rightOperand = contextRewriter.rewriteUnaryNumericPromotionContext(rightOperand);
    }

    // boolean context
    if (AstUtils.matchesBooleanConversionContext(binaryExpression.getOperator())) {
      if (!binaryExpression.getOperator().isCompoundAssignment()) {
        leftOperand = contextRewriter.rewriteBooleanConversionContext(leftOperand);
      }
      rightOperand = contextRewriter.rewriteBooleanConversionContext(rightOperand);
    }

    // JsEnum boxing conversion context.
    if (AstUtils.matchesJsEnumBoxingConversionContext(binaryExpression)) {
      if (isBoxableJsEnumType(leftOperand.getDeclaredTypeDescriptor())) {
        leftOperand = contextRewriter.rewriteJsEnumBoxingConversionContext(leftOperand);
      }
      if (isBoxableJsEnumType(rightOperand.getDeclaredTypeDescriptor())) {
        rightOperand = contextRewriter.rewriteJsEnumBoxingConversionContext(rightOperand);
      }
    }

    if (leftOperand == binaryExpression.getLeftOperand()
        && rightOperand == binaryExpression.getRightOperand()) {
      return binaryExpression;
    }
    return BinaryExpression.Builder.from(binaryExpression)
        .setLeftOperand(leftOperand)
        .setRightOperand(rightOperand)
        .build();
  }

  @Override
  public Expression rewriteCastExpression(CastExpression castExpression) {
    // cast context
    return contextRewriter.rewriteCastContext(castExpression);
  }

  @Override
  public ConditionalExpression rewriteConditionalExpression(
      ConditionalExpression conditionalExpression) {
    // assignment context
    TypeDescriptor typeDescriptor = conditionalExpression.getTypeDescriptor();
    Expression conditionExpression =
        contextRewriter.rewriteBooleanConversionContext(
            conditionalExpression.getConditionExpression());
    Expression trueExpression =
        rewriteTypeConversionContextWithoutDeclaration(
            typeDescriptor, conditionalExpression.getTrueExpression());
    Expression falseExpression =
        rewriteTypeConversionContextWithoutDeclaration(
            typeDescriptor, conditionalExpression.getFalseExpression());
    if (conditionExpression == conditionalExpression.getConditionExpression()
        && trueExpression == conditionalExpression.getTrueExpression()
        && falseExpression == conditionalExpression.getFalseExpression()) {
      return conditionalExpression;
    }
    return ConditionalExpression.Builder.from(conditionalExpression)
        .setConditionExpression(conditionExpression)
        .setTrueExpression(trueExpression)
        .setFalseExpression(falseExpression)
        .build();
  }

  @Override
  public Field rewriteField(Field field) {
    if (field.getInitializer() == null) {
      // Nothing to rewrite.
      return field;
    }

    // assignment context
    Expression initializer =
        contextRewriter.rewriteAssignmentContext(
            field.getDescriptor().getTypeDescriptor(),
            field.getDescriptor().getDeclarationDescriptor().getTypeDescriptor(),
            field.getInitializer());
    if (initializer == field.getInitializer()) {
      return field;
    }
    return Field.Builder.from(field).setInitializer(initializer).build();
  }

  /**
   * Rewrites a member access qualifier; static members and null qualifiers are
   * returned untouched.
   */
  @Nullable
  private Expression rewriteInstanceQualifier(
      Expression qualifier, MemberDescriptor memberDescriptor) {
    if (memberDescriptor.isStatic() || qualifier == null) {
      return qualifier;
    }
    DeclaredTypeDescriptor enclosingTypeDescriptor = memberDescriptor.getEnclosingTypeDescriptor();
    DeclaredTypeDescriptor declaredEnclosingTypeDescriptor =
        memberDescriptor.getDeclarationDescriptor().getEnclosingTypeDescriptor();
    if (memberDescriptor.isConstructor()) {
      if (!enclosingTypeDescriptor.getTypeDeclaration().isCapturingEnclosingInstance()) {
        return qualifier;
      }
      // This is a constructor call of an inner class; hence the qualifier type is the enclosing
      // class of the class where the method is defined.
      enclosingTypeDescriptor = enclosingTypeDescriptor.getEnclosingTypeDescriptor();
      declaredEnclosingTypeDescriptor =
          declaredEnclosingTypeDescriptor.getEnclosingTypeDescriptor();
    }
    return contextRewriter.rewriteMemberQualifierContext(
        enclosingTypeDescriptor, declaredEnclosingTypeDescriptor, qualifier);
  }

  @Override
  public LoopStatement rewriteLoopStatement(LoopStatement loopStatement) {
    Expression conditionExpression =
        contextRewriter.rewriteBooleanConversionContext(loopStatement.getConditionExpression());
    if (conditionExpression == loopStatement.getConditionExpression()) {
      return loopStatement;
    }
    return LoopStatement.Builder.from(loopStatement)
        .setConditionExpression(conditionExpression)
        .build();
  }

  @Override
  public ForEachStatement rewriteForEachStatement(ForEachStatement forEachStatement) {
    Expression expression = forEachStatement.getIterableExpression();
    Expression iterableExpression =
        contextRewriter.rewriteNonNullTypeConversionContext(
            expression.getTypeDescriptor(), expression.getDeclaredTypeDescriptor(), expression);
    if (iterableExpression == forEachStatement.getIterableExpression()) {
      return forEachStatement;
    }
    return ForEachStatement.Builder.from(forEachStatement)
        .setIterableExpression(iterableExpression)
        .build();
  }

  @Override
  public JsForInStatement rewriteJsForInStatement(JsForInStatement forInStatement) {
    Expression expression = forInStatement.getIterableExpression();
    Expression iterableExpression =
        contextRewriter.rewriteNonNullTypeConversionContext(
            expression.getTypeDescriptor(), expression.getDeclaredTypeDescriptor(), expression);
    if (iterableExpression == forInStatement.getIterableExpression()) {
      return forInStatement;
    }
    return JsForInStatement.Builder.from(forInStatement)
        .setIterableExpression(iterableExpression)
        .build();
  }

  @Override
  public Expression rewriteExpression(Expression expression) {
    // Every expression needs to be handled explicitly or excluded here. This is to ensure when new
    // expressions are added to the AST that a conscious decision is made, and avoid the implicit
    // noop rewriting.
    // Expressions that don't need handling include:
    //   - literals (including class literals)
    //   - references (variable references, this and super)
    //   - jsdoc casts (since they are used to specifically avoid rewriting)
    //   - any other expression that only requires its subexpressions to be handled, but don't need
    //     any rewriting themselves (MultiExpression, ExpressionWithComment, VariableDeclarations)
    if (expression instanceof Literal // literals
        || expression instanceof JsConstructorReference
        // expressions that needs only subexpressions to be handled
        || expression instanceof MultiExpression
        || expression instanceof ExpressionWithComment
        || expression instanceof EmbeddedStatement
        || expression instanceof FunctionExpression
        || expression instanceof VariableDeclarationExpression
        || expression instanceof JsDocExpression
        // jsdoc casts
        || expression instanceof JsDocCastExpression
        // references
        || expression instanceof ThisOrSuperReference
        || expression instanceof VariableReference
        || expression instanceof ArrayCreationReference) {
      // These expressions do not need rewriting.
      return expression;
    }
    throw new IllegalStateException();
  }

  @Override
  public IfStatement rewriteIfStatement(IfStatement ifStatement) {
    Expression conditionExpression =
        contextRewriter.rewriteBooleanConversionContext(ifStatement.getConditionExpression());
    if (conditionExpression == ifStatement.getConditionExpression()) {
      return ifStatement;
    }
    return IfStatement.Builder.from(ifStatement)
        .setConditionExpression(conditionExpression)
        .build();
  }

  @Override
  public MemberReference rewriteInvocation(Invocation invocation) {
    // Rewrite arguments first, then delegate to the member-reference handling
    // for the qualifier.
    List<Expression> rewrittenArguments = rewriteMethodInvocationContextArguments(invocation);
    if (!rewrittenArguments.equals(invocation.getArguments())) {
      invocation = Invocation.Builder.from(invocation).setArguments(rewrittenArguments).build();
    }
    return rewriteMemberReference(invocation);
  }

  @Override
  public MemberReference rewriteMemberReference(MemberReference memberReference) {
    Expression rewrittenQualifier =
        rewriteInstanceQualifier(memberReference.getQualifier(), memberReference.getTarget());
    if (rewrittenQualifier == memberReference.getQualifier()) {
      return memberReference;
    }
    return MemberReference.Builder.from(memberReference).setQualifier(rewrittenQualifier).build();
  }

  @Override
  public Node rewriteMethodReference(MethodReference methodReference) {
    // NOTE(review): unlike rewriteMemberReference, this always rebuilds the node
    // even when the qualifier is unchanged.
    return MethodReference.Builder.from(methodReference)
        .setQualifier(
            rewriteInstanceQualifier(
                methodReference.getQualifier(), methodReference.getReferencedMethodDescriptor()))
        .build();
  }

  @Override
  public InstanceOfExpression rewriteInstanceOfExpression(
      InstanceOfExpression instanceOfExpression) {
    if (AstUtils.matchesJsEnumBoxingConversionContext(instanceOfExpression)) {
      Expression expression =
          contextRewriter.rewriteJsEnumBoxingConversionContext(
              instanceOfExpression.getExpression());
      if (expression != instanceOfExpression.getExpression()) {
        return InstanceOfExpression.Builder.from(instanceOfExpression)
            .setExpression(expression)
            .build();
      }
    }
    return instanceOfExpression;
  }

  @Override
  public NewArray rewriteNewArray(NewArray newArray) {
    // unary numeric promotion context
    ImmutableList<Expression> dimensionExpressions =
        newArray.getDimensionExpressions().stream()
            .map(contextRewriter::rewriteUnaryNumericPromotionContext)
            .collect(toImmutableList());
    if (dimensionExpressions.equals(newArray.getDimensionExpressions())) {
      return newArray;
    }
    return NewArray.Builder.from(newArray).setDimensionExpressions(dimensionExpressions).build();
  }

  @Override
  public UnaryExpression rewritePostfixExpression(PostfixExpression postfixExpression) {
    Expression operand = postfixExpression.getOperand();
    // unary numeric promotion context
    if (AstUtils.matchesUnaryNumericPromotionContext(postfixExpression)) {
      operand = contextRewriter.rewriteUnaryNumericPromotionContext(postfixExpression.getOperand());
    } else if (postfixExpression.getOperator() == PostfixOperator.NOT_NULL_ASSERTION) {
      operand =
          contextRewriter.rewriteTypeConversionContext(
              operand.getTypeDescriptor(), operand.getDeclaredTypeDescriptor(), operand);
    }
    if (operand == postfixExpression.getOperand()) {
      return postfixExpression;
    }
    // TODO(b/206415539): Perform the appropriate rewriting for compound assignments.
    return PostfixExpression.Builder.from(postfixExpression).setOperand(operand).build();
  }

  @Override
  public UnaryExpression rewritePrefixExpression(PrefixExpression prefixExpression) {
    Expression operand = prefixExpression.getOperand();
    if (AstUtils.matchesBooleanConversionContext(prefixExpression.getOperator())) {
      // unary boolean promotion context
      operand = contextRewriter.rewriteBooleanConversionContext(prefixExpression.getOperand());
    } else if (AstUtils.matchesUnaryNumericPromotionContext(prefixExpression)) {
      // unary numeric promotion context
      operand = contextRewriter.rewriteUnaryNumericPromotionContext(prefixExpression.getOperand());
    }
    if (operand == prefixExpression.getOperand()) {
      return prefixExpression;
    }
    // TODO(b/206415539): Perform the appropriate rewriting for compound assignments.
    return PrefixExpression.Builder.from(prefixExpression).setOperand(operand).build();
  }

  @Override
  public ReturnStatement rewriteReturnStatement(ReturnStatement returnStatement) {
    if (returnStatement.getExpression() == null) {
      // Nothing to rewrite.
      return returnStatement;
    }

    // assignment context
    Expression expression =
        contextRewriter.rewriteTypeConversionContext(
            getEnclosingMethodLike().getDescriptor().getReturnTypeDescriptor(),
            getEnclosingMethodLike()
                .getDescriptor()
                .getDeclarationDescriptor()
                .getReturnTypeDescriptor(),
            returnStatement.getExpression());
    if (expression == returnStatement.getExpression()) {
      return returnStatement;
    }
    return ReturnStatement.Builder.from(returnStatement).setExpression(expression).build();
  }

  @Override
  public YieldStatement rewriteYieldStatement(YieldStatement yieldStatement) {
    // assignment context
    Expression expression =
        rewriteTypeConversionContextWithoutDeclaration(
            getYieldTargetExpression().getTypeDescriptor(), yieldStatement.getExpression());
    if (expression == yieldStatement.getExpression()) {
      return yieldStatement;
    }
    return YieldStatement.Builder.from(yieldStatement).setExpression(expression).build();
  }

  @Override
  public Statement rewriteStatement(Statement statement) {
    // Every statement needs to be handled explicitly or excluded here.
    if (statement instanceof ExpressionStatement
        || statement instanceof Block
        || statement instanceof BreakStatement
        || statement instanceof ContinueStatement
        || statement instanceof FieldDeclarationStatement
        || statement instanceof TryStatement
        || statement instanceof LabeledStatement
        || statement instanceof LocalClassDeclarationStatement
        || statement instanceof LocalFunctionDeclarationStatement) {
      // These statements do not need rewriting.
      return statement;
    }
    throw new IllegalStateException();
  }

  @Override
  public SwitchExpression rewriteSwitchExpression(SwitchExpression switchExpression) {
    return rewriteSwitchConstruct(switchExpression);
  }

  @Override
  public SwitchStatement rewriteSwitchStatement(SwitchStatement switchStatement) {
    return rewriteSwitchConstruct(switchStatement);
  }

  /** Shared handling for switch statements and switch expressions. */
  private <T extends SwitchConstruct<T>> T rewriteSwitchConstruct(T switchConstruct) {
    Expression expression =
        contextRewriter.rewriteSwitchSubjectContext(switchConstruct.getExpression());
    if (expression == switchConstruct.getExpression()) {
      return switchConstruct;
    }
    return switchConstruct.toBuilder().setExpression(expression).build();
  }

  @Override
  public SynchronizedStatement rewriteSynchronizedStatement(
      SynchronizedStatement synchronizedStatement) {
    // unary numeric promotion
    return SynchronizedStatement.Builder.from(synchronizedStatement)
        .setExpression(
            contextRewriter.rewriteNonNullTypeConversionContext(
                synchronizedStatement.getExpression().getTypeDescriptor(),
                TypeDescriptors.get().javaLangObject,
                synchronizedStatement.getExpression()))
        .build();
  }

  @Override
  public ThrowStatement rewriteThrowStatement(ThrowStatement throwStatement) {
    Expression expression = throwStatement.getExpression();
    return ThrowStatement.Builder.from(throwStatement)
        .setExpression(
            contextRewriter.rewriteNonNullTypeConversionContext(
                TypeDescriptors.get().javaLangThrowable,
                TypeDescriptors.get().javaLangThrowable,
                expression))
        .build();
  }

  @Override
  public VariableDeclarationFragment rewriteVariableDeclarationFragment(
      VariableDeclarationFragment variableDeclaration) {
    if (variableDeclaration.getInitializer() == null) {
      // Nothing to rewrite.
      return variableDeclaration;
    }

    // assignment context
    Expression initializer =
        rewriteTypeConversionContextWithoutDeclaration(
            variableDeclaration.getVariable().getTypeDescriptor(),
            variableDeclaration.getInitializer());
    if (initializer == variableDeclaration.getInitializer()) {
      return variableDeclaration;
    }
    return VariableDeclarationFragment.Builder.from(variableDeclaration)
        .setInitializer(initializer)
        .build();
  }

  /** Returns the innermost enclosing method-like node from the traversal stack. */
  private MethodLike getEnclosingMethodLike() {
    return (MethodLike) getParent(MethodLike.class::isInstance);
  }

  /** Returns the switch expression or embedded statement a yield targets. */
  private Expression getYieldTargetExpression() {
    return (Expression)
        getParent(o -> o instanceof SwitchExpression || o instanceof EmbeddedStatement);
  }

  // Convenience overload for contexts where the inferred and declared types
  // are the same descriptor.
  private Expression rewriteTypeConversionContextWithoutDeclaration(
      TypeDescriptor toTypeDescriptor, Expression expression) {
    return contextRewriter.rewriteTypeConversionContext(
        toTypeDescriptor, toTypeDescriptor, expression);
  }

  private List<Expression> rewriteMethodInvocationContextArguments(Invocation invocation) {
    ImmutableList<ParameterDescriptor> inferredParameterDescriptors =
        invocation.getTarget().getParameterDescriptors();
    ImmutableList<ParameterDescriptor> declaredParameterDescriptors =
        invocation.getTarget().getDeclarationDescriptor().getParameterDescriptors();
    List<Expression> argumentExpressions = invocation.getArguments();

    // Look at each param/argument pair.
    List<Expression> newArgumentExpressions = new ArrayList<>();
    for (int argIndex = 0; argIndex < inferredParameterDescriptors.size(); argIndex++) {
      ParameterDescriptor inferredParameterDescriptor = inferredParameterDescriptors.get(argIndex);
      ParameterDescriptor declaredParameterDescriptor = declaredParameterDescriptors.get(argIndex);
      Expression argumentExpression = argumentExpressions.get(argIndex);
      newArgumentExpressions.add(
          declaredParameterDescriptor.isVarargs()
              // Handle vararg parameters that at this point are inside vararg literals by
              // delegating explicitly to an overrideable handler.
              ? contextRewriter.rewriteVarargsParameterContext(
                  inferredParameterDescriptor, declaredParameterDescriptor, argumentExpression)
              : contextRewriter.rewriteMethodInvocationContext(
                  inferredParameterDescriptor, declaredParameterDescriptor, argumentExpression));
    }
    return newArgumentExpressions;
  }

  /** Implements the rewriting of varargs arguments. */
  private Expression rewriteVarargsArgument(
      ParameterDescriptor inferredParameterDescriptor,
      ParameterDescriptor declaredParameterDescriptor,
      Expression expression) {
    if (!(expression instanceof ArrayLiteral arrayLiteral)) {
      // The vararg was passed directly as an array, not as separate arguments. Process it as one
      // argument.
      return contextRewriter.rewriteMethodInvocationContext(
          inferredParameterDescriptor, declaredParameterDescriptor, expression);
    }

    return arrayLiteral.toBuilder()
        .setValueExpressions(
            arrayLiteral.getValueExpressions().stream()
                .map(
                    // Process each element of the array literal the same way a single argument
                    // is processed.
                    e ->
                        contextRewriter.rewriteMethodInvocationContext(
                            toComponentParameterDescriptor(inferredParameterDescriptor),
                            toComponentParameterDescriptor(declaredParameterDescriptor),
                            e))
                .collect(toImmutableList()))
        .build();
  }

  /**
   * Converts the varargs ParameterDescriptor into the equivalent ParameterDescriptor for each
   * individual argument.
   */
  private static ParameterDescriptor toComponentParameterDescriptor(
      ParameterDescriptor parameterDescriptor) {
    return parameterDescriptor.toBuilder()
        .setVarargs(false)
        .setTypeDescriptor(
            ((ArrayTypeDescriptor) parameterDescriptor.getTypeDescriptor())
                .getComponentTypeDescriptor())
        .build();
  }
}
|
openjdk/jdk8 | 36,966 | jdk/src/share/classes/javax/swing/text/MaskFormatter.java | /*
* Copyright (c) 2000, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package javax.swing.text;
import java.io.*;
import java.text.*;
import java.util.*;
import javax.swing.*;
/**
* <code>MaskFormatter</code> is used to format and edit strings. The behavior
* of a <code>MaskFormatter</code> is controlled by way of a String mask
* that specifies the valid characters that can be contained at a particular
* location in the <code>Document</code> model. The following characters can
* be specified:
*
* <table border=1 summary="Valid characters and their descriptions">
* <tr>
* <th>Character </th>
* <th><p style="text-align:left">Description</p></th>
* </tr>
* <tr>
* <td>#</td>
* <td>Any valid number, uses <code>Character.isDigit</code>.</td>
* </tr>
* <tr>
* <td>'</td>
* <td>Escape character, used to escape any of the
* special formatting characters.</td>
* </tr>
* <tr>
* <td>U</td><td>Any character (<code>Character.isLetter</code>). All
* lowercase letters are mapped to upper case.</td>
* </tr>
* <tr><td>L</td><td>Any character (<code>Character.isLetter</code>). All
* upper case letters are mapped to lower case.</td>
* </tr>
* <tr><td>A</td><td>Any character or number (<code>Character.isLetter</code>
* or <code>Character.isDigit</code>)</td>
* </tr>
* <tr><td>?</td><td>Any character
* (<code>Character.isLetter</code>).</td>
* </tr>
* <tr><td>*</td><td>Anything.</td></tr>
* <tr><td>H</td><td>Any hex character (0-9, a-f or A-F).</td></tr>
* </table>
*
* <p>
* Typically characters correspond to one char, but in certain languages this
* is not the case. The mask is on a per character basis, and will thus
* adjust to fit as many chars as are needed.
* <p>
* You can further restrict the characters that can be input by the
* <code>setInvalidCharacters</code> and <code>setValidCharacters</code>
* methods. <code>setInvalidCharacters</code> allows you to specify
* which characters are not legal. <code>setValidCharacters</code> allows
* you to specify which characters are valid. For example, the following
* code block is equivalent to a mask of '0xHHH' with no invalid/valid
* characters:
* <pre>
* MaskFormatter formatter = new MaskFormatter("0x***");
* formatter.setValidCharacters("0123456789abcdefABCDEF");
* </pre>
* <p>
* When initially formatting a value if the length of the string is
* less than the length of the mask, two things can happen. Either
* the placeholder string will be used, or the placeholder character will
* be used. Precedence is given to the placeholder string. For example:
* <pre>
* MaskFormatter formatter = new MaskFormatter("###-####");
* formatter.setPlaceholderCharacter('_');
* formatter.getDisplayValue(tf, "123");
* </pre>
* <p>
* Would result in the string '123-____'. If
* <code>setPlaceholder("555-1212")</code> was invoked '123-1212' would
* result. The placeholder String is only used on the initial format,
* on subsequent formats only the placeholder character will be used.
* <p>
* If a <code>MaskFormatter</code> is configured to only allow valid characters
* (<code>setAllowsInvalid(false)</code>) literal characters will be skipped as
* necessary when editing. Consider a <code>MaskFormatter</code> with
* the mask "###-####" and current value "555-1212". Using the right
* arrow key to navigate through the field will result in (| indicates the
* position of the caret):
* <pre>
* |555-1212
* 5|55-1212
* 55|5-1212
* 555-|1212
* 555-1|212
* </pre>
* The '-' is a literal (non-editable) character, and is skipped.
* <p>
* Similar behavior will result when editing. Consider inserting the string
* '123-45' and '12345' into the <code>MaskFormatter</code> in the
* previous example. Both inserts will result in the same String,
* '123-45__'. When <code>MaskFormatter</code>
* is processing the insert at character position 3 (the '-'), two things can
* happen:
* <ol>
* <li>If the inserted character is '-', it is accepted.
* <li>If the inserted character matches the mask for the next non-literal
* character, it is accepted at the new location.
* <li>Anything else results in an invalid edit
* </ol>
* <p>
* By default <code>MaskFormatter</code> will not allow invalid edits, you can
* change this with the <code>setAllowsInvalid</code> method, and will
* commit edits on valid edits (use the <code>setCommitsOnValidEdit</code> to
* change this).
* <p>
* By default, <code>MaskFormatter</code> is in overwrite mode. That is as
* characters are typed a new character is not inserted, rather the character
* at the current location is replaced with the newly typed character. You
* can change this behavior by way of the method <code>setOverwriteMode</code>.
* <p>
* <strong>Warning:</strong>
* Serialized objects of this class will not be compatible with
* future Swing releases. The current serialization support is
* appropriate for short term storage or RMI between applications running
* the same version of Swing. As of 1.4, support for long term storage
* of all JavaBeans™
* has been added to the <code>java.beans</code> package.
* Please see {@link java.beans.XMLEncoder}.
*
* @since 1.4
*/
public class MaskFormatter extends DefaultFormatter {
// Potential values in mask.
private static final char DIGIT_KEY = '#';
private static final char LITERAL_KEY = '\'';
private static final char UPPERCASE_KEY = 'U';
private static final char LOWERCASE_KEY = 'L';
private static final char ALPHA_NUMERIC_KEY = 'A';
private static final char CHARACTER_KEY = '?';
private static final char ANYTHING_KEY = '*';
private static final char HEX_KEY = 'H';
private static final MaskCharacter[] EmptyMaskChars = new MaskCharacter[0];
/** The user specified mask. */
private String mask;
private transient MaskCharacter[] maskChars;
/** List of valid characters. */
private String validCharacters;
/** List of invalid characters. */
private String invalidCharacters;
/** String used for the passed in value if it does not completely
* fill the mask. */
private String placeholderString;
/** String used to represent characters not present. */
private char placeholder;
/** Indicates if the value contains the literal characters. */
private boolean containsLiteralChars;
/**
* Creates a MaskFormatter with no mask.
*/
public MaskFormatter() {
setAllowsInvalid(false);
containsLiteralChars = true;
maskChars = EmptyMaskChars;
placeholder = ' ';
}
/**
* Creates a <code>MaskFormatter</code> with the specified mask.
* A <code>ParseException</code>
* will be thrown if <code>mask</code> is an invalid mask.
*
* @throws ParseException if mask does not contain valid mask characters
*/
public MaskFormatter(String mask) throws ParseException {
this();
setMask(mask);
}
/**
* Sets the mask dictating the legal characters.
* This will throw a <code>ParseException</code> if <code>mask</code> is
* not valid.
*
* @throws ParseException if mask does not contain valid mask characters
*/
public void setMask(String mask) throws ParseException {
this.mask = mask;
updateInternalMask();
}
/**
* Returns the formatting mask.
*
* @return Mask dictating legal character values.
*/
public String getMask() {
return mask;
}
/**
* Allows for further restricting of the characters that can be input.
* Only characters specified in the mask, not in the
* <code>invalidCharacters</code>, and in
* <code>validCharacters</code> will be allowed to be input. Passing
* in null (the default) implies the valid characters are only bound
* by the mask and the invalid characters.
*
* @param validCharacters If non-null, specifies legal characters.
*/
public void setValidCharacters(String validCharacters) {
this.validCharacters = validCharacters;
}
/**
* Returns the valid characters that can be input.
*
* @return Legal characters
*/
public String getValidCharacters() {
return validCharacters;
}
/**
* Allows for further restricting of the characters that can be input.
* Only characters specified in the mask, not in the
* <code>invalidCharacters</code>, and in
* <code>validCharacters</code> will be allowed to be input. Passing
* in null (the default) implies the valid characters are only bound
* by the mask and the valid characters.
*
* @param invalidCharacters If non-null, specifies illegal characters.
*/
public void setInvalidCharacters(String invalidCharacters) {
this.invalidCharacters = invalidCharacters;
}
/**
* Returns the characters that are not valid for input.
*
* @return illegal characters.
*/
public String getInvalidCharacters() {
return invalidCharacters;
}
/**
* Sets the string to use if the value does not completely fill in
* the mask. A null value implies the placeholder char should be used.
*
* @param placeholder String used when formatting if the value does not
* completely fill the mask
*/
public void setPlaceholder(String placeholder) {
this.placeholderString = placeholder;
}
/**
* Returns the String to use if the value does not completely fill
* in the mask.
*
* @return String used when formatting if the value does not
* completely fill the mask
*/
public String getPlaceholder() {
return placeholderString;
}
/**
* Sets the character to use in place of characters that are not present
* in the value, ie the user must fill them in. The default value is
* a space.
* <p>
* This is only applicable if the placeholder string has not been
* specified, or does not completely fill in the mask.
*
* @param placeholder Character used when formatting if the value does not
* completely fill the mask
*/
public void setPlaceholderCharacter(char placeholder) {
this.placeholder = placeholder;
}
/**
* Returns the character to use in place of characters that are not present
* in the value, ie the user must fill them in.
*
* @return Character used when formatting if the value does not
* completely fill the mask
*/
public char getPlaceholderCharacter() {
return placeholder;
}
/**
* If true, the returned value and set value will also contain the literal
* characters in mask.
* <p>
* For example, if the mask is <code>'(###) ###-####'</code>, the
* current value is <code>'(415) 555-1212'</code>, and
* <code>valueContainsLiteralCharacters</code> is
* true <code>stringToValue</code> will return
* <code>'(415) 555-1212'</code>. On the other hand, if
* <code>valueContainsLiteralCharacters</code> is false,
* <code>stringToValue</code> will return <code>'4155551212'</code>.
*
* @param containsLiteralChars Used to indicate if literal characters in
* mask should be returned in stringToValue
*/
public void setValueContainsLiteralCharacters(
boolean containsLiteralChars) {
this.containsLiteralChars = containsLiteralChars;
}
/**
* Returns true if <code>stringToValue</code> should return literal
* characters in the mask.
*
* @return True if literal characters in mask should be returned in
* stringToValue
*/
public boolean getValueContainsLiteralCharacters() {
return containsLiteralChars;
}
/**
* Parses the text, returning the appropriate Object representation of
* the String <code>value</code>. This strips the literal characters as
* necessary and invokes supers <code>stringToValue</code>, so that if
* you have specified a value class (<code>setValueClass</code>) an
* instance of it will be created. This will throw a
* <code>ParseException</code> if the value does not match the current
* mask. Refer to {@link #setValueContainsLiteralCharacters} for details
* on how literals are treated.
*
* @throws ParseException if there is an error in the conversion
* @param value String to convert
* @see #setValueContainsLiteralCharacters
* @return Object representation of text
*/
public Object stringToValue(String value) throws ParseException {
return stringToValue(value, true);
}
/**
* Returns a String representation of the Object <code>value</code>
* based on the mask. Refer to
* {@link #setValueContainsLiteralCharacters} for details
* on how literals are treated.
*
* @throws ParseException if there is an error in the conversion
* @param value Value to convert
* @see #setValueContainsLiteralCharacters
* @return String representation of value
*/
public String valueToString(Object value) throws ParseException {
String sValue = (value == null) ? "" : value.toString();
StringBuilder result = new StringBuilder();
String placeholder = getPlaceholder();
int[] valueCounter = { 0 };
append(result, sValue, valueCounter, placeholder, maskChars);
return result.toString();
}
/**
* Installs the <code>DefaultFormatter</code> onto a particular
* <code>JFormattedTextField</code>.
* This will invoke <code>valueToString</code> to convert the
* current value from the <code>JFormattedTextField</code> to
* a String. This will then install the <code>Action</code>s from
* <code>getActions</code>, the <code>DocumentFilter</code>
* returned from <code>getDocumentFilter</code> and the
* <code>NavigationFilter</code> returned from
* <code>getNavigationFilter</code> onto the
* <code>JFormattedTextField</code>.
* <p>
* Subclasses will typically only need to override this if they
* wish to install additional listeners on the
* <code>JFormattedTextField</code>.
* <p>
* If there is a <code>ParseException</code> in converting the
* current value to a String, this will set the text to an empty
* String, and mark the <code>JFormattedTextField</code> as being
* in an invalid state.
* <p>
* While this is a public method, this is typically only useful
* for subclassers of <code>JFormattedTextField</code>.
* <code>JFormattedTextField</code> will invoke this method at
* the appropriate times when the value changes, or its internal
* state changes.
*
* @param ftf JFormattedTextField to format for, may be null indicating
* uninstall from current JFormattedTextField.
*/
public void install(JFormattedTextField ftf) {
super.install(ftf);
// valueToString doesn't throw, but stringToValue does, need to
// update the editValid state appropriately
if (ftf != null) {
Object value = ftf.getValue();
try {
stringToValue(valueToString(value));
} catch (ParseException pe) {
setEditValid(false);
}
}
}
/**
* Actual <code>stringToValue</code> implementation.
* If <code>completeMatch</code> is true, the value must exactly match
* the mask, on the other hand if <code>completeMatch</code> is false
* the string must match the mask or the placeholder string.
*/
private Object stringToValue(String value, boolean completeMatch) throws
ParseException {
int errorOffset;
if ((errorOffset = getInvalidOffset(value, completeMatch)) == -1) {
if (!getValueContainsLiteralCharacters()) {
value = stripLiteralChars(value);
}
return super.stringToValue(value);
}
throw new ParseException("stringToValue passed invalid value",
errorOffset);
}
/**
* Returns -1 if the passed in string is valid, otherwise the index of
* the first bogus character is returned.
*/
private int getInvalidOffset(String string, boolean completeMatch) {
int iLength = string.length();
if (iLength != getMaxLength()) {
// trivially false
return iLength;
}
for (int counter = 0, max = string.length(); counter < max; counter++){
char aChar = string.charAt(counter);
if (!isValidCharacter(counter, aChar) &&
(completeMatch || !isPlaceholder(counter, aChar))) {
return counter;
}
}
return -1;
}
/**
* Invokes <code>append</code> on the mask characters in
* <code>mask</code>.
*/
private void append(StringBuilder result, String value, int[] index,
String placeholder, MaskCharacter[] mask)
throws ParseException {
for (int counter = 0, maxCounter = mask.length;
counter < maxCounter; counter++) {
mask[counter].append(result, value, index, placeholder);
}
}
/**
* Updates the internal representation of the mask.
*/
private void updateInternalMask() throws ParseException {
String mask = getMask();
ArrayList<MaskCharacter> fixed = new ArrayList<MaskCharacter>();
ArrayList<MaskCharacter> temp = fixed;
if (mask != null) {
for (int counter = 0, maxCounter = mask.length();
counter < maxCounter; counter++) {
char maskChar = mask.charAt(counter);
switch (maskChar) {
case DIGIT_KEY:
temp.add(new DigitMaskCharacter());
break;
case LITERAL_KEY:
if (++counter < maxCounter) {
maskChar = mask.charAt(counter);
temp.add(new LiteralCharacter(maskChar));
}
// else: Could actually throw if else
break;
case UPPERCASE_KEY:
temp.add(new UpperCaseCharacter());
break;
case LOWERCASE_KEY:
temp.add(new LowerCaseCharacter());
break;
case ALPHA_NUMERIC_KEY:
temp.add(new AlphaNumericCharacter());
break;
case CHARACTER_KEY:
temp.add(new CharCharacter());
break;
case ANYTHING_KEY:
temp.add(new MaskCharacter());
break;
case HEX_KEY:
temp.add(new HexCharacter());
break;
default:
temp.add(new LiteralCharacter(maskChar));
break;
}
}
}
if (fixed.size() == 0) {
maskChars = EmptyMaskChars;
}
else {
maskChars = new MaskCharacter[fixed.size()];
fixed.toArray(maskChars);
}
}
/**
* Returns the MaskCharacter at the specified location.
*/
private MaskCharacter getMaskCharacter(int index) {
if (index >= maskChars.length) {
return null;
}
return maskChars[index];
}
/**
* Returns true if the placeholder character matches aChar.
*/
private boolean isPlaceholder(int index, char aChar) {
return (getPlaceholderCharacter() == aChar);
}
/**
* Returns true if the passed in character matches the mask at the
* specified location.
*/
private boolean isValidCharacter(int index, char aChar) {
return getMaskCharacter(index).isValidCharacter(aChar);
}
/**
* Returns true if the character at the specified location is a literal,
* that is it can not be edited.
*/
private boolean isLiteral(int index) {
return getMaskCharacter(index).isLiteral();
}
/**
* Returns the maximum length the text can be.
*/
private int getMaxLength() {
return maskChars.length;
}
/**
* Returns the literal character at the specified location.
*/
private char getLiteral(int index) {
return getMaskCharacter(index).getChar((char)0);
}
/**
* Returns the character to insert at the specified location based on
* the passed in character. This provides a way to map certain sets
* of characters to alternative values (lowercase to
* uppercase...).
*/
private char getCharacter(int index, char aChar) {
return getMaskCharacter(index).getChar(aChar);
}
/**
* Removes the literal characters from the passed in string.
*/
private String stripLiteralChars(String string) {
StringBuilder sb = null;
int last = 0;
for (int counter = 0, max = string.length(); counter < max; counter++){
if (isLiteral(counter)) {
if (sb == null) {
sb = new StringBuilder();
if (counter > 0) {
sb.append(string.substring(0, counter));
}
last = counter + 1;
}
else if (last != counter) {
sb.append(string.substring(last, counter));
}
last = counter + 1;
}
}
if (sb == null) {
// Assume the mask isn't all literals.
return string;
}
else if (last != string.length()) {
if (sb == null) {
return string.substring(last);
}
sb.append(string.substring(last));
}
return sb.toString();
}
/**
* Subclassed to update the internal representation of the mask after
* the default read operation has completed.
*/
private void readObject(ObjectInputStream s)
throws IOException, ClassNotFoundException {
s.defaultReadObject();
try {
updateInternalMask();
} catch (ParseException pe) {
// assert();
}
}
/**
* Returns true if the MaskFormatter allows invalid, or
* the offset is less than the max length and the character at
* <code>offset</code> is a literal.
*/
boolean isNavigatable(int offset) {
if (!getAllowsInvalid()) {
return (offset < getMaxLength() && !isLiteral(offset));
}
return true;
}
/*
* Returns true if the operation described by <code>rh</code> will
* result in a legal edit. This may set the <code>value</code>
* field of <code>rh</code>.
* <p>
* This is overriden to return true for a partial match.
*/
boolean isValidEdit(ReplaceHolder rh) {
if (!getAllowsInvalid()) {
String newString = getReplaceString(rh.offset, rh.length, rh.text);
try {
rh.value = stringToValue(newString, false);
return true;
} catch (ParseException pe) {
return false;
}
}
return true;
}
/**
* This method does the following (assuming !getAllowsInvalid()):
* iterate over the max of the deleted region or the text length, for
* each character:
* <ol>
* <li>If it is valid (matches the mask at the particular position, or
* matches the literal character at the position), allow it
* <li>Else if the position identifies a literal character, add it. This
* allows for the user to paste in text that may/may not contain
* the literals. For example, in pasing in 5551212 into ###-####
* when the 1 is evaluated it is illegal (by the first test), but there
* is a literal at this position (-), so it is used. NOTE: This has
* a problem that you can't tell (without looking ahead) if you should
* eat literals in the text. For example, if you paste '555' into
* #5##, should it result in '5555' or '555 '? The current code will
* result in the latter, which feels a little better as selecting
* text than pasting will always result in the same thing.
* <li>Else if at the end of the inserted text, the replace the item with
* the placeholder
* <li>Otherwise the insert is bogus and false is returned.
* </ol>
*/
boolean canReplace(ReplaceHolder rh) {
// This method is rather long, but much of the burden is in
// maintaining a String and swapping to a StringBuilder only if
// absolutely necessary.
if (!getAllowsInvalid()) {
StringBuilder replace = null;
String text = rh.text;
int tl = (text != null) ? text.length() : 0;
if (tl == 0 && rh.length == 1 && getFormattedTextField().
getSelectionStart() != rh.offset) {
// Backspace, adjust to actually delete next non-literal.
while (rh.offset > 0 && isLiteral(rh.offset)) {
rh.offset--;
}
}
int max = Math.min(getMaxLength() - rh.offset,
Math.max(tl, rh.length));
for (int counter = 0, textIndex = 0; counter < max; counter++) {
if (textIndex < tl && isValidCharacter(rh.offset + counter,
text.charAt(textIndex))) {
char aChar = text.charAt(textIndex);
if (aChar != getCharacter(rh.offset + counter, aChar)) {
if (replace == null) {
replace = new StringBuilder();
if (textIndex > 0) {
replace.append(text.substring(0, textIndex));
}
}
}
if (replace != null) {
replace.append(getCharacter(rh.offset + counter,
aChar));
}
textIndex++;
}
else if (isLiteral(rh.offset + counter)) {
if (replace != null) {
replace.append(getLiteral(rh.offset + counter));
if (textIndex < tl) {
max = Math.min(max + 1, getMaxLength() -
rh.offset);
}
}
else if (textIndex > 0) {
replace = new StringBuilder(max);
replace.append(text.substring(0, textIndex));
replace.append(getLiteral(rh.offset + counter));
if (textIndex < tl) {
// Evaluate the character in text again.
max = Math.min(max + 1, getMaxLength() -
rh.offset);
}
else if (rh.cursorPosition == -1) {
rh.cursorPosition = rh.offset + counter;
}
}
else {
rh.offset++;
rh.length--;
counter--;
max--;
}
}
else if (textIndex >= tl) {
// placeholder
if (replace == null) {
replace = new StringBuilder();
if (text != null) {
replace.append(text);
}
}
replace.append(getPlaceholderCharacter());
if (tl > 0 && rh.cursorPosition == -1) {
rh.cursorPosition = rh.offset + counter;
}
}
else {
// Bogus character.
return false;
}
}
if (replace != null) {
rh.text = replace.toString();
}
else if (text != null && rh.offset + tl > getMaxLength()) {
rh.text = text.substring(0, getMaxLength() - rh.offset);
}
if (getOverwriteMode() && rh.text != null) {
rh.length = rh.text.length();
}
}
return super.canReplace(rh);
}
//
// Interal classes used to represent the mask.
//
private class MaskCharacter {
/**
* Subclasses should override this returning true if the instance
* represents a literal character. The default implementation
* returns false.
*/
public boolean isLiteral() {
return false;
}
/**
* Returns true if <code>aChar</code> is a valid reprensentation of
* the receiver. The default implementation returns true if the
* receiver represents a literal character and <code>getChar</code>
* == aChar. Otherwise, this will return true is <code>aChar</code>
* is contained in the valid characters and not contained
* in the invalid characters.
*/
public boolean isValidCharacter(char aChar) {
if (isLiteral()) {
return (getChar(aChar) == aChar);
}
aChar = getChar(aChar);
String filter = getValidCharacters();
if (filter != null && filter.indexOf(aChar) == -1) {
return false;
}
filter = getInvalidCharacters();
if (filter != null && filter.indexOf(aChar) != -1) {
return false;
}
return true;
}
/**
* Returns the character to insert for <code>aChar</code>. The
* default implementation returns <code>aChar</code>. Subclasses
* that wish to do some sort of mapping, perhaps lower case to upper
* case should override this and do the necessary mapping.
*/
public char getChar(char aChar) {
return aChar;
}
/**
* Appends the necessary character in <code>formatting</code> at
* <code>index</code> to <code>buff</code>.
*/
public void append(StringBuilder buff, String formatting, int[] index,
String placeholder)
throws ParseException {
boolean inString = index[0] < formatting.length();
char aChar = inString ? formatting.charAt(index[0]) : 0;
if (isLiteral()) {
buff.append(getChar(aChar));
if (getValueContainsLiteralCharacters()) {
if (inString && aChar != getChar(aChar)) {
throw new ParseException("Invalid character: " +
aChar, index[0]);
}
index[0] = index[0] + 1;
}
}
else if (index[0] >= formatting.length()) {
if (placeholder != null && index[0] < placeholder.length()) {
buff.append(placeholder.charAt(index[0]));
}
else {
buff.append(getPlaceholderCharacter());
}
index[0] = index[0] + 1;
}
else if (isValidCharacter(aChar)) {
buff.append(getChar(aChar));
index[0] = index[0] + 1;
}
else {
throw new ParseException("Invalid character: " + aChar,
index[0]);
}
}
}
/**
* Used to represent a fixed character in the mask.
*/
private class LiteralCharacter extends MaskCharacter {
private char fixedChar;
public LiteralCharacter(char fixedChar) {
this.fixedChar = fixedChar;
}
public boolean isLiteral() {
return true;
}
public char getChar(char aChar) {
return fixedChar;
}
}
/**
* Represents a number, uses <code>Character.isDigit</code>.
*/
private class DigitMaskCharacter extends MaskCharacter {
public boolean isValidCharacter(char aChar) {
return (Character.isDigit(aChar) &&
super.isValidCharacter(aChar));
}
}
/**
* Represents a character, lower case letters are mapped to upper case
* using <code>Character.toUpperCase</code>.
*/
private class UpperCaseCharacter extends MaskCharacter {
public boolean isValidCharacter(char aChar) {
return (Character.isLetter(aChar) &&
super.isValidCharacter(aChar));
}
public char getChar(char aChar) {
return Character.toUpperCase(aChar);
}
}
/**
* Represents a character, upper case letters are mapped to lower case
* using <code>Character.toLowerCase</code>.
*/
private class LowerCaseCharacter extends MaskCharacter {
public boolean isValidCharacter(char aChar) {
return (Character.isLetter(aChar) &&
super.isValidCharacter(aChar));
}
public char getChar(char aChar) {
return Character.toLowerCase(aChar);
}
}
/**
* Represents either a character or digit, uses
* <code>Character.isLetterOrDigit</code>.
*/
private class AlphaNumericCharacter extends MaskCharacter {
public boolean isValidCharacter(char aChar) {
return (Character.isLetterOrDigit(aChar) &&
super.isValidCharacter(aChar));
}
}
/**
* Represents a letter, uses <code>Character.isLetter</code>.
*/
private class CharCharacter extends MaskCharacter {
public boolean isValidCharacter(char aChar) {
return (Character.isLetter(aChar) &&
super.isValidCharacter(aChar));
}
}
/**
* Represents a hex character, 0-9a-fA-F. a-f is mapped to A-F
*/
private class HexCharacter extends MaskCharacter {
public boolean isValidCharacter(char aChar) {
return ((aChar == '0' || aChar == '1' ||
aChar == '2' || aChar == '3' ||
aChar == '4' || aChar == '5' ||
aChar == '6' || aChar == '7' ||
aChar == '8' || aChar == '9' ||
aChar == 'a' || aChar == 'A' ||
aChar == 'b' || aChar == 'B' ||
aChar == 'c' || aChar == 'C' ||
aChar == 'd' || aChar == 'D' ||
aChar == 'e' || aChar == 'E' ||
aChar == 'f' || aChar == 'F') &&
super.isValidCharacter(aChar));
}
public char getChar(char aChar) {
if (Character.isDigit(aChar)) {
return aChar;
}
return Character.toUpperCase(aChar);
}
}
}
|
googleapis/google-cloud-java | 36,732 | java-managedkafka/proto-google-cloud-managedkafka-v1/src/main/java/com/google/cloud/managedkafka/v1/ListTopicsResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/managedkafka/v1/managed_kafka.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.managedkafka.v1;
/**
*
*
* <pre>
* Response for ListTopics.
* </pre>
*
* Protobuf type {@code google.cloud.managedkafka.v1.ListTopicsResponse}
*/
public final class ListTopicsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.managedkafka.v1.ListTopicsResponse)
ListTopicsResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListTopicsResponse.newBuilder() to construct.
private ListTopicsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListTopicsResponse() {
topics_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListTopicsResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.managedkafka.v1.ManagedKafkaProto
.internal_static_google_cloud_managedkafka_v1_ListTopicsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.managedkafka.v1.ManagedKafkaProto
.internal_static_google_cloud_managedkafka_v1_ListTopicsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.managedkafka.v1.ListTopicsResponse.class,
com.google.cloud.managedkafka.v1.ListTopicsResponse.Builder.class);
}
public static final int TOPICS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.managedkafka.v1.Topic> topics_;
/**
*
*
* <pre>
* The list of topics in the requested parent. The order of the topics is
* unspecified.
* </pre>
*
* <code>repeated .google.cloud.managedkafka.v1.Topic topics = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.managedkafka.v1.Topic> getTopicsList() {
return topics_;
}
/**
*
*
* <pre>
* The list of topics in the requested parent. The order of the topics is
* unspecified.
* </pre>
*
* <code>repeated .google.cloud.managedkafka.v1.Topic topics = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.managedkafka.v1.TopicOrBuilder>
getTopicsOrBuilderList() {
return topics_;
}
/**
*
*
* <pre>
* The list of topics in the requested parent. The order of the topics is
* unspecified.
* </pre>
*
* <code>repeated .google.cloud.managedkafka.v1.Topic topics = 1;</code>
*/
@java.lang.Override
public int getTopicsCount() {
return topics_.size();
}
/**
*
*
* <pre>
* The list of topics in the requested parent. The order of the topics is
* unspecified.
* </pre>
*
* <code>repeated .google.cloud.managedkafka.v1.Topic topics = 1;</code>
*/
@java.lang.Override
public com.google.cloud.managedkafka.v1.Topic getTopics(int index) {
return topics_.get(index);
}
/**
*
*
* <pre>
* The list of topics in the requested parent. The order of the topics is
* unspecified.
* </pre>
*
* <code>repeated .google.cloud.managedkafka.v1.Topic topics = 1;</code>
*/
@java.lang.Override
public com.google.cloud.managedkafka.v1.TopicOrBuilder getTopicsOrBuilder(int index) {
return topics_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token that can be sent as `page_token` to retrieve the next page of
* results. If this field is omitted, there are no more results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* A token that can be sent as `page_token` to retrieve the next page of
* results. If this field is omitted, there are no more results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < topics_.size(); i++) {
output.writeMessage(1, topics_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < topics_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, topics_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.managedkafka.v1.ListTopicsResponse)) {
return super.equals(obj);
}
com.google.cloud.managedkafka.v1.ListTopicsResponse other =
(com.google.cloud.managedkafka.v1.ListTopicsResponse) obj;
if (!getTopicsList().equals(other.getTopicsList())) return false;
if (!getNextPageToken().equals(other.getNextPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getTopicsCount() > 0) {
hash = (37 * hash) + TOPICS_FIELD_NUMBER;
hash = (53 * hash) + getTopicsList().hashCode();
}
hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getNextPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.managedkafka.v1.ListTopicsResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.managedkafka.v1.ListTopicsResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.managedkafka.v1.ListTopicsResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.managedkafka.v1.ListTopicsResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.managedkafka.v1.ListTopicsResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.managedkafka.v1.ListTopicsResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.managedkafka.v1.ListTopicsResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.managedkafka.v1.ListTopicsResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.managedkafka.v1.ListTopicsResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.managedkafka.v1.ListTopicsResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.managedkafka.v1.ListTopicsResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.managedkafka.v1.ListTopicsResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.managedkafka.v1.ListTopicsResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Response for ListTopics.
* </pre>
*
* Protobuf type {@code google.cloud.managedkafka.v1.ListTopicsResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.managedkafka.v1.ListTopicsResponse)
com.google.cloud.managedkafka.v1.ListTopicsResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.managedkafka.v1.ManagedKafkaProto
.internal_static_google_cloud_managedkafka_v1_ListTopicsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.managedkafka.v1.ManagedKafkaProto
.internal_static_google_cloud_managedkafka_v1_ListTopicsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.managedkafka.v1.ListTopicsResponse.class,
com.google.cloud.managedkafka.v1.ListTopicsResponse.Builder.class);
}
// Construct using com.google.cloud.managedkafka.v1.ListTopicsResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (topicsBuilder_ == null) {
topics_ = java.util.Collections.emptyList();
} else {
topics_ = null;
topicsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.managedkafka.v1.ManagedKafkaProto
.internal_static_google_cloud_managedkafka_v1_ListTopicsResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.managedkafka.v1.ListTopicsResponse getDefaultInstanceForType() {
return com.google.cloud.managedkafka.v1.ListTopicsResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.managedkafka.v1.ListTopicsResponse build() {
com.google.cloud.managedkafka.v1.ListTopicsResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.managedkafka.v1.ListTopicsResponse buildPartial() {
com.google.cloud.managedkafka.v1.ListTopicsResponse result =
new com.google.cloud.managedkafka.v1.ListTopicsResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.cloud.managedkafka.v1.ListTopicsResponse result) {
if (topicsBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
topics_ = java.util.Collections.unmodifiableList(topics_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.topics_ = topics_;
} else {
result.topics_ = topicsBuilder_.build();
}
}
private void buildPartial0(com.google.cloud.managedkafka.v1.ListTopicsResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.managedkafka.v1.ListTopicsResponse) {
return mergeFrom((com.google.cloud.managedkafka.v1.ListTopicsResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.managedkafka.v1.ListTopicsResponse other) {
if (other == com.google.cloud.managedkafka.v1.ListTopicsResponse.getDefaultInstance())
return this;
if (topicsBuilder_ == null) {
if (!other.topics_.isEmpty()) {
if (topics_.isEmpty()) {
topics_ = other.topics_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureTopicsIsMutable();
topics_.addAll(other.topics_);
}
onChanged();
}
} else {
if (!other.topics_.isEmpty()) {
if (topicsBuilder_.isEmpty()) {
topicsBuilder_.dispose();
topicsBuilder_ = null;
topics_ = other.topics_;
bitField0_ = (bitField0_ & ~0x00000001);
topicsBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getTopicsFieldBuilder()
: null;
} else {
topicsBuilder_.addAllMessages(other.topics_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.cloud.managedkafka.v1.Topic m =
input.readMessage(
com.google.cloud.managedkafka.v1.Topic.parser(), extensionRegistry);
if (topicsBuilder_ == null) {
ensureTopicsIsMutable();
topics_.add(m);
} else {
topicsBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.cloud.managedkafka.v1.Topic> topics_ =
java.util.Collections.emptyList();
private void ensureTopicsIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
topics_ = new java.util.ArrayList<com.google.cloud.managedkafka.v1.Topic>(topics_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.managedkafka.v1.Topic,
com.google.cloud.managedkafka.v1.Topic.Builder,
com.google.cloud.managedkafka.v1.TopicOrBuilder>
topicsBuilder_;
/**
*
*
* <pre>
* The list of topics in the requested parent. The order of the topics is
* unspecified.
* </pre>
*
* <code>repeated .google.cloud.managedkafka.v1.Topic topics = 1;</code>
*/
public java.util.List<com.google.cloud.managedkafka.v1.Topic> getTopicsList() {
if (topicsBuilder_ == null) {
return java.util.Collections.unmodifiableList(topics_);
} else {
return topicsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* The list of topics in the requested parent. The order of the topics is
* unspecified.
* </pre>
*
* <code>repeated .google.cloud.managedkafka.v1.Topic topics = 1;</code>
*/
public int getTopicsCount() {
if (topicsBuilder_ == null) {
return topics_.size();
} else {
return topicsBuilder_.getCount();
}
}
/**
*
*
* <pre>
* The list of topics in the requested parent. The order of the topics is
* unspecified.
* </pre>
*
* <code>repeated .google.cloud.managedkafka.v1.Topic topics = 1;</code>
*/
public com.google.cloud.managedkafka.v1.Topic getTopics(int index) {
if (topicsBuilder_ == null) {
return topics_.get(index);
} else {
return topicsBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* The list of topics in the requested parent. The order of the topics is
* unspecified.
* </pre>
*
* <code>repeated .google.cloud.managedkafka.v1.Topic topics = 1;</code>
*/
public Builder setTopics(int index, com.google.cloud.managedkafka.v1.Topic value) {
if (topicsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureTopicsIsMutable();
topics_.set(index, value);
onChanged();
} else {
topicsBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The list of topics in the requested parent. The order of the topics is
* unspecified.
* </pre>
*
* <code>repeated .google.cloud.managedkafka.v1.Topic topics = 1;</code>
*/
public Builder setTopics(
int index, com.google.cloud.managedkafka.v1.Topic.Builder builderForValue) {
if (topicsBuilder_ == null) {
ensureTopicsIsMutable();
topics_.set(index, builderForValue.build());
onChanged();
} else {
topicsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of topics in the requested parent. The order of the topics is
* unspecified.
* </pre>
*
* <code>repeated .google.cloud.managedkafka.v1.Topic topics = 1;</code>
*/
public Builder addTopics(com.google.cloud.managedkafka.v1.Topic value) {
if (topicsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureTopicsIsMutable();
topics_.add(value);
onChanged();
} else {
topicsBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* The list of topics in the requested parent. The order of the topics is
* unspecified.
* </pre>
*
* <code>repeated .google.cloud.managedkafka.v1.Topic topics = 1;</code>
*/
public Builder addTopics(int index, com.google.cloud.managedkafka.v1.Topic value) {
if (topicsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureTopicsIsMutable();
topics_.add(index, value);
onChanged();
} else {
topicsBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The list of topics in the requested parent. The order of the topics is
* unspecified.
* </pre>
*
* <code>repeated .google.cloud.managedkafka.v1.Topic topics = 1;</code>
*/
public Builder addTopics(com.google.cloud.managedkafka.v1.Topic.Builder builderForValue) {
if (topicsBuilder_ == null) {
ensureTopicsIsMutable();
topics_.add(builderForValue.build());
onChanged();
} else {
topicsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of topics in the requested parent. The order of the topics is
* unspecified.
* </pre>
*
* <code>repeated .google.cloud.managedkafka.v1.Topic topics = 1;</code>
*/
public Builder addTopics(
int index, com.google.cloud.managedkafka.v1.Topic.Builder builderForValue) {
if (topicsBuilder_ == null) {
ensureTopicsIsMutable();
topics_.add(index, builderForValue.build());
onChanged();
} else {
topicsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of topics in the requested parent. The order of the topics is
* unspecified.
* </pre>
*
* <code>repeated .google.cloud.managedkafka.v1.Topic topics = 1;</code>
*/
public Builder addAllTopics(
java.lang.Iterable<? extends com.google.cloud.managedkafka.v1.Topic> values) {
if (topicsBuilder_ == null) {
ensureTopicsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, topics_);
onChanged();
} else {
topicsBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* The list of topics in the requested parent. The order of the topics is
* unspecified.
* </pre>
*
* <code>repeated .google.cloud.managedkafka.v1.Topic topics = 1;</code>
*/
public Builder clearTopics() {
if (topicsBuilder_ == null) {
topics_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
topicsBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* The list of topics in the requested parent. The order of the topics is
* unspecified.
* </pre>
*
* <code>repeated .google.cloud.managedkafka.v1.Topic topics = 1;</code>
*/
public Builder removeTopics(int index) {
if (topicsBuilder_ == null) {
ensureTopicsIsMutable();
topics_.remove(index);
onChanged();
} else {
topicsBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* The list of topics in the requested parent. The order of the topics is
* unspecified.
* </pre>
*
* <code>repeated .google.cloud.managedkafka.v1.Topic topics = 1;</code>
*/
public com.google.cloud.managedkafka.v1.Topic.Builder getTopicsBuilder(int index) {
return getTopicsFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* The list of topics in the requested parent. The order of the topics is
* unspecified.
* </pre>
*
* <code>repeated .google.cloud.managedkafka.v1.Topic topics = 1;</code>
*/
public com.google.cloud.managedkafka.v1.TopicOrBuilder getTopicsOrBuilder(int index) {
if (topicsBuilder_ == null) {
return topics_.get(index);
} else {
return topicsBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* The list of topics in the requested parent. The order of the topics is
* unspecified.
* </pre>
*
* <code>repeated .google.cloud.managedkafka.v1.Topic topics = 1;</code>
*/
public java.util.List<? extends com.google.cloud.managedkafka.v1.TopicOrBuilder>
getTopicsOrBuilderList() {
if (topicsBuilder_ != null) {
return topicsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(topics_);
}
}
/**
*
*
* <pre>
* The list of topics in the requested parent. The order of the topics is
* unspecified.
* </pre>
*
* <code>repeated .google.cloud.managedkafka.v1.Topic topics = 1;</code>
*/
public com.google.cloud.managedkafka.v1.Topic.Builder addTopicsBuilder() {
return getTopicsFieldBuilder()
.addBuilder(com.google.cloud.managedkafka.v1.Topic.getDefaultInstance());
}
/**
*
*
* <pre>
* The list of topics in the requested parent. The order of the topics is
* unspecified.
* </pre>
*
* <code>repeated .google.cloud.managedkafka.v1.Topic topics = 1;</code>
*/
public com.google.cloud.managedkafka.v1.Topic.Builder addTopicsBuilder(int index) {
return getTopicsFieldBuilder()
.addBuilder(index, com.google.cloud.managedkafka.v1.Topic.getDefaultInstance());
}
/**
*
*
* <pre>
* The list of topics in the requested parent. The order of the topics is
* unspecified.
* </pre>
*
* <code>repeated .google.cloud.managedkafka.v1.Topic topics = 1;</code>
*/
public java.util.List<com.google.cloud.managedkafka.v1.Topic.Builder> getTopicsBuilderList() {
return getTopicsFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.managedkafka.v1.Topic,
com.google.cloud.managedkafka.v1.Topic.Builder,
com.google.cloud.managedkafka.v1.TopicOrBuilder>
getTopicsFieldBuilder() {
if (topicsBuilder_ == null) {
topicsBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.managedkafka.v1.Topic,
com.google.cloud.managedkafka.v1.Topic.Builder,
com.google.cloud.managedkafka.v1.TopicOrBuilder>(
topics_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
topics_ = null;
}
return topicsBuilder_;
}
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token that can be sent as `page_token` to retrieve the next page of
* results. If this field is omitted, there are no more results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* A token that can be sent as `page_token` to retrieve the next page of
* results. If this field is omitted, there are no more results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* A token that can be sent as `page_token` to retrieve the next page of
* results. If this field is omitted, there are no more results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* A token that can be sent as `page_token` to retrieve the next page of
* results. If this field is omitted, there are no more results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* A token that can be sent as `page_token` to retrieve the next page of
* results. If this field is omitted, there are no more results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.managedkafka.v1.ListTopicsResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.managedkafka.v1.ListTopicsResponse)
private static final com.google.cloud.managedkafka.v1.ListTopicsResponse DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.managedkafka.v1.ListTopicsResponse();
}
public static com.google.cloud.managedkafka.v1.ListTopicsResponse getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ListTopicsResponse> PARSER =
new com.google.protobuf.AbstractParser<ListTopicsResponse>() {
@java.lang.Override
public ListTopicsResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ListTopicsResponse> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListTopicsResponse> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.managedkafka.v1.ListTopicsResponse getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/sdk-platform-java | 36,761 | java-iam/proto-google-iam-v3beta/src/main/java/com/google/iam/v3beta/UpdatePolicyBindingRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/iam/v3beta/policy_bindings_service.proto
// Protobuf Java Version: 3.25.8
package com.google.iam.v3beta;
/**
*
*
* <pre>
* Request message for UpdatePolicyBinding method.
* </pre>
*
* Protobuf type {@code google.iam.v3beta.UpdatePolicyBindingRequest}
*/
public final class UpdatePolicyBindingRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.iam.v3beta.UpdatePolicyBindingRequest)
UpdatePolicyBindingRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use UpdatePolicyBindingRequest.newBuilder() to construct.
private UpdatePolicyBindingRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private UpdatePolicyBindingRequest() {}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new UpdatePolicyBindingRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.iam.v3beta.PolicyBindingsServiceProto
.internal_static_google_iam_v3beta_UpdatePolicyBindingRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.iam.v3beta.PolicyBindingsServiceProto
.internal_static_google_iam_v3beta_UpdatePolicyBindingRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.iam.v3beta.UpdatePolicyBindingRequest.class,
com.google.iam.v3beta.UpdatePolicyBindingRequest.Builder.class);
}
private int bitField0_;
public static final int POLICY_BINDING_FIELD_NUMBER = 1;
private com.google.iam.v3beta.PolicyBinding policyBinding_;
/**
*
*
* <pre>
* Required. The policy binding to update.
*
* The policy binding's `name` field is used to identify the policy binding to
* update.
* </pre>
*
* <code>
* .google.iam.v3beta.PolicyBinding policy_binding = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the policyBinding field is set.
*/
@java.lang.Override
public boolean hasPolicyBinding() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The policy binding to update.
*
* The policy binding's `name` field is used to identify the policy binding to
* update.
* </pre>
*
* <code>
* .google.iam.v3beta.PolicyBinding policy_binding = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The policyBinding.
*/
@java.lang.Override
public com.google.iam.v3beta.PolicyBinding getPolicyBinding() {
return policyBinding_ == null
? com.google.iam.v3beta.PolicyBinding.getDefaultInstance()
: policyBinding_;
}
/**
*
*
* <pre>
* Required. The policy binding to update.
*
* The policy binding's `name` field is used to identify the policy binding to
* update.
* </pre>
*
* <code>
* .google.iam.v3beta.PolicyBinding policy_binding = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.iam.v3beta.PolicyBindingOrBuilder getPolicyBindingOrBuilder() {
return policyBinding_ == null
? com.google.iam.v3beta.PolicyBinding.getDefaultInstance()
: policyBinding_;
}
public static final int VALIDATE_ONLY_FIELD_NUMBER = 2;
private boolean validateOnly_ = false;
/**
*
*
* <pre>
* Optional. If set, validate the request and preview the update, but do not
* actually post it.
* </pre>
*
* <code>bool validate_only = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The validateOnly.
*/
@java.lang.Override
public boolean getValidateOnly() {
return validateOnly_;
}
public static final int UPDATE_MASK_FIELD_NUMBER = 3;
private com.google.protobuf.FieldMask updateMask_;
/**
*
*
* <pre>
* Optional. The list of fields to update
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 3 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return Whether the updateMask field is set.
*/
@java.lang.Override
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Optional. The list of fields to update
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 3 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return The updateMask.
*/
@java.lang.Override
public com.google.protobuf.FieldMask getUpdateMask() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
/**
*
*
* <pre>
* Optional. The list of fields to update
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 3 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
@java.lang.Override
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
// Memoized initialization state: -1 = not yet computed, 0 = not initialized,
// 1 = initialized. This message declares no required fields, so the check
// always succeeds and the result is cached on first use.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
// No required fields to verify; memoize success.
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
// Serialize only fields that are present: message fields use explicit
// presence bits in bitField0_, the bool scalar is written only when it
// differs from its proto3 default (false). Unknown fields retained from
// parsing are appended last.
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getPolicyBinding());
}
if (validateOnly_ != false) {
output.writeBool(2, validateOnly_);
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeMessage(3, getUpdateMask());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
// Size computation mirrors writeTo() field-for-field and is memoized in
// memoizedSize (-1 means "not yet computed"); messages are immutable, so
// the cached value never goes stale.
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getPolicyBinding());
}
if (validateOnly_ != false) {
size += com.google.protobuf.CodedOutputStream.computeBoolSize(2, validateOnly_);
}
if (((bitField0_ & 0x00000002) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getUpdateMask());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.iam.v3beta.UpdatePolicyBindingRequest)) {
return super.equals(obj);
}
com.google.iam.v3beta.UpdatePolicyBindingRequest other =
(com.google.iam.v3beta.UpdatePolicyBindingRequest) obj;
// Field-by-field comparison: message fields must agree on presence before
// their values are compared; unknown fields participate so that two
// messages parsed from different bytes are not considered equal.
if (hasPolicyBinding() != other.hasPolicyBinding()) return false;
if (hasPolicyBinding()) {
if (!getPolicyBinding().equals(other.getPolicyBinding())) return false;
}
if (getValidateOnly() != other.getValidateOnly()) return false;
if (hasUpdateMask() != other.hasUpdateMask()) return false;
if (hasUpdateMask()) {
if (!getUpdateMask().equals(other.getUpdateMask())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
// Memoized; 0 doubles as the "not yet computed" sentinel. The prime
// multipliers (19/37/53/29) and the field-number mixing keep this
// consistent with equals(): only present fields contribute.
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasPolicyBinding()) {
hash = (37 * hash) + POLICY_BINDING_FIELD_NUMBER;
hash = (53 * hash) + getPolicyBinding().hashCode();
}
hash = (37 * hash) + VALIDATE_ONLY_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getValidateOnly());
if (hasUpdateMask()) {
hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
hash = (53 * hash) + getUpdateMask().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.iam.v3beta.UpdatePolicyBindingRequest parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.iam.v3beta.UpdatePolicyBindingRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.iam.v3beta.UpdatePolicyBindingRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.iam.v3beta.UpdatePolicyBindingRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.iam.v3beta.UpdatePolicyBindingRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.iam.v3beta.UpdatePolicyBindingRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.iam.v3beta.UpdatePolicyBindingRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.iam.v3beta.UpdatePolicyBindingRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.iam.v3beta.UpdatePolicyBindingRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.iam.v3beta.UpdatePolicyBindingRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.iam.v3beta.UpdatePolicyBindingRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.iam.v3beta.UpdatePolicyBindingRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.iam.v3beta.UpdatePolicyBindingRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request message for UpdatePolicyBinding method.
* </pre>
*
* Protobuf type {@code google.iam.v3beta.UpdatePolicyBindingRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.iam.v3beta.UpdatePolicyBindingRequest)
com.google.iam.v3beta.UpdatePolicyBindingRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.iam.v3beta.PolicyBindingsServiceProto
.internal_static_google_iam_v3beta_UpdatePolicyBindingRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.iam.v3beta.PolicyBindingsServiceProto
.internal_static_google_iam_v3beta_UpdatePolicyBindingRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.iam.v3beta.UpdatePolicyBindingRequest.class,
com.google.iam.v3beta.UpdatePolicyBindingRequest.Builder.class);
}
// Construct using com.google.iam.v3beta.UpdatePolicyBindingRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getPolicyBindingFieldBuilder();
getUpdateMaskFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
policyBinding_ = null;
if (policyBindingBuilder_ != null) {
policyBindingBuilder_.dispose();
policyBindingBuilder_ = null;
}
validateOnly_ = false;
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.iam.v3beta.PolicyBindingsServiceProto
.internal_static_google_iam_v3beta_UpdatePolicyBindingRequest_descriptor;
}
@java.lang.Override
public com.google.iam.v3beta.UpdatePolicyBindingRequest getDefaultInstanceForType() {
return com.google.iam.v3beta.UpdatePolicyBindingRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.iam.v3beta.UpdatePolicyBindingRequest build() {
com.google.iam.v3beta.UpdatePolicyBindingRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.iam.v3beta.UpdatePolicyBindingRequest buildPartial() {
// Unlike build(), buildPartial() skips the isInitialized() check and
// returns whatever has been set so far.
com.google.iam.v3beta.UpdatePolicyBindingRequest result =
new com.google.iam.v3beta.UpdatePolicyBindingRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
// Copies set fields from the builder into the new message. Note the bit
// remapping: the builder tracks validate_only in bit 0x2 and update_mask in
// bit 0x4, but the message only keeps presence bits for its two message
// fields (policy_binding -> 0x1, update_mask -> 0x2); the bool needs none.
private void buildPartial0(com.google.iam.v3beta.UpdatePolicyBindingRequest result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.policyBinding_ =
policyBindingBuilder_ == null ? policyBinding_ : policyBindingBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.validateOnly_ = validateOnly_;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
to_bitField0_ |= 0x00000002;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.iam.v3beta.UpdatePolicyBindingRequest) {
return mergeFrom((com.google.iam.v3beta.UpdatePolicyBindingRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.iam.v3beta.UpdatePolicyBindingRequest other) {
if (other == com.google.iam.v3beta.UpdatePolicyBindingRequest.getDefaultInstance())
return this;
if (other.hasPolicyBinding()) {
mergePolicyBinding(other.getPolicyBinding());
}
if (other.getValidateOnly() != false) {
setValidateOnly(other.getValidateOnly());
}
if (other.hasUpdateMask()) {
mergeUpdateMask(other.getUpdateMask());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
// Stream-driven parse loop. Tag values are (field_number << 3) | wire_type:
// 10 = field 1 length-delimited (policy_binding), 16 = field 2 varint
// (validate_only), 26 = field 3 length-delimited (update_mask). Tag 0
// means end of stream; unrecognized tags are preserved as unknown fields.
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
input.readMessage(getPolicyBindingFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 16:
{
validateOnly_ = input.readBool();
bitField0_ |= 0x00000002;
break;
} // case 16
case 26:
{
input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000004;
break;
} // case 26
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
// onChanged() runs even on failure so parent builders see partial state.
onChanged();
} // finally
return this;
}
private int bitField0_;
private com.google.iam.v3beta.PolicyBinding policyBinding_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.iam.v3beta.PolicyBinding,
com.google.iam.v3beta.PolicyBinding.Builder,
com.google.iam.v3beta.PolicyBindingOrBuilder>
policyBindingBuilder_;
/**
*
*
* <pre>
* Required. The policy binding to update.
*
* The policy binding's `name` field is used to identify the policy binding to
* update.
* </pre>
*
* <code>
* .google.iam.v3beta.PolicyBinding policy_binding = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the policyBinding field is set.
*/
public boolean hasPolicyBinding() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The policy binding to update.
*
* The policy binding's `name` field is used to identify the policy binding to
* update.
* </pre>
*
* <code>
* .google.iam.v3beta.PolicyBinding policy_binding = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The policyBinding.
*/
public com.google.iam.v3beta.PolicyBinding getPolicyBinding() {
if (policyBindingBuilder_ == null) {
return policyBinding_ == null
? com.google.iam.v3beta.PolicyBinding.getDefaultInstance()
: policyBinding_;
} else {
return policyBindingBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. The policy binding to update.
*
* The policy binding's `name` field is used to identify the policy binding to
* update.
* </pre>
*
* <code>
* .google.iam.v3beta.PolicyBinding policy_binding = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setPolicyBinding(com.google.iam.v3beta.PolicyBinding value) {
if (policyBindingBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
policyBinding_ = value;
} else {
policyBindingBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The policy binding to update.
*
* The policy binding's `name` field is used to identify the policy binding to
* update.
* </pre>
*
* <code>
* .google.iam.v3beta.PolicyBinding policy_binding = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setPolicyBinding(com.google.iam.v3beta.PolicyBinding.Builder builderForValue) {
if (policyBindingBuilder_ == null) {
policyBinding_ = builderForValue.build();
} else {
policyBindingBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The policy binding to update.
*
* The policy binding's `name` field is used to identify the policy binding to
* update.
* </pre>
*
* <code>
* .google.iam.v3beta.PolicyBinding policy_binding = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergePolicyBinding(com.google.iam.v3beta.PolicyBinding value) {
if (policyBindingBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)
&& policyBinding_ != null
&& policyBinding_ != com.google.iam.v3beta.PolicyBinding.getDefaultInstance()) {
getPolicyBindingBuilder().mergeFrom(value);
} else {
policyBinding_ = value;
}
} else {
policyBindingBuilder_.mergeFrom(value);
}
if (policyBinding_ != null) {
bitField0_ |= 0x00000001;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. The policy binding to update.
*
* The policy binding's `name` field is used to identify the policy binding to
* update.
* </pre>
*
* <code>
* .google.iam.v3beta.PolicyBinding policy_binding = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearPolicyBinding() {
bitField0_ = (bitField0_ & ~0x00000001);
policyBinding_ = null;
if (policyBindingBuilder_ != null) {
policyBindingBuilder_.dispose();
policyBindingBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The policy binding to update.
*
* The policy binding's `name` field is used to identify the policy binding to
* update.
* </pre>
*
* <code>
* .google.iam.v3beta.PolicyBinding policy_binding = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.iam.v3beta.PolicyBinding.Builder getPolicyBindingBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getPolicyBindingFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. The policy binding to update.
*
* The policy binding's `name` field is used to identify the policy binding to
* update.
* </pre>
*
* <code>
* .google.iam.v3beta.PolicyBinding policy_binding = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.iam.v3beta.PolicyBindingOrBuilder getPolicyBindingOrBuilder() {
if (policyBindingBuilder_ != null) {
return policyBindingBuilder_.getMessageOrBuilder();
} else {
return policyBinding_ == null
? com.google.iam.v3beta.PolicyBinding.getDefaultInstance()
: policyBinding_;
}
}
/**
*
*
* <pre>
* Required. The policy binding to update.
*
* The policy binding's `name` field is used to identify the policy binding to
* update.
* </pre>
*
* <code>
* .google.iam.v3beta.PolicyBinding policy_binding = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.iam.v3beta.PolicyBinding,
com.google.iam.v3beta.PolicyBinding.Builder,
com.google.iam.v3beta.PolicyBindingOrBuilder>
getPolicyBindingFieldBuilder() {
if (policyBindingBuilder_ == null) {
policyBindingBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.iam.v3beta.PolicyBinding,
com.google.iam.v3beta.PolicyBinding.Builder,
com.google.iam.v3beta.PolicyBindingOrBuilder>(
getPolicyBinding(), getParentForChildren(), isClean());
policyBinding_ = null;
}
return policyBindingBuilder_;
}
private boolean validateOnly_;
/**
*
*
* <pre>
* Optional. If set, validate the request and preview the update, but do not
* actually post it.
* </pre>
*
* <code>bool validate_only = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The validateOnly.
*/
@java.lang.Override
public boolean getValidateOnly() {
return validateOnly_;
}
/**
*
*
* <pre>
* Optional. If set, validate the request and preview the update, but do not
* actually post it.
* </pre>
*
* <code>bool validate_only = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The validateOnly to set.
* @return This builder for chaining.
*/
public Builder setValidateOnly(boolean value) {
validateOnly_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. If set, validate the request and preview the update, but do not
* actually post it.
* </pre>
*
* <code>bool validate_only = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearValidateOnly() {
bitField0_ = (bitField0_ & ~0x00000002);
validateOnly_ = false;
onChanged();
return this;
}
private com.google.protobuf.FieldMask updateMask_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
updateMaskBuilder_;
/**
*
*
* <pre>
* Optional. The list of fields to update
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 3 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return Whether the updateMask field is set.
*/
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
*
*
* <pre>
* Optional. The list of fields to update
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 3 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return The updateMask.
*/
public com.google.protobuf.FieldMask getUpdateMask() {
if (updateMaskBuilder_ == null) {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
} else {
return updateMaskBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Optional. The list of fields to update
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 3 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
updateMask_ = value;
} else {
updateMaskBuilder_.setMessage(value);
}
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. The list of fields to update
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 3 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
if (updateMaskBuilder_ == null) {
updateMask_ = builderForValue.build();
} else {
updateMaskBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. The list of fields to update
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 3 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (((bitField0_ & 0x00000004) != 0)
&& updateMask_ != null
&& updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
getUpdateMaskBuilder().mergeFrom(value);
} else {
updateMask_ = value;
}
} else {
updateMaskBuilder_.mergeFrom(value);
}
if (updateMask_ != null) {
bitField0_ |= 0x00000004;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Optional. The list of fields to update
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 3 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public Builder clearUpdateMask() {
bitField0_ = (bitField0_ & ~0x00000004);
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. The list of fields to update
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 3 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
bitField0_ |= 0x00000004;
onChanged();
return getUpdateMaskFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Optional. The list of fields to update
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 3 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
if (updateMaskBuilder_ != null) {
return updateMaskBuilder_.getMessageOrBuilder();
} else {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
}
}
/**
*
*
* <pre>
* Optional. The list of fields to update
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 3 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
getUpdateMaskFieldBuilder() {
if (updateMaskBuilder_ == null) {
updateMaskBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>(
getUpdateMask(), getParentForChildren(), isClean());
updateMask_ = null;
}
return updateMaskBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.iam.v3beta.UpdatePolicyBindingRequest)
}
// @@protoc_insertion_point(class_scope:google.iam.v3beta.UpdatePolicyBindingRequest)
private static final com.google.iam.v3beta.UpdatePolicyBindingRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.iam.v3beta.UpdatePolicyBindingRequest();
}
public static com.google.iam.v3beta.UpdatePolicyBindingRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Singleton parser backing all parseFrom() overloads. Delegates to the
// Builder's mergeFrom loop; on any failure it attaches the partially-built
// message to the thrown InvalidProtocolBufferException so callers can
// inspect what was parsed before the error.
private static final com.google.protobuf.Parser<UpdatePolicyBindingRequest> PARSER =
new com.google.protobuf.AbstractParser<UpdatePolicyBindingRequest>() {
@java.lang.Override
public UpdatePolicyBindingRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<UpdatePolicyBindingRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<UpdatePolicyBindingRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.iam.v3beta.UpdatePolicyBindingRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
/*
* Copyright (c) 2006, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package sun.security.ec;
import java.math.BigInteger;
import java.security.spec.*;
import java.util.*;
import java.util.regex.Pattern;
/**
* Repository for well-known Elliptic Curve parameters. It is used by both
* the SunPKCS11 and SunJSSE code.
*
* @since 1.6
* @author Andreas Sterbenz
*/
public class CurveDB {
// Curve type flags passed to add(): the base value selects the field type,
// and the +4 variants additionally mark the curve as the preferred default
// for its field size in lengthMap.
private final static int P = 1; // prime curve
private final static int B = 2; // binary curve
private final static int PD = 5; // prime curve, mark as default
private final static int BD = 6; // binary curve, mark as default
// Lookup tables, populated once by the static initializer below.
// oidMap is a LinkedHashMap so iteration order matches registration order.
private static final Map<String,NamedCurve> oidMap =
new LinkedHashMap<String,NamedCurve>();
// Keyed by each common name a curve is registered under.
private static final Map<String,NamedCurve> nameMap =
new HashMap<String,NamedCurve>();
// One representative curve per field size (bits); defaults take precedence.
private static final Map<Integer,NamedCurve> lengthMap =
new HashMap<Integer,NamedCurve>();
// Unmodifiable view over all registered curves, set by the static initializer.
private static Collection<? extends NamedCurve> specCollection;
// Splits multi-name curve labels such as "secp256r1 [NIST P-256, X9.62 prime256v1]".
static final String SPLIT_PATTERN = ",|\\[|\\]";
// Used by SunECEntries
static Collection<? extends NamedCurve> getSupportedCurves() {
    // Expose the full registry assembled by the static initializer.
    return specCollection;
}
// Return a NamedCurve for the specified OID/name or null if unknown.
static NamedCurve lookup(String name) {
    // Prefer an OID match; fall back to the common-name table.
    NamedCurve byOid = oidMap.get(name);
    return (byOid != null) ? byOid : nameMap.get(name);
}
// Return EC parameters for the specified field size. If there are known
// NIST recommended parameters for the given length, they are returned.
// Otherwise, if there are multiple matches for the given size, an
// arbitrary one is returned.
// If no parameters are known, the method returns null.
// NOTE that this method returns both prime and binary curves.
static NamedCurve lookup(int length) {
return lengthMap.get(length);
}
// Convert the given ECParameterSpec object to a NamedCurve object.
// If params does not represent a known named curve, return null.
static NamedCurve lookup(ECParameterSpec params) {
    // Already a named curve (or nothing to convert): pass it through.
    if ((params == null) || (params instanceof NamedCurve)) {
        return (NamedCurve) params;
    }

    // This is a hack to allow SunJSSE to work with 3rd party crypto
    // providers for ECC and not just SunPKCS11.
    // This can go away once we decide how to expose curve names in the
    // public API.
    // Note that it assumes that the 3rd party provider encodes named
    // curves using the short form, not explicitly. If it did that, then
    // the SunJSSE TLS ECC extensions are wrong, which could lead to
    // interoperability problems.
    int fieldSize = params.getCurve().getField().getFieldSize();
    for (NamedCurve candidate : specCollection) {
        // ECParameterSpec does not define equals(), so compare every
        // component ourselves; the cheap field-size check goes first.
        boolean matches =
                (candidate.getCurve().getField().getFieldSize() == fieldSize)
                && candidate.getCurve().equals(params.getCurve())
                && candidate.getGenerator().equals(params.getGenerator())
                && candidate.getOrder().equals(params.getOrder())
                && (candidate.getCofactor() == params.getCofactor());
        if (matches) {
            // Everything matches our named curve, return it.
            return candidate;
        }
    }
    // No match found.
    return null;
}
// Parse a hexadecimal string (no 0x prefix) into a BigInteger.
private static BigInteger bi(String s) {
return new BigInteger(s, 16);
}
private static void add(String name, String soid, int type, String sfield,
String a, String b, String x, String y, String n, int h,
Pattern nameSplitPattern) {
BigInteger p = bi(sfield);
ECField field;
if ((type == P) || (type == PD)) {
field = new ECFieldFp(p);
} else if ((type == B) || (type == BD)) {
field = new ECFieldF2m(p.bitLength() - 1, p);
} else {
throw new RuntimeException("Invalid type: " + type);
}
EllipticCurve curve = new EllipticCurve(field, bi(a), bi(b));
ECPoint g = new ECPoint(bi(x), bi(y));
NamedCurve params = new NamedCurve(name, soid, curve, g, bi(n), h);
if (oidMap.put(soid, params) != null) {
throw new RuntimeException("Duplication oid: " + soid);
}
String[] commonNames = nameSplitPattern.split(name);
for (String commonName : commonNames) {
if (nameMap.put(commonName.trim(), params) != null) {
throw new RuntimeException("Duplication name: " + commonName);
}
}
int len = field.getFieldSize();
if ((type == PD) || (type == BD) || (lengthMap.get(len) == null)) {
// add entry if none present for this field size or if
// the curve is marked as a default curve.
lengthMap.put(len, params);
}
}
    static {
        // Populate the curve database. Each add(...) call registers one named
        // curve. Argument order: name (possibly with aliases in brackets),
        // OID, field type (P = prime, B = binary; PD/BD mark the default
        // curve for that field size), then hex-encoded domain parameters:
        // field (prime p or reduction polynomial), coefficients a and b,
        // generator coordinates x and y, order n, and the integer cofactor h.
        Pattern nameSplitPattern = Pattern.compile(SPLIT_PATTERN);
        /* SEC2 prime curves */
        add("secp112r1", "1.3.132.0.6", P,
            "DB7C2ABF62E35E668076BEAD208B",
            "DB7C2ABF62E35E668076BEAD2088",
            "659EF8BA043916EEDE8911702B22",
            "09487239995A5EE76B55F9C2F098",
            "A89CE5AF8724C0A23E0E0FF77500",
            "DB7C2ABF62E35E7628DFAC6561C5",
            1, nameSplitPattern);
        add("secp112r2", "1.3.132.0.7", P,
            "DB7C2ABF62E35E668076BEAD208B",
            "6127C24C05F38A0AAAF65C0EF02C",
            "51DEF1815DB5ED74FCC34C85D709",
            "4BA30AB5E892B4E1649DD0928643",
            "adcd46f5882e3747def36e956e97",
            "36DF0AAFD8B8D7597CA10520D04B",
            4, nameSplitPattern);
        add("secp128r1", "1.3.132.0.28", P,
            "FFFFFFFDFFFFFFFFFFFFFFFFFFFFFFFF",
            "FFFFFFFDFFFFFFFFFFFFFFFFFFFFFFFC",
            "E87579C11079F43DD824993C2CEE5ED3",
            "161FF7528B899B2D0C28607CA52C5B86",
            "CF5AC8395BAFEB13C02DA292DDED7A83",
            "FFFFFFFE0000000075A30D1B9038A115",
            1, nameSplitPattern);
        add("secp128r2", "1.3.132.0.29", P,
            "FFFFFFFDFFFFFFFFFFFFFFFFFFFFFFFF",
            "D6031998D1B3BBFEBF59CC9BBFF9AEE1",
            "5EEEFCA380D02919DC2C6558BB6D8A5D",
            "7B6AA5D85E572983E6FB32A7CDEBC140",
            "27B6916A894D3AEE7106FE805FC34B44",
            "3FFFFFFF7FFFFFFFBE0024720613B5A3",
            4, nameSplitPattern);
        add("secp160k1", "1.3.132.0.9", P,
            "FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFAC73",
            "0000000000000000000000000000000000000000",
            "0000000000000000000000000000000000000007",
            "3B4C382CE37AA192A4019E763036F4F5DD4D7EBB",
            "938CF935318FDCED6BC28286531733C3F03C4FEE",
            "0100000000000000000001B8FA16DFAB9ACA16B6B3",
            1, nameSplitPattern);
        add("secp160r1", "1.3.132.0.8", P,
            "FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF7FFFFFFF",
            "FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF7FFFFFFC",
            "1C97BEFC54BD7A8B65ACF89F81D4D4ADC565FA45",
            "4A96B5688EF573284664698968C38BB913CBFC82",
            "23A628553168947D59DCC912042351377AC5FB32",
            "0100000000000000000001F4C8F927AED3CA752257",
            1, nameSplitPattern);
        add("secp160r2", "1.3.132.0.30", P,
            "FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFAC73",
            "FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFAC70",
            "B4E134D3FB59EB8BAB57274904664D5AF50388BA",
            "52DCB034293A117E1F4FF11B30F7199D3144CE6D",
            "FEAFFEF2E331F296E071FA0DF9982CFEA7D43F2E",
            "0100000000000000000000351EE786A818F3A1A16B",
            1, nameSplitPattern);
        add("secp192k1", "1.3.132.0.31", P,
            "FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFEE37",
            "000000000000000000000000000000000000000000000000",
            "000000000000000000000000000000000000000000000003",
            "DB4FF10EC057E9AE26B07D0280B7F4341DA5D1B1EAE06C7D",
            "9B2F2F6D9C5628A7844163D015BE86344082AA88D95E2F9D",
            "FFFFFFFFFFFFFFFFFFFFFFFE26F2FC170F69466A74DEFD8D",
            1, nameSplitPattern);
        add("secp192r1 [NIST P-192, X9.62 prime192v1]", "1.2.840.10045.3.1.1", PD,
            "FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFFFFFFFFFFFF",
            "FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFFFFFFFFFFFC",
            "64210519E59C80E70FA7E9AB72243049FEB8DEECC146B9B1",
            "188DA80EB03090F67CBF20EB43A18800F4FF0AFD82FF1012",
            "07192B95FFC8DA78631011ED6B24CDD573F977A11E794811",
            "FFFFFFFFFFFFFFFFFFFFFFFF99DEF836146BC9B1B4D22831",
            1, nameSplitPattern);
        add("secp224k1", "1.3.132.0.32", P,
            "FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFE56D",
            "00000000000000000000000000000000000000000000000000000000",
            "00000000000000000000000000000000000000000000000000000005",
            "A1455B334DF099DF30FC28A169A467E9E47075A90F7E650EB6B7A45C",
            "7E089FED7FBA344282CAFBD6F7E319F7C0B0BD59E2CA4BDB556D61A5",
            "010000000000000000000000000001DCE8D2EC6184CAF0A971769FB1F7",
            1, nameSplitPattern);
        add("secp224r1 [NIST P-224]", "1.3.132.0.33", PD,
            "FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF000000000000000000000001",
            "FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFFFFFFFFFFFFFFFFFFFE",
            "B4050A850C04B3ABF54132565044B0B7D7BFD8BA270B39432355FFB4",
            "B70E0CBD6BB4BF7F321390B94A03C1D356C21122343280D6115C1D21",
            "BD376388B5F723FB4C22DFE6CD4375A05A07476444D5819985007E34",
            "FFFFFFFFFFFFFFFFFFFFFFFFFFFF16A2E0B8F03E13DD29455C5C2A3D",
            1, nameSplitPattern);
        add("secp256k1", "1.3.132.0.10", P,
            "FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F",
            "0000000000000000000000000000000000000000000000000000000000000000",
            "0000000000000000000000000000000000000000000000000000000000000007",
            "79BE667EF9DCBBAC55A06295CE870B07029BFCDB2DCE28D959F2815B16F81798",
            "483ADA7726A3C4655DA4FBFC0E1108A8FD17B448A68554199C47D08FFB10D4B8",
            "FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141",
            1, nameSplitPattern);
        add("secp256r1 [NIST P-256, X9.62 prime256v1]", "1.2.840.10045.3.1.7", PD,
            "FFFFFFFF00000001000000000000000000000000FFFFFFFFFFFFFFFFFFFFFFFF",
            "FFFFFFFF00000001000000000000000000000000FFFFFFFFFFFFFFFFFFFFFFFC",
            "5AC635D8AA3A93E7B3EBBD55769886BC651D06B0CC53B0F63BCE3C3E27D2604B",
            "6B17D1F2E12C4247F8BCE6E563A440F277037D812DEB33A0F4A13945D898C296",
            "4FE342E2FE1A7F9B8EE7EB4A7C0F9E162BCE33576B315ECECBB6406837BF51F5",
            "FFFFFFFF00000000FFFFFFFFFFFFFFFFBCE6FAADA7179E84F3B9CAC2FC632551",
            1, nameSplitPattern);
        add("secp384r1 [NIST P-384]", "1.3.132.0.34", PD,
            "FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFFFF0000000000000000FFFFFFFF",
            "FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFFFF0000000000000000FFFFFFFC",
            "B3312FA7E23EE7E4988E056BE3F82D19181D9C6EFE8141120314088F5013875AC656398D8A2ED19D2A85C8EDD3EC2AEF",
            "AA87CA22BE8B05378EB1C71EF320AD746E1D3B628BA79B9859F741E082542A385502F25DBF55296C3A545E3872760AB7",
            "3617DE4A96262C6F5D9E98BF9292DC29F8F41DBD289A147CE9DA3113B5F0B8C00A60B1CE1D7E819D7A431D7C90EA0E5F",
            "FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFC7634D81F4372DDF581A0DB248B0A77AECEC196ACCC52973",
            1, nameSplitPattern);
        add("secp521r1 [NIST P-521]", "1.3.132.0.35", PD,
            "01FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF",
            "01FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFC",
            "0051953EB9618E1C9A1F929A21A0B68540EEA2DA725B99B315F3B8B489918EF109E156193951EC7E937B1652C0BD3BB1BF073573DF883D2C34F1EF451FD46B503F00",
            "00C6858E06B70404E9CD9E3ECB662395B4429C648139053FB521F828AF606B4D3DBAA14B5E77EFE75928FE1DC127A2FFA8DE3348B3C1856A429BF97E7E31C2E5BD66",
            "011839296A789A3BC0045C8A5FB42C7D1BD998F54449579B446817AFBD17273E662C97EE72995EF42640C550B9013FAD0761353C7086A272C24088BE94769FD16650",
            "01FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFA51868783BF2F966B7FCC0148F709A5D03BB5C9B8899C47AEBB6FB71E91386409",
            1, nameSplitPattern);
        /* ANSI X9.62 prime curves */
        add("X9.62 prime192v2", "1.2.840.10045.3.1.2", P,
            "FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFFFFFFFFFFFF",
            "FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFFFFFFFFFFFC",
            "CC22D6DFB95C6B25E49C0D6364A4E5980C393AA21668D953",
            "EEA2BAE7E1497842F2DE7769CFE9C989C072AD696F48034A",
            "6574D11D69B6EC7A672BB82A083DF2F2B0847DE970B2DE15",
            "FFFFFFFFFFFFFFFFFFFFFFFE5FB1A724DC80418648D8DD31",
            1, nameSplitPattern);
        add("X9.62 prime192v3", "1.2.840.10045.3.1.3", P,
            "FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFFFFFFFFFFFF",
            "FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFFFFFFFFFFFC",
            "22123DC2395A05CAA7423DAECCC94760A7D462256BD56916",
            "7D29778100C65A1DA1783716588DCE2B8B4AEE8E228F1896",
            "38A90F22637337334B49DCB66A6DC8F9978ACA7648A943B0",
            "FFFFFFFFFFFFFFFFFFFFFFFF7A62D031C83F4294F640EC13",
            1, nameSplitPattern);
        add("X9.62 prime239v1", "1.2.840.10045.3.1.4", P,
            "7FFFFFFFFFFFFFFFFFFFFFFF7FFFFFFFFFFF8000000000007FFFFFFFFFFF",
            "7FFFFFFFFFFFFFFFFFFFFFFF7FFFFFFFFFFF8000000000007FFFFFFFFFFC",
            "6B016C3BDCF18941D0D654921475CA71A9DB2FB27D1D37796185C2942C0A",
            "0FFA963CDCA8816CCC33B8642BEDF905C3D358573D3F27FBBD3B3CB9AAAF",
            "7DEBE8E4E90A5DAE6E4054CA530BA04654B36818CE226B39FCCB7B02F1AE",
            "7FFFFFFFFFFFFFFFFFFFFFFF7FFFFF9E5E9A9F5D9071FBD1522688909D0B",
            1, nameSplitPattern);
        add("X9.62 prime239v2", "1.2.840.10045.3.1.5", P,
            "7FFFFFFFFFFFFFFFFFFFFFFF7FFFFFFFFFFF8000000000007FFFFFFFFFFF",
            "7FFFFFFFFFFFFFFFFFFFFFFF7FFFFFFFFFFF8000000000007FFFFFFFFFFC",
            "617FAB6832576CBBFED50D99F0249C3FEE58B94BA0038C7AE84C8C832F2C",
            "38AF09D98727705120C921BB5E9E26296A3CDCF2F35757A0EAFD87B830E7",
            "5B0125E4DBEA0EC7206DA0FC01D9B081329FB555DE6EF460237DFF8BE4BA",
            "7FFFFFFFFFFFFFFFFFFFFFFF800000CFA7E8594377D414C03821BC582063",
            1, nameSplitPattern);
        add("X9.62 prime239v3", "1.2.840.10045.3.1.6", P,
            "7FFFFFFFFFFFFFFFFFFFFFFF7FFFFFFFFFFF8000000000007FFFFFFFFFFF",
            "7FFFFFFFFFFFFFFFFFFFFFFF7FFFFFFFFFFF8000000000007FFFFFFFFFFC",
            "255705FA2A306654B1F4CB03D6A750A30C250102D4988717D9BA15AB6D3E",
            "6768AE8E18BB92CFCF005C949AA2C6D94853D0E660BBF854B1C9505FE95A",
            "1607E6898F390C06BC1D552BAD226F3B6FCFE48B6E818499AF18E3ED6CF3",
            "7FFFFFFFFFFFFFFFFFFFFFFF7FFFFF975DEB41B3A6057C3C432146526551",
            1, nameSplitPattern);
        /* SEC2 binary curves */
        add("sect113r1", "1.3.132.0.4", B,
            "020000000000000000000000000201",
            "003088250CA6E7C7FE649CE85820F7",
            "00E8BEE4D3E2260744188BE0E9C723",
            "009D73616F35F4AB1407D73562C10F",
            "00A52830277958EE84D1315ED31886",
            "0100000000000000D9CCEC8A39E56F",
            2, nameSplitPattern);
        add("sect113r2", "1.3.132.0.5", B,
            "020000000000000000000000000201",
            "00689918DBEC7E5A0DD6DFC0AA55C7",
            "0095E9A9EC9B297BD4BF36E059184F",
            "01A57A6A7B26CA5EF52FCDB8164797",
            "00B3ADC94ED1FE674C06E695BABA1D",
            "010000000000000108789B2496AF93",
            2, nameSplitPattern);
        add("sect131r1", "1.3.132.0.22", B,
            "080000000000000000000000000000010D",
            "07A11B09A76B562144418FF3FF8C2570B8",
            "0217C05610884B63B9C6C7291678F9D341",
            "0081BAF91FDF9833C40F9C181343638399",
            "078C6E7EA38C001F73C8134B1B4EF9E150",
            "0400000000000000023123953A9464B54D",
            2, nameSplitPattern);
        add("sect131r2", "1.3.132.0.23", B,
            "080000000000000000000000000000010D",
            "03E5A88919D7CAFCBF415F07C2176573B2",
            "04B8266A46C55657AC734CE38F018F2192",
            "0356DCD8F2F95031AD652D23951BB366A8",
            "0648F06D867940A5366D9E265DE9EB240F",
            "0400000000000000016954A233049BA98F",
            2, nameSplitPattern);
        add("sect163k1 [NIST K-163]", "1.3.132.0.1", BD,
            "0800000000000000000000000000000000000000C9",
            "000000000000000000000000000000000000000001",
            "000000000000000000000000000000000000000001",
            "02FE13C0537BBC11ACAA07D793DE4E6D5E5C94EEE8",
            "0289070FB05D38FF58321F2E800536D538CCDAA3D9",
            "04000000000000000000020108A2E0CC0D99F8A5EF",
            2, nameSplitPattern);
        add("sect163r1", "1.3.132.0.2", B,
            "0800000000000000000000000000000000000000C9",
            "07B6882CAAEFA84F9554FF8428BD88E246D2782AE2",
            "0713612DCDDCB40AAB946BDA29CA91F73AF958AFD9",
            "0369979697AB43897789566789567F787A7876A654",
            "00435EDB42EFAFB2989D51FEFCE3C80988F41FF883",
            "03FFFFFFFFFFFFFFFFFFFF48AAB689C29CA710279B",
            2, nameSplitPattern);
        add("sect163r2 [NIST B-163]", "1.3.132.0.15", BD,
            "0800000000000000000000000000000000000000C9",
            "000000000000000000000000000000000000000001",
            "020A601907B8C953CA1481EB10512F78744A3205FD",
            "03F0EBA16286A2D57EA0991168D4994637E8343E36",
            "00D51FBC6C71A0094FA2CDD545B11C5C0C797324F1",
            "040000000000000000000292FE77E70C12A4234C33",
            2, nameSplitPattern);
        add("sect193r1", "1.3.132.0.24", B,
            "02000000000000000000000000000000000000000000008001",
            "0017858FEB7A98975169E171F77B4087DE098AC8A911DF7B01",
            "00FDFB49BFE6C3A89FACADAA7A1E5BBC7CC1C2E5D831478814",
            "01F481BC5F0FF84A74AD6CDF6FDEF4BF6179625372D8C0C5E1",
            "0025E399F2903712CCF3EA9E3A1AD17FB0B3201B6AF7CE1B05",
            "01000000000000000000000000C7F34A778F443ACC920EBA49",
            2, nameSplitPattern);
        add("sect193r2", "1.3.132.0.25", B,
            "02000000000000000000000000000000000000000000008001",
            "0163F35A5137C2CE3EA6ED8667190B0BC43ECD69977702709B",
            "00C9BB9E8927D4D64C377E2AB2856A5B16E3EFB7F61D4316AE",
            "00D9B67D192E0367C803F39E1A7E82CA14A651350AAE617E8F",
            "01CE94335607C304AC29E7DEFBD9CA01F596F927224CDECF6C",
            "010000000000000000000000015AAB561B005413CCD4EE99D5",
            2, nameSplitPattern);
        add("sect233k1 [NIST K-233]", "1.3.132.0.26", BD,
            "020000000000000000000000000000000000000004000000000000000001",
            "000000000000000000000000000000000000000000000000000000000000",
            "000000000000000000000000000000000000000000000000000000000001",
            "017232BA853A7E731AF129F22FF4149563A419C26BF50A4C9D6EEFAD6126",
            "01DB537DECE819B7F70F555A67C427A8CD9BF18AEB9B56E0C11056FAE6A3",
            "008000000000000000000000000000069D5BB915BCD46EFB1AD5F173ABDF",
            4, nameSplitPattern);
        add("sect233r1 [NIST B-233]", "1.3.132.0.27", B,
            "020000000000000000000000000000000000000004000000000000000001",
            "000000000000000000000000000000000000000000000000000000000001",
            "0066647EDE6C332C7F8C0923BB58213B333B20E9CE4281FE115F7D8F90AD",
            "00FAC9DFCBAC8313BB2139F1BB755FEF65BC391F8B36F8F8EB7371FD558B",
            "01006A08A41903350678E58528BEBF8A0BEFF867A7CA36716F7E01F81052",
            "01000000000000000000000000000013E974E72F8A6922031D2603CFE0D7",
            2, nameSplitPattern);
        add("sect239k1", "1.3.132.0.3", B,
            "800000000000000000004000000000000000000000000000000000000001",
            "000000000000000000000000000000000000000000000000000000000000",
            "000000000000000000000000000000000000000000000000000000000001",
            "29A0B6A887A983E9730988A68727A8B2D126C44CC2CC7B2A6555193035DC",
            "76310804F12E549BDB011C103089E73510ACB275FC312A5DC6B76553F0CA",
            "2000000000000000000000000000005A79FEC67CB6E91F1C1DA800E478A5",
            4, nameSplitPattern);
        add("sect283k1 [NIST K-283]", "1.3.132.0.16", BD,
            "0800000000000000000000000000000000000000000000000000000000000000000010A1",
            "000000000000000000000000000000000000000000000000000000000000000000000000",
            "000000000000000000000000000000000000000000000000000000000000000000000001",
            "0503213F78CA44883F1A3B8162F188E553CD265F23C1567A16876913B0C2AC2458492836",
            "01CCDA380F1C9E318D90F95D07E5426FE87E45C0E8184698E45962364E34116177DD2259",
            "01FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFE9AE2ED07577265DFF7F94451E061E163C61",
            4, nameSplitPattern);
        add("sect283r1 [NIST B-283]", "1.3.132.0.17", B,
            "0800000000000000000000000000000000000000000000000000000000000000000010A1",
            "000000000000000000000000000000000000000000000000000000000000000000000001",
            "027B680AC8B8596DA5A4AF8A19A0303FCA97FD7645309FA2A581485AF6263E313B79A2F5",
            "05F939258DB7DD90E1934F8C70B0DFEC2EED25B8557EAC9C80E2E198F8CDBECD86B12053",
            "03676854FE24141CB98FE6D4B20D02B4516FF702350EDDB0826779C813F0DF45BE8112F4",
            "03FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEF90399660FC938A90165B042A7CEFADB307",
            2, nameSplitPattern);
        add("sect409k1 [NIST K-409]", "1.3.132.0.36", BD,
            "02000000000000000000000000000000000000000000000000000000000000000000000000000000008000000000000000000001",
            "00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
            "00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001",
            "0060F05F658F49C1AD3AB1890F7184210EFD0987E307C84C27ACCFB8F9F67CC2C460189EB5AAAA62EE222EB1B35540CFE9023746",
            "01E369050B7C4E42ACBA1DACBF04299C3460782F918EA427E6325165E9EA10E3DA5F6C42E9C55215AA9CA27A5863EC48D8E0286B",
            "007FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFE5F83B2D4EA20400EC4557D5ED3E3E7CA5B4B5C83B8E01E5FCF",
            4, nameSplitPattern);
        add("sect409r1 [NIST B-409]", "1.3.132.0.37", B,
            "02000000000000000000000000000000000000000000000000000000000000000000000000000000008000000000000000000001",
            "00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001",
            "0021A5C2C8EE9FEB5C4B9A753B7B476B7FD6422EF1F3DD674761FA99D6AC27C8A9A197B272822F6CD57A55AA4F50AE317B13545F",
            "015D4860D088DDB3496B0C6064756260441CDE4AF1771D4DB01FFE5B34E59703DC255A868A1180515603AEAB60794E54BB7996A7",
            "0061B1CFAB6BE5F32BBFA78324ED106A7636B9C5A7BD198D0158AA4F5488D08F38514F1FDF4B4F40D2181B3681C364BA0273C706",
            "010000000000000000000000000000000000000000000000000001E2AAD6A612F33307BE5FA47C3C9E052F838164CD37D9A21173",
            2, nameSplitPattern);
        add("sect571k1 [NIST K-571]", "1.3.132.0.38", BD,
            "080000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000425",
            "000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
            "000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001",
            "026EB7A859923FBC82189631F8103FE4AC9CA2970012D5D46024804801841CA44370958493B205E647DA304DB4CEB08CBBD1BA39494776FB988B47174DCA88C7E2945283A01C8972",
            "0349DC807F4FBF374F4AEADE3BCA95314DD58CEC9F307A54FFC61EFC006D8A2C9D4979C0AC44AEA74FBEBBB9F772AEDCB620B01A7BA7AF1B320430C8591984F601CD4C143EF1C7A3",
            "020000000000000000000000000000000000000000000000000000000000000000000000131850E1F19A63E4B391A8DB917F4138B630D84BE5D639381E91DEB45CFE778F637C1001",
            4, nameSplitPattern);
        add("sect571r1 [NIST B-571]", "1.3.132.0.39", B,
            "080000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000425",
            "000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001",
            "02F40E7E2221F295DE297117B7F3D62F5C6A97FFCB8CEFF1CD6BA8CE4A9A18AD84FFABBD8EFA59332BE7AD6756A66E294AFD185A78FF12AA520E4DE739BACA0C7FFEFF7F2955727A",
            "0303001D34B856296C16C0D40D3CD7750A93D1D2955FA80AA5F40FC8DB7B2ABDBDE53950F4C0D293CDD711A35B67FB1499AE60038614F1394ABFA3B4C850D927E1E7769C8EEC2D19",
            "037BF27342DA639B6DCCFFFEB73D69D78C6C27A6009CBBCA1980F8533921E8A684423E43BAB08A576291AF8F461BB2A8B3531D2F0485C19B16E2F1516E23DD3C1A4827AF1B8AC15B",
            "03FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFE661CE18FF55987308059B186823851EC7DD9CA1161DE93D5174D66E8382E9BB2FE84E47",
            2, nameSplitPattern);
        /* ANSI X9.62 binary curves */
        add("X9.62 c2tnb191v1", "1.2.840.10045.3.0.5", B,
            "800000000000000000000000000000000000000000000201",
            "2866537B676752636A68F56554E12640276B649EF7526267",
            "2E45EF571F00786F67B0081B9495A3D95462F5DE0AA185EC",
            "36B3DAF8A23206F9C4F299D7B21A9C369137F2C84AE1AA0D",
            "765BE73433B3F95E332932E70EA245CA2418EA0EF98018FB",
            "40000000000000000000000004A20E90C39067C893BBB9A5",
            2, nameSplitPattern);
        add("X9.62 c2tnb191v2", "1.2.840.10045.3.0.6", B,
            "800000000000000000000000000000000000000000000201",
            "401028774D7777C7B7666D1366EA432071274F89FF01E718",
            "0620048D28BCBD03B6249C99182B7C8CD19700C362C46A01",
            "3809B2B7CC1B28CC5A87926AAD83FD28789E81E2C9E3BF10",
            "17434386626D14F3DBF01760D9213A3E1CF37AEC437D668A",
            "20000000000000000000000050508CB89F652824E06B8173",
            4, nameSplitPattern);
        add("X9.62 c2tnb191v3", "1.2.840.10045.3.0.7", B,
            "800000000000000000000000000000000000000000000201",
            "6C01074756099122221056911C77D77E77A777E7E7E77FCB",
            "71FE1AF926CF847989EFEF8DB459F66394D90F32AD3F15E8",
            "375D4CE24FDE434489DE8746E71786015009E66E38A926DD",
            "545A39176196575D985999366E6AD34CE0A77CD7127B06BE",
            "155555555555555555555555610C0B196812BFB6288A3EA3",
            6, nameSplitPattern);
        add("X9.62 c2tnb239v1", "1.2.840.10045.3.0.11", B,
            "800000000000000000000000000000000000000000000000001000000001",
            "32010857077C5431123A46B808906756F543423E8D27877578125778AC76",
            "790408F2EEDAF392B012EDEFB3392F30F4327C0CA3F31FC383C422AA8C16",
            "57927098FA932E7C0A96D3FD5B706EF7E5F5C156E16B7E7C86038552E91D",
            "61D8EE5077C33FECF6F1A16B268DE469C3C7744EA9A971649FC7A9616305",
            "2000000000000000000000000000000F4D42FFE1492A4993F1CAD666E447",
            4, nameSplitPattern);
        add("X9.62 c2tnb239v2", "1.2.840.10045.3.0.12", B,
            "800000000000000000000000000000000000000000000000001000000001",
            "4230017757A767FAE42398569B746325D45313AF0766266479B75654E65F",
            "5037EA654196CFF0CD82B2C14A2FCF2E3FF8775285B545722F03EACDB74B",
            "28F9D04E900069C8DC47A08534FE76D2B900B7D7EF31F5709F200C4CA205",
            "5667334C45AFF3B5A03BAD9DD75E2C71A99362567D5453F7FA6E227EC833",
            "1555555555555555555555555555553C6F2885259C31E3FCDF154624522D",
            6, nameSplitPattern);
        add("X9.62 c2tnb239v3", "1.2.840.10045.3.0.13", B,
            "800000000000000000000000000000000000000000000000001000000001",
            "01238774666A67766D6676F778E676B66999176666E687666D8766C66A9F",
            "6A941977BA9F6A435199ACFC51067ED587F519C5ECB541B8E44111DE1D40",
            "70F6E9D04D289C4E89913CE3530BFDE903977D42B146D539BF1BDE4E9C92",
            "2E5A0EAF6E5E1305B9004DCE5C0ED7FE59A35608F33837C816D80B79F461",
            "0CCCCCCCCCCCCCCCCCCCCCCCCCCCCCAC4912D2D9DF903EF9888B8A0E4CFF",
            0xA, nameSplitPattern);
        add("X9.62 c2tnb359v1", "1.2.840.10045.3.0.18", B,
            "800000000000000000000000000000000000000000000000000000000000000000000000100000000000000001",
            "5667676A654B20754F356EA92017D946567C46675556F19556A04616B567D223A5E05656FB549016A96656A557",
            "2472E2D0197C49363F1FE7F5B6DB075D52B6947D135D8CA445805D39BC345626089687742B6329E70680231988",
            "3C258EF3047767E7EDE0F1FDAA79DAEE3841366A132E163ACED4ED2401DF9C6BDCDE98E8E707C07A2239B1B097",
            "53D7E08529547048121E9C95F3791DD804963948F34FAE7BF44EA82365DC7868FE57E4AE2DE211305A407104BD",
            "01AF286BCA1AF286BCA1AF286BCA1AF286BCA1AF286BC9FB8F6B85C556892C20A7EB964FE7719E74F490758D3B",
            0x4C, nameSplitPattern);
        add("X9.62 c2tnb431r1", "1.2.840.10045.3.0.20", B,
            "800000000000000000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000001",
            "1A827EF00DD6FC0E234CAF046C6A5D8A85395B236CC4AD2CF32A0CADBDC9DDF620B0EB9906D0957F6C6FEACD615468DF104DE296CD8F",
            "10D9B4A3D9047D8B154359ABFB1B7F5485B04CEB868237DDC9DEDA982A679A5A919B626D4E50A8DD731B107A9962381FB5D807BF2618",
            "120FC05D3C67A99DE161D2F4092622FECA701BE4F50F4758714E8A87BBF2A658EF8C21E7C5EFE965361F6C2999C0C247B0DBD70CE6B7",
            "20D0AF8903A96F8D5FA2C255745D3C451B302C9346D9B7E485E7BCE41F6B591F3E8F6ADDCBB0BC4C2F947A7DE1A89B625D6A598B3760",
            "0340340340340340340340340340340340340340340340340340340323C313FAB50589703B5EC68D3587FEC60D161CC149C1AD4A91",
            0x2760, nameSplitPattern);
        /* ANSI X9.62 binary curves from the 1998 standard but forbidden
         * in the 2005 version of the standard.
         * We don't register them but leave them here for the time being in
         * case we need to support them after all.
         */
        /*
        add("X9.62 c2pnb163v1", "1.2.840.10045.3.0.1", B,
            "080000000000000000000000000000000000000107",
            "072546B5435234A422E0789675F432C89435DE5242",
            "00C9517D06D5240D3CFF38C74B20B6CD4D6F9DD4D9",
            "07AF69989546103D79329FCC3D74880F33BBE803CB",
            "01EC23211B5966ADEA1D3F87F7EA5848AEF0B7CA9F",
            "0400000000000000000001E60FC8821CC74DAEAFC1",
            2, nameSplitPattern);
        add("X9.62 c2pnb163v2", "1.2.840.10045.3.0.2", B,
            "080000000000000000000000000000000000000107",
            "0108B39E77C4B108BED981ED0E890E117C511CF072",
            "0667ACEB38AF4E488C407433FFAE4F1C811638DF20",
            "0024266E4EB5106D0A964D92C4860E2671DB9B6CC5",
            "079F684DDF6684C5CD258B3890021B2386DFD19FC5",
            "03FFFFFFFFFFFFFFFFFFFDF64DE1151ADBB78F10A7",
            2, nameSplitPattern);
        add("X9.62 c2pnb163v3", "1.2.840.10045.3.0.3", B,
            "080000000000000000000000000000000000000107",
            "07A526C63D3E25A256A007699F5447E32AE456B50E",
            "03F7061798EB99E238FD6F1BF95B48FEEB4854252B",
            "02F9F87B7C574D0BDECF8A22E6524775F98CDEBDCB",
            "05B935590C155E17EA48EB3FF3718B893DF59A05D0",
            "03FFFFFFFFFFFFFFFFFFFE1AEE140F110AFF961309",
            2, nameSplitPattern);
        add("X9.62 c2pnb176w1", "1.2.840.10045.3.0.4", B,
            "0100000000000000000000000000000000080000000007",
            "E4E6DB2995065C407D9D39B8D0967B96704BA8E9C90B",
            "5DDA470ABE6414DE8EC133AE28E9BBD7FCEC0AE0FFF2",
            "8D16C2866798B600F9F08BB4A8E860F3298CE04A5798",
            "6FA4539C2DADDDD6BAB5167D61B436E1D92BB16A562C",
            "00010092537397ECA4F6145799D62B0A19CE06FE26AD",
            0xFF6E, nameSplitPattern);
        add("X9.62 c2pnb208w1", "1.2.840.10045.3.0.10", B,
            "010000000000000000000000000000000800000000000000000007",
            "0000000000000000000000000000000000000000000000000000",
            "C8619ED45A62E6212E1160349E2BFA844439FAFC2A3FD1638F9E",
            "89FDFBE4ABE193DF9559ECF07AC0CE78554E2784EB8C1ED1A57A",
            "0F55B51A06E78E9AC38A035FF520D8B01781BEB1A6BB08617DE3",
            "000101BAF95C9723C57B6C21DA2EFF2D5ED588BDD5717E212F9D",
            0xFE48, nameSplitPattern);
        add("X9.62 c2pnb272w1", "1.2.840.10045.3.0.16", B,
            "010000000000000000000000000000000000000000000000000000010000000000000B",
            "91A091F03B5FBA4AB2CCF49C4EDD220FB028712D42BE752B2C40094DBACDB586FB20",
            "7167EFC92BB2E3CE7C8AAAFF34E12A9C557003D7C73A6FAF003F99F6CC8482E540F7",
            "6108BABB2CEEBCF787058A056CBE0CFE622D7723A289E08A07AE13EF0D10D171DD8D",
            "10C7695716851EEF6BA7F6872E6142FBD241B830FF5EFCACECCAB05E02005DDE9D23",
            "000100FAF51354E0E39E4892DF6E319C72C8161603FA45AA7B998A167B8F1E629521",
            0xFF06, nameSplitPattern);
        add("X9.62 c2pnb304w1", "1.2.840.10045.3.0.17", B,
            "010000000000000000000000000000000000000000000000000000000000000000000000000807",
            "FD0D693149A118F651E6DCE6802085377E5F882D1B510B44160074C1288078365A0396C8E681",
            "BDDB97E555A50A908E43B01C798EA5DAA6788F1EA2794EFCF57166B8C14039601E55827340BE",
            "197B07845E9BE2D96ADB0F5F3C7F2CFFBD7A3EB8B6FEC35C7FD67F26DDF6285A644F740A2614",
            "E19FBEB76E0DA171517ECF401B50289BF014103288527A9B416A105E80260B549FDC1B92C03B",
            "000101D556572AABAC800101D556572AABAC8001022D5C91DD173F8FB561DA6899164443051D",
            0xFE2E, nameSplitPattern);
        add("X9.62 c2pnb368w1", "1.2.840.10045.3.0.19", B,
            "0100000000000000000000000000000000000000000000000000000000000000000000002000000000000000000007",
            "E0D2EE25095206F5E2A4F9ED229F1F256E79A0E2B455970D8D0D865BD94778C576D62F0AB7519CCD2A1A906AE30D",
            "FC1217D4320A90452C760A58EDCD30C8DD069B3C34453837A34ED50CB54917E1C2112D84D164F444F8F74786046A",
            "1085E2755381DCCCE3C1557AFA10C2F0C0C2825646C5B34A394CBCFA8BC16B22E7E789E927BE216F02E1FB136A5F",
            "7B3EB1BDDCBA62D5D8B2059B525797FC73822C59059C623A45FF3843CEE8F87CD1855ADAA81E2A0750B80FDA2310",
            "00010090512DA9AF72B08349D98A5DD4C7B0532ECA51CE03E2D10F3B7AC579BD87E909AE40A6F131E9CFCE5BD967",
            0xFF70, nameSplitPattern);
        */
        // Freeze the registered set; lookup(ECParameterSpec) iterates this.
        specCollection = Collections.unmodifiableCollection(oidMap.values());
    }
}
|
apache/lucene | 36,637 | lucene/suggest/src/test/org/apache/lucene/search/suggest/document/TestSuggestField.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.lucene.search.suggest.document;
import static org.apache.lucene.tests.analysis.BaseTokenStreamTestCase.assertTokenStreamContents;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.startsWith;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.CyclicBarrier;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenFilter;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.miscellaneous.ConcatenateGraphFilter;
import org.apache.lucene.analysis.tokenattributes.PayloadAttribute;
import org.apache.lucene.analysis.tokenattributes.TypeAttribute;
import org.apache.lucene.codecs.Codec;
import org.apache.lucene.codecs.FilterCodec;
import org.apache.lucene.codecs.PostingsFormat;
import org.apache.lucene.codecs.perfield.PerFieldPostingsFormat;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.IntPoint;
import org.apache.lucene.document.StoredField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.StoredFields;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.suggest.BitsProducer;
import org.apache.lucene.search.suggest.Lookup;
import org.apache.lucene.search.suggest.document.TopSuggestDocs.SuggestScoreDoc;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.OutputStreamDataOutput;
import org.apache.lucene.tests.analysis.MockAnalyzer;
import org.apache.lucene.tests.index.RandomIndexWriter;
import org.apache.lucene.tests.util.LineFileDocs;
import org.apache.lucene.tests.util.LuceneTestCase;
import org.apache.lucene.tests.util.TestUtil;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.CharsRefBuilder;
import org.hamcrest.MatcherAssert;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
public class TestSuggestField extends LuceneTestCase {
public Directory dir;
  @Before
  public void before() throws Exception {
    // Fresh directory per test; released in after().
    dir = newDirectory();
  }
  @After
  public void after() throws Exception {
    // Close the per-test directory opened in before().
    dir.close();
  }
@Test
public void testEmptySuggestion() throws Exception {
IllegalArgumentException expected =
expectThrows(
IllegalArgumentException.class, () -> new SuggestField("suggest_field", "", 3));
MatcherAssert.assertThat(expected.getMessage(), containsString("value"));
}
@Test
public void testNegativeWeight() throws Exception {
IllegalArgumentException expected =
expectThrows(
IllegalArgumentException.class, () -> new SuggestField("suggest_field", "sugg", -1));
MatcherAssert.assertThat(expected.getMessage(), containsString("weight"));
}
@Test
public void testReservedChars() throws Exception {
CharsRefBuilder charsRefBuilder = new CharsRefBuilder();
charsRefBuilder.append("sugg");
charsRefBuilder.setCharAt(2, (char) ConcatenateGraphFilter.SEP_LABEL);
IllegalArgumentException expected =
expectThrows(
IllegalArgumentException.class,
() -> new SuggestField("name", charsRefBuilder.toString(), 1));
MatcherAssert.assertThat(expected.getMessage(), containsString("[0x1f]"));
charsRefBuilder.setCharAt(2, (char) CompletionAnalyzer.HOLE_CHARACTER);
expected =
expectThrows(
IllegalArgumentException.class,
() -> new SuggestField("name", charsRefBuilder.toString(), 1));
MatcherAssert.assertThat(expected.getMessage(), containsString("[0x1e]"));
charsRefBuilder.setCharAt(2, (char) NRTSuggesterBuilder.END_BYTE);
expected =
expectThrows(
IllegalArgumentException.class,
() -> new SuggestField("name", charsRefBuilder.toString(), 1));
MatcherAssert.assertThat(expected.getMessage(), containsString("[0x0]"));
}
@Test
public void testEmpty() throws Exception {
Analyzer analyzer = new MockAnalyzer(random());
RandomIndexWriter iw =
new RandomIndexWriter(random(), dir, iwcWithSuggestField(analyzer, "suggest_field"));
DirectoryReader reader = iw.getReader();
SuggestIndexSearcher suggestIndexSearcher = new SuggestIndexSearcher(reader);
PrefixCompletionQuery query =
new PrefixCompletionQuery(analyzer, new Term("suggest_field", "ab"));
TopSuggestDocs lookupDocs = suggestIndexSearcher.suggest(query, 3, false);
assertEquals(0L, lookupDocs.totalHits.value());
reader.close();
iw.close();
}
@Test
public void testTokenStream() throws Exception {
Analyzer analyzer = new MockAnalyzer(random());
SuggestField suggestField = new SuggestField("field", "input", 1);
BytesRef surfaceForm = new BytesRef("input");
ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
try (OutputStreamDataOutput output = new OutputStreamDataOutput(byteArrayOutputStream)) {
output.writeVInt(surfaceForm.length);
output.writeBytes(surfaceForm.bytes, surfaceForm.offset, surfaceForm.length);
output.writeVInt(1 + 1);
output.writeByte(SuggestField.TYPE);
}
BytesRef payload = new BytesRef(byteArrayOutputStream.toByteArray());
TokenStream stream = new PayloadAttrToTypeAttrFilter(suggestField.tokenStream(analyzer, null));
assertTokenStreamContents(
stream,
new String[] {"input"},
null,
null,
new String[] {payload.utf8ToString()},
new int[] {1},
null,
null);
CompletionAnalyzer completionAnalyzer = new CompletionAnalyzer(analyzer);
stream = new PayloadAttrToTypeAttrFilter(suggestField.tokenStream(completionAnalyzer, null));
assertTokenStreamContents(
stream,
new String[] {"input"},
null,
null,
new String[] {payload.utf8ToString()},
new int[] {1},
null,
null);
}
@Test
public void testDupSuggestFieldValues() throws Exception {
Analyzer analyzer = new MockAnalyzer(random());
RandomIndexWriter iw =
new RandomIndexWriter(random(), dir, iwcWithSuggestField(analyzer, "suggest_field"));
final int num = Math.min(1000, atLeast(100));
int[] weights = new int[num];
for (int i = 0; i < num; i++) {
Document document = new Document();
weights[i] = random().nextInt(Integer.MAX_VALUE);
document.add(new SuggestField("suggest_field", "abc", weights[i]));
iw.addDocument(document);
if (usually()) {
iw.commit();
}
}
DirectoryReader reader = iw.getReader();
Entry[] expectedEntries = new Entry[num];
Arrays.sort(weights);
for (int i = 1; i <= num; i++) {
expectedEntries[i - 1] = new Entry("abc", weights[num - i]);
}
SuggestIndexSearcher suggestIndexSearcher = new SuggestIndexSearcher(reader);
PrefixCompletionQuery query =
new PrefixCompletionQuery(analyzer, new Term("suggest_field", "abc"));
TopSuggestDocs lookupDocs = suggestIndexSearcher.suggest(query, num, false);
assertSuggestions(lookupDocs, expectedEntries);
reader.close();
iw.close();
}
public void testDeduplication() throws Exception {
Analyzer analyzer = new MockAnalyzer(random());
RandomIndexWriter iw =
new RandomIndexWriter(random(), dir, iwcWithSuggestField(analyzer, "suggest_field"));
final int num = TestUtil.nextInt(random(), 2, 20);
int[] weights = new int[num];
int bestABCWeight = Integer.MIN_VALUE;
int bestABDWeight = Integer.MIN_VALUE;
for (int i = 0; i < num; i++) {
Document document = new Document();
weights[i] = random().nextInt(Integer.MAX_VALUE);
String suggestValue;
boolean doABC;
if (i == 0) {
doABC = true;
} else if (i == 1) {
doABC = false;
} else {
doABC = random().nextBoolean();
}
if (doABC) {
suggestValue = "abc";
bestABCWeight = Math.max(bestABCWeight, weights[i]);
} else {
suggestValue = "abd";
bestABDWeight = Math.max(bestABDWeight, weights[i]);
}
document.add(new SuggestField("suggest_field", suggestValue, weights[i]));
iw.addDocument(document);
if (usually()) {
iw.commit();
}
}
DirectoryReader reader = iw.getReader();
Entry[] expectedEntries = new Entry[2];
if (bestABDWeight > bestABCWeight) {
expectedEntries[0] = new Entry("abd", bestABDWeight);
expectedEntries[1] = new Entry("abc", bestABCWeight);
} else {
expectedEntries[0] = new Entry("abc", bestABCWeight);
expectedEntries[1] = new Entry("abd", bestABDWeight);
}
SuggestIndexSearcher suggestIndexSearcher = new SuggestIndexSearcher(reader);
PrefixCompletionQuery query =
new PrefixCompletionQuery(analyzer, new Term("suggest_field", "a"));
TopSuggestDocsCollector collector = new TopSuggestDocsCollector(2, true);
suggestIndexSearcher.suggest(query, collector);
TopSuggestDocs lookupDocs = collector.get();
assertSuggestions(lookupDocs, expectedEntries);
reader.close();
iw.close();
}
public void testExtremeDeduplication() throws Exception {
Analyzer analyzer = new MockAnalyzer(random());
RandomIndexWriter iw =
new RandomIndexWriter(random(), dir, iwcWithSuggestField(analyzer, "suggest_field"));
final int num = atLeast(500);
int bestWeight = Integer.MIN_VALUE;
for (int i = 0; i < num; i++) {
Document document = new Document();
int weight = TestUtil.nextInt(random(), 10, 100);
bestWeight = Math.max(weight, bestWeight);
document.add(new SuggestField("suggest_field", "abc", weight));
iw.addDocument(document);
if (rarely()) {
iw.commit();
}
}
Document document = new Document();
document.add(new SuggestField("suggest_field", "abd", 7));
iw.addDocument(document);
if (random().nextBoolean()) {
iw.forceMerge(1);
}
DirectoryReader reader = iw.getReader();
Entry[] expectedEntries = new Entry[2];
expectedEntries[0] = new Entry("abc", bestWeight);
expectedEntries[1] = new Entry("abd", 7);
SuggestIndexSearcher suggestIndexSearcher = new SuggestIndexSearcher(reader);
PrefixCompletionQuery query =
new PrefixCompletionQuery(analyzer, new Term("suggest_field", "a"));
TopSuggestDocsCollector collector = new TopSuggestDocsCollector(2, true);
suggestIndexSearcher.suggest(query, collector);
TopSuggestDocs lookupDocs = collector.get();
assertSuggestions(lookupDocs, expectedEntries);
reader.close();
iw.close();
}
private static String randomSimpleString(int numDigits, int maxLen) {
final int len = TestUtil.nextInt(random(), 1, maxLen);
final char[] chars = new char[len];
for (int j = 0; j < len; j++) {
chars[j] = (char) ('a' + random().nextInt(numDigits));
}
return new String(chars);
}
public void testRandom() throws Exception {
int numDigits = TestUtil.nextInt(random(), 1, 6);
Set<String> keys = new HashSet<>();
int keyCount = TestUtil.nextInt(random(), 1, 20);
if (numDigits == 1) {
keyCount = Math.min(9, keyCount);
}
while (keys.size() < keyCount) {
keys.add(randomSimpleString(numDigits, 10));
}
List<String> keysList = new ArrayList<>(keys);
Analyzer analyzer = new MockAnalyzer(random());
IndexWriterConfig iwc = iwcWithSuggestField(analyzer, "suggest_field");
// we rely on docID order:
iwc.setMergePolicy(newLogMergePolicy());
RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc);
int docCount = TestUtil.nextInt(random(), 1, 200);
Entry[] docs = new Entry[docCount];
for (int i = 0; i < docCount; i++) {
int weight = random().nextInt(40);
String key = keysList.get(random().nextInt(keyCount));
// System.out.println("KEY: " + key);
docs[i] = new Entry(key, null, weight, i);
Document doc = new Document();
doc.add(new SuggestField("suggest_field", key, weight));
iw.addDocument(doc);
if (usually()) {
iw.commit();
}
}
DirectoryReader reader = iw.getReader();
SuggestIndexSearcher searcher = new SuggestIndexSearcher(reader);
int iters = atLeast(200);
for (int iter = 0; iter < iters; iter++) {
String prefix = randomSimpleString(numDigits, 2);
if (VERBOSE) {
System.out.println("\nTEST: prefix=" + prefix);
}
// slow but hopefully correct suggester:
List<Entry> expected = new ArrayList<>();
for (Entry doc : docs) {
if (doc.output.startsWith(prefix)) {
expected.add(doc);
}
}
expected.sort(
(a, b) -> {
// sort by higher score:
int cmp = Float.compare(b.value, a.value);
if (cmp == 0) {
// tie-break by completion key
cmp = Lookup.CHARSEQUENCE_COMPARATOR.compare(a.output, b.output);
if (cmp == 0) {
// prefer smaller doc id, in case of a tie
cmp = Integer.compare(a.id, b.id);
}
}
return cmp;
});
boolean dedup = random().nextBoolean();
if (dedup) {
List<Entry> deduped = new ArrayList<>();
Set<String> seen = new HashSet<>();
for (Entry entry : expected) {
if (seen.contains(entry.output) == false) {
seen.add(entry.output);
deduped.add(entry);
}
}
expected = deduped;
}
// TODO: re-enable this, except something is buggy about tie breaks at the topN threshold now:
// int topN = TestUtil.nextInt(random(), 1, docCount+10);
int topN = docCount;
if (VERBOSE) {
if (dedup) {
System.out.println(" expected (dedup'd) topN=" + topN + ":");
} else {
System.out.println(" expected topN=" + topN + ":");
}
for (int i = 0; i < expected.size(); i++) {
if (i >= topN) {
System.out.println(" leftover: " + i + ": " + expected.get(i));
} else {
System.out.println(" " + i + ": " + expected.get(i));
}
}
}
expected = expected.subList(0, Math.min(topN, expected.size()));
PrefixCompletionQuery query =
new PrefixCompletionQuery(analyzer, new Term("suggest_field", prefix));
TopSuggestDocsCollector collector = new TopSuggestDocsCollector(topN, dedup);
searcher.suggest(query, collector);
TopSuggestDocs actual = collector.get();
if (VERBOSE) {
System.out.println(" actual:");
SuggestScoreDoc[] suggestScoreDocs = (SuggestScoreDoc[]) actual.scoreDocs;
for (int i = 0; i < suggestScoreDocs.length; i++) {
System.out.println(" " + i + ": " + suggestScoreDocs[i]);
}
}
assertSuggestions(actual, expected.toArray(new Entry[expected.size()]));
}
reader.close();
iw.close();
}
@Test
public void testNRTDeletedDocFiltering() throws Exception {
Analyzer analyzer = new MockAnalyzer(random());
// using IndexWriter instead of RandomIndexWriter
IndexWriter iw = new IndexWriter(dir, iwcWithSuggestField(analyzer, "suggest_field"));
int num = Math.min(1000, atLeast(10));
int numLive = 0;
List<Entry> expectedEntries = new ArrayList<>();
for (int i = 0; i < num; i++) {
Document document = new Document();
document.add(new SuggestField("suggest_field", "abc_" + i, num - i));
if (i % 2 == 0) {
document.add(newStringField("str_field", "delete", Field.Store.YES));
} else {
numLive++;
expectedEntries.add(new Entry("abc_" + i, num - i));
document.add(newStringField("str_field", "no_delete", Field.Store.YES));
}
iw.addDocument(document);
if (usually()) {
iw.commit();
}
}
iw.deleteDocuments(new Term("str_field", "delete"));
DirectoryReader reader = DirectoryReader.open(iw);
SuggestIndexSearcher indexSearcher = new SuggestIndexSearcher(reader);
PrefixCompletionQuery query =
new PrefixCompletionQuery(analyzer, new Term("suggest_field", "abc_"));
TopSuggestDocs suggest = indexSearcher.suggest(query, numLive, false);
assertSuggestions(suggest, expectedEntries.toArray(new Entry[expectedEntries.size()]));
reader.close();
iw.close();
}
@Test
public void testSuggestOnAllFilteredDocuments() throws Exception {
Analyzer analyzer = new MockAnalyzer(random());
RandomIndexWriter iw =
new RandomIndexWriter(random(), dir, iwcWithSuggestField(analyzer, "suggest_field"));
int num = Math.min(1000, atLeast(10));
for (int i = 0; i < num; i++) {
Document document = new Document();
document.add(new SuggestField("suggest_field", "abc_" + i, i));
document.add(newStringField("str_fld", "deleted", Field.Store.NO));
iw.addDocument(document);
if (usually()) {
iw.commit();
}
}
BitsProducer filter =
new BitsProducer() {
@Override
public Bits getBits(LeafReaderContext context) throws IOException {
return new Bits.MatchNoBits(context.reader().maxDoc());
}
};
DirectoryReader reader = iw.getReader();
SuggestIndexSearcher indexSearcher = new SuggestIndexSearcher(reader);
// no random access required;
// calling suggest with filter that does not match any documents should early terminate
PrefixCompletionQuery query =
new PrefixCompletionQuery(analyzer, new Term("suggest_field", "abc_"), filter);
TopSuggestDocs suggest = indexSearcher.suggest(query, num, false);
assertEquals(0L, suggest.totalHits.value());
reader.close();
iw.close();
}
@Test
public void testSuggestOnAllDeletedDocuments() throws Exception {
Analyzer analyzer = new MockAnalyzer(random());
// using IndexWriter instead of RandomIndexWriter
IndexWriter iw = new IndexWriter(dir, iwcWithSuggestField(analyzer, "suggest_field"));
int num = Math.min(1000, atLeast(10));
for (int i = 0; i < num; i++) {
Document document = new Document();
document.add(new SuggestField("suggest_field", "abc_" + i, i));
document.add(newStringField("delete", "delete", Field.Store.NO));
iw.addDocument(document);
if (usually()) {
iw.commit();
}
}
iw.deleteDocuments(new Term("delete", "delete"));
DirectoryReader reader = DirectoryReader.open(iw);
SuggestIndexSearcher indexSearcher = new SuggestIndexSearcher(reader);
PrefixCompletionQuery query =
new PrefixCompletionQuery(analyzer, new Term("suggest_field", "abc_"));
TopSuggestDocs suggest = indexSearcher.suggest(query, num, false);
assertEquals(0L, suggest.totalHits.value());
reader.close();
iw.close();
}
@Test
public void testSuggestOnMostlyDeletedDocuments() throws Exception {
Analyzer analyzer = new MockAnalyzer(random());
// using IndexWriter instead of RandomIndexWriter
IndexWriter iw = new IndexWriter(dir, iwcWithSuggestField(analyzer, "suggest_field"));
int num = Math.min(1000, atLeast(10));
for (int i = 1; i <= num; i++) {
Document document = new Document();
document.add(new SuggestField("suggest_field", "abc_" + i, i));
document.add(new StoredField("weight_fld", i));
document.add(new IntPoint("weight_fld", i));
iw.addDocument(document);
if (usually()) {
iw.commit();
}
}
iw.deleteDocuments(IntPoint.newRangeQuery("weight_fld", 2, Integer.MAX_VALUE));
DirectoryReader reader = DirectoryReader.open(iw);
SuggestIndexSearcher indexSearcher = new SuggestIndexSearcher(reader);
PrefixCompletionQuery query =
new PrefixCompletionQuery(analyzer, new Term("suggest_field", "abc_"));
TopSuggestDocs suggest = indexSearcher.suggest(query, 1, false);
assertSuggestions(suggest, new Entry("abc_1", 1));
reader.close();
iw.close();
}
@Test
public void testMultipleSuggestFieldsPerDoc() throws Exception {
Analyzer analyzer = new MockAnalyzer(random());
RandomIndexWriter iw =
new RandomIndexWriter(
random(), dir, iwcWithSuggestField(analyzer, "sug_field_1", "sug_field_2"));
Document document = new Document();
document.add(new SuggestField("sug_field_1", "apple", 4));
document.add(new SuggestField("sug_field_2", "april", 3));
iw.addDocument(document);
document = new Document();
document.add(new SuggestField("sug_field_1", "aples", 3));
document.add(new SuggestField("sug_field_2", "apartment", 2));
iw.addDocument(document);
if (rarely()) {
iw.commit();
}
DirectoryReader reader = iw.getReader();
SuggestIndexSearcher suggestIndexSearcher = new SuggestIndexSearcher(reader);
PrefixCompletionQuery query =
new PrefixCompletionQuery(analyzer, new Term("sug_field_1", "ap"));
TopSuggestDocs suggestDocs1 = suggestIndexSearcher.suggest(query, 4, false);
assertSuggestions(suggestDocs1, new Entry("apple", 4), new Entry("aples", 3));
query = new PrefixCompletionQuery(analyzer, new Term("sug_field_2", "ap"));
TopSuggestDocs suggestDocs2 = suggestIndexSearcher.suggest(query, 4, false);
assertSuggestions(suggestDocs2, new Entry("april", 3), new Entry("apartment", 2));
// check that the doc ids are consistent
for (int i = 0; i < suggestDocs1.scoreDocs.length; i++) {
assertEquals(suggestDocs1.scoreDocs[i].doc, suggestDocs2.scoreDocs[i].doc);
}
reader.close();
iw.close();
}
@Test
public void testEarlyTermination() throws Exception {
Analyzer analyzer = new MockAnalyzer(random());
RandomIndexWriter iw =
new RandomIndexWriter(random(), dir, iwcWithSuggestField(analyzer, "suggest_field"));
int num = Math.min(1000, atLeast(10));
// have segments of 4 documents
// with descending suggestion weights
// suggest should early terminate for
// segments with docs having lower suggestion weights
for (int i = num; i > 0; i--) {
Document document = new Document();
document.add(new SuggestField("suggest_field", "abc_" + i, i));
iw.addDocument(document);
if (i % 4 == 0) {
iw.commit();
}
}
DirectoryReader reader = iw.getReader();
SuggestIndexSearcher indexSearcher = new SuggestIndexSearcher(reader);
PrefixCompletionQuery query =
new PrefixCompletionQuery(analyzer, new Term("suggest_field", "abc_"));
TopSuggestDocs suggest = indexSearcher.suggest(query, 1, false);
assertSuggestions(suggest, new Entry("abc_" + num, num));
reader.close();
iw.close();
}
@Test
public void testMultipleSegments() throws Exception {
Analyzer analyzer = new MockAnalyzer(random());
RandomIndexWriter iw =
new RandomIndexWriter(random(), dir, iwcWithSuggestField(analyzer, "suggest_field"));
int num = Math.min(1000, atLeast(10));
List<Entry> entries = new ArrayList<>();
// ensure at least some segments have no suggest field
for (int i = num; i > 0; i--) {
Document document = new Document();
if (random().nextInt(4) == 1) {
document.add(new SuggestField("suggest_field", "abc_" + i, i));
entries.add(new Entry("abc_" + i, i));
}
document.add(new StoredField("weight_fld", i));
iw.addDocument(document);
if (usually()) {
iw.commit();
}
}
DirectoryReader reader = iw.getReader();
SuggestIndexSearcher indexSearcher = new SuggestIndexSearcher(reader);
PrefixCompletionQuery query =
new PrefixCompletionQuery(analyzer, new Term("suggest_field", "abc_"));
TopSuggestDocs suggest =
indexSearcher.suggest(query, entries.isEmpty() ? 1 : entries.size(), false);
assertSuggestions(suggest, entries.toArray(new Entry[entries.size()]));
reader.close();
iw.close();
}
@Test
public void testReturnedDocID() throws Exception {
Analyzer analyzer = new MockAnalyzer(random());
RandomIndexWriter iw =
new RandomIndexWriter(random(), dir, iwcWithSuggestField(analyzer, "suggest_field"));
int num = Math.min(1000, atLeast(10));
for (int i = 0; i < num; i++) {
Document document = new Document();
document.add(new SuggestField("suggest_field", "abc_" + i, num));
document.add(new StoredField("int_field", i));
iw.addDocument(document);
if (random().nextBoolean()) {
iw.commit();
}
}
DirectoryReader reader = iw.getReader();
SuggestIndexSearcher indexSearcher = new SuggestIndexSearcher(reader);
PrefixCompletionQuery query =
new PrefixCompletionQuery(analyzer, new Term("suggest_field", "abc_"));
TopSuggestDocs suggest = indexSearcher.suggest(query, num, false);
assertEquals(num, suggest.totalHits.value());
StoredFields storedFields = reader.storedFields();
for (SuggestScoreDoc suggestScoreDoc : suggest.scoreLookupDocs()) {
String key = suggestScoreDoc.key.toString();
MatcherAssert.assertThat(key, startsWith("abc_"));
String substring = key.substring(4);
int fieldValue = Integer.parseInt(substring);
Document doc = storedFields.document(suggestScoreDoc.doc);
assertEquals(doc.getField("int_field").numericValue().intValue(), fieldValue);
}
reader.close();
iw.close();
}
@Test
public void testScoring() throws Exception {
Analyzer analyzer = new MockAnalyzer(random());
RandomIndexWriter iw =
new RandomIndexWriter(random(), dir, iwcWithSuggestField(analyzer, "suggest_field"));
int num = Math.min(1000, atLeast(50));
String[] prefixes = {"abc", "bac", "cab"};
Map<String, Integer> mappings = new HashMap<>();
for (int i = 0; i < num; i++) {
Document document = new Document();
String suggest = prefixes[i % 3] + TestUtil.randomSimpleString(random(), 10) + "_" + i;
int weight = random().nextInt(Integer.MAX_VALUE);
document.add(new SuggestField("suggest_field", suggest, weight));
mappings.put(suggest, weight);
iw.addDocument(document);
if (usually()) {
iw.commit();
}
}
DirectoryReader reader = iw.getReader();
SuggestIndexSearcher indexSearcher = new SuggestIndexSearcher(reader);
for (String prefix : prefixes) {
PrefixCompletionQuery query =
new PrefixCompletionQuery(analyzer, new Term("suggest_field", prefix));
TopSuggestDocs suggest = indexSearcher.suggest(query, num, false);
assertTrue(suggest.totalHits.value() > 0);
float topScore = -1;
for (SuggestScoreDoc scoreDoc : suggest.scoreLookupDocs()) {
if (topScore != -1) {
assertTrue(topScore >= scoreDoc.score);
}
topScore = scoreDoc.score;
assertEquals(scoreDoc.score, (float) mappings.get(scoreDoc.key.toString()), 0);
assertNotNull(mappings.remove(scoreDoc.key.toString()));
}
}
assertEquals(0, mappings.size());
reader.close();
iw.close();
}
@Test
public void testRealisticKeys() throws Exception {
Analyzer analyzer = new MockAnalyzer(random());
RandomIndexWriter iw =
new RandomIndexWriter(random(), dir, iwcWithSuggestField(analyzer, "suggest_field"));
LineFileDocs lineFileDocs = new LineFileDocs(random());
int num = Math.min(1000, atLeast(50));
Map<String, Integer> mappings = new HashMap<>();
for (int i = 0; i < num; i++) {
Document document = lineFileDocs.nextDoc();
String title = document.getField("title").stringValue();
int maxLen = Math.min(title.length(), 500);
String prefix = title.substring(0, maxLen);
int weight = random().nextInt(Integer.MAX_VALUE);
Integer prevWeight = mappings.get(prefix);
if (prevWeight == null || prevWeight < weight) {
mappings.put(prefix, weight);
}
Document doc = new Document();
doc.add(new SuggestField("suggest_field", prefix, weight));
iw.addDocument(doc);
if (rarely()) {
iw.commit();
}
}
DirectoryReader reader = iw.getReader();
SuggestIndexSearcher indexSearcher = new SuggestIndexSearcher(reader);
for (Map.Entry<String, Integer> entry : mappings.entrySet()) {
String title = entry.getKey();
PrefixCompletionQuery query =
new PrefixCompletionQuery(analyzer, new Term("suggest_field", title));
TopSuggestDocs suggest = indexSearcher.suggest(query, mappings.size(), false);
assertTrue(suggest.totalHits.value() > 0);
boolean matched = false;
for (ScoreDoc scoreDoc : suggest.scoreDocs) {
matched = Float.compare(scoreDoc.score, (float) entry.getValue()) == 0;
if (matched) {
break;
}
}
assertTrue("at least one of the entries should have the score", matched);
}
lineFileDocs.close();
reader.close();
iw.close();
}
@Test
public void testThreads() throws Exception {
final Analyzer analyzer = new MockAnalyzer(random());
RandomIndexWriter iw =
new RandomIndexWriter(
random(),
dir,
iwcWithSuggestField(analyzer, "suggest_field_1", "suggest_field_2", "suggest_field_3"));
int num = Math.min(1000, atLeast(100));
final String prefix1 = "abc1_";
final String prefix2 = "abc2_";
final String prefix3 = "abc3_";
final Entry[] entries1 = new Entry[num];
final Entry[] entries2 = new Entry[num];
final Entry[] entries3 = new Entry[num];
for (int i = 0; i < num; i++) {
int weight = num - (i + 1);
entries1[i] = new Entry(prefix1 + weight, weight);
entries2[i] = new Entry(prefix2 + weight, weight);
entries3[i] = new Entry(prefix3 + weight, weight);
}
for (int i = 0; i < num; i++) {
Document doc = new Document();
doc.add(new SuggestField("suggest_field_1", prefix1 + i, i));
doc.add(new SuggestField("suggest_field_2", prefix2 + i, i));
doc.add(new SuggestField("suggest_field_3", prefix3 + i, i));
iw.addDocument(doc);
if (rarely()) {
iw.commit();
}
}
DirectoryReader reader = iw.getReader();
int numThreads = TestUtil.nextInt(random(), 2, 7);
Thread[] threads = new Thread[numThreads];
final CyclicBarrier startingGun = new CyclicBarrier(numThreads + 1);
final CopyOnWriteArrayList<Throwable> errors = new CopyOnWriteArrayList<>();
final SuggestIndexSearcher indexSearcher = new SuggestIndexSearcher(reader);
for (int i = 0; i < threads.length; i++) {
threads[i] =
new Thread() {
@Override
public void run() {
try {
startingGun.await();
PrefixCompletionQuery query =
new PrefixCompletionQuery(analyzer, new Term("suggest_field_1", prefix1));
TopSuggestDocs suggest = indexSearcher.suggest(query, num, false);
assertSuggestions(suggest, entries1);
query = new PrefixCompletionQuery(analyzer, new Term("suggest_field_2", prefix2));
suggest = indexSearcher.suggest(query, num, false);
assertSuggestions(suggest, entries2);
query = new PrefixCompletionQuery(analyzer, new Term("suggest_field_3", prefix3));
suggest = indexSearcher.suggest(query, num, false);
assertSuggestions(suggest, entries3);
} catch (Throwable e) {
errors.add(e);
}
}
};
threads[i].start();
}
startingGun.await();
for (Thread t : threads) {
t.join();
}
assertTrue(errors.toString(), errors.isEmpty());
reader.close();
iw.close();
}
static class Entry {
final String output;
final float value;
final String context;
final int id;
Entry(String output, float value) {
this(output, null, value);
}
Entry(String output, String context, float value) {
this(output, context, value, -1);
}
Entry(String output, String context, float value, int id) {
this.output = output;
this.value = value;
this.context = context;
this.id = id;
}
@Override
public String toString() {
return "key=" + output + " score=" + value + " context=" + context + " id=" + id;
}
}
static void assertSuggestions(TopDocs actual, Entry... expected) {
SuggestScoreDoc[] suggestScoreDocs = (SuggestScoreDoc[]) actual.scoreDocs;
for (int i = 0; i < Math.min(expected.length, suggestScoreDocs.length); i++) {
SuggestScoreDoc lookupDoc = suggestScoreDocs[i];
String msg =
"Hit "
+ i
+ ": expected: "
+ toString(expected[i])
+ " but actual: "
+ toString(lookupDoc);
assertEquals(msg, expected[i].output, lookupDoc.key.toString());
assertEquals(msg, expected[i].value, lookupDoc.score, 0);
assertEquals(msg, expected[i].context, lookupDoc.context);
}
assertEquals(expected.length, suggestScoreDocs.length);
}
private static String toString(Entry expected) {
return "key:" + expected.output + " score:" + expected.value + " context:" + expected.context;
}
private static String toString(SuggestScoreDoc actual) {
return "key:" + actual.key.toString() + " score:" + actual.score + " context:" + actual.context;
}
static IndexWriterConfig iwcWithSuggestField(Analyzer analyzer, String... suggestFields) {
return iwcWithSuggestField(analyzer, asSet(suggestFields));
}
static IndexWriterConfig iwcWithSuggestField(Analyzer analyzer, final Set<String> suggestFields) {
IndexWriterConfig iwc = newIndexWriterConfig(random(), analyzer);
iwc.setMergePolicy(newLogMergePolicy());
Codec filterCodec =
new FilterCodec(TestUtil.getDefaultCodec().getName(), TestUtil.getDefaultCodec()) {
final PostingsFormat postingsFormat = new Completion104PostingsFormat();
@Override
public PostingsFormat postingsFormat() {
return new PerFieldPostingsFormat() {
@Override
public PostingsFormat getPostingsFormatForField(String field) {
if (suggestFields.contains(field)) {
return postingsFormat;
}
return ((PerFieldPostingsFormat) delegate.postingsFormat())
.getPostingsFormatForField(field);
}
};
}
};
iwc.setCodec(filterCodec);
return iwc;
}
public static final class PayloadAttrToTypeAttrFilter extends TokenFilter {
private final PayloadAttribute payload = addAttribute(PayloadAttribute.class);
private final TypeAttribute type = addAttribute(TypeAttribute.class);
protected PayloadAttrToTypeAttrFilter(TokenStream input) {
super(input);
}
@Override
public boolean incrementToken() throws IOException {
if (input.incrementToken()) {
// we move them over so we can assert them more easily in the tests
type.setType(payload.getPayload().utf8ToString());
return true;
}
return false;
}
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.pinot.segment.local.segment.creator;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.pinot.spi.config.table.TableConfig;
import org.apache.pinot.spi.data.Schema;
import org.apache.pinot.spi.data.readers.GenericRow;
import org.apache.pinot.spi.utils.JsonUtils;
import org.testng.annotations.Test;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNotNull;
import static org.testng.Assert.assertNull;
public class TransformPipelineTest {
private static TableConfig createTestTableConfig()
throws Exception {
return Fixtures.createTableConfig("some.consumer.class", "some.decoder.class");
}
@Test
public void testSingleRow()
throws Exception {
TableConfig config = createTestTableConfig();
Schema schema = Fixtures.createSchema();
TransformPipeline pipeline = new TransformPipeline(config, schema);
GenericRow simpleRow = Fixtures.createSingleRow(9527);
TransformPipeline.Result result = pipeline.processRow(simpleRow);
assertNotNull(result);
assertEquals(result.getTransformedRows().size(), 1);
assertEquals(result.getSkippedRowCount(), 0);
assertEquals(result.getTransformedRows().get(0), simpleRow);
}
@Test(expectedExceptions = RuntimeException.class,
expectedExceptionsMessageRegExp = "Caught exception while transforming data type.*")
public void testSingleRowFailure()
throws Exception {
TableConfig config = createTestTableConfig();
Schema schema = Fixtures.createSchema();
TransformPipeline pipeline = new TransformPipeline(config, schema);
GenericRow simpleRow = Fixtures.createInvalidSingleRow(9527);
pipeline.processRow(simpleRow);
}
@Test
public void testMultipleRow()
throws Exception {
TableConfig config = createTestTableConfig();
Schema schema = Fixtures.createSchema();
TransformPipeline pipeline = new TransformPipeline(config, schema);
GenericRow multipleRow = Fixtures.createMultipleRow(9527);
//noinspection unchecked
List<GenericRow> rows = (List<GenericRow>) multipleRow.getValue(GenericRow.MULTIPLE_RECORDS_KEY);
TransformPipeline.Result result = pipeline.processRow(multipleRow);
assertNotNull(result);
assertEquals(result.getTransformedRows().size(), rows.size());
assertEquals(result.getSkippedRowCount(), 0);
assertEquals(result.getTransformedRows(), rows);
}
@Test(expectedExceptions = RuntimeException.class,
expectedExceptionsMessageRegExp = "Caught exception while transforming data type.*")
public void testMultipleRowPartialFailure()
throws Exception {
TableConfig config = createTestTableConfig();
Schema schema = Fixtures.createSchema();
TransformPipeline pipeline = new TransformPipeline(config, schema);
GenericRow multipleRow = Fixtures.createMultipleRowPartialFailure(9527);
pipeline.processRow(multipleRow);
}
/**
 * Unnests the complex field {@code payload.commits} (a single nested map in this test) while
 * renaming the {@code payload.} prefix away and applying a {@code fromDateTime} transform,
 * then verifies that every nested commit attribute appears as a flattened dotted column
 * ("commits.*") on the single resulting row.
 */
@Test
public void testUnnestFieldWithTransform()
    throws Exception {
  // Table config: unnest payload.commits, strip the "payload." prefix, and derive
  // created_at_timestamp from the created_at string column.
  TableConfig config = JsonUtils.stringToObject(
      "{\n"
      + " \"tableName\": \"githubComplexTypeEvents\",\n"
      + " \"tableType\": \"OFFLINE\",\n"
      + " \"tenants\": {\n"
      + " },\n"
      + " \"segmentsConfig\": {\n"
      + " \"segmentPushType\": \"REFRESH\",\n"
      + " \"replication\": \"1\",\n"
      + " \"timeColumnName\": \"created_at_timestamp\"\n"
      + " },\n"
      + " \"tableIndexConfig\": {\n"
      + " \"loadMode\": \"MMAP\"\n"
      + " },\n"
      + " \"ingestionConfig\": {\n"
      + " \"transformConfigs\": [\n"
      + " {\n"
      + " \"columnName\": \"created_at_timestamp\",\n"
      + " \"transformFunction\": \"fromDateTime(created_at, 'yyyy-MM-dd''T''HH:mm:ss''Z''')\"\n"
      + " }\n"
      + " ],\n"
      + " \"complexTypeConfig\": {\n"
      + " \"fieldsToUnnest\": [\n"
      + " \"payload.commits\"\n"
      + " ],\n"
      + " \"prefixesToRename\": {\n"
      + " \"payload.\": \"\"\n"
      + " }\n"
      + " }\n"
      + " },\n"
      + " \"metadata\": {\n"
      + " \"customConfigs\": {\n"
      + " }\n"
      + " }\n"
      + "}\n", TableConfig.class);
  // Schema declares the flattened commit columns (commits.*) plus the derived timestamp column.
  Schema schema = Schema.fromString(
      "{\n"
      + " \"dimensionFieldSpecs\": [\n"
      + " {\n"
      + " \"name\": \"id\",\n"
      + " \"dataType\": \"STRING\"\n"
      + " },\n"
      + " {\n"
      + " \"name\": \"type\",\n"
      + " \"dataType\": \"STRING\"\n"
      + " },\n"
      + " {\n"
      + " \"name\": \"push_id\",\n"
      + " \"dataType\": \"LONG\"\n"
      + " },\n"
      + " {\n"
      + " \"name\": \"size\",\n"
      + " \"dataType\": \"INT\"\n"
      + " },\n"
      + " {\n"
      + " \"name\": \"distinct_size\",\n"
      + " \"dataType\": \"INT\"\n"
      + " },\n"
      + " {\n"
      + " \"name\": \"ref\",\n"
      + " \"dataType\": \"STRING\"\n"
      + " },\n"
      + " {\n"
      + " \"name\": \"head\",\n"
      + " \"dataType\": \"STRING\"\n"
      + " },\n"
      + " {\n"
      + " \"name\": \"before\",\n"
      + " \"dataType\": \"STRING\"\n"
      + " },\n"
      + " {\n"
      + " \"name\": \"commits.sha\",\n"
      + " \"dataType\": \"STRING\"\n"
      + " },\n"
      + " {\n"
      + " \"name\": \"commits.author.name\",\n"
      + " \"dataType\": \"STRING\"\n"
      + " },\n"
      + " {\n"
      + " \"name\": \"commits.author.email\",\n"
      + " \"dataType\": \"STRING\"\n"
      + " },\n"
      + " {\n"
      + " \"name\": \"commits.message\",\n"
      + " \"dataType\": \"STRING\"\n"
      + " },\n"
      + " {\n"
      + " \"name\": \"commits.distinct\",\n"
      + " \"dataType\": \"BOOLEAN\"\n"
      + " },\n"
      + " {\n"
      + " \"name\": \"commits.url\",\n"
      + " \"dataType\": \"STRING\"\n"
      + " }\n"
      + " ],\n"
      + " \"dateTimeFieldSpecs\": [\n"
      + " {\n"
      + " \"name\": \"created_at\",\n"
      + " \"dataType\": \"STRING\",\n"
      + " \"format\": \"1:SECONDS:SIMPLE_DATE_FORMAT:yyyy-MM-dd'T'HH:mm:ss'Z'\",\n"
      + " \"granularity\": \"1:SECONDS\"\n"
      + " },\n"
      + " {\n"
      + " \"name\": \"created_at_timestamp\",\n"
      + " \"dataType\": \"TIMESTAMP\",\n"
      + " \"format\": \"1:MILLISECONDS:TIMESTAMP\",\n"
      + " \"granularity\": \"1:SECONDS\"\n"
      + " }\n"
      + " ],\n"
      + " \"schemaName\": \"githubComplexTypeEvents\"\n"
      + "}\n");
  TransformPipeline pipeline = new TransformPipeline(config, schema);
  // Sample GitHub PushEvent; "commits" is a single map (not a list) in this test.
  GenericRow sampleRow = new GenericRow();
  sampleRow.putValue("id", "7044874109");
  sampleRow.putValue("type", "PushEvent");
  sampleRow.putValue("actor", Map.of(
      "id", 18542751,
      "login", "LimeVista",
      "display_login", "LimeVista",
      "gravatar_id", "",
      "url", "https://api.github.com/users/LimeVista",
      "avatar_url", "https://avatars.githubusercontent.com/u/18542751?"
  ));
  sampleRow.putValue("repo", Map.of(
      "id", 115911530,
      "name", "LimeVista/Tapes",
      "url", "https://api.github.com/repos/LimeVista/Tapes"
  ));
  sampleRow.putValue("payload", Map.of(
      "push_id", "2226018068",
      "size", 1,
      "distinct_size", 1,
      "ref", "refs/heads/master",
      "head", "c5fc8b32a9ead1eba315d97410cb4ac1e6ca1774",
      "before", "892d872c5d3f24cc6837900c9f4618dc2fe92930",
      "commits", Map.of(
          "sha", "c5fc8b32a9ead1eba315d97410cb4ac1e6ca1774",
          "author", Map.of(
              "name", "Lime",
              "email", "4cc153d999e24274955157fc813e6f92f821525d@outlook.com"),
          "message", "Merge branch 'master' of https://github.com/LimeVista/Tapes\\n\\n# Conflicts:\\n#\\t.gitignore",
          "distinct", true,
          "url", "https://api.github.com/repos/LimeVista/Tapes/commits/c5fc8b32a9ead1eba315d97410cb4ac1e6ca1774"
      )
  ));
  sampleRow.putValue("public", true);
  sampleRow.putValue("created_at", "2018-01-01T11:00:00Z");
  TransformPipeline.Result result = pipeline.processRow(sampleRow);
  List<GenericRow> transformedRows = result.getTransformedRows();
  // A single nested commit map yields exactly one output row.
  assertEquals(transformedRows.size(), 1);
  GenericRow transformedRow = transformedRows.get(0);
  // fromDateTime("2018-01-01T11:00:00Z") in epoch millis.
  assertEquals(transformedRow.getValue("created_at_timestamp"), 1514804400000L);
  assertEquals(transformedRow.getValue("commits.author.email"),
      "4cc153d999e24274955157fc813e6f92f821525d@outlook.com");
  assertEquals(transformedRow.getValue("commits.author.name"), "Lime");
  assertEquals(transformedRow.getValue("commits.message"),
      "Merge branch 'master' of https://github.com/LimeVista/Tapes\\n\\n# Conflicts:\\n#\\t.gitignore");
  assertEquals(transformedRow.getValue("commits.sha"), "c5fc8b32a9ead1eba315d97410cb4ac1e6ca1774");
  // NOTE(review): the BOOLEAN schema value appears to be coerced to int 1 — confirm against pipeline semantics.
  assertEquals(transformedRow.getValue("commits.distinct"), 1);
  assertEquals(transformedRow.getValue("commits.url"),
      "https://api.github.com/repos/LimeVista/Tapes/commits/c5fc8b32a9ead1eba315d97410cb4ac1e6ca1774");
  assertEquals(transformedRow.getValue("ref"), "refs/heads/master");
  assertEquals(transformedRow.getValue("distinct_size"), 1);
  assertEquals(transformedRow.getValue("head"), "c5fc8b32a9ead1eba315d97410cb4ac1e6ca1774");
  // push_id was the string "2226018068" in the input; asserted here as a long after coercion.
  assertEquals(transformedRow.getValue("push_id"), 2226018068L);
  assertEquals(transformedRow.getValue("size"), 1);
  assertEquals(transformedRow.getValue("before"), "892d872c5d3f24cc6837900c9f4618dc2fe92930");
}
/**
 * Unnests {@code payload.commits} when it is a list of two commit maps. Because the schema does
 * NOT declare a top-level {@code commits} column, the original (pre-unnest) collection value must
 * not be carried over into the transformed rows.
 */
@Test
public void testNotAddingOriginalValueUnnestFieldWithTransform()
    throws Exception {
  // Same table config as testUnnestFieldWithTransform: unnest payload.commits, strip "payload.".
  TableConfig config = JsonUtils.stringToObject(
      "{\n"
      + " \"tableName\": \"githubComplexTypeEvents\",\n"
      + " \"tableType\": \"OFFLINE\",\n"
      + " \"tenants\": {\n"
      + " },\n"
      + " \"segmentsConfig\": {\n"
      + " \"segmentPushType\": \"REFRESH\",\n"
      + " \"replication\": \"1\",\n"
      + " \"timeColumnName\": \"created_at_timestamp\"\n"
      + " },\n"
      + " \"tableIndexConfig\": {\n"
      + " \"loadMode\": \"MMAP\"\n"
      + " },\n"
      + " \"ingestionConfig\": {\n"
      + " \"transformConfigs\": [\n"
      + " {\n"
      + " \"columnName\": \"created_at_timestamp\",\n"
      + " \"transformFunction\": \"fromDateTime(created_at, 'yyyy-MM-dd''T''HH:mm:ss''Z''')\"\n"
      + " }\n"
      + " ],\n"
      + " \"complexTypeConfig\": {\n"
      + " \"fieldsToUnnest\": [\n"
      + " \"payload.commits\"\n"
      + " ],\n"
      + " \"prefixesToRename\": {\n"
      + " \"payload.\": \"\"\n"
      + " }\n"
      + " }\n"
      + " },\n"
      + " \"metadata\": {\n"
      + " \"customConfigs\": {\n"
      + " }\n"
      + " }\n"
      + "}\n", TableConfig.class);
  // Schema has only the flattened commits.* columns — no top-level "commits" column.
  Schema schema = Schema.fromString(
      "{\n"
      + " \"dimensionFieldSpecs\": [\n"
      + " {\n"
      + " \"name\": \"id\",\n"
      + " \"dataType\": \"STRING\"\n"
      + " },\n"
      + " {\n"
      + " \"name\": \"type\",\n"
      + " \"dataType\": \"STRING\"\n"
      + " },\n"
      + " {\n"
      + " \"name\": \"push_id\",\n"
      + " \"dataType\": \"LONG\"\n"
      + " },\n"
      + " {\n"
      + " \"name\": \"size\",\n"
      + " \"dataType\": \"INT\"\n"
      + " },\n"
      + " {\n"
      + " \"name\": \"distinct_size\",\n"
      + " \"dataType\": \"INT\"\n"
      + " },\n"
      + " {\n"
      + " \"name\": \"ref\",\n"
      + " \"dataType\": \"STRING\"\n"
      + " },\n"
      + " {\n"
      + " \"name\": \"head\",\n"
      + " \"dataType\": \"STRING\"\n"
      + " },\n"
      + " {\n"
      + " \"name\": \"before\",\n"
      + " \"dataType\": \"STRING\"\n"
      + " },\n"
      + " {\n"
      + " \"name\": \"commits.sha\",\n"
      + " \"dataType\": \"STRING\"\n"
      + " },\n"
      + " {\n"
      + " \"name\": \"commits.author.name\",\n"
      + " \"dataType\": \"STRING\"\n"
      + " },\n"
      + " {\n"
      + " \"name\": \"commits.author.email\",\n"
      + " \"dataType\": \"STRING\"\n"
      + " },\n"
      + " {\n"
      + " \"name\": \"commits.message\",\n"
      + " \"dataType\": \"STRING\"\n"
      + " },\n"
      + " {\n"
      + " \"name\": \"commits.distinct\",\n"
      + " \"dataType\": \"BOOLEAN\"\n"
      + " },\n"
      + " {\n"
      + " \"name\": \"commits.url\",\n"
      + " \"dataType\": \"STRING\"\n"
      + " }\n"
      + " ],\n"
      + " \"dateTimeFieldSpecs\": [\n"
      + " {\n"
      + " \"name\": \"created_at\",\n"
      + " \"dataType\": \"STRING\",\n"
      + " \"format\": \"1:SECONDS:SIMPLE_DATE_FORMAT:yyyy-MM-dd'T'HH:mm:ss'Z'\",\n"
      + " \"granularity\": \"1:SECONDS\"\n"
      + " },\n"
      + " {\n"
      + " \"name\": \"created_at_timestamp\",\n"
      + " \"dataType\": \"TIMESTAMP\",\n"
      + " \"format\": \"1:MILLISECONDS:TIMESTAMP\",\n"
      + " \"granularity\": \"1:SECONDS\"\n"
      + " }\n"
      + " ],\n"
      + " \"schemaName\": \"githubComplexTypeEvents\"\n"
      + "}\n");
  TransformPipeline pipeline = new TransformPipeline(config, schema);
  GenericRow sampleRow = new GenericRow();
  sampleRow.putValue("id", "7044874109");
  sampleRow.putValue("type", "PushEvent");
  sampleRow.putValue("actor", Map.of(
      "id", 18542751,
      "login", "LimeVista",
      "display_login", "LimeVista",
      "gravatar_id", "",
      "url", "https://api.github.com/users/LimeVista",
      "avatar_url", "https://avatars.githubusercontent.com/u/18542751?"
  ));
  sampleRow.putValue("repo", Map.of(
      "id", 115911530,
      "name", "LimeVista/Tapes",
      "url", "https://api.github.com/repos/LimeVista/Tapes"
  ));
  // "commits" is a mutable list of two commit maps, so unnesting fans out into two rows.
  sampleRow.putValue("payload", Map.of(
      "push_id", "2226018068",
      "size", 1,
      "distinct_size", 1,
      "ref", "refs/heads/master",
      "head", "c5fc8b32a9ead1eba315d97410cb4ac1e6ca1774",
      "before", "892d872c5d3f24cc6837900c9f4618dc2fe92930",
      "commits", new ArrayList<>(List.of(new HashMap<>(Map.of(
          "sha", "c5fc8b32a9ead1eba315d97410cb4ac1e6ca1774",
          "author", new HashMap<>(Map.of(
              "name", "Lime",
              "email", "4cc153d999e24274955157fc813e6f92f821525d@outlook.com")),
          "message", "Merge branch 'master' of https://github.com/LimeVista/Tapes\\n\\n# Conflicts:\\n#\\t.gitignore",
          "distinct", true,
          "url", "https://api.github.com/repos/LimeVista/Tapes/commits/c5fc8b32a9ead1eba315d97410cb4ac1e6ca1774"
      )), new HashMap<>(Map.of(
          "sha", "410cb4ac1e6ca1774c5fc8b32a9ead1eba315d97",
          "author", new HashMap<>(Map.of(
              "name", "Lime",
              "email", "55157fc813e6f92f821525d4cc153d999e242749@outlook.com")),
          "message", "Merge branch 'master' of https://github.com/LimeVista/Tapes\\n\\n# Conflicts:\\n#\\t.gitignore",
          "distinct", true,
          "url", "https://api.github.com/repos/LimeVista/Tapes/commits/410cb4ac1e6ca1774c5fc8b32a9ead1eba315d97"
      ))))
  ));
  sampleRow.putValue("public", true);
  sampleRow.putValue("created_at", "2018-01-01T11:00:00Z");
  TransformPipeline.Result result = pipeline.processRow(sampleRow);
  List<GenericRow> transformedRows = result.getTransformedRows();
  // Two commits in the list -> two unnested output rows.
  assertEquals(transformedRows.size(), 2);
  GenericRow transformedRow = transformedRows.get(0);
  // the unnested field should not be present in the transformed row
  assertNull(transformedRow.getValue("commits"));
}
/**
 * Counterpart of {@code testNotAddingOriginalValueUnnestFieldWithTransform}: the schema here DOES
 * declare a top-level {@code commits} STRING column, so the original (pre-unnest) collection
 * value must be retained on each transformed row.
 */
@Test
public void testAddingOriginalValueUnnestFieldWithTransform()
    throws Exception {
  // Same table config as the other unnest tests: unnest payload.commits, strip "payload.".
  TableConfig config = JsonUtils.stringToObject(
      "{\n"
      + " \"tableName\": \"githubComplexTypeEvents\",\n"
      + " \"tableType\": \"OFFLINE\",\n"
      + " \"tenants\": {\n"
      + " },\n"
      + " \"segmentsConfig\": {\n"
      + " \"segmentPushType\": \"REFRESH\",\n"
      + " \"replication\": \"1\",\n"
      + " \"timeColumnName\": \"created_at_timestamp\"\n"
      + " },\n"
      + " \"tableIndexConfig\": {\n"
      + " \"loadMode\": \"MMAP\"\n"
      + " },\n"
      + " \"ingestionConfig\": {\n"
      + " \"transformConfigs\": [\n"
      + " {\n"
      + " \"columnName\": \"created_at_timestamp\",\n"
      + " \"transformFunction\": \"fromDateTime(created_at, 'yyyy-MM-dd''T''HH:mm:ss''Z''')\"\n"
      + " }\n"
      + " ],\n"
      + " \"complexTypeConfig\": {\n"
      + " \"fieldsToUnnest\": [\n"
      + " \"payload.commits\"\n"
      + " ],\n"
      + " \"prefixesToRename\": {\n"
      + " \"payload.\": \"\"\n"
      + " }\n"
      + " }\n"
      + " },\n"
      + " \"metadata\": {\n"
      + " \"customConfigs\": {\n"
      + " }\n"
      + " }\n"
      + "}\n", TableConfig.class);
  // Schema includes a top-level "commits" STRING column in addition to the flattened commits.*.
  Schema schema = Schema.fromString(
      "{\n"
      + " \"dimensionFieldSpecs\": [\n"
      + " {\n"
      + " \"name\": \"id\",\n"
      + " \"dataType\": \"STRING\"\n"
      + " },\n"
      + " {\n"
      + " \"name\": \"type\",\n"
      + " \"dataType\": \"STRING\"\n"
      + " },\n"
      + " {\n"
      + " \"name\": \"push_id\",\n"
      + " \"dataType\": \"LONG\"\n"
      + " },\n"
      + " {\n"
      + " \"name\": \"size\",\n"
      + " \"dataType\": \"INT\"\n"
      + " },\n"
      + " {\n"
      + " \"name\": \"distinct_size\",\n"
      + " \"dataType\": \"INT\"\n"
      + " },\n"
      + " {\n"
      + " \"name\": \"ref\",\n"
      + " \"dataType\": \"STRING\"\n"
      + " },\n"
      + " {\n"
      + " \"name\": \"head\",\n"
      + " \"dataType\": \"STRING\"\n"
      + " },\n"
      + " {\n"
      + " \"name\": \"before\",\n"
      + " \"dataType\": \"STRING\"\n"
      + " },\n"
      + " {\n"
      + " \"name\": \"commits.sha\",\n"
      + " \"dataType\": \"STRING\"\n"
      + " },\n"
      + " {\n"
      + " \"name\": \"commits.author.name\",\n"
      + " \"dataType\": \"STRING\"\n"
      + " },\n"
      + " {\n"
      + " \"name\": \"commits.author.email\",\n"
      + " \"dataType\": \"STRING\"\n"
      + " },\n"
      + " {\n"
      + " \"name\": \"commits.message\",\n"
      + " \"dataType\": \"STRING\"\n"
      + " },\n"
      + " {\n"
      + " \"name\": \"commits.distinct\",\n"
      + " \"dataType\": \"BOOLEAN\"\n"
      + " },\n"
      + " {\n"
      + " \"name\": \"commits.url\",\n"
      + " \"dataType\": \"STRING\"\n"
      + " },\n"
      + " {\n"
      + " \"name\": \"commits\",\n"
      + " \"dataType\": \"STRING\"\n"
      + " }\n"
      + " ],\n"
      + " \"dateTimeFieldSpecs\": [\n"
      + " {\n"
      + " \"name\": \"created_at\",\n"
      + " \"dataType\": \"STRING\",\n"
      + " \"format\": \"1:SECONDS:SIMPLE_DATE_FORMAT:yyyy-MM-dd'T'HH:mm:ss'Z'\",\n"
      + " \"granularity\": \"1:SECONDS\"\n"
      + " },\n"
      + " {\n"
      + " \"name\": \"created_at_timestamp\",\n"
      + " \"dataType\": \"TIMESTAMP\",\n"
      + " \"format\": \"1:MILLISECONDS:TIMESTAMP\",\n"
      + " \"granularity\": \"1:SECONDS\"\n"
      + " }\n"
      + " ],\n"
      + " \"schemaName\": \"githubComplexTypeEvents\"\n"
      + "}\n");
  TransformPipeline pipeline = new TransformPipeline(config, schema);
  GenericRow sampleRow = new GenericRow();
  sampleRow.putValue("id", "7044874109");
  sampleRow.putValue("type", "PushEvent");
  sampleRow.putValue("actor", Map.of(
      "id", 18542751,
      "login", "LimeVista",
      "display_login", "LimeVista",
      "gravatar_id", "",
      "url", "https://api.github.com/users/LimeVista",
      "avatar_url", "https://avatars.githubusercontent.com/u/18542751?"
  ));
  sampleRow.putValue("repo", Map.of(
      "id", 115911530,
      "name", "LimeVista/Tapes",
      "url", "https://api.github.com/repos/LimeVista/Tapes"
  ));
  // "commits" is a mutable list of two commit maps, so unnesting fans out into two rows.
  sampleRow.putValue("payload", Map.of(
      "push_id", "2226018068",
      "size", 1,
      "distinct_size", 1,
      "ref", "refs/heads/master",
      "head", "c5fc8b32a9ead1eba315d97410cb4ac1e6ca1774",
      "before", "892d872c5d3f24cc6837900c9f4618dc2fe92930",
      "commits", new ArrayList<>(List.of(new HashMap<>(Map.of(
          "sha", "c5fc8b32a9ead1eba315d97410cb4ac1e6ca1774",
          "author", new HashMap<>(Map.of(
              "name", "Lime",
              "email", "4cc153d999e24274955157fc813e6f92f821525d@outlook.com")),
          "message", "Merge branch 'master' of https://github.com/LimeVista/Tapes\\n\\n# Conflicts:\\n#\\t.gitignore",
          "distinct", true,
          "url", "https://api.github.com/repos/LimeVista/Tapes/commits/c5fc8b32a9ead1eba315d97410cb4ac1e6ca1774"
      )), new HashMap<>(Map.of(
          "sha", "410cb4ac1e6ca1774c5fc8b32a9ead1eba315d97",
          "author", new HashMap<>(Map.of(
              "name", "Lime",
              "email", "55157fc813e6f92f821525d4cc153d999e242749@outlook.com")),
          "message", "Merge branch 'master' of https://github.com/LimeVista/Tapes\\n\\n# Conflicts:\\n#\\t.gitignore",
          "distinct", true,
          "url", "https://api.github.com/repos/LimeVista/Tapes/commits/410cb4ac1e6ca1774c5fc8b32a9ead1eba315d97"
      ))))
  ));
  sampleRow.putValue("public", true);
  sampleRow.putValue("created_at", "2018-01-01T11:00:00Z");
  TransformPipeline.Result result = pipeline.processRow(sampleRow);
  List<GenericRow> transformedRows = result.getTransformedRows();
  // Two commits in the list -> two unnested output rows.
  assertEquals(transformedRows.size(), 2);
  GenericRow transformedRow = transformedRows.get(0);
  // the unnested field should be present in the transformed row, since we have the unnest field in the schema
  assertNotNull(transformedRow.getValue("commits"));
}
/**
 * Exercises prefix renaming WITHOUT unnesting: "payload." is stripped, "actor." becomes "a."
 * and "repo." becomes "r.". Verifies that every nested attribute lands under its renamed
 * dotted column on the single output row, alongside the fromDateTime-derived timestamp.
 */
@Test
public void testRenameFieldWithTransform()
    throws Exception {
  // Table config: no fieldsToUnnest here — only prefix renames plus the timestamp transform.
  TableConfig config = JsonUtils.stringToObject(
      "{\n"
      + " \"tableName\": \"githubComplexTypeEvents\",\n"
      + " \"tableType\": \"OFFLINE\",\n"
      + " \"tenants\": {\n"
      + " },\n"
      + " \"segmentsConfig\": {\n"
      + " \"segmentPushType\": \"REFRESH\",\n"
      + " \"replication\": \"1\",\n"
      + " \"timeColumnName\": \"created_at_timestamp\"\n"
      + " },\n"
      + " \"tableIndexConfig\": {\n"
      + " \"loadMode\": \"MMAP\"\n"
      + " },\n"
      + " \"ingestionConfig\": {\n"
      + " \"transformConfigs\": [\n"
      + " {\n"
      + " \"columnName\": \"created_at_timestamp\",\n"
      + " \"transformFunction\": \"fromDateTime(created_at, 'yyyy-MM-dd''T''HH:mm:ss''Z''')\"\n"
      + " }\n"
      + " ],\n"
      + " \"complexTypeConfig\": {\n"
      + " \"prefixesToRename\": {\n"
      + " \"payload.\": \"\",\n"
      + " \"actor.\": \"a.\",\n"
      + " \"repo.\": \"r.\"\n"
      + " }\n"
      + " }\n"
      + " },\n"
      + " \"metadata\": {\n"
      + " \"customConfigs\": {\n"
      + " }\n"
      + " }\n"
      + "}\n", TableConfig.class);
  Schema schema = Schema.fromString(
      "{\n"
      + " \"dimensionFieldSpecs\": [\n"
      + " {\n"
      + " \"name\": \"id\",\n"
      + " \"dataType\": \"STRING\"\n"
      + " },\n"
      + " {\n"
      + " \"name\": \"type\",\n"
      + " \"dataType\": \"STRING\"\n"
      + " },\n"
      + " {\n"
      + " \"name\": \"push_id\",\n"
      + " \"dataType\": \"LONG\"\n"
      + " },\n"
      + " {\n"
      + " \"name\": \"size\",\n"
      + " \"dataType\": \"INT\"\n"
      + " },\n"
      + " {\n"
      + " \"name\": \"distinct_size\",\n"
      + " \"dataType\": \"INT\"\n"
      + " },\n"
      + " {\n"
      + " \"name\": \"ref\",\n"
      + " \"dataType\": \"STRING\"\n"
      + " },\n"
      + " {\n"
      + " \"name\": \"head\",\n"
      + " \"dataType\": \"STRING\"\n"
      + " },\n"
      + " {\n"
      + " \"name\": \"before\",\n"
      + " \"dataType\": \"STRING\"\n"
      + " },\n"
      + " {\n"
      + " \"name\": \"commits.sha\",\n"
      + " \"dataType\": \"STRING\"\n"
      + " },\n"
      + " {\n"
      + " \"name\": \"commits.author.name\",\n"
      + " \"dataType\": \"STRING\"\n"
      + " },\n"
      + " {\n"
      + " \"name\": \"commits.author.email\",\n"
      + " \"dataType\": \"STRING\"\n"
      + " },\n"
      + " {\n"
      + " \"name\": \"commits.message\",\n"
      + " \"dataType\": \"STRING\"\n"
      + " },\n"
      + " {\n"
      + " \"name\": \"commits.distinct\",\n"
      + " \"dataType\": \"BOOLEAN\"\n"
      + " },\n"
      + " {\n"
      + " \"name\": \"commits.url\",\n"
      + " \"dataType\": \"STRING\"\n"
      + " }\n"
      + " ],\n"
      + " \"dateTimeFieldSpecs\": [\n"
      + " {\n"
      + " \"name\": \"created_at\",\n"
      + " \"dataType\": \"STRING\",\n"
      + " \"format\": \"1:SECONDS:SIMPLE_DATE_FORMAT:yyyy-MM-dd'T'HH:mm:ss'Z'\",\n"
      + " \"granularity\": \"1:SECONDS\"\n"
      + " },\n"
      + " {\n"
      + " \"name\": \"created_at_timestamp\",\n"
      + " \"dataType\": \"TIMESTAMP\",\n"
      + " \"format\": \"1:MILLISECONDS:TIMESTAMP\",\n"
      + " \"granularity\": \"1:SECONDS\"\n"
      + " }\n"
      + " ],\n"
      + " \"schemaName\": \"githubComplexTypeEvents\"\n"
      + "}\n");
  TransformPipeline pipeline = new TransformPipeline(config, schema);
  GenericRow sampleRow = new GenericRow();
  sampleRow.putValue("id", "7044874109");
  sampleRow.putValue("type", "PushEvent");
  sampleRow.putValue("actor", Map.of(
      "id", 18542751,
      "login", "LimeVista",
      "display_login", "LimeVista",
      "gravatar_id", "",
      "url", "https://api.github.com/users/LimeVista",
      "avatar_url", "https://avatars.githubusercontent.com/u/18542751?"
  ));
  sampleRow.putValue("repo", Map.of(
      "id", 115911530,
      "name", "LimeVista/Tapes",
      "url", "https://api.github.com/repos/LimeVista/Tapes"
  ));
  sampleRow.putValue("payload", Map.of(
      "push_id", "2226018068",
      "size", 1,
      "distinct_size", 1,
      "ref", "refs/heads/master",
      "head", "c5fc8b32a9ead1eba315d97410cb4ac1e6ca1774",
      "before", "892d872c5d3f24cc6837900c9f4618dc2fe92930",
      "commits", Map.of(
          "sha", "c5fc8b32a9ead1eba315d97410cb4ac1e6ca1774",
          "author", Map.of(
              "name", "Lime",
              "email", "4cc153d999e24274955157fc813e6f92f821525d@outlook.com"),
          "message", "Merge branch 'master' of https://github.com/LimeVista/Tapes\\n\\n# Conflicts:\\n#\\t.gitignore",
          "distinct", true,
          "url", "https://api.github.com/repos/LimeVista/Tapes/commits/c5fc8b32a9ead1eba315d97410cb4ac1e6ca1774"
      )
  ));
  sampleRow.putValue("public", true);
  sampleRow.putValue("created_at", "2018-01-01T11:00:00Z");
  TransformPipeline.Result result = pipeline.processRow(sampleRow);
  List<GenericRow> transformedRows = result.getTransformedRows();
  // No unnesting configured -> exactly one output row.
  assertEquals(transformedRows.size(), 1);
  GenericRow transformedRow = transformedRows.get(0);
  assertEquals(transformedRow.getValue("created_at_timestamp"), 1514804400000L);
  // payload.* fields: "payload." prefix stripped entirely.
  assertEquals(transformedRow.getValue("commits.author.email"),
      "4cc153d999e24274955157fc813e6f92f821525d@outlook.com");
  assertEquals(transformedRow.getValue("commits.author.name"), "Lime");
  assertEquals(transformedRow.getValue("commits.message"),
      "Merge branch 'master' of https://github.com/LimeVista/Tapes\\n\\n# Conflicts:\\n#\\t.gitignore");
  assertEquals(transformedRow.getValue("commits.sha"), "c5fc8b32a9ead1eba315d97410cb4ac1e6ca1774");
  // NOTE(review): the BOOLEAN schema value appears to be coerced to int 1 — confirm against pipeline semantics.
  assertEquals(transformedRow.getValue("commits.distinct"), 1);
  assertEquals(transformedRow.getValue("commits.url"),
      "https://api.github.com/repos/LimeVista/Tapes/commits/c5fc8b32a9ead1eba315d97410cb4ac1e6ca1774");
  assertEquals(transformedRow.getValue("ref"), "refs/heads/master");
  assertEquals(transformedRow.getValue("distinct_size"), 1);
  assertEquals(transformedRow.getValue("head"), "c5fc8b32a9ead1eba315d97410cb4ac1e6ca1774");
  assertEquals(transformedRow.getValue("push_id"), 2226018068L);
  assertEquals(transformedRow.getValue("size"), 1);
  assertEquals(transformedRow.getValue("before"), "892d872c5d3f24cc6837900c9f4618dc2fe92930");
  // actor.* fields renamed to a.*.
  assertEquals(transformedRow.getValue("a.id"), 18542751);
  assertEquals(transformedRow.getValue("a.login"), "LimeVista");
  assertEquals(transformedRow.getValue("a.display_login"), "LimeVista");
  assertEquals(transformedRow.getValue("a.gravatar_id"), "");
  assertEquals(transformedRow.getValue("a.url"), "https://api.github.com/users/LimeVista");
  assertEquals(transformedRow.getValue("a.avatar_url"), "https://avatars.githubusercontent.com/u/18542751?");
  // repo.* fields renamed to r.*.
  assertEquals(transformedRow.getValue("r.id"), 115911530);
  assertEquals(transformedRow.getValue("r.name"), "LimeVista/Tapes");
  assertEquals(transformedRow.getValue("r.url"), "https://api.github.com/repos/LimeVista/Tapes");
}
}
|
hibernate/hibernate-orm | 35,147 | hibernate-core/src/main/java/org/hibernate/type/spi/TypeConfiguration.java | /*
* SPDX-License-Identifier: Apache-2.0
* Copyright Red Hat Inc. and Hibernate Authors
*/
package org.hibernate.type.spi;
import java.io.InvalidObjectException;
import java.io.Serial;
import java.io.Serializable;
import java.lang.reflect.Type;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.sql.Time;
import java.sql.Timestamp;
import java.time.Duration;
import java.time.Instant;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.time.OffsetDateTime;
import java.time.OffsetTime;
import java.time.ZonedDateTime;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.function.Function;
import org.hibernate.AssertionFailure;
import org.hibernate.HibernateException;
import org.hibernate.Incubating;
import org.hibernate.Internal;
import org.hibernate.SessionFactory;
import org.hibernate.SessionFactoryObserver;
import org.hibernate.query.sqm.SqmBindableType;
import org.hibernate.type.TimeZoneStorageStrategy;
import org.hibernate.boot.cfgxml.spi.CfgXmlAccessService;
import org.hibernate.boot.registry.classloading.spi.ClassLoaderService;
import org.hibernate.boot.spi.BasicTypeRegistration;
import org.hibernate.boot.spi.MetadataBuildingContext;
import org.hibernate.dialect.Dialect;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.internal.SessionFactoryRegistry;
import org.hibernate.jpa.spi.JpaCompliance;
import org.hibernate.metamodel.mapping.BasicValuedMapping;
import org.hibernate.metamodel.mapping.EmbeddableValuedModelPart;
import org.hibernate.metamodel.mapping.JdbcMapping;
import org.hibernate.metamodel.mapping.JdbcMappingContainer;
import org.hibernate.metamodel.mapping.MappingModelExpressible;
import org.hibernate.metamodel.model.domain.internal.ArrayTupleType;
import org.hibernate.query.internal.QueryHelper;
import org.hibernate.query.sqm.BinaryArithmeticOperator;
import org.hibernate.query.sqm.IntervalType;
import org.hibernate.query.sqm.SqmExpressible;
import org.hibernate.query.sqm.tree.SqmTypedNode;
import org.hibernate.query.sqm.tree.expression.SqmParameter;
import org.hibernate.resource.beans.internal.FallbackBeanInstanceProducer;
import org.hibernate.resource.beans.spi.ManagedBeanRegistry;
import org.hibernate.service.ServiceRegistry;
import org.hibernate.type.BasicType;
import org.hibernate.type.BasicTypeRegistry;
import org.hibernate.type.QueryParameterJavaObjectType;
import org.hibernate.type.SqlTypes;
import org.hibernate.type.StandardBasicTypes;
import org.hibernate.type.descriptor.java.JavaType;
import org.hibernate.type.descriptor.java.MutabilityPlan;
import org.hibernate.type.descriptor.java.spi.JavaTypeRegistry;
import org.hibernate.type.descriptor.jdbc.JdbcType;
import org.hibernate.type.descriptor.jdbc.JdbcTypeIndicators;
import org.hibernate.type.descriptor.jdbc.spi.JdbcTypeRegistry;
import org.hibernate.type.descriptor.sql.spi.DdlTypeRegistry;
import org.hibernate.type.format.FormatMapper;
import org.hibernate.type.internal.BasicTypeImpl;
import org.hibernate.type.internal.ParameterizedTypeImpl;
import jakarta.persistence.TemporalType;
import static org.hibernate.id.uuid.LocalObjectUuidHelper.generateLocalObjectUuid;
import static org.hibernate.query.sqm.internal.TypecheckUtil.isNumberArray;
/**
* Each instance defines a set of {@linkplain Type types} available in a given
* persistence unit, and isolates them from other configurations.
* <p>
* Note that each instance of {@code Type} is inherently "scoped" to a
* {@code TypeConfiguration}. We always obtain a reference to a {@code Type}
* via the {@code TypeConfiguration} associated with the current persistence
* unit.
* <p>
* On the other hand, a {@code Type} does not inherently have access to its
* parent {@code TypeConfiguration} since extensions may contribute instances
* of {@code Type}, via {@link org.hibernate.boot.model.TypeContributions},
* for example, and the instantiation of such instances occurs outside the
* control of Hibernate.
* <p>
* In particular, a custom {@link org.hibernate.boot.model.TypeContributor}
* may contribute types to a {@code TypeConfiguration}.
* <p>
* If a {@code Type} requires access to the parent {@code TypeConfiguration},
* it should implement {@link TypeConfigurationAware}.
*
* @author Steve Ebersole
*
* @since 5.3
*
* @see org.hibernate.boot.model.TypeContributor
* @see org.hibernate.boot.model.TypeContributions
*/
@Incubating
public class TypeConfiguration implements SessionFactoryObserver, Serializable {
// private static final CoreMessageLogger LOG = messageLogger( Scope.class );

// Unique identifier for this TypeConfiguration instance.
private final String uuid = generateLocalObjectUuid();

// Tracks the current lifecycle phase: boot (MetadataBuildingContext) or runtime (SessionFactory).
private final Scope scope;

// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// things available during both boot and runtime lifecycle phases

// Registry of Java-side type descriptors.
private final transient JavaTypeRegistry javaTypeRegistry;
// Registry of JDBC-side type descriptors.
private final transient JdbcTypeRegistry jdbcTypeRegistry;
// Registry of DDL type descriptors.
private final transient DdlTypeRegistry ddlTypeRegistry;
// Registry of BasicType instances keyed by registration name.
private final transient BasicTypeRegistry basicTypeRegistry;
// Maps a JDBC type code to the names of the Hibernate types contributed for it
// (populated by addBasicTypeRegistration).
private final transient Map<Integer, Set<String>> jdbcToHibernateTypeContributionMap = new HashMap<>();
/**
 * Creates a new configuration: initializes the scope and the four type registries,
 * then primes the registries with Hibernate's standard basic types.
 */
public TypeConfiguration() {
	scope = new Scope( this );

	javaTypeRegistry = new JavaTypeRegistry( this );
	jdbcTypeRegistry = new JdbcTypeRegistry( this );
	ddlTypeRegistry = new DdlTypeRegistry( this );
	basicTypeRegistry = new BasicTypeRegistry( this );
	// Must run after the registries above exist — it registers the standard types into them.
	StandardBasicTypes.prime( this );
}
/**
 * The unique identifier of this {@code TypeConfiguration} instance.
 */
public String getUuid() {
	return uuid;
}

/**
 * The registry of basic types.
 */
public BasicTypeRegistry getBasicTypeRegistry() {
	return basicTypeRegistry;
}

/**
 * The registry of Java-side type descriptors.
 */
public JavaTypeRegistry getJavaTypeRegistry() {
	return javaTypeRegistry;
}

/**
 * The registry of JDBC-side type descriptors.
 */
public JdbcTypeRegistry getJdbcTypeRegistry() {
	return jdbcTypeRegistry;
}

/**
 * The registry of DDL type descriptors.
 */
public DdlTypeRegistry getDdlTypeRegistry() {
	return ddlTypeRegistry;
}

/**
 * The current base {@link JdbcTypeIndicators}, backed by the scope.
 */
public JdbcTypeIndicators getCurrentBaseSqlTypeIndicators() {
	return scope;
}

/**
 * Map of JDBC type code to the names of Hibernate types contributed for that code.
 */
public Map<Integer, Set<String>> getJdbcToHibernateTypeContributionMap() {
	return jdbcToHibernateTypeContributionMap;
}
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// Scoping

/**
 * Obtain the {@link MetadataBuildingContext} currently scoping this {@code TypeConfiguration}.
 *
 * @apiNote Throws an exception if the {@code TypeConfiguration} is no longer scoped to the
 * {@link MetadataBuildingContext}. See {@link Scope} for more details regarding the
 * stages a {@code TypeConfiguration} passes through.
 *
 * @return The {@link MetadataBuildingContext}
 *
 * @deprecated This operation is not very typesafe, and we're migrating away from its use
 */
@Deprecated(since = "6.2")
public MetadataBuildingContext getMetadataBuildingContext() {
	return scope.getMetadataBuildingContext();
}

/**
 * Scope this {@code TypeConfiguration} to the given {@link MetadataBuildingContext}.
 *
 * @implNote The given factory is not yet fully-initialized!
 *
 * @param metadataBuildingContext a {@link MetadataBuildingContext}
 */
public void scope(MetadataBuildingContext metadataBuildingContext) {
	// LOG.tracef( "Scoping TypeConfiguration [%s] to MetadataBuildingContext [%s]", this, metadataBuildingContext );
	scope.setMetadataBuildingContext( metadataBuildingContext );
}

/**
 * Scope this {@code TypeConfiguration} to the given {@link SessionFactory}.
 *
 * @implNote The given factory is not yet fully-initialized!
 *
 * @param sessionFactory a {@link SessionFactory} that is in a very fragile state
 */
public void scope(SessionFactoryImplementor sessionFactory) {
	// LOG.tracef( "Scoping TypeConfiguration [%s] to SessionFactoryImplementor [%s]", this, sessionFactory );

	// Runtime scoping is only legal after boot-time scoping has happened.
	if ( scope.getMetadataBuildingContext() == null ) {
		throw new IllegalStateException( "MetadataBuildingContext not known" );
	}

	scope.setSessionFactory( sessionFactory );
	// Register for lifecycle callbacks (sessionFactoryCreated / sessionFactoryClosed below).
	sessionFactory.addObserver( this );
}

/**
 * Obtain the {@link SessionFactory} currently scoping this {@code TypeConfiguration}.
 *
 * @apiNote Throws an exception if the {@code TypeConfiguration} is not yet scoped to
 * a factory. See {@link Scope} for more details regarding the stages a
 * {@code TypeConfiguration} passes through (this is a "runtime stage").
 *
 * @return The {@link SessionFactory} to which this {@code TypeConfiguration} is scoped
 *
 * @throws HibernateException if the {@code TypeConfiguration} is not currently scoped
 * to a {@link SessionFactory} (in a "runtime stage").
 *
 * @deprecated This operation is not very typesafe, and we're migrating away from its use
 */
@Deprecated(since = "6.2")
public SessionFactoryImplementor getSessionFactory() {
	return scope.getSessionFactory();
}

/**
 * Obtain the {@link ServiceRegistry} scoped to this {@code TypeConfiguration}.
 *
 * @apiNote The current {@link Scope} will determine from where the {@link ServiceRegistry}
 * is obtained.
 *
 * @return The {@link ServiceRegistry} for the current scope
 *
 * @deprecated This simply isn't a very sensible place to hang the {@link ServiceRegistry}
 */
@Deprecated(since = "6.2")
public ServiceRegistry getServiceRegistry() {
	return scope.getServiceRegistry();
}

/**
 * Obtain the {@link JpaCompliance} setting.
 *
 * @deprecated No longer used
 */
@Deprecated(since = "7.0", forRemoval = true)
public JpaCompliance getJpaCompliance() {
	return scope.getJpaCompliance();
}
/**
 * Workaround for an issue faced in {@link org.hibernate.type.EntityType#getReturnedClass()}.
 *
 * @param entityName the Hibernate entity name to resolve
 * @return the Java class for the named entity, as resolved by the current {@link Scope}
 */
@Internal
public Class<?> entityClassForEntityName(String entityName) {
	return scope.entityClassForEntityName(entityName);
}
/**
 * {@link SessionFactoryObserver} callback fired once the factory is fully created:
 * drops the boot-time {@link MetadataBuildingContext} reference, completing the
 * transition of this {@code TypeConfiguration} into its runtime phase.
 */
@Override
public void sessionFactoryCreated(SessionFactory factory) {
	// Instead of allowing scope#setSessionFactory to influence this, we use the SessionFactoryObserver callback
	// to handle this, allowing any SessionFactory constructor code to be able to continue to have access to the
	// MetadataBuildingContext through TypeConfiguration until this callback is fired.

	// LOG.tracef( "Handling #sessionFactoryCreated from [%s] for TypeConfiguration", factory );
	scope.setMetadataBuildingContext( null );
}
/**
 * {@link SessionFactoryObserver} callback fired when the factory closes:
 * releases the factory from this configuration's scope.
 */
@Override
public void sessionFactoryClosed(SessionFactory factory) {
	// LOG.tracef( "Handling #sessionFactoryClosed from [%s] for TypeConfiguration", factory );
	scope.unsetSessionFactory( factory );

	// todo (6.0) : finish this
	// release Database, descriptor Maps, etc... things that are only
	// valid while the TypeConfiguration is scoped to SessionFactory
}
/**
 * Registers each contributed {@link BasicTypeRegistration} with this configuration's
 * registries.
 */
public void addBasicTypeRegistrationContributions(List<BasicTypeRegistration> contributions) {
    contributions.forEach( contribution -> addBasicTypeRegistration( contribution, contribution.getBasicType() ) );
}

/**
 * Wires one basic-type contribution into the basic-type registry, the Java-type
 * registry, and the JDBC-code-to-type-name contribution map.
 */
private <T> void addBasicTypeRegistration(BasicTypeRegistration registration, BasicType<T> type) {
    // Make the type resolvable under each of its registration keys.
    basicTypeRegistry.register( type, registration.getRegistrationKeys() );
    // Ensure the Java type descriptor is known to the JavaType registry.
    javaTypeRegistry.resolveDescriptor( type.getJavaType(), type::getJavaTypeDescriptor );
    // Record the type name under its default JDBC type code.
    final int jdbcTypeCode = type.getJdbcType().getDefaultSqlTypeCode();
    jdbcToHibernateTypeContributionMap
            .computeIfAbsent( jdbcTypeCode, code -> new HashSet<>() )
            .add( type.getName() );
}
/**
 * Understands the following target type names for the {@code cast()} function:
 * <ul>
 * <li>{@code String}
 * <li>{@code Character}
 * <li>{@code Byte}, {@code Short}, {@code Integer}, {@code Long}
 * <li>{@code Float}, {@code Double}
 * <li>{@code Time}, {@code Date}, {@code Timestamp}
 * <li>{@code LocalDate}, {@code LocalTime}, {@code LocalDateTime}
 * <li>{@code BigInteger}
 * <li>{@code BigDecimal}
 * <li>{@code Binary}
 * <li>{@code Boolean}
 * (fragile, not aware of encoding to character via
 * {@link org.hibernate.type.CharBooleanConverter})
 * </ul>
 * <p>
 * The type names are not case-sensitive.
 *
 * @param name the target type name (case-insensitive); may also be a registered
 *             basic type name or a loadable Java class name
 * @return the resolved {@link BasicType}; never {@code null}
 * @throws HibernateException if the name cannot be resolved to any basic type
 */
public BasicType<?> resolveCastTargetType(String name) {
    // Lower-case with Locale.ROOT so resolution is locale-independent: under a
    // Turkish default locale, "Integer".toLowerCase() yields "ınteger" (dotless i)
    // and would fail to match the case labels below.
    switch ( name.toLowerCase( java.util.Locale.ROOT ) ) {
        case "string": return getBasicTypeForJavaType( String.class );
        case "character": return getBasicTypeForJavaType( Character.class );
        case "byte": return getBasicTypeForJavaType( Byte.class );
        case "short": return getBasicTypeForJavaType( Short.class );
        case "integer": return getBasicTypeForJavaType( Integer.class );
        case "long": return getBasicTypeForJavaType( Long.class );
        case "float": return getBasicTypeForJavaType( Float.class );
        case "double": return getBasicTypeForJavaType( Double.class );
        case "time": return getBasicTypeForJavaType( Time.class );
        case "date": return getBasicTypeForJavaType( java.sql.Date.class );
        case "timestamp": return getBasicTypeForJavaType( Timestamp.class );
        case "localtime": return getBasicTypeForJavaType( LocalTime.class );
        case "localdate": return getBasicTypeForJavaType( LocalDate.class );
        case "localdatetime": return getBasicTypeForJavaType( LocalDateTime.class );
        case "offsetdatetime": return getBasicTypeForJavaType( OffsetDateTime.class );
        case "zoneddatetime": return getBasicTypeForJavaType( ZonedDateTime.class );
        case "biginteger": return getBasicTypeForJavaType( BigInteger.class );
        case "bigdecimal": return getBasicTypeForJavaType( BigDecimal.class );
        case "duration": return getBasicTypeForJavaType( Duration.class );
        case "instant": return getBasicTypeForJavaType( Instant.class );
        case "binary": return getBasicTypeForJavaType( byte[].class );
        //this one is very fragile ... works well for BIT or BOOLEAN columns only
        //works OK, I suppose, for integer columns, but not at all for char columns
        case "boolean": return getBasicTypeForJavaType( Boolean.class );
        case "truefalse": return basicTypeRegistry.getRegisteredType( StandardBasicTypes.TRUE_FALSE.getName() );
        case "yesno": return basicTypeRegistry.getRegisteredType( StandardBasicTypes.YES_NO.getName() );
        case "numericboolean": return basicTypeRegistry.getRegisteredType( StandardBasicTypes.NUMERIC_BOOLEAN.getName() );
        case "json": return basicTypeRegistry.resolve( Object.class, SqlTypes.JSON );
        case "xml": return basicTypeRegistry.resolve( Object.class, SqlTypes.SQLXML );
        //really not sure about this one - it works well for casting from binary
        //to UUID, but people will want to use it to cast from varchar, and that
        //won't work at all without some special casing in the Dialects
        // case "uuid": return getBasicTypeForJavaType( UUID.class );
        default: {
            // Fall back 1: a type registered under exactly this name.
            final BasicType<?> registeredBasicType = basicTypeRegistry.getRegisteredType( name );
            if ( registeredBasicType != null ) {
                return registeredBasicType;
            }
            // Fall back 2: treat the name as a Java class and derive a basic type
            // from its recommended JDBC type.
            try {
                final Class<?> javaTypeClass = scope.getClassLoaderService().classForName( name );
                final var jtd = javaTypeRegistry.resolveDescriptor( javaTypeClass );
                final var jdbcType = jtd.getRecommendedJdbcType( getCurrentBaseSqlTypeIndicators() );
                return basicTypeRegistry.resolve( jtd, jdbcType );
            }
            catch ( Exception ignore ) {
                // deliberately ignored: an unloadable class simply means the name
                // is unrecognized, reported below
            }
            throw new HibernateException( "unrecognized cast target type: " + name );
        }
    }
}
/**
 * Encapsulation of lifecycle concerns of a {@link TypeConfiguration}:
 * <ol>
 * <li>
 * "Boot" is where the {@link TypeConfiguration} is first built as
 * {@linkplain org.hibernate.boot.model the boot model} of the domain
 * model is converted into {@linkplain org.hibernate.metamodel.model
 * the runtime model}. During this phase,
 * {@link #getMetadataBuildingContext()} is accessible but
 * {@link #getSessionFactory} throws an exception.
 * </li>
 * <li>
 * "Runtime" is where the runtime model is accessible. During this
 * phase, {@link #getSessionFactory()} is accessible but
 * {@link #getMetadataBuildingContext()} throws an exception.
 * </li>
 * <li>
 * "Sunset" happens after the {@link SessionFactory} has been closed.
 * Both {@link #getSessionFactory()} and {@link #getMetadataBuildingContext()}
 * throw exceptions.
 * </li>
 * </ol>
 * <p>
 * On the other hand, the {@linkplain #getServiceRegistry() service registry}
 * is available during both "Boot" and "Runtime" phases.
 * <p>
 * Each stage or phase is considered a scope for the {@link TypeConfiguration}.
 */
private static class Scope implements JdbcTypeIndicators, Serializable {
    private final TypeConfiguration typeConfiguration;

    // Boot-phase anchor; non-null only while scoped to a MetadataBuildingContext.
    private transient MetadataBuildingContext metadataBuildingContext;
    // Runtime-phase anchor; transient — re-resolved from name/uuid on demand
    // (see getSessionFactory and readResolve).
    private transient SessionFactoryImplementor sessionFactory;

    // Captured from the boot options in setMetadataBuildingContext so the setting
    // survives the transition to the runtime phase.
    private boolean allowExtensionsInCdi;
    // Identifiers used to re-locate the SessionFactory after serialization.
    private String sessionFactoryName;
    private String sessionFactoryUuid;

    @Override
    public TypeConfiguration getTypeConfiguration() {
        return typeConfiguration;
    }

    // The JdbcTypeIndicators overrides below share one pattern: while no
    // SessionFactory is set ("boot" phase) consult the MetadataBuildingContext;
    // afterwards consult the SessionFactory options. Note that in the "sunset"
    // phase both anchors are null, so these would throw NullPointerException.

    @Override
    public boolean isPreferJavaTimeJdbcTypesEnabled() {
        return sessionFactory == null
                ? metadataBuildingContext.isPreferJavaTimeJdbcTypesEnabled()
                : sessionFactory.getSessionFactoryOptions().isPreferJavaTimeJdbcTypesEnabled();
    }

    @Override
    public boolean isPreferNativeEnumTypesEnabled() {
        return sessionFactory == null
                ? metadataBuildingContext.isPreferNativeEnumTypesEnabled()
                : sessionFactory.getSessionFactoryOptions().isPreferNativeEnumTypesEnabled();
    }

    @Override
    public TimeZoneStorageStrategy getDefaultTimeZoneStorageStrategy() {
        return sessionFactory == null
                ? metadataBuildingContext.getBuildingOptions().getDefaultTimeZoneStorage()
                : sessionFactory.getSessionFactoryOptions().getDefaultTimeZoneStorageStrategy();
    }

    @Override
    public int getPreferredSqlTypeCodeForBoolean() {
        return sessionFactory == null
                ? metadataBuildingContext.getPreferredSqlTypeCodeForBoolean()
                : sessionFactory.getSessionFactoryOptions().getPreferredSqlTypeCodeForBoolean();
    }

    @Override
    public int getPreferredSqlTypeCodeForDuration() {
        return sessionFactory == null
                ? metadataBuildingContext.getPreferredSqlTypeCodeForDuration()
                : sessionFactory.getSessionFactoryOptions().getPreferredSqlTypeCodeForDuration();
    }

    @Override
    public int getPreferredSqlTypeCodeForUuid() {
        return sessionFactory == null
                ? metadataBuildingContext.getPreferredSqlTypeCodeForUuid()
                : sessionFactory.getSessionFactoryOptions().getPreferredSqlTypeCodeForUuid();
    }

    @Override
    public int getPreferredSqlTypeCodeForInstant() {
        return sessionFactory == null
                ? metadataBuildingContext.getPreferredSqlTypeCodeForInstant()
                : sessionFactory.getSessionFactoryOptions().getPreferredSqlTypeCodeForInstant();
    }

    @Override
    public int getPreferredSqlTypeCodeForArray() {
        return sessionFactory == null
                ? metadataBuildingContext.getPreferredSqlTypeCodeForArray()
                : sessionFactory.getSessionFactoryOptions().getPreferredSqlTypeCodeForArray();
    }

    @Override
    public Dialect getDialect() {
        return sessionFactory == null
                ? metadataBuildingContext.getMetadataCollector().getDatabase().getDialect()
                : sessionFactory.getJdbcServices().getDialect();
    }

    @Override
    public boolean preferJdbcDatetimeTypes() {
        // Runtime-only setting; false during the boot phase.
        return sessionFactory != null
                && sessionFactory.getSessionFactoryOptions().isPreferJdbcDatetimeTypesInNativeQueriesEnabled();
    }

    @Override
    public boolean isXmlFormatMapperLegacyFormatEnabled() {
        // NOTE(review): unlike the other indicator methods, this checks the boot
        // context first and tolerates the sunset phase by returning false.
        if ( metadataBuildingContext != null ) {
            return metadataBuildingContext.getBuildingOptions().isXmlFormatMapperLegacyFormatEnabled();
        }
        else if ( sessionFactory != null ) {
            return sessionFactory.getSessionFactoryOptions().isXmlFormatMapperLegacyFormatEnabled();
        }
        else {
            return false;
        }
    }

    public ClassLoaderService getClassLoaderService() {
        return sessionFactory == null
                ? metadataBuildingContext.getBootstrapContext().getClassLoaderService()
                : sessionFactory.getClassLoaderService();
    }

    public ManagedBeanRegistry getManagedBeanRegistry() {
        return sessionFactory == null
                ? metadataBuildingContext.getBootstrapContext().getManagedBeanRegistry()
                : sessionFactory.getManagedBeanRegistry();
    }

    private Scope(TypeConfiguration typeConfiguration) {
        this.typeConfiguration = typeConfiguration;
    }

    // Boot-phase accessor: throws outside the boot phase.
    private MetadataBuildingContext getMetadataBuildingContext() {
        if ( metadataBuildingContext == null ) {
            throw new HibernateException( "TypeConfiguration is not currently scoped to MetadataBuildingContext" );
        }
        return metadataBuildingContext;
    }

    // Available during both boot and runtime phases; fails only in sunset.
    private ServiceRegistry getServiceRegistry() {
        if ( metadataBuildingContext != null ) {
            return metadataBuildingContext.getBootstrapContext().getServiceRegistry();
        }
        else if ( sessionFactory != null ) {
            return sessionFactory.getServiceRegistry();
        }
        else {
            throw new AssertionFailure( "No service registry available" );
        }
    }

    // Returns null (rather than throwing) when neither phase is active.
    private JpaCompliance getJpaCompliance() {
        if ( metadataBuildingContext != null ) {
            return metadataBuildingContext.getBootstrapContext().getJpaCompliance();
        }
        else if ( sessionFactory != null ) {
            return sessionFactory.getSessionFactoryOptions().getJpaCompliance();
        }
        return null;
    }

    private void setMetadataBuildingContext(MetadataBuildingContext context) {
        metadataBuildingContext = context;
        if ( context != null ) {
            // Snapshot the CDI-extension setting while the boot options are reachable.
            allowExtensionsInCdi = context.getBuildingOptions().isAllowExtensionsInCdi();
        }
    }

    private SessionFactoryImplementor getSessionFactory() {
        if ( sessionFactory == null ) {
            if ( sessionFactoryName == null && sessionFactoryUuid == null ) {
                throw new HibernateException( "TypeConfiguration was not yet scoped to SessionFactory" );
            }
            // Lazily re-resolve the factory (e.g. after deserialization) from the
            // global registry using the recorded uuid/name.
            sessionFactory =
                    SessionFactoryRegistry.INSTANCE
                            .findSessionFactory( sessionFactoryUuid, sessionFactoryName );
            if ( sessionFactory == null ) {
                throw new HibernateException(
                        "Could not find a SessionFactory [uuid=" + sessionFactoryUuid + ",name=" + sessionFactoryName + "]"
                );
            }
        }
        return sessionFactory;
    }

    /**
     * Used by {@link TypeConfiguration} scoping.
     *
     * @param factory The {@link SessionFactory} to which the {@link TypeConfiguration} is being bound
     */
    private void setSessionFactory(SessionFactoryImplementor factory) {
        if ( sessionFactory != null ) {
            // LOG.scopingTypesToSessionFactoryAfterAlreadyScoped( sessionFactory, factory );
            // Re-scoping to a second factory is a programming error.
            throw new IllegalStateException( "TypeConfiguration was already scoped to SessionFactory: "
                    + sessionFactory.getUuid() );
        }
        else {
            // Record identifiers so the factory can be re-located after serialization.
            sessionFactoryUuid = factory.getUuid();
            sessionFactoryName = getFactoryName( factory );
        }
        sessionFactory = factory;
    }

    // Prefers the explicitly-configured factory name, falling back to the name
    // in the aggregated cfg.xml configuration, if any.
    private static String getFactoryName(SessionFactoryImplementor factory) {
        final String factoryName = factory.getSessionFactoryOptions().getSessionFactoryName();
        if ( factoryName == null ) {
            final var cfgXmlAccessService =
                    factory.getServiceRegistry()
                            .requireService( CfgXmlAccessService.class );
            final var aggregatedConfig = cfgXmlAccessService.getAggregatedConfig();
            return aggregatedConfig == null ? null : aggregatedConfig.getSessionFactoryName();
        }
        else {
            return factoryName;
        }
    }

    private void unsetSessionFactory(SessionFactory factory) {
        // LOG.tracef( "Un-scoping TypeConfiguration [%s] from SessionFactory [%s]", this, factory );
        // Note: sessionFactoryName/Uuid are deliberately retained; only the
        // live reference is dropped.
        sessionFactory = null;
    }

    // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    // Custom serialization hook

    // On deserialization, eagerly re-attach to the SessionFactory identified by
    // the serialized uuid/name (the transient reference was not serialized).
    @Serial
    private Object readResolve() throws InvalidObjectException {
        if ( sessionFactory == null ) {
            if ( sessionFactoryName != null || sessionFactoryUuid != null ) {
                sessionFactory =
                        SessionFactoryRegistry.INSTANCE
                                .findSessionFactory( sessionFactoryUuid, sessionFactoryName );
                if ( sessionFactory == null ) {
                    throw new HibernateException( "Could not find a SessionFactory [uuid="
                            + sessionFactoryUuid + ",name=" + sessionFactoryName + "]"
                    );
                }
            }
        }
        return this;
    }

    private Class<?> entityClassForEntityName(String entityName) {
        // Boot phase: look up the boot-model binding; runtime: use the metamodel.
        return sessionFactory == null
                ? metadataBuildingContext.getMetadataCollector().getEntityBinding( entityName ).getMappedClass()
                : sessionFactory.getMappingMetamodel().findEntityDescriptor( entityName ).getMappedClass();
    }
}
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

// Cache of tuple types keyed by their component type arrays.
private final ConcurrentMap<ArrayCacheKey, ArrayTupleType> arrayTuples = new ConcurrentHashMap<>();

/**
 * Resolve (creating and caching on demand) the tuple type whose components are the
 * node types of the given typed nodes.
 */
public SqmBindableType<?> resolveTupleType(List<? extends SqmTypedNode<?>> typedNodes) {
    final int count = typedNodes.size();
    final var componentTypes = new SqmBindableType<?>[count];
    for ( int index = 0; index < count; index++ ) {
        final var node = typedNodes.get( index );
        final var nodeType = node.getNodeType();
        if ( nodeType != null ) {
            componentTypes[index] = nodeType;
        }
        else if ( node instanceof SqmParameter<?> ) {
            // keep null value for Named Parameters
            componentTypes[index] = QueryParameterJavaObjectType.INSTANCE;
        }
        else {
            componentTypes[index] = getBasicTypeForJavaType( Object.class );
        }
    }
    return arrayTuples.computeIfAbsent(
            new ArrayCacheKey( componentTypes ),
            key -> new ArrayTupleType( key.components )
    );
}
/**
 * Map key wrapping an array of component types, providing array-content-based
 * equality and hashing (plain arrays only have identity semantics).
 */
private static class ArrayCacheKey {
    final SqmBindableType<?>[] components;

    public ArrayCacheKey(SqmBindableType<?>[] components) {
        this.components = components;
    }

    @Override
    public boolean equals(Object object) {
        if ( !(object instanceof ArrayCacheKey) ) {
            return false;
        }
        final ArrayCacheKey other = (ArrayCacheKey) object;
        return Arrays.equals( components, other.components );
    }

    @Override
    public int hashCode() {
        return Arrays.hashCode( components );
    }
}
/**
 * Determine the result type of a binary arithmetic operation.
 *
 * @see QueryHelper#highestPrecedenceType2
 */
public SqmBindableType<?> resolveArithmeticType(
        SqmBindableType<?> firstType,
        SqmBindableType<?> secondType,
        BinaryArithmeticOperator operator) {
    // The operator is currently not consulted: every binary operator shares the
    // two-argument resolution rules.
    return resolveArithmeticType( firstType, secondType );
}
/**
 * Determine the result type of an arithmetic operation as defined by the
 * rules in section 6.5.8.1, taking converters into account.
 * <p>
 * Either argument may be {@code null} (an as-yet-untyped parameter); the result may
 * also be {@code null} when the type cannot be determined from the available side.
 *
 * @see QueryHelper#highestPrecedenceType2
 */
public SqmBindableType<?> resolveArithmeticType(
        SqmBindableType<?> firstType,
        SqmBindableType<?> secondType) {
    if ( getSqlTemporalType( firstType ) != null ) {
        if ( secondType==null || getSqlTemporalType( secondType ) != null ) {
            // special case for subtraction of two dates
            // or timestamps resulting in a duration
            return getBasicTypeRegistry().getRegisteredType( Duration.class );
        }
        else {
            // must be postfix addition/subtraction of
            // a duration to/from a date or timestamp
            return firstType;
        }
    }
    else if ( isDuration( secondType ) ) {
        // if firstType is not known, and operator is
        // addition/subtraction, then this can be
        // either addition/subtraction of duration
        // to/from temporal or addition/subtraction of
        // durations in this case we shall return null;
        // otherwise, it's either addition/subtraction of durations
        // or prefix scalar multiplication of a duration
        // return secondType;
        return firstType == null ? null : secondType;
    }
    else if ( firstType==null && getSqlTemporalType( secondType ) != null ) {
        // subtraction of a date or timestamp from a
        // parameter (which doesn't have a type yet)
        return getBasicTypeRegistry().getRegisteredType( Duration.class );
    }
    // Numeric case: promote to whichever side has the wider relational Java type.
    if ( firstType != null && ( secondType == null
            || !secondType.getRelationalJavaType().isWider( firstType.getRelationalJavaType() ) ) ) {
        return resolveArithmeticType( firstType );
    }
    return secondType != null ? resolveArithmeticType( secondType ) : null;
}
/**
 * Determine the result type of a unary arithmetic operation,
 * taking converters into account.
 */
public SqmBindableType<?> resolveArithmeticType(SqmBindableType<?> expressible) {
    if ( isNumberArray( expressible ) ) {
        return expressible.getSqmType();
    }
    // Use the relational java type to account for possible converters
    final Class<?> relationalJavaClass = expressible.getRelationalJavaType().getJavaTypeClass();
    return getBasicTypeForJavaType( relationalJavaClass );
}

/**
 * Whether the expressible's relational Java type is assignable to {@code javaType}.
 * A {@code null} expressible never matches.
 */
private static boolean matchesJavaType(SqmExpressible<?> type, Class<?> javaType) {
    assert javaType != null;
    if ( type == null ) {
        return false;
    }
    return javaType.isAssignableFrom( type.getRelationalJavaType().getJavaTypeClass() );
}
// Per-Java-type cache backing getBasicTypeForJavaType, keyed by the (possibly
// parameterized) java.lang.reflect.Type.
private final ConcurrentHashMap<Type, BasicType<?>> basicTypeByJavaType = new ConcurrentHashMap<>();

/**
 * Variant of {@link #getBasicTypeForJavaType(Class)} for a generic type with the
 * given type arguments, e.g. {@code List<String>}.
 */
public <J> BasicType<J> getBasicTypeForGenericJavaType(Class<? super J> javaType, Type... typeArguments) {
    //noinspection unchecked
    return (BasicType<J>) getBasicTypeForJavaType( new ParameterizedTypeImpl( javaType, typeArguments, null ) );
}

/**
 * Obtain the registered basic type for the given Java class, or {@code null} if
 * none is registered.
 */
public <J> BasicType<J> getBasicTypeForJavaType(Class<J> javaType) {
    //noinspection unchecked
    return (BasicType<J>) getBasicTypeForJavaType( (Type) javaType );
}
/**
 * Obtain the registered basic type for the given Java type, consulting (and
 * populating) the per-type cache. Returns {@code null} when no type is registered;
 * negative results are not cached.
 */
public BasicType<?> getBasicTypeForJavaType(Type javaType) {
    final BasicType<?> cached = basicTypeByJavaType.get( javaType );
    if ( cached != null ) {
        return cached;
    }
    final BasicType<?> registered = basicTypeRegistry.getRegisteredType( javaType );
    if ( registered == null ) {
        return null;
    }
    basicTypeByJavaType.put( javaType, registered );
    return registered;
}
/**
 * Obtain (resolving and caching on demand) the standard basic type for the given
 * Java class; {@code null} in, {@code null} out.
 */
public <J> BasicType<J> standardBasicTypeForJavaType(Class<J> javaType) {
    return javaType == null
            ? null
            : standardBasicTypeForJavaType(
                    javaType,
                    descriptor -> new BasicTypeImpl<>(
                            descriptor,
                            descriptor.getRecommendedJdbcType( getCurrentBaseSqlTypeIndicators() )
                    )
            );
}

/**
 * Obtain (resolving and caching on demand) the standard basic type for the given
 * Java type; {@code null} in, {@code null} out.
 */
public BasicType<?> standardBasicTypeForJavaType(Type javaType) {
    return javaType == null
            ? null
            : standardBasicTypeForJavaType(
                    javaType,
                    descriptor -> new BasicTypeImpl<>(
                            descriptor,
                            descriptor.getRecommendedJdbcType( getCurrentBaseSqlTypeIndicators() )
                    )
            );
}
/**
 * Obtain (resolving via {@code creator} and caching) the standard basic type for the
 * given Java class. A type already present in the {@code BasicTypeRegistry} takes
 * precedence over {@code creator}.
 */
@Deprecated(since = "7.2", forRemoval = true) // Can be private
public <J> BasicType<J> standardBasicTypeForJavaType(
        Class<J> javaType,
        Function<JavaType<J>, BasicType<J>> creator) {
    if ( javaType == null ) {
        return null;
    }
    //noinspection unchecked
    return (BasicType<J>) basicTypeByJavaType.computeIfAbsent(
            javaType,
            jt -> {
                // See if one exists in the BasicTypeRegistry and use that one if so
                final var registeredType = basicTypeRegistry.getRegisteredType( javaType );
                return registeredType != null
                        ? registeredType
                        : creator.apply( javaTypeRegistry.getDescriptor( javaType ) );
            }
    );
}

/**
 * Variant of the above taking a raw {@link Type}; same registry-first resolution.
 */
@Deprecated(since = "7.2", forRemoval = true) // Due to weird signature
public <J> BasicType<?> standardBasicTypeForJavaType(
        Type javaType,
        Function<JavaType<J>, BasicType<J>> creator) {
    if ( javaType == null ) {
        return null;
    }
    return basicTypeByJavaType.computeIfAbsent(
            javaType,
            jt -> {
                // See if one exists in the BasicTypeRegistry and use that one if so
                final var registeredType = basicTypeRegistry.getRegisteredType( javaType );
                return registeredType != null
                        ? registeredType
                        : creator.apply( javaTypeRegistry.getDescriptor( javaType ) );
            }
    );
}
/**
 * Determine the JPA {@link TemporalType} implied by the expressible's recommended
 * JDBC type, or {@code null} if the type is not temporal (or the argument is null).
 */
@SuppressWarnings("deprecation")
public TemporalType getSqlTemporalType(SqmExpressible<?> type) {
    return type == null ? null
            : getSqlTemporalType( type.getRelationalJavaType()
                    .getRecommendedJdbcType( getCurrentBaseSqlTypeIndicators() ) );
}

@SuppressWarnings("deprecation")
public static TemporalType getSqlTemporalType(JdbcMapping jdbcMapping) {
    return getSqlTemporalType( jdbcMapping.getJdbcType() );
}

@SuppressWarnings("deprecation")
public static TemporalType getSqlTemporalType(JdbcMappingContainer jdbcMappings) {
    // Only meaningful for single-column mappings.
    assert jdbcMappings.getJdbcTypeCount() == 1;
    return getSqlTemporalType( jdbcMappings.getSingleJdbcMapping().getJdbcType() );
}
/**
 * Determine the JPA {@link TemporalType} for a mapping-model expressible, or
 * {@code null} if it is not temporal.
 */
@SuppressWarnings("deprecation")
public static TemporalType getSqlTemporalType(MappingModelExpressible<?> type) {
    if ( type instanceof BasicValuedMapping basicMapping ) {
        return getSqlTemporalType( basicMapping.getJdbcMapping().getJdbcType() );
    }
    if ( type instanceof EmbeddableValuedModelPart embeddablePart ) {
        // Handle the special embeddables for emulated offset/timezone handling
        final Class<?> javaClass = embeddablePart.getJavaType().getJavaTypeClass();
        if ( javaClass == OffsetDateTime.class || javaClass == ZonedDateTime.class ) {
            return TemporalType.TIMESTAMP;
        }
        return javaClass == OffsetTime.class ? TemporalType.TIME : null;
    }
    return null;
}
@SuppressWarnings("deprecation")
public static TemporalType getSqlTemporalType(JdbcType descriptor) {
    return getSqlTemporalType( descriptor.getDefaultSqlTypeCode() );
}

/**
 * Map a JDBC type code to the corresponding JPA {@link TemporalType};
 * {@code null} for non-temporal codes.
 */
@SuppressWarnings("deprecation")
protected static TemporalType getSqlTemporalType(int jdbcTypeCode) {
    return switch ( jdbcTypeCode ) {
        case SqlTypes.TIMESTAMP, SqlTypes.TIMESTAMP_WITH_TIMEZONE, SqlTypes.TIMESTAMP_UTC
                -> TemporalType.TIMESTAMP;
        case SqlTypes.TIME, SqlTypes.TIME_WITH_TIMEZONE, SqlTypes.TIME_UTC
                -> TemporalType.TIME;
        case SqlTypes.DATE
                -> TemporalType.DATE;
        default -> null;
    };
}
public static IntervalType getSqlIntervalType(JdbcMappingContainer jdbcMappings) {
    // Only meaningful for single-column mappings.
    assert jdbcMappings.getJdbcTypeCount() == 1;
    return getSqlIntervalType( jdbcMappings.getSingleJdbcMapping().getJdbcType() );
}

public static IntervalType getSqlIntervalType(JdbcType descriptor) {
    return getSqlIntervalType( descriptor.getDefaultSqlTypeCode() );
}

/**
 * Map a JDBC type code to an {@link IntervalType}: only {@code INTERVAL_SECOND}
 * maps; all other codes yield {@code null}.
 */
protected static IntervalType getSqlIntervalType(int jdbcTypeCode) {
    return jdbcTypeCode == SqlTypes.INTERVAL_SECOND ? IntervalType.SECOND : null;
}

/**
 * Whether the expressible's relational Java type is assignable to {@code Date}
 * (presumably {@code java.util.Date}, which also covers the {@code java.sql}
 * date/time subtypes — TODO confirm against this file's imports).
 */
public static boolean isJdbcTemporalType(SqmExpressible<?> type) {
    return matchesJavaType( type, Date.class );
}

/**
 * Whether the expressible's relational Java type is {@link Duration} (or a subtype).
 */
public static boolean isDuration(SqmExpressible<?> type) {
    return matchesJavaType( type, Duration.class );
}
/**
 * Instantiate the given {@link MutabilityPlan} class: via the managed-bean registry
 * when extensions-in-CDI is enabled, otherwise by direct reflective instantiation.
 */
@Internal @SuppressWarnings("unchecked")
public <J> MutabilityPlan<J> createMutabilityPlan(Class<? extends MutabilityPlan<?>> planClass) {
    return !scope.allowExtensionsInCdi
            ? (MutabilityPlan<J>) FallbackBeanInstanceProducer.INSTANCE.produceBeanInstance( planClass )
            : (MutabilityPlan<J>) scope.getManagedBeanRegistry().getBean( planClass ).getBeanInstance();
}

@Internal @Incubating // find a new home for this operation
public final FormatMapper getJsonFormatMapper() {
    // Requires the runtime scope: throws if not yet scoped to a SessionFactory.
    return getSessionFactory().getSessionFactoryOptions().getJsonFormatMapper();
}

@Internal @Incubating // find a new home for this operation
public final FormatMapper getXmlFormatMapper() {
    // Requires the runtime scope: throws if not yet scoped to a SessionFactory.
    return getSessionFactory().getSessionFactoryOptions().getXmlFormatMapper();
}
}
|
apache/flink | 37,070 | flink-runtime/src/test/java/org/apache/flink/runtime/blob/BlobCachePutTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.runtime.blob;
import org.apache.flink.api.common.JobID;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.core.fs.Path;
import org.apache.flink.core.testutils.CheckedThread;
import org.apache.flink.util.FlinkException;
import org.apache.flink.util.OperatingSystem;
import org.apache.flink.util.concurrent.FutureUtils;
import org.apache.commons.io.FileUtils;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;
import javax.annotation.Nullable;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Random;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionException;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.function.Supplier;
import static org.apache.flink.runtime.blob.BlobKey.BlobType.PERMANENT_BLOB;
import static org.apache.flink.runtime.blob.BlobKey.BlobType.TRANSIENT_BLOB;
import static org.apache.flink.runtime.blob.BlobKeyTest.verifyKeyDifferentHashEquals;
import static org.apache.flink.runtime.blob.BlobKeyTest.verifyType;
import static org.apache.flink.runtime.blob.BlobServerGetTest.verifyDeleted;
import static org.apache.flink.runtime.blob.BlobServerPutTest.BlockingInputStream;
import static org.apache.flink.runtime.blob.BlobServerPutTest.ChunkedInputStream;
import static org.apache.flink.runtime.blob.BlobServerPutTest.put;
import static org.apache.flink.runtime.blob.BlobServerPutTest.verifyContents;
import static org.apache.flink.runtime.blob.TestingBlobHelpers.checkFilesExist;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import static org.assertj.core.api.Assumptions.assumeThat;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
/**
* Tests for successful and failing PUT operations against the BLOB server, and successful GET
* operations.
*/
class BlobCachePutTest {
@TempDir private java.nio.file.Path tempDir;
private final Random rnd = new Random();
// --- concurrency tests for utility methods which could fail during the put operation ---
/** Checked thread that calls {@link TransientBlobCache#getStorageLocation(JobID, BlobKey)}. */
private static class TransientBlobCacheGetStorageLocation extends CheckedThread {
    private final TransientBlobCache cache;
    private final JobID jobId;
    private final BlobKey key;

    // jobId may be null: transient BLOBs can exist without a job association.
    TransientBlobCacheGetStorageLocation(
            TransientBlobCache cache, @Nullable JobID jobId, BlobKey key) {
        this.cache = cache;
        this.jobId = jobId;
        this.key = key;
    }

    @Override
    public void go() throws Exception {
        // Only exercises the storage-location lookup; any exception thrown here
        // is surfaced later through CheckedThread#sync().
        cache.getStorageLocation(jobId, key);
    }
}

/** Checked thread that calls {@link PermanentBlobCache#getStorageLocation(JobID, BlobKey)}. */
private static class PermanentBlobCacheGetStorageLocation extends CheckedThread {
    private final PermanentBlobCache cache;
    // Non-nullable here, in contrast to the transient variant above.
    private final JobID jobId;
    private final BlobKey key;

    PermanentBlobCacheGetStorageLocation(PermanentBlobCache cache, JobID jobId, BlobKey key) {
        this.cache = cache;
        this.jobId = jobId;
        this.key = key;
    }

    @Override
    public void go() throws Exception {
        cache.getStorageLocation(jobId, key);
    }
}
/** Tests concurrent calls to {@link TransientBlobCache#getStorageLocation(JobID, BlobKey)}. */
@Test
void testTransientBlobCacheGetStorageLocationConcurrentNoJob() throws Exception {
    // null job id: transient BLOBs without a job association
    testTransientBlobCacheGetStorageLocationConcurrent(null);
}

/** Tests concurrent calls to {@link TransientBlobCache#getStorageLocation(JobID, BlobKey)}. */
@Test
void testTransientBlobCacheGetStorageLocationConcurrentForJob() throws Exception {
    testTransientBlobCacheGetStorageLocationConcurrent(new JobID());
}
// Runs three threads concurrently resolving the storage location for the same
// (jobId, key) pair on a transient cache; CheckedThread#sync (via the helper)
// rethrows any failure.
private void testTransientBlobCacheGetStorageLocationConcurrent(@Nullable final JobID jobId)
        throws Exception {
    try (BlobServer server = TestingBlobUtils.createServer(tempDir);
            TransientBlobCache cache = TestingBlobUtils.createTransientCache(tempDir, server)) {
        server.start();
        BlobKey key = new TransientBlobKey();
        CheckedThread[] threads =
                new CheckedThread[] {
                    new TransientBlobCacheGetStorageLocation(cache, jobId, key),
                    new TransientBlobCacheGetStorageLocation(cache, jobId, key),
                    new TransientBlobCacheGetStorageLocation(cache, jobId, key)
                };
        checkedThreadSimpleTest(threads);
    }
}
/** Tests concurrent calls to {@link PermanentBlobCache#getStorageLocation(JobID, BlobKey)}. */
@Test
void testPermanentBlobCacheGetStorageLocationConcurrentForJob() throws Exception {
    // Permanent variant always uses a job id (no null-job case here).
    final JobID jobId = new JobID();
    try (BlobServer server = TestingBlobUtils.createServer(tempDir);
            PermanentBlobCache cache = TestingBlobUtils.createPermanentCache(tempDir, server)) {
        server.start();
        BlobKey key = new PermanentBlobKey();
        CheckedThread[] threads =
                new CheckedThread[] {
                    new PermanentBlobCacheGetStorageLocation(cache, jobId, key),
                    new PermanentBlobCacheGetStorageLocation(cache, jobId, key),
                    new PermanentBlobCacheGetStorageLocation(cache, jobId, key)
                };
        checkedThreadSimpleTest(threads);
    }
}
/**
 * Helper method to first start all threads and then wait for their completion.
 *
 * @param threads threads to use
 * @throws Exception exceptions that are thrown from the threads
 */
private void checkedThreadSimpleTest(CheckedThread[] threads) throws Exception {
    // Kick off every thread before joining any of them so they actually overlap.
    for (int i = 0; i < threads.length; i++) {
        threads[i].start();
    }
    // Join each thread; sync() rethrows any exception raised inside go().
    for (int i = 0; i < threads.length; i++) {
        threads[i].sync();
    }
}
// --------------------------------------------------------------------------------------------
// The four transient variants below cover every combination of (put job id,
// get job id) being set or null; the permanent variant always requires job ids.
@Test
void testPutBufferTransientSuccessfulGet1() throws IOException, InterruptedException {
    testPutBufferSuccessfulGet(null, null, TRANSIENT_BLOB);
}

@Test
void testPutBufferTransientSuccessfulGet2() throws IOException, InterruptedException {
    testPutBufferSuccessfulGet(null, new JobID(), TRANSIENT_BLOB);
}

@Test
void testPutBufferTransientSuccessfulGet3() throws IOException, InterruptedException {
    testPutBufferSuccessfulGet(new JobID(), new JobID(), TRANSIENT_BLOB);
}

@Test
void testPutBufferTransientSuccessfulGet4() throws IOException, InterruptedException {
    testPutBufferSuccessfulGet(new JobID(), null, TRANSIENT_BLOB);
}

@Test
void testPutBufferPermanentSuccessfulGet() throws IOException, InterruptedException {
    testPutBufferSuccessfulGet(new JobID(), new JobID(), PERMANENT_BLOB);
}
/**
 * Uploads two byte arrays for different jobs into the server via the {@link BlobCacheService}.
 * File transfers should be successful.
 *
 * @param jobId1 first job id
 * @param jobId2 second job id
 * @param blobType whether the BLOB should become permanent or transient
 */
private void testPutBufferSuccessfulGet(
        @Nullable JobID jobId1, @Nullable JobID jobId2, BlobKey.BlobType blobType)
        throws IOException, InterruptedException {
    Tuple2<BlobServer, BlobCacheService> serverAndCache =
            TestingBlobUtils.createServerAndCache(tempDir);
    try (BlobServer server = serverAndCache.f0;
            BlobCacheService cache = serverAndCache.f1) {
        server.start();
        // 2 MB of random payload plus a small slice of it as a second, distinct blob
        byte[] data = new byte[2000000];
        rnd.nextBytes(data);
        byte[] data2 = Arrays.copyOfRange(data, 10, 54);
        // put data for jobId1 and verify
        BlobKey key1a = put(cache, jobId1, data, blobType);
        assertThat(key1a).isNotNull();
        verifyType(blobType, key1a);
        // second upload of same data should yield a different BlobKey
        BlobKey key1a2 = put(cache, jobId1, data, blobType);
        assertThat(key1a2).isNotNull();
        verifyType(blobType, key1a2);
        verifyKeyDifferentHashEquals(key1a, key1a2);
        BlobKey key1b = put(cache, jobId1, data2, blobType);
        assertThat(key1b).isNotNull();
        verifyType(blobType, key1b);
        // files should be available on the server
        verifyContents(server, jobId1, key1a, data);
        verifyContents(server, jobId1, key1a2, data);
        verifyContents(server, jobId1, key1b, data2);
        // now put data for jobId2 and verify that both are ok
        BlobKey key2a = put(cache, jobId2, data, blobType);
        assertThat(key2a).isNotNull();
        verifyType(blobType, key2a);
        verifyKeyDifferentHashEquals(key1a, key2a);
        BlobKey key2b = put(cache, jobId2, data2, blobType);
        assertThat(key2b).isNotNull();
        verifyType(blobType, key2b);
        verifyKeyDifferentHashEquals(key1b, key2b);
        // verify the accessibility and the BLOB contents
        verifyContents(server, jobId1, key1a, data);
        verifyContents(server, jobId1, key1a2, data);
        verifyContents(server, jobId1, key1b, data2);
        verifyContents(server, jobId2, key2a, data);
        verifyContents(server, jobId2, key2b, data2);
        // now verify we can access the BLOBs from the cache
        verifyContents(cache, jobId1, key1a, data);
        verifyContents(cache, jobId1, key1b, data2);
        verifyContents(cache, jobId2, key2a, data);
        verifyContents(cache, jobId2, key2b, data2);
        // transient BLOBs should be deleted from the server, eventually
        if (blobType == TRANSIENT_BLOB) {
            verifyDeletedEventually(server, jobId1, key1a);
            verifyDeletedEventually(server, jobId1, key1b);
            verifyDeletedEventually(server, jobId2, key2a);
            verifyDeletedEventually(server, jobId2, key2b);
            // the files are still there on the cache though
            verifyContents(cache, jobId1, key1a, data);
            verifyContents(cache, jobId1, key1b, data2);
            verifyContents(cache, jobId2, key2a, data);
            verifyContents(cache, jobId2, key2b, data2);
        } else {
            // still on the server for permanent BLOBs after accesses from a cache
            verifyContents(server, jobId1, key1a, data);
            verifyContents(server, jobId1, key1b, data2);
            verifyContents(server, jobId2, key2a, data);
            verifyContents(server, jobId2, key2b, data2);
        }
    }
}
// --------------------------------------------------------------------------------------------
@Test
void testPutStreamTransientSuccessfulGet1() throws IOException, InterruptedException {
    // both uploads are job-unrelated
    testPutStreamTransientSuccessfulGet(null, null);
}

@Test
void testPutStreamTransientSuccessfulGet2() throws IOException, InterruptedException {
    // only the second upload is bound to a job
    final JobID secondJobId = new JobID();
    testPutStreamTransientSuccessfulGet(null, secondJobId);
}

@Test
void testPutStreamTransientSuccessfulGet3() throws IOException, InterruptedException {
    // both uploads are bound to (different) jobs
    final JobID firstJobId = new JobID();
    final JobID secondJobId = new JobID();
    testPutStreamTransientSuccessfulGet(firstJobId, secondJobId);
}

@Test
void testPutStreamTransientSuccessfulGet4() throws IOException, InterruptedException {
    // only the first upload is bound to a job
    final JobID firstJobId = new JobID();
    testPutStreamTransientSuccessfulGet(firstJobId, null);
}
/**
 * Uploads two file streams for different jobs into the server via the {@link BlobCacheService}.
 * File transfers should be successful.
 *
 * <p>Note that high-availability uploads of streams is currently only possible at the {@link
 * BlobServer}.
 *
 * @param jobId1 first job id
 * @param jobId2 second job id
 */
private void testPutStreamTransientSuccessfulGet(@Nullable JobID jobId1, @Nullable JobID jobId2)
        throws IOException, InterruptedException {
    Tuple2<BlobServer, BlobCacheService> serverAndCache =
            TestingBlobUtils.createServerAndCache(tempDir);
    try (BlobServer server = serverAndCache.f0;
            BlobCacheService cache = serverAndCache.f1) {
        server.start();
        byte[] data = new byte[2000000];
        rnd.nextBytes(data);
        // data2 is a 44-byte slice of data (bytes 10..53) with a different content hash
        byte[] data2 = Arrays.copyOfRange(data, 10, 54);
        // put data for jobId1 and verify
        TransientBlobKey key1a =
                (TransientBlobKey)
                        put(cache, jobId1, new ByteArrayInputStream(data), TRANSIENT_BLOB);
        assertThat(key1a).isNotNull();
        // second upload of same data should yield a different BlobKey
        BlobKey key1a2 = put(cache, jobId1, new ByteArrayInputStream(data), TRANSIENT_BLOB);
        assertThat(key1a2).isNotNull();
        verifyKeyDifferentHashEquals(key1a, key1a2);
        TransientBlobKey key1b =
                (TransientBlobKey)
                        put(cache, jobId1, new ByteArrayInputStream(data2), TRANSIENT_BLOB);
        assertThat(key1b).isNotNull();
        // files should be available on the server
        verifyContents(server, jobId1, key1a, data);
        verifyContents(server, jobId1, key1a2, data);
        verifyContents(server, jobId1, key1b, data2);
        // now put data for jobId2 and verify that both are ok
        TransientBlobKey key2a =
                (TransientBlobKey)
                        put(cache, jobId2, new ByteArrayInputStream(data), TRANSIENT_BLOB);
        assertThat(key2a).isNotNull();
        // same content, different job -> same hash, unequal key
        verifyKeyDifferentHashEquals(key1a, key2a);
        TransientBlobKey key2b =
                (TransientBlobKey)
                        put(cache, jobId2, new ByteArrayInputStream(data2), TRANSIENT_BLOB);
        assertThat(key2b).isNotNull();
        verifyKeyDifferentHashEquals(key1b, key2b);
        // verify the accessibility and the BLOB contents
        verifyContents(server, jobId1, key1a, data);
        verifyContents(server, jobId1, key1a2, data);
        verifyContents(server, jobId1, key1b, data2);
        verifyContents(server, jobId2, key2a, data);
        verifyContents(server, jobId2, key2b, data2);
        // now verify we can access the BLOBs from the cache
        verifyContents(cache, jobId1, key1a, data);
        verifyContents(cache, jobId1, key1b, data2);
        verifyContents(cache, jobId2, key2a, data);
        verifyContents(cache, jobId2, key2b, data2);
        // transient BLOBs should be deleted from the server, eventually
        verifyDeletedEventually(server, jobId1, key1a);
        verifyDeletedEventually(server, jobId1, key1b);
        verifyDeletedEventually(server, jobId2, key2a);
        verifyDeletedEventually(server, jobId2, key2b);
        // the files are still there on the cache though
        verifyContents(cache, jobId1, key1a, data);
        verifyContents(cache, jobId1, key1b, data2);
        verifyContents(cache, jobId2, key2a, data);
        verifyContents(cache, jobId2, key2b, data2);
    }
}
// --------------------------------------------------------------------------------------------
@Test
void testPutChunkedStreamTransientSuccessfulGet1() throws IOException, InterruptedException {
    // both uploads are job-unrelated
    testPutChunkedStreamTransientSuccessfulGet(null, null);
}

@Test
void testPutChunkedStreamTransientSuccessfulGet2() throws IOException, InterruptedException {
    // only the second upload is bound to a job
    final JobID secondJobId = new JobID();
    testPutChunkedStreamTransientSuccessfulGet(null, secondJobId);
}

@Test
void testPutChunkedStreamTransientSuccessfulGet3() throws IOException, InterruptedException {
    // both uploads are bound to (different) jobs
    final JobID firstJobId = new JobID();
    final JobID secondJobId = new JobID();
    testPutChunkedStreamTransientSuccessfulGet(firstJobId, secondJobId);
}

@Test
void testPutChunkedStreamTransientSuccessfulGet4() throws IOException, InterruptedException {
    // only the first upload is bound to a job
    final JobID firstJobId = new JobID();
    testPutChunkedStreamTransientSuccessfulGet(firstJobId, null);
}
/**
 * Uploads two chunked file streams for different jobs into the server via the {@link
 * BlobCacheService}. File transfers should be successful.
 *
 * <p>Note that high-availability uploads of streams is currently only possible at the {@link
 * BlobServer}.
 *
 * @param jobId1 first job id
 * @param jobId2 second job id
 */
private void testPutChunkedStreamTransientSuccessfulGet(
        @Nullable JobID jobId1, @Nullable JobID jobId2)
        throws IOException, InterruptedException {
    Tuple2<BlobServer, BlobCacheService> serverAndCache =
            TestingBlobUtils.createServerAndCache(tempDir);
    try (BlobServer server = serverAndCache.f0;
            BlobCacheService cache = serverAndCache.f1) {
        server.start();
        byte[] data = new byte[2000000];
        rnd.nextBytes(data);
        // data2 is a 44-byte slice of data (bytes 10..53) with a different content hash
        byte[] data2 = Arrays.copyOfRange(data, 10, 54);
        // put data for jobId1 and verify; a chunk size of 19 forces many partial reads
        TransientBlobKey key1a =
                (TransientBlobKey)
                        put(cache, jobId1, new ChunkedInputStream(data, 19), TRANSIENT_BLOB);
        assertThat(key1a).isNotNull();
        // second upload of same data should yield a different BlobKey
        BlobKey key1a2 = put(cache, jobId1, new ChunkedInputStream(data, 19), TRANSIENT_BLOB);
        assertThat(key1a2).isNotNull();
        verifyKeyDifferentHashEquals(key1a, key1a2);
        TransientBlobKey key1b =
                (TransientBlobKey)
                        put(cache, jobId1, new ChunkedInputStream(data2, 19), TRANSIENT_BLOB);
        assertThat(key1b).isNotNull();
        // files should be available on the server
        verifyContents(server, jobId1, key1a, data);
        verifyContents(server, jobId1, key1a2, data);
        verifyContents(server, jobId1, key1b, data2);
        // now put data for jobId2 and verify that both are ok
        TransientBlobKey key2a =
                (TransientBlobKey)
                        put(cache, jobId2, new ChunkedInputStream(data, 19), TRANSIENT_BLOB);
        assertThat(key2a).isNotNull();
        // same content, different job -> same hash, unequal key
        verifyKeyDifferentHashEquals(key1a, key2a);
        TransientBlobKey key2b =
                (TransientBlobKey)
                        put(cache, jobId2, new ChunkedInputStream(data2, 19), TRANSIENT_BLOB);
        assertThat(key2b).isNotNull();
        verifyKeyDifferentHashEquals(key1b, key2b);
        // verify the accessibility and the BLOB contents
        verifyContents(server, jobId1, key1a, data);
        verifyContents(server, jobId1, key1a2, data);
        verifyContents(server, jobId1, key1b, data2);
        verifyContents(server, jobId2, key2a, data);
        verifyContents(server, jobId2, key2b, data2);
        // now verify we can access the BLOBs from the cache
        verifyContents(cache, jobId1, key1a, data);
        verifyContents(cache, jobId1, key1b, data2);
        verifyContents(cache, jobId2, key2a, data);
        verifyContents(cache, jobId2, key2b, data2);
        // transient BLOBs should be deleted from the server, eventually
        verifyDeletedEventually(server, jobId1, key1a);
        verifyDeletedEventually(server, jobId1, key1b);
        verifyDeletedEventually(server, jobId2, key2a);
        verifyDeletedEventually(server, jobId2, key2b);
        // the files are still there on the cache though
        verifyContents(cache, jobId1, key1a, data);
        verifyContents(cache, jobId1, key1b, data2);
        verifyContents(cache, jobId2, key2a, data);
        verifyContents(cache, jobId2, key2b, data2);
    }
}
// --------------------------------------------------------------------------------------------
@Tag("org.apache.flink.testutils.junit.FailsInGHAContainerWithRootUser")
@Test
void testPutBufferFailsNoJob() throws IOException {
    // transient upload without a job id
    testPutBufferFails(null, TRANSIENT_BLOB);
}

@Tag("org.apache.flink.testutils.junit.FailsInGHAContainerWithRootUser")
@Test
void testPutBufferFailsForJob() throws IOException {
    // transient upload bound to a job
    final JobID jobId = new JobID();
    testPutBufferFails(jobId, TRANSIENT_BLOB);
}

@Tag("org.apache.flink.testutils.junit.FailsInGHAContainerWithRootUser")
@Test
void testPutBufferFailsForJobHa() throws IOException {
    // permanent (high-availability) upload bound to a job
    final JobID jobId = new JobID();
    testPutBufferFails(jobId, PERMANENT_BLOB);
}
/**
 * Uploads a byte array to a server which cannot create any files via the {@link
 * BlobCacheService}. File transfers should fail.
 *
 * @param jobId job id
 * @param blobType whether the BLOB should become permanent or transient
 */
private void testPutBufferFails(@Nullable final JobID jobId, BlobKey.BlobType blobType)
        throws IOException {
    assumeThat(OperatingSystem.isWindows()).as("setWritable doesn't work on Windows").isFalse();
    Tuple2<BlobServer, BlobCacheService> serverAndCache =
            TestingBlobUtils.createServerAndCache(tempDir);
    File tempFileDir = null;
    try (BlobServer server = serverAndCache.f0;
            BlobCacheService cache = serverAndCache.f1) {
        server.start();
        // make sure the blob server cannot create any files in its storage dir
        tempFileDir = server.createTemporaryFilename().getParentFile().getParentFile();
        assertThat(tempFileDir.setExecutable(true, false)).isTrue();
        assertThat(tempFileDir.setReadable(true, false)).isTrue();
        assertThat(tempFileDir.setWritable(false, false)).isTrue();
        byte[] data = new byte[2000000];
        rnd.nextBytes(data);
        assertThatThrownBy(() -> put(cache, jobId, data, blobType))
                .isInstanceOf(IOException.class)
                .hasMessageStartingWith("PUT operation failed: ");
    } finally {
        // restore writability even when an assertion above failed, so the test
        // directory can be removed afterwards (previously this only happened on
        // the success path; mirrors testPutBufferFailsIncoming/Store)
        if (tempFileDir != null) {
            //noinspection ResultOfMethodCallIgnored
            tempFileDir.setWritable(true, false);
        }
    }
}
@Tag("org.apache.flink.testutils.junit.FailsInGHAContainerWithRootUser")
@Test
void testPutBufferFailsIncomingNoJob() throws IOException {
    // transient upload without a job id
    testPutBufferFailsIncoming(null, TRANSIENT_BLOB);
}

@Tag("org.apache.flink.testutils.junit.FailsInGHAContainerWithRootUser")
@Test
void testPutBufferFailsIncomingForJob() throws IOException {
    // transient upload bound to a job
    final JobID jobId = new JobID();
    testPutBufferFailsIncoming(jobId, TRANSIENT_BLOB);
}

@Tag("org.apache.flink.testutils.junit.FailsInGHAContainerWithRootUser")
@Test
void testPutBufferFailsIncomingForJobHa() throws IOException {
    // permanent (high-availability) upload bound to a job
    final JobID jobId = new JobID();
    testPutBufferFailsIncoming(jobId, PERMANENT_BLOB);
}
/**
 * Uploads a byte array to a server which cannot create incoming files via the {@link
 * BlobCacheService}. File transfers should fail.
 *
 * @param jobId job id
 * @param blobType whether the BLOB should become permanent or transient
 */
private void testPutBufferFailsIncoming(@Nullable final JobID jobId, BlobKey.BlobType blobType)
        throws IOException {
    assumeThat(OperatingSystem.isWindows()).as("setWritable doesn't work on Windows").isFalse();
    Tuple2<BlobServer, BlobCacheService> serverAndCache =
            TestingBlobUtils.createServerAndCache(tempDir);
    // kept outside the try so the finally block can restore write permissions
    File tempFileDir = null;
    try (BlobServer server = serverAndCache.f0;
            BlobCacheService cache = serverAndCache.f1) {
        server.start();
        // make sure the blob server cannot create any files in its storage dir
        // (one getParentFile() here, i.e. the directory holding incoming files)
        tempFileDir = server.createTemporaryFilename().getParentFile();
        assertThat(tempFileDir.setExecutable(true, false)).isTrue();
        assertThat(tempFileDir.setReadable(true, false)).isTrue();
        assertThat(tempFileDir.setWritable(false, false)).isTrue();
        byte[] data = new byte[2000000];
        rnd.nextBytes(data);
        try {
            assertThatThrownBy(() -> put(cache, jobId, data, blobType))
                    .isInstanceOf(IOException.class)
                    .hasMessageStartingWith("PUT operation failed: ");
        } finally {
            File storageDir = tempFileDir.getParentFile();
            // only the incoming directory should exist (no job directory!)
            assertThat(storageDir.list()).containsExactly("incoming");
        }
    } finally {
        // set writable again to make sure we can remove the directory
        if (tempFileDir != null) {
            //noinspection ResultOfMethodCallIgnored
            tempFileDir.setWritable(true, false);
        }
    }
}
@Tag("org.apache.flink.testutils.junit.FailsInGHAContainerWithRootUser")
@Test
void testPutBufferFailsStoreNoJob() throws IOException {
    // transient upload without a job id
    testPutBufferFailsStore(null, TRANSIENT_BLOB);
}

@Tag("org.apache.flink.testutils.junit.FailsInGHAContainerWithRootUser")
@Test
void testPutBufferFailsStoreForJob() throws IOException {
    // transient upload bound to a job
    final JobID jobId = new JobID();
    testPutBufferFailsStore(jobId, TRANSIENT_BLOB);
}

@Tag("org.apache.flink.testutils.junit.FailsInGHAContainerWithRootUser")
@Test
void testPutBufferFailsStoreForJobHa() throws IOException {
    // permanent (high-availability) upload bound to a job
    final JobID jobId = new JobID();
    testPutBufferFailsStore(jobId, PERMANENT_BLOB);
}
/**
 * Uploads a byte array to a server which cannot create files via the {@link BlobCacheService}.
 * File transfers should fail.
 *
 * @param jobId job id
 * @param blobType whether the BLOB should become permanent or transient
 */
private void testPutBufferFailsStore(@Nullable final JobID jobId, BlobKey.BlobType blobType)
        throws IOException {
    assumeThat(OperatingSystem.isWindows()).as("setWritable doesn't work on Windows").isFalse();
    Tuple2<BlobServer, BlobCacheService> serverAndCache =
            TestingBlobUtils.createServerAndCache(tempDir);
    // kept outside the try so the finally block can restore write permissions
    File jobStoreDir = null;
    try (BlobServer server = serverAndCache.f0;
            BlobCacheService cache = serverAndCache.f1) {
        server.start();
        // make sure the blob server cannot create any files in its storage dir
        // (here: the final per-job store directory, so the incoming write succeeds
        // but the move into the store fails)
        jobStoreDir =
                server.getStorageLocation(jobId, BlobKey.createKey(blobType)).getParentFile();
        assertThat(jobStoreDir.setExecutable(true, false)).isTrue();
        assertThat(jobStoreDir.setReadable(true, false)).isTrue();
        assertThat(jobStoreDir.setWritable(false, false)).isTrue();
        byte[] data = new byte[2000000];
        rnd.nextBytes(data);
        try {
            assertThatThrownBy(() -> put(cache, jobId, data, blobType))
                    .isInstanceOf(IOException.class)
                    .hasMessageStartingWith("PUT operation failed: ");
        } finally {
            // there should be no remaining incoming files
            File incomingFileDir = new File(jobStoreDir.getParent(), "incoming");
            assertThat(incomingFileDir.list()).isEmpty();
            // there should be no files in the job directory
            assertThat(jobStoreDir.list()).isEmpty();
        }
    } finally {
        // set writable again to make sure we can remove the directory
        if (jobStoreDir != null) {
            //noinspection ResultOfMethodCallIgnored
            jobStoreDir.setWritable(true, false);
        }
    }
}
@Test
void testConcurrentPutOperationsNoJob()
        throws IOException, ExecutionException, InterruptedException {
    // transient uploads without a job id
    testConcurrentPutOperations(null, TRANSIENT_BLOB);
}

@Test
void testConcurrentPutOperationsForJob()
        throws IOException, ExecutionException, InterruptedException {
    // transient uploads bound to a job
    final JobID jobId = new JobID();
    testConcurrentPutOperations(jobId, TRANSIENT_BLOB);
}

@Test
void testConcurrentPutOperationsForJobHa()
        throws IOException, ExecutionException, InterruptedException {
    // permanent (high-availability) uploads bound to a job
    final JobID jobId = new JobID();
    testConcurrentPutOperations(jobId, PERMANENT_BLOB);
}
/**
 * [FLINK-6020] Tests that concurrent put operations will only upload the file once to the
 * {@link BlobStore} and that the files are not corrupt at any time.
 *
 * @param jobId job ID to use (or <tt>null</tt> if job-unrelated)
 * @param blobType whether the BLOB should become permanent or transient
 */
private void testConcurrentPutOperations(
        @Nullable final JobID jobId, final BlobKey.BlobType blobType)
        throws IOException, InterruptedException, ExecutionException {
    final Configuration config = new Configuration();
    final BlobStore blobStoreServer = mock(BlobStore.class);
    final BlobStore blobStoreCache = mock(BlobStore.class);
    int concurrentPutOperations = 2;
    int dataSize = 1024;
    // latch used by BlockingInputStream so the transient uploads overlap in time
    final CountDownLatch countDownLatch = new CountDownLatch(concurrentPutOperations);
    final byte[] data = new byte[dataSize];
    final List<Path> jars;
    if (blobType == PERMANENT_BLOB) {
        // implement via JAR file upload instead:
        File tmpFile = new File(tempDir.toFile(), "test_file");
        FileUtils.writeByteArrayToFile(tmpFile, data);
        jars = Collections.singletonList(new Path(tmpFile.getAbsolutePath()));
    } else {
        jars = null;
    }
    Collection<CompletableFuture<BlobKey>> allFutures =
            new ArrayList<>(concurrentPutOperations);
    ExecutorService executor = Executors.newFixedThreadPool(concurrentPutOperations);
    Tuple2<BlobServer, BlobCacheService> serverAndCache =
            TestingBlobUtils.createServerAndCache(tempDir, blobStoreServer, blobStoreCache);
    try (BlobServer server = serverAndCache.f0;
            BlobCacheService cache = serverAndCache.f1) {
        server.start();
        // for highAvailability
        final InetSocketAddress serverAddress =
                new InetSocketAddress("localhost", server.getPort());
        // uploading HA BLOBs works on BlobServer only (and, for now, via the BlobClient)
        for (int i = 0; i < concurrentPutOperations; i++) {
            final Supplier<BlobKey> callable;
            if (blobType == PERMANENT_BLOB) {
                // cannot use a blocking stream here (upload only possible via files)
                callable =
                        () -> {
                            try {
                                List<PermanentBlobKey> keys =
                                        BlobClient.uploadFiles(
                                                serverAddress, config, jobId, jars);
                                assertThat(keys).hasSize(1);
                                BlobKey uploadedKey = keys.get(0);
                                // check the uploaded file's contents (concurrently)
                                verifyContents(server, jobId, uploadedKey, data);
                                return uploadedKey;
                            } catch (IOException e) {
                                // wrap so it surfaces through the CompletableFuture
                                throw new CompletionException(
                                        new FlinkException("Could not upload blob.", e));
                            }
                        };
            } else {
                callable =
                        () -> {
                            try {
                                BlockingInputStream inputStream =
                                        new BlockingInputStream(countDownLatch, data);
                                BlobKey uploadedKey = put(cache, jobId, inputStream, blobType);
                                // check the uploaded file's contents (concurrently)
                                verifyContents(server, jobId, uploadedKey, data);
                                return uploadedKey;
                            } catch (IOException e) {
                                // wrap so it surfaces through the CompletableFuture
                                throw new CompletionException(
                                        new FlinkException("Could not upload blob.", e));
                            }
                        };
            }
            CompletableFuture<BlobKey> putFuture =
                    CompletableFuture.supplyAsync(callable, executor);
            allFutures.add(putFuture);
        }
        FutureUtils.ConjunctFuture<Collection<BlobKey>> conjunctFuture =
                FutureUtils.combineAll(allFutures);
        // wait until all operations have completed and check that no exception was thrown
        Collection<BlobKey> blobKeys = conjunctFuture.get();
        Iterator<BlobKey> blobKeyIterator = blobKeys.iterator();
        assertThat(blobKeyIterator).hasNext();
        BlobKey blobKey = blobKeyIterator.next();
        // make sure that all blob keys are the same
        while (blobKeyIterator.hasNext()) {
            // check for unique BlobKey, but should have same hash
            verifyKeyDifferentHashEquals(blobKey, blobKeyIterator.next());
        }
        // check the uploaded file's contents
        verifyContents(server, jobId, blobKey, data);
        // check that we only uploaded the file once to the blob store
        if (blobType == PERMANENT_BLOB) {
            verify(blobStoreServer, times(1)).put(any(File.class), eq(jobId), eq(blobKey));
        } else {
            // can't really verify much in the other cases other than that the put operations
            // should
            // work and not corrupt files
            verify(blobStoreServer, times(0)).put(any(File.class), eq(jobId), eq(blobKey));
        }
        // caches must not access the blob store (they are not allowed to write there)
        verify(blobStoreCache, times(0)).put(any(File.class), eq(jobId), eq(blobKey));
    } finally {
        executor.shutdownNow();
    }
}
/**
 * Checks that the given blob will be deleted at the {@link BlobServer} eventually (waits at
 * most 30s).
 *
 * @param server BLOB server
 * @param jobId job ID or <tt>null</tt> if job-unrelated
 * @param keys key(s) identifying the BLOB to request
 */
static void verifyDeletedEventually(BlobServer server, @Nullable JobID jobId, BlobKey... keys)
        throws IOException, InterruptedException {
    // use the monotonic clock for the timeout: wall-clock (currentTimeMillis)
    // adjustments could otherwise shorten or extend the 30s window
    final long deadlineNanos = System.nanoTime() + 30_000_000_000L; // 30s
    // check before sleeping so we don't wait needlessly when the files are
    // already gone (the previous do-while always slept at least once)
    while (checkFilesExist(jobId, Arrays.asList(keys), server, false) != 0
            && System.nanoTime() - deadlineNanos < 0) {
        Thread.sleep(10);
    }
    // final, definitive assertion for each key (also fails on timeout)
    for (BlobKey key : keys) {
        verifyDeleted(server, jobId, key);
    }
}
}
|
hibernate/hibernate-orm | 34,585 | tooling/metamodel-generator/src/main/java/org/hibernate/processor/HibernateProcessor.java | /*
* SPDX-License-Identifier: Apache-2.0
* Copyright Red Hat Inc. and Hibernate Authors
*/
package org.hibernate.processor;
import org.checkerframework.checker.nullness.qual.Nullable;
import org.hibernate.processor.annotation.AnnotationMetaEntity;
import org.hibernate.processor.annotation.AnnotationMetaPackage;
import org.hibernate.processor.annotation.NonManagedMetamodel;
import org.hibernate.processor.model.Metamodel;
import org.hibernate.processor.util.Constants;
import org.hibernate.processor.xml.JpaDescriptorParser;
import javax.annotation.processing.AbstractProcessor;
import javax.annotation.processing.ProcessingEnvironment;
import javax.annotation.processing.RoundEnvironment;
import javax.annotation.processing.SupportedAnnotationTypes;
import javax.annotation.processing.SupportedOptions;
import javax.lang.model.SourceVersion;
import javax.lang.model.element.AnnotationMirror;
import javax.lang.model.element.AnnotationValue;
import javax.lang.model.element.Element;
import javax.lang.model.element.ElementKind;
import javax.lang.model.element.ExecutableElement;
import javax.lang.model.element.Modifier;
import javax.lang.model.element.PackageElement;
import javax.lang.model.element.QualifiedNameable;
import javax.lang.model.element.TypeElement;
import javax.lang.model.type.DeclaredType;
import javax.lang.model.type.TypeKind;
import javax.lang.model.type.TypeMirror;
import javax.lang.model.util.Elements;
import javax.tools.Diagnostic;
import javax.tools.StandardLocation;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.io.Writer;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Function;
import static java.lang.Boolean.parseBoolean;
import static javax.lang.model.util.ElementFilter.fieldsIn;
import static javax.lang.model.util.ElementFilter.methodsIn;
import static org.hibernate.processor.HibernateProcessor.ADD_GENERATED_ANNOTATION;
import static org.hibernate.processor.HibernateProcessor.ADD_GENERATION_DATE;
import static org.hibernate.processor.HibernateProcessor.ADD_SUPPRESS_WARNINGS_ANNOTATION;
import static org.hibernate.processor.HibernateProcessor.DEBUG_OPTION;
import static org.hibernate.processor.HibernateProcessor.EXCLUDE;
import static org.hibernate.processor.HibernateProcessor.FULLY_ANNOTATION_CONFIGURED_OPTION;
import static org.hibernate.processor.HibernateProcessor.INCLUDE;
import static org.hibernate.processor.HibernateProcessor.INDEX;
import static org.hibernate.processor.HibernateProcessor.LAZY_XML_PARSING;
import static org.hibernate.processor.HibernateProcessor.ORM_XML_OPTION;
import static org.hibernate.processor.HibernateProcessor.PERSISTENCE_XML_OPTION;
import static org.hibernate.processor.HibernateProcessor.SUPPRESS_JAKARTA_DATA_METAMODEL;
import static org.hibernate.processor.util.Constants.EMBEDDABLE;
import static org.hibernate.processor.util.Constants.ENTITY;
import static org.hibernate.processor.util.Constants.FIND;
import static org.hibernate.processor.util.Constants.HIB_FETCH_PROFILE;
import static org.hibernate.processor.util.Constants.HIB_FETCH_PROFILES;
import static org.hibernate.processor.util.Constants.HIB_FILTER_DEF;
import static org.hibernate.processor.util.Constants.HIB_FILTER_DEFS;
import static org.hibernate.processor.util.Constants.HIB_NAMED_NATIVE_QUERIES;
import static org.hibernate.processor.util.Constants.HIB_NAMED_NATIVE_QUERY;
import static org.hibernate.processor.util.Constants.HIB_NAMED_QUERIES;
import static org.hibernate.processor.util.Constants.HIB_NAMED_QUERY;
import static org.hibernate.processor.util.Constants.HQL;
import static org.hibernate.processor.util.Constants.JD_REPOSITORY;
import static org.hibernate.processor.util.Constants.MAPPED_SUPERCLASS;
import static org.hibernate.processor.util.Constants.NAMED_ENTITY_GRAPH;
import static org.hibernate.processor.util.Constants.NAMED_ENTITY_GRAPHS;
import static org.hibernate.processor.util.Constants.NAMED_NATIVE_QUERIES;
import static org.hibernate.processor.util.Constants.NAMED_NATIVE_QUERY;
import static org.hibernate.processor.util.Constants.NAMED_QUERIES;
import static org.hibernate.processor.util.Constants.NAMED_QUERY;
import static org.hibernate.processor.util.Constants.SQL;
import static org.hibernate.processor.util.Constants.SQL_RESULT_SET_MAPPING;
import static org.hibernate.processor.util.Constants.SQL_RESULT_SET_MAPPINGS;
import static org.hibernate.processor.util.TypeUtils.containsAnnotation;
import static org.hibernate.processor.util.TypeUtils.getAnnotationMirror;
import static org.hibernate.processor.util.TypeUtils.getAnnotationValue;
import static org.hibernate.processor.util.TypeUtils.hasAnnotation;
import static org.hibernate.processor.util.TypeUtils.isClassRecordOrInterfaceType;
import static org.hibernate.processor.util.TypeUtils.isMemberType;
/**
* Main annotation processor.
*
* @author Max Andersen
* @author Hardy Ferentschik
* @author Emmanuel Bernard
* @author Gavin King
*/
@SupportedAnnotationTypes({
// standard for JPA 2
ENTITY, MAPPED_SUPERCLASS, EMBEDDABLE,
// standard for JPA 3.2
NAMED_QUERY, NAMED_QUERIES, NAMED_NATIVE_QUERY, NAMED_NATIVE_QUERIES,
NAMED_ENTITY_GRAPH, NAMED_ENTITY_GRAPHS, SQL_RESULT_SET_MAPPING, SQL_RESULT_SET_MAPPINGS,
// extra for Hibernate
HIB_FETCH_PROFILE, HIB_FETCH_PROFILES, HIB_FILTER_DEF, HIB_FILTER_DEFS,
HIB_NAMED_QUERY, HIB_NAMED_QUERIES, HIB_NAMED_NATIVE_QUERY, HIB_NAMED_NATIVE_QUERIES,
// Hibernate query methods
HQL, SQL, FIND,
// Jakarta Data repositories
JD_REPOSITORY // do not need to list any other Jakarta Data annotations here
})
@SupportedOptions({
DEBUG_OPTION,
PERSISTENCE_XML_OPTION,
ORM_XML_OPTION,
FULLY_ANNOTATION_CONFIGURED_OPTION,
LAZY_XML_PARSING,
ADD_GENERATION_DATE,
ADD_GENERATED_ANNOTATION,
ADD_SUPPRESS_WARNINGS_ANNOTATION,
SUPPRESS_JAKARTA_DATA_METAMODEL,
INCLUDE, EXCLUDE,
INDEX
})
public class HibernateProcessor extends AbstractProcessor {
/**
 * Debug logging from the processor
 */
public static final String DEBUG_OPTION = "debug";
/**
 * Path to a {@code persistence.xml} file
 */
public static final String PERSISTENCE_XML_OPTION = "persistenceXml";
/**
 * Path to an {@code orm.xml} file
 */
public static final String ORM_XML_OPTION = "ormXml";
/**
 * Controls whether the processor should consider XML files
 */
public static final String FULLY_ANNOTATION_CONFIGURED_OPTION = "fullyAnnotationConfigured";
/**
 * Controls whether the processor should only load XML files when there have been changes
 */
public static final String LAZY_XML_PARSING = "lazyXmlParsing";
/**
 * Whether the {@code jakarta.annotation.Generated} annotation should be added to
 * the generated classes
 */
public static final String ADD_GENERATED_ANNOTATION = "addGeneratedAnnotation";
/**
 * Assuming that {@linkplain #ADD_GENERATED_ANNOTATION} is enabled, this option controls
 * whether {@code @Generated#date} should be populated.
 */
public static final String ADD_GENERATION_DATE = "addGenerationDate";
/**
 * A comma-separated list of warnings to suppress, or simply {@code true}
 * if {@code @SuppressWarnings({"deprecation","rawtypes"})} should be
 * added to the generated classes.
 */
public static final String ADD_SUPPRESS_WARNINGS_ANNOTATION = "addSuppressWarningsAnnotation";
/**
 * Option to suppress generation of the Jakarta Data static metamodel,
 * even when Jakarta Data is available on the build path.
 */
public static final String SUPPRESS_JAKARTA_DATA_METAMODEL = "suppressJakartaDataMetamodel";
/**
 * Option to include only certain types, according to a list of patterns.
 * The wildcard character is {@code *}, and patterns are comma-separated.
 * For example: {@code *.entity.*,*Repository}. The default include is
 * simply {@code *}, meaning that all types are included.
 */
public static final String INCLUDE = "include";
/**
 * Option to exclude certain types, according to a list of patterns.
 * The wildcard character is {@code *}, and patterns are comma-separated.
 * For example: {@code *.framework.*,*$$}. The default exclude is
 * empty.
 */
public static final String EXCLUDE = "exclude";
/**
 * Option to suppress creation of a filesystem-based index of entity
 * types and enums for use by the query validator. By default, an
 * index is created.
 */
public static final String INDEX = "index";
// whether this processor's process() reports the handled annotations as claimed,
// preventing other processors from seeing them
private static final boolean ALLOW_OTHER_PROCESSORS_TO_CLAIM_ANNOTATIONS = false;
// dupe of ProcessorSessionFactory.ENTITY_INDEX for reasons of modularity
public static final String ENTITY_INDEX = "entity.index";
// per-compilation state shared by the processing rounds; created in init()
private Context context;
@Override
public synchronized void init(ProcessingEnvironment processingEnvironment) {
	// must run first so the framework populates processingEnv
	super.init( processingEnvironment );
	context = new Context( processingEnvironment );
	context.logMessage(
			Diagnostic.Kind.NOTE,
			"Hibernate compile-time tooling " + Version.getVersionString()
	);
	// handleSettings() inspects the classpath/options and reports whether the
	// model is configured purely via annotations (i.e. XML should be ignored)
	final boolean fullyAnnotationConfigured = handleSettings( processingEnvironment );
	if ( !fullyAnnotationConfigured ) {
		new JpaDescriptorParser( context ).parseMappingXml();
		// NOTE(review): when configuration is fully XML-based, metamodel classes
		// are generated eagerly here rather than in a processing round —
		// presumably because no annotated sources would trigger process(); confirm
		if ( context.isFullyXmlConfigured() ) {
			createMetaModelClasses();
		}
	}
}
private boolean handleSettings(ProcessingEnvironment environment) {
final PackageElement jakartaInjectPackage =
context.getProcessingEnvironment().getElementUtils()
.getPackageElement( "jakarta.inject" );
final PackageElement jakartaAnnotationPackage =
context.getProcessingEnvironment().getElementUtils()
.getPackageElement( "jakarta.annotation" );
final PackageElement jakartaContextPackage =
context.getProcessingEnvironment().getElementUtils()
.getPackageElement( "jakarta.enterprise.context" );
final PackageElement jakartaTransactionPackage =
context.getProcessingEnvironment().getElementUtils()
.getPackageElement( "jakarta.transaction" );
final PackageElement jakartaDataPackage =
context.getProcessingEnvironment().getElementUtils()
.getPackageElement( "jakarta.data" );
final PackageElement quarkusOrmPackage =
context.getProcessingEnvironment().getElementUtils()
.getPackageElement( "io.quarkus.hibernate.orm" );
final PackageElement quarkusReactivePackage =
context.getProcessingEnvironment().getElementUtils()
.getPackageElement( "io.quarkus.hibernate.reactive.runtime" );
final PackageElement dataEventPackage =
context.getProcessingEnvironment().getElementUtils()
.getPackageElement( "jakarta.data.event" );
PackageElement quarkusOrmPanachePackage =
context.getProcessingEnvironment().getElementUtils()
.getPackageElement( "io.quarkus.hibernate.orm.panache" );
PackageElement quarkusPanache2Package =
context.getProcessingEnvironment().getElementUtils()
.getPackageElement( "io.quarkus.hibernate.panache" );
PackageElement quarkusReactivePanachePackage =
context.getProcessingEnvironment().getElementUtils()
.getPackageElement( "io.quarkus.hibernate.reactive.panache" );
// This is imported automatically by Quarkus extensions when HR is also imported
PackageElement quarkusReactivePanacheCommonPackage =
context.getProcessingEnvironment().getElementUtils()
.getPackageElement( "io.quarkus.hibernate.reactive.panache.common" );
if ( packagePresent(quarkusReactivePanachePackage)
&& packagePresent(quarkusOrmPanachePackage) ) {
context.logMessage(
Diagnostic.Kind.WARNING,
"Both Quarkus Hibernate ORM and Hibernate Reactive with Panache detected: this is not supported, so will proceed as if none were there"
);
quarkusOrmPanachePackage = quarkusReactivePanachePackage = null;
}
final PackageElement springBeansPackage =
context.getProcessingEnvironment().getElementUtils()
.getPackageElement( "org.springframework.beans.factory" );
final PackageElement springStereotypePackage =
context.getProcessingEnvironment().getElementUtils()
.getPackageElement( "org.springframework.stereotype" );
context.setAddInjectAnnotation( packagePresent(jakartaInjectPackage) );
context.setAddNonnullAnnotation( packagePresent(jakartaAnnotationPackage) );
context.setAddGeneratedAnnotation( packagePresent(jakartaAnnotationPackage) );
context.setAddDependentAnnotation( packagePresent(jakartaContextPackage) );
context.setAddTransactionScopedAnnotation( packagePresent(jakartaTransactionPackage) );
context.setDataEventPackageAvailable( packagePresent(dataEventPackage) );
context.setQuarkusInjection( packagePresent(quarkusOrmPackage) || packagePresent(quarkusReactivePackage) );
context.setUsesQuarkusOrm( packagePresent(quarkusOrmPanachePackage) );
context.setUsesQuarkusReactive( packagePresent(quarkusReactivePanachePackage) );
context.setSpringInjection( packagePresent(springBeansPackage) );
context.setAddComponentAnnotation( packagePresent(springStereotypePackage) );
context.setUsesQuarkusPanache2( packagePresent(quarkusPanache2Package) );
context.setUsesQuarkusReactiveCommon( packagePresent(quarkusReactivePanacheCommonPackage) );
final Map<String, String> options = environment.getOptions();
final boolean suppressJakartaData = parseBoolean( options.get( SUPPRESS_JAKARTA_DATA_METAMODEL ) );
context.setGenerateJakartaDataStaticMetamodel( !suppressJakartaData && packagePresent(jakartaDataPackage) );
final String setting = options.get( ADD_GENERATED_ANNOTATION );
if ( setting != null ) {
context.setAddGeneratedAnnotation( parseBoolean( setting ) );
}
context.setAddGenerationDate( parseBoolean( options.get( ADD_GENERATION_DATE ) ) );
final String suppressedWarnings = options.get( ADD_SUPPRESS_WARNINGS_ANNOTATION );
if ( suppressedWarnings != null ) {
context.setSuppressedWarnings( parseBoolean( suppressedWarnings )
? new String[] {"deprecation", "rawtypes"} // legacy behavior from HHH-12068
: suppressedWarnings.replace( " ", "" ).split( ",\\s*" ) );
}
context.setInclude( options.getOrDefault( INCLUDE, "*" ) );
context.setExclude( options.getOrDefault( EXCLUDE, "" ) );
context.setIndexing( parseBoolean( options.getOrDefault( INDEX, "true" ) ) );
return parseBoolean( options.get( FULLY_ANNOTATION_CONFIGURED_OPTION ) );
}
private static boolean packagePresent(@Nullable PackageElement pack) {
return pack != null
//HHH-18019 ecj always returns a non-null PackageElement
&& !pack.getEnclosedElements().isEmpty();
}
	@Override
	public SourceVersion getSupportedSourceVersion() {
		// claim the latest source version so the compiler never warns that
		// this processor was written against an older release
		return SourceVersion.latestSupported();
	}
	/**
	 * Main processing entrypoint, invoked once per annotation round.
	 * Intermediate rounds process root elements and generate metamodel
	 * classes; the final round flushes the entity index and reports any
	 * elements that could never be processed.
	 */
	@Override
	public boolean process(final Set<? extends TypeElement> annotations, final RoundEnvironment roundEnvironment) {
		// https://hibernate.atlassian.net/browse/METAGEN-45 claims that we need
		// if ( roundEnvironment.processingOver() || annotations.size() == 0)
		// but that was back on JDK 6 and I don't see why it should be necessary
		// - in fact we want to use the last round to run the 'elementsToRedo'
		if ( roundEnvironment.processingOver() ) {
			// last round: anything still queued for redo is a hard failure
			final Set<CharSequence> elementsToRedo = context.getElementsToRedo();
			if ( !elementsToRedo.isEmpty() ) {
				context.logMessage( Diagnostic.Kind.ERROR, "Failed to generate code for " + elementsToRedo );
			}
			writeIndex();
		}
		else if ( context.isFullyXmlConfigured() ) {
			context.logMessage(
					Diagnostic.Kind.OTHER,
					"Skipping the processing of annotations since persistence unit is purely XML configured."
			);
		}
		else {
			context.logMessage( Diagnostic.Kind.OTHER, "Starting new round" );
			try {
				processClasses( roundEnvironment );
				createMetaModelClasses();
			}
			catch (Exception e) {
				// report but don't rethrow: a processor must not crash the compiler
				final StringWriter stack = new StringWriter();
				e.printStackTrace( new PrintWriter(stack) );
				final Throwable cause = e.getCause();
				final String message =
						cause != null && cause != e
								? e.getMessage() + " caused by " + cause.getMessage()
								: e.getMessage();
				context.logMessage( Diagnostic.Kind.ERROR, "Error running Hibernate processor: " + message );
				context.logMessage( Diagnostic.Kind.ERROR, stack.toString() );
			}
		}
		return ALLOW_OTHER_PROCESSORS_TO_CLAIM_ANNOTATIONS;
	}
private boolean included(Element element) {
if ( element instanceof TypeElement || element instanceof PackageElement ) {
final QualifiedNameable nameable = (QualifiedNameable) element;
return context.isIncluded( nameable.getQualifiedName().toString() );
}
else {
return false;
}
}
	/**
	 * Processes every root element of this round, after first retrying
	 * any elements deferred from previous rounds.
	 */
	private void processClasses(RoundEnvironment roundEnvironment) {
		// iterate over a copy: the redo set is mutated inside the loop
		for ( CharSequence elementName : new HashSet<>( context.getElementsToRedo() ) ) {
			context.logMessage( Diagnostic.Kind.OTHER, "Redoing element '" + elementName + "'" );
			final TypeElement typeElement = context.getElementUtils().getTypeElement( elementName );
			try {
				// NOTE(review): passes the element itself as the 'parent' argument,
				// unlike handleRootElementAuxiliaryAnnotationMirrors which passes
				// the enclosing element — confirm this is intended
				final AnnotationMetaEntity metaEntity =
						AnnotationMetaEntity.create( typeElement, context,
								parentMetadata( typeElement, context::getMetaEntity ) );
				context.addMetaAuxiliary( metaEntity.getQualifiedName(), metaEntity );
				context.removeElementToRedo( elementName );
			}
			catch (ProcessLaterException processLaterException) {
				// leave it there for next time
			}
		}
		for ( Element element : roundEnvironment.getRootElements() ) {
			processElement( element, null );
		}
	}
private void processElement(Element element, @Nullable Element parent) {
try {
inspectRootElement(element, parent, null);
}
catch ( ProcessLaterException processLaterException ) {
if ( element instanceof TypeElement typeElement ) {
context.logMessage(
Diagnostic.Kind.OTHER,
"Could not process '" + element + "' (will redo in next round)"
);
context.addElementToRedo( typeElement.getQualifiedName() );
}
}
}
private @Nullable AnnotationMetaEntity parentMetadata(
@Nullable Element parent, Function<String, @Nullable Object> metamodel) {
if ( parent instanceof TypeElement parentElement
&& metamodel.apply( parentElement.getQualifiedName().toString() )
instanceof AnnotationMetaEntity parentMetaEntity ) {
return parentMetaEntity;
}
else {
return null;
}
}
private boolean hasPackageAnnotation(Element element, String annotation) {
final PackageElement pack = context.getElementUtils().getPackageOf( element ); // null for module descriptor
return pack != null && hasAnnotation( pack, annotation );
}
	/**
	 * Inspects one root element and dispatches it to the appropriate
	 * handler: entity/embeddable metamodel generation, auxiliary
	 * annotation handling, Jakarta Data repository handling, or plain
	 * recursion into nested types.
	 *
	 * @param element the element to inspect
	 * @param parent the enclosing element, when this is a nested type
	 * @param primaryEntity for query interfaces nested inside an entity,
	 *                      that entity; otherwise null
	 */
	private void inspectRootElement(Element element, @Nullable Element parent, @Nullable TypeElement primaryEntity) {
		if ( !included( element )
				|| hasAnnotation( element, Constants.EXCLUDE )
				|| hasPackageAnnotation( element, Constants.EXCLUDE )
				|| element.getModifiers().contains( Modifier.PRIVATE ) ) {
			// skip it completely
			return;
		}
		else if ( isEntityOrEmbeddable( element )
				// NOTE(review): private elements already returned above, so this
				// second PRIVATE check looks redundant — confirm before removing
				&& !element.getModifiers().contains( Modifier.PRIVATE ) ) {
			context.logMessage( Diagnostic.Kind.OTHER, "Processing annotated entity class '" + element + "'" );
			handleRootElementAnnotationMirrors( element, parent );
		}
		else if ( hasAuxiliaryAnnotations( element ) ) {
			context.logMessage( Diagnostic.Kind.OTHER, "Processing annotated class '" + element + "'" );
			handleRootElementAuxiliaryAnnotationMirrors( element );
		}
		else if ( element instanceof TypeElement typeElement ) {
			final AnnotationMirror repository = getAnnotationMirror( element, JD_REPOSITORY );
			if ( repository != null ) {
				// only handle repositories backed by Hibernate (the default provider)
				final AnnotationValue provider = getAnnotationValue( repository, "provider" );
				if ( provider == null
						|| provider.getValue().toString().isEmpty()
						|| provider.getValue().toString().equalsIgnoreCase("hibernate") ) {
					context.logMessage( Diagnostic.Kind.OTHER, "Processing repository class '" + element + "'" );
					final AnnotationMetaEntity metaEntity =
							AnnotationMetaEntity.create( typeElement, context,
									parentMetadata( parent, context::getMetaEntity ),
									primaryEntity );
					if ( metaEntity.isInitialized() ) {
						context.addMetaAuxiliary( metaEntity.getQualifiedName(), metaEntity );
					}
					// otherwise discard it (assume it has query by magical method name stuff)
				}
			}
			else {
				// not a repository: look for annotated query methods
				for ( Element member : typeElement.getEnclosedElements() ) {
					if ( hasAnnotation( member, HQL, SQL, FIND ) ) {
						context.logMessage( Diagnostic.Kind.OTHER, "Processing annotated class '" + element + "'" );
						final AnnotationMetaEntity metaEntity =
								AnnotationMetaEntity.create( typeElement, context,
										parentMetadata( parent, context::getMetaEntity ),
										primaryEntity );
						context.addMetaAuxiliary( metaEntity.getQualifiedName(), metaEntity );
						break;
					}
				}
				if ( enclosesEntityOrEmbeddable( element ) ) {
					// generate a non-managed holder type so nested entities
					// can be referenced through it
					final NonManagedMetamodel metaEntity =
							NonManagedMetamodel.create( typeElement, context, false,
									parentMetadata( parent, context::getMetamodel ) );
					context.addMetaEntity( metaEntity.getQualifiedName(), metaEntity );
					if ( context.generateJakartaDataStaticMetamodel() ) {
						final NonManagedMetamodel dataMetaEntity =
								NonManagedMetamodel.create( typeElement, context, true,
										parentMetadata( parent, context::getDataMetaEntity ) );
						context.addDataMetaEntity( dataMetaEntity.getQualifiedName(), dataMetaEntity );
					}
				}
			}
		}
		// recurse into nested types regardless of how this element was handled
		if ( isClassRecordOrInterfaceType( element ) ) {
			for ( final Element child : element.getEnclosedElements() ) {
				if ( isClassRecordOrInterfaceType( child ) ) {
					processElement( child, element );
				}
			}
		}
	}
	/**
	 * Writes all pending metamodel classes: auxiliaries first, then
	 * Jakarta Persistence entities, Jakarta Data entities, and finally
	 * the (order-sensitive) embeddables.
	 */
	private void createMetaModelClasses() {
		for ( Metamodel aux : context.getMetaAuxiliaries() ) {
			// nested auxiliaries are emitted as part of their enclosing type
			if ( !context.isAlreadyGenerated(aux)
					&& !isClassRecordOrInterfaceType( aux.getElement().getEnclosingElement() ) ) {
				context.logMessage( Diagnostic.Kind.OTHER,
						"Writing metamodel for auxiliary '" + aux + "'" );
				ClassWriter.writeFile( aux, context );
				context.markGenerated(aux);
			}
		}
		for ( Metamodel entity : context.getMetaEntities() ) {
			if ( !context.isAlreadyGenerated( entity ) && !isMemberType( entity.getElement() ) ) {
				context.logMessage( Diagnostic.Kind.OTHER,
						"Writing Jakarta Persistence metamodel for entity '" + entity + "'" );
				ClassWriter.writeFile( entity, context );
				context.markGenerated(entity);
			}
		}
		for ( Metamodel entity : context.getDataMetaEntities() ) {
			if ( !context.isAlreadyGenerated( entity ) && !isMemberType( entity.getElement() ) ) {
				context.logMessage( Diagnostic.Kind.OTHER,
						"Writing Jakarta Data metamodel for entity '" + entity + "'" );
				ClassWriter.writeFile( entity, context );
				context.markGenerated(entity);
			}
		}
		// embeddables may depend on each other, so they get special ordering
		processEmbeddables( context.getMetaEmbeddables() );
		processEmbeddables( context.getDataMetaEmbeddables() );
	}
/**
* We cannot process the delayed classes in any order.
* There might be dependencies between them.
* We need to process the toplevel classes first.
*/
private void processEmbeddables(Collection<Metamodel> models) {
while ( !models.isEmpty() ) {
final Set<Metamodel> processed = new HashSet<>();
final int toProcessCountBeforeLoop = models.size();
for ( Metamodel metamodel : models ) {
// see METAGEN-36
if ( context.isAlreadyGenerated(metamodel) ) {
processed.add( metamodel );
}
else if ( !modelGenerationNeedsToBeDeferred(models, metamodel ) ) {
context.logMessage(
Diagnostic.Kind.OTHER,
"Writing metamodel for embeddable " + metamodel
);
ClassWriter.writeFile( metamodel, context );
context.markGenerated(metamodel);
processed.add( metamodel );
}
}
models.removeAll( processed );
if ( models.size() >= toProcessCountBeforeLoop ) {
context.logMessage(
Diagnostic.Kind.ERROR,
"Potential endless loop in generation of entities."
);
}
}
}
private boolean modelGenerationNeedsToBeDeferred(Collection<Metamodel> entities, Metamodel containedEntity) {
final Element element = containedEntity.getElement();
if ( element instanceof TypeElement ) {
final ContainsAttributeTypeVisitor visitor =
new ContainsAttributeTypeVisitor( (TypeElement) element, context );
for ( Metamodel entity : entities ) {
if ( !entity.equals( containedEntity ) ) {
final List<? extends Element> enclosedElements =
entity.getElement().getEnclosedElements();
for ( Element subElement : fieldsIn(enclosedElements) ) {
final TypeMirror mirror = subElement.asType();
if ( TypeKind.DECLARED == mirror.getKind() ) {
if ( mirror.accept( visitor, subElement ) ) {
return true;
}
}
}
for ( Element subElement : methodsIn(enclosedElements) ) {
final TypeMirror mirror = subElement.asType();
if ( TypeKind.DECLARED == mirror.getKind() ) {
if ( mirror.accept( visitor, subElement ) ) {
return true;
}
}
}
}
}
}
return false;
}
private static boolean enclosesEntityOrEmbeddable(Element element) {
if ( element instanceof TypeElement typeElement ) {
for ( final Element enclosedElement : typeElement.getEnclosedElements() ) {
if ( isEntityOrEmbeddable( enclosedElement )
|| enclosesEntityOrEmbeddable( enclosedElement ) ) {
return true;
}
}
return false;
}
else {
return false;
}
}
private static boolean isEntityOrEmbeddable(Element element) {
return hasAnnotation(
element,
ENTITY,
MAPPED_SUPERCLASS,
EMBEDDABLE
);
}
	/**
	 * Does the element carry any of the "auxiliary" annotations — named
	 * queries, result set mappings, entity graphs, fetch profiles, or
	 * filter definitions — that warrant generating an auxiliary
	 * metamodel even for a non-entity type?
	 */
	private boolean hasAuxiliaryAnnotations(Element element) {
		return containsAnnotation(
				element,
				// Jakarta Persistence annotations
				NAMED_QUERY,
				NAMED_QUERIES,
				NAMED_NATIVE_QUERY,
				NAMED_NATIVE_QUERIES,
				SQL_RESULT_SET_MAPPING,
				SQL_RESULT_SET_MAPPINGS,
				NAMED_ENTITY_GRAPH,
				NAMED_ENTITY_GRAPHS,
				// Hibernate-specific annotations
				HIB_NAMED_QUERY,
				HIB_NAMED_QUERIES,
				HIB_NAMED_NATIVE_QUERY,
				HIB_NAMED_NATIVE_QUERIES,
				HIB_FETCH_PROFILE,
				HIB_FETCH_PROFILES,
				HIB_FILTER_DEF,
				HIB_FILTER_DEFS
		);
	}
	/**
	 * Creates and registers the Jakarta Persistence metamodel — and,
	 * when enabled, the Jakarta Data metamodel — for an annotated
	 * entity, mapped superclass, or embeddable.
	 */
	private void handleRootElementAnnotationMirrors(final Element element, @Nullable Element parent) {
		if ( isClassRecordOrInterfaceType( element ) ) {
			if ( isEntityOrEmbeddable( element ) ) {
				final TypeElement typeElement = (TypeElement) element;
				indexEntityName( typeElement );
				indexEnumFields( typeElement );
				indexQueryInterfaces( typeElement );
				final String qualifiedName = typeElement.getQualifiedName().toString();
				// XML mapping may already have produced a metamodel for this type
				final Metamodel alreadyExistingMetaEntity =
						tryGettingExistingEntityFromContext( typeElement, qualifiedName );
				if ( alreadyExistingMetaEntity != null && alreadyExistingMetaEntity.isMetaComplete() ) {
					context.logMessage(
							Diagnostic.Kind.OTHER,
							"Skipping processing of annotations for '" + qualifiedName
									+ "' since XML configuration is metadata complete.");
				}
				else {
					final AnnotationMetaEntity parentMetaEntity =
							parentMetadata( parent, context::getMetamodel );
					final boolean requiresLazyMemberInitialization
							= hasAnnotation( element, EMBEDDABLE, MAPPED_SUPERCLASS );
					final AnnotationMetaEntity metaEntity =
							AnnotationMetaEntity.create( typeElement, context,
									requiresLazyMemberInitialization,
									true, false, parentMetaEntity, typeElement );
					if ( alreadyExistingMetaEntity != null ) {
						// merge members contributed by the XML mapping
						metaEntity.mergeInMembers( alreadyExistingMetaEntity );
					}
					addMetamodelToContext( typeElement, metaEntity );
					if ( context.generateJakartaDataStaticMetamodel()
							// no static metamodel for embeddable classes in Jakarta Data
							&& hasAnnotation( element, ENTITY, MAPPED_SUPERCLASS )
							// don't generate a Jakarta Data metamodel
							// if this entity was partially mapped in XML
							&& alreadyExistingMetaEntity == null
							// let a handwritten metamodel "override" the generated one
							// (this is used in the Jakarta Data TCK)
							&& !hasHandwrittenMetamodel(element) ) {
						final AnnotationMetaEntity parentDataEntity =
								parentMetadata( parent, context::getDataMetaEntity );
						final AnnotationMetaEntity dataMetaEntity =
								AnnotationMetaEntity.create( typeElement, context,
										requiresLazyMemberInitialization,
										true, true, parentDataEntity, typeElement );
						// final Metamodel alreadyExistingDataMetaEntity =
						// 		tryGettingExistingDataEntityFromContext( mirror, '_' + qualifiedName );
						// if ( alreadyExistingDataMetaEntity != null ) {
						// 		dataMetaEntity.mergeInMembers( alreadyExistingDataMetaEntity );
						// }
						addDataMetamodelToContext( typeElement, dataMetaEntity );
					}
				}
			}
		}
	}
private static boolean hasHandwrittenMetamodel(Element element) {
return element.getEnclosingElement().getEnclosedElements()
.stream().anyMatch(e -> e.getSimpleName()
.contentEquals('_' + element.getSimpleName().toString()));
}
private void indexQueryInterfaces(TypeElement typeElement) {
for ( Element element : typeElement.getEnclosedElements() ) {
if( element.getKind() == ElementKind.INTERFACE ) {
inspectRootElement( element, typeElement, typeElement );
}
}
}
private void indexEntityName(TypeElement typeElement) {
final AnnotationMirror mirror = getAnnotationMirror( typeElement, ENTITY );
if ( mirror != null ) {
context.addEntityNameMapping( entityName( typeElement, mirror ),
typeElement.getQualifiedName().toString() );
}
}
private static String entityName(TypeElement entityType, AnnotationMirror mirror) {
final String className = entityType.getSimpleName().toString();
final AnnotationValue name = getAnnotationValue(mirror, "name" );
if (name != null) {
final String explicitName = name.getValue().toString();
if ( !explicitName.isEmpty() ) {
return explicitName;
}
}
return className;
}
private void indexEnumFields(TypeElement typeElement) {
for ( Element member : context.getAllMembers(typeElement) ) {
switch ( member.getKind() ) {
case FIELD:
indexEnumValues( member.asType() );
break;
case METHOD:
indexEnumValues( ((ExecutableElement) member).getReturnType() );
break;
}
}
}
private void indexEnumValues(TypeMirror type) {
if ( type.getKind() == TypeKind.DECLARED ) {
final DeclaredType declaredType = (DeclaredType) type;
final TypeElement fieldType = (TypeElement) declaredType.asElement();
if ( fieldType.getKind() == ElementKind.ENUM ) {
for ( Element enumMember : fieldType.getEnclosedElements() ) {
if ( enumMember.getKind() == ElementKind.ENUM_CONSTANT ) {
final Element enclosingElement = fieldType.getEnclosingElement();
final boolean hasOuterType =
enclosingElement.getKind().isClass() || enclosingElement.getKind().isInterface();
context.addEnumValue( fieldType.getQualifiedName().toString(),
fieldType.getSimpleName().toString(),
hasOuterType ? ((TypeElement) enclosingElement).getQualifiedName().toString() : null,
hasOuterType ? enclosingElement.getSimpleName().toString() : null,
enumMember.getSimpleName().toString() );
}
}
}
}
}
private void handleRootElementAuxiliaryAnnotationMirrors(final Element element) {
if ( element instanceof TypeElement ) {
final AnnotationMetaEntity metaEntity =
AnnotationMetaEntity.create( (TypeElement) element, context,
parentMetadata( element.getEnclosingElement(), context::getMetaEntity ) );
context.addMetaAuxiliary( metaEntity.getQualifiedName(), metaEntity );
}
else if ( element instanceof PackageElement ) {
final AnnotationMetaPackage metaEntity =
AnnotationMetaPackage.create( (PackageElement) element, context );
context.addMetaAuxiliary( metaEntity.getQualifiedName(), metaEntity );
}
//TODO: handle PackageElement
}
private @Nullable Metamodel tryGettingExistingEntityFromContext(TypeElement typeElement, String qualifiedName) {
if ( hasAnnotation( typeElement, ENTITY, MAPPED_SUPERCLASS ) ) {
return context.getMetaEntity( qualifiedName );
}
else if ( hasAnnotation( typeElement, EMBEDDABLE ) ) {
return context.getMetaEmbeddable( qualifiedName );
}
return null;
}
private void addMetamodelToContext(TypeElement typeElement, AnnotationMetaEntity entity) {
final String key = entity.getQualifiedName();
if ( hasAnnotation( typeElement, ENTITY ) ) {
context.addMetaEntity( key, entity );
}
else if ( hasAnnotation( typeElement, MAPPED_SUPERCLASS ) ) {
context.addMetaEntity( key, entity );
}
else if ( hasAnnotation( typeElement, EMBEDDABLE ) ) {
context.addMetaEmbeddable( key, entity );
}
}
private void addDataMetamodelToContext(TypeElement typeElement, AnnotationMetaEntity entity) {
final String key = entity.getQualifiedName();
if ( hasAnnotation( typeElement, ENTITY ) ) {
context.addDataMetaEntity( key, entity );
}
else if ( hasAnnotation( typeElement, MAPPED_SUPERCLASS ) ) {
context.addDataMetaEntity( key, entity );
}
else if ( hasAnnotation( typeElement, EMBEDDABLE ) ) {
context.addDataMetaEmbeddable( key, entity );
}
}
	/**
	 * Writes the entity-name and enum-value index resource files, which
	 * allow unqualified names to be resolved later. Only runs when
	 * indexing is enabled via processor options.
	 */
	private void writeIndex() {
		if ( context.isIndexing() ) {
			final ProcessingEnvironment processingEnvironment = context.getProcessingEnvironment();
			final Elements elementUtils = processingEnvironment.getElementUtils();
			// one index file per entity name, containing the class name
			context.getEntityNameMappings().forEach( (entityName, className) -> {
				try (Writer writer = processingEnvironment.getFiler()
						.createResource(
								StandardLocation.SOURCE_OUTPUT,
								ENTITY_INDEX,
								entityName,
								elementUtils.getTypeElement( className )
						)
						.openWriter()) {
					writer.append( className );
				}
				catch (IOException e) {
					// a missing index entry is not fatal: warn and carry on
					processingEnvironment.getMessager()
							.printMessage( Diagnostic.Kind.WARNING,
									"could not write entity index " + e.getMessage() );
				}
			} );
			// one index file per enum value (prefixed with '.'), listing
			// the enum types that declare a constant with that name
			context.getEnumTypesByValue().forEach( (valueName, enumTypeNames) -> {
				try (Writer writer = processingEnvironment.getFiler()
						.createResource(
								StandardLocation.SOURCE_OUTPUT,
								ENTITY_INDEX,
								'.' + valueName,
								elementUtils.getTypeElement( enumTypeNames.iterator().next() )
						)
						.openWriter()) {
					for ( String enumTypeName : enumTypeNames ) {
						writer.append( enumTypeName ).append( " " );
					}
				}
				catch (IOException e) {
					processingEnvironment.getMessager()
							.printMessage( Diagnostic.Kind.WARNING,
									"could not write entity index " + e.getMessage() );
				}
			} );
		}
	}
}
|
apache/ozone | 37,010 | hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/om/TestOmContainerLocationCache.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.ozone.om;
import static com.google.common.collect.Sets.newHashSet;
import static java.nio.charset.StandardCharsets.UTF_8;
import static java.util.concurrent.CompletableFuture.completedFuture;
import static org.apache.hadoop.hdds.scm.exceptions.SCMException.ResultCodes.NO_REPLICA_FOUND;
import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_KEY_PREALLOCATION_BLOCKS_MAX;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.mockito.Mockito.any;
import static org.mockito.Mockito.anyBoolean;
import static org.mockito.Mockito.anyInt;
import static org.mockito.Mockito.anyLong;
import static org.mockito.Mockito.anyString;
import static org.mockito.Mockito.argThat;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.reset;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import com.google.common.collect.ImmutableMap;
import jakarta.annotation.Nonnull;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.atomic.AtomicLong;
import java.util.stream.Stream;
import org.apache.commons.io.IOUtils;
import org.apache.hadoop.hdds.HddsConfigKeys;
import org.apache.hadoop.hdds.client.ContainerBlockID;
import org.apache.hadoop.hdds.client.ECReplicationConfig;
import org.apache.hadoop.hdds.client.RatisReplicationConfig;
import org.apache.hadoop.hdds.client.ReplicationConfig;
import org.apache.hadoop.hdds.conf.OzoneConfiguration;
import org.apache.hadoop.hdds.protocol.DatanodeDetails;
import org.apache.hadoop.hdds.protocol.DatanodeID;
import org.apache.hadoop.hdds.protocol.MockDatanodeDetails;
import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos;
import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.BlockData;
import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.ChecksumType;
import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.ChunkInfo;
import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.ContainerCommandRequestProto;
import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.ContainerCommandResponseProto;
import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.GetBlockResponseProto;
import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.GetCommittedBlockLengthResponseProto;
import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.PutBlockResponseProto;
import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.ReadChunkResponseProto;
import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.Result;
import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.Type;
import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.WriteChunkResponseProto;
import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationFactor;
import org.apache.hadoop.hdds.scm.XceiverClientFactory;
import org.apache.hadoop.hdds.scm.XceiverClientGrpc;
import org.apache.hadoop.hdds.scm.XceiverClientManager;
import org.apache.hadoop.hdds.scm.XceiverClientReply;
import org.apache.hadoop.hdds.scm.XceiverClientSpi;
import org.apache.hadoop.hdds.scm.container.ContainerInfo;
import org.apache.hadoop.hdds.scm.container.common.helpers.AllocatedBlock;
import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerWithPipeline;
import org.apache.hadoop.hdds.scm.container.common.helpers.ExcludeList;
import org.apache.hadoop.hdds.scm.net.InnerNode;
import org.apache.hadoop.hdds.scm.net.InnerNodeImpl;
import org.apache.hadoop.hdds.scm.net.NetConstants;
import org.apache.hadoop.hdds.scm.pipeline.Pipeline;
import org.apache.hadoop.hdds.scm.pipeline.PipelineID;
import org.apache.hadoop.hdds.scm.protocol.ScmBlockLocationProtocol;
import org.apache.hadoop.hdds.scm.protocol.StorageContainerLocationProtocol;
import org.apache.hadoop.hdds.security.exception.SCMSecurityException;
import org.apache.hadoop.ozone.OzoneConfigKeys;
import org.apache.hadoop.ozone.client.ObjectStore;
import org.apache.hadoop.ozone.client.OzoneBucket;
import org.apache.hadoop.ozone.client.OzoneClient;
import org.apache.hadoop.ozone.client.OzoneKeyDetails;
import org.apache.hadoop.ozone.client.io.OzoneOutputStream;
import org.apache.hadoop.ozone.client.rpc.RpcClient;
import org.apache.hadoop.ozone.common.Checksum;
import org.apache.hadoop.ozone.om.helpers.OmBucketInfo;
import org.apache.hadoop.ozone.om.helpers.OmVolumeArgs;
import org.apache.hadoop.ozone.om.helpers.ServiceInfoEx;
import org.apache.hadoop.ozone.om.request.OMRequestTestUtils;
import org.apache.ratis.thirdparty.com.google.protobuf.ByteString;
import org.apache.ratis.thirdparty.io.grpc.Status;
import org.apache.ratis.thirdparty.io.grpc.StatusException;
import org.apache.ratis.thirdparty.io.grpc.StatusRuntimeException;
import org.apache.ratis.util.ExitUtils;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;
import org.mockito.ArgumentMatcher;
/**
* This class includes the integration test-cases to verify the integration
* between client and OM to keep container location cache eventually
* consistent. For example, when clients facing particular errors reading data
* from datanodes, they should inform OM to refresh location cache and OM
* should in turn contact SCM to get the updated container location.
*
* This integration verifies clients and OM using mocked Datanode and SCM
* protocols.
*/
public class TestOmContainerLocationCache {
@TempDir
private static File dir;
private static ScmBlockLocationProtocol mockScmBlockLocationProtocol;
private static StorageContainerLocationProtocol mockScmContainerClient;
private static OzoneConfiguration conf;
private static OMMetadataManager metadataManager;
private static final String BUCKET_NAME = "bucket1";
private static final String VERSIONED_BUCKET_NAME = "versionedBucket1";
private static final String VOLUME_NAME = "vol1";
private static OzoneManager om;
private static ObjectStore objectStore;
private static XceiverClientGrpc mockDn1Protocol;
private static XceiverClientGrpc mockDn2Protocol;
private static XceiverClientGrpc mockDnEcProtocol;
private static final DatanodeDetails DN1 =
MockDatanodeDetails.createDatanodeDetails(DatanodeID.randomID());
private static final DatanodeDetails DN2 =
MockDatanodeDetails.createDatanodeDetails(DatanodeID.randomID());
private static final DatanodeDetails DN3 =
MockDatanodeDetails.createDatanodeDetails(DatanodeID.randomID());
private static final DatanodeDetails DN4 =
MockDatanodeDetails.createDatanodeDetails(DatanodeID.randomID());
private static final DatanodeDetails DN5 =
MockDatanodeDetails.createDatanodeDetails(DatanodeID.randomID());
private static final AtomicLong CONTAINER_ID = new AtomicLong(1);
private static OzoneClient ozoneClient;
  /**
   * Boots a real OzoneManager wired to mocked SCM block/container
   * protocols, plus an ObjectStore client whose XceiverClientFactory is
   * mocked, so container-location caching can be exercised end to end
   * without a cluster. Also pre-creates the volume and buckets the
   * tests use.
   */
  @BeforeAll
  public static void setUp() throws Exception {
    ExitUtils.disableSystemExit();
    conf = new OzoneConfiguration();
    conf.set(HddsConfigKeys.OZONE_METADATA_DIRS, dir.toString());
    conf.set(OzoneConfigKeys.OZONE_NETWORK_TOPOLOGY_AWARE_READ_KEY, "true");
    // cap preallocation so block allocation behavior stays predictable
    conf.setLong(OZONE_KEY_PREALLOCATION_BLOCKS_MAX, 10);
    mockScmBlockLocationProtocol = mock(ScmBlockLocationProtocol.class);
    mockScmContainerClient =
        mock(StorageContainerLocationProtocol.class);
    InnerNode.Factory factory = InnerNodeImpl.FACTORY;
    when(mockScmBlockLocationProtocol.getNetworkTopology()).thenReturn(
        factory.newInnerNode("", "", null, NetConstants.ROOT_LEVEL, 1));
    OmTestManagers omTestManagers = new OmTestManagers(conf,
        mockScmBlockLocationProtocol, mockScmContainerClient);
    om = omTestManagers.getOzoneManager();
    ozoneClient = omTestManagers.getRpcClient();
    metadataManager = omTestManagers.getMetadataManager();
    // client whose datanode connections go to the mocked protocols
    RpcClient rpcClient = new RpcClient(conf, null) {
      @Nonnull
      @Override
      protected XceiverClientFactory createXceiverClientFactory(
          ServiceInfoEx serviceInfo) throws IOException {
        return mockDataNodeClientFactory();
      }
    };
    objectStore = new ObjectStore(conf, rpcClient);
    createVolume(VOLUME_NAME);
    createBucket(VOLUME_NAME, BUCKET_NAME, false);
    createBucket(VOLUME_NAME, VERSIONED_BUCKET_NAME, true);
  }
  /** Closes the client, then stops the OzoneManager, after all tests. */
  @AfterAll
  public static void cleanup() throws Exception {
    ozoneClient.close();
    om.stop();
  }
  /**
   * Builds a mocked {@link XceiverClientManager} that hands out spied
   * gRPC clients: one per standalone datanode (DN1, DN2) and one for
   * the EC pipeline, so tests can stub and verify datanode traffic.
   */
  private static XceiverClientManager mockDataNodeClientFactory()
      throws IOException {
    mockDn1Protocol = spy(new XceiverClientGrpc(createPipeline(DN1), conf));
    mockDn2Protocol = spy(new XceiverClientGrpc(createPipeline(DN2), conf));
    mockDnEcProtocol = spy(new XceiverClientGrpc(createEcPipeline(
        ImmutableMap.of(DN1, 1, DN2, 2, DN3, 3, DN4, 4, DN5, 5)), conf));
    XceiverClientManager manager = mock(XceiverClientManager.class);
    // empty pipelines fall through to the real acquire logic
    when(manager.acquireClient(argThat(matchEmptyPipeline())))
        .thenCallRealMethod();
    when(manager.acquireClient(argThat(matchEmptyPipeline()),
        anyBoolean())).thenCallRealMethod();
    when(manager.acquireClientForReadData(argThat(matchEmptyPipeline())))
        .thenCallRealMethod();
    // pipelines led by DN1/DN2 are served by the corresponding spies
    when(manager.acquireClient(argThat(matchPipeline(DN1))))
        .thenReturn(mockDn1Protocol);
    when(manager.acquireClientForReadData(argThat(matchPipeline(DN1))))
        .thenReturn(mockDn1Protocol);
    when(manager.acquireClient(argThat(matchPipeline(DN2))))
        .thenReturn(mockDn2Protocol);
    when(manager.acquireClientForReadData(argThat(matchPipeline(DN2))))
        .thenReturn(mockDn2Protocol);
    // EC pipelines are served by the dedicated EC spy
    when(manager.acquireClient(argThat(matchEcPipeline())))
        .thenReturn(mockDnEcProtocol);
    when(manager.acquireClientForReadData(argThat(matchEcPipeline())))
        .thenReturn(mockDnEcProtocol);
    return manager;
  }
private static ArgumentMatcher<Pipeline> matchEmptyPipeline() {
  // Matches a non-null pipeline that carries no datanodes at all.
  return candidate -> {
    if (candidate == null) {
      return false;
    }
    return candidate.getNodes().isEmpty();
  };
}
private static ArgumentMatcher<Pipeline> matchPipeline(DatanodeDetails dn) {
  // Matches a non-null, non-empty pipeline whose first node is {@code dn}
  // (compared by UUID).
  return candidate -> {
    if (candidate == null || candidate.getNodes().isEmpty()) {
      return false;
    }
    return dn.getUuid().equals(candidate.getNodes().get(0).getUuid());
  };
}
private static ArgumentMatcher<Pipeline> matchEcPipeline() {
  // Matches a non-null, non-empty pipeline configured for EC replication.
  return candidate -> {
    if (candidate == null || candidate.getNodes().isEmpty()) {
      return false;
    }
    return candidate.getReplicationConfig() instanceof ECReplicationConfig;
  };
}
/**
 * Registers a bucket directly in OM metadata, bypassing the RPC path.
 *
 * @param volumeName       owning volume
 * @param bucketName       bucket to create
 * @param isVersionEnabled whether object versioning is enabled
 */
private static void createBucket(String volumeName, String bucketName,
    boolean isVersionEnabled)
    throws IOException {
  OmBucketInfo.Builder bucket = OmBucketInfo.newBuilder();
  bucket.setVolumeName(volumeName);
  bucket.setBucketName(bucketName);
  bucket.setIsVersionEnabled(isVersionEnabled);
  OMRequestTestUtils.addBucketToOM(metadataManager, bucket.build());
}
/**
 * Registers a volume directly in OM metadata, bypassing the RPC path.
 * Uses a fixed placeholder admin/owner ("bilbo").
 */
private static void createVolume(String volumeName) throws IOException {
  OmVolumeArgs.Builder volume = OmVolumeArgs.newBuilder();
  volume.setVolume(volumeName);
  volume.setAdminName("bilbo");
  volume.setOwnerName("bilbo");
  OMRequestTestUtils.addVolumeToOM(metadataManager, volume.build());
}
/**
 * Per-test isolation: bump the container id so cached locations from a
 * previous test can never satisfy this test's lookups, and reset all mocks.
 */
@BeforeEach
public void beforeEach() throws IOException {
  CONTAINER_ID.getAndIncrement();
  // Drop stubbings and invocation counts accumulated by earlier tests.
  reset(mockScmBlockLocationProtocol, mockScmContainerClient,
      mockDn1Protocol, mockDn2Protocol, mockDnEcProtocol);
  InnerNode.Factory factory = InnerNodeImpl.FACTORY;
  // Minimal single-level network topology for SCM block location calls.
  when(mockScmBlockLocationProtocol.getNetworkTopology()).thenReturn(
      factory.newInnerNode("", "", null, NetConstants.ROOT_LEVEL, 1));
  // Re-stub pipelines wiped out by reset() above.
  when(mockDn1Protocol.getPipeline()).thenReturn(createPipeline(DN1));
  when(mockDn2Protocol.getPipeline()).thenReturn(createPipeline(DN2));
  when(mockDnEcProtocol.getPipeline()).thenReturn(createEcPipeline(
      ImmutableMap.of(DN1, 1, DN2, 2, DN3, 3, DN4, 4, DN5, 5)));
}
/**
 * Verify that in a happy case, container location is cached and reused
 * in OM: two keys in the same container must trigger exactly one SCM
 * pipeline lookup.
 */
@Test
public void containerCachedInHappyCase() throws Exception {
  byte[] data = "Test content".getBytes(UTF_8);
  mockScmAllocationOnDn1(CONTAINER_ID.get(), 1L);
  mockWriteChunkResponse(mockDn1Protocol);
  mockPutBlockResponse(mockDn1Protocol, CONTAINER_ID.get(), 1L, data);
  OzoneBucket bucket = objectStore.getVolume(VOLUME_NAME)
      .getBucket(BUCKET_NAME);
  // Create keyName1.
  String keyName1 = "key1";
  try (OzoneOutputStream os = bucket.createKey(keyName1, data.length)) {
    IOUtils.write(data, os);
  }
  mockScmGetContainerPipeline(CONTAINER_ID.get(), DN1);
  // Read keyName1 — this is the first (and only expected) SCM lookup.
  OzoneKeyDetails key1 = bucket.getKey(keyName1);
  verify(mockScmContainerClient, times(1))
      .getContainerWithPipelineBatch(newHashSet(CONTAINER_ID.get()));
  mockGetBlock(mockDn1Protocol, CONTAINER_ID.get(), 1L, data, null, null);
  mockReadChunk(mockDn1Protocol, CONTAINER_ID.get(), 1L, data, null, null);
  try (InputStream is = key1.getContent()) {
    byte[] read = new byte[(int) key1.getDataSize()];
    IOUtils.read(is, read);
    assertArrayEquals(data, read);
  }
  // Create keyName2 in the same container to reuse the cache
  String keyName2 = "key2";
  try (OzoneOutputStream os = bucket.createKey(keyName2, data.length)) {
    IOUtils.write(data, os);
  }
  // Read keyName2.
  OzoneKeyDetails key2 = bucket.getKey(keyName2);
  try (InputStream is = key2.getContent()) {
    byte[] read = new byte[(int) key2.getDataSize()];
    IOUtils.read(is, read);
    assertArrayEquals(data, read);
  }
  // Ensure SCM is not called once again: the invocation count is still 1,
  // proving the cached container location served the second read.
  verify(mockScmContainerClient, times(1))
      .getContainerWithPipelineBatch(newHashSet(CONTAINER_ID.get()));
}
/**
 * Datanode failures that must force the client to re-resolve the container
 * location from SCM: container-state result codes and gRPC unavailability.
 * Each case is (exception-to-throw, response-result-code); exactly one of
 * the two is non-null.
 */
private static Stream<Arguments> errorsTriggerRefresh() {
  List<Arguments> cases = new ArrayList<>();
  cases.add(Arguments.of(null, Result.CLOSED_CONTAINER_IO));
  cases.add(Arguments.of(null, Result.CONTAINER_NOT_FOUND));
  cases.add(Arguments.of(new StatusException(Status.UNAVAILABLE), null));
  cases.add(Arguments.of(new StatusRuntimeException(Status.UNAVAILABLE), null));
  return cases.stream();
}
/**
 * Datanode failures that must fail the read fast without a location
 * refresh. Each case is (exception, response-result-code,
 * exception-type-the-client-is-expected-to-throw).
 */
private static Stream<Arguments> errorsNotTriggerRefresh() {
  List<Arguments> cases = new ArrayList<>();
  cases.add(Arguments.of(new StatusException(Status.UNAUTHENTICATED), null,
      SCMSecurityException.class));
  cases.add(Arguments.of(new IOException("Any random IO exception."), null,
      IOException.class));
  return cases.stream();
}
/**
 * Verify that in case a client got errors calling datanodes GetBlock,
 * the client correctly requests OM to refresh relevant container location
 * from SCM.
 */
@ParameterizedTest
@MethodSource("errorsTriggerRefresh")
public void containerRefreshedAfterDatanodeGetBlockError(
    Exception dnException, Result dnResponseCode) throws Exception {
  byte[] data = "Test content".getBytes(UTF_8);
  mockScmAllocationOnDn1(CONTAINER_ID.get(), 1L);
  mockWriteChunkResponse(mockDn1Protocol);
  mockPutBlockResponse(mockDn1Protocol, CONTAINER_ID.get(), 1L, data);
  OzoneBucket bucket = objectStore.getVolume(VOLUME_NAME)
      .getBucket(BUCKET_NAME);
  String keyName = "key";
  try (OzoneOutputStream os = bucket.createKey(keyName, data.length)) {
    IOUtils.write(data, os);
  }
  mockScmGetContainerPipeline(CONTAINER_ID.get(), DN1);
  // First SCM lookup happens while resolving the key.
  OzoneKeyDetails key1 = bucket.getKey(keyName);
  verify(mockScmContainerClient, times(1))
      .getContainerWithPipelineBatch(newHashSet(CONTAINER_ID.get()));
  try (InputStream is = key1.getContent()) {
    // Simulate dn1 got errors, and the container's moved to dn2.
    mockGetBlock(mockDn1Protocol, CONTAINER_ID.get(), 1L, null,
        dnException, dnResponseCode);
    mockScmGetContainerPipeline(CONTAINER_ID.get(), DN2);
    mockGetBlock(mockDn2Protocol, CONTAINER_ID.get(), 1L, data, null, null);
    mockReadChunk(mockDn2Protocol, CONTAINER_ID.get(), 1L, data, null, null);
    // The read must transparently retry against dn2 and still succeed.
    byte[] read = new byte[(int) key1.getDataSize()];
    IOUtils.read(is, read);
    assertArrayEquals(data, read);
  }
  // verify SCM is called one more time to refresh.
  verify(mockScmContainerClient, times(2))
      .getContainerWithPipelineBatch(newHashSet(CONTAINER_ID.get()));
}
/**
 * Verify that in case a client got errors calling datanodes ReadChunk,
 * the client correctly requests OM to refresh relevant container location
 * from SCM.
 */
@ParameterizedTest
@MethodSource("errorsTriggerRefresh")
public void containerRefreshedAfterDatanodeReadChunkError(
    Exception dnException, Result dnResponseCode) throws Exception {
  byte[] data = "Test content".getBytes(UTF_8);
  mockScmAllocationOnDn1(CONTAINER_ID.get(), 1L);
  mockWriteChunkResponse(mockDn1Protocol);
  mockPutBlockResponse(mockDn1Protocol, CONTAINER_ID.get(), 1L, data);
  OzoneBucket bucket = objectStore.getVolume(VOLUME_NAME)
      .getBucket(BUCKET_NAME);
  String keyName = "key";
  try (OzoneOutputStream os = bucket.createKey(keyName, data.length)) {
    IOUtils.write(data, os);
  }
  mockScmGetContainerPipeline(CONTAINER_ID.get(), DN1);
  // First SCM lookup happens while resolving the key.
  OzoneKeyDetails key1 = bucket.getKey(keyName);
  verify(mockScmContainerClient, times(1))
      .getContainerWithPipelineBatch(newHashSet(CONTAINER_ID.get()));
  try (InputStream is = key1.getContent()) {
    // simulate dn1 goes down, the container's to dn2. GetBlock on dn1
    // still succeeds; only the subsequent ReadChunk fails.
    mockGetBlock(mockDn1Protocol, CONTAINER_ID.get(), 1L, data, null, null);
    mockReadChunk(mockDn1Protocol, CONTAINER_ID.get(), 1L, null,
        dnException, dnResponseCode);
    mockScmGetContainerPipeline(CONTAINER_ID.get(), DN2);
    mockGetBlock(mockDn2Protocol, CONTAINER_ID.get(), 1L, data, null, null);
    mockReadChunk(mockDn2Protocol, CONTAINER_ID.get(), 1L, data, null, null);
    byte[] read = new byte[(int) key1.getDataSize()];
    IOUtils.read(is, read);
    assertArrayEquals(data, read);
  }
  // verify SCM is called one more time to refresh.
  verify(mockScmContainerClient, times(2))
      .getContainerWithPipelineBatch(newHashSet(CONTAINER_ID.get()));
}
/**
 * Verify that in case a client got particular (non-retriable) errors from
 * datanodes GetBlock, the client fails fast and does not invoke a cache
 * refresh.
 */
@ParameterizedTest
@MethodSource("errorsNotTriggerRefresh")
public void containerNotRefreshedAfterDatanodeGetBlockError(
    Exception ex, Result errorCode, Class<? extends Exception> expectedEx)
    throws Exception {
  byte[] data = "Test content".getBytes(UTF_8);
  mockScmAllocationOnDn1(CONTAINER_ID.get(), 1L);
  mockWriteChunkResponse(mockDn1Protocol);
  mockPutBlockResponse(mockDn1Protocol, CONTAINER_ID.get(), 1L, data);
  OzoneBucket bucket = objectStore.getVolume(VOLUME_NAME)
      .getBucket(BUCKET_NAME);
  String keyName = "key";
  try (OzoneOutputStream os = bucket.createKey(keyName, data.length)) {
    IOUtils.write(data, os);
  }
  mockScmGetContainerPipeline(CONTAINER_ID.get(), DN1);
  OzoneKeyDetails key1 = bucket.getKey(keyName);
  verify(mockScmContainerClient, times(1))
      .getContainerWithPipelineBatch(newHashSet(CONTAINER_ID.get()));
  try (InputStream is = key1.getContent()) {
    // simulate dn1 returning a non-retriable error on GetBlock.
    mockGetBlock(mockDn1Protocol, CONTAINER_ID.get(), 1L, null, ex,
        errorCode);
    assertThrows(expectedEx,
        () -> IOUtils.read(is, new byte[(int) key1.getDataSize()]));
  }
  // verify SCM is NOT called again: the count stays at 1, so the
  // non-retriable error did not trigger a container location refresh.
  verify(mockScmContainerClient, times(1))
      .getContainerWithPipelineBatch(newHashSet(CONTAINER_ID.get()));
}
/**
 * Verify that in case a client got particular (non-retriable) errors from
 * datanodes ReadChunk, the client fails fast and does not invoke a cache
 * refresh.
 */
@ParameterizedTest
@MethodSource("errorsNotTriggerRefresh")
public void containerNotRefreshedAfterDatanodeReadChunkError(
    Exception dnException, Result dnResponseCode,
    Class<? extends Exception> expectedEx) throws Exception {
  byte[] data = "Test content".getBytes(UTF_8);
  mockScmAllocationOnDn1(CONTAINER_ID.get(), 1L);
  mockWriteChunkResponse(mockDn1Protocol);
  mockPutBlockResponse(mockDn1Protocol, CONTAINER_ID.get(), 1L, data);
  OzoneBucket bucket = objectStore.getVolume(VOLUME_NAME)
      .getBucket(BUCKET_NAME);
  String keyName = "key";
  try (OzoneOutputStream os = bucket.createKey(keyName, data.length)) {
    IOUtils.write(data, os);
  }
  mockScmGetContainerPipeline(CONTAINER_ID.get(), DN1);
  OzoneKeyDetails key1 = bucket.getKey(keyName);
  verify(mockScmContainerClient, times(1))
      .getContainerWithPipelineBatch(newHashSet(CONTAINER_ID.get()));
  try (InputStream is = key1.getContent()) {
    // GetBlock succeeds; ReadChunk returns a non-retriable error.
    mockGetBlock(mockDn1Protocol, CONTAINER_ID.get(), 1L, data, null, null);
    mockReadChunk(mockDn1Protocol, CONTAINER_ID.get(), 1L, null,
        dnException, dnResponseCode);
    assertThrows(expectedEx,
        () -> IOUtils.read(is, new byte[(int) key1.getDataSize()]));
  }
  // verify SCM is NOT called again: the count stays at 1, so the
  // non-retriable error did not trigger a container location refresh.
  verify(mockScmContainerClient, times(1))
      .getContainerWithPipelineBatch(newHashSet(CONTAINER_ID.get()));
}
/**
 * Verify that in situation that SCM returns empty pipelines (that prevents
 * clients from reading data), the empty pipelines are not cached and
 * subsequent key reads re-fetch container data from SCM.
 */
@Test
public void containerRefreshedOnEmptyPipelines() throws Exception {
  byte[] data = "Test content".getBytes(UTF_8);
  mockScmAllocationOnDn1(CONTAINER_ID.get(), 1L);
  mockWriteChunkResponse(mockDn1Protocol);
  mockPutBlockResponse(mockDn1Protocol, CONTAINER_ID.get(), 1L, data);
  OzoneBucket bucket = objectStore.getVolume(VOLUME_NAME)
      .getBucket(BUCKET_NAME);
  String keyName = "key";
  try (OzoneOutputStream os = bucket.createKey(keyName, data.length)) {
    IOUtils.write(data, os);
  }
  // All datanodes go down and scm returns empty pipeline for the container.
  mockScmGetContainerPipelineEmpty(CONTAINER_ID.get());
  OzoneKeyDetails key1 = bucket.getKey(keyName);
  verify(mockScmContainerClient, times(1))
      .getContainerWithPipelineBatch(newHashSet(CONTAINER_ID.get()));
  // verify that the effort to read will result in a NO_REPLICA_FOUND error.
  Exception ex =
      assertThrows(IllegalArgumentException.class, () -> {
        try (InputStream is = key1.getContent()) {
          IOUtils.read(is, new byte[(int) key1.getDataSize()]);
        }
      });
  assertEquals(NO_REPLICA_FOUND.toString(), ex.getMessage());
  // but the empty pipeline is not cached, and when some data node is back.
  mockScmGetContainerPipeline(CONTAINER_ID.get(), DN1);
  mockGetBlock(mockDn1Protocol, CONTAINER_ID.get(), 1L, data, null, null);
  mockReadChunk(mockDn1Protocol, CONTAINER_ID.get(), 1L, data, null, null);
  // the subsequent effort to read the key is success.
  OzoneKeyDetails updatedKey1 = bucket.getKey(keyName);
  try (InputStream is = updatedKey1.getContent()) {
    byte[] read = new byte[(int) key1.getDataSize()];
    IOUtils.read(is, read);
    assertArrayEquals(data, read);
  }
  // verify SCM is called one more time to re-fetch the container pipeline.
  verify(mockScmContainerClient, times(2))
      .getContainerWithPipelineBatch(newHashSet(CONTAINER_ID.get()));
}
/**
 * Verify that an EC pipeline whose replica indexes do not cover all data
 * blocks is NOT cached (each key lookup re-queries SCM), while a pipeline
 * covering every data index IS cached and reused.
 */
@Test
public void containerRefreshedOnInsufficientEcPipelines() throws Exception {
  int chunkSize = 1024 * 1024;
  int dataBlocks = 3;
  int parityBlocks = 2;
  int inputSize = chunkSize * dataBlocks;
  byte[][] inputChunks = new byte[dataBlocks][chunkSize];
  mockScmAllocationEcPipeline(CONTAINER_ID.get(), 1L);
  mockWriteChunkResponse(mockDnEcProtocol);
  mockPutBlockResponse(mockDnEcProtocol, CONTAINER_ID.get(), 1L, null);
  OzoneBucket bucket = objectStore.getVolume(VOLUME_NAME).getBucket(BUCKET_NAME);
  String keyName = "ecKey";
  try (OzoneOutputStream os = bucket.createKey(keyName, inputSize,
      new ECReplicationConfig(dataBlocks, parityBlocks, ECReplicationConfig.EcCodec.RS,
          chunkSize), new HashMap<>())) {
    for (int i = 0; i < dataBlocks; i++) {
      os.write(inputChunks[i]);
    }
  }
  // case1: pipeline replicaIndexes missing some data indexes, should not cache
  // (index 3 is absent, so data block 3 is unreadable from this pipeline).
  mockScmGetContainerEcPipeline(CONTAINER_ID.get(), ImmutableMap.of(DN1, 1, DN2, 2, DN4, 4));
  bucket.getKey(keyName);
  verify(mockScmContainerClient, times(1))
      .getContainerWithPipelineBatch(newHashSet(CONTAINER_ID.get()));
  // A second lookup re-queries SCM: call count rises to 2.
  bucket.getKey(keyName);
  verify(mockScmContainerClient, times(2))
      .getContainerWithPipelineBatch(newHashSet(CONTAINER_ID.get()));
  // case2: pipeline replicaIndexes contain all data indexes, should cache
  mockScmGetContainerEcPipeline(CONTAINER_ID.get(), ImmutableMap.of(DN1, 1, DN2, 2, DN3, 3, DN4, 4));
  bucket.getKey(keyName);
  verify(mockScmContainerClient, times(3))
      .getContainerWithPipelineBatch(newHashSet(CONTAINER_ID.get()));
  // Cached now: this lookup must not add an SCM call.
  bucket.getKey(keyName);
  verify(mockScmContainerClient, times(3))
      .getContainerWithPipelineBatch(newHashSet(CONTAINER_ID.get()));
}
/**
 * Stubs the given datanode client so every PutBlock command asynchronously
 * succeeds with a committed-block-length response for the given block id.
 *
 * NOTE(review): the committed block length is hard-coded to 8 and the
 * {@code data} parameter is unused here — presumably these tests never
 * validate the committed length; confirm before relying on it.
 */
private void mockPutBlockResponse(XceiverClientSpi mockDnProtocol,
    long containerId, long localId,
    byte[] data)
    throws IOException, ExecutionException, InterruptedException {
  GetCommittedBlockLengthResponseProto build =
      GetCommittedBlockLengthResponseProto.newBuilder()
          .setBlockLength(8)
          .setBlockID(createBlockId(containerId, localId))
          .build();
  ContainerCommandResponseProto putResponse =
      ContainerCommandResponseProto.newBuilder()
          .setPutBlock(PutBlockResponseProto.newBuilder()
              .setCommittedBlockLength(build).build())
          .setResult(Result.SUCCESS)
          .setCmdType(Type.PutBlock)
          .build();
  doAnswer(invocation ->
      new XceiverClientReply(completedFuture(putResponse)))
      .when(mockDnProtocol)
      .sendCommandAsync(argThat(matchCmd(Type.PutBlock)));
}
/** Assembles the protobuf block identifier used in datanode responses. */
@Nonnull
private ContainerProtos.DatanodeBlockID createBlockId(long containerId,
    long localId) {
  ContainerProtos.DatanodeBlockID.Builder blockId =
      ContainerProtos.DatanodeBlockID.newBuilder();
  blockId.setContainerID(containerId);
  blockId.setLocalID(localId);
  return blockId.build();
}
/**
 * Stubs the given datanode client so every WriteChunk command is answered
 * asynchronously by {@link #createWriteChunkResponse}, echoing back the
 * request's block information.
 */
private void mockWriteChunkResponse(XceiverClientSpi mockDnProtocol)
    throws IOException, ExecutionException, InterruptedException {
  doAnswer(invocation ->
      new XceiverClientReply(
          completedFuture(
              createWriteChunkResponse(
                  (ContainerCommandRequestProto)invocation.getArgument(0)))))
      .when(mockDnProtocol)
      .sendCommandAsync(argThat(matchCmd(Type.WriteChunk)));
}
/**
 * Builds a successful WriteChunk reply for {@code request}. When the
 * request also carries block data, the reply reports that block's
 * committed length back to the client.
 */
ContainerCommandResponseProto createWriteChunkResponse(
    ContainerCommandRequestProto request) {
  ContainerProtos.WriteChunkRequestProto writeChunk = request.getWriteChunk();
  WriteChunkResponseProto.Builder chunkResponse =
      WriteChunkResponseProto.newBuilder();
  if (writeChunk.hasBlock()) {
    ContainerProtos.BlockData blockData =
        writeChunk.getBlock().getBlockData();
    chunkResponse.setCommittedBlockLength(
        GetCommittedBlockLengthResponseProto.newBuilder()
            .setBlockID(blockData.getBlockID())
            .setBlockLength(blockData.getSize())
            .build());
  }
  return ContainerCommandResponseProto.newBuilder()
      .setCmdType(Type.WriteChunk)
      .setResult(Result.SUCCESS)
      .setWriteChunk(chunkResponse.build())
      .build();
}
private ArgumentMatcher<ContainerCommandRequestProto> matchCmd(Type type) {
  // Matches any non-null container command of the given type.
  return request -> {
    if (request == null) {
      return false;
    }
    return request.getCmdType() == type;
  };
}
/**
 * Stubs SCM block allocation so any allocateBlock call returns a single
 * block with the given container/local id on a one-node pipeline at DN1.
 */
private void mockScmAllocationOnDn1(long containerID,
    long localId) throws IOException {
  ContainerBlockID blockId = new ContainerBlockID(containerID, localId);
  AllocatedBlock block = new AllocatedBlock.Builder()
      .setPipeline(createPipeline(DN1))
      .setContainerBlockID(blockId)
      .build();
  // Match any allocation request regardless of size/owner/exclusions.
  when(mockScmBlockLocationProtocol
      .allocateBlock(anyLong(), anyInt(),
          any(ReplicationConfig.class),
          anyString(),
          any(ExcludeList.class),
          anyString()))
      .thenReturn(Collections.singletonList(block));
}
/**
 * Stubs SCM block allocation for EC writes: any allocateBlock call with an
 * EC replication config returns a single block on a 5-node EC pipeline
 * with replica indexes 1..5 on DN1..DN5.
 */
private void mockScmAllocationEcPipeline(long containerID, long localId)
    throws IOException {
  ContainerBlockID blockId = new ContainerBlockID(containerID, localId);
  AllocatedBlock block = new AllocatedBlock.Builder()
      .setPipeline(createEcPipeline(ImmutableMap.of(DN1, 1, DN2, 2, DN3, 3, DN4, 4, DN5, 5)))
      .setContainerBlockID(blockId)
      .build();
  // Narrower matcher than mockScmAllocationOnDn1: only EC configs.
  when(mockScmBlockLocationProtocol
      .allocateBlock(anyLong(), anyInt(),
          any(ECReplicationConfig.class),
          anyString(),
          any(ExcludeList.class),
          anyString()))
      .thenReturn(Collections.singletonList(block));
}
/**
 * Stubs SCM so the batch pipeline lookup for {@code containerId} resolves
 * to a single-node pipeline on {@code dn}.
 */
private void mockScmGetContainerPipeline(long containerId,
    DatanodeDetails dn)
    throws IOException {
  Pipeline pipeline = createPipeline(dn);
  ContainerInfo info = new ContainerInfo.Builder()
      .setContainerID(containerId)
      .setPipelineID(pipeline.getId())
      .build();
  when(mockScmContainerClient.getContainerWithPipelineBatch(
      newHashSet(containerId)))
      .thenReturn(Collections.singletonList(
          new ContainerWithPipeline(info, pipeline)));
}
/**
 * Stubs SCM so the batch pipeline lookup for {@code containerId} resolves
 * to a pipeline with no datanodes, simulating all replicas being offline.
 */
private void mockScmGetContainerPipelineEmpty(long containerId)
    throws IOException {
  Pipeline pipeline = createPipeline(Collections.emptyList());
  ContainerInfo info = new ContainerInfo.Builder()
      .setContainerID(containerId)
      .setPipelineID(pipeline.getId())
      .build();
  when(mockScmContainerClient.getContainerWithPipelineBatch(
      newHashSet(containerId)))
      .thenReturn(Collections.singletonList(
          new ContainerWithPipeline(info, pipeline)));
}
/**
 * Stubs SCM so the batch pipeline lookup for {@code containerId} resolves
 * to an EC pipeline with the given datanode-to-replica-index mapping.
 */
private void mockScmGetContainerEcPipeline(long containerId, Map<DatanodeDetails, Integer> indexes)
    throws IOException {
  Pipeline pipeline = createEcPipeline(indexes);
  ContainerInfo info = new ContainerInfo.Builder()
      .setContainerID(containerId)
      .setPipelineID(pipeline.getId())
      .build();
  when(mockScmContainerClient.getContainerWithPipelineBatch(
      newHashSet(containerId)))
      .thenReturn(Collections.singletonList(
          new ContainerWithPipeline(info, pipeline)));
}
/**
 * Stubs the given datanode client's GetBlock command in one of three
 * modes: fail the future with {@code exception}, reply with error
 * {@code errorCode}, or (both null) succeed with a single-chunk block
 * containing {@code data}.
 *
 * @param mockDnProtocol spy to stub
 * @param containerId    container of the block in the success reply
 * @param localId        local id of the block in the success reply
 * @param data           chunk payload for the success reply; may be null
 *                       when an error mode is used
 * @param exception      if non-null, the async reply fails with this
 * @param errorCode      if non-null (and exception is null), the reply
 *                       carries this result code
 */
private void mockGetBlock(XceiverClientGrpc mockDnProtocol,
    long containerId, long localId,
    byte[] data,
    Exception exception,
    Result errorCode) throws Exception {
  final CompletableFuture<ContainerCommandResponseProto> response;
  if (exception != null) {
    // Transport-level failure: the future itself completes exceptionally.
    response = new CompletableFuture<>();
    response.completeExceptionally(exception);
  } else if (errorCode != null) {
    // Application-level failure: a well-formed reply with an error result.
    ContainerCommandResponseProto getBlockResp =
        ContainerCommandResponseProto.newBuilder()
            .setResult(errorCode)
            .setCmdType(Type.GetBlock)
            .build();
    response = completedFuture(getBlockResp);
  } else {
    ContainerCommandResponseProto getBlockResp =
        ContainerCommandResponseProto.newBuilder()
            .setGetBlock(GetBlockResponseProto.newBuilder()
                .setBlockData(BlockData.newBuilder()
                    .addChunks(createChunkInfo(data))
                    .setBlockID(createBlockId(containerId, localId))
                    .build())
                .build()
            )
            .setResult(Result.SUCCESS)
            .setCmdType(Type.GetBlock)
            .build();
    response = completedFuture(getBlockResp);
  }
  doAnswer(invocation -> new XceiverClientReply(response))
      .when(mockDnProtocol)
      .sendCommandAsync(argThat(matchCmd(Type.GetBlock)), any());
}
/**
 * Describes {@code data} as one chunk ("chunk1") at offset 0, with a
 * CRC32 checksum computed over 4-byte segments.
 */
@Nonnull
private ChunkInfo createChunkInfo(byte[] data) throws Exception {
  Checksum checksum = new Checksum(ChecksumType.CRC32, 4);
  ChunkInfo.Builder chunk = ChunkInfo.newBuilder();
  chunk.setChunkName("chunk1");
  chunk.setOffset(0);
  chunk.setLen(data.length);
  chunk.setChecksumData(checksum.computeChecksum(data).getProtoBufMessage());
  return chunk.build();
}
/**
 * Stubs the given datanode client's ReadChunk command in one of three
 * modes: fail the future with {@code exception}, reply with error
 * {@code errorCode}, or (both null) succeed with chunk data {@code data}.
 * Mirrors {@link #mockGetBlock} for the ReadChunk command type.
 */
private void mockReadChunk(XceiverClientGrpc mockDnProtocol,
    long containerId, long localId,
    byte[] data,
    Exception exception,
    Result errorCode) throws Exception {
  final CompletableFuture<ContainerCommandResponseProto> response;
  if (exception != null) {
    // Transport-level failure: the future itself completes exceptionally.
    response = new CompletableFuture<>();
    response.completeExceptionally(exception);
  } else if (errorCode != null) {
    // Application-level failure: a well-formed reply with an error result.
    ContainerCommandResponseProto readChunkResp =
        ContainerCommandResponseProto.newBuilder()
            .setResult(errorCode)
            .setCmdType(Type.ReadChunk)
            .build();
    response = completedFuture(readChunkResp);
  } else {
    ContainerCommandResponseProto readChunkResp =
        ContainerCommandResponseProto.newBuilder()
            .setReadChunk(ReadChunkResponseProto.newBuilder()
                .setBlockID(createBlockId(containerId, localId))
                .setChunkData(createChunkInfo(data))
                .setData(ByteString.copyFrom(data))
                .build()
            )
            .setResult(Result.SUCCESS)
            .setCmdType(Type.ReadChunk)
            .build();
    response = completedFuture(readChunkResp);
  }
  doAnswer(invocation -> new XceiverClientReply(response))
      .when(mockDnProtocol)
      .sendCommandAsync(argThat(matchCmd(Type.ReadChunk)), any());
}
/** Single-node convenience wrapper around {@link #createPipeline(List)}. */
private static Pipeline createPipeline(DatanodeDetails dn) {
  return createPipeline(Collections.singletonList(dn));
}
/** Builds an OPEN Ratis (factor THREE) pipeline over the given nodes. */
private static Pipeline createPipeline(List<DatanodeDetails> nodes) {
  Pipeline.Builder builder = Pipeline.newBuilder();
  builder.setId(PipelineID.randomId());
  builder.setState(Pipeline.PipelineState.OPEN);
  builder.setReplicationConfig(
      RatisReplicationConfig.getInstance(ReplicationFactor.THREE));
  builder.setNodes(nodes);
  return builder.build();
}
/**
 * Builds an OPEN EC(3,2) pipeline; nodes and their replica indexes are
 * taken from the given map.
 */
private static Pipeline createEcPipeline(Map<DatanodeDetails, Integer> indexes) {
  Pipeline.Builder builder = Pipeline.newBuilder();
  builder.setId(PipelineID.randomId());
  builder.setState(Pipeline.PipelineState.OPEN);
  builder.setReplicationConfig(new ECReplicationConfig(3, 2));
  builder.setNodes(new ArrayList<>(indexes.keySet()));
  builder.setReplicaIndexes(indexes);
  return builder.build();
}
}
/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.videointelligence.v1;
/**
* Service definition for CloudVideoIntelligence (v1).
*
* <p>
* Detects objects, explicit content, and scene changes in videos. It also specifies the region for annotation and transcribes speech to text. Supports both asynchronous API and streaming API.
* </p>
*
* <p>
* For more information about this service, see the
* <a href="https://cloud.google.com/video-intelligence/docs/" target="_blank">API Documentation</a>
* </p>
*
* <p>
* This service uses {@link CloudVideoIntelligenceRequestInitializer} to initialize global parameters via its
* {@link Builder}.
* </p>
*
* @since 1.3
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public class CloudVideoIntelligence extends com.google.api.client.googleapis.services.json.AbstractGoogleJsonClient {
// Note: Leave this static initializer at the top of the file.
static {
  // Fail fast at class-load time if an incompatible (pre-1.15)
  // google-api-client is on the classpath, rather than failing later
  // with an obscure NoSuchMethodError.
  com.google.api.client.util.Preconditions.checkState(
      com.google.api.client.googleapis.GoogleUtils.MAJOR_VERSION == 1 &&
      com.google.api.client.googleapis.GoogleUtils.MINOR_VERSION >= 15,
      "You are currently running with version %s of google-api-client. " +
      "You need at least version 1.15 of google-api-client to run version " +
      "1.26.0 of the Cloud Video Intelligence API library.", com.google.api.client.googleapis.GoogleUtils.VERSION);
}
/**
* The default encoded root URL of the service. This is determined when the library is generated
* and normally should not be changed.
*
* @since 1.7
*/
public static final String DEFAULT_ROOT_URL = "https://videointelligence.googleapis.com/";
/**
* The default encoded service path of the service. This is determined when the library is
* generated and normally should not be changed.
*
* @since 1.7
*/
public static final String DEFAULT_SERVICE_PATH = "";
/**
* The default encoded batch path of the service. This is determined when the library is
* generated and normally should not be changed.
*
* @since 1.23
*/
public static final String DEFAULT_BATCH_PATH = "batch";
/**
* The default encoded base URL of the service. This is determined when the library is generated
* and normally should not be changed.
*/
public static final String DEFAULT_BASE_URL = DEFAULT_ROOT_URL + DEFAULT_SERVICE_PATH;
/**
 * Constructor.
 *
 * <p>
 * Use {@link Builder} if you need to specify any of the optional parameters.
 * </p>
 *
 * @param transport HTTP transport, which should normally be:
 *        <ul>
 *        <li>Google App Engine:
 *        {@code com.google.api.client.extensions.appengine.http.UrlFetchTransport}</li>
 *        <li>Android: {@code newCompatibleTransport} from
 *        {@code com.google.api.client.extensions.android.http.AndroidHttp}</li>
 *        <li>Java: {@link com.google.api.client.googleapis.javanet.GoogleNetHttpTransport#newTrustedTransport()}
 *        </li>
 *        </ul>
 * @param jsonFactory JSON factory, which may be:
 *        <ul>
 *        <li>Jackson: {@code com.google.api.client.json.jackson2.JacksonFactory}</li>
 *        <li>Google GSON: {@code com.google.api.client.json.gson.GsonFactory}</li>
 *        <li>Android Honeycomb or higher:
 *        {@code com.google.api.client.extensions.android.json.AndroidJsonFactory}</li>
 *        </ul>
 * @param httpRequestInitializer HTTP request initializer or {@code null} for none
 * @since 1.7
 */
public CloudVideoIntelligence(com.google.api.client.http.HttpTransport transport, com.google.api.client.json.JsonFactory jsonFactory,
    com.google.api.client.http.HttpRequestInitializer httpRequestInitializer) {
  // Delegate to the Builder-based constructor with all optional
  // parameters left at their defaults.
  this(new Builder(transport, jsonFactory, httpRequestInitializer));
}
/**
 * Package-private constructor used by {@link Builder#build()}.
 *
 * @param builder builder
 */
CloudVideoIntelligence(Builder builder) {
  super(builder);
}
/**
 * Initializes each request before execution; applies the global
 * parameters configured on this client via the superclass.
 */
@Override
protected void initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest<?> httpClientRequest) throws java.io.IOException {
  super.initialize(httpClientRequest);
}
/**
 * An accessor for creating requests from the Operations collection.
 *
 * <p>The typical use is:</p>
 * <pre>
 *   {@code CloudVideoIntelligence videointelligence = new CloudVideoIntelligence(...);}
 *   {@code CloudVideoIntelligence.Operations.List request = videointelligence.operations().list(parameters ...)}
 * </pre>
 *
 * @return the resource collection
 */
public Operations operations() {
  // Operations is a stateless inner class; a fresh instance per call is
  // cheap and matches the generated-client convention.
  return new Operations();
}
/**
* The "operations" collection of methods.
*/
public class Operations {
/**
 * Starts asynchronous cancellation on a long-running operation. The server makes a best effort to
 * cancel the operation, but success is not guaranteed. If the server doesn't support this method,
 * it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other
 * methods to check whether the cancellation succeeded or whether the operation completed despite
 * cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an
 * operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to
 * `Code.CANCELLED`.
 *
 * Create a request for the method "operations.cancel".
 *
 * This request holds the parameters needed by the videointelligence server. After setting any
 * optional parameters, call the {@link Cancel#execute()} method to invoke the remote operation.
 *
 * @param name The name of the operation resource to be cancelled.
 * @param content the {@link com.google.api.services.videointelligence.v1.model.GoogleLongrunningCancelOperationRequest}
 * @return the request
 */
public Cancel cancel(java.lang.String name, com.google.api.services.videointelligence.v1.model.GoogleLongrunningCancelOperationRequest content) throws java.io.IOException {
  Cancel result = new Cancel(name, content);
  // Apply client-level global parameters before returning the request.
  initialize(result);
  return result;
}
public class Cancel extends CloudVideoIntelligenceRequest<com.google.api.services.videointelligence.v1.model.GoogleProtobufEmpty> {

  private static final String REST_PATH = "v1/operations/{+name}:cancel";

  // Operation names at this endpoint are a single path segment (no '/').
  private final java.util.regex.Pattern NAME_PATTERN =
      java.util.regex.Pattern.compile("^[^/]+$");

  /**
   * Starts asynchronous cancellation on a long-running operation. The server makes a best effort
   * to cancel the operation, but success is not guaranteed. If the server doesn't support this
   * method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or
   * other methods to check whether the cancellation succeeded or whether the operation completed
   * despite cancellation. On successful cancellation, the operation is not deleted; instead, it
   * becomes an operation with an Operation.error value with a google.rpc.Status.code of 1,
   * corresponding to `Code.CANCELLED`.
   *
   * Create a request for the method "operations.cancel".
   *
   * This request holds the parameters needed by the videointelligence server. After setting
   * any optional parameters, call the {@link Cancel#execute()} method to invoke the remote
   * operation. <p> {@link
   * Cancel#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)} must
   * be called to initialize this instance immediately after invoking the constructor. </p>
   *
   * @param name The name of the operation resource to be cancelled.
   * @param content the {@link com.google.api.services.videointelligence.v1.model.GoogleLongrunningCancelOperationRequest}
   * @since 1.13
   */
  protected Cancel(java.lang.String name, com.google.api.services.videointelligence.v1.model.GoogleLongrunningCancelOperationRequest content) {
    super(CloudVideoIntelligence.this, "POST", REST_PATH, content, com.google.api.services.videointelligence.v1.model.GoogleProtobufEmpty.class);
    this.name = com.google.api.client.util.Preconditions.checkNotNull(name, "Required parameter name must be specified.");
    if (!getSuppressPatternChecks()) {
      com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(),
          "Parameter name must conform to the pattern " +
          "^[^/]+$");
    }
  }

  @Override
  public Cancel set$Xgafv(java.lang.String $Xgafv) {
    return (Cancel) super.set$Xgafv($Xgafv);
  }

  @Override
  public Cancel setAccessToken(java.lang.String accessToken) {
    return (Cancel) super.setAccessToken(accessToken);
  }

  @Override
  public Cancel setAlt(java.lang.String alt) {
    return (Cancel) super.setAlt(alt);
  }

  @Override
  public Cancel setCallback(java.lang.String callback) {
    return (Cancel) super.setCallback(callback);
  }

  @Override
  public Cancel setFields(java.lang.String fields) {
    return (Cancel) super.setFields(fields);
  }

  @Override
  public Cancel setKey(java.lang.String key) {
    return (Cancel) super.setKey(key);
  }

  @Override
  public Cancel setOauthToken(java.lang.String oauthToken) {
    return (Cancel) super.setOauthToken(oauthToken);
  }

  @Override
  public Cancel setPrettyPrint(java.lang.Boolean prettyPrint) {
    return (Cancel) super.setPrettyPrint(prettyPrint);
  }

  @Override
  public Cancel setQuotaUser(java.lang.String quotaUser) {
    return (Cancel) super.setQuotaUser(quotaUser);
  }

  @Override
  public Cancel setUploadType(java.lang.String uploadType) {
    return (Cancel) super.setUploadType(uploadType);
  }

  @Override
  public Cancel setUploadProtocol(java.lang.String uploadProtocol) {
    return (Cancel) super.setUploadProtocol(uploadProtocol);
  }

  /** The name of the operation resource to be cancelled. */
  @com.google.api.client.util.Key
  private java.lang.String name;

  /** The name of the operation resource to be cancelled.
   */
  public java.lang.String getName() {
    return name;
  }

  /** The name of the operation resource to be cancelled. */
  public Cancel setName(java.lang.String name) {
    if (!getSuppressPatternChecks()) {
      com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(),
          "Parameter name must conform to the pattern " +
          "^[^/]+$");
    }
    this.name = name;
    return this;
  }

  @Override
  public Cancel set(String parameterName, Object value) {
    return (Cancel) super.set(parameterName, value);
  }
}
/**
 * Deletes a long-running operation. The client signals it is no longer interested in the
 * operation result; the operation itself is not cancelled. Servers that do not support this
 * method return `google.rpc.Code.UNIMPLEMENTED`.
 *
 * <p>Creates a request for the method "operations.delete". Set any optional parameters on the
 * returned request, then call {@link Delete#execute()} to invoke the remote operation.</p>
 *
 * @param name The name of the operation resource to be deleted.
 * @return the initialized request
 */
public Delete delete(java.lang.String name) throws java.io.IOException {
  Delete request = new Delete(name);
  initialize(request);
  return request;
}
public class Delete extends CloudVideoIntelligenceRequest<com.google.api.services.videointelligence.v1.model.GoogleProtobufEmpty> {

  private static final String REST_PATH = "v1/operations/{+name}";

  private final java.util.regex.Pattern NAME_PATTERN =
      java.util.regex.Pattern.compile("^[^/]+$");

  /** The name of the operation resource to be deleted. */
  @com.google.api.client.util.Key
  private java.lang.String name;

  /**
   * Deletes a long-running operation. This method indicates that the client is no longer
   * interested in the operation result. It does not cancel the operation. If the server doesn't
   * support this method, it returns `google.rpc.Code.UNIMPLEMENTED`.
   *
   * <p>Creates a request for the method "operations.delete". After setting any optional
   * parameters, call {@link Delete#execute()} to invoke the remote operation. {@link
   * Delete#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)}
   * must be called to initialize this instance immediately after invoking the constructor.</p>
   *
   * @param name The name of the operation resource to be deleted.
   * @since 1.13
   */
  protected Delete(java.lang.String name) {
    super(CloudVideoIntelligence.this, "DELETE", REST_PATH, null, com.google.api.services.videointelligence.v1.model.GoogleProtobufEmpty.class);
    this.name = com.google.api.client.util.Preconditions.checkNotNull(name, "Required parameter name must be specified.");
    checkName(name);
  }

  /** Rejects {@code name} values that contain '/' unless pattern checks are suppressed. */
  private void checkName(java.lang.String name) {
    if (!getSuppressPatternChecks()) {
      com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(),
          "Parameter name must conform to the pattern ^[^/]+$");
    }
  }

  /** Returns the name of the operation resource to be deleted. */
  public java.lang.String getName() {
    return name;
  }

  /** Sets the name of the operation resource to be deleted. */
  public Delete setName(java.lang.String name) {
    checkName(name);
    this.name = name;
    return this;
  }

  // Covariant overrides: each delegates upward and narrows the return type to Delete.
  @Override public Delete set$Xgafv(java.lang.String $Xgafv) { return (Delete) super.set$Xgafv($Xgafv); }
  @Override public Delete setAccessToken(java.lang.String accessToken) { return (Delete) super.setAccessToken(accessToken); }
  @Override public Delete setAlt(java.lang.String alt) { return (Delete) super.setAlt(alt); }
  @Override public Delete setCallback(java.lang.String callback) { return (Delete) super.setCallback(callback); }
  @Override public Delete setFields(java.lang.String fields) { return (Delete) super.setFields(fields); }
  @Override public Delete setKey(java.lang.String key) { return (Delete) super.setKey(key); }
  @Override public Delete setOauthToken(java.lang.String oauthToken) { return (Delete) super.setOauthToken(oauthToken); }
  @Override public Delete setPrettyPrint(java.lang.Boolean prettyPrint) { return (Delete) super.setPrettyPrint(prettyPrint); }
  @Override public Delete setQuotaUser(java.lang.String quotaUser) { return (Delete) super.setQuotaUser(quotaUser); }
  @Override public Delete setUploadType(java.lang.String uploadType) { return (Delete) super.setUploadType(uploadType); }
  @Override public Delete setUploadProtocol(java.lang.String uploadProtocol) { return (Delete) super.setUploadProtocol(uploadProtocol); }
  @Override public Delete set(String parameterName, Object value) { return (Delete) super.set(parameterName, value); }
}
/**
 * Gets the latest state of a long-running operation. Clients can use this method to poll the
 * operation result at intervals as recommended by the API service.
 *
 * <p>Creates a request for the method "operations.get". Set any optional parameters on the
 * returned request, then call {@link Get#execute()} to invoke the remote operation.</p>
 *
 * @param name The name of the operation resource.
 * @return the initialized request
 */
public Get get(java.lang.String name) throws java.io.IOException {
  Get request = new Get(name);
  initialize(request);
  return request;
}
public class Get extends CloudVideoIntelligenceRequest<com.google.api.services.videointelligence.v1.model.GoogleLongrunningOperation> {

  private static final String REST_PATH = "v1/operations/{+name}";

  private final java.util.regex.Pattern NAME_PATTERN =
      java.util.regex.Pattern.compile("^[^/]+$");

  /** The name of the operation resource. */
  @com.google.api.client.util.Key
  private java.lang.String name;

  /**
   * Gets the latest state of a long-running operation. Clients can use this method to poll the
   * operation result at intervals as recommended by the API service.
   *
   * <p>Creates a request for the method "operations.get". After setting any optional
   * parameters, call {@link Get#execute()} to invoke the remote operation. {@link
   * Get#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)} must
   * be called to initialize this instance immediately after invoking the constructor.</p>
   *
   * @param name The name of the operation resource.
   * @since 1.13
   */
  protected Get(java.lang.String name) {
    super(CloudVideoIntelligence.this, "GET", REST_PATH, null, com.google.api.services.videointelligence.v1.model.GoogleLongrunningOperation.class);
    this.name = com.google.api.client.util.Preconditions.checkNotNull(name, "Required parameter name must be specified.");
    checkName(name);
  }

  /** Rejects {@code name} values that contain '/' unless pattern checks are suppressed. */
  private void checkName(java.lang.String name) {
    if (!getSuppressPatternChecks()) {
      com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(),
          "Parameter name must conform to the pattern ^[^/]+$");
    }
  }

  // HEAD request variants; both simply delegate to the superclass implementation.
  @Override
  public com.google.api.client.http.HttpResponse executeUsingHead() throws java.io.IOException {
    return super.executeUsingHead();
  }

  @Override
  public com.google.api.client.http.HttpRequest buildHttpRequestUsingHead() throws java.io.IOException {
    return super.buildHttpRequestUsingHead();
  }

  /** Returns the name of the operation resource. */
  public java.lang.String getName() {
    return name;
  }

  /** Sets the name of the operation resource. */
  public Get setName(java.lang.String name) {
    checkName(name);
    this.name = name;
    return this;
  }

  // Covariant overrides: each delegates upward and narrows the return type to Get.
  @Override public Get set$Xgafv(java.lang.String $Xgafv) { return (Get) super.set$Xgafv($Xgafv); }
  @Override public Get setAccessToken(java.lang.String accessToken) { return (Get) super.setAccessToken(accessToken); }
  @Override public Get setAlt(java.lang.String alt) { return (Get) super.setAlt(alt); }
  @Override public Get setCallback(java.lang.String callback) { return (Get) super.setCallback(callback); }
  @Override public Get setFields(java.lang.String fields) { return (Get) super.setFields(fields); }
  @Override public Get setKey(java.lang.String key) { return (Get) super.setKey(key); }
  @Override public Get setOauthToken(java.lang.String oauthToken) { return (Get) super.setOauthToken(oauthToken); }
  @Override public Get setPrettyPrint(java.lang.Boolean prettyPrint) { return (Get) super.setPrettyPrint(prettyPrint); }
  @Override public Get setQuotaUser(java.lang.String quotaUser) { return (Get) super.setQuotaUser(quotaUser); }
  @Override public Get setUploadType(java.lang.String uploadType) { return (Get) super.setUploadType(uploadType); }
  @Override public Get setUploadProtocol(java.lang.String uploadProtocol) { return (Get) super.setUploadProtocol(uploadProtocol); }
  @Override public Get set(String parameterName, Object value) { return (Get) super.set(parameterName, value); }
}
/**
 * Lists operations that match the specified filter in the request. If the server doesn't
 * support this method, it returns `UNIMPLEMENTED`.
 *
 * <p>NOTE: the `name` binding allows API services to override the binding to use different
 * resource name schemes, such as `users/operations`. To override the binding, API services can
 * add a binding such as `"/v1/{name=users}/operations"` to their service configuration. For
 * backwards compatibility, the default name includes the operations collection id, however
 * overriding users must ensure the name binding is the parent resource, without the operations
 * collection id.</p>
 *
 * <p>Creates a request for the method "operations.list". Set any optional parameters on the
 * returned request, then call {@link List#execute()} to invoke the remote operation.</p>
 *
 * @return the initialized request
 */
public List list() throws java.io.IOException {
  List request = new List();
  initialize(request);
  return request;
}
public class List extends CloudVideoIntelligenceRequest<com.google.api.services.videointelligence.v1.model.GoogleLongrunningListOperationsResponse> {

  private static final String REST_PATH = "v1/operations";

  /** The standard list filter. */
  @com.google.api.client.util.Key
  private java.lang.String filter;

  /** The name of the operation's parent resource. */
  @com.google.api.client.util.Key
  private java.lang.String name;

  /** The standard list page size. */
  @com.google.api.client.util.Key
  private java.lang.Integer pageSize;

  /** The standard list page token. */
  @com.google.api.client.util.Key
  private java.lang.String pageToken;

  /**
   * Lists operations that match the specified filter in the request. If the server doesn't
   * support this method, it returns `UNIMPLEMENTED`.
   *
   * <p>NOTE: the `name` binding allows API services to override the binding to use different
   * resource name schemes, such as `users/operations`. To override the binding, API services
   * can add a binding such as `"/v1/{name=users}/operations"` to their service configuration.
   * For backwards compatibility, the default name includes the operations collection id,
   * however overriding users must ensure the name binding is the parent resource, without the
   * operations collection id.</p>
   *
   * <p>Creates a request for the method "operations.list". After setting any optional
   * parameters, call {@link List#execute()} to invoke the remote operation. {@link
   * List#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)}
   * must be called to initialize this instance immediately after invoking the constructor.</p>
   *
   * @since 1.13
   */
  protected List() {
    super(CloudVideoIntelligence.this, "GET", REST_PATH, null, com.google.api.services.videointelligence.v1.model.GoogleLongrunningListOperationsResponse.class);
  }

  // HEAD request variants; both simply delegate to the superclass implementation.
  @Override
  public com.google.api.client.http.HttpResponse executeUsingHead() throws java.io.IOException {
    return super.executeUsingHead();
  }

  @Override
  public com.google.api.client.http.HttpRequest buildHttpRequestUsingHead() throws java.io.IOException {
    return super.buildHttpRequestUsingHead();
  }

  /** Returns the standard list filter. */
  public java.lang.String getFilter() {
    return filter;
  }

  /** Sets the standard list filter. */
  public List setFilter(java.lang.String filter) {
    this.filter = filter;
    return this;
  }

  /** Returns the name of the operation's parent resource. */
  public java.lang.String getName() {
    return name;
  }

  /** Sets the name of the operation's parent resource. */
  public List setName(java.lang.String name) {
    this.name = name;
    return this;
  }

  /** Returns the standard list page size. */
  public java.lang.Integer getPageSize() {
    return pageSize;
  }

  /** Sets the standard list page size. */
  public List setPageSize(java.lang.Integer pageSize) {
    this.pageSize = pageSize;
    return this;
  }

  /** Returns the standard list page token. */
  public java.lang.String getPageToken() {
    return pageToken;
  }

  /** Sets the standard list page token. */
  public List setPageToken(java.lang.String pageToken) {
    this.pageToken = pageToken;
    return this;
  }

  // Covariant overrides: each delegates upward and narrows the return type to List.
  @Override public List set$Xgafv(java.lang.String $Xgafv) { return (List) super.set$Xgafv($Xgafv); }
  @Override public List setAccessToken(java.lang.String accessToken) { return (List) super.setAccessToken(accessToken); }
  @Override public List setAlt(java.lang.String alt) { return (List) super.setAlt(alt); }
  @Override public List setCallback(java.lang.String callback) { return (List) super.setCallback(callback); }
  @Override public List setFields(java.lang.String fields) { return (List) super.setFields(fields); }
  @Override public List setKey(java.lang.String key) { return (List) super.setKey(key); }
  @Override public List setOauthToken(java.lang.String oauthToken) { return (List) super.setOauthToken(oauthToken); }
  @Override public List setPrettyPrint(java.lang.Boolean prettyPrint) { return (List) super.setPrettyPrint(prettyPrint); }
  @Override public List setQuotaUser(java.lang.String quotaUser) { return (List) super.setQuotaUser(quotaUser); }
  @Override public List setUploadType(java.lang.String uploadType) { return (List) super.setUploadType(uploadType); }
  @Override public List setUploadProtocol(java.lang.String uploadProtocol) { return (List) super.setUploadProtocol(uploadProtocol); }
  @Override public List set(String parameterName, Object value) { return (List) super.set(parameterName, value); }
}
}
/**
 * An accessor for creating requests from the Videos collection.
 *
 * <p>The typical use is:</p>
 * <pre>
 * {@code CloudVideoIntelligence videointelligence = new CloudVideoIntelligence(...);}
 * {@code CloudVideoIntelligence.Videos.Annotate request = videointelligence.videos().annotate(content)}
 * </pre>
 *
 * @return the resource collection
 */
public Videos videos() {
  // Fix over generated doc: the Videos collection exposes annotate(), not list().
  return new Videos();
}
/**
 * The "videos" collection of methods.
 */
public class Videos {

  /**
   * Performs asynchronous video annotation. Progress and results can be retrieved through the
   * `google.longrunning.Operations` interface. `Operation.metadata` contains
   * `AnnotateVideoProgress` (progress). `Operation.response` contains `AnnotateVideoResponse`
   * (results).
   *
   * <p>Creates a request for the method "videos.annotate". Set any optional parameters on the
   * returned request, then call {@link Annotate#execute()} to invoke the remote operation.</p>
   *
   * @param content the {@link com.google.api.services.videointelligence.v1.model.GoogleCloudVideointelligenceV1AnnotateVideoRequest}
   * @return the initialized request
   */
  public Annotate annotate(com.google.api.services.videointelligence.v1.model.GoogleCloudVideointelligenceV1AnnotateVideoRequest content) throws java.io.IOException {
    Annotate request = new Annotate(content);
    initialize(request);
    return request;
  }

  public class Annotate extends CloudVideoIntelligenceRequest<com.google.api.services.videointelligence.v1.model.GoogleLongrunningOperation> {

    private static final String REST_PATH = "v1/videos:annotate";

    /**
     * Performs asynchronous video annotation; see {@link Videos#annotate}. {@link
     * Annotate#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)}
     * must be called to initialize this instance immediately after invoking the constructor.
     *
     * @param content the {@link com.google.api.services.videointelligence.v1.model.GoogleCloudVideointelligenceV1AnnotateVideoRequest}
     * @since 1.13
     */
    protected Annotate(com.google.api.services.videointelligence.v1.model.GoogleCloudVideointelligenceV1AnnotateVideoRequest content) {
      super(CloudVideoIntelligence.this, "POST", REST_PATH, content, com.google.api.services.videointelligence.v1.model.GoogleLongrunningOperation.class);
    }

    // Covariant overrides: each delegates upward and narrows the return type to Annotate.
    @Override public Annotate set$Xgafv(java.lang.String $Xgafv) { return (Annotate) super.set$Xgafv($Xgafv); }
    @Override public Annotate setAccessToken(java.lang.String accessToken) { return (Annotate) super.setAccessToken(accessToken); }
    @Override public Annotate setAlt(java.lang.String alt) { return (Annotate) super.setAlt(alt); }
    @Override public Annotate setCallback(java.lang.String callback) { return (Annotate) super.setCallback(callback); }
    @Override public Annotate setFields(java.lang.String fields) { return (Annotate) super.setFields(fields); }
    @Override public Annotate setKey(java.lang.String key) { return (Annotate) super.setKey(key); }
    @Override public Annotate setOauthToken(java.lang.String oauthToken) { return (Annotate) super.setOauthToken(oauthToken); }
    @Override public Annotate setPrettyPrint(java.lang.Boolean prettyPrint) { return (Annotate) super.setPrettyPrint(prettyPrint); }
    @Override public Annotate setQuotaUser(java.lang.String quotaUser) { return (Annotate) super.setQuotaUser(quotaUser); }
    @Override public Annotate setUploadType(java.lang.String uploadType) { return (Annotate) super.setUploadType(uploadType); }
    @Override public Annotate setUploadProtocol(java.lang.String uploadProtocol) { return (Annotate) super.setUploadProtocol(uploadProtocol); }
    @Override public Annotate set(String parameterName, Object value) { return (Annotate) super.set(parameterName, value); }
  }
}
/**
 * Builder for {@link CloudVideoIntelligence}.
 *
 * <p>Implementation is not thread-safe.</p>
 *
 * @since 1.3.0
 */
public static final class Builder extends com.google.api.client.googleapis.services.json.AbstractGoogleJsonClient.Builder {

  /**
   * Returns an instance of a new builder.
   *
   * @param transport HTTP transport, which should normally be:
   *        <ul>
   *        <li>Google App Engine:
   *        {@code com.google.api.client.extensions.appengine.http.UrlFetchTransport}</li>
   *        <li>Android: {@code newCompatibleTransport} from
   *        {@code com.google.api.client.extensions.android.http.AndroidHttp}</li>
   *        <li>Java: {@link com.google.api.client.googleapis.javanet.GoogleNetHttpTransport#newTrustedTransport()}
   *        </li>
   *        </ul>
   * @param jsonFactory JSON factory, which may be:
   *        <ul>
   *        <li>Jackson: {@code com.google.api.client.json.jackson2.JacksonFactory}</li>
   *        <li>Google GSON: {@code com.google.api.client.json.gson.GsonFactory}</li>
   *        <li>Android Honeycomb or higher:
   *        {@code com.google.api.client.extensions.android.json.AndroidJsonFactory}</li>
   *        </ul>
   * @param httpRequestInitializer HTTP request initializer or {@code null} for none
   * @since 1.7
   */
  public Builder(com.google.api.client.http.HttpTransport transport, com.google.api.client.json.JsonFactory jsonFactory,
      com.google.api.client.http.HttpRequestInitializer httpRequestInitializer) {
    super(transport, jsonFactory, DEFAULT_ROOT_URL, DEFAULT_SERVICE_PATH, httpRequestInitializer, false);
    setBatchPath(DEFAULT_BATCH_PATH);
  }

  /** Builds a new instance of {@link CloudVideoIntelligence}. */
  @Override
  public CloudVideoIntelligence build() {
    return new CloudVideoIntelligence(this);
  }

  // Covariant overrides: keep configuration chains typed as Builder.
  @Override public Builder setRootUrl(String rootUrl) { return (Builder) super.setRootUrl(rootUrl); }
  @Override public Builder setServicePath(String servicePath) { return (Builder) super.setServicePath(servicePath); }
  @Override public Builder setBatchPath(String batchPath) { return (Builder) super.setBatchPath(batchPath); }
  @Override public Builder setHttpRequestInitializer(com.google.api.client.http.HttpRequestInitializer httpRequestInitializer) { return (Builder) super.setHttpRequestInitializer(httpRequestInitializer); }
  @Override public Builder setApplicationName(String applicationName) { return (Builder) super.setApplicationName(applicationName); }
  @Override public Builder setSuppressPatternChecks(boolean suppressPatternChecks) { return (Builder) super.setSuppressPatternChecks(suppressPatternChecks); }
  @Override public Builder setSuppressRequiredParameterChecks(boolean suppressRequiredParameterChecks) { return (Builder) super.setSuppressRequiredParameterChecks(suppressRequiredParameterChecks); }
  @Override public Builder setSuppressAllChecks(boolean suppressAllChecks) { return (Builder) super.setSuppressAllChecks(suppressAllChecks); }

  /**
   * Set the {@link CloudVideoIntelligenceRequestInitializer}.
   *
   * @since 1.12
   */
  public Builder setCloudVideoIntelligenceRequestInitializer(
      CloudVideoIntelligenceRequestInitializer cloudvideointelligenceRequestInitializer) {
    return (Builder) super.setGoogleClientRequestInitializer(cloudvideointelligenceRequestInitializer);
  }

  @Override
  public Builder setGoogleClientRequestInitializer(
      com.google.api.client.googleapis.services.GoogleClientRequestInitializer googleClientRequestInitializer) {
    return (Builder) super.setGoogleClientRequestInitializer(googleClientRequestInitializer);
  }
}
}
/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.videointelligence.v1;
/**
* Service definition for CloudVideoIntelligence (v1).
*
* <p>
* Detects objects, explicit content, and scene changes in videos. It also specifies the region for annotation and transcribes speech to text. Supports both asynchronous API and streaming API.
* </p>
*
* <p>
* For more information about this service, see the
* <a href="https://cloud.google.com/video-intelligence/docs/" target="_blank">API Documentation</a>
* </p>
*
* <p>
* This service uses {@link CloudVideoIntelligenceRequestInitializer} to initialize global parameters via its
* {@link Builder}.
* </p>
*
* @since 1.3
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public class CloudVideoIntelligence extends com.google.api.client.googleapis.services.json.AbstractGoogleJsonClient {
// Note: Leave this static initializer at the top of the file.
// Verifies at class-load time that the google-api-client on the classpath has major version 1
// and minor version >= 15; otherwise fails fast with an actionable message.
static {
com.google.api.client.util.Preconditions.checkState(
com.google.api.client.googleapis.GoogleUtils.MAJOR_VERSION == 1 &&
com.google.api.client.googleapis.GoogleUtils.MINOR_VERSION >= 15,
"You are currently running with version %s of google-api-client. " +
"You need at least version 1.15 of google-api-client to run version " +
"1.27.0 of the Cloud Video Intelligence API library.", com.google.api.client.googleapis.GoogleUtils.VERSION);
}
/**
 * The default encoded root URL of the service. This is determined when the library is generated
 * and normally should not be changed.
 *
 * @since 1.7
 */
public static final String DEFAULT_ROOT_URL = "https://videointelligence.googleapis.com/";
/**
 * The default encoded service path of the service (empty for this API). This is determined when
 * the library is generated and normally should not be changed.
 *
 * @since 1.7
 */
public static final String DEFAULT_SERVICE_PATH = "";
/**
 * The default encoded batch path of the service. This is determined when the library is
 * generated and normally should not be changed.
 *
 * @since 1.23
 */
public static final String DEFAULT_BATCH_PATH = "batch";
/**
 * The default encoded base URL of the service: root URL plus service path. This is determined
 * when the library is generated and normally should not be changed.
 */
public static final String DEFAULT_BASE_URL = DEFAULT_ROOT_URL + DEFAULT_SERVICE_PATH;
/**
 * Constructor.
 *
 * <p>
 * Use {@link Builder} if you need to specify any of the optional parameters.
 * </p>
 *
 * @param transport HTTP transport, which should normally be:
 *        <ul>
 *        <li>Google App Engine:
 *        {@code com.google.api.client.extensions.appengine.http.UrlFetchTransport}</li>
 *        <li>Android: {@code newCompatibleTransport} from
 *        {@code com.google.api.client.extensions.android.http.AndroidHttp}</li>
 *        <li>Java: {@link com.google.api.client.googleapis.javanet.GoogleNetHttpTransport#newTrustedTransport()}
 *        </li>
 *        </ul>
 * @param jsonFactory JSON factory, which may be:
 *        <ul>
 *        <li>Jackson: {@code com.google.api.client.json.jackson2.JacksonFactory}</li>
 *        <li>Google GSON: {@code com.google.api.client.json.gson.GsonFactory}</li>
 *        <li>Android Honeycomb or higher:
 *        {@code com.google.api.client.extensions.android.json.AndroidJsonFactory}</li>
 *        </ul>
 * @param httpRequestInitializer HTTP request initializer or {@code null} for none
 * @since 1.7
 */
public CloudVideoIntelligence(com.google.api.client.http.HttpTransport transport, com.google.api.client.json.JsonFactory jsonFactory,
    com.google.api.client.http.HttpRequestInitializer httpRequestInitializer) {
  this(new Builder(transport, jsonFactory, httpRequestInitializer));
}
/**
 * Constructs the client from a configured {@link Builder}.
 *
 * @param builder builder
 */
CloudVideoIntelligence(Builder builder) {
  super(builder);
}
/** Initializes each request created by this client; delegates to the superclass implementation. */
@Override
protected void initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest<?> httpClientRequest) throws java.io.IOException {
  super.initialize(httpClientRequest);
}
/**
 * An accessor for creating requests from the Operations collection.
 *
 * <p>The typical use is:</p>
 * <pre>
 * {@code CloudVideoIntelligence videointelligence = new CloudVideoIntelligence(...);}
 * {@code CloudVideoIntelligence.Operations.List request = videointelligence.operations().list(parameters ...)}
 * </pre>
 *
 * @return the resource collection
 */
public Operations operations() {
  return new Operations();
}
/**
* The "operations" collection of methods.
*/
public class Operations {
/**
 * Starts asynchronous cancellation on a long-running operation. The server makes a best effort
 * to cancel the operation, but success is not guaranteed. If the server doesn't support this
 * method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation
 * or other methods to check whether the cancellation succeeded or whether the operation
 * completed despite cancellation. On successful cancellation, the operation is not deleted;
 * instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code
 * of 1, corresponding to `Code.CANCELLED`.
 *
 * <p>Creates a request for the method "operations.cancel". Set any optional parameters on the
 * returned request, then call {@link Cancel#execute()} to invoke the remote operation.</p>
 *
 * @param name The name of the operation resource to be cancelled.
 * @param content the {@link com.google.api.services.videointelligence.v1.model.GoogleLongrunningCancelOperationRequest}
 * @return the initialized request
 */
public Cancel cancel(java.lang.String name, com.google.api.services.videointelligence.v1.model.GoogleLongrunningCancelOperationRequest content) throws java.io.IOException {
  Cancel request = new Cancel(name, content);
  initialize(request);
  return request;
}
public class Cancel extends CloudVideoIntelligenceRequest<com.google.api.services.videointelligence.v1.model.GoogleProtobufEmpty> {
private static final String REST_PATH = "v1/operations/{+name}:cancel";
private final java.util.regex.Pattern NAME_PATTERN =
java.util.regex.Pattern.compile("^[^/]+$");
/**
* Starts asynchronous cancellation on a long-running operation. The server makes a best effort
* to cancel the operation, but success is not guaranteed. If the server doesn't support this
* method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or
* other methods to check whether the cancellation succeeded or whether the operation completed
* despite cancellation. On successful cancellation, the operation is not deleted; instead, it
* becomes an operation with an Operation.error value with a google.rpc.Status.code of 1,
* corresponding to `Code.CANCELLED`.
*
* Create a request for the method "operations.cancel".
*
* This request holds the parameters needed by the the videointelligence server. After setting
* any optional parameters, call the {@link Cancel#execute()} method to invoke the remote
* operation. <p> {@link
* Cancel#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)} must
* be called to initialize this instance immediately after invoking the constructor. </p>
*
* @param name The name of the operation resource to be cancelled.
* @param content the {@link com.google.api.services.videointelligence.v1.model.GoogleLongrunningCancelOperationRequest}
* @since 1.13
*/
protected Cancel(java.lang.String name, com.google.api.services.videointelligence.v1.model.GoogleLongrunningCancelOperationRequest content) {
super(CloudVideoIntelligence.this, "POST", REST_PATH, content, com.google.api.services.videointelligence.v1.model.GoogleProtobufEmpty.class);
this.name = com.google.api.client.util.Preconditions.checkNotNull(name, "Required parameter name must be specified.");
if (!getSuppressPatternChecks()) {
com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(),
"Parameter name must conform to the pattern " +
"^[^/]+$");
}
}
@Override
public Cancel set$Xgafv(java.lang.String $Xgafv) {
return (Cancel) super.set$Xgafv($Xgafv);
}
@Override
public Cancel setAccessToken(java.lang.String accessToken) {
return (Cancel) super.setAccessToken(accessToken);
}
@Override
public Cancel setAlt(java.lang.String alt) {
return (Cancel) super.setAlt(alt);
}
@Override
public Cancel setCallback(java.lang.String callback) {
return (Cancel) super.setCallback(callback);
}
@Override
public Cancel setFields(java.lang.String fields) {
return (Cancel) super.setFields(fields);
}
@Override
public Cancel setKey(java.lang.String key) {
return (Cancel) super.setKey(key);
}
@Override
public Cancel setOauthToken(java.lang.String oauthToken) {
return (Cancel) super.setOauthToken(oauthToken);
}
@Override
public Cancel setPrettyPrint(java.lang.Boolean prettyPrint) {
return (Cancel) super.setPrettyPrint(prettyPrint);
}
@Override
public Cancel setQuotaUser(java.lang.String quotaUser) {
return (Cancel) super.setQuotaUser(quotaUser);
}
@Override
public Cancel setUploadType(java.lang.String uploadType) {
return (Cancel) super.setUploadType(uploadType);
}
@Override
public Cancel setUploadProtocol(java.lang.String uploadProtocol) {
return (Cancel) super.setUploadProtocol(uploadProtocol);
}
/** The name of the operation resource to be cancelled. */
@com.google.api.client.util.Key
private java.lang.String name;
/** The name of the operation resource to be cancelled.
*/
public java.lang.String getName() {
return name;
}
/** The name of the operation resource to be cancelled. */
public Cancel setName(java.lang.String name) {
  // Re-validates the path parameter unless pattern checks were suppressed on the builder.
  if (!getSuppressPatternChecks()) {
    com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(),
        "Parameter name must conform to the pattern " +
        "^[^/]+$");
  }
  this.name = name;
  return this;
}
@Override
public Cancel set(String parameterName, Object value) {
  // Generic parameter setter; narrows the return type to Cancel for chaining.
  return (Cancel) super.set(parameterName, value);
}
}
/**
* Deletes a long-running operation. This method indicates that the client is no longer interested
* in the operation result. It does not cancel the operation. If the server doesn't support this
* method, it returns `google.rpc.Code.UNIMPLEMENTED`.
*
* Create a request for the method "operations.delete".
*
* This request holds the parameters needed by the videointelligence server. After setting any
* optional parameters, call the {@link Delete#execute()} method to invoke the remote operation.
*
* @param name The name of the operation resource to be deleted.
* @return the request
*/
/** Builds and initializes an operations.delete request for the given operation name. */
public Delete delete(java.lang.String name) throws java.io.IOException {
  Delete request = new Delete(name);
  initialize(request);
  return request;
}
public class Delete extends CloudVideoIntelligenceRequest<com.google.api.services.videointelligence.v1.model.GoogleProtobufEmpty> {

  private static final String REST_PATH = "v1/operations/{+name}";

  // Client-side validation pattern for the {+name} path parameter.
  private final java.util.regex.Pattern NAME_PATTERN =
      java.util.regex.Pattern.compile("^[^/]+$");

  /**
   * Deletes a long-running operation. This method indicates that the client is no longer interested
   * in the operation result. It does not cancel the operation. If the server doesn't support this
   * method, it returns `google.rpc.Code.UNIMPLEMENTED`.
   *
   * Create a request for the method "operations.delete".
   *
   * This request holds the parameters needed by the videointelligence server. After setting
   * any optional parameters, call the {@link Delete#execute()} method to invoke the remote
   * operation. <p> {@link
   * Delete#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)} must
   * be called to initialize this instance immediately after invoking the constructor. </p>
   *
   * @param name The name of the operation resource to be deleted.
   * @since 1.13
   */
  protected Delete(java.lang.String name) {
    super(CloudVideoIntelligence.this, "DELETE", REST_PATH, null, com.google.api.services.videointelligence.v1.model.GoogleProtobufEmpty.class);
    this.name = com.google.api.client.util.Preconditions.checkNotNull(name, "Required parameter name must be specified.");
    if (!getSuppressPatternChecks()) {
      com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(),
          "Parameter name must conform to the pattern " +
          "^[^/]+$");
    }
  }

  // Covariant overrides below narrow the return type to Delete so setter calls can be chained.
  @Override
  public Delete set$Xgafv(java.lang.String $Xgafv) {
    return (Delete) super.set$Xgafv($Xgafv);
  }

  @Override
  public Delete setAccessToken(java.lang.String accessToken) {
    return (Delete) super.setAccessToken(accessToken);
  }

  @Override
  public Delete setAlt(java.lang.String alt) {
    return (Delete) super.setAlt(alt);
  }

  @Override
  public Delete setCallback(java.lang.String callback) {
    return (Delete) super.setCallback(callback);
  }

  @Override
  public Delete setFields(java.lang.String fields) {
    return (Delete) super.setFields(fields);
  }

  @Override
  public Delete setKey(java.lang.String key) {
    return (Delete) super.setKey(key);
  }

  @Override
  public Delete setOauthToken(java.lang.String oauthToken) {
    return (Delete) super.setOauthToken(oauthToken);
  }

  @Override
  public Delete setPrettyPrint(java.lang.Boolean prettyPrint) {
    return (Delete) super.setPrettyPrint(prettyPrint);
  }

  @Override
  public Delete setQuotaUser(java.lang.String quotaUser) {
    return (Delete) super.setQuotaUser(quotaUser);
  }

  @Override
  public Delete setUploadType(java.lang.String uploadType) {
    return (Delete) super.setUploadType(uploadType);
  }

  @Override
  public Delete setUploadProtocol(java.lang.String uploadProtocol) {
    return (Delete) super.setUploadProtocol(uploadProtocol);
  }

  /** The name of the operation resource to be deleted. */
  @com.google.api.client.util.Key
  private java.lang.String name;

  /** The name of the operation resource to be deleted.
   */
  public java.lang.String getName() {
    return name;
  }

  /** The name of the operation resource to be deleted. */
  public Delete setName(java.lang.String name) {
    if (!getSuppressPatternChecks()) {
      com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(),
          "Parameter name must conform to the pattern " +
          "^[^/]+$");
    }
    this.name = name;
    return this;
  }

  @Override
  public Delete set(String parameterName, Object value) {
    return (Delete) super.set(parameterName, value);
  }
}
/**
* Gets the latest state of a long-running operation. Clients can use this method to poll the
* operation result at intervals as recommended by the API service.
*
* Create a request for the method "operations.get".
*
* This request holds the parameters needed by the videointelligence server. After setting any
* optional parameters, call the {@link Get#execute()} method to invoke the remote operation.
*
* @param name The name of the operation resource.
* @return the request
*/
/** Builds and initializes an operations.get request for the given operation name. */
public Get get(java.lang.String name) throws java.io.IOException {
  Get request = new Get(name);
  initialize(request);
  return request;
}
public class Get extends CloudVideoIntelligenceRequest<com.google.api.services.videointelligence.v1.model.GoogleLongrunningOperation> {

  private static final String REST_PATH = "v1/operations/{+name}";

  // Client-side validation pattern for the {+name} path parameter.
  private final java.util.regex.Pattern NAME_PATTERN =
      java.util.regex.Pattern.compile("^[^/]+$");

  /**
   * Gets the latest state of a long-running operation. Clients can use this method to poll the
   * operation result at intervals as recommended by the API service.
   *
   * Create a request for the method "operations.get".
   *
   * This request holds the parameters needed by the videointelligence server. After setting
   * any optional parameters, call the {@link Get#execute()} method to invoke the remote operation.
   * <p> {@link
   * Get#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)} must be
   * called to initialize this instance immediately after invoking the constructor. </p>
   *
   * @param name The name of the operation resource.
   * @since 1.13
   */
  protected Get(java.lang.String name) {
    super(CloudVideoIntelligence.this, "GET", REST_PATH, null, com.google.api.services.videointelligence.v1.model.GoogleLongrunningOperation.class);
    this.name = com.google.api.client.util.Preconditions.checkNotNull(name, "Required parameter name must be specified.");
    if (!getSuppressPatternChecks()) {
      com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(),
          "Parameter name must conform to the pattern " +
          "^[^/]+$");
    }
  }

  // HEAD variants are exposed because this is an idempotent GET endpoint.
  @Override
  public com.google.api.client.http.HttpResponse executeUsingHead() throws java.io.IOException {
    return super.executeUsingHead();
  }

  @Override
  public com.google.api.client.http.HttpRequest buildHttpRequestUsingHead() throws java.io.IOException {
    return super.buildHttpRequestUsingHead();
  }

  // Covariant overrides below narrow the return type to Get so setter calls can be chained.
  @Override
  public Get set$Xgafv(java.lang.String $Xgafv) {
    return (Get) super.set$Xgafv($Xgafv);
  }

  @Override
  public Get setAccessToken(java.lang.String accessToken) {
    return (Get) super.setAccessToken(accessToken);
  }

  @Override
  public Get setAlt(java.lang.String alt) {
    return (Get) super.setAlt(alt);
  }

  @Override
  public Get setCallback(java.lang.String callback) {
    return (Get) super.setCallback(callback);
  }

  @Override
  public Get setFields(java.lang.String fields) {
    return (Get) super.setFields(fields);
  }

  @Override
  public Get setKey(java.lang.String key) {
    return (Get) super.setKey(key);
  }

  @Override
  public Get setOauthToken(java.lang.String oauthToken) {
    return (Get) super.setOauthToken(oauthToken);
  }

  @Override
  public Get setPrettyPrint(java.lang.Boolean prettyPrint) {
    return (Get) super.setPrettyPrint(prettyPrint);
  }

  @Override
  public Get setQuotaUser(java.lang.String quotaUser) {
    return (Get) super.setQuotaUser(quotaUser);
  }

  @Override
  public Get setUploadType(java.lang.String uploadType) {
    return (Get) super.setUploadType(uploadType);
  }

  @Override
  public Get setUploadProtocol(java.lang.String uploadProtocol) {
    return (Get) super.setUploadProtocol(uploadProtocol);
  }

  /** The name of the operation resource. */
  @com.google.api.client.util.Key
  private java.lang.String name;

  /** The name of the operation resource.
   */
  public java.lang.String getName() {
    return name;
  }

  /** The name of the operation resource. */
  public Get setName(java.lang.String name) {
    if (!getSuppressPatternChecks()) {
      com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(),
          "Parameter name must conform to the pattern " +
          "^[^/]+$");
    }
    this.name = name;
    return this;
  }

  @Override
  public Get set(String parameterName, Object value) {
    return (Get) super.set(parameterName, value);
  }
}
/**
* Lists operations that match the specified filter in the request. If the server doesn't support
* this method, it returns `UNIMPLEMENTED`.
*
* NOTE: the `name` binding allows API services to override the binding to use different resource
* name schemes, such as `users/operations`. To override the binding, API services can add a binding
* such as `"/v1/{name=users}/operations"` to their service configuration. For backwards
* compatibility, the default name includes the operations collection id, however overriding users
* must ensure the name binding is the parent resource, without the operations collection id.
*
* Create a request for the method "operations.list".
*
* This request holds the parameters needed by the videointelligence server. After setting any
* optional parameters, call the {@link List#execute()} method to invoke the remote operation.
*
* @return the request
*/
/** Builds and initializes an operations.list request; filters/paging are set on the result. */
public List list() throws java.io.IOException {
  List request = new List();
  initialize(request);
  return request;
}
public class List extends CloudVideoIntelligenceRequest<com.google.api.services.videointelligence.v1.model.GoogleLongrunningListOperationsResponse> {

  private static final String REST_PATH = "v1/operations";

  /**
   * Lists operations that match the specified filter in the request. If the server doesn't support
   * this method, it returns `UNIMPLEMENTED`.
   *
   * NOTE: the `name` binding allows API services to override the binding to use different resource
   * name schemes, such as `users/operations`. To override the binding, API services can add a
   * binding such as `"/v1/{name=users}/operations"` to their service configuration. For backwards
   * compatibility, the default name includes the operations collection id, however overriding users
   * must ensure the name binding is the parent resource, without the operations collection id.
   *
   * Create a request for the method "operations.list".
   *
   * This request holds the parameters needed by the videointelligence server. After setting
   * any optional parameters, call the {@link List#execute()} method to invoke the remote operation.
   * <p> {@link
   * List#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)} must be
   * called to initialize this instance immediately after invoking the constructor. </p>
   *
   * @since 1.13
   */
  protected List() {
    super(CloudVideoIntelligence.this, "GET", REST_PATH, null, com.google.api.services.videointelligence.v1.model.GoogleLongrunningListOperationsResponse.class);
  }

  // HEAD variants are exposed because this is an idempotent GET endpoint.
  @Override
  public com.google.api.client.http.HttpResponse executeUsingHead() throws java.io.IOException {
    return super.executeUsingHead();
  }

  @Override
  public com.google.api.client.http.HttpRequest buildHttpRequestUsingHead() throws java.io.IOException {
    return super.buildHttpRequestUsingHead();
  }

  // Covariant overrides below narrow the return type to List so setter calls can be chained.
  @Override
  public List set$Xgafv(java.lang.String $Xgafv) {
    return (List) super.set$Xgafv($Xgafv);
  }

  @Override
  public List setAccessToken(java.lang.String accessToken) {
    return (List) super.setAccessToken(accessToken);
  }

  @Override
  public List setAlt(java.lang.String alt) {
    return (List) super.setAlt(alt);
  }

  @Override
  public List setCallback(java.lang.String callback) {
    return (List) super.setCallback(callback);
  }

  @Override
  public List setFields(java.lang.String fields) {
    return (List) super.setFields(fields);
  }

  @Override
  public List setKey(java.lang.String key) {
    return (List) super.setKey(key);
  }

  @Override
  public List setOauthToken(java.lang.String oauthToken) {
    return (List) super.setOauthToken(oauthToken);
  }

  @Override
  public List setPrettyPrint(java.lang.Boolean prettyPrint) {
    return (List) super.setPrettyPrint(prettyPrint);
  }

  @Override
  public List setQuotaUser(java.lang.String quotaUser) {
    return (List) super.setQuotaUser(quotaUser);
  }

  @Override
  public List setUploadType(java.lang.String uploadType) {
    return (List) super.setUploadType(uploadType);
  }

  @Override
  public List setUploadProtocol(java.lang.String uploadProtocol) {
    return (List) super.setUploadProtocol(uploadProtocol);
  }

  // Optional query parameters (sent as URL query params via @Key).
  /** The standard list filter. */
  @com.google.api.client.util.Key
  private java.lang.String filter;

  /** The standard list filter.
   */
  public java.lang.String getFilter() {
    return filter;
  }

  /** The standard list filter. */
  public List setFilter(java.lang.String filter) {
    this.filter = filter;
    return this;
  }

  /** The name of the operation's parent resource. */
  @com.google.api.client.util.Key
  private java.lang.String name;

  /** The name of the operation's parent resource.
   */
  public java.lang.String getName() {
    return name;
  }

  /** The name of the operation's parent resource. */
  public List setName(java.lang.String name) {
    this.name = name;
    return this;
  }

  /** The standard list page size. */
  @com.google.api.client.util.Key
  private java.lang.Integer pageSize;

  /** The standard list page size.
   */
  public java.lang.Integer getPageSize() {
    return pageSize;
  }

  /** The standard list page size. */
  public List setPageSize(java.lang.Integer pageSize) {
    this.pageSize = pageSize;
    return this;
  }

  /** The standard list page token. */
  @com.google.api.client.util.Key
  private java.lang.String pageToken;

  /** The standard list page token.
   */
  public java.lang.String getPageToken() {
    return pageToken;
  }

  /** The standard list page token. */
  public List setPageToken(java.lang.String pageToken) {
    this.pageToken = pageToken;
    return this;
  }

  @Override
  public List set(String parameterName, Object value) {
    return (List) super.set(parameterName, value);
  }
}
}
/**
* An accessor for creating requests from the Videos collection.
*
* <p>The typical use is:</p>
* <pre>
* {@code CloudVideoIntelligence videointelligence = new CloudVideoIntelligence(...);}
* {@code CloudVideoIntelligence.Videos.List request = videointelligence.videos().list(parameters ...)}
* </pre>
*
* @return the resource collection
*/
public Videos videos() {
  // A new accessor instance per call; Videos holds no state, so this is cheap.
  return new Videos();
}
/**
* The "videos" collection of methods.
*/
public class Videos {

  /**
   * Performs asynchronous video annotation. Progress and results can be retrieved through the
   * `google.longrunning.Operations` interface. `Operation.metadata` contains `AnnotateVideoProgress`
   * (progress). `Operation.response` contains `AnnotateVideoResponse` (results).
   *
   * Create a request for the method "videos.annotate".
   *
   * This request holds the parameters needed by the videointelligence server. After setting any
   * optional parameters, call the {@link Annotate#execute()} method to invoke the remote operation.
   *
   * @param content the {@link com.google.api.services.videointelligence.v1.model.GoogleCloudVideointelligenceV1AnnotateVideoRequest}
   * @return the request
   */
  public Annotate annotate(com.google.api.services.videointelligence.v1.model.GoogleCloudVideointelligenceV1AnnotateVideoRequest content) throws java.io.IOException {
    Annotate result = new Annotate(content);
    initialize(result);
    return result;
  }

  public class Annotate extends CloudVideoIntelligenceRequest<com.google.api.services.videointelligence.v1.model.GoogleLongrunningOperation> {

    private static final String REST_PATH = "v1/videos:annotate";

    /**
     * Performs asynchronous video annotation. Progress and results can be retrieved through the
     * `google.longrunning.Operations` interface. `Operation.metadata` contains
     * `AnnotateVideoProgress` (progress). `Operation.response` contains `AnnotateVideoResponse`
     * (results).
     *
     * Create a request for the method "videos.annotate".
     *
     * This request holds the parameters needed by the videointelligence server. After setting
     * any optional parameters, call the {@link Annotate#execute()} method to invoke the remote
     * operation. <p> {@link
     * Annotate#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)}
     * must be called to initialize this instance immediately after invoking the constructor. </p>
     *
     * @param content the {@link com.google.api.services.videointelligence.v1.model.GoogleCloudVideointelligenceV1AnnotateVideoRequest}
     * @since 1.13
     */
    protected Annotate(com.google.api.services.videointelligence.v1.model.GoogleCloudVideointelligenceV1AnnotateVideoRequest content) {
      super(CloudVideoIntelligence.this, "POST", REST_PATH, content, com.google.api.services.videointelligence.v1.model.GoogleLongrunningOperation.class);
    }

    // Covariant overrides below narrow the return type to Annotate so setter calls can be chained.
    @Override
    public Annotate set$Xgafv(java.lang.String $Xgafv) {
      return (Annotate) super.set$Xgafv($Xgafv);
    }

    @Override
    public Annotate setAccessToken(java.lang.String accessToken) {
      return (Annotate) super.setAccessToken(accessToken);
    }

    @Override
    public Annotate setAlt(java.lang.String alt) {
      return (Annotate) super.setAlt(alt);
    }

    @Override
    public Annotate setCallback(java.lang.String callback) {
      return (Annotate) super.setCallback(callback);
    }

    @Override
    public Annotate setFields(java.lang.String fields) {
      return (Annotate) super.setFields(fields);
    }

    @Override
    public Annotate setKey(java.lang.String key) {
      return (Annotate) super.setKey(key);
    }

    @Override
    public Annotate setOauthToken(java.lang.String oauthToken) {
      return (Annotate) super.setOauthToken(oauthToken);
    }

    @Override
    public Annotate setPrettyPrint(java.lang.Boolean prettyPrint) {
      return (Annotate) super.setPrettyPrint(prettyPrint);
    }

    @Override
    public Annotate setQuotaUser(java.lang.String quotaUser) {
      return (Annotate) super.setQuotaUser(quotaUser);
    }

    @Override
    public Annotate setUploadType(java.lang.String uploadType) {
      return (Annotate) super.setUploadType(uploadType);
    }

    @Override
    public Annotate setUploadProtocol(java.lang.String uploadProtocol) {
      return (Annotate) super.setUploadProtocol(uploadProtocol);
    }

    @Override
    public Annotate set(String parameterName, Object value) {
      return (Annotate) super.set(parameterName, value);
    }
  }
}
/**
* Builder for {@link CloudVideoIntelligence}.
*
* <p>
* Implementation is not thread-safe.
* </p>
*
* @since 1.3.0
*/
public static final class Builder extends com.google.api.client.googleapis.services.json.AbstractGoogleJsonClient.Builder {

  /**
   * Returns an instance of a new builder.
   *
   * @param transport HTTP transport, which should normally be:
   *        <ul>
   *        <li>Google App Engine:
   *        {@code com.google.api.client.extensions.appengine.http.UrlFetchTransport}</li>
   *        <li>Android: {@code newCompatibleTransport} from
   *        {@code com.google.api.client.extensions.android.http.AndroidHttp}</li>
   *        <li>Java: {@link com.google.api.client.googleapis.javanet.GoogleNetHttpTransport#newTrustedTransport()}
   *        </li>
   *        </ul>
   * @param jsonFactory JSON factory, which may be:
   *        <ul>
   *        <li>Jackson: {@code com.google.api.client.json.jackson2.JacksonFactory}</li>
   *        <li>Google GSON: {@code com.google.api.client.json.gson.GsonFactory}</li>
   *        <li>Android Honeycomb or higher:
   *        {@code com.google.api.client.extensions.android.json.AndroidJsonFactory}</li>
   *        </ul>
   * @param httpRequestInitializer HTTP request initializer or {@code null} for none
   * @since 1.7
   */
  public Builder(com.google.api.client.http.HttpTransport transport, com.google.api.client.json.JsonFactory jsonFactory,
      com.google.api.client.http.HttpRequestInitializer httpRequestInitializer) {
    super(
        transport,
        jsonFactory,
        DEFAULT_ROOT_URL,
        DEFAULT_SERVICE_PATH,
        httpRequestInitializer,
        false);  // NOTE(review): boolean super-arg presumed legacy-data-wrapper flag; confirm vs AbstractGoogleJsonClient.Builder
    setBatchPath(DEFAULT_BATCH_PATH);
  }

  /** Builds a new instance of {@link CloudVideoIntelligence}. */
  @Override
  public CloudVideoIntelligence build() {
    return new CloudVideoIntelligence(this);
  }

  // Covariant overrides below narrow the return type to Builder so configuration calls can be chained.
  @Override
  public Builder setRootUrl(String rootUrl) {
    return (Builder) super.setRootUrl(rootUrl);
  }

  @Override
  public Builder setServicePath(String servicePath) {
    return (Builder) super.setServicePath(servicePath);
  }

  @Override
  public Builder setBatchPath(String batchPath) {
    return (Builder) super.setBatchPath(batchPath);
  }

  @Override
  public Builder setHttpRequestInitializer(com.google.api.client.http.HttpRequestInitializer httpRequestInitializer) {
    return (Builder) super.setHttpRequestInitializer(httpRequestInitializer);
  }

  @Override
  public Builder setApplicationName(String applicationName) {
    return (Builder) super.setApplicationName(applicationName);
  }

  @Override
  public Builder setSuppressPatternChecks(boolean suppressPatternChecks) {
    return (Builder) super.setSuppressPatternChecks(suppressPatternChecks);
  }

  @Override
  public Builder setSuppressRequiredParameterChecks(boolean suppressRequiredParameterChecks) {
    return (Builder) super.setSuppressRequiredParameterChecks(suppressRequiredParameterChecks);
  }

  @Override
  public Builder setSuppressAllChecks(boolean suppressAllChecks) {
    return (Builder) super.setSuppressAllChecks(suppressAllChecks);
  }

  /**
   * Set the {@link CloudVideoIntelligenceRequestInitializer}.
   *
   * @since 1.12
   */
  public Builder setCloudVideoIntelligenceRequestInitializer(
      CloudVideoIntelligenceRequestInitializer cloudvideointelligenceRequestInitializer) {
    // Typed convenience wrapper over the generic request-initializer setter.
    return (Builder) super.setGoogleClientRequestInitializer(cloudvideointelligenceRequestInitializer);
  }

  @Override
  public Builder setGoogleClientRequestInitializer(
      com.google.api.client.googleapis.services.GoogleClientRequestInitializer googleClientRequestInitializer) {
    return (Builder) super.setGoogleClientRequestInitializer(googleClientRequestInitializer);
  }
}
}
|
googleapis/google-api-java-client-services | 36,824 | clients/google-api-services-videointelligence/v1/1.28.0/com/google/api/services/videointelligence/v1/CloudVideoIntelligence.java | /*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.videointelligence.v1;
/**
* Service definition for CloudVideoIntelligence (v1).
*
* <p>
* Detects objects, explicit content, and scene changes in videos. It also specifies the region for annotation and transcribes speech to text. Supports both asynchronous API and streaming API.
* </p>
*
* <p>
* For more information about this service, see the
* <a href="https://cloud.google.com/video-intelligence/docs/" target="_blank">API Documentation</a>
* </p>
*
* <p>
* This service uses {@link CloudVideoIntelligenceRequestInitializer} to initialize global parameters via its
* {@link Builder}.
* </p>
*
* @since 1.3
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public class CloudVideoIntelligence extends com.google.api.client.googleapis.services.json.AbstractGoogleJsonClient {
// Note: Leave this static initializer at the top of the file.
static {
  // Fail fast at class-load time if the runtime google-api-client is older than
  // the minimum (1.15) this generated library was built against.
  com.google.api.client.util.Preconditions.checkState(
      com.google.api.client.googleapis.GoogleUtils.MAJOR_VERSION == 1 &&
      com.google.api.client.googleapis.GoogleUtils.MINOR_VERSION >= 15,
      "You are currently running with version %s of google-api-client. " +
      "You need at least version 1.15 of google-api-client to run version " +
      "1.28.0 of the Cloud Video Intelligence API library.", com.google.api.client.googleapis.GoogleUtils.VERSION);
}
/**
 * The default encoded root URL of the service. This is determined when the library is generated
 * and normally should not be changed.
 *
 * @since 1.7
 */
public static final String DEFAULT_ROOT_URL = "https://videointelligence.googleapis.com/";

/**
 * The default encoded service path of the service. This is determined when the library is
 * generated and normally should not be changed.
 *
 * @since 1.7
 */
// Empty: request paths are rooted directly at DEFAULT_ROOT_URL.
public static final String DEFAULT_SERVICE_PATH = "";

/**
 * The default encoded batch path of the service. This is determined when the library is
 * generated and normally should not be changed.
 *
 * @since 1.23
 */
public static final String DEFAULT_BATCH_PATH = "batch";

/**
 * The default encoded base URL of the service. This is determined when the library is generated
 * and normally should not be changed.
 */
public static final String DEFAULT_BASE_URL = DEFAULT_ROOT_URL + DEFAULT_SERVICE_PATH;
/**
* Constructor.
*
* <p>
* Use {@link Builder} if you need to specify any of the optional parameters.
* </p>
*
* @param transport HTTP transport, which should normally be:
* <ul>
* <li>Google App Engine:
* {@code com.google.api.client.extensions.appengine.http.UrlFetchTransport}</li>
* <li>Android: {@code newCompatibleTransport} from
* {@code com.google.api.client.extensions.android.http.AndroidHttp}</li>
* <li>Java: {@link com.google.api.client.googleapis.javanet.GoogleNetHttpTransport#newTrustedTransport()}
* </li>
* </ul>
* @param jsonFactory JSON factory, which may be:
* <ul>
* <li>Jackson: {@code com.google.api.client.json.jackson2.JacksonFactory}</li>
* <li>Google GSON: {@code com.google.api.client.json.gson.GsonFactory}</li>
* <li>Android Honeycomb or higher:
* {@code com.google.api.client.extensions.android.json.AndroidJsonFactory}</li>
* </ul>
* @param httpRequestInitializer HTTP request initializer or {@code null} for none
* @since 1.7
*/
public CloudVideoIntelligence(com.google.api.client.http.HttpTransport transport, com.google.api.client.json.JsonFactory jsonFactory,
    com.google.api.client.http.HttpRequestInitializer httpRequestInitializer) {
  // Delegates to the Builder-based constructor with all-default options.
  this(new Builder(transport, jsonFactory, httpRequestInitializer));
}
/**
 * Constructs the client from a configured {@link Builder}.
 *
 * @param builder builder
 */
CloudVideoIntelligence(Builder builder) {
  super(builder);
}
@Override
protected void initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest<?> httpClientRequest) throws java.io.IOException {
  // Applies the client's request initializer chain to every newly built request.
  super.initialize(httpClientRequest);
}
/**
* An accessor for creating requests from the Operations collection.
*
* <p>The typical use is:</p>
* <pre>
* {@code CloudVideoIntelligence videointelligence = new CloudVideoIntelligence(...);}
* {@code CloudVideoIntelligence.Operations.List request = videointelligence.operations().list(parameters ...)}
* </pre>
*
* @return the resource collection
*/
public Operations operations() {
  // A new accessor instance per call; Operations holds no state, so this is cheap.
  return new Operations();
}
/**
* The "operations" collection of methods.
*/
public class Operations {
/**
* Starts asynchronous cancellation on a long-running operation. The server makes a best effort to
* cancel the operation, but success is not guaranteed. If the server doesn't support this method,
* it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other
* methods to check whether the cancellation succeeded or whether the operation completed despite
* cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an
* operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to
* `Code.CANCELLED`.
*
* Create a request for the method "operations.cancel".
*
* This request holds the parameters needed by the videointelligence server. After setting any
* optional parameters, call the {@link Cancel#execute()} method to invoke the remote operation.
*
* @param name The name of the operation resource to be cancelled.
* @param content the {@link com.google.api.services.videointelligence.v1.model.GoogleLongrunningCancelOperationRequest}
* @return the request
*/
/** Builds and initializes an operations.cancel request for the given operation name and body. */
public Cancel cancel(java.lang.String name, com.google.api.services.videointelligence.v1.model.GoogleLongrunningCancelOperationRequest content) throws java.io.IOException {
  Cancel request = new Cancel(name, content);
  initialize(request);
  return request;
}
public class Cancel extends CloudVideoIntelligenceRequest<com.google.api.services.videointelligence.v1.model.GoogleProtobufEmpty> {

  private static final String REST_PATH = "v1/operations/{+name}:cancel";

  // Client-side validation pattern for the {+name} path parameter.
  private final java.util.regex.Pattern NAME_PATTERN =
      java.util.regex.Pattern.compile("^[^/]+$");

  /**
   * Starts asynchronous cancellation on a long-running operation. The server makes a best effort
   * to cancel the operation, but success is not guaranteed. If the server doesn't support this
   * method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or
   * other methods to check whether the cancellation succeeded or whether the operation completed
   * despite cancellation. On successful cancellation, the operation is not deleted; instead, it
   * becomes an operation with an Operation.error value with a google.rpc.Status.code of 1,
   * corresponding to `Code.CANCELLED`.
   *
   * Create a request for the method "operations.cancel".
   *
   * This request holds the parameters needed by the videointelligence server. After setting
   * any optional parameters, call the {@link Cancel#execute()} method to invoke the remote
   * operation. <p> {@link
   * Cancel#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)} must
   * be called to initialize this instance immediately after invoking the constructor. </p>
   *
   * @param name The name of the operation resource to be cancelled.
   * @param content the {@link com.google.api.services.videointelligence.v1.model.GoogleLongrunningCancelOperationRequest}
   * @since 1.13
   */
  protected Cancel(java.lang.String name, com.google.api.services.videointelligence.v1.model.GoogleLongrunningCancelOperationRequest content) {
    super(CloudVideoIntelligence.this, "POST", REST_PATH, content, com.google.api.services.videointelligence.v1.model.GoogleProtobufEmpty.class);
    this.name = com.google.api.client.util.Preconditions.checkNotNull(name, "Required parameter name must be specified.");
    if (!getSuppressPatternChecks()) {
      com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(),
          "Parameter name must conform to the pattern " +
          "^[^/]+$");
    }
  }

  // Covariant overrides below narrow the return type to Cancel so setter calls can be chained.
  @Override
  public Cancel set$Xgafv(java.lang.String $Xgafv) {
    return (Cancel) super.set$Xgafv($Xgafv);
  }

  @Override
  public Cancel setAccessToken(java.lang.String accessToken) {
    return (Cancel) super.setAccessToken(accessToken);
  }

  @Override
  public Cancel setAlt(java.lang.String alt) {
    return (Cancel) super.setAlt(alt);
  }

  @Override
  public Cancel setCallback(java.lang.String callback) {
    return (Cancel) super.setCallback(callback);
  }

  @Override
  public Cancel setFields(java.lang.String fields) {
    return (Cancel) super.setFields(fields);
  }

  @Override
  public Cancel setKey(java.lang.String key) {
    return (Cancel) super.setKey(key);
  }

  @Override
  public Cancel setOauthToken(java.lang.String oauthToken) {
    return (Cancel) super.setOauthToken(oauthToken);
  }

  @Override
  public Cancel setPrettyPrint(java.lang.Boolean prettyPrint) {
    return (Cancel) super.setPrettyPrint(prettyPrint);
  }

  @Override
  public Cancel setQuotaUser(java.lang.String quotaUser) {
    return (Cancel) super.setQuotaUser(quotaUser);
  }

  @Override
  public Cancel setUploadType(java.lang.String uploadType) {
    return (Cancel) super.setUploadType(uploadType);
  }

  @Override
  public Cancel setUploadProtocol(java.lang.String uploadProtocol) {
    return (Cancel) super.setUploadProtocol(uploadProtocol);
  }

  /** The name of the operation resource to be cancelled. */
  @com.google.api.client.util.Key
  private java.lang.String name;

  /** The name of the operation resource to be cancelled.
   */
  public java.lang.String getName() {
    return name;
  }

  /** The name of the operation resource to be cancelled. */
  public Cancel setName(java.lang.String name) {
    if (!getSuppressPatternChecks()) {
      com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(),
          "Parameter name must conform to the pattern " +
          "^[^/]+$");
    }
    this.name = name;
    return this;
  }

  @Override
  public Cancel set(String parameterName, Object value) {
    return (Cancel) super.set(parameterName, value);
  }
}
/**
 * Deletes a long-running operation. This method indicates that the client is no longer interested
 * in the operation result. It does not cancel the operation. If the server doesn't support this
 * method, it returns `google.rpc.Code.UNIMPLEMENTED`.
 *
 * Create a request for the method "operations.delete".
 *
 * This request holds the parameters needed by the videointelligence server. After setting any
 * optional parameters, call the {@link Delete#execute()} method to invoke the remote operation.
 *
 * @param name The name of the operation resource to be deleted.
 * @return the request
 */
public Delete delete(java.lang.String name) throws java.io.IOException {
Delete result = new Delete(name);
// initialize(...) applies client-wide request settings before the request is returned.
initialize(result);
return result;
}
// Generated request class for operations.delete (DELETE v1/operations/{+name}).
public class Delete extends CloudVideoIntelligenceRequest<com.google.api.services.videointelligence.v1.model.GoogleProtobufEmpty> {
private static final String REST_PATH = "v1/operations/{+name}";
private final java.util.regex.Pattern NAME_PATTERN =
java.util.regex.Pattern.compile("^[^/]+$");
/**
 * Deletes a long-running operation. This method indicates that the client is no longer interested
 * in the operation result. It does not cancel the operation. If the server doesn't support this
 * method, it returns `google.rpc.Code.UNIMPLEMENTED`.
 *
 * Create a request for the method "operations.delete".
 *
 * This request holds the parameters needed by the videointelligence server. After setting
 * any optional parameters, call the {@link Delete#execute()} method to invoke the remote
 * operation. <p> {@link
 * Delete#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)} must
 * be called to initialize this instance immediately after invoking the constructor. </p>
 *
 * @param name The name of the operation resource to be deleted.
 * @since 1.13
 */
protected Delete(java.lang.String name) {
super(CloudVideoIntelligence.this, "DELETE", REST_PATH, null, com.google.api.services.videointelligence.v1.model.GoogleProtobufEmpty.class);
this.name = com.google.api.client.util.Preconditions.checkNotNull(name, "Required parameter name must be specified.");
if (!getSuppressPatternChecks()) {
com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(),
"Parameter name must conform to the pattern " +
"^[^/]+$");
}
}
// Fluent overrides: narrow the return type to Delete for chaining standard parameters.
@Override
public Delete set$Xgafv(java.lang.String $Xgafv) {
return (Delete) super.set$Xgafv($Xgafv);
}
@Override
public Delete setAccessToken(java.lang.String accessToken) {
return (Delete) super.setAccessToken(accessToken);
}
@Override
public Delete setAlt(java.lang.String alt) {
return (Delete) super.setAlt(alt);
}
@Override
public Delete setCallback(java.lang.String callback) {
return (Delete) super.setCallback(callback);
}
@Override
public Delete setFields(java.lang.String fields) {
return (Delete) super.setFields(fields);
}
@Override
public Delete setKey(java.lang.String key) {
return (Delete) super.setKey(key);
}
@Override
public Delete setOauthToken(java.lang.String oauthToken) {
return (Delete) super.setOauthToken(oauthToken);
}
@Override
public Delete setPrettyPrint(java.lang.Boolean prettyPrint) {
return (Delete) super.setPrettyPrint(prettyPrint);
}
@Override
public Delete setQuotaUser(java.lang.String quotaUser) {
return (Delete) super.setQuotaUser(quotaUser);
}
@Override
public Delete setUploadType(java.lang.String uploadType) {
return (Delete) super.setUploadType(uploadType);
}
@Override
public Delete setUploadProtocol(java.lang.String uploadProtocol) {
return (Delete) super.setUploadProtocol(uploadProtocol);
}
/** The name of the operation resource to be deleted. */
@com.google.api.client.util.Key
private java.lang.String name;
/** The name of the operation resource to be deleted.
*/
public java.lang.String getName() {
return name;
}
/** The name of the operation resource to be deleted. */
public Delete setName(java.lang.String name) {
if (!getSuppressPatternChecks()) {
com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(),
"Parameter name must conform to the pattern " +
"^[^/]+$");
}
this.name = name;
return this;
}
@Override
public Delete set(String parameterName, Object value) {
return (Delete) super.set(parameterName, value);
}
}
/**
 * Gets the latest state of a long-running operation. Clients can use this method to poll the
 * operation result at intervals as recommended by the API service.
 *
 * Create a request for the method "operations.get".
 *
 * This request holds the parameters needed by the videointelligence server. After setting any
 * optional parameters, call the {@link Get#execute()} method to invoke the remote operation.
 *
 * @param name The name of the operation resource.
 * @return the request
 */
public Get get(java.lang.String name) throws java.io.IOException {
Get result = new Get(name);
// initialize(...) applies client-wide request settings before the request is returned.
initialize(result);
return result;
}
// Generated request class for operations.get (GET v1/operations/{+name}); supports HEAD probing.
public class Get extends CloudVideoIntelligenceRequest<com.google.api.services.videointelligence.v1.model.GoogleLongrunningOperation> {
private static final String REST_PATH = "v1/operations/{+name}";
private final java.util.regex.Pattern NAME_PATTERN =
java.util.regex.Pattern.compile("^[^/]+$");
/**
 * Gets the latest state of a long-running operation. Clients can use this method to poll the
 * operation result at intervals as recommended by the API service.
 *
 * Create a request for the method "operations.get".
 *
 * This request holds the parameters needed by the videointelligence server. After setting
 * any optional parameters, call the {@link Get#execute()} method to invoke the remote operation.
 * <p> {@link
 * Get#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)} must be
 * called to initialize this instance immediately after invoking the constructor. </p>
 *
 * @param name The name of the operation resource.
 * @since 1.13
 */
protected Get(java.lang.String name) {
super(CloudVideoIntelligence.this, "GET", REST_PATH, null, com.google.api.services.videointelligence.v1.model.GoogleLongrunningOperation.class);
this.name = com.google.api.client.util.Preconditions.checkNotNull(name, "Required parameter name must be specified.");
if (!getSuppressPatternChecks()) {
com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(),
"Parameter name must conform to the pattern " +
"^[^/]+$");
}
}
// HEAD variants are exposed for GET requests only (no body), per the generator's convention.
@Override
public com.google.api.client.http.HttpResponse executeUsingHead() throws java.io.IOException {
return super.executeUsingHead();
}
@Override
public com.google.api.client.http.HttpRequest buildHttpRequestUsingHead() throws java.io.IOException {
return super.buildHttpRequestUsingHead();
}
// Fluent overrides: narrow the return type to Get for chaining standard parameters.
@Override
public Get set$Xgafv(java.lang.String $Xgafv) {
return (Get) super.set$Xgafv($Xgafv);
}
@Override
public Get setAccessToken(java.lang.String accessToken) {
return (Get) super.setAccessToken(accessToken);
}
@Override
public Get setAlt(java.lang.String alt) {
return (Get) super.setAlt(alt);
}
@Override
public Get setCallback(java.lang.String callback) {
return (Get) super.setCallback(callback);
}
@Override
public Get setFields(java.lang.String fields) {
return (Get) super.setFields(fields);
}
@Override
public Get setKey(java.lang.String key) {
return (Get) super.setKey(key);
}
@Override
public Get setOauthToken(java.lang.String oauthToken) {
return (Get) super.setOauthToken(oauthToken);
}
@Override
public Get setPrettyPrint(java.lang.Boolean prettyPrint) {
return (Get) super.setPrettyPrint(prettyPrint);
}
@Override
public Get setQuotaUser(java.lang.String quotaUser) {
return (Get) super.setQuotaUser(quotaUser);
}
@Override
public Get setUploadType(java.lang.String uploadType) {
return (Get) super.setUploadType(uploadType);
}
@Override
public Get setUploadProtocol(java.lang.String uploadProtocol) {
return (Get) super.setUploadProtocol(uploadProtocol);
}
/** The name of the operation resource. */
@com.google.api.client.util.Key
private java.lang.String name;
/** The name of the operation resource.
*/
public java.lang.String getName() {
return name;
}
/** The name of the operation resource. */
public Get setName(java.lang.String name) {
if (!getSuppressPatternChecks()) {
com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(),
"Parameter name must conform to the pattern " +
"^[^/]+$");
}
this.name = name;
return this;
}
@Override
public Get set(String parameterName, Object value) {
return (Get) super.set(parameterName, value);
}
}
/**
 * Lists operations that match the specified filter in the request. If the server doesn't support
 * this method, it returns `UNIMPLEMENTED`.
 *
 * NOTE: the `name` binding allows API services to override the binding to use different resource
 * name schemes, such as `users/operations`. To override the binding, API services can add a binding
 * such as `"/v1/{name=users}/operations"` to their service configuration. For backwards
 * compatibility, the default name includes the operations collection id, however overriding users
 * must ensure the name binding is the parent resource, without the operations collection id.
 *
 * Create a request for the method "operations.list".
 *
 * This request holds the parameters needed by the videointelligence server. After setting any
 * optional parameters, call the {@link List#execute()} method to invoke the remote operation.
 *
 * @return the request
 */
public List list() throws java.io.IOException {
List result = new List();
// initialize(...) applies client-wide request settings before the request is returned.
initialize(result);
return result;
}
// Generated request class for operations.list (GET v1/operations). Note: this nested `List`
// intentionally shadows java.util.List within its scope — a generator convention, not a bug.
public class List extends CloudVideoIntelligenceRequest<com.google.api.services.videointelligence.v1.model.GoogleLongrunningListOperationsResponse> {
private static final String REST_PATH = "v1/operations";
/**
 * Lists operations that match the specified filter in the request. If the server doesn't support
 * this method, it returns `UNIMPLEMENTED`.
 *
 * NOTE: the `name` binding allows API services to override the binding to use different resource
 * name schemes, such as `users/operations`. To override the binding, API services can add a
 * binding such as `"/v1/{name=users}/operations"` to their service configuration. For backwards
 * compatibility, the default name includes the operations collection id, however overriding users
 * must ensure the name binding is the parent resource, without the operations collection id.
 *
 * Create a request for the method "operations.list".
 *
 * This request holds the parameters needed by the videointelligence server. After setting
 * any optional parameters, call the {@link List#execute()} method to invoke the remote operation.
 * <p> {@link
 * List#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)} must be
 * called to initialize this instance immediately after invoking the constructor. </p>
 *
 * @since 1.13
 */
protected List() {
super(CloudVideoIntelligence.this, "GET", REST_PATH, null, com.google.api.services.videointelligence.v1.model.GoogleLongrunningListOperationsResponse.class);
}
// HEAD variants are exposed for GET requests only (no body), per the generator's convention.
@Override
public com.google.api.client.http.HttpResponse executeUsingHead() throws java.io.IOException {
return super.executeUsingHead();
}
@Override
public com.google.api.client.http.HttpRequest buildHttpRequestUsingHead() throws java.io.IOException {
return super.buildHttpRequestUsingHead();
}
// Fluent overrides: narrow the return type to List for chaining standard parameters.
@Override
public List set$Xgafv(java.lang.String $Xgafv) {
return (List) super.set$Xgafv($Xgafv);
}
@Override
public List setAccessToken(java.lang.String accessToken) {
return (List) super.setAccessToken(accessToken);
}
@Override
public List setAlt(java.lang.String alt) {
return (List) super.setAlt(alt);
}
@Override
public List setCallback(java.lang.String callback) {
return (List) super.setCallback(callback);
}
@Override
public List setFields(java.lang.String fields) {
return (List) super.setFields(fields);
}
@Override
public List setKey(java.lang.String key) {
return (List) super.setKey(key);
}
@Override
public List setOauthToken(java.lang.String oauthToken) {
return (List) super.setOauthToken(oauthToken);
}
@Override
public List setPrettyPrint(java.lang.Boolean prettyPrint) {
return (List) super.setPrettyPrint(prettyPrint);
}
@Override
public List setQuotaUser(java.lang.String quotaUser) {
return (List) super.setQuotaUser(quotaUser);
}
@Override
public List setUploadType(java.lang.String uploadType) {
return (List) super.setUploadType(uploadType);
}
@Override
public List setUploadProtocol(java.lang.String uploadProtocol) {
return (List) super.setUploadProtocol(uploadProtocol);
}
// Optional query parameters for the standard Operations.List pagination contract.
/** The standard list filter. */
@com.google.api.client.util.Key
private java.lang.String filter;
/** The standard list filter.
*/
public java.lang.String getFilter() {
return filter;
}
/** The standard list filter. */
public List setFilter(java.lang.String filter) {
this.filter = filter;
return this;
}
/** The name of the operation's parent resource. */
@com.google.api.client.util.Key
private java.lang.String name;
/** The name of the operation's parent resource.
*/
public java.lang.String getName() {
return name;
}
/** The name of the operation's parent resource. */
public List setName(java.lang.String name) {
this.name = name;
return this;
}
/** The standard list page size. */
@com.google.api.client.util.Key
private java.lang.Integer pageSize;
/** The standard list page size.
*/
public java.lang.Integer getPageSize() {
return pageSize;
}
/** The standard list page size. */
public List setPageSize(java.lang.Integer pageSize) {
this.pageSize = pageSize;
return this;
}
/** The standard list page token. */
@com.google.api.client.util.Key
private java.lang.String pageToken;
/** The standard list page token.
*/
public java.lang.String getPageToken() {
return pageToken;
}
/** The standard list page token. */
public List setPageToken(java.lang.String pageToken) {
this.pageToken = pageToken;
return this;
}
@Override
public List set(String parameterName, Object value) {
return (List) super.set(parameterName, value);
}
}
}
/**
 * An accessor for creating requests from the Videos collection.
 *
 * <p>The typical use is:</p>
 * <pre>
 *   {@code CloudVideoIntelligence videointelligence = new CloudVideoIntelligence(...);}
 *   {@code CloudVideoIntelligence.Videos.List request = videointelligence.videos().list(parameters ...)}
 * </pre>
 *
 * @return the resource collection
 */
public Videos videos() {
// A fresh accessor per call; Videos is stateless, so this is cheap.
return new Videos();
}
/**
 * The "videos" collection of methods.
 */
public class Videos {
/**
 * Performs asynchronous video annotation. Progress and results can be retrieved through the
 * `google.longrunning.Operations` interface. `Operation.metadata` contains `AnnotateVideoProgress`
 * (progress). `Operation.response` contains `AnnotateVideoResponse` (results).
 *
 * Create a request for the method "videos.annotate".
 *
 * This request holds the parameters needed by the videointelligence server. After setting any
 * optional parameters, call the {@link Annotate#execute()} method to invoke the remote operation.
 *
 * @param content the {@link com.google.api.services.videointelligence.v1.model.GoogleCloudVideointelligenceV1AnnotateVideoRequest}
 * @return the request
 */
public Annotate annotate(com.google.api.services.videointelligence.v1.model.GoogleCloudVideointelligenceV1AnnotateVideoRequest content) throws java.io.IOException {
Annotate result = new Annotate(content);
// initialize(...) applies client-wide request settings before the request is returned.
initialize(result);
return result;
}
// Generated request class for videos.annotate (POST v1/videos:annotate).
public class Annotate extends CloudVideoIntelligenceRequest<com.google.api.services.videointelligence.v1.model.GoogleLongrunningOperation> {
private static final String REST_PATH = "v1/videos:annotate";
/**
 * Performs asynchronous video annotation. Progress and results can be retrieved through the
 * `google.longrunning.Operations` interface. `Operation.metadata` contains
 * `AnnotateVideoProgress` (progress). `Operation.response` contains `AnnotateVideoResponse`
 * (results).
 *
 * Create a request for the method "videos.annotate".
 *
 * This request holds the parameters needed by the videointelligence server. After setting
 * any optional parameters, call the {@link Annotate#execute()} method to invoke the remote
 * operation. <p> {@link
 * Annotate#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)}
 * must be called to initialize this instance immediately after invoking the constructor. </p>
 *
 * @param content the {@link com.google.api.services.videointelligence.v1.model.GoogleCloudVideointelligenceV1AnnotateVideoRequest}
 * @since 1.13
 */
protected Annotate(com.google.api.services.videointelligence.v1.model.GoogleCloudVideointelligenceV1AnnotateVideoRequest content) {
super(CloudVideoIntelligence.this, "POST", REST_PATH, content, com.google.api.services.videointelligence.v1.model.GoogleLongrunningOperation.class);
}
// Fluent overrides: narrow the return type to Annotate for chaining standard parameters.
@Override
public Annotate set$Xgafv(java.lang.String $Xgafv) {
return (Annotate) super.set$Xgafv($Xgafv);
}
@Override
public Annotate setAccessToken(java.lang.String accessToken) {
return (Annotate) super.setAccessToken(accessToken);
}
@Override
public Annotate setAlt(java.lang.String alt) {
return (Annotate) super.setAlt(alt);
}
@Override
public Annotate setCallback(java.lang.String callback) {
return (Annotate) super.setCallback(callback);
}
@Override
public Annotate setFields(java.lang.String fields) {
return (Annotate) super.setFields(fields);
}
@Override
public Annotate setKey(java.lang.String key) {
return (Annotate) super.setKey(key);
}
@Override
public Annotate setOauthToken(java.lang.String oauthToken) {
return (Annotate) super.setOauthToken(oauthToken);
}
@Override
public Annotate setPrettyPrint(java.lang.Boolean prettyPrint) {
return (Annotate) super.setPrettyPrint(prettyPrint);
}
@Override
public Annotate setQuotaUser(java.lang.String quotaUser) {
return (Annotate) super.setQuotaUser(quotaUser);
}
@Override
public Annotate setUploadType(java.lang.String uploadType) {
return (Annotate) super.setUploadType(uploadType);
}
@Override
public Annotate setUploadProtocol(java.lang.String uploadProtocol) {
return (Annotate) super.setUploadProtocol(uploadProtocol);
}
@Override
public Annotate set(String parameterName, Object value) {
return (Annotate) super.set(parameterName, value);
}
}
}
/**
 * Builder for {@link CloudVideoIntelligence}.
 *
 * <p>
 * Implementation is not thread-safe.
 * </p>
 *
 * @since 1.3.0
 */
public static final class Builder extends com.google.api.client.googleapis.services.json.AbstractGoogleJsonClient.Builder {
/**
 * Returns an instance of a new builder.
 *
 * @param transport HTTP transport, which should normally be:
 *        <ul>
 *        <li>Google App Engine:
 *        {@code com.google.api.client.extensions.appengine.http.UrlFetchTransport}</li>
 *        <li>Android: {@code newCompatibleTransport} from
 *        {@code com.google.api.client.extensions.android.http.AndroidHttp}</li>
 *        <li>Java: {@link com.google.api.client.googleapis.javanet.GoogleNetHttpTransport#newTrustedTransport()}
 *        </li>
 *        </ul>
 * @param jsonFactory JSON factory, which may be:
 *        <ul>
 *        <li>Jackson: {@code com.google.api.client.json.jackson2.JacksonFactory}</li>
 *        <li>Google GSON: {@code com.google.api.client.json.gson.GsonFactory}</li>
 *        <li>Android Honeycomb or higher:
 *        {@code com.google.api.client.extensions.android.json.AndroidJsonFactory}</li>
 *        </ul>
 * @param httpRequestInitializer HTTP request initializer or {@code null} for none
 * @since 1.7
 */
public Builder(com.google.api.client.http.HttpTransport transport, com.google.api.client.json.JsonFactory jsonFactory,
com.google.api.client.http.HttpRequestInitializer httpRequestInitializer) {
super(
transport,
jsonFactory,
DEFAULT_ROOT_URL,
DEFAULT_SERVICE_PATH,
httpRequestInitializer,
false);
setBatchPath(DEFAULT_BATCH_PATH);
}
/** Builds a new instance of {@link CloudVideoIntelligence}. */
@Override
public CloudVideoIntelligence build() {
return new CloudVideoIntelligence(this);
}
// Fluent overrides: narrow the return type to Builder so configuration calls can be chained.
@Override
public Builder setRootUrl(String rootUrl) {
return (Builder) super.setRootUrl(rootUrl);
}
@Override
public Builder setServicePath(String servicePath) {
return (Builder) super.setServicePath(servicePath);
}
@Override
public Builder setBatchPath(String batchPath) {
return (Builder) super.setBatchPath(batchPath);
}
@Override
public Builder setHttpRequestInitializer(com.google.api.client.http.HttpRequestInitializer httpRequestInitializer) {
return (Builder) super.setHttpRequestInitializer(httpRequestInitializer);
}
@Override
public Builder setApplicationName(String applicationName) {
return (Builder) super.setApplicationName(applicationName);
}
@Override
public Builder setSuppressPatternChecks(boolean suppressPatternChecks) {
return (Builder) super.setSuppressPatternChecks(suppressPatternChecks);
}
@Override
public Builder setSuppressRequiredParameterChecks(boolean suppressRequiredParameterChecks) {
return (Builder) super.setSuppressRequiredParameterChecks(suppressRequiredParameterChecks);
}
@Override
public Builder setSuppressAllChecks(boolean suppressAllChecks) {
return (Builder) super.setSuppressAllChecks(suppressAllChecks);
}
/**
 * Set the {@link CloudVideoIntelligenceRequestInitializer}.
 *
 * @since 1.12
 */
public Builder setCloudVideoIntelligenceRequestInitializer(
CloudVideoIntelligenceRequestInitializer cloudvideointelligenceRequestInitializer) {
return (Builder) super.setGoogleClientRequestInitializer(cloudvideointelligenceRequestInitializer);
}
@Override
public Builder setGoogleClientRequestInitializer(
com.google.api.client.googleapis.services.GoogleClientRequestInitializer googleClientRequestInitializer) {
return (Builder) super.setGoogleClientRequestInitializer(googleClientRequestInitializer);
}
}
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/contentwarehouse/v1/ruleset_service_request.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.contentwarehouse.v1;
/**
*
*
* <pre>
* Response message for RuleSetService.ListRuleSets.
* </pre>
*
* Protobuf type {@code google.cloud.contentwarehouse.v1.ListRuleSetsResponse}
*/
public final class ListRuleSetsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.contentwarehouse.v1.ListRuleSetsResponse)
ListRuleSetsResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListRuleSetsResponse.newBuilder() to construct.
private ListRuleSetsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
// Default instance constructor: empty repeated field and empty page token.
private ListRuleSetsResponse() {
ruleSets_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListRuleSetsResponse();
}
// Descriptor/accessor-table plumbing generated from ruleset_service_request.proto.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.contentwarehouse.v1.RuleSetServiceRequestProto
.internal_static_google_cloud_contentwarehouse_v1_ListRuleSetsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.contentwarehouse.v1.RuleSetServiceRequestProto
.internal_static_google_cloud_contentwarehouse_v1_ListRuleSetsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.contentwarehouse.v1.ListRuleSetsResponse.class,
com.google.cloud.contentwarehouse.v1.ListRuleSetsResponse.Builder.class);
}
// Field 1: repeated RuleSet rule_sets — stored as an immutable (post-build) list.
public static final int RULE_SETS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.contentwarehouse.v1.RuleSet> ruleSets_;
/**
 *
 *
 * <pre>
 * The rule sets from the specified parent.
 * </pre>
 *
 * <code>repeated .google.cloud.contentwarehouse.v1.RuleSet rule_sets = 1;</code>
 */
@java.lang.Override
public java.util.List<com.google.cloud.contentwarehouse.v1.RuleSet> getRuleSetsList() {
return ruleSets_;
}
/**
 *
 *
 * <pre>
 * The rule sets from the specified parent.
 * </pre>
 *
 * <code>repeated .google.cloud.contentwarehouse.v1.RuleSet rule_sets = 1;</code>
 */
@java.lang.Override
public java.util.List<? extends com.google.cloud.contentwarehouse.v1.RuleSetOrBuilder>
getRuleSetsOrBuilderList() {
return ruleSets_;
}
/**
 *
 *
 * <pre>
 * The rule sets from the specified parent.
 * </pre>
 *
 * <code>repeated .google.cloud.contentwarehouse.v1.RuleSet rule_sets = 1;</code>
 */
@java.lang.Override
public int getRuleSetsCount() {
return ruleSets_.size();
}
/**
 *
 *
 * <pre>
 * The rule sets from the specified parent.
 * </pre>
 *
 * <code>repeated .google.cloud.contentwarehouse.v1.RuleSet rule_sets = 1;</code>
 */
@java.lang.Override
public com.google.cloud.contentwarehouse.v1.RuleSet getRuleSets(int index) {
return ruleSets_.get(index);
}
/**
 *
 *
 * <pre>
 * The rule sets from the specified parent.
 * </pre>
 *
 * <code>repeated .google.cloud.contentwarehouse.v1.RuleSet rule_sets = 1;</code>
 */
@java.lang.Override
public com.google.cloud.contentwarehouse.v1.RuleSetOrBuilder getRuleSetsOrBuilder(int index) {
return ruleSets_.get(index);
}
// Field 2: string next_page_token — lazily converted between ByteString and String, cached in place.
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
 *
 *
 * <pre>
 * A token, which can be sent as `page_token` to retrieve the next page.
 * If this field is omitted, there are no subsequent pages.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return The nextPageToken.
 */
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
// Decode once from the wire representation and cache the String form.
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
 *
 *
 * <pre>
 * A token, which can be sent as `page_token` to retrieve the next page.
 * If this field is omitted, there are no subsequent pages.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return The bytes for nextPageToken.
 */
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
// Encode once to UTF-8 bytes and cache the ByteString form.
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// -1 = not computed, 0 = not initialized, 1 = initialized (standard protobuf memoization).
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
// Serializes fields in field-number order; empty string/list fields are omitted (proto3 default).
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < ruleSets_.size(); i++) {
output.writeMessage(1, ruleSets_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < ruleSets_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, ruleSets_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
// Value equality over all declared fields plus unknown fields, per the protobuf contract.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.contentwarehouse.v1.ListRuleSetsResponse)) {
return super.equals(obj);
}
com.google.cloud.contentwarehouse.v1.ListRuleSetsResponse other =
(com.google.cloud.contentwarehouse.v1.ListRuleSetsResponse) obj;
if (!getRuleSetsList().equals(other.getRuleSetsList())) return false;
if (!getNextPageToken().equals(other.getNextPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getRuleSetsCount() > 0) {
hash = (37 * hash) + RULE_SETS_FIELD_NUMBER;
hash = (53 * hash) + getRuleSetsList().hashCode();
}
hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getNextPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Standard generated parse entry points covering ByteBuffer, ByteString, byte[], streams,
// delimited streams, and CodedInputStream, each with an ExtensionRegistry variant.
public static com.google.cloud.contentwarehouse.v1.ListRuleSetsResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.contentwarehouse.v1.ListRuleSetsResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.contentwarehouse.v1.ListRuleSetsResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.contentwarehouse.v1.ListRuleSetsResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.contentwarehouse.v1.ListRuleSetsResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.contentwarehouse.v1.ListRuleSetsResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.contentwarehouse.v1.ListRuleSetsResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.contentwarehouse.v1.ListRuleSetsResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.contentwarehouse.v1.ListRuleSetsResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.contentwarehouse.v1.ListRuleSetsResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.contentwarehouse.v1.ListRuleSetsResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.contentwarehouse.v1.ListRuleSetsResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.contentwarehouse.v1.ListRuleSetsResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Response message for RuleSetService.ListRuleSets.
* </pre>
*
* Protobuf type {@code google.cloud.contentwarehouse.v1.ListRuleSetsResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.contentwarehouse.v1.ListRuleSetsResponse)
com.google.cloud.contentwarehouse.v1.ListRuleSetsResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.contentwarehouse.v1.RuleSetServiceRequestProto
.internal_static_google_cloud_contentwarehouse_v1_ListRuleSetsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.contentwarehouse.v1.RuleSetServiceRequestProto
.internal_static_google_cloud_contentwarehouse_v1_ListRuleSetsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.contentwarehouse.v1.ListRuleSetsResponse.class,
com.google.cloud.contentwarehouse.v1.ListRuleSetsResponse.Builder.class);
}
// Construct using com.google.cloud.contentwarehouse.v1.ListRuleSetsResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (ruleSetsBuilder_ == null) {
ruleSets_ = java.util.Collections.emptyList();
} else {
ruleSets_ = null;
ruleSetsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.contentwarehouse.v1.RuleSetServiceRequestProto
.internal_static_google_cloud_contentwarehouse_v1_ListRuleSetsResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.contentwarehouse.v1.ListRuleSetsResponse getDefaultInstanceForType() {
return com.google.cloud.contentwarehouse.v1.ListRuleSetsResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.contentwarehouse.v1.ListRuleSetsResponse build() {
com.google.cloud.contentwarehouse.v1.ListRuleSetsResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.contentwarehouse.v1.ListRuleSetsResponse buildPartial() {
com.google.cloud.contentwarehouse.v1.ListRuleSetsResponse result =
new com.google.cloud.contentwarehouse.v1.ListRuleSetsResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.cloud.contentwarehouse.v1.ListRuleSetsResponse result) {
if (ruleSetsBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
ruleSets_ = java.util.Collections.unmodifiableList(ruleSets_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.ruleSets_ = ruleSets_;
} else {
result.ruleSets_ = ruleSetsBuilder_.build();
}
}
private void buildPartial0(com.google.cloud.contentwarehouse.v1.ListRuleSetsResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.contentwarehouse.v1.ListRuleSetsResponse) {
return mergeFrom((com.google.cloud.contentwarehouse.v1.ListRuleSetsResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.contentwarehouse.v1.ListRuleSetsResponse other) {
if (other == com.google.cloud.contentwarehouse.v1.ListRuleSetsResponse.getDefaultInstance())
return this;
if (ruleSetsBuilder_ == null) {
if (!other.ruleSets_.isEmpty()) {
if (ruleSets_.isEmpty()) {
ruleSets_ = other.ruleSets_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureRuleSetsIsMutable();
ruleSets_.addAll(other.ruleSets_);
}
onChanged();
}
} else {
if (!other.ruleSets_.isEmpty()) {
if (ruleSetsBuilder_.isEmpty()) {
ruleSetsBuilder_.dispose();
ruleSetsBuilder_ = null;
ruleSets_ = other.ruleSets_;
bitField0_ = (bitField0_ & ~0x00000001);
ruleSetsBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getRuleSetsFieldBuilder()
: null;
} else {
ruleSetsBuilder_.addAllMessages(other.ruleSets_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.cloud.contentwarehouse.v1.RuleSet m =
input.readMessage(
com.google.cloud.contentwarehouse.v1.RuleSet.parser(), extensionRegistry);
if (ruleSetsBuilder_ == null) {
ensureRuleSetsIsMutable();
ruleSets_.add(m);
} else {
ruleSetsBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.cloud.contentwarehouse.v1.RuleSet> ruleSets_ =
java.util.Collections.emptyList();
private void ensureRuleSetsIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
ruleSets_ =
new java.util.ArrayList<com.google.cloud.contentwarehouse.v1.RuleSet>(ruleSets_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.contentwarehouse.v1.RuleSet,
com.google.cloud.contentwarehouse.v1.RuleSet.Builder,
com.google.cloud.contentwarehouse.v1.RuleSetOrBuilder>
ruleSetsBuilder_;
/**
*
*
* <pre>
* The rule sets from the specified parent.
* </pre>
*
* <code>repeated .google.cloud.contentwarehouse.v1.RuleSet rule_sets = 1;</code>
*/
public java.util.List<com.google.cloud.contentwarehouse.v1.RuleSet> getRuleSetsList() {
if (ruleSetsBuilder_ == null) {
return java.util.Collections.unmodifiableList(ruleSets_);
} else {
return ruleSetsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* The rule sets from the specified parent.
* </pre>
*
* <code>repeated .google.cloud.contentwarehouse.v1.RuleSet rule_sets = 1;</code>
*/
public int getRuleSetsCount() {
if (ruleSetsBuilder_ == null) {
return ruleSets_.size();
} else {
return ruleSetsBuilder_.getCount();
}
}
/**
*
*
* <pre>
* The rule sets from the specified parent.
* </pre>
*
* <code>repeated .google.cloud.contentwarehouse.v1.RuleSet rule_sets = 1;</code>
*/
public com.google.cloud.contentwarehouse.v1.RuleSet getRuleSets(int index) {
if (ruleSetsBuilder_ == null) {
return ruleSets_.get(index);
} else {
return ruleSetsBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* The rule sets from the specified parent.
* </pre>
*
* <code>repeated .google.cloud.contentwarehouse.v1.RuleSet rule_sets = 1;</code>
*/
public Builder setRuleSets(int index, com.google.cloud.contentwarehouse.v1.RuleSet value) {
if (ruleSetsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureRuleSetsIsMutable();
ruleSets_.set(index, value);
onChanged();
} else {
ruleSetsBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The rule sets from the specified parent.
* </pre>
*
* <code>repeated .google.cloud.contentwarehouse.v1.RuleSet rule_sets = 1;</code>
*/
public Builder setRuleSets(
int index, com.google.cloud.contentwarehouse.v1.RuleSet.Builder builderForValue) {
if (ruleSetsBuilder_ == null) {
ensureRuleSetsIsMutable();
ruleSets_.set(index, builderForValue.build());
onChanged();
} else {
ruleSetsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The rule sets from the specified parent.
* </pre>
*
* <code>repeated .google.cloud.contentwarehouse.v1.RuleSet rule_sets = 1;</code>
*/
public Builder addRuleSets(com.google.cloud.contentwarehouse.v1.RuleSet value) {
if (ruleSetsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureRuleSetsIsMutable();
ruleSets_.add(value);
onChanged();
} else {
ruleSetsBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* The rule sets from the specified parent.
* </pre>
*
* <code>repeated .google.cloud.contentwarehouse.v1.RuleSet rule_sets = 1;</code>
*/
public Builder addRuleSets(int index, com.google.cloud.contentwarehouse.v1.RuleSet value) {
if (ruleSetsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureRuleSetsIsMutable();
ruleSets_.add(index, value);
onChanged();
} else {
ruleSetsBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The rule sets from the specified parent.
* </pre>
*
* <code>repeated .google.cloud.contentwarehouse.v1.RuleSet rule_sets = 1;</code>
*/
public Builder addRuleSets(
com.google.cloud.contentwarehouse.v1.RuleSet.Builder builderForValue) {
if (ruleSetsBuilder_ == null) {
ensureRuleSetsIsMutable();
ruleSets_.add(builderForValue.build());
onChanged();
} else {
ruleSetsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The rule sets from the specified parent.
* </pre>
*
* <code>repeated .google.cloud.contentwarehouse.v1.RuleSet rule_sets = 1;</code>
*/
public Builder addRuleSets(
int index, com.google.cloud.contentwarehouse.v1.RuleSet.Builder builderForValue) {
if (ruleSetsBuilder_ == null) {
ensureRuleSetsIsMutable();
ruleSets_.add(index, builderForValue.build());
onChanged();
} else {
ruleSetsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The rule sets from the specified parent.
* </pre>
*
* <code>repeated .google.cloud.contentwarehouse.v1.RuleSet rule_sets = 1;</code>
*/
public Builder addAllRuleSets(
java.lang.Iterable<? extends com.google.cloud.contentwarehouse.v1.RuleSet> values) {
if (ruleSetsBuilder_ == null) {
ensureRuleSetsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, ruleSets_);
onChanged();
} else {
ruleSetsBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* The rule sets from the specified parent.
* </pre>
*
* <code>repeated .google.cloud.contentwarehouse.v1.RuleSet rule_sets = 1;</code>
*/
public Builder clearRuleSets() {
if (ruleSetsBuilder_ == null) {
ruleSets_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
ruleSetsBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* The rule sets from the specified parent.
* </pre>
*
* <code>repeated .google.cloud.contentwarehouse.v1.RuleSet rule_sets = 1;</code>
*/
public Builder removeRuleSets(int index) {
if (ruleSetsBuilder_ == null) {
ensureRuleSetsIsMutable();
ruleSets_.remove(index);
onChanged();
} else {
ruleSetsBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* The rule sets from the specified parent.
* </pre>
*
* <code>repeated .google.cloud.contentwarehouse.v1.RuleSet rule_sets = 1;</code>
*/
public com.google.cloud.contentwarehouse.v1.RuleSet.Builder getRuleSetsBuilder(int index) {
return getRuleSetsFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* The rule sets from the specified parent.
* </pre>
*
* <code>repeated .google.cloud.contentwarehouse.v1.RuleSet rule_sets = 1;</code>
*/
public com.google.cloud.contentwarehouse.v1.RuleSetOrBuilder getRuleSetsOrBuilder(int index) {
if (ruleSetsBuilder_ == null) {
return ruleSets_.get(index);
} else {
return ruleSetsBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* The rule sets from the specified parent.
* </pre>
*
* <code>repeated .google.cloud.contentwarehouse.v1.RuleSet rule_sets = 1;</code>
*/
public java.util.List<? extends com.google.cloud.contentwarehouse.v1.RuleSetOrBuilder>
getRuleSetsOrBuilderList() {
if (ruleSetsBuilder_ != null) {
return ruleSetsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(ruleSets_);
}
}
/**
*
*
* <pre>
* The rule sets from the specified parent.
* </pre>
*
* <code>repeated .google.cloud.contentwarehouse.v1.RuleSet rule_sets = 1;</code>
*/
public com.google.cloud.contentwarehouse.v1.RuleSet.Builder addRuleSetsBuilder() {
return getRuleSetsFieldBuilder()
.addBuilder(com.google.cloud.contentwarehouse.v1.RuleSet.getDefaultInstance());
}
/**
*
*
* <pre>
* The rule sets from the specified parent.
* </pre>
*
* <code>repeated .google.cloud.contentwarehouse.v1.RuleSet rule_sets = 1;</code>
*/
public com.google.cloud.contentwarehouse.v1.RuleSet.Builder addRuleSetsBuilder(int index) {
return getRuleSetsFieldBuilder()
.addBuilder(index, com.google.cloud.contentwarehouse.v1.RuleSet.getDefaultInstance());
}
/**
*
*
* <pre>
* The rule sets from the specified parent.
* </pre>
*
* <code>repeated .google.cloud.contentwarehouse.v1.RuleSet rule_sets = 1;</code>
*/
public java.util.List<com.google.cloud.contentwarehouse.v1.RuleSet.Builder>
getRuleSetsBuilderList() {
return getRuleSetsFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.contentwarehouse.v1.RuleSet,
com.google.cloud.contentwarehouse.v1.RuleSet.Builder,
com.google.cloud.contentwarehouse.v1.RuleSetOrBuilder>
getRuleSetsFieldBuilder() {
if (ruleSetsBuilder_ == null) {
ruleSetsBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.contentwarehouse.v1.RuleSet,
com.google.cloud.contentwarehouse.v1.RuleSet.Builder,
com.google.cloud.contentwarehouse.v1.RuleSetOrBuilder>(
ruleSets_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
ruleSets_ = null;
}
return ruleSetsBuilder_;
}
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.contentwarehouse.v1.ListRuleSetsResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.contentwarehouse.v1.ListRuleSetsResponse)
private static final com.google.cloud.contentwarehouse.v1.ListRuleSetsResponse DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.contentwarehouse.v1.ListRuleSetsResponse();
}
public static com.google.cloud.contentwarehouse.v1.ListRuleSetsResponse getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ListRuleSetsResponse> PARSER =
new com.google.protobuf.AbstractParser<ListRuleSetsResponse>() {
@java.lang.Override
public ListRuleSetsResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ListRuleSetsResponse> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListRuleSetsResponse> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.contentwarehouse.v1.ListRuleSetsResponse getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
apache/syncope | 37,183 | core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/data/TaskDataBinderImpl.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.syncope.core.provisioning.java.data;
import java.util.Comparator;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.concurrent.ConcurrentHashMap;
import org.apache.commons.lang3.StringUtils;
import org.apache.syncope.common.lib.SyncopeClientException;
import org.apache.syncope.common.lib.command.CommandArgs;
import org.apache.syncope.common.lib.command.CommandTO;
import org.apache.syncope.common.lib.form.FormProperty;
import org.apache.syncope.common.lib.form.FormPropertyValue;
import org.apache.syncope.common.lib.form.SyncopeForm;
import org.apache.syncope.common.lib.to.ExecTO;
import org.apache.syncope.common.lib.to.FormPropertyDefTO;
import org.apache.syncope.common.lib.to.InboundTaskTO;
import org.apache.syncope.common.lib.to.LiveSyncTaskTO;
import org.apache.syncope.common.lib.to.MacroTaskTO;
import org.apache.syncope.common.lib.to.NotificationTaskTO;
import org.apache.syncope.common.lib.to.PropagationTaskTO;
import org.apache.syncope.common.lib.to.ProvisioningTaskTO;
import org.apache.syncope.common.lib.to.PullTaskTO;
import org.apache.syncope.common.lib.to.PushTaskTO;
import org.apache.syncope.common.lib.to.SchedTaskTO;
import org.apache.syncope.common.lib.to.TaskTO;
import org.apache.syncope.common.lib.types.ClientExceptionType;
import org.apache.syncope.common.lib.types.IdRepoImplementationType;
import org.apache.syncope.common.lib.types.ImplementationEngine;
import org.apache.syncope.common.lib.types.JobType;
import org.apache.syncope.common.lib.types.MatchingRule;
import org.apache.syncope.common.lib.types.TaskType;
import org.apache.syncope.common.lib.types.UnmatchingRule;
import org.apache.syncope.core.persistence.api.dao.AnyTypeDAO;
import org.apache.syncope.core.persistence.api.dao.ExternalResourceDAO;
import org.apache.syncope.core.persistence.api.dao.ImplementationDAO;
import org.apache.syncope.core.persistence.api.dao.NotFoundException;
import org.apache.syncope.core.persistence.api.dao.RealmSearchDAO;
import org.apache.syncope.core.persistence.api.dao.TaskExecDAO;
import org.apache.syncope.core.persistence.api.entity.EntityFactory;
import org.apache.syncope.core.persistence.api.entity.Implementation;
import org.apache.syncope.core.persistence.api.entity.task.AnyTemplateLiveSyncTask;
import org.apache.syncope.core.persistence.api.entity.task.AnyTemplatePullTask;
import org.apache.syncope.core.persistence.api.entity.task.FormPropertyDef;
import org.apache.syncope.core.persistence.api.entity.task.InboundTask;
import org.apache.syncope.core.persistence.api.entity.task.LiveSyncTask;
import org.apache.syncope.core.persistence.api.entity.task.MacroTask;
import org.apache.syncope.core.persistence.api.entity.task.MacroTaskCommand;
import org.apache.syncope.core.persistence.api.entity.task.NotificationTask;
import org.apache.syncope.core.persistence.api.entity.task.PropagationTask;
import org.apache.syncope.core.persistence.api.entity.task.ProvisioningTask;
import org.apache.syncope.core.persistence.api.entity.task.PullTask;
import org.apache.syncope.core.persistence.api.entity.task.PushTask;
import org.apache.syncope.core.persistence.api.entity.task.SchedTask;
import org.apache.syncope.core.persistence.api.entity.task.Task;
import org.apache.syncope.core.persistence.api.entity.task.TaskExec;
import org.apache.syncope.core.persistence.api.entity.task.TaskUtils;
import org.apache.syncope.core.persistence.api.entity.task.TaskUtilsFactory;
import org.apache.syncope.core.provisioning.api.data.TaskDataBinder;
import org.apache.syncope.core.provisioning.api.job.JobNamer;
import org.apache.syncope.core.provisioning.api.macro.MacroActions;
import org.apache.syncope.core.provisioning.java.job.MacroJobDelegate;
import org.apache.syncope.core.provisioning.java.job.SyncopeTaskScheduler;
import org.apache.syncope.core.provisioning.java.pushpull.LiveSyncJobDelegate;
import org.apache.syncope.core.provisioning.java.pushpull.PullJobDelegate;
import org.apache.syncope.core.provisioning.java.pushpull.PushJobDelegate;
import org.apache.syncope.core.provisioning.java.utils.TemplateUtils;
import org.apache.syncope.core.spring.implementation.ImplementationManager;
import org.apache.syncope.core.spring.security.AuthContextUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class TaskDataBinderImpl extends AbstractExecutableDatabinder implements TaskDataBinder {
protected static final Logger LOG = LoggerFactory.getLogger(TaskDataBinder.class);
protected final RealmSearchDAO realmSearchDAO;
protected final ExternalResourceDAO resourceDAO;
protected final TaskExecDAO taskExecDAO;
protected final AnyTypeDAO anyTypeDAO;
protected final ImplementationDAO implementationDAO;
protected final EntityFactory entityFactory;
protected final SyncopeTaskScheduler scheduler;
protected final TaskUtilsFactory taskUtilsFactory;
protected final Map<String, MacroActions> perContextMacroActions = new ConcurrentHashMap<>();
public TaskDataBinderImpl(
final RealmSearchDAO realmSearchDAO,
final ExternalResourceDAO resourceDAO,
final TaskExecDAO taskExecDAO,
final AnyTypeDAO anyTypeDAO,
final ImplementationDAO implementationDAO,
final EntityFactory entityFactory,
final SyncopeTaskScheduler scheduler,
final TaskUtilsFactory taskUtilsFactory) {
this.realmSearchDAO = realmSearchDAO;
this.resourceDAO = resourceDAO;
this.taskExecDAO = taskExecDAO;
this.anyTypeDAO = anyTypeDAO;
this.implementationDAO = implementationDAO;
this.entityFactory = entityFactory;
this.scheduler = scheduler;
this.taskUtilsFactory = taskUtilsFactory;
}
protected void fill(final ProvisioningTask<?> provisioningTask, final ProvisioningTaskTO provisioningTaskTO) {
if (provisioningTask instanceof final PushTask pushTask
&& provisioningTaskTO instanceof final PushTaskTO pushTaskTO) {
Implementation jobDelegate = pushTaskTO.getJobDelegate() == null
? implementationDAO.findByType(IdRepoImplementationType.TASKJOB_DELEGATE).stream().
filter(impl -> PushJobDelegate.class.getSimpleName().equals(impl.getKey())).
findFirst().orElse(null)
: implementationDAO.findById(pushTaskTO.getJobDelegate()).orElse(null);
if (jobDelegate == null) {
jobDelegate = entityFactory.newEntity(Implementation.class);
jobDelegate.setKey(PushJobDelegate.class.getSimpleName());
jobDelegate.setEngine(ImplementationEngine.JAVA);
jobDelegate.setType(IdRepoImplementationType.TASKJOB_DELEGATE);
jobDelegate.setBody(PushJobDelegate.class.getName());
jobDelegate = implementationDAO.save(jobDelegate);
}
pushTask.setJobDelegate(jobDelegate);
pushTask.setSourceRealm(realmSearchDAO.findByFullPath(pushTaskTO.getSourceRealm()).
orElseThrow(() -> new NotFoundException("Realm " + pushTaskTO.getSourceRealm())));
pushTask.setMatchingRule(pushTaskTO.getMatchingRule() == null
? MatchingRule.LINK : pushTaskTO.getMatchingRule());
pushTask.setUnmatchingRule(pushTaskTO.getUnmatchingRule() == null
? UnmatchingRule.ASSIGN : pushTaskTO.getUnmatchingRule());
pushTaskTO.getFilters().forEach((type, fiql) -> anyTypeDAO.findById(type).ifPresentOrElse(
anyType -> pushTask.getFilters().put(anyType.getKey(), fiql),
() -> LOG.debug("Invalid AnyType {} specified, ignoring...", type)));
// remove all filters not contained in the TO
pushTask.getFilters().entrySet().
removeIf(filter -> !pushTaskTO.getFilters().containsKey(filter.getKey()));
} else if (provisioningTask instanceof final InboundTask<?> inboundTask
&& provisioningTaskTO instanceof final InboundTaskTO inboundTaskTO) {
inboundTask.setDestinationRealm(realmSearchDAO.findByFullPath(inboundTaskTO.getDestinationRealm()).
orElseThrow(() -> new NotFoundException("Realm " + inboundTaskTO.getDestinationRealm())));
inboundTask.setMatchingRule(inboundTaskTO.getMatchingRule() == null
? MatchingRule.UPDATE : inboundTaskTO.getMatchingRule());
inboundTask.setUnmatchingRule(inboundTaskTO.getUnmatchingRule() == null
? UnmatchingRule.PROVISION : inboundTaskTO.getUnmatchingRule());
inboundTask.setRemediation(inboundTaskTO.isRemediation());
if (provisioningTask instanceof final LiveSyncTask liveSyncTask
&& provisioningTaskTO instanceof final LiveSyncTaskTO liveSyncTaskTO) {
Implementation jobDelegate = liveSyncTaskTO.getJobDelegate() == null
? implementationDAO.findByType(IdRepoImplementationType.TASKJOB_DELEGATE).stream().
filter(impl -> LiveSyncJobDelegate.class.getSimpleName().equals(impl.getKey())).
findFirst().orElse(null)
: implementationDAO.findById(liveSyncTaskTO.getJobDelegate()).orElse(null);
if (jobDelegate == null) {
jobDelegate = entityFactory.newEntity(Implementation.class);
jobDelegate.setKey(LiveSyncJobDelegate.class.getSimpleName());
jobDelegate.setEngine(ImplementationEngine.JAVA);
jobDelegate.setType(IdRepoImplementationType.TASKJOB_DELEGATE);
jobDelegate.setBody(LiveSyncJobDelegate.class.getName());
jobDelegate = implementationDAO.save(jobDelegate);
}
liveSyncTask.setJobDelegate(jobDelegate);
liveSyncTask.setDelaySecondsAcrossInvocations(liveSyncTaskTO.getDelaySecondsAcrossInvocations());
if (liveSyncTaskTO.getLiveSyncDeltaMapper() == null) {
liveSyncTask.setLiveSyncDeltaMapper(null);
} else {
implementationDAO.findById(liveSyncTaskTO.getLiveSyncDeltaMapper()).ifPresentOrElse(
liveSyncTask::setLiveSyncDeltaMapper,
() -> LOG.debug("Invalid Implementation {}, ignoring...",
liveSyncTaskTO.getLiveSyncDeltaMapper()));
}
// validate JEXL expressions from templates and proceed if fine
TemplateUtils.check(liveSyncTaskTO.getTemplates(), ClientExceptionType.InvalidLiveSyncTask);
liveSyncTaskTO.getTemplates().forEach((type, template) -> anyTypeDAO.findById(type).ifPresentOrElse(
anyType -> {
AnyTemplateLiveSyncTask anyTemplate = liveSyncTask.getTemplate(anyType.getKey()).
orElse(null);
if (anyTemplate == null) {
anyTemplate = entityFactory.newEntity(AnyTemplateLiveSyncTask.class);
anyTemplate.setAnyType(anyType);
anyTemplate.setLiveSyncTask(liveSyncTask);
liveSyncTask.add(anyTemplate);
}
anyTemplate.set(template);
},
() -> LOG.debug("Invalid AnyType {} specified, ignoring...", type)));
// remove all templates not contained in the TO
liveSyncTask.getTemplates().removeIf(
anyTemplate -> !liveSyncTaskTO.getTemplates().containsKey(anyTemplate.getAnyType().getKey()));
} else if (provisioningTask instanceof final PullTask pullTask
&& provisioningTaskTO instanceof final PullTaskTO pullTaskTO) {
Implementation jobDelegate = pullTaskTO.getJobDelegate() == null
? implementationDAO.findByType(IdRepoImplementationType.TASKJOB_DELEGATE).stream().
filter(impl -> PullJobDelegate.class.getSimpleName().equals(impl.getKey())).
findFirst().orElse(null)
: implementationDAO.findById(pullTaskTO.getJobDelegate()).orElse(null);
if (jobDelegate == null) {
jobDelegate = entityFactory.newEntity(Implementation.class);
jobDelegate.setKey(PullJobDelegate.class.getSimpleName());
jobDelegate.setEngine(ImplementationEngine.JAVA);
jobDelegate.setType(IdRepoImplementationType.TASKJOB_DELEGATE);
jobDelegate.setBody(PullJobDelegate.class.getName());
jobDelegate = implementationDAO.save(jobDelegate);
}
pullTask.setJobDelegate(jobDelegate);
pullTask.setPullMode(pullTaskTO.getPullMode());
if (pullTaskTO.getReconFilterBuilder() == null) {
pullTask.setReconFilterBuilder(null);
} else {
implementationDAO.findById(pullTaskTO.getReconFilterBuilder()).ifPresentOrElse(
pullTask::setReconFilterBuilder,
() -> LOG.debug("Invalid Implementation {}, ignoring...",
pullTaskTO.getReconFilterBuilder()));
}
// validate JEXL expressions from templates and proceed if fine
TemplateUtils.check(pullTaskTO.getTemplates(), ClientExceptionType.InvalidPullTask);
pullTaskTO.getTemplates().forEach((type, template) -> anyTypeDAO.findById(type).ifPresentOrElse(
anyType -> {
AnyTemplatePullTask anyTemplate = pullTask.getTemplate(anyType.getKey()).orElse(null);
if (anyTemplate == null) {
anyTemplate = entityFactory.newEntity(AnyTemplatePullTask.class);
anyTemplate.setAnyType(anyType);
anyTemplate.setPullTask(pullTask);
pullTask.add(anyTemplate);
}
anyTemplate.set(template);
},
() -> LOG.debug("Invalid AnyType {} specified, ignoring...", type)));
// remove all templates not contained in the TO
pullTask.getTemplates().
removeIf(anyTemplate -> !pullTaskTO.getTemplates().
containsKey(anyTemplate.getAnyType().getKey()));
}
}
// 3. fill the remaining fields
provisioningTask.setPerformCreate(provisioningTaskTO.isPerformCreate());
provisioningTask.setPerformUpdate(provisioningTaskTO.isPerformUpdate());
provisioningTask.setPerformDelete(provisioningTaskTO.isPerformDelete());
provisioningTask.setSyncStatus(provisioningTaskTO.isSyncStatus());
provisioningTaskTO.getActions().forEach(action -> implementationDAO.findById(action).ifPresentOrElse(
provisioningTask::add,
() -> LOG.debug("Invalid Implementation {}, ignoring...", action)));
// remove all implementations not contained in the TO
provisioningTask.getActions().removeIf(impl -> !provisioningTaskTO.getActions().contains(impl.getKey()));
provisioningTask.setConcurrentSettings(provisioningTaskTO.getConcurrentSettings());
}
/**
 * Maps the given {@link MacroTaskTO} onto the provided {@link MacroTask} entity:
 * realm, commands (with their arguments), behavioral flags, form property
 * definitions and the optional macro actions implementation.
 *
 * @param macroTask entity to fill
 * @param macroTaskTO transfer object carrying the values to apply
 * @throws NotFoundException if the realm referenced by the TO does not exist
 */
protected void fill(final MacroTask macroTask, final MacroTaskTO macroTaskTO) {
    macroTask.setRealm(realmSearchDAO.findByFullPath(macroTaskTO.getRealm()).
            orElseThrow(() -> new NotFoundException("Realm " + macroTaskTO.getRealm())));

    // commands are replaced wholesale: clear, then re-add from the TO in order
    macroTask.getCommands().clear();
    macroTaskTO.getCommands().
            forEach(command -> implementationDAO.findById(command.getKey()).ifPresentOrElse(
            impl -> {
                try {
                    CommandArgs args = command.getArgs();
                    if (args == null) {
                        // no args provided: build the default (empty) args for this implementation
                        args = ImplementationManager.emptyArgs(impl);
                    }

                    MacroTaskCommand macroTaskCommand = entityFactory.newEntity(MacroTaskCommand.class);
                    macroTaskCommand.setCommand(impl);
                    macroTaskCommand.setArgs(args);

                    macroTaskCommand.setMacroTask(macroTask);
                    macroTask.add(macroTaskCommand);
                } catch (Exception e) {
                    LOG.error("While adding Command {} to Macro", impl.getKey(), e);

                    SyncopeClientException sce = SyncopeClientException.build(
                            ClientExceptionType.InvalidImplementationType);
                    sce.getElements().add("While adding Command " + impl.getKey() + ": " + e.getMessage());
                    throw sce;
                }
            },
            // unknown command keys are logged and skipped, not rejected
            () -> LOG.error("Could not find Command {}", command.getKey())));

    macroTask.setContinueOnError(macroTaskTO.isContinueOnError());
    macroTask.setSaveExecs(macroTaskTO.isSaveExecs());

    // form property definitions are replaced wholesale as well
    macroTask.getFormPropertyDefs().clear();
    macroTaskTO.getFormPropertyDefs().forEach(fpdTO -> {
        FormPropertyDef fpd = entityFactory.newEntity(FormPropertyDef.class);
        fpd.setName(fpdTO.getName());
        fpd.getLabels().putAll(fpdTO.getLabels());
        fpd.setType(fpdTO.getType());
        fpd.setReadable(fpdTO.isReadable());
        fpd.setWritable(fpdTO.isWritable());
        fpd.setStringRegEx(fpdTO.getStringRegEx());
        fpd.setRequired(fpdTO.isRequired());
        fpd.setDatePattern(fpdTO.getDatePattern());
        fpd.setEnumValues(fpdTO.getEnumValues());
        fpd.setDropdownSingleSelection(fpdTO.isDropdownSingleSelection());
        fpd.setDropdownFreeForm(fpdTO.isDropdownFreeForm());
        fpd.setMimeType(fpdTO.getMimeType());
        fpd.setMacroTask(macroTask);
        macroTask.add(fpd);
    });

    if (macroTaskTO.getMacroActions() == null) {
        macroTask.setMacroAction(null);
    } else {
        // invalid implementation keys are ignored (logged at debug), not rejected
        implementationDAO.findById(macroTaskTO.getMacroActions()).ifPresentOrElse(
                macroTask::setMacroAction,
                () -> LOG.debug("Invalid Implementation {}, ignoring...", macroTaskTO.getMacroActions()));
    }
}
/**
 * Creates a new {@link SchedTask} (or subclass, according to the given
 * {@link TaskUtils}) from the given transfer object.
 *
 * For MACRO tasks, the job delegate referenced by the TO is used if present;
 * otherwise the registered {@code MacroJobDelegate} implementation is looked
 * up and auto-registered if missing.
 *
 * @param taskTO transfer object to create the task from
 * @param taskUtils utils matching the task type
 * @return the (not yet persisted) task entity
 * @throws IllegalArgumentException if the TO class does not match the task type
 * @throws NotFoundException if a referenced job delegate, realm or resource is missing
 */
@Override
public SchedTask createSchedTask(final SchedTaskTO taskTO, final TaskUtils taskUtils) {
    Class<? extends TaskTO> taskTOClass = taskUtils.getType().getToClass();
    if (!taskTOClass.equals(taskTO.getClass())) {
        throw new IllegalArgumentException(String.format("Expected %s, found %s", taskTOClass, taskTO.getClass()));
    }

    SchedTask task = taskUtils.newTask();
    task.setCronExpression(taskTO.getCronExpression());
    task.setName(taskTO.getName());
    task.setDescription(taskTO.getDescription());
    task.setActive(taskTO.isActive());

    if (taskUtils.getType() == TaskType.SCHEDULED) {
        task.setJobDelegate(implementationDAO.findById(taskTO.getJobDelegate()).
                orElseThrow(() -> new NotFoundException("JobDelegate " + taskTO.getJobDelegate())));
    } else if (taskTO instanceof MacroTaskTO macroTaskTO) {
        MacroTask macroTask = (MacroTask) task;

        // pick the delegate referenced by the TO, or fall back to the registered MacroJobDelegate
        Implementation jobDelegate = (macroTaskTO.getJobDelegate() == null
                ? implementationDAO.findByType(IdRepoImplementationType.TASKJOB_DELEGATE).stream().
                        filter(impl -> MacroJobDelegate.class.getName().equals(impl.getBody())).
                        findFirst()
                : implementationDAO.findById(macroTaskTO.getJobDelegate())).
                orElse(null);
        if (jobDelegate == null) {
            // auto-register the default MacroJobDelegate implementation
            jobDelegate = entityFactory.newEntity(Implementation.class);
            jobDelegate.setKey(MacroJobDelegate.class.getSimpleName());
            jobDelegate.setEngine(ImplementationEngine.JAVA);
            jobDelegate.setType(IdRepoImplementationType.TASKJOB_DELEGATE);
            jobDelegate.setBody(MacroJobDelegate.class.getName());
            jobDelegate = implementationDAO.save(jobDelegate);
        }
        macroTask.setJobDelegate(jobDelegate);

        macroTask.setRealm(realmSearchDAO.findByFullPath(macroTaskTO.getRealm()).
                orElseThrow(() -> new NotFoundException("Realm " + macroTaskTO.getRealm())));

        fill(macroTask, macroTaskTO);
    } else if (taskTO instanceof ProvisioningTaskTO provisioningTaskTO) {
        ProvisioningTask<?> provisioningTask = (ProvisioningTask<?>) task;

        provisioningTask.setResource(resourceDAO.findById(provisioningTaskTO.getResource()).
                orElseThrow(() -> new NotFoundException("Resource " + provisioningTaskTO.getResource())));

        fill(provisioningTask, provisioningTaskTO);
    }

    return task;
}
/**
 * Applies the common scheduled-task fields from the given transfer object
 * onto the existing task entity, then delegates type-specific mapping to
 * the appropriate {@code fill} overload.
 *
 * @param task task entity to update
 * @param taskTO transfer object carrying the new values
 * @param taskUtils utils matching the task type
 * @throws IllegalArgumentException if the TO class does not match the task type
 * @throws SyncopeClientException if the TO carries a blank name
 */
@Override
public void updateSchedTask(final SchedTask task, final SchedTaskTO taskTO, final TaskUtils taskUtils) {
    Class<? extends TaskTO> expected = taskUtils.getType().getToClass();
    if (!expected.equals(taskTO.getClass())) {
        throw new IllegalArgumentException(String.format("Expected %s, found %s", expected, taskTO.getClass()));
    }

    // name is mandatory
    if (StringUtils.isBlank(taskTO.getName())) {
        SyncopeClientException sce = SyncopeClientException.build(ClientExceptionType.RequiredValuesMissing);
        sce.getElements().add("name");
        throw sce;
    }

    task.setName(taskTO.getName());
    task.setDescription(taskTO.getDescription());
    task.setCronExpression(taskTO.getCronExpression());
    task.setActive(taskTO.isActive());

    // type-specific mapping; plain SchedTask instances need nothing further
    if (task instanceof MacroTask macroTask) {
        fill(macroTask, (MacroTaskTO) taskTO);
    } else if (task instanceof ProvisioningTask<?> provisioningTask) {
        fill(provisioningTask, (ProvisioningTaskTO) taskTO);
    }
}
/**
 * Builds a human-readable reference description for the given task:
 * type name, the literal {@code "Task"}, the task key and then either the
 * task name (scheduled tasks) or the connector object key (propagation
 * tasks), empty otherwise.
 *
 * @param task task to describe
 * @return the reference description
 */
@Override
public String buildRefDesc(final Task<?> task) {
    StringBuilder desc = new StringBuilder().
            append(taskUtilsFactory.getInstance(task).getType().name()).append(' ').
            append("Task ").
            append(task.getKey()).append(' ');
    if (task instanceof SchedTask schedTask) {
        desc.append(schedTask.getName());
    } else if (task instanceof PropagationTask propagationTask) {
        desc.append(propagationTask.getConnObjectKey());
    }
    return desc.toString();
}
/**
 * Converts the given task execution entity into its {@link ExecTO}
 * transfer object, including a reference to the owning task when that
 * task has already been persisted.
 *
 * @param execution execution entity
 * @return the generated transfer object
 */
@Override
public ExecTO getExecTO(final TaskExec<?> execution) {
    ExecTO result = new ExecTO();
    result.setKey(execution.getKey());
    result.setStatus(execution.getStatus());
    result.setMessage(execution.getMessage());
    result.setStart(execution.getStart());
    result.setEnd(execution.getEnd());
    result.setExecutor(execution.getExecutor());

    Task<?> owning = execution.getTask();
    if (owning != null && owning.getKey() != null) {
        // only reference the owning task if it was already persisted
        result.setJobType(JobType.TASK);
        result.setRefKey(owning.getKey());
        result.setRefDesc(buildRefDesc(owning));
    }

    return result;
}
/**
 * Copies the common scheduled-task fields from entity to transfer object,
 * including last / next execution information; for provisioning tasks,
 * also copies resource, actions and behavioral flags.
 *
 * @param schedTaskTO transfer object to fill
 * @param schedTask source entity
 */
protected void fill(final SchedTaskTO schedTaskTO, final SchedTask schedTask) {
    schedTaskTO.setName(schedTask.getName());
    schedTaskTO.setDescription(schedTask.getDescription());
    schedTaskTO.setCronExpression(schedTask.getCronExpression());
    schedTaskTO.setActive(schedTask.isActive());
    schedTaskTO.setJobDelegate(schedTask.getJobDelegate().getKey());

    // last execution: the most recent start among executions already set on
    // the TO, falling back to the TO's own start when none are present
    schedTaskTO.getExecutions().stream().max(Comparator.comparing(ExecTO::getStart)).
            map(ExecTO::getStart).ifPresentOrElse(
            schedTaskTO::setLastExec,
            () -> schedTaskTO.setLastExec(schedTaskTO.getStart()));

    // next execution as reported by the scheduler, if a trigger is registered
    scheduler.getNextTrigger(AuthContextUtils.getDomain(), JobNamer.getJobName(schedTask)).
            ifPresent(schedTaskTO::setNextExec);

    if (schedTaskTO instanceof final ProvisioningTaskTO provisioningTaskTO
            && schedTask instanceof final ProvisioningTask<?> provisioningTask) {

        provisioningTaskTO.setResource(provisioningTask.getResource().getKey());

        provisioningTaskTO.getActions().addAll(
                provisioningTask.getActions().stream().map(Implementation::getKey).toList());

        provisioningTaskTO.setPerformCreate(provisioningTask.isPerformCreate());
        provisioningTaskTO.setPerformUpdate(provisioningTask.isPerformUpdate());
        provisioningTaskTO.setPerformDelete(provisioningTask.isPerformDelete());
        provisioningTaskTO.setSyncStatus(provisioningTask.isSyncStatus());

        provisioningTaskTO.setConcurrentSettings(provisioningTask.getConcurrentSettings());
    }
}
/**
 * Converts the given task entity into the matching transfer object,
 * filling common execution information first and then type-specific
 * details according to {@link TaskUtils#getType()}.
 *
 * @param <T> expected transfer object type
 * @param task task entity
 * @param taskUtils utils matching the task type
 * @param details whether individual executions shall also be included
 * @return the generated transfer object
 */
@Override
public <T extends TaskTO> T getTaskTO(final Task<?> task, final TaskUtils taskUtils, final boolean details) {
    T taskTO = taskUtils.newTaskTO();
    taskTO.setKey(task.getKey());

    // latest execution info; empty status when the task never ran
    taskExecDAO.findLatestStarted(taskUtils.getType(), task).ifPresentOrElse(
        latestExec -> {
            taskTO.setLatestExecStatus(latestExec.getStatus());
            taskTO.setStart(latestExec.getStart());
            taskTO.setEnd(latestExec.getEnd());
            taskTO.setLastExecutor(latestExec.getExecutor());
        },
        () -> taskTO.setLatestExecStatus(StringUtils.EMPTY));

    if (details) {
        task.getExecs().stream().
                filter(Objects::nonNull).
                forEach(execution -> taskTO.getExecutions().add(getExecTO(execution)));
    }

    // type-specific mapping
    switch (taskUtils.getType()) {
        case PROPAGATION -> {
            PropagationTask propagationTask = (PropagationTask) task;
            PropagationTaskTO propagationTaskTO = (PropagationTaskTO) taskTO;

            propagationTaskTO.setOperation(propagationTask.getOperation());
            propagationTaskTO.setConnObjectKey(propagationTask.getConnObjectKey());
            propagationTaskTO.setOldConnObjectKey(propagationTask.getOldConnObjectKey());
            propagationTaskTO.setPropagationData(propagationTask.getSerializedPropagationData());
            propagationTaskTO.setResource(propagationTask.getResource().getKey());
            propagationTaskTO.setObjectClassName(propagationTask.getObjectClassName());
            propagationTaskTO.setAnyTypeKind(propagationTask.getAnyTypeKind());
            propagationTaskTO.setAnyType(propagationTask.getAnyType());
            propagationTaskTO.setEntityKey(propagationTask.getEntityKey());
        }

        case SCHEDULED -> {
            SchedTask schedTask = (SchedTask) task;
            SchedTaskTO schedTaskTO = (SchedTaskTO) taskTO;
            fill(schedTaskTO, schedTask);
        }

        case MACRO -> {
            MacroTask macroTask = (MacroTask) task;
            MacroTaskTO macroTaskTO = (MacroTaskTO) taskTO;
            fill(macroTaskTO, macroTask);

            macroTaskTO.setRealm(macroTask.getRealm().getFullPath());
            macroTask.getCommands().forEach(mct -> macroTaskTO.getCommands().add(
                    new CommandTO.Builder(mct.getCommand().getKey()).args(mct.getArgs()).build()));
            macroTaskTO.setContinueOnError(macroTask.isContinueOnError());
            macroTaskTO.setSaveExecs(macroTask.isSaveExecs());

            macroTask.getFormPropertyDefs().forEach(fpd -> {
                FormPropertyDefTO fpdTO = new FormPropertyDefTO();
                fpdTO.setKey(fpd.getKey());
                fpdTO.setName(fpd.getName());
                fpdTO.getLabels().putAll(fpd.getLabels());
                fpdTO.setType(fpd.getType());
                fpdTO.setReadable(fpd.isReadable());
                fpdTO.setWritable(fpd.isWritable());
                fpdTO.setRequired(fpd.isRequired());
                fpdTO.setStringRegEx(fpd.getStringRegEx());
                fpdTO.setDatePattern(fpd.getDatePattern());
                fpdTO.getEnumValues().putAll(fpd.getEnumValues());
                fpdTO.setDropdownSingleSelection(fpd.isDropdownSingleSelection());
                fpdTO.setDropdownFreeForm(fpd.isDropdownFreeForm());
                fpdTO.setMimeType(fpd.getMimeType());
                macroTaskTO.getFormPropertyDefs().add(fpdTO);
            });

            Optional.ofNullable(macroTask.getMacroActions()).
                    ifPresent(fv -> macroTaskTO.setMacroActions(fv.getKey()));
        }

        case LIVE_SYNC -> {
            LiveSyncTask liveSyncTask = (LiveSyncTask) task;
            LiveSyncTaskTO liveSyncTaskTO = (LiveSyncTaskTO) taskTO;
            fill(liveSyncTaskTO, liveSyncTask);

            liveSyncTaskTO.setDestinationRealm(liveSyncTask.getDestinationRealm().getFullPath());
            // default matching / unmatching rules when unset
            liveSyncTaskTO.setMatchingRule(liveSyncTask.getMatchingRule() == null
                    ? MatchingRule.UPDATE : liveSyncTask.getMatchingRule());
            liveSyncTaskTO.setUnmatchingRule(liveSyncTask.getUnmatchingRule() == null
                    ? UnmatchingRule.PROVISION : liveSyncTask.getUnmatchingRule());
            liveSyncTaskTO.setDelaySecondsAcrossInvocations(liveSyncTask.getDelaySecondsAcrossInvocations());
            // NOTE(review): assumes liveSyncDeltaMapper is always set — would NPE
            // otherwise (the fill() counterpart allows it to be null); confirm
            liveSyncTaskTO.setLiveSyncDeltaMapper(liveSyncTask.getLiveSyncDeltaMapper().getKey());

            liveSyncTask.getTemplates().
                    forEach(template -> liveSyncTaskTO.getTemplates().
                    put(template.getAnyType().getKey(), template.get()));
        }

        case PULL -> {
            PullTask pullTask = (PullTask) task;
            PullTaskTO pullTaskTO = (PullTaskTO) taskTO;
            fill(pullTaskTO, pullTask);

            pullTaskTO.setDestinationRealm(pullTask.getDestinationRealm().getFullPath());
            // default matching / unmatching rules when unset
            pullTaskTO.setMatchingRule(pullTask.getMatchingRule() == null
                    ? MatchingRule.UPDATE : pullTask.getMatchingRule());
            pullTaskTO.setUnmatchingRule(pullTask.getUnmatchingRule() == null
                    ? UnmatchingRule.PROVISION : pullTask.getUnmatchingRule());
            pullTaskTO.setPullMode(pullTask.getPullMode());

            Optional.ofNullable(pullTask.getReconFilterBuilder()).
                    ifPresent(rfb -> pullTaskTO.setReconFilterBuilder(rfb.getKey()));

            pullTask.getTemplates().
                    forEach(template -> pullTaskTO.getTemplates().
                    put(template.getAnyType().getKey(), template.get()));

            pullTaskTO.setRemediation(pullTask.isRemediation());
        }

        case PUSH -> {
            PushTask pushTask = (PushTask) task;
            PushTaskTO pushTaskTO = (PushTaskTO) taskTO;
            fill(pushTaskTO, pushTask);

            pushTaskTO.setSourceRealm(pushTask.getSourceRealm().getFullPath());
            // default matching / unmatching rules when unset
            pushTaskTO.setMatchingRule(pushTask.getMatchingRule() == null
                    ? MatchingRule.LINK : pushTask.getMatchingRule());
            pushTaskTO.setUnmatchingRule(pushTask.getUnmatchingRule() == null
                    ? UnmatchingRule.ASSIGN : pushTask.getUnmatchingRule());

            pushTaskTO.getFilters().putAll(pushTask.getFilters());
        }

        case NOTIFICATION -> {
            NotificationTask notificationTask = (NotificationTask) task;
            NotificationTaskTO notificationTaskTO = (NotificationTaskTO) taskTO;

            notificationTaskTO.setNotification(notificationTask.getNotification().getKey());
            notificationTaskTO.setAnyTypeKind(notificationTask.getAnyTypeKind());
            notificationTaskTO.setEntityKey(notificationTask.getEntityKey());
            notificationTaskTO.setSender(notificationTask.getSender());
            notificationTaskTO.getRecipients().addAll(notificationTask.getRecipients());
            notificationTaskTO.setSubject(notificationTask.getSubject());
            notificationTaskTO.setHtmlBody(notificationTask.getHtmlBody());
            notificationTaskTO.setTextBody(notificationTask.getTextBody());
            notificationTaskTO.setExecuted(notificationTask.isExecuted());
            // executed but no recorded execution status: surface a synthetic marker
            if (notificationTask.isExecuted() && StringUtils.isBlank(taskTO.getLatestExecStatus())) {
                taskTO.setLatestExecStatus("[EXECUTED]");
            }
            notificationTaskTO.setTraceLevel(notificationTask.getTraceLevel());
        }

        default -> {
        }
    }

    return taskTO;
}
/**
 * Builds the {@link SyncopeForm} for the given macro task, localizing
 * property labels and applying default / dropdown values from the
 * configured {@link MacroActions} implementation, if any.
 *
 * @param task macro task whose form is requested
 * @param locale locale used to pick property labels
 * @return the generated form
 * @throws NotFoundException if the task defines no form properties
 * @throws SyncopeClientException if the macro actions implementation cannot be built
 */
@Override
public SyncopeForm getMacroTaskForm(final MacroTask task, final Locale locale) {
    if (task.getFormPropertyDefs().isEmpty()) {
        throw new NotFoundException("No form properties defined for MacroTask " + task.getKey());
    }

    // build (or reuse the per-context cached) MacroActions instance, if configured
    Optional<MacroActions> actions;
    if (task.getMacroActions() == null) {
        actions = Optional.empty();
    } else {
        try {
            actions = Optional.of(ImplementationManager.build(
                    task.getMacroActions(),
                    () -> perContextMacroActions.get(task.getMacroActions().getKey()),
                    instance -> perContextMacroActions.put(task.getMacroActions().getKey(), instance)));
        } catch (Exception e) {
            LOG.error("Could not build {}", task.getMacroActions().getKey(), e);

            SyncopeClientException sce = SyncopeClientException.build(ClientExceptionType.InvalidImplementation);
            sce.getElements().add("Could not build " + task.getMacroActions().getKey());
            throw sce;
        }
    }

    SyncopeForm form = new SyncopeForm();

    form.getProperties().addAll(task.getFormPropertyDefs().stream().map(fpd -> {
        FormProperty prop = new FormProperty();
        prop.setId(fpd.getName());
        // label falls back to the property name when no translation exists for the locale
        prop.setName(fpd.getLabels().getOrDefault(locale, fpd.getName()));
        prop.setReadable(fpd.isReadable());
        prop.setRequired(fpd.isRequired());
        prop.setWritable(fpd.isWritable());
        prop.setType(fpd.getType());
        actions.flatMap(a -> a.getDefaultValue(fpd.getName())).ifPresent(prop::setValue);

        // type-specific attributes
        switch (prop.getType()) {
            case String ->
                prop.setStringRegEx(fpd.getStringRegEx());

            case Date ->
                prop.setDatePattern(fpd.getDatePattern());

            case Enum ->
                fpd.getEnumValues().forEach((k, v) -> prop.getEnumValues().add(new FormPropertyValue(k, v)));

            case Dropdown -> {
                // dropdown values are supplied at runtime by the MacroActions, if any
                actions.ifPresent(a -> a.getDropdownValues(fpd.getName()).
                        forEach((k, v) -> prop.getDropdownValues().add(new FormPropertyValue(k, v))));
                prop.setDropdownSingleSelection(fpd.isDropdownSingleSelection());
                prop.setDropdownFreeForm(fpd.isDropdownFreeForm());
            }

            case Binary -> {
                prop.setMimeType(fpd.getMimeType());
            }

            default -> {
            }
        }
        return prop;
    }).toList());

    return form;
}
}
|
oracle/nosql | 36,918 | kvmain/src/main/java/oracle/kv/impl/api/TopologyManager.java | /*-
* Copyright (C) 2011, 2025 Oracle and/or its affiliates. All rights reserved.
*
* This file was distributed by Oracle as part of a version of Oracle NoSQL
* Database made available at:
*
* http://www.oracle.com/technetwork/database/database-technologies/nosqldb/downloads/index.html
*
* Please see the LICENSE file included in the top-level directory of the
* appropriate version of Oracle NoSQL Database for a copy of the license and
* additional information.
*/
package oracle.kv.impl.api;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.WeakHashMap;
import java.util.concurrent.Callable;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.logging.Level;
import java.util.logging.Logger;
import oracle.kv.Consistency;
import oracle.kv.KVStore;
import oracle.kv.StaleStoreHandleException;
import oracle.kv.impl.admin.TopologyHistoryWriteSysTableUtil;
import oracle.kv.impl.fault.OperationFaultException;
import oracle.kv.impl.fault.WrappedClientException;
import oracle.kv.impl.query.runtime.RuntimeControlBlock;
import oracle.kv.impl.security.InvalidSignatureException;
import oracle.kv.impl.systables.TopologyHistoryDesc;
import oracle.kv.impl.test.TestHook;
import oracle.kv.impl.test.TestHookExecute;
import oracle.kv.impl.topo.Partition;
import oracle.kv.impl.topo.PartitionId;
import oracle.kv.impl.topo.RepGroupId;
import oracle.kv.impl.topo.Topology;
import oracle.kv.impl.topo.change.TopologyChange;
import oracle.kv.impl.util.KVThreadFactory;
import oracle.kv.impl.util.SerializationUtil;
import oracle.kv.table.PrimaryKey;
import oracle.kv.table.ReadOptions;
import oracle.kv.table.Row;
import oracle.kv.table.Table;
import oracle.kv.table.TableAPI;
/**
* Coordinates access to the in-memory copy of the Topology. Saving the
* Topology in an environment is done, if needed, by the RepNode itself.
* <p>
* It makes provisions for registering pre and post update listeners that are
* invoked whenever the topology is changed. It's worth noting that there are
* three sets of callbacks that are executed in the following sequence:
* PreUpdateListener callbacks, Localizer callbacks, PostUpdateListener
* callbacks.
* <p>
* Note that some of the methods relating to the persistent management of
* topology are in RepNode rather than in this class where they would appear to
* belong logically. This is to ensure that this shared class which is used
* both by KV clients and RNs does not contain references to JE classes.
*/
public class TopologyManager {
/** Test hook for before we issue a read request to the store. */
public static volatile TestHook<Void> beforeReadTopologyFromStore;

/**
 * The name of the kvstore
 */
private final String kvsName;

/* The current in-memory copy of the Topology. */
private volatile Topology topology;

/**
 * The local topology. The local topology can only ever differ from the
 * in-memory copy when the manager is running on the RepNode. In this case
 * the local topology may contain modifications to the "official"
 * topology due to partition migration activity. The local topology must
 * only be used to direct client operations and must NEVER be sent to
 * another node.
 */
private volatile Topology localTopology;

/**
 * Callback whose localizeTopology() method is invoked when the topology
 * is updated; null when no localization is needed (e.g. on clients).
 */
private Localizer localizer = null;

/**
 * The listeners to be invoked before proceeding with a Topology update.
 * Access must be synchronized on the manager instance.
 */
private final List<PreUpdateListener> preUpdateListeners =
    new LinkedList<>();

/**
 * The listeners to be invoked after a Topology update. Access must be
 * synchronized on the manager instance. If the listener is held weakly
 * then value is null, which allows the reference to be gc'ed, otherwise
 * value is the listener keeping a strong reference on the listener.
 * WeakHashMap key references are weak, while the value references are
 * strong.
 */
private final Map<PostUpdateListener, PostUpdateListener>
    postUpdateListeners = new WeakHashMap<>();

/**
 * The number of topology changes to be retained when managing the
 * topology.
 */
private final int maxTopoChanges;

/** Logger shared by this manager and the components it creates. */
private final Logger logger;

/** An executor for executing requests to fetch topology history. */
private final ExecutorService executor;

/**
 * The outstanding futures of topology history fetch requests. Access must
 * be under the synchronization block of this object.
 */
private final Map<Integer, CompletableFuture<Topology>>
    outstandingTopologyReads = new HashMap<>();

/** The topology history cache. */
private final TopologyHistoryCache cache;
/**
 * The constructor. Note that the manager starts out with a null Topology.
 * It's first initialized with a call to {@link #update}
 *
 * @param kvsName the name of the store
 * @param maxTopoChanges the max number of changes to be retained
 * @param logger a logger
 */
public TopologyManager(String kvsName,
                       int maxTopoChanges,
                       Logger logger) {
    this.kvsName = kvsName;
    this.maxTopoChanges = maxTopoChanges;
    this.logger = logger;
    /*
     * Cached thread pool: threads are created on demand for topology
     * history fetches and reclaimed when idle.
     */
    this.executor =
        Executors.newCachedThreadPool(
            new KVThreadFactory(
                "TopologyManager#readTopologyFromStore",
                logger));
    this.cache = new TopologyHistoryCache(logger);
}
/**
 * Registers a listener to be invoked before a Topology update is applied,
 * primarily so that the new topology can be validated first. Adding the
 * same listener twice has no effect.
 *
 * @param listener the new listener
 */
public synchronized void addPreUpdateListener(PreUpdateListener listener) {
    if (preUpdateListeners.contains(listener)) {
        return;
    }
    preUpdateListeners.add(listener);
}
/**
 * Adds a post update listener to help track Topology changes. All
 * components that are dependent upon the Topology should register a
 * listener, so they can be kept informed whenever the Topology changes.
 * The listener is held with a strong reference; see
 * {@link #addPostUpdateListener(PostUpdateListener, boolean)} for weak
 * registration.
 *
 * @param listener the new listener
 */
public void addPostUpdateListener(PostUpdateListener listener) {
    addPostUpdateListener(listener, false);
}
/**
 * Registers a listener to be invoked after each Topology update. All
 * components that depend on the Topology should register one so they can
 * react to changes. When {@code weak} is true the listener is held only
 * through the WeakHashMap's weak key reference and may be gc'ed once the
 * caller drops its own reference; re-registering an already present
 * listener has no effect.
 *
 * @param listener the new listener
 * @param weak whether to hold the listener weakly
 */
public synchronized void addPostUpdateListener(PostUpdateListener listener,
                                               boolean weak) {
    if (postUpdateListeners.containsKey(listener)) {
        return;
    }
    /*
     * A null value leaves only the map's weak key reference, so the
     * listener can be gc'ed; otherwise the value pins the listener with a
     * strong reference.
     */
    postUpdateListeners.put(listener, weak ? null : listener);
}
/**
 * Removes the specified post update listener. This method should not be
 * invoked from PostUpdateListener.postUpdate(). Removing a listener that
 * was never registered is a no-op.
 *
 * @param listener the listener to remove
 */
public synchronized void
    removePostUpdateListener(PostUpdateListener listener) {
    postUpdateListeners.remove(listener);
}
/**
 * Invokes every registered pre update listener with the candidate
 * topology, before the "official" topology is replaced. Any exception a
 * listener throws propagates to the caller.
 */
private void invokePreUpdateListeners(Topology newTopology) {
    assert Thread.holdsLock(this);

    /* Give each listener a chance to inspect the candidate topology. */
    preUpdateListeners.forEach(listener -> listener.preUpdate(newTopology));
}
/**
 * Invoke the registered post update listeners. These listeners are invoked
 * after either the "official" or local topology has been updated.
 *
 * A listener returning true from postUpdate() is deregistered. Failures
 * are collected so that every listener still runs; if any listener threw
 * an OperationFaultException, a single aggregated OperationFaultException
 * is thrown at the end.
 */
private void invokePostUpdateListeners() {
    assert Thread.holdsLock(this);

    /* Inform the listeners. */
    final Iterator<PostUpdateListener> itr =
        postUpdateListeners.keySet().iterator();
    StringBuilder excStrBuilder = new StringBuilder();
    while (itr.hasNext()) {
        PostUpdateListener listener = itr.next();
        try {
            // a "true" return asks for deregistration; remove via the
            // iterator to stay safe while iterating the WeakHashMap
            if (listener.postUpdate(topology)) {
                itr.remove();
            }
        } catch (OperationFaultException e) {
            // accumulate the failure and keep notifying the rest
            if (excStrBuilder.length() == 0) {
                excStrBuilder.append("Some topology post updates failed: ");
            } else {
                excStrBuilder.append(", ");
            }
            excStrBuilder.
                append(listener).append(":").
                append("(").append(e.getMessage()).append(")");
        }
    }
    if (excStrBuilder.length() != 0) {
        throw new OperationFaultException(excStrBuilder.toString());
    }
}
/**
 * Sets the localizer object for this manager. The localizer's
 * localizeTopology() method will be invoked when the topology is
 * updated.
 *
 * @param localizer the localizer callback, or null to disable localization
 */
public void setLocalizer(Localizer localizer) {
    this.localizer = localizer;
}
/**
 * Returns the current "official" topology, or null if the manager has not
 * yet been initialized via {@link #update}.
 */
public Topology getTopology() {
    return topology;
}
/**
 * Returns the local topology for this node. This should only be used to
 * direct client requests. The returned topology must NEVER be sent to
 * another node.
 *
 * @return the local topology, falling back to the "official" topology
 * when no local topology is set
 */
public Topology getLocalTopology() {
    /*
     * Read the volatile field exactly once: the previous
     * null-check-then-return pattern read it twice, and could return null
     * if another thread cleared localTopology (e.g. via
     * setLocalTopology(null)) between the two reads.
     */
    final Topology local = localTopology;
    return (local == null) ? topology : local;
}
/**
 * For use by unit tests only.
 *
 * @param localTopology the local topology to install; may be null to
 * clear it so that {@link #getLocalTopology} falls back to the
 * "official" topology
 */
public void setLocalTopology(Topology localTopology) {
    this.localTopology = localTopology;
}
/**
 * Updates the Topology by replacing the entire Topology with a new
 * instance. This is typically done in response to a request from the SNA.
 * Or if the topology cannot be update incrementally because the
 * necessary sequence of changes is not available in incremental form.
 *
 * The update is only done if the Topology is not current. If the Topology
 * needed to be updated, but the update failed false is returned. Otherwise
 * true is returned.
 *
 * Sequence on success: pre-update listeners, local topology update,
 * install of the pruned topology, cache/future notification, post-update
 * listeners.
 *
 * @param newTopology the new Topology
 *
 * @return false if the update failed
 */
public synchronized boolean update(Topology newTopology) {
    final int currSeqNum;
    if (topology != null) {
        // reject a topology belonging to a different store
        if (!kvsName.equals(topology.getKVStoreName())) {
            throw new IllegalArgumentException
                ("Update topology associated with KVStore: " +
                 topology.getKVStoreName() + " expected: " + kvsName);
        }
        checkTopologyId(topology.getId(), newTopology.getId());
        currSeqNum = topology.getSequenceNumber();
    } else {
        currSeqNum = 0;
    }

    final int newSequenceNumber = newTopology.getSequenceNumber();
    // already at or beyond this sequence number: nothing to do
    if (currSeqNum >= newSequenceNumber) {
        logger.log(Level.INFO,
                   "Topology update skipped. " +
                   "Current seq #: {0} Update seq #: {1}",
                   new Object[]{currSeqNum, newSequenceNumber});
        return true;
    }
    checkVersion(logger, newTopology);

    /*
     * Pre-updater may verify the signature of new topology copy. If the
     * verification failed, don't continue with the update;
     */
    try {
        invokePreUpdateListeners(newTopology);
    } catch (InvalidSignatureException ise) {
        logger.info(String.format(
            "Topology update to seq# %,d skipped due to " +
            "invalid signature.",
            newSequenceNumber));
        return false;
    }

    /*
     * If updating the local topology fails don't continue with the update.
     */
    if (!updateLocalTopology(newTopology)) {
        return false;
    }
    logger.log(Level.INFO, "Topology updated from seq#: {0} to {1}",
               new Object[]{currSeqNum, newSequenceNumber});

    // install the new topology, retaining only the most recent changes
    topology = newTopology.pruneChanges(Integer.MAX_VALUE, maxTopoChanges);

    onLocalTopologyUpdated();

    /*
     * Inform components that are dependent upon the Topology, so they
     * can fix their internal state.
     */
    invokePostUpdateListeners();
    return true;
}
/**
 * Called when the local topology is being updated. Caches the new topology
 * and notifies all waiting future.
 *
 * Must be invoked with the object monitor held, since it reads
 * outstandingTopologyReads.
 */
private void onLocalTopologyUpdated() {
    if (!Thread.holdsLock(this)) {
        throw new IllegalStateException("Must hold the object lock");
    }
    cache.put(topology);
    // complete any fetch request waiting for exactly this sequence number
    // NOTE(review): the completed future is not removed from the map here —
    // presumably the requesting path cleans it up; confirm against the
    // topology-history fetch code
    final CompletableFuture<Topology> future =
        outstandingTopologyReads.get(topology.getSequenceNumber());
    if (future != null) {
        future.complete(topology);
    }
}
    /**
     * Ensures that any changes in partition assignment at an RN can be
     * explained by elasticity operations that are in progress. This
     * verification relies on use of an absolutely consistent local topology
     * which is only available at the master, so the check is only done on the
     * master. It's the caller's responsibility to ensure that the method is
     * only invoked on the master. The call is currently accomplished via the
     * PreUpdateListener registered by the RepNode which has access to the
     * replicated environment handle and can determine the HA state and
     * decide whether the call should be made.
     *
     * @param rgId the replication group associated with the checks
     *
     * @param newTopo the new topology that is being checked
     *
     * @throws IllegalStateException if the partition checks fail
     */
    public void checkPartitionChanges(RepGroupId rgId,
                                      Topology newTopo)
        throws IllegalStateException {

        /* No current topology, or no partitions yet: nothing to verify. */
        if ((topology == null) || (topology.getPartitionMap().size() == 0)) {
            return;
        }

        /* Make copies to avoid race due to topologies changing */
        final Topology currentTopo = topology.getCopy();
        Topology localTopo = localTopology;
        if (localTopo != null) {
            localTopo = localTopo.getCopy();
        }

        /*
         * Compare the partitions that the current and new topologies assign
         * to this shard; any partition in the new topology's set is removed
         * from currentPartitions when the two topologies agree on it.
         */
        final Set<PartitionId> currentPartitions =
            getRGPartitions(rgId, currentTopo);
        final Set<PartitionId> newPartitions = getRGPartitions(rgId, newTopo);

        for (PartitionId npId : newPartitions) {
            final Partition np = newTopo.get(npId);
            final Partition cp = currentTopo.get(npId);

            if (np.getRepGroupId().equals(cp.getRepGroupId())) {
                /*
                 * NRG == CRG
                 *
                 * Current and new topologies agree on RG for this partition -
                 * continue. The local topology can be ignored.
                 */
                currentPartitions.remove(npId);
                continue;
            }

            /*
             * NRG != CRG
             *
             * The RG for the partition is different between current and new
             * topologies. There should be migration going on (a local topology
             * is present).
             */
            if (localTopo == null) {
                /*
                 * There cannot be a difference if no migration is in
                 * progress.
                 */
                final String msg =
                    String.format("%s in the new topology(seq #: %,d) " +
                                  "is absent from this shard in the current " +
                                  "topology(seq #: %,d) and there is no " +
                                  "partition migration in progress.",
                                  np, newTopo.getSequenceNumber(),
                                  currentTopo.getSequenceNumber());
                throw new IllegalStateException(msg);
            }

            /*
             * NRG != CRG and local topology != null
             *
             * There is migration going on, so the local topology and new
             * topology should match.
             */
            final Partition lp = localTopo.get(npId);
            if (lp.getRepGroupId().equals(np.getRepGroupId())) {
                /*
                 * NRG != CRG and LRG == NRG
                 *
                 * The partition is in the process of moving to the group
                 * specified in the new topology. All is well.
                 */
                continue;
            }

            /* Disagreement on which RG the partition should be in. */
            final String msg =
                String.format("%s in the new topology(seq #: %,d) and %s" +
                              " in the local topology(internal seq#: %,d)" +
                              " are associated with different shards",
                              np, newTopo.getSequenceNumber(),
                              lp, localTopo.getSequenceNumber());
            throw new IllegalStateException(msg);
        }

        /*
         * At this point currentPartitions contains partitions in the RG in
         * current topology which are not in the new topology's RG.
         */
        for (PartitionId cpId : currentPartitions) {
            final Partition cp = currentTopo.get(cpId);
            final Partition lp = localTopo == null ? null : localTopo.get(cpId);

            /*
             * From above: CRG != NRG
             *
             * Any residual current partitions (after the removal of matching
             * partitions above) should represent partitions that were migrated
             * away from this migration source. They must be in the local
             * topology. Their definition in the local topology may not agree
             * with the definition in the new topology, since they may be in
             * the process of being migrated.
             */
            if (lp == null) {
                /*
                 * There cannot be a difference if no migration is in
                 * progress.
                 */
                final String msg =
                    String.format("%s is in the current topology(seq #: %,d)" +
                                  " but is absent from the new topology" +
                                  " (seq #: %,d) and there is no" +
                                  " partition migration in progress.",
                                  cp, currentTopo.getSequenceNumber(),
                                  newTopo.getSequenceNumber());
                throw new IllegalStateException(msg);
            }

            /*
             * CRG != NRG and local topology != null
             *
             * Check whether the partition has been migrated away and is
             * therefore in the local topology associated with a different RG
             */
            if (!lp.getRepGroupId().equals(cp.getRepGroupId())) {
                /*
                 * A partition that was migrated out of this group. Note that
                 * we are not actually checking whether the migration has
                 * completed to keep things simple.
                 */
                continue;
            }

            /*
             * CRG != NRG but LRG == CRG!
             *
             * Partition is present in the local topology with the same RG as
             * the current topo. Disagreement on RGs between current and new
             * topo that cannot be justified by the local topo.
             */
            final String msg =
                String.format("%s is associated with the same shard in both" +
                              " the current(seq #: %,d) and local topologies" +
                              " but is associated with a different shard %s" +
                              " in the new topology(seq#: %,d). ",
                              cp,
                              currentTopo.getSequenceNumber(),
                              newTopo.get(cpId).getRepGroupId(),
                              newTopo.getSequenceNumber());
            throw new IllegalStateException(msg);
        }
    }
/**
* A utility method to retrieve all the partitions associated with an RG
*
* @param rgId identifies the filtering RG
*
* @param topo the topology containing the partitions
*
* @return the partition ids of the partitions hosted by the RG
*/
private Set<PartitionId> getRGPartitions(RepGroupId rgId,
Topology topo) {
final Set<PartitionId> hostedPartitions = new HashSet<>(100);
for (Partition p : topo.getPartitionMap().getAll()) {
if (!p.getRepGroupId().equals(rgId)) {
continue;
}
hostedPartitions.add(p.getResourceId());
}
return hostedPartitions;
}
/*
* Checks the topology version to make sure its acceptable. The version
* should typically have been upgraded to the current version as a
* consequence of deserialization.
*/
public static void checkVersion(Logger logger,
Topology topology) {
final int topoVersion = topology.getVersion();
if (topoVersion == Topology.CURRENT_VERSION) {
return; /* All's well, keep going. */
}
if (topoVersion == 0) {
/*
* r1 topology, inconsistent distribution of RNs across DCs. Warn
* and keep going.
*/
logger.warning("Using r1 topology, it was not upgraded.");
} else {
/* Should not happen. */
throw new OperationFaultException
("Encountered topology with version: " + topoVersion +
" Current topology version: " + Topology.CURRENT_VERSION);
}
}
    /**
     * Performs an incremental update to the Topology.
     * <p>
     * The update is sometimes done in the request/response loop, but it would
     * be better if the update was done asynchronously so as not to impact
     * request latency. We need an async version of the update operation for
     * this purpose. Not a pressing issue, since Topology updates are
     * infrequent.
     * <p>
     * An update may result in the topology changes being pruned so that only
     * the configured number of changes are retained.
     * <p>
     * This method has package access for unit testing.
     * <p>
     * A contract of this method is that the topology Id, the change
     * information and the signature should come from the same topology
     * instance.
     *
     * @param topologyId the topology id associated with the changes
     * @param changes the changes to be made to the current copy of the
     * Topology
     * @param topoSignature the signature of the topology where the changes
     * originated.
     */
    synchronized void update(long topologyId,
                             List<TopologyChange> changes,
                             byte[] topoSignature) {

        /*
         * The topology can be null if the node was waiting for a topo
         * push from another node, e.g. during replica start up.
         */
        final Topology workingCopy = (topology == null) ?
            new Topology(kvsName, topologyId) : topology.getCopy();

        checkTopologyId(workingCopy.getId(), topologyId);

        final int prevSequenceNumber = workingCopy.getSequenceNumber();

        if (!workingCopy.apply(changes)) {
            /* Topology not changed */
            return;
        }

        workingCopy.updateSignature(topoSignature);

        /*
         * Pre-updater may verify the signature of new topology copy. If the
         * verification fails, don't continue with the update.
         */
        try {
            invokePreUpdateListeners(workingCopy);
        } catch (InvalidSignatureException ise) {
            logger.log(Level.INFO,
                       "Topology incremental update to seq# {0} skipped " +
                       "due to invalid signature.",
                       workingCopy.getSequenceNumber());
            return;
        }

        /* If localization fails, abandon the update. */
        if (!updateLocalTopology(workingCopy)) {
            return;
        }

        /*
         * Make an atomic change. Pruning starts at the first applied change
         * so only maxTopoChanges changes are retained.
         */
        topology = workingCopy.pruneChanges(changes.get(0).getSequenceNumber(),
                                            maxTopoChanges);
        onLocalTopologyUpdated();
        logger.log(Level.INFO,
                   "Topology incrementally updated from seq#: {0} to {1}",
                   new Object[]{prevSequenceNumber,
                                topology.getSequenceNumber()});
        invokePostUpdateListeners();
    }
public synchronized void update(TopologyInfo topoInfo) {
update(topoInfo.getTopoId(), topoInfo.getChanges(),
topoInfo.getTopoSignature());
}
/**
* Verifies that the remote topology being used to update the local
* topology is compatible with it. All pre r2 topologies or r2 topologies
* that are communicated by r1 clients have the topology id zero. They
* are assumed to match non-zero r2 topologies for compatibility.
*
* @param localTopoId the local topology id
* @param remoteTopoId the remote topology id
*/
private void checkTopologyId(long localTopoId,
long remoteTopoId) {
if (localTopoId == remoteTopoId) {
return;
}
// TODO: Remove if we decide not to support r1 clients with r2 RNs
if ((localTopoId == Topology.NOCHECK_TOPOLOGY_ID) ||
(remoteTopoId == Topology.NOCHECK_TOPOLOGY_ID)) {
return;
}
final String msg = "Inconsistent use of Topology. " +
"An attempt was made to update this topology created on " +
new Date(localTopoId) +
" with changes originating from a different topology created on " +
new Date(remoteTopoId) +
". This exception indicates an application configuration issue." +
" Check if this store handle belongs to an older, now defunct " +
"store.";
/*
* Note that we intentionally throw an operation fault exception,
* rather than IllegalStateException. The latter is a catastrophic
* exception that shuts down the RN process. This led to the bug
* described in SR [#24693], where connection attempts from old clients
* make an RN repeatedly throw IllegalStateException, which ultimately
* brings the RN down! Clients should never be able to create
* server side failure like that, so this has been changed to
* StoreStaleHandleException, so the client knows it has to close and
* reopen its handle.
*/
throw new WrappedClientException(new StaleStoreHandleException(msg));
}
/**
* Updates the local topology if possible. Returns true if the local
* topology was updated otherwise false. If the local topology is updated
* the listeners are invoked.
*
* @return true if local topology was updated
*/
public synchronized boolean updateLocalTopology() {
/*
* Special case of the topology not yet being initialized. In this case
* report that things are OK, but don't invoke the listeners.
*/
if (topology == null) {
return true;
}
if (!updateLocalTopology(topology)) {
return false;
}
invokePostUpdateListeners();
return true;
}
/**
* Updates the local topology if possible. Returns true if the local
* topology was updated otherwise false.
*
* @param newTopology the topology to localize
* @return true if localTopology was updated
*/
private boolean updateLocalTopology(Topology newTopology) {
if (localizer == null) {
return true;
}
final Topology local = localizer.localizeTopology(newTopology);
if (local == null) {
logger.log(Level.INFO, "Topology update to {0} skipped. " +
"Unable to update local topology.",
newTopology.getSequenceNumber());
return false;
}
localTopology = local;
return true;
}
/**
* Returns true if the partition is in the process
* of moving (changing groups) or has moved.
*/
public boolean inTransit(PartitionId partitionId) {
if (partitionId.isNull()) {
return false;
}
final RepGroupId localGroupId =
getLocalTopology().getRepGroupId(partitionId);
final RepGroupId currentGroupId =
getTopology().getRepGroupId(partitionId);
if ((localGroupId == null) || (currentGroupId == null)) {
return false;
}
/*
* If the local group has changed, then the partition is in transit.
*/
return localGroupId.getGroupId() != currentGroupId.getGroupId();
}
/**
* Enqueues a query for partition migration notifications. In order not to
* miss any notifications about migrated partitions, the query must be
* registered at the MigrationManager while holding the Object lock on the
* TopologyManager.
*/
public synchronized void addQuery(RuntimeControlBlock rcb) {
assert(localizer != null);
localizer.addQuery(rcb);
}
/**
* Dequeues a query after the scan and partition-migration data structure
* update is done.
*/
public synchronized void removeQuery(RuntimeControlBlock rcb) {
assert(localizer != null);
localizer.removeQuery(rcb);
}
    /**
     * Invokes the provided callable while holding this object's monitor and
     * returns its result. The callable is expected not to throw a checked
     * exception; if it does, the exception is wrapped in a RuntimeException.
     */
    public synchronized <T> T callWithObjectLock(Callable<T> callable) {
        try {
            return callable.call();
        } catch (Exception e) {
            /* Preserve the unexpected exception as the cause. */
            throw new RuntimeException("should not throw Exception", e);
        }
    }
public Topology getTopology(KVStore store,
int sequenceNumber,
long timeoutMillis)
throws TimeoutException
{
Topology cached = cache.get(sequenceNumber);
if (cached != null) {
return cached;
}
return scheduleReadTopologyOrWait(store, sequenceNumber, timeoutMillis);
}
private Topology scheduleReadTopologyOrWait(KVStore store,
int sequenceNumber,
long timeoutMillis)
throws TimeoutException
{
CompletableFuture<Topology> future;
synchronized(this) {
future = outstandingTopologyReads.get(sequenceNumber);
if (future == null) {
/*
* Check the cache again in case the request was completed in
* between the previous check and here.
*/
final Topology cached = cache.get(sequenceNumber);
if (cached != null) {
return cached;
}
future = scheduleReadTopology(store, sequenceNumber);
}
}
try {
return future.get(timeoutMillis, TimeUnit.MILLISECONDS);
} catch (InterruptedException e) {
throw new IllegalStateException(
"Unexpected interruption during history topology read");
} catch (ExecutionException e) {
throw new RuntimeException(e);
}
}
    /**
     * Schedules an asynchronous read of the topology with the given sequence
     * number from the store and returns the future tracking it. The future
     * is registered in outstandingTopologyReads before the task is submitted
     * so that concurrent callers can join the same read.
     *
     * NOTE(review): this appears to rely on being invoked while synchronized
     * on this object (see scheduleReadTopologyOrWait) -- confirm before
     * adding new call sites.
     */
    private CompletableFuture<Topology> scheduleReadTopology(
        KVStore store,
        int sequenceNumber)
    {
        final CompletableFuture<Topology> future = new CompletableFuture<>();
        /* Puts the future in outstanding before we execute it. */
        outstandingTopologyReads.put(sequenceNumber, future);
        final Runnable task = () -> {
            try {
                /* Test hook for fault injection before the store read. */
                TestHookExecute.doHookIfSet(
                    beforeReadTopologyFromStore, null);
                final Topology topo =
                    readTopologyFromStore(store, sequenceNumber);
                /*
                 * Puts the result in cache before we complete the future so
                 * that when the future is removed after completion, a
                 * subsequent check on the cache will be guaranteed to see the
                 * newly cached value.
                 */
                cache.put(topo);
                future.complete(topo);
            } catch (Throwable t) {
                /* Deliver any failure to the waiters. */
                future.completeExceptionally(t);
            } finally {
                synchronized(this) {
                    outstandingTopologyReads.remove(sequenceNumber);
                }
            }
        };
        /* Submit for execution. */
        executor.submit(task);
        return future;
    }
private static Topology readTopologyFromStore(KVStore store,
int sequenceNumber) {
TableAPI tapi = store.getTableAPI();
Table table = tapi.getTable(TopologyHistoryDesc.TABLE_NAME);
PrimaryKey key = table.createPrimaryKey();
key.put(TopologyHistoryDesc.COL_SHARD_KEY,
TopologyHistoryWriteSysTableUtil.SHARD_KEY);
key.put(TopologyHistoryDesc.COL_NAME_TOPOLOGY_SEQUENCE_NUMBER,
sequenceNumber);
Row row = tapi.get(key, new ReadOptions(Consistency.ABSOLUTE, 0, null));
if (row == null) {
return null;
}
byte[] bytes =
row.get(TopologyHistoryDesc.COL_NAME_SERIALIZED_TOPOLOGY).
asBinary().get();
return SerializationUtil.getObject(bytes, Topology.class);
}
/* For testing. */
public int getOutstandingTopologyReadsCount() {
synchronized(this) {
return outstandingTopologyReads.size();
}
}
    /* For testing. Returns the topology history cache instance. */
    public TopologyHistoryCache getCache() {
        return cache;
    }
    /** Delegates cleanup to the topology history cache. */
    public void cleanUpCache() {
        cache.cleanUp();
    }
    /** Callback invoked after a topology update has been installed. */
    public interface PostUpdateListener {

        /**
         * The update method is invoked after either the "official" or "local"
         * topology has been updated. Implementations must take care to avoid
         * deadlocks as the topology manager instance will be locked at the
         * time of the call to postUpdate().
         *
         * @param topology the newly installed topology
         *
         * @return true if the listener is no longer needed and can be removed
         * from the list
         */
        boolean postUpdate(Topology topology);
    }
    /** Callback invoked before a topology update is installed. */
    public interface PreUpdateListener {

        /**
         * The update method is invoked before the "official" topology is
         * updated. Exceptions resulting from the listener will abort the
         * topology update operation. Implementations must take care to avoid
         * deadlocks as the topology manager instance will be locked at the
         * time of the call to preUpdate().
         *
         * @param topology the candidate topology about to be installed
         */
        void preUpdate(Topology topology);
    }
public interface Localizer {
/**
* Localizes the specified topology. The localized topology is returned.
* The return value may be the input topology if no changes are made.
* Null is returned if it was not possible to localize the topology.
*
* @param topology the topology to localize
* @return a localized topology or null
*/
Topology localizeTopology(Topology topology);
public void addQuery(RuntimeControlBlock rcb);
public void removeQuery(RuntimeControlBlock rcb);
}
}
|
apache/tomcat80 | 36,986 | java/org/apache/tomcat/util/net/jsse/openssl/OpenSSLCipherConfigurationParser.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.tomcat.util.net.jsse.openssl;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.juli.logging.Log;
import org.apache.juli.logging.LogFactory;
import org.apache.tomcat.util.net.Constants;
import org.apache.tomcat.util.res.StringManager;
/**
 * Class in charge of parsing OpenSSL expressions to define a list of ciphers.
 */
public class OpenSSLCipherConfigurationParser {
    private static final Log log = LogFactory.getLog(OpenSSLCipherConfigurationParser.class);
    private static final StringManager sm =
            StringManager.getManager("org.apache.tomcat.util.net.jsse.res");
    /* Guards one-time population of the alias tables by init(). */
    private static boolean initialized = false;
    /* Regex splitting a cipher string into elements (colon, comma or space). */
    private static final String SEPARATOR = ":|,| ";
    /**
     * If ! is used then the ciphers are permanently deleted from the list. The ciphers deleted can never reappear in the list
     * even if they are explicitly stated.
     */
    private static final String EXCLUDE = "!";
    /**
     * If - is used then the ciphers are deleted from the list, but some or all of the ciphers can be added again by later
     * options.
     */
    private static final String DELETE = "-";
    /**
     * If + is used then the ciphers are moved to the end of the list. This option doesn't add any new ciphers; it just moves
     * matching existing ones.
     */
    private static final String TO_END = "+";
    /**
     * Lists of cipher suites can be combined in a single cipher string using the + character.
     * This is used as a logical and operation.
     * For example SHA1+DES represents all cipher suites containing the SHA1 and the DES algorithms.
     */
    private static final String AND = "+";
    /**
     * All ciphers by their openssl alias name.
     */
    private static final Map<String, List<Cipher>> aliases = new LinkedHashMap<>();
    /**
     * The 'NULL' ciphers, that is those offering no encryption. Because these offer no encryption at all and are a security risk
     * they are disabled unless explicitly included.
     */
    private static final String eNULL = "eNULL";
    /**
     * The cipher suites offering no authentication. This is currently the anonymous DH algorithms. These cipher suites are
     * vulnerable to a 'man in the middle' attack and so their use is normally discouraged.
     */
    private static final String aNULL = "aNULL";
    /**
     * 'high' encryption cipher suites. This currently means those with key lengths larger than 128 bits, and some cipher suites
     * with 128-bit keys.
     */
    private static final String HIGH = "HIGH";
    /**
     * 'medium' encryption cipher suites, currently some of those using 128 bit encryption.
     */
    private static final String MEDIUM = "MEDIUM";
    /**
     * 'low' encryption cipher suites, currently those using 64 or 56 bit encryption algorithms but excluding export cipher
     * suites.
     */
    private static final String LOW = "LOW";
    /**
     * Export encryption algorithms. Including 40 and 56 bits algorithms.
     */
    private static final String EXPORT = "EXPORT";
    /**
     * 40 bit export encryption algorithms.
     */
    private static final String EXPORT40 = "EXPORT40";
    /**
     * 56 bit export encryption algorithms.
     */
    private static final String EXPORT56 = "EXPORT56";
    /**
     * Cipher suites using RSA key exchange.
     */
    private static final String kRSA = "kRSA";
    /**
     * Cipher suites using RSA authentication.
     */
    private static final String aRSA = "aRSA";
    /**
     * Cipher suites using RSA for key exchange
     * Despite what the docs say, RSA is equivalent to kRSA.
     */
    private static final String RSA = "RSA";
    /**
     * Cipher suites using ephemeral DH key agreement.
     */
    private static final String kEDH = "kEDH";
    /**
     * Cipher suites using ephemeral DH key agreement.
     */
    private static final String kDHE = "kDHE";
    /**
     * Cipher suites using ephemeral DH key agreement. equivalent to kEDH:-ADH
     */
    private static final String EDH = "EDH";
    /**
     * Cipher suites using ephemeral DH key agreement. equivalent to kEDH:-ADH
     */
    private static final String DHE = "DHE";
    /**
     * Cipher suites using DH key agreement and DH certificates signed by CAs with RSA keys.
     */
    private static final String kDHr = "kDHr";
    /**
     * Cipher suites using DH key agreement and DH certificates signed by CAs with DSS keys.
     */
    private static final String kDHd = "kDHd";
    /**
     * Cipher suites using DH key agreement and DH certificates signed by CAs with RSA or DSS keys.
     */
    private static final String kDH = "kDH";
    /**
     * Cipher suites using fixed ECDH key agreement signed by CAs with RSA keys.
     */
    private static final String kECDHr = "kECDHr";
    /**
     * Cipher suites using fixed ECDH key agreement signed by CAs with ECDSA keys.
     */
    private static final String kECDHe = "kECDHe";
    /**
     * Cipher suites using fixed ECDH key agreement signed by CAs with RSA and ECDSA keys or either respectively.
     */
    private static final String kECDH = "kECDH";
    /**
     * Cipher suites using ephemeral ECDH key agreement, including anonymous cipher suites.
     */
    private static final String kEECDH = "kEECDH";
    /**
     * Cipher suites using ephemeral ECDH key agreement, excluding anonymous cipher suites.
     * Same as "kEECDH:-AECDH"
     */
    private static final String EECDH = "EECDH";
    /**
     * Cipher suites using ECDH key exchange, including anonymous, ephemeral and fixed ECDH.
     */
    private static final String ECDH = "ECDH";
    /**
     * Cipher suites using ephemeral ECDH key agreement, including anonymous cipher suites.
     */
    private static final String kECDHE = "kECDHE";
    /**
     * Cipher suites using authenticated ephemeral ECDH key agreement
     */
    private static final String ECDHE = "ECDHE";
    /**
     * Cipher suites using authenticated ephemeral ECDH key agreement
     */
    private static final String EECDHE = "EECDHE";
    /**
     * Anonymous Elliptic Curve Diffie Hellman cipher suites.
     */
    private static final String AECDH = "AECDH";
    /**
     * Cipher suites using DSS for key exchange
     */
    private static final String DSS = "DSS";
    /**
     * Cipher suites using DSS authentication, i.e. the certificates carry DSS keys.
     */
    private static final String aDSS = "aDSS";
    /**
     * Cipher suites effectively using DH authentication, i.e. the certificates carry DH keys.
     */
    private static final String aDH = "aDH";
    /**
     * Cipher suites effectively using ECDH authentication, i.e. the certificates carry ECDH keys.
     */
    private static final String aECDH = "aECDH";
    /**
     * Cipher suites effectively using ECDSA authentication, i.e. the certificates carry ECDSA keys.
     */
    private static final String aECDSA = "aECDSA";
    /**
     * Cipher suites effectively using ECDSA authentication, i.e. the certificates carry ECDSA keys.
     */
    private static final String ECDSA = "ECDSA";
    /**
     * Ciphers suites using FORTEZZA key exchange algorithms.
     */
    private static final String kFZA = "kFZA";
    /**
     * Ciphers suites using FORTEZZA authentication algorithms.
     */
    private static final String aFZA = "aFZA";
    /**
     * Ciphers suites using FORTEZZA encryption algorithms.
     */
    private static final String eFZA = "eFZA";
    /**
     * Ciphers suites using all FORTEZZA algorithms.
     */
    private static final String FZA = "FZA";
    /**
     * Cipher suites using DH, including anonymous DH, ephemeral DH and fixed DH.
     */
    private static final String DH = "DH";
    /**
     * Anonymous DH cipher suites.
     */
    private static final String ADH = "ADH";
    /**
     * Cipher suites using 128 bit AES.
     */
    private static final String AES128 = "AES128";
    /**
     * Cipher suites using 256 bit AES.
     */
    private static final String AES256 = "AES256";
    /**
     * Cipher suites using either 128 or 256 bit AES.
     */
    private static final String AES = "AES";
    /**
     * AES in Galois Counter Mode (GCM): these cipher suites are only supported in TLS v1.2.
     */
    private static final String AESGCM = "AESGCM";
    /**
     * AES in Counter with CBC-MAC Mode (CCM).
     */
    private static final String AESCCM = "AESCCM";
    /**
     * AES in Counter with CBC-MAC Mode and 8-byte authentication (CCM8).
     */
    private static final String AESCCM8 = "AESCCM8";
    /**
     * Cipher suites using 128 bit CAMELLIA.
     */
    private static final String CAMELLIA128 = "CAMELLIA128";
    /**
     * Cipher suites using 256 bit CAMELLIA.
     */
    private static final String CAMELLIA256 = "CAMELLIA256";
    /**
     * Cipher suites using either 128 or 256 bit CAMELLIA.
     */
    private static final String CAMELLIA = "CAMELLIA";
    /**
     * Cipher suites using CHACHA20.
     */
    private static final String CHACHA20 = "CHACHA20";
    /**
     * Cipher suites using triple DES.
     */
    private static final String TRIPLE_DES = "3DES";
    /**
     * Cipher suites using DES (not triple DES).
     */
    private static final String DES = "DES";
    /**
     * Cipher suites using RC4.
     */
    private static final String RC4 = "RC4";
    /**
     * Cipher suites using RC2.
     */
    private static final String RC2 = "RC2";
    /**
     * Cipher suites using IDEA.
     */
    private static final String IDEA = "IDEA";
    /**
     * Cipher suites using SEED.
     */
    private static final String SEED = "SEED";
    /**
     * Cipher suites using MD5.
     */
    private static final String MD5 = "MD5";
    /**
     * Cipher suites using SHA1.
     */
    private static final String SHA1 = "SHA1";
    /**
     * Cipher suites using SHA1.
     */
    private static final String SHA = "SHA";
    /**
     * Cipher suites using SHA256.
     */
    private static final String SHA256 = "SHA256";
    /**
     * Cipher suites using SHA384.
     */
    private static final String SHA384 = "SHA384";
    /**
     * Cipher suites using KRB5.
     */
    private static final String KRB5 = "KRB5";
    /**
     * Cipher suites using GOST R 34.10 (either 2001 or 94) for authentication.
     */
    private static final String aGOST = "aGOST";
    /**
     * Cipher suites using GOST R 34.10-2001 for authentication.
     */
    private static final String aGOST01 = "aGOST01";
    /**
     * Cipher suites using GOST R 34.10-94 authentication (note that R 34.10-94 standard has been expired so use GOST R
     * 34.10-2001)
     */
    private static final String aGOST94 = "aGOST94";
    /**
     * Cipher suites using VKO 34.10 key exchange, specified in the RFC 4357.
     */
    private static final String kGOST = "kGOST";
    /**
     * Cipher suites, using HMAC based on GOST R 34.11-94.
     */
    private static final String GOST94 = "GOST94";
    /**
     * Cipher suites using GOST 28147-89 MAC instead of HMAC.
     */
    private static final String GOST89MAC = "GOST89MAC";
    /**
     * Cipher suites using SRP authentication, specified in the RFC 5054.
     */
    private static final String aSRP = "aSRP";
    /**
     * Cipher suites using SRP key exchange, specified in the RFC 5054.
     */
    private static final String kSRP = "kSRP";
    /**
     * Same as kSRP
     */
    private static final String SRP = "SRP";
    /**
     * Cipher suites using pre-shared keys (PSK).
     */
    private static final String PSK = "PSK";
    /**
     * Cipher suites using PSK authentication.
     */
    private static final String aPSK = "aPSK";
    /**
     * Cipher suites using PSK key 'exchange'.
     */
    private static final String kPSK = "kPSK";
    /** Cipher suites using RSA-based PSK key exchange. */
    private static final String kRSAPSK = "kRSAPSK";
    /** Cipher suites using ECDHE-based PSK key exchange. */
    private static final String kECDHEPSK = "kECDHEPSK";
    /** Cipher suites using DHE-based PSK key exchange. */
    private static final String kDHEPSK = "kDHEPSK";
    /** OpenSSL keyword for the default cipher list. */
    private static final String DEFAULT = "DEFAULT";
    /** OpenSSL keyword for ciphers in ALL but not in DEFAULT. */
    private static final String COMPLEMENTOFDEFAULT = "COMPLEMENTOFDEFAULT";
    /** OpenSSL keyword for all cipher suites except eNULL. */
    private static final String ALL = "ALL";
    /** OpenSSL keyword for suites not enabled by ALL (the eNULL suites). */
    private static final String COMPLEMENTOFALL = "COMPLEMENTOFALL";
    /* Maps JSSE cipher suite names to their OpenSSL alias (populated by init()). */
    private static final Map<String,String> jsseToOpenSSL = new HashMap<>();
private static final void init() {
for (Cipher cipher : Cipher.values()) {
String alias = cipher.getOpenSSLAlias();
if (aliases.containsKey(alias)) {
aliases.get(alias).add(cipher);
} else {
List<Cipher> list = new ArrayList<>();
list.add(cipher);
aliases.put(alias, list);
}
aliases.put(cipher.name(), Collections.singletonList(cipher));
for (String openSSlAltName : cipher.getOpenSSLAltNames()) {
if (aliases.containsKey(openSSlAltName)) {
aliases.get(openSSlAltName).add(cipher);
} else {
List<Cipher> list = new ArrayList<>();
list.add(cipher);
aliases.put(openSSlAltName, list);
}
}
jsseToOpenSSL.put(cipher.name(), cipher.getOpenSSLAlias());
Set<String> jsseNames = cipher.getJsseNames();
for (String jsseName : jsseNames) {
jsseToOpenSSL.put(jsseName, cipher.getOpenSSLAlias());
}
}
List<Cipher> allCiphersList = Arrays.asList(Cipher.values());
Collections.reverse(allCiphersList);
LinkedHashSet<Cipher> allCiphers = defaultSort(new LinkedHashSet<>(allCiphersList));
addListAlias(eNULL, filterByEncryption(allCiphers, Collections.singleton(Encryption.eNULL)));
LinkedHashSet<Cipher> all = new LinkedHashSet<>(allCiphers);
remove(all, eNULL);
addListAlias(ALL, all);
addListAlias(HIGH, filterByEncryptionLevel(allCiphers, Collections.singleton(EncryptionLevel.HIGH)));
addListAlias(MEDIUM, filterByEncryptionLevel(allCiphers, Collections.singleton(EncryptionLevel.MEDIUM)));
addListAlias(LOW, filterByEncryptionLevel(allCiphers, Collections.singleton(EncryptionLevel.LOW)));
addListAlias(EXPORT, filterByEncryptionLevel(allCiphers, new HashSet<>(Arrays.asList(EncryptionLevel.EXP40, EncryptionLevel.EXP56))));
aliases.put("EXP", aliases.get(EXPORT));
addListAlias(EXPORT40, filterByEncryptionLevel(allCiphers, Collections.singleton(EncryptionLevel.EXP40)));
addListAlias(EXPORT56, filterByEncryptionLevel(allCiphers, Collections.singleton(EncryptionLevel.EXP56)));
aliases.put("NULL", aliases.get(eNULL));
aliases.put(COMPLEMENTOFALL, aliases.get(eNULL));
addListAlias(aNULL, filterByAuthentication(allCiphers, Collections.singleton(Authentication.aNULL)));
addListAlias(kRSA, filterByKeyExchange(allCiphers, Collections.singleton(KeyExchange.RSA)));
addListAlias(aRSA, filterByAuthentication(allCiphers, Collections.singleton(Authentication.RSA)));
// Despite what the docs say, RSA is equivalent to kRSA
aliases.put(RSA, aliases.get(kRSA));
addListAlias(kEDH, filterByKeyExchange(allCiphers, Collections.singleton(KeyExchange.EDH)));
addListAlias(kDHE, filterByKeyExchange(allCiphers, Collections.singleton(KeyExchange.EDH)));
Set<Cipher> edh = filterByKeyExchange(allCiphers, Collections.singleton(KeyExchange.EDH));
edh.removeAll(filterByAuthentication(allCiphers, Collections.singleton(Authentication.aNULL)));
addListAlias(EDH, edh);
addListAlias(DHE, edh);
addListAlias(kDHr, filterByKeyExchange(allCiphers, Collections.singleton(KeyExchange.DHr)));
addListAlias(kDHd, filterByKeyExchange(allCiphers, Collections.singleton(KeyExchange.DHd)));
addListAlias(kDH, filterByKeyExchange(allCiphers, new HashSet<>(Arrays.asList(KeyExchange.DHr, KeyExchange.DHd))));
addListAlias(kECDHr, filterByKeyExchange(allCiphers, Collections.singleton(KeyExchange.ECDHr)));
addListAlias(kECDHe, filterByKeyExchange(allCiphers, Collections.singleton(KeyExchange.ECDHe)));
addListAlias(kECDH, filterByKeyExchange(allCiphers, new HashSet<>(Arrays.asList(KeyExchange.ECDHe, KeyExchange.ECDHr))));
addListAlias(ECDH, filterByKeyExchange(allCiphers, new HashSet<>(Arrays.asList(KeyExchange.ECDHe, KeyExchange.ECDHr, KeyExchange.EECDH))));
addListAlias(kECDHE, filterByKeyExchange(allCiphers, Collections.singleton(KeyExchange.EECDH)));
Set<Cipher> ecdhe = filterByKeyExchange(allCiphers, Collections.singleton(KeyExchange.EECDH));
remove(ecdhe, aNULL);
addListAlias(ECDHE, ecdhe);
addListAlias(kEECDH, filterByKeyExchange(allCiphers, Collections.singleton(KeyExchange.EECDH)));
aliases.put(EECDHE, aliases.get(kEECDH));
Set<Cipher> eecdh = filterByKeyExchange(allCiphers, Collections.singleton(KeyExchange.EECDH));
eecdh.removeAll(filterByAuthentication(allCiphers, Collections.singleton(Authentication.aNULL)));
addListAlias(EECDH, eecdh);
addListAlias(aDSS, filterByAuthentication(allCiphers, Collections.singleton(Authentication.DSS)));
aliases.put(DSS, aliases.get(aDSS));
addListAlias(aDH, filterByAuthentication(allCiphers, Collections.singleton(Authentication.DH)));
Set<Cipher> aecdh = filterByKeyExchange(allCiphers, Collections.singleton(KeyExchange.EECDH));
addListAlias(AECDH, filterByAuthentication(aecdh, Collections.singleton(Authentication.aNULL)));
addListAlias(aECDH, filterByAuthentication(allCiphers, Collections.singleton(Authentication.ECDH)));
addListAlias(ECDSA, filterByAuthentication(allCiphers, Collections.singleton(Authentication.ECDSA)));
aliases.put(aECDSA, aliases.get(ECDSA));
addListAlias(kFZA, filterByKeyExchange(allCiphers, Collections.singleton(KeyExchange.FZA)));
addListAlias(aFZA, filterByAuthentication(allCiphers, Collections.singleton(Authentication.FZA)));
addListAlias(eFZA, filterByEncryption(allCiphers, Collections.singleton(Encryption.FZA)));
addListAlias(FZA, filter(allCiphers, null, Collections.singleton(KeyExchange.FZA), Collections.singleton(Authentication.FZA), Collections.singleton(Encryption.FZA), null, null));
addListAlias(Constants.SSL_PROTO_TLSv1_2, filterByProtocol(allCiphers, Collections.singleton(Protocol.TLSv1_2)));
addListAlias(Constants.SSL_PROTO_TLSv1_0, filterByProtocol(allCiphers, Collections.singleton(Protocol.TLSv1)));
addListAlias(Constants.SSL_PROTO_SSLv3, filterByProtocol(allCiphers, Collections.singleton(Protocol.SSLv3)));
aliases.put(Constants.SSL_PROTO_TLSv1, aliases.get(Constants.SSL_PROTO_TLSv1_0));
addListAlias(Constants.SSL_PROTO_SSLv2, filterByProtocol(allCiphers, Collections.singleton(Protocol.SSLv2)));
addListAlias(DH, filterByKeyExchange(allCiphers, new HashSet<>(Arrays.asList(KeyExchange.DHr, KeyExchange.DHd, KeyExchange.EDH))));
Set<Cipher> adh = filterByKeyExchange(allCiphers, Collections.singleton(KeyExchange.EDH));
adh.retainAll(filterByAuthentication(allCiphers, Collections.singleton(Authentication.aNULL)));
addListAlias(ADH, adh);
addListAlias(AES128, filterByEncryption(allCiphers, new HashSet<>(Arrays.asList(Encryption.AES128, Encryption.AES128CCM, Encryption.AES128CCM8, Encryption.AES128GCM))));
addListAlias(AES256, filterByEncryption(allCiphers, new HashSet<>(Arrays.asList(Encryption.AES256, Encryption.AES256CCM, Encryption.AES256CCM8, Encryption.AES256GCM))));
addListAlias(AES, filterByEncryption(allCiphers, new HashSet<>(Arrays.asList(Encryption.AES128, Encryption.AES128CCM, Encryption.AES128CCM8, Encryption.AES128GCM, Encryption.AES256, Encryption.AES256CCM, Encryption.AES256CCM8, Encryption.AES256GCM))));
addListAlias(AESGCM, filterByEncryption(allCiphers, new HashSet<>(Arrays.asList(Encryption.AES128GCM, Encryption.AES256GCM))));
addListAlias(AESCCM, filterByEncryption(allCiphers, new HashSet<>(Arrays.asList(Encryption.AES128CCM, Encryption.AES128CCM8, Encryption.AES256CCM, Encryption.AES256CCM8))));
addListAlias(AESCCM8, filterByEncryption(allCiphers, new HashSet<>(Arrays.asList(Encryption.AES128CCM8, Encryption.AES256CCM8))));
addListAlias(CAMELLIA, filterByEncryption(allCiphers, new HashSet<>(Arrays.asList(Encryption.CAMELLIA128, Encryption.CAMELLIA256))));
addListAlias(CAMELLIA128, filterByEncryption(allCiphers, Collections.singleton(Encryption.CAMELLIA128)));
addListAlias(CAMELLIA256, filterByEncryption(allCiphers, Collections.singleton(Encryption.CAMELLIA256)));
addListAlias(CHACHA20, filterByEncryption(allCiphers, Collections.singleton(Encryption.CHACHA20POLY1305)));
addListAlias(TRIPLE_DES, filterByEncryption(allCiphers, Collections.singleton(Encryption.TRIPLE_DES)));
addListAlias(DES, filterByEncryption(allCiphers, Collections.singleton(Encryption.DES)));
addListAlias(RC4, filterByEncryption(allCiphers, Collections.singleton(Encryption.RC4)));
addListAlias(RC2, filterByEncryption(allCiphers, Collections.singleton(Encryption.RC2)));
addListAlias(IDEA, filterByEncryption(allCiphers, Collections.singleton(Encryption.IDEA)));
addListAlias(SEED, filterByEncryption(allCiphers, Collections.singleton(Encryption.SEED)));
addListAlias(MD5, filterByMessageDigest(allCiphers, Collections.singleton(MessageDigest.MD5)));
addListAlias(SHA1, filterByMessageDigest(allCiphers, Collections.singleton(MessageDigest.SHA1)));
aliases.put(SHA, aliases.get(SHA1));
addListAlias(SHA256, filterByMessageDigest(allCiphers, Collections.singleton(MessageDigest.SHA256)));
addListAlias(SHA384, filterByMessageDigest(allCiphers, Collections.singleton(MessageDigest.SHA384)));
addListAlias(aGOST, filterByAuthentication(allCiphers, new HashSet<>(Arrays.asList(Authentication.GOST01, Authentication.GOST94))));
addListAlias(aGOST01, filterByAuthentication(allCiphers, Collections.singleton(Authentication.GOST01)));
addListAlias(aGOST94, filterByAuthentication(allCiphers, Collections.singleton(Authentication.GOST94)));
addListAlias(kGOST, filterByKeyExchange(allCiphers, Collections.singleton(KeyExchange.GOST)));
addListAlias(GOST94, filterByMessageDigest(allCiphers, Collections.singleton(MessageDigest.GOST94)));
addListAlias(GOST89MAC, filterByMessageDigest(allCiphers, Collections.singleton(MessageDigest.GOST89MAC)));
addListAlias(PSK, filter(allCiphers, null, new HashSet<>(Arrays.asList(KeyExchange.PSK, KeyExchange.RSAPSK, KeyExchange.DHEPSK, KeyExchange.ECDHEPSK)), Collections.singleton(Authentication.PSK), null, null, null));
addListAlias(aPSK, filterByAuthentication(allCiphers, Collections.singleton(Authentication.PSK)));
addListAlias(kPSK, filterByKeyExchange(allCiphers, Collections.singleton(KeyExchange.PSK)));
addListAlias(kRSAPSK, filterByKeyExchange(allCiphers, Collections.singleton(KeyExchange.RSAPSK)));
addListAlias(kECDHEPSK, filterByKeyExchange(allCiphers, Collections.singleton(KeyExchange.ECDHEPSK)));
addListAlias(kDHEPSK, filterByKeyExchange(allCiphers, Collections.singleton(KeyExchange.DHEPSK)));
addListAlias(KRB5, filter(allCiphers, null, Collections.singleton(KeyExchange.KRB5), Collections.singleton(Authentication.KRB5), null, null, null));
addListAlias(aSRP, filterByAuthentication(allCiphers, Collections.singleton(Authentication.SRP)));
addListAlias(kSRP, filterByKeyExchange(allCiphers, Collections.singleton(KeyExchange.SRP)));
addListAlias(SRP, filterByKeyExchange(allCiphers, Collections.singleton(KeyExchange.SRP)));
initialized = true;
// Despite what the OpenSSL docs say, DEFAULT also excludes SSLv2
addListAlias(DEFAULT, parse("ALL:!EXPORT:!eNULL:!aNULL:!SSLv2:!DES:!RC2:!RC4:!DSS:!SEED:!IDEA:!CAMELLIA:!AESCCM:!3DES"));
// COMPLEMENTOFDEFAULT is also not exactly as defined by the docs
LinkedHashSet<Cipher> complementOfDefault = filterByKeyExchange(all, new HashSet<>(Arrays.asList(KeyExchange.EDH,KeyExchange.EECDH)));
complementOfDefault = filterByAuthentication(complementOfDefault, Collections.singleton(Authentication.aNULL));
complementOfDefault.removeAll(aliases.get(eNULL));
complementOfDefault.addAll(aliases.get(Constants.SSL_PROTO_SSLv2));
complementOfDefault.addAll(aliases.get(EXPORT));
complementOfDefault.addAll(aliases.get(DES));
complementOfDefault.addAll(aliases.get(TRIPLE_DES));
complementOfDefault.addAll(aliases.get(RC2));
complementOfDefault.addAll(aliases.get(RC4));
complementOfDefault.addAll(aliases.get(aDSS));
complementOfDefault.addAll(aliases.get(SEED));
complementOfDefault.addAll(aliases.get(IDEA));
complementOfDefault.addAll(aliases.get(CAMELLIA));
complementOfDefault.addAll(aliases.get(AESCCM));
defaultSort(complementOfDefault);
addListAlias(COMPLEMENTOFDEFAULT, complementOfDefault);
}
/** Registers {@code alias} as a defensive copy of the given cipher set. */
static void addListAlias(String alias, Set<Cipher> ciphers) {
    List<Cipher> snapshot = new ArrayList<>(ciphers);
    aliases.put(alias, snapshot);
}
/** Moves the ciphers registered under {@code alias} to the end of {@code ciphers}. */
static void moveToEnd(final LinkedHashSet<Cipher> ciphers, final String alias) {
    final Collection<Cipher> aliased = aliases.get(alias);
    moveToEnd(ciphers, aliased);
}
/**
 * Moves every cipher of {@code toBeMovedCiphers} that is already present in
 * {@code ciphers} to the tail of the set, preserving their relative order.
 */
static void moveToEnd(final LinkedHashSet<Cipher> ciphers, final Collection<Cipher> toBeMovedCiphers) {
    final List<Cipher> tail = new ArrayList<>();
    for (Cipher candidate : toBeMovedCiphers) {
        // Removing and re-adding pushes the entry to the end of the
        // LinkedHashSet iteration order; absent entries are ignored.
        if (ciphers.remove(candidate)) {
            tail.add(candidate);
        }
    }
    ciphers.addAll(tail);
}
/**
 * Moves every cipher of {@code toBeMovedCiphers} that is already present in
 * {@code ciphers} to the front of the set, preserving their relative order.
 */
static void moveToStart(final LinkedHashSet<Cipher> ciphers, final Collection<Cipher> toBeMovedCiphers) {
    final List<Cipher> reordered = new ArrayList<>(ciphers.size() + toBeMovedCiphers.size());
    for (Cipher candidate : toBeMovedCiphers) {
        if (ciphers.contains(candidate)) {
            reordered.add(candidate);
        }
    }
    // Appending the original contents afterwards keeps the remaining ciphers
    // in place; the set re-insertion dedupes the moved entries.
    reordered.addAll(ciphers);
    ciphers.clear();
    ciphers.addAll(reordered);
}
/** Adds all ciphers registered under {@code alias} to {@code ciphers}. */
static void add(final LinkedHashSet<Cipher> ciphers, final String alias) {
    final Collection<Cipher> aliased = aliases.get(alias);
    ciphers.addAll(aliased);
}
/** Removes all ciphers registered under {@code alias} from {@code ciphers}. */
static void remove(final Set<Cipher> ciphers, final String alias) {
    final Collection<Cipher> aliased = aliases.get(alias);
    ciphers.removeAll(aliased);
}
/**
 * Sorts the ciphers by descending key strength while keeping the pre-sorted
 * sequence stable inside each strength class: every strength group is shifted
 * to the end of the list in turn (the '+' movement of the normal sort).
 */
static LinkedHashSet<Cipher> strengthSort(final LinkedHashSet<Cipher> ciphers) {
    // Collect the distinct strengths present, then order them high-to-low.
    final Set<Integer> distinctBits = new HashSet<>();
    for (Cipher cipher : ciphers) {
        distinctBits.add(Integer.valueOf(cipher.getStrength_bits()));
    }
    final List<Integer> descendingBits = new ArrayList<>(distinctBits);
    Collections.sort(descendingBits, Collections.reverseOrder());
    // Shifting each group to the end, strongest first, yields a descending
    // ordering that is stable within each group.
    final LinkedHashSet<Cipher> sorted = new LinkedHashSet<>(ciphers);
    for (int bits : descendingBits) {
        moveToEnd(sorted, filterByStrengthBits(ciphers, bits));
    }
    return sorted;
}
/*
* See
* https://github.com/openssl/openssl/blob/7c96dbcdab959fef74c4caae63cdebaa354ab252/ssl/ssl_ciph.c#L1371
*/
/**
 * Applies the OpenSSL default cipher ordering (mirrors ssl_cipher_list's
 * preference logic): ephemeral-ECDH + AES first, then AES, then ECDH, then
 * everything else, with weak/anonymous/non-forward-secret ciphers demoted,
 * finishing with a strength sort. The statement order is significant.
 */
static LinkedHashSet<Cipher> defaultSort(final LinkedHashSet<Cipher> ciphers) {
    final LinkedHashSet<Cipher> result = new LinkedHashSet<>(ciphers.size());
    final LinkedHashSet<Cipher> ecdh = new LinkedHashSet<>(ciphers.size());
    /* Everything else being equal, prefer ephemeral ECDH over other key exchange mechanisms */
    ecdh.addAll(filterByKeyExchange(ciphers, Collections.singleton(KeyExchange.EECDH)));
    /* AES is our preferred symmetric cipher */
    Set<Encryption> aes = new HashSet<>(Arrays.asList(Encryption.AES128, Encryption.AES128CCM,
            Encryption.AES128CCM8, Encryption.AES128GCM, Encryption.AES256,
            Encryption.AES256CCM, Encryption.AES256CCM8, Encryption.AES256GCM));
    /* Now arrange all ciphers by preference: */
    result.addAll(filterByEncryption(ecdh, aes));
    result.addAll(filterByEncryption(ciphers, aes));
    /* Add everything else (LinkedHashSet ignores duplicates already added) */
    result.addAll(ecdh);
    result.addAll(ciphers);
    /* Low priority for MD5 */
    moveToEnd(result, filterByMessageDigest(result, Collections.singleton(MessageDigest.MD5)));
    /* Move anonymous ciphers to the end. Usually, these will remain disabled.
     * (For applications that allow them, they aren't too bad, but we prefer
     * authenticated ciphers.) */
    moveToEnd(result, filterByAuthentication(result, Collections.singleton(Authentication.aNULL)));
    /* Move ciphers without forward secrecy to the end */
    moveToEnd(result, filterByAuthentication(result, Collections.singleton(Authentication.ECDH)));
    moveToEnd(result, filterByKeyExchange(result, Collections.singleton(KeyExchange.RSA)));
    moveToEnd(result, filterByKeyExchange(result, Collections.singleton(KeyExchange.PSK)));
    /* RC4 is sort-of broken -- move to the end */
    moveToEnd(result, filterByEncryption(result, Collections.singleton(Encryption.RC4)));
    return strengthSort(result);
}
/** Returns the subset of {@code ciphers} whose strength equals {@code strength_bits}, in order. */
static Set<Cipher> filterByStrengthBits(Set<Cipher> ciphers, int strength_bits) {
    Set<Cipher> matching = new LinkedHashSet<>(ciphers.size());
    for (Cipher candidate : ciphers) {
        if (candidate.getStrength_bits() != strength_bits) {
            continue;
        }
        matching.add(candidate);
    }
    return matching;
}
/** Selects the ciphers usable with any of the given protocols. */
static Set<Cipher> filterByProtocol(Set<Cipher> ciphers, Set<Protocol> protocol) {
    return filter(ciphers, protocol, null, null, null, null, null);
}
/** Selects the ciphers using any of the given key-exchange algorithms. */
static LinkedHashSet<Cipher> filterByKeyExchange(Set<Cipher> ciphers, Set<KeyExchange> kx) {
    return filter(ciphers, null, kx, null, null, null, null);
}
/** Selects the ciphers using any of the given authentication mechanisms. */
static LinkedHashSet<Cipher> filterByAuthentication(Set<Cipher> ciphers, Set<Authentication> au) {
    return filter(ciphers, null, null, au, null, null, null);
}
/** Selects the ciphers using any of the given symmetric encryption algorithms. */
static Set<Cipher> filterByEncryption(Set<Cipher> ciphers, Set<Encryption> enc) {
    return filter(ciphers, null, null, null, enc, null, null);
}
/** Selects the ciphers at any of the given encryption strength levels. */
static Set<Cipher> filterByEncryptionLevel(Set<Cipher> ciphers, Set<EncryptionLevel> level) {
    return filter(ciphers, null, null, null, null, level, null);
}
/** Selects the ciphers using any of the given MAC / message digest algorithms. */
static Set<Cipher> filterByMessageDigest(Set<Cipher> ciphers, Set<MessageDigest> mac) {
    return filter(ciphers, null, null, null, null, null, mac);
}
/**
 * Returns the ciphers matching ANY of the supplied criteria (a {@code null}
 * criterion set is skipped). Note these are union semantics, not an
 * intersection: a cipher is included as soon as one non-null criterion
 * matches. Compound aliases such as FZA, PSK and KRB5 (see the init() calls
 * above) rely on this union behavior when passing several non-null sets.
 */
static LinkedHashSet<Cipher> filter(Set<Cipher> ciphers, Set<Protocol> protocol, Set<KeyExchange> kx,
        Set<Authentication> au, Set<Encryption> enc, Set<EncryptionLevel> level, Set<MessageDigest> mac) {
    LinkedHashSet<Cipher> result = new LinkedHashSet<>(ciphers.size());
    for (Cipher cipher : ciphers) {
        // Each criterion independently admits the cipher (set add dedupes).
        if (protocol != null && protocol.contains(cipher.getProtocol())) {
            result.add(cipher);
        }
        if (kx != null && kx.contains(cipher.getKx())) {
            result.add(cipher);
        }
        if (au != null && au.contains(cipher.getAu())) {
            result.add(cipher);
        }
        if (enc != null && enc.contains(cipher.getEnc())) {
            result.add(cipher);
        }
        if (level != null && level.contains(cipher.getLevel())) {
            result.add(cipher);
        }
        if (mac != null && mac.contains(cipher.getMac())) {
            result.add(cipher);
        }
    }
    return result;
}
/**
 * Parses an OpenSSL cipher expression into the ordered set of ciphers it
 * selects. Elements are separated by {@code SEPARATOR} and may carry the
 * OpenSSL prefixes: {@code DELETE} ('-', remove), {@code EXCLUDE} ('!',
 * permanently remove), {@code TO_END} ('+', demote), an {@code AND}-joined
 * intersection of aliases, or the {@code @STRENGTH} directive which sorts the
 * list by strength and terminates parsing.
 *
 * @param expression the OpenSSL cipher expression to parse
 * @return the ordered set of selected ciphers
 */
public static LinkedHashSet<Cipher> parse(String expression) {
    if (!initialized) {
        init();
    }
    String[] elements = expression.split(SEPARATOR);
    LinkedHashSet<Cipher> ciphers = new LinkedHashSet<>();
    // '!' exclusions are permanent: applied once at the end so later elements
    // cannot re-add the ciphers.
    Set<Cipher> removedCiphers = new HashSet<>();
    for (String element : elements) {
        if (element.startsWith(DELETE)) {
            String alias = element.substring(1);
            if (aliases.containsKey(alias)) {
                remove(ciphers, alias);
            }
            // NOTE(review): unknown aliases are silently ignored here but
            // logged in the EXCLUDE branch below — confirm this asymmetry is
            // intentional before changing it.
        } else if (element.startsWith(EXCLUDE)) {
            String alias = element.substring(1);
            if (aliases.containsKey(alias)) {
                removedCiphers.addAll(aliases.get(alias));
            } else {
                log.warn(sm.getString("jsse.openssl.unknownElement", alias));
            }
        } else if (element.startsWith(TO_END)) {
            String alias = element.substring(1);
            if (aliases.containsKey(alias)) {
                moveToEnd(ciphers, alias);
            }
        } else if ("@STRENGTH".equals(element)) {
            // BUG FIX: strengthSort() returns a new sorted set instead of
            // sorting in place (see its implementation), so the result must
            // be captured — previously it was discarded, making @STRENGTH a
            // no-op. Parsing stops here, as in OpenSSL.
            ciphers = strengthSort(ciphers);
            break;
        } else if (aliases.containsKey(element)) {
            add(ciphers, element);
        } else if (element.contains(AND)) {
            // "A&B"-style element: intersection of all named aliases.
            String[] intersections = element.split("\\" + AND);
            if (intersections.length > 0 && aliases.containsKey(intersections[0])) {
                List<Cipher> result = new ArrayList<>(aliases.get(intersections[0]));
                for (int i = 1; i < intersections.length; i++) {
                    if (aliases.containsKey(intersections[i])) {
                        result.retainAll(aliases.get(intersections[i]));
                    }
                }
                ciphers.addAll(result);
            }
        }
    }
    ciphers.removeAll(removedCiphers);
    return ciphers;
}
/**
 * Flattens the given ciphers into their JSSE cipher-suite names (one cipher
 * may expose several), preserving iteration order.
 *
 * @param ciphers the ciphers to convert
 * @return the JSSE names of all given ciphers
 */
public static List<String> convertForJSSE(Collection<Cipher> ciphers) {
    List<String> jsseNames = new ArrayList<>(ciphers.size());
    for (Cipher cipher : ciphers) {
        jsseNames.addAll(cipher.getJsseNames());
    }
    if (log.isDebugEnabled()) {
        log.debug(sm.getString("jsse.openssl.effectiveCiphers", displayResult(ciphers, true, ",")));
    }
    return jsseNames;
}
/**
* Parse the specified expression according to the OpenSSL syntax and
* returns a list of standard JSSE cipher names.
*
* @param expression the openssl expression to define a list of cipher.
* @return the corresponding list of ciphers.
*/
public static List<String> parseExpression(String expression) {
    // parse() resolves the OpenSSL expression to ciphers;
    // convertForJSSE() maps them to standard JSSE suite names.
    return convertForJSSE(parse(expression));
}
/**
* Converts a JSSE cipher name to an OpenSSL cipher name.
*
* @param jsseCipherName The JSSE name for a cipher
*
* @return The OpenSSL name for the specified JSSE cipher
*/
public static String jsseToOpenSSL(String jsseCipherName) {
    // Lazily build the lookup tables on first use.
    if (!initialized) {
        init();
    }
    // Returns null when the JSSE name has no known OpenSSL equivalent.
    return jsseToOpenSSL.get(jsseCipherName);
}
/**
 * Renders the ciphers as a separator-delimited string, using either their
 * JSSE names (a cipher may contribute several) or their OpenSSL aliases.
 *
 * @param ciphers the ciphers to display
 * @param useJSSEFormat {@code true} for JSSE names, {@code false} for OpenSSL aliases
 * @param separator the string placed between names
 * @return the joined names, or an empty string when {@code ciphers} is empty
 */
static String displayResult(Collection<Cipher> ciphers, boolean useJSSEFormat, String separator) {
    if (ciphers.isEmpty()) {
        return "";
    }
    StringBuilder builder = new StringBuilder(ciphers.size() * 16);
    for (Cipher cipher : ciphers) {
        if (useJSSEFormat) {
            for (String name : cipher.getJsseNames()) {
                builder.append(name);
                builder.append(separator);
            }
        } else {
            builder.append(cipher.getOpenSSLAlias());
            // BUG FIX: this append used to sit outside the if/else, so the
            // JSSE branch (which already appends a separator per name) emitted
            // doubled separators.
            builder.append(separator);
        }
    }
    // BUG FIX: trim the full trailing separator; the previous "length() - 1"
    // only removed one character and broke multi-character separators.
    int end = Math.max(0, builder.length() - separator.length());
    return builder.substring(0, end);
}
}
|
googleapis/google-cloud-java | 36,731 | java-retail/proto-google-cloud-retail-v2/src/main/java/com/google/cloud/retail/v2/ListControlsRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/retail/v2/control_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.retail.v2;
/**
*
*
* <pre>
* Request for ListControls method.
* </pre>
*
* Protobuf type {@code google.cloud.retail.v2.ListControlsRequest}
*/
public final class ListControlsRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.retail.v2.ListControlsRequest)
ListControlsRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListControlsRequest.newBuilder() to construct.
/** Builder-based constructor; invoked by the generated Builder via newBuilder(). */
private ListControlsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
}
/** Default constructor: string fields start at the empty string (proto3 defaults). */
private ListControlsRequest() {
    parent_ = "";
    pageToken_ = "";
    filter_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    // Called reflectively by the protobuf runtime to allocate fresh instances.
    return new ListControlsRequest();
}
/** Returns the descriptor for the {@code google.cloud.retail.v2.ListControlsRequest} message type. */
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.retail.v2.ControlServiceProto
        .internal_static_google_cloud_retail_v2_ListControlsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
    // Links field numbers to the generated accessors for reflective access.
    return com.google.cloud.retail.v2.ControlServiceProto
        .internal_static_google_cloud_retail_v2_ListControlsRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.retail.v2.ListControlsRequest.class,
            com.google.cloud.retail.v2.ListControlsRequest.Builder.class);
}
public static final int PARENT_FIELD_NUMBER = 1;

// Holds either a String or a ByteString; lazily decoded/encoded and cached
// by getParent()/getParentBytes().
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The catalog resource name. Format:
* `projects/{project_number}/locations/{location_id}/catalogs/{catalog_id}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
@java.lang.Override
public java.lang.String getParent() {
    // Lazily decode the UTF-8 bytes and cache the String on first access.
    java.lang.Object current = parent_;
    if (current instanceof java.lang.String) {
        return (java.lang.String) current;
    }
    com.google.protobuf.ByteString bytes = (com.google.protobuf.ByteString) current;
    java.lang.String decoded = bytes.toStringUtf8();
    parent_ = decoded;
    return decoded;
}
/**
*
*
* <pre>
* Required. The catalog resource name. Format:
* `projects/{project_number}/locations/{location_id}/catalogs/{catalog_id}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
    // Lazily encode the String to UTF-8 bytes and cache the result.
    java.lang.Object current = parent_;
    if (!(current instanceof java.lang.String)) {
        return (com.google.protobuf.ByteString) current;
    }
    com.google.protobuf.ByteString encoded =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) current);
    parent_ = encoded;
    return encoded;
}
public static final int PAGE_SIZE_FIELD_NUMBER = 2;

// Proto3 scalar; 0 is the unset/default value.
private int pageSize_ = 0;
/**
*
*
* <pre>
* Optional. Maximum number of results to return. If unspecified, defaults
* to 50. Max allowed value is 1000.
* </pre>
*
* <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
    // Plain scalar accessor; 0 is the proto3 default when unset.
    return pageSize_;
}
public static final int PAGE_TOKEN_FIELD_NUMBER = 3;

// Holds either a String or a ByteString; lazily converted and cached
// by getPageToken()/getPageTokenBytes().
@SuppressWarnings("serial")
private volatile java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* Optional. A page token, received from a previous `ListControls` call.
* Provide this to retrieve the subsequent page.
* </pre>
*
* <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The pageToken.
*/
@java.lang.Override
public java.lang.String getPageToken() {
    // Lazily decode the UTF-8 bytes and cache the String on first access.
    java.lang.Object current = pageToken_;
    if (current instanceof java.lang.String) {
        return (java.lang.String) current;
    }
    com.google.protobuf.ByteString bytes = (com.google.protobuf.ByteString) current;
    java.lang.String decoded = bytes.toStringUtf8();
    pageToken_ = decoded;
    return decoded;
}
/**
*
*
* <pre>
* Optional. A page token, received from a previous `ListControls` call.
* Provide this to retrieve the subsequent page.
* </pre>
*
* <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for pageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getPageTokenBytes() {
    // Lazily encode the String to UTF-8 bytes and cache the result.
    java.lang.Object current = pageToken_;
    if (!(current instanceof java.lang.String)) {
        return (com.google.protobuf.ByteString) current;
    }
    com.google.protobuf.ByteString encoded =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) current);
    pageToken_ = encoded;
    return encoded;
}
public static final int FILTER_FIELD_NUMBER = 4;

// Holds either a String or a ByteString; lazily converted and cached
// by getFilter()/getFilterBytes().
@SuppressWarnings("serial")
private volatile java.lang.Object filter_ = "";
/**
*
*
* <pre>
* Optional. A filter to apply on the list results. Supported features:
*
* * List all the products under the parent branch if
* [filter][google.cloud.retail.v2.ListControlsRequest.filter] is unset.
* * List controls that are used in a single ServingConfig:
* 'serving_config = "boosted_home_page_cvr"'
* </pre>
*
* <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The filter.
*/
@java.lang.Override
public java.lang.String getFilter() {
    // Lazily decode the UTF-8 bytes and cache the String on first access.
    java.lang.Object current = filter_;
    if (current instanceof java.lang.String) {
        return (java.lang.String) current;
    }
    com.google.protobuf.ByteString bytes = (com.google.protobuf.ByteString) current;
    java.lang.String decoded = bytes.toStringUtf8();
    filter_ = decoded;
    return decoded;
}
/**
*
*
* <pre>
* Optional. A filter to apply on the list results. Supported features:
*
* * List all the products under the parent branch if
* [filter][google.cloud.retail.v2.ListControlsRequest.filter] is unset.
* * List controls that are used in a single ServingConfig:
* 'serving_config = "boosted_home_page_cvr"'
* </pre>
*
* <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for filter.
*/
@java.lang.Override
public com.google.protobuf.ByteString getFilterBytes() {
    // Lazily encode the String to UTF-8 bytes and cache the result.
    java.lang.Object current = filter_;
    if (!(current instanceof java.lang.String)) {
        return (com.google.protobuf.ByteString) current;
    }
    com.google.protobuf.ByteString encoded =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) current);
    filter_ = encoded;
    return encoded;
}
// Tri-state cache for isInitialized(): -1 = not computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // This message declares no required fields, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Proto3 wire format: fields at their default value (empty string / zero)
    // are skipped entirely.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
        com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    if (pageSize_ != 0) {
        output.writeInt32(2, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
        com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
        com.google.protobuf.GeneratedMessageV3.writeString(output, 4, filter_);
    }
    // Preserve any fields that were unknown at parse time.
    getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
    // Memoized: -1 means "not yet computed"; mirrors the branches of writeTo().
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
        size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (pageSize_ != 0) {
        size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
        size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
        size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, filter_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
        return true;
    }
    if (!(obj instanceof com.google.cloud.retail.v2.ListControlsRequest)) {
        return super.equals(obj);
    }
    com.google.cloud.retail.v2.ListControlsRequest other =
        (com.google.cloud.retail.v2.ListControlsRequest) obj;
    // Field-by-field comparison, including unknown fields.
    if (!getParent().equals(other.getParent())) return false;
    if (getPageSize() != other.getPageSize()) return false;
    if (!getPageToken().equals(other.getPageToken())) return false;
    if (!getFilter().equals(other.getFilter())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
}
@java.lang.Override
public int hashCode() {
    // Cached after first computation; a computed value of 0 would simply be
    // recomputed on each call.
    if (memoizedHashCode != 0) {
        return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
    hash = (53 * hash) + getPageSize();
    hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getPageToken().hashCode();
    hash = (37 * hash) + FILTER_FIELD_NUMBER;
    hash = (53 * hash) + getFilter().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
}
// -------------------------------------------------------------------------
// Standard generated parse entry points. The buffer/byte[] overloads
// delegate directly to PARSER; the stream overloads go through the
// GeneratedMessageV3 helpers, which translate protocol errors into
// IOException/InvalidProtocolBufferException consistently.
// -------------------------------------------------------------------------
public static com.google.cloud.retail.v2.ListControlsRequest parseFrom(java.nio.ByteBuffer data)
    throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
}

public static com.google.cloud.retail.v2.ListControlsRequest parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.retail.v2.ListControlsRequest parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
}

public static com.google.cloud.retail.v2.ListControlsRequest parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.retail.v2.ListControlsRequest parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
}

public static com.google.cloud.retail.v2.ListControlsRequest parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.retail.v2.ListControlsRequest parseFrom(java.io.InputStream input)
    throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.retail.v2.ListControlsRequest parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
}

// The "delimited" variants read a leading varint length prefix, allowing
// several messages to be streamed back-to-back.
public static com.google.cloud.retail.v2.ListControlsRequest parseDelimitedFrom(
    java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}

public static com.google.cloud.retail.v2.ListControlsRequest parseDelimitedFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
}

public static com.google.cloud.retail.v2.ListControlsRequest parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.retail.v2.ListControlsRequest parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
}
// -------------------------------------------------------------------------
// Builder factory methods.
// -------------------------------------------------------------------------
@java.lang.Override
public Builder newBuilderForType() {
    return newBuilder();
}

/** Returns a fresh Builder with all fields at their defaults. */
public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
}

/** Returns a Builder pre-populated from {@code prototype}. */
public static Builder newBuilder(com.google.cloud.retail.v2.ListControlsRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}

@java.lang.Override
public Builder toBuilder() {
    // The default instance yields an empty builder; otherwise copy this message.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}

@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
}
/**
*
*
* <pre>
* Request for ListControls method.
* </pre>
*
* Protobuf type {@code google.cloud.retail.v2.ListControlsRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.retail.v2.ListControlsRequest)
com.google.cloud.retail.v2.ListControlsRequestOrBuilder {
/** Returns the descriptor for the {@code google.cloud.retail.v2.ListControlsRequest} message type. */
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.retail.v2.ControlServiceProto
        .internal_static_google_cloud_retail_v2_ListControlsRequest_descriptor;
}

@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
    // Links field numbers to the generated accessors for reflective access.
    return com.google.cloud.retail.v2.ControlServiceProto
        .internal_static_google_cloud_retail_v2_ListControlsRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.retail.v2.ListControlsRequest.class,
            com.google.cloud.retail.v2.ListControlsRequest.Builder.class);
}

// Construct using com.google.cloud.retail.v2.ListControlsRequest.newBuilder()
private Builder() {}

/** Parent-aware constructor used by nested-builder plumbing. */
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
parent_ = "";
pageSize_ = 0;
pageToken_ = "";
filter_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.retail.v2.ControlServiceProto
.internal_static_google_cloud_retail_v2_ListControlsRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.retail.v2.ListControlsRequest getDefaultInstanceForType() {
return com.google.cloud.retail.v2.ListControlsRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.retail.v2.ListControlsRequest build() {
com.google.cloud.retail.v2.ListControlsRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.retail.v2.ListControlsRequest buildPartial() {
com.google.cloud.retail.v2.ListControlsRequest result =
new com.google.cloud.retail.v2.ListControlsRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.retail.v2.ListControlsRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.parent_ = parent_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.pageSize_ = pageSize_;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.pageToken_ = pageToken_;
}
if (((from_bitField0_ & 0x00000008) != 0)) {
result.filter_ = filter_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.retail.v2.ListControlsRequest) {
return mergeFrom((com.google.cloud.retail.v2.ListControlsRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.retail.v2.ListControlsRequest other) {
if (other == com.google.cloud.retail.v2.ListControlsRequest.getDefaultInstance()) return this;
if (!other.getParent().isEmpty()) {
parent_ = other.parent_;
bitField0_ |= 0x00000001;
onChanged();
}
if (other.getPageSize() != 0) {
setPageSize(other.getPageSize());
}
if (!other.getPageToken().isEmpty()) {
pageToken_ = other.pageToken_;
bitField0_ |= 0x00000004;
onChanged();
}
if (!other.getFilter().isEmpty()) {
filter_ = other.filter_;
bitField0_ |= 0x00000008;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
parent_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 16:
{
pageSize_ = input.readInt32();
bitField0_ |= 0x00000002;
break;
} // case 16
case 26:
{
pageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000004;
break;
} // case 26
case 34:
{
filter_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000008;
break;
} // case 34
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The catalog resource name. Format:
* `projects/{project_number}/locations/{location_id}/catalogs/{catalog_id}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The catalog resource name. Format:
* `projects/{project_number}/locations/{location_id}/catalogs/{catalog_id}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The catalog resource name. Format:
* `projects/{project_number}/locations/{location_id}/catalogs/{catalog_id}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
public Builder setParent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The catalog resource name. Format:
* `projects/{project_number}/locations/{location_id}/catalogs/{catalog_id}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearParent() {
parent_ = getDefaultInstance().getParent();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The catalog resource name. Format:
* `projects/{project_number}/locations/{location_id}/catalogs/{catalog_id}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
*/
public Builder setParentBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private int pageSize_;
/**
*
*
* <pre>
* Optional. Maximum number of results to return. If unspecified, defaults
* to 50. Max allowed value is 1000.
* </pre>
*
* <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
/**
*
*
* <pre>
* Optional. Maximum number of results to return. If unspecified, defaults
* to 50. Max allowed value is 1000.
* </pre>
*
* <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The pageSize to set.
* @return This builder for chaining.
*/
public Builder setPageSize(int value) {
pageSize_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. Maximum number of results to return. If unspecified, defaults
* to 50. Max allowed value is 1000.
* </pre>
*
* <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearPageSize() {
bitField0_ = (bitField0_ & ~0x00000002);
pageSize_ = 0;
onChanged();
return this;
}
private java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* Optional. A page token, received from a previous `ListControls` call.
* Provide this to retrieve the subsequent page.
* </pre>
*
* <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The pageToken.
*/
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Optional. A page token, received from a previous `ListControls` call.
* Provide this to retrieve the subsequent page.
* </pre>
*
* <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for pageToken.
*/
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Optional. A page token, received from a previous `ListControls` call.
* Provide this to retrieve the subsequent page.
* </pre>
*
* <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
pageToken_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. A page token, received from a previous `ListControls` call.
* Provide this to retrieve the subsequent page.
* </pre>
*
* <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearPageToken() {
pageToken_ = getDefaultInstance().getPageToken();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. A page token, received from a previous `ListControls` call.
* Provide this to retrieve the subsequent page.
* </pre>
*
* <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The bytes for pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
pageToken_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
private java.lang.Object filter_ = "";
/**
*
*
* <pre>
* Optional. A filter to apply on the list results. Supported features:
*
* * List all the products under the parent branch if
* [filter][google.cloud.retail.v2.ListControlsRequest.filter] is unset.
* * List controls that are used in a single ServingConfig:
* 'serving_config = "boosted_home_page_cvr"'
* </pre>
*
* <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The filter.
*/
public java.lang.String getFilter() {
java.lang.Object ref = filter_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
filter_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Optional. A filter to apply on the list results. Supported features:
*
* * List all the products under the parent branch if
* [filter][google.cloud.retail.v2.ListControlsRequest.filter] is unset.
* * List controls that are used in a single ServingConfig:
* 'serving_config = "boosted_home_page_cvr"'
* </pre>
*
* <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for filter.
*/
public com.google.protobuf.ByteString getFilterBytes() {
java.lang.Object ref = filter_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
filter_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Optional. A filter to apply on the list results. Supported features:
*
* * List all the products under the parent branch if
* [filter][google.cloud.retail.v2.ListControlsRequest.filter] is unset.
* * List controls that are used in a single ServingConfig:
* 'serving_config = "boosted_home_page_cvr"'
* </pre>
*
* <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The filter to set.
* @return This builder for chaining.
*/
public Builder setFilter(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
filter_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. A filter to apply on the list results. Supported features:
*
* * List all the products under the parent branch if
* [filter][google.cloud.retail.v2.ListControlsRequest.filter] is unset.
* * List controls that are used in a single ServingConfig:
* 'serving_config = "boosted_home_page_cvr"'
* </pre>
*
* <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearFilter() {
filter_ = getDefaultInstance().getFilter();
bitField0_ = (bitField0_ & ~0x00000008);
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. A filter to apply on the list results. Supported features:
*
* * List all the products under the parent branch if
* [filter][google.cloud.retail.v2.ListControlsRequest.filter] is unset.
* * List controls that are used in a single ServingConfig:
* 'serving_config = "boosted_home_page_cvr"'
* </pre>
*
* <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The bytes for filter to set.
* @return This builder for chaining.
*/
public Builder setFilterBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
filter_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.retail.v2.ListControlsRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.retail.v2.ListControlsRequest)
  // Shared immutable default instance; all default getters and empty builders derive from it.
  private static final com.google.cloud.retail.v2.ListControlsRequest DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.retail.v2.ListControlsRequest();
  }
  /** Returns the singleton default (all-fields-default) instance of this message. */
  public static com.google.cloud.retail.v2.ListControlsRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser implementation: delegates to Builder.mergeFrom and attaches the partially
  // built message to any parse failure so callers can inspect what was read.
  private static final com.google.protobuf.Parser<ListControlsRequest> PARSER =
      new com.google.protobuf.AbstractParser<ListControlsRequest>() {
        @java.lang.Override
        public ListControlsRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  /** Returns the shared parser for {@code ListControlsRequest}. */
  public static com.google.protobuf.Parser<ListControlsRequest> parser() {
    return PARSER;
  }
  /** Returns the shared parser for this message type (instance accessor). */
  @java.lang.Override
  public com.google.protobuf.Parser<ListControlsRequest> getParserForType() {
    return PARSER;
  }
  /** Returns the default instance of this message type (instance accessor). */
  @java.lang.Override
  public com.google.cloud.retail.v2.ListControlsRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}